From c7b8d09c7f180da5922801450fe0ae6a0f802377 Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Mon, 20 Apr 2015 10:38:18 -0700 Subject: Add packagerepos to spack, allowing for creating multiple package repositories. --- lib/spack/spack/__init__.py | 6 ++ lib/spack/spack/build_environment.py | 25 +++++- lib/spack/spack/cmd/create.py | 12 +++ lib/spack/spack/cmd/packagerepo.py | 85 ++++++++++++++++++++ lib/spack/spack/packages.py | 147 ++++++++++++++++++++++++----------- lib/spack/spack/repo_loader.py | 115 +++++++++++++++++++++++++++ var/spack/packages/reponame | 1 + 7 files changed, 343 insertions(+), 48 deletions(-) create mode 100644 lib/spack/spack/cmd/packagerepo.py create mode 100644 lib/spack/spack/repo_loader.py create mode 100644 var/spack/packages/reponame diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index caa09eb6e0..1d67b45341 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -62,6 +62,12 @@ mock_config_path = join_path(var_path, "mock_configs") mock_site_config = join_path(mock_config_path, "site_spackconfig") mock_user_config = join_path(mock_config_path, "user_spackconfig") +# +# Setup the spack.repos namespace +# +from spack.repo_loader import RepoNamespace +repos = RepoNamespace() + # # This controls how spack lays out install prefixes and # stage directories. diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index a133faa629..03a4930259 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -157,7 +157,7 @@ def set_build_environment_variables(pkg): path_set("PKG_CONFIG_PATH", pkg_config_dirs) -def set_module_variables_for_package(pkg): +def set_module_variables_for_package(pkg, m): """Populate the module scope of install() with some useful functions. This makes things easier for package writers. """ @@ -228,11 +228,32 @@ def get_rpaths(pkg): return rpaths +def parent_class_modules(cls): + """Get list of super class modules that are all descend from spack.Package""" + if not issubclass(cls, spack.Package) or issubclass(spack.Package, cls): + return [] + result = [] + module = sys.modules.get(cls.__module__) + if module: + result = [ module ] + for c in cls.__bases__: + result.extend(parent_class_modules(c)) + return result + + def setup_package(pkg): """Execute all environment setup routines.""" set_compiler_environment_variables(pkg) set_build_environment_variables(pkg) - set_module_variables_for_package(pkg) + + # If a user makes their own package repo, e.g. + # spack.repos.mystuff.libelf.Libelf, and they inherit from + # an existing class like spack.repos.original.libelf.Libelf, + # then set the module variables for both classes so the + # parent class can still use them if it gets called. + modules = parent_class_modules(pkg.__class__) + for mod in modules: + set_module_variables_for_package(pkg, mod) # Allow dependencies to set up environment as well. 
for dep_spec in pkg.spec.traverse(root=False): diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 46e6bcec14..1502942f2c 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -93,6 +93,9 @@ def setup_parser(subparser): subparser.add_argument( '-n', '--name', dest='alternate_name', default=None, help="Override the autodetected name for the created package.") + subparser.add_argument( + '-p', '--package-repo', dest='package_repo', default=None, + help="Create the package in the specified packagerepo.") subparser.add_argument( '-f', '--force', action='store_true', dest='force', help="Overwrite any existing package file with the same name.") @@ -160,12 +163,21 @@ def create(parser, args): tty.die("Couldn't guess a name for this package. Try running:", "", "spack create --name ") + package_repo = args.package_repo + if not valid_module_name(name): tty.die("Package name can only contain A-Z, a-z, 0-9, '_' and '-'") tty.msg("This looks like a URL for %s version %s." % (name, version)) tty.msg("Creating template for package %s" % name) + # Create a directory for the new package. + pkg_path = spack.db.filename_for_package_name(name, package_repo) + if os.path.exists(pkg_path) and not args.force: + tty.die("%s already exists." % pkg_path) + else: + mkdirp(os.path.dirname(pkg_path)) + versions = spack.package.find_versions_of_archive(url) rkeys = sorted(versions.keys(), reverse=True) versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys))) diff --git a/lib/spack/spack/cmd/packagerepo.py b/lib/spack/spack/cmd/packagerepo.py new file mode 100644 index 0000000000..66bad0ecbf --- /dev/null +++ b/lib/spack/spack/cmd/packagerepo.py @@ -0,0 +1,85 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from external import argparse + +import llnl.util.tty as tty +from llnl.util.tty.color import colorize +from llnl.util.tty.colify import colify +from llnl.util.lang import index_by + +import spack.spec +import spack.config +from spack.util.environment import get_path + +description = "Manage package sources" + +def setup_parser(subparser): + sp = subparser.add_subparsers( + metavar='SUBCOMMAND', dest='packagerepo_command') + + add_parser = sp.add_parser('add', help=packagerepo_add.__doc__) + add_parser.add_argument('directory', help="Directory containing the packages.") + + remove_parser = sp.add_parser('remove', help=packagerepo_remove.__doc__) + remove_parser.add_argument('name') + + list_parser = sp.add_parser('list', help=packagerepo_list.__doc__) + + +def packagerepo_add(args): + """Add package sources to the Spack configuration.""" + config = spack.config.get_config() + user_config = spack.config.get_config('user') + orig = None + if config.has_value('packagerepo', '', 'directories'): + orig = config.get_value('packagerepo', '', 'directories') + if orig and args.directory in orig.split(':'): + tty.die('Repo directory %s already exists in the repo list' % args.directory) + + newsetting = orig + ':' + args.directory if orig else args.directory + user_config.set_value('packagerepo', '', 'directories', newsetting) + user_config.write() + + +def packagerepo_remove(args): + """Remove a package source from the Spack configuration""" + pass + + +def packagerepo_list(args): + """List package sources and their mnemoics""" + root_names = spack.db.repos + max_len = max(len(s[0]) for s in root_names) + fmt = "%%-%ds%%s" % (max_len + 4) + for root in root_names: + print fmt % (root[0], root[1]) + + + +def packagerepo(parser, args): + action = { 'add' : packagerepo_add, + 'remove' : packagerepo_remove, + 'list' : packagerepo_list } + action[args.packagerepo_command](args) diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index adfbc26c1d..79dbd60703 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -23,10 +23,14 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os +import exceptions import sys import inspect import glob import imp +import spack.config +import re +from contextlib import closing import llnl.util.tty as tty from llnl.util.filesystem import join_path @@ -36,13 +40,11 @@ import spack.error import spack.spec from spack.virtual import ProviderIndex from spack.util.naming import mod_to_class, validate_module_name +from sets import Set +from spack.repo_loader import RepoLoader, imported_packages_module, package_file_name -# Name of module under which packages are imported -_imported_packages_module = 'spack.packages' - -# Name of the package file inside a package directory -_package_file_name = 'package.py' - +# Filename for package repo names +_packagerepo_filename = 'reponame' def _autospec(function): """Decorator that automatically converts the argument of a single-arg @@ -55,13 +57,57 @@ def _autospec(function): class PackageDB(object): - def __init__(self, root): + def __init__(self, default_root): """Construct a new package database from a root 
directory.""" - self.root = root + + #Collect the repos from the config file and read their names from the file system + repo_dirs = self._repo_list_from_config() + repo_dirs.append(default_root) + self.repos = [(self._read_reponame_from_directory(dir), dir) for dir in repo_dirs] + + # Check for duplicate repo names + s = set() + dups = set(r for r in self.repos if r[0] in s or s.add(r[0])) + if dups: + reponame = list(dups)[0][0] + dir1 = list(dups)[0][1] + dir2 = dict(s)[reponame] + tty.die("Package repo %s in directory %s has the same name as the " + "repo in directory %s" % + (reponame, dir1, dir2)) + + # For each repo, create a RepoLoader + self.repo_loaders = dict([(r[0], RepoLoader(r[0], r[1])) for r in self.repos]) + self.instances = {} self.provider_index = None + def _read_reponame_from_directory(self, dir): + """For a packagerepo directory, read the repo name from the dir/reponame file""" + path = os.path.join(dir, 'reponame') + + try: + with closing(open(path, 'r')) as reponame_file: + name = reponame_file.read().lstrip().rstrip() + if not re.match(r'[a-zA-Z][a-zA-Z0-9]+', name): + tty.die("Package repo name '%s', read from %s, is an invalid name. " + "Repo names must began with a letter and only contain letters " + "and numbers." % (name, path)) + return name + except exceptions.IOError, e: + tty.die("Could not read from package repo name file %s" % path) + + + + def _repo_list_from_config(self): + """Read through the spackconfig and return the list of packagerepo directories""" + config = spack.config.get_config() + if not config.has_option('packagerepo', 'directories'): return [] + dir_string = config.get('packagerepo', 'directories') + return dir_string.split(':') + + @_autospec def get(self, spec, **kwargs): if spec.virtual: @@ -130,13 +176,33 @@ class PackageDB(object): # catching exceptions. - def dirname_for_package_name(self, pkg_name): + def repo_for_package_name(self, pkg_name, packagerepo_name=None): + """Find the dirname for a package and the packagerepo it came from + if packagerepo_name is not None, then search for the package in the + specified packagerepo""" + #Look for an existing package under any matching packagerepos + roots = [pkgrepo for pkgrepo in self.repos + if not packagerepo_name or packagerepo_name == pkgrepo[0]] + + if not roots: + tty.die("Package repo %s does not exist" % packagerepo_name) + + for pkgrepo in roots: + path = join_path(pkgrepo[1], pkg_name) + if os.path.exists(path): + return (pkgrepo[0], path) + + repo_to_add_to = roots[-1] + return (repo_to_add_to[0], join_path(repo_to_add_to[1], pkg_name)) + + + def dirname_for_package_name(self, pkg_name, packagerepo_name=None): """Get the directory name for a particular package. This is the directory that contains its package.py file.""" - return join_path(self.root, pkg_name) + return self.repo_for_package_name(pkg_name, packagerepo_name)[1] - def filename_for_package_name(self, pkg_name): + def filename_for_package_name(self, pkg_name, packagerepo_name=None): """Get the filename for the module we should load for a particular package. Packages for a pacakge DB live in ``$root//package.py`` @@ -144,10 +210,15 @@ class PackageDB(object): This will return a proper package.py path even if the package doesn't exist yet, so callers will need to ensure the package exists before importing. + + If a packagerepo is specified, then return existing + or new paths in the specified packagerepo directory. 
If no + package repo is supplied, return an existing path from any + package repo, and new paths in the default package repo. """ validate_module_name(pkg_name) - pkg_dir = self.dirname_for_package_name(pkg_name) - return join_path(pkg_dir, _package_file_name) + pkg_dir = self.dirname_for_package_name(pkg_name, packagerepo_name) + return join_path(pkg_dir, package_file_name) def installed_package_specs(self): @@ -176,14 +247,19 @@ class PackageDB(object): @memoized def all_package_names(self): """Generator function for all packages. This looks for - ``/package.py`` files within the root direcotry""" - all_package_names = [] - for pkg_name in os.listdir(self.root): - pkg_dir = join_path(self.root, pkg_name) - pkg_file = join_path(pkg_dir, _package_file_name) - if os.path.isfile(pkg_file): - all_package_names.append(pkg_name) - all_package_names.sort() + ``/package.py`` files within the repo direcotories""" + all_packages = Set() + for repo in self.repos: + dir = repo[1] + if not os.path.isdir(dir): + continue + for pkg_name in os.listdir(dir): + pkg_dir = join_path(dir, pkg_name) + pkg_file = join_path(pkg_dir, package_file_name) + if os.path.isfile(pkg_file): + all_packages.add(pkg_name) + all_package_names = list(all_packages) + all_package_names.sort() return all_package_names @@ -200,34 +276,13 @@ class PackageDB(object): @memoized def get_class_for_package_name(self, pkg_name): - """Get an instance of the class for a particular package. - - This method uses Python's ``imp`` package to load python - source from a Spack package's ``package.py`` file. A - normal python import would only load each package once, but - because we do this dynamically, the method needs to be - memoized to ensure there is only ONE package class - instance, per package, per database. - """ - file_path = self.filename_for_package_name(pkg_name) + """Get an instance of the class for a particular package.""" + repo = self.repo_for_package_name(pkg_name) + module_name = imported_packages_module + '.' + repo[0] + '.' + pkg_name - if os.path.exists(file_path): - if not os.path.isfile(file_path): - tty.die("Something's wrong. '%s' is not a file!" % file_path) - if not os.access(file_path, os.R_OK): - tty.die("Cannot read '%s'!" % file_path) - else: - raise UnknownPackageError(pkg_name) + module = self.repo_loaders[repo[0]].get_module(pkg_name) class_name = mod_to_class(pkg_name) - try: - module_name = _imported_packages_module + '.' + pkg_name - module = imp.load_source(module_name, file_path) - - except ImportError, e: - tty.die("Error while importing %s from %s:\n%s" % ( - pkg_name, file_path, e.message)) - cls = getattr(module, class_name) if not inspect.isclass(cls): tty.die("%s.%s is not a class" % (pkg_name, class_name)) diff --git a/lib/spack/spack/repo_loader.py b/lib/spack/spack/repo_loader.py new file mode 100644 index 0000000000..57c19a6c28 --- /dev/null +++ b/lib/spack/spack/repo_loader.py @@ -0,0 +1,115 @@ +import spack +import spack.repos +import re +import types +from llnl.util.lang import * + +# Name of module under which packages are imported +imported_packages_module = 'spack.repos' + +# Name of the package file inside a package directory +package_file_name = 'package.py' + +import sys +class LazyLoader: + """The LazyLoader handles cases when repo modules or classes + are imported. 
It watches for 'spack.repos.*' loads, then + redirects the load to the appropriate module.""" + def find_module(self, fullname, pathname): + if not fullname.startswith(imported_packages_module): + return None + partial_name = fullname[len(imported_packages_module)+1:] + repo = partial_name.split('.')[0] + module = partial_name.split('.')[1] + repo_loader = spack.db.repo_loaders.get(repo) + if repo_loader: + try: + self.mod = repo_loader.get_module(module) + return self + except (ImportError, spack.packages.UnknownPackageError): + return None + + def load_module(self, fullname): + return self.mod + +sys.meta_path.append(LazyLoader()) + +_reponames = {} +class RepoNamespace(types.ModuleType): + """The RepoNamespace holds the repository namespaces under + spack.repos. For example, when accessing spack.repos.original + this class will use __getattr__ to translate the 'original' + into one of spack's known repositories""" + def __init__(self): + import sys + sys.modules[imported_packages_module] = self + + def __getattr__(self, name): + if name in _reponames: + return _reponames[name] + raise AttributeError + + @property + def __file__(self): + return None + + @property + def __path__(self): + return [] + + +class RepoLoader(types.ModuleType): + """Each RepoLoader is associated with a repository, and the RepoLoader is + responsible for loading packages out of that repository. For example, + a RepoLoader may be responsible for spack.repos.original, and when someone + references spack.repos.original.libelf that RepoLoader will load the + libelf package.""" + def __init__(self, reponame, repopath): + self.path = repopath + self.reponame = reponame + self.module_name = imported_packages_module + '.' + reponame + if not reponame in _reponames: + _reponames[reponame] = self + spack.repos.add_repo(reponame, self) + + import sys + sys.modules[self.module_name] = self + + + @property + def __path__(self): + return [ self.path ] + + + def __getattr__(self, name): + if name[0] == '_': + raise AttributeError + return self.get_module(name) + + + @memoized + def get_module(self, pkg_name): + import os + import imp + import llnl.util.tty as tty + + file_path = os.path.join(self.path, pkg_name, package_file_name) + if os.path.exists(file_path): + if not os.path.isfile(file_path): + tty.die("Something's wrong. '%s' is not a file!" % file_path) + if not os.access(file_path, os.R_OK): + tty.die("Cannot read '%s'!" % file_path) + else: + raise spack.packages.UnknownPackageError(pkg_name) + + try: + module_name = imported_packages_module + '.' + self.reponame + '.' 
+ pkg_name + module = imp.load_source(module_name, file_path) + + except ImportError, e: + tty.die("Error while importing %s from %s:\n%s" % ( + pkg_name, file_path, e.message)) + + return module + + diff --git a/var/spack/packages/reponame b/var/spack/packages/reponame new file mode 100644 index 0000000000..4b48deed3a --- /dev/null +++ b/var/spack/packages/reponame @@ -0,0 +1 @@ +original -- cgit v1.2.3-70-g09d2 From 7ea328659f6bbaff33319427ba7c6321cc33a637 Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Mon, 20 Apr 2015 13:27:03 -0700 Subject: Record package repo origins in .spec files --- lib/spack/spack/packages.py | 35 ++++++++++++++++++++--------------- lib/spack/spack/repo_loader.py | 2 +- lib/spack/spack/spec.py | 29 +++++++++++++++++++++++++---- 3 files changed, 46 insertions(+), 20 deletions(-) diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index 79dbd60703..48705948b7 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -73,12 +73,12 @@ class PackageDB(object): dir1 = list(dups)[0][1] dir2 = dict(s)[reponame] tty.die("Package repo %s in directory %s has the same name as the " - "repo in directory %s" % + "repo in directory %s" % (reponame, dir1, dir2)) # For each repo, create a RepoLoader self.repo_loaders = dict([(r[0], RepoLoader(r[0], r[1])) for r in self.repos]) - + self.instances = {} self.provider_index = None @@ -87,13 +87,13 @@ class PackageDB(object): """For a packagerepo directory, read the repo name from the dir/reponame file""" path = os.path.join(dir, 'reponame') - try: + try: with closing(open(path, 'r')) as reponame_file: - name = reponame_file.read().lstrip().rstrip() + name = reponame_file.read().lstrip().rstrip() if not re.match(r'[a-zA-Z][a-zA-Z0-9]+', name): tty.die("Package repo name '%s', read from %s, is an invalid name. " "Repo names must began with a letter and only contain letters " - "and numbers." % (name, path)) + "and numbers." % (name, path)) return name except exceptions.IOError, e: tty.die("Could not read from package repo name file %s" % path) @@ -107,7 +107,7 @@ class PackageDB(object): dir_string = config.get('packagerepo', 'directories') return dir_string.split(':') - + @_autospec def get(self, spec, **kwargs): if spec.virtual: @@ -118,7 +118,7 @@ class PackageDB(object): del self.instances[spec] if not spec in self.instances: - package_class = self.get_class_for_package_name(spec.name) + package_class = self.get_class_for_package_name(spec.name, spec.repo) try: copy = spec.copy() self.instances[copy] = package_class(copy) @@ -191,7 +191,7 @@ class PackageDB(object): path = join_path(pkgrepo[1], pkg_name) if os.path.exists(path): return (pkgrepo[0], path) - + repo_to_add_to = roots[-1] return (repo_to_add_to[0], join_path(repo_to_add_to[1], pkg_name)) @@ -259,7 +259,7 @@ class PackageDB(object): if os.path.isfile(pkg_file): all_packages.add(pkg_name) all_package_names = list(all_packages) - all_package_names.sort() + all_package_names.sort() return all_package_names @@ -275,12 +275,12 @@ class PackageDB(object): @memoized - def get_class_for_package_name(self, pkg_name): + def get_class_for_package_name(self, pkg_name, reponame = None): """Get an instance of the class for a particular package.""" - repo = self.repo_for_package_name(pkg_name) - module_name = imported_packages_module + '.' + repo[0] + '.' + pkg_name + (reponame, repodir) = self.repo_for_package_name(pkg_name, reponame) + module_name = imported_packages_module + '.' + reponame + '.' 
+ pkg_name - module = self.repo_loaders[repo[0]].get_module(pkg_name) + module = self.repo_loaders[reponame].get_module(pkg_name) class_name = mod_to_class(pkg_name) cls = getattr(module, class_name) @@ -292,8 +292,13 @@ class PackageDB(object): class UnknownPackageError(spack.error.SpackError): """Raised when we encounter a package spack doesn't have.""" - def __init__(self, name): - super(UnknownPackageError, self).__init__("Package '%s' not found." % name) + def __init__(self, name, repo=None): + msg = None + if repo: + msg = "Package %s not found in packagerepo %s." % (name, repo) + else: + msg = "Package %s not found." % name + super(UnknownPackageError, self).__init__(msg) self.name = name diff --git a/lib/spack/spack/repo_loader.py b/lib/spack/spack/repo_loader.py index 57c19a6c28..6eaa1eead2 100644 --- a/lib/spack/spack/repo_loader.py +++ b/lib/spack/spack/repo_loader.py @@ -100,7 +100,7 @@ class RepoLoader(types.ModuleType): if not os.access(file_path, os.R_OK): tty.die("Cannot read '%s'!" % file_path) else: - raise spack.packages.UnknownPackageError(pkg_name) + raise spack.packages.UnknownPackageError(pkg_name, self.reponame if self.reponame != 'original' else None) try: module_name = imported_packages_module + '.' + self.reponame + '.' + pkg_name diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index e1fbb84423..972ba9ccbb 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -112,6 +112,7 @@ from spack.version import * from spack.util.string import * from spack.util.prefix import Prefix from spack.virtual import ProviderIndex +from spack.repo_loader import imported_packages_module # Valid pattern for an identifier in Spack identifier_re = r'\w[\w-]*' @@ -412,6 +413,7 @@ class Spec(object): self.dependencies = other.dependencies self.variants = other.variants self.variants.spec = self + self.repo = other.repo # Specs are by default not assumed to be normal, but in some # cases we've read them from a file want to assume normal. @@ -1355,6 +1357,7 @@ class Spec(object): self.dependencies = DependencyMap() self.variants = other.variants.copy() self.variants.spec = self + self.repo = other.repo # If we copy dependencies, preserve DAG structure in the new spec if kwargs.get('deps', True): @@ -1503,6 +1506,7 @@ class Spec(object): in the format string. The format strings you can provide are:: $_ Package name + $. Long package name $@ Version $% Compiler $%@ Compiler & compiler version @@ -1550,6 +1554,9 @@ class Spec(object): if c == '_': out.write(fmt % self.name) + elif c == '.': + longname = '%s.%s.%s' % (imported_packages_module, self.repo, self.name) if self.repo else self.name + out.write(fmt % longname) elif c == '@': if self.versions and self.versions != _any_version: write(fmt % (c + str(self.versions)), c) @@ -1698,17 +1705,29 @@ class SpecParser(spack.parse.Parser): def spec(self): """Parse a spec out of the input. If a spec is supplied, then initialize and return it instead of creating a new one.""" - self.check_identifier() + + spec_name = None + spec_repo = None + if self.token.value.startswith(imported_packages_module): + lst = self.token.value.split('.') + spec_name = lst[-1] + spec_repo = lst[-2] + else: + spec_name = self.token.value + (spec_repo, repodir) = spack.db.repo_for_package_name(spec_name) + + self.check_identifier(spec_name) # This will init the spec without calling __init__. 
spec = Spec.__new__(Spec) - spec.name = self.token.value + spec.name = spec_name spec.versions = VersionList() spec.variants = VariantMap(spec) spec.architecture = None spec.compiler = None spec.dependents = DependencyMap() spec.dependencies = DependencyMap() + spec.repo = spec_repo spec._normal = False spec._concrete = False @@ -1802,12 +1821,14 @@ class SpecParser(spack.parse.Parser): return compiler - def check_identifier(self): + def check_identifier(self, id=None): """The only identifiers that can contain '.' are versions, but version ids are context-sensitive so we have to check on a case-by-case basis. Call this if we detect a version id where it shouldn't be. """ - if '.' in self.token.value: + if not id: + id = self.token.value + if '.' in id: self.last_token_error("Identifier cannot contain '.'") -- cgit v1.2.3-70-g09d2 From e58ee88a632a0855a984cba8f0faa725b8f2eddf Mon Sep 17 00:00:00 2001 From: Matthew LeGendre Date: Mon, 20 Apr 2015 13:50:09 -0700 Subject: Add 'spack packagerepo create' command --- lib/spack/spack/cmd/packagerepo.py | 51 +++++++++++++++++++++++++++++++++----- lib/spack/spack/packages.py | 4 +-- 2 files changed, 47 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/cmd/packagerepo.py b/lib/spack/spack/cmd/packagerepo.py index 66bad0ecbf..2819d0f980 100644 --- a/lib/spack/spack/cmd/packagerepo.py +++ b/lib/spack/spack/cmd/packagerepo.py @@ -23,15 +23,20 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from external import argparse - import llnl.util.tty as tty from llnl.util.tty.color import colorize from llnl.util.tty.colify import colify from llnl.util.lang import index_by +from llnl.util.filesystem import join_path, mkdirp import spack.spec import spack.config from spack.util.environment import get_path +from spack.packages import packagerepo_filename + +import os +import exceptions +from contextlib import closing description = "Manage package sources" @@ -41,6 +46,10 @@ def setup_parser(subparser): add_parser = sp.add_parser('add', help=packagerepo_add.__doc__) add_parser.add_argument('directory', help="Directory containing the packages.") + + create_parser = sp.add_parser('create', help=packagerepo_create.__doc__) + create_parser.add_argument('directory', help="Directory containing the packages.") + create_parser.add_argument('name', help="Name of new package repository.") remove_parser = sp.add_parser('remove', help=packagerepo_remove.__doc__) remove_parser.add_argument('name') @@ -48,19 +57,48 @@ def setup_parser(subparser): list_parser = sp.add_parser('list', help=packagerepo_list.__doc__) -def packagerepo_add(args): - """Add package sources to the Spack configuration.""" +def add_to_config(dir): config = spack.config.get_config() user_config = spack.config.get_config('user') orig = None if config.has_value('packagerepo', '', 'directories'): orig = config.get_value('packagerepo', '', 'directories') - if orig and args.directory in orig.split(':'): - tty.die('Repo directory %s already exists in the repo list' % args.directory) + if orig and dir in orig.split(':'): + return False - newsetting = orig + ':' + args.directory if orig else args.directory + newsetting = orig + ':' + dir if orig else dir user_config.set_value('packagerepo', '', 'directories', newsetting) user_config.write() + return True + + +def packagerepo_add(args): + """Add package sources to the Spack configuration.""" + if not add_to_config(args.directory): + tty.die('Repo directory %s 
already exists in the repo list' % dir) + + +def packagerepo_create(args): + """Create a new package repo at a directory and name""" + dir = args.directory + name = args.name + + if os.path.exists(dir) and not os.path.isdir(dir): + tty.die('File %s already exists and is not a directory' % dir) + if not os.path.exists(dir): + try: + mkdirp(dir) + except exceptions.OSError, e: + tty.die('Failed to create new directory %s' % dir) + path = os.path.join(dir, packagerepo_filename) + try: + with closing(open(path, 'w')) as repofile: + repofile.write(name + '\n') + except exceptions.IOError, e: + tty.die('Could not create new file %s' % path) + + if not add_to_config(args.directory): + tty.warn('Repo directory %s already exists in the repo list' % dir) def packagerepo_remove(args): @@ -80,6 +118,7 @@ def packagerepo_list(args): def packagerepo(parser, args): action = { 'add' : packagerepo_add, + 'create' : packagerepo_create, 'remove' : packagerepo_remove, 'list' : packagerepo_list } action[args.packagerepo_command](args) diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index 48705948b7..3b9d74dd6e 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -44,7 +44,7 @@ from sets import Set from spack.repo_loader import RepoLoader, imported_packages_module, package_file_name # Filename for package repo names -_packagerepo_filename = 'reponame' +packagerepo_filename = 'reponame' def _autospec(function): """Decorator that automatically converts the argument of a single-arg @@ -85,7 +85,7 @@ class PackageDB(object): def _read_reponame_from_directory(self, dir): """For a packagerepo directory, read the repo name from the dir/reponame file""" - path = os.path.join(dir, 'reponame') + path = os.path.join(dir, packagerepo_filename) try: with closing(open(path, 'r')) as reponame_file: -- cgit v1.2.3-70-g09d2 From da98b07624e2403807166e8a9d0dac3752f75c0f Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 6 Jul 2015 19:57:31 -0700 Subject: Add more options to `spack edit` --- lib/spack/spack/cmd/edit.py | 35 +++++++++++++++++++++-------------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/lib/spack/spack/cmd/edit.py b/lib/spack/spack/cmd/edit.py index b8764ba391..9081d12516 100644 --- a/lib/spack/spack/cmd/edit.py +++ b/lib/spack/spack/cmd/edit.py @@ -78,9 +78,18 @@ def setup_parser(subparser): subparser.add_argument( '-f', '--force', dest='force', action='store_true', help="Open a new file in $EDITOR even if package doesn't exist.") - subparser.add_argument( - '-c', '--command', dest='edit_command', action='store_true', - help="Edit the command with the supplied name instead of a package.") + + filetypes = subparser.add_mutually_exclusive_group() + filetypes.add_argument( + '-c', '--command', dest='path', action='store_const', + const=spack.cmd.command_path, help="Edit the command with the supplied name.") + filetypes.add_argument( + '-t', '--test', dest='path', action='store_const', + const=spack.test_path, help="Edit the test with the supplied name.") + filetypes.add_argument( + '-m', '--module', dest='path', action='store_const', + const=spack.module_path, help="Edit the main spack module with the supplied name.") + subparser.add_argument( 'name', nargs='?', default=None, help="name of package to edit") @@ -88,19 +97,17 @@ def setup_parser(subparser): def edit(parser, args): name = args.name - if args.edit_command: - if not name: - path = spack.cmd.command_path - else: - path = join_path(spack.cmd.command_path, name + ".py") - if not 
os.path.exists(path): + path = spack.packages_path + if args.path: + path = args.path + if name: + path = join_path(path, name + ".py") + if not args.force and not os.path.exists(path): tty.die("No command named '%s'." % name) spack.editor(path) + elif name: + edit_package(name, args.force) else: # By default open the directory where packages or commands live. - if not name: - path = spack.packages_path - spack.editor(path) - else: - edit_package(name, args.force) + spack.editor(path) -- cgit v1.2.3-70-g09d2 From 92f398a897949e3b586abc38a934ae9d5cf1163c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 7 Jul 2015 00:25:20 -0700 Subject: Better `@memoized` decorator. --- lib/spack/llnl/util/lang.py | 33 +++++++++++++++++++++++++-------- 1 file changed, 25 insertions(+), 8 deletions(-) diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index 9e1bef18ca..be6dad867e 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -26,6 +26,7 @@ import os import re import sys import functools +import collections import inspect # Ignore emacs backups when listing modules @@ -170,16 +171,32 @@ def has_method(cls, name): return False -def memoized(obj): +class memoized(object): """Decorator that caches the results of a function, storing them in an attribute of that function.""" - cache = obj.cache = {} - @functools.wraps(obj) - def memoizer(*args, **kwargs): - if args not in cache: - cache[args] = obj(*args, **kwargs) - return cache[args] - return memoizer + def __init__(self, func): + self.func = func + self.cache = {} + + + def __call__(self, *args): + if not isinstance(args, collections.Hashable): + # Not hashable, so just call the function. + return self.func(*args) + + if args not in self.cache: + self.cache[args] = self.func(*args) + return self.cache[args] + + + def __get__(self, obj, objtype): + """Support instance methods.""" + return functools.partial(self.__call__, obj) + + + def clear(self): + """Expunge cache so that self.func will be called again.""" + self.cache.clear() def list_modules(directory, **kwargs): -- cgit v1.2.3-70-g09d2 From a2f2e6a4ff60882d8a93754ee10e8e75245cf430 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 19 Jul 2015 21:55:27 -0700 Subject: Save changes to external repo integration --- lib/spack/spack/__init__.py | 16 +-- lib/spack/spack/cmd/packagerepo.py | 124 ---------------------- lib/spack/spack/cmd/repo.py | 129 +++++++++++++++++++++++ lib/spack/spack/config.py | 206 +++++++++++++++++++++++-------------- lib/spack/spack/packages.py | 117 +++++++++++++-------- lib/spack/spack/repo_loader.py | 35 +++++-- lib/spack/spack/test/config.py | 82 +++++++++++---- var/spack/mock_packages/repo.yaml | 2 + var/spack/packages/repo.yaml | 2 + var/spack/packages/reponame | 1 - 10 files changed, 430 insertions(+), 284 deletions(-) delete mode 100644 lib/spack/spack/cmd/packagerepo.py create mode 100644 lib/spack/spack/cmd/repo.py create mode 100644 var/spack/mock_packages/repo.yaml create mode 100644 var/spack/packages/repo.yaml delete mode 100644 var/spack/packages/reponame diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 1d67b45341..09bc9ca52a 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -47,11 +47,17 @@ install_path = join_path(opt_path, "spack") share_path = join_path(prefix, "share", "spack") # -# Set up the packages database. 
+# Setup the spack.repos namespace +# +from spack.repo_loader import RepoNamespace +repos = RepoNamespace() + +# +# Set up the default packages database. # from spack.packages import PackageDB packages_path = join_path(var_path, "packages") -db = PackageDB(packages_path) +db = PackageDB() # # Paths to mock files for testing. @@ -62,12 +68,6 @@ mock_config_path = join_path(var_path, "mock_configs") mock_site_config = join_path(mock_config_path, "site_spackconfig") mock_user_config = join_path(mock_config_path, "user_spackconfig") -# -# Setup the spack.repos namespace -# -from spack.repo_loader import RepoNamespace -repos = RepoNamespace() - # # This controls how spack lays out install prefixes and # stage directories. diff --git a/lib/spack/spack/cmd/packagerepo.py b/lib/spack/spack/cmd/packagerepo.py deleted file mode 100644 index 2819d0f980..0000000000 --- a/lib/spack/spack/cmd/packagerepo.py +++ /dev/null @@ -1,124 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from external import argparse -import llnl.util.tty as tty -from llnl.util.tty.color import colorize -from llnl.util.tty.colify import colify -from llnl.util.lang import index_by -from llnl.util.filesystem import join_path, mkdirp - -import spack.spec -import spack.config -from spack.util.environment import get_path -from spack.packages import packagerepo_filename - -import os -import exceptions -from contextlib import closing - -description = "Manage package sources" - -def setup_parser(subparser): - sp = subparser.add_subparsers( - metavar='SUBCOMMAND', dest='packagerepo_command') - - add_parser = sp.add_parser('add', help=packagerepo_add.__doc__) - add_parser.add_argument('directory', help="Directory containing the packages.") - - create_parser = sp.add_parser('create', help=packagerepo_create.__doc__) - create_parser.add_argument('directory', help="Directory containing the packages.") - create_parser.add_argument('name', help="Name of new package repository.") - - remove_parser = sp.add_parser('remove', help=packagerepo_remove.__doc__) - remove_parser.add_argument('name') - - list_parser = sp.add_parser('list', help=packagerepo_list.__doc__) - - -def add_to_config(dir): - config = spack.config.get_config() - user_config = spack.config.get_config('user') - orig = None - if config.has_value('packagerepo', '', 'directories'): - orig = config.get_value('packagerepo', '', 'directories') - if orig and dir in orig.split(':'): - return False - - newsetting = orig + ':' + dir if orig else dir - user_config.set_value('packagerepo', '', 'directories', newsetting) - user_config.write() - return True - - -def packagerepo_add(args): - """Add package sources to the Spack configuration.""" - if not add_to_config(args.directory): - tty.die('Repo directory %s already exists in the repo list' % dir) - - -def packagerepo_create(args): - """Create a new package repo at a directory and name""" - dir = args.directory - name = args.name - - if os.path.exists(dir) and not os.path.isdir(dir): - tty.die('File %s already exists and is not a directory' % dir) - if not os.path.exists(dir): - try: - mkdirp(dir) - except exceptions.OSError, e: - tty.die('Failed to create new directory %s' % dir) - path = os.path.join(dir, packagerepo_filename) - try: - with closing(open(path, 'w')) as repofile: - repofile.write(name + '\n') - except exceptions.IOError, e: - tty.die('Could not create new file %s' % path) - - if not add_to_config(args.directory): - tty.warn('Repo directory %s already exists in the repo list' % dir) - - -def packagerepo_remove(args): - """Remove a package source from the Spack configuration""" - pass - - -def packagerepo_list(args): - """List package sources and their mnemoics""" - root_names = spack.db.repos - max_len = max(len(s[0]) for s in root_names) - fmt = "%%-%ds%%s" % (max_len + 4) - for root in root_names: - print fmt % (root[0], root[1]) - - - -def packagerepo(parser, args): - action = { 'add' : packagerepo_add, - 'create' : packagerepo_create, - 'remove' : packagerepo_remove, - 'list' : packagerepo_list } - action[args.packagerepo_command](args) diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py new file mode 100644 index 0000000000..1261c7ada9 --- /dev/null +++ 
b/lib/spack/spack/cmd/repo.py @@ -0,0 +1,129 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from external import argparse +import llnl.util.tty as tty +from llnl.util.tty.color import colorize +from llnl.util.tty.colify import colify +from llnl.util.lang import index_by +from llnl.util.filesystem import join_path, mkdirp + +import spack.spec +import spack.config +from spack.util.environment import get_path +from spack.packages import repo_config + +import os +import exceptions +from contextlib import closing + +description = "Manage package sources" + +def setup_parser(subparser): + sp = subparser.add_subparsers( + metavar='SUBCOMMAND', dest='repo_command') + + add_parser = sp.add_parser('add', help=repo_add.__doc__) + add_parser.add_argument('directory', help="Directory containing the packages.") + + create_parser = sp.add_parser('create', help=repo_create.__doc__) + create_parser.add_argument('directory', help="Directory containing the packages.") + create_parser.add_argument('name', help="Name of new package repository.") +<<<<<<< HEAD:lib/spack/spack/cmd/packagerepo.py + + remove_parser = sp.add_parser('remove', help=packagerepo_remove.__doc__) +======= + + remove_parser = sp.add_parser('remove', help=repo_remove.__doc__) +>>>>>>> Save changes to external repo integration:lib/spack/spack/cmd/repo.py + remove_parser.add_argument('name') + + list_parser = sp.add_parser('list', help=repo_list.__doc__) + + +def add_to_config(dir): + config = spack.config.get_config() + user_config = spack.config.get_config('user') + orig = None + if config.has_value('repo', '', 'directories'): + orig = config.get_value('repo', '', 'directories') + if orig and dir in orig.split(':'): + return False + + newsetting = orig + ':' + dir if orig else dir + user_config.set_value('repo', '', 'directories', newsetting) + user_config.write() + return True + + +def repo_add(args): + """Add package sources to the Spack configuration.""" + if not add_to_config(args.directory): + tty.die('Repo directory %s already exists in the repo list' % dir) + + +def repo_create(args): + """Create a new package repo at a directory and name""" + dir = args.directory + name = args.name + + if os.path.exists(dir) and not os.path.isdir(dir): + tty.die('File %s already exists and is not a directory' % dir) + if not os.path.exists(dir): + try: + mkdirp(dir) + 
except exceptions.OSError, e: + tty.die('Failed to create new directory %s' % dir) + path = os.path.join(dir, repo_config) + try: + with closing(open(path, 'w')) as repofile: + repofile.write(name + '\n') + except exceptions.IOError, e: + tty.die('Could not create new file %s' % path) + + if not add_to_config(args.directory): + tty.warn('Repo directory %s already exists in the repo list' % dir) + + +def repo_remove(args): + """Remove a package source from the Spack configuration""" + pass + + +def repo_list(args): + """List package sources and their mnemoics""" + root_names = spack.db.repos + max_len = max(len(s[0]) for s in root_names) + fmt = "%%-%ds%%s" % (max_len + 4) + for root in root_names: + print fmt % (root[0], root[1]) + + + +def repo(parser, args): + action = { 'add' : repo_add, + 'create' : repo_create, + 'remove' : repo_remove, + 'list' : repo_list } + action[args.repo_command](args) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 3e91958c2c..dc59f9a5a3 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -45,11 +45,11 @@ several configuration files, such as compilers.yaml or mirrors.yaml. Configuration file format =============================== -Configuration files are formatted using YAML syntax. -This format is implemented by Python's -yaml class, and it's easy to read and versatile. +Configuration files are formatted using YAML syntax. This format is +implemented by libyaml (included with Spack as an external module), +and it's easy to read and versatile. -The config files are structured as trees, like this ``compiler`` section:: +Config files are structured as trees, like this ``compiler`` section:: compilers: chaos_5_x86_64_ib: @@ -83,62 +83,73 @@ would looks like: } } -Some routines, like get_mirrors_config and get_compilers_config may strip -off the top-levels of the tree and return subtrees. +Some convenience functions, like get_mirrors_config and +``get_compilers_config`` may strip off the top-levels of the tree and +return subtrees. 
+ """ import os -import exceptions import sys - -from external.ordereddict import OrderedDict -from llnl.util.lang import memoized -import spack.error - +import copy from external import yaml from external.yaml.error import MarkedYAMLError + import llnl.util.tty as tty from llnl.util.filesystem import mkdirp +from llnl.util.lang import memoized + +import spack + _config_sections = {} class _ConfigCategory: name = None filename = None merge = True - def __init__(self, n, f, m): - self.name = n - self.filename = f - self.merge = m + def __init__(self, name, filename, merge, strip): + self.name = name + self.filename = filename + self.merge = merge + self.strip = strip self.files_read_from = [] self.result_dict = {} - _config_sections[n] = self + _config_sections[name] = self -_ConfigCategory('compilers', 'compilers.yaml', True) -_ConfigCategory('mirrors', 'mirrors.yaml', True) -_ConfigCategory('view', 'views.yaml', True) -_ConfigCategory('order', 'orders.yaml', True) +_ConfigCategory('config', 'config.yaml', True, False) +_ConfigCategory('compilers', 'compilers.yaml', True, True) +_ConfigCategory('mirrors', 'mirrors.yaml', True, True) +_ConfigCategory('view', 'views.yaml', True, True) +_ConfigCategory('order', 'orders.yaml', True, True) """Names of scopes and their corresponding configuration files.""" config_scopes = [('site', os.path.join(spack.etc_path, 'spack')), ('user', os.path.expanduser('~/.spack'))] _compiler_by_arch = {} -_read_config_file_result = {} + +@memoized def _read_config_file(filename): - """Read a given YAML configuration file""" - global _read_config_file_result - if filename in _read_config_file_result: - return _read_config_file_result[filename] + """Read a YAML configuration file""" + + # Ignore nonexisting files. + if not os.path.exists(filename): + return None + + elif not os.path.isfile(filename): + tty.die("Invlaid configuration. %s exists but is not a file." % filename) + + elif not os.access(filename, os.R_OK): + tty.die("Configuration file %s is not readable." % filename) try: with open(filename) as f: - ydict = yaml.load(f) + return yaml.load(f) + except MarkedYAMLError, e: tty.die("Error parsing yaml%s: %s" % (str(e.context_mark), e.problem)) - except exceptions.IOError, e: - _read_config_file_result[filename] = None - return None - _read_config_file_result[filename] = ydict - return ydict + + except IOError, e: + tty.die("Error reading configuration file %s: %s" % (filename, str(e))) def clear_config_caches(): @@ -147,41 +158,66 @@ def clear_config_caches(): for key,s in _config_sections.iteritems(): s.files_read_from = [] s.result_dict = {} - spack.config._read_config_file_result = {} + + _read_config_file.clear() spack.config._compiler_by_arch = {} spack.compilers._cached_default_compiler = None -def _merge_dicts(d1, d2): - """Recursively merges two configuration trees, with entries - in d2 taking precedence over d1""" - if not d1: - return d2.copy() - if not d2: - return d1 +def _merge_yaml(dest, source): + """Merges source into dest; entries in source take precedence over dest. 
- for key2, val2 in d2.iteritems(): - if not key2 in d1: - d1[key2] = val2 - continue - val1 = d1[key2] - if isinstance(val1, dict) and isinstance(val2, dict): - d1[key2] = _merge_dicts(val1, val2) - continue - if isinstance(val1, list) and isinstance(val2, list): - val1.extend(val2) - seen = set() - d1[key2] = [ x for x in val1 if not (x in seen or seen.add(x)) ] - continue - d1[key2] = val2 - return d1 + Config file authors can optionally end any attribute in a dict + with `::` instead of `:`, and the key will override that of the + parent instead of merging. + """ + def they_are(t): + return isinstance(dest, t) and isinstance(source, t) + # If both are None, handle specially and return None. + if source is None and dest is None: + return None -def get_config(category_name): - """Get the confguration tree for the names category. Strips off the - top-level category entry from the dict""" - global config_scopes - category = _config_sections[category_name] + # If source is None, overwrite with source. + elif source is None: + return None + + # Source list is prepended (for precedence) + if they_are(list): + seen = set(source) + dest[:] = source + [x for x in dest if x not in seen] + return dest + + # Source dict is merged into dest. Extra ':' means overwrite. + elif they_are(dict): + for sk, sv in source.iteritems(): + # allow total override with, e.g., repos:: + override = sk.endswith(':') + if override: + sk = sk.rstrip(':') + + if override or not sk in dest: + dest[sk] = copy.copy(sv) + else: + dest[sk] = _merge_yaml(dest[sk], source[sk]) + return dest + + # In any other case, overwrite with a copy of the source value. + else: + return copy.copy(source) + + +def substitute_spack_prefix(path): + """Replaces instances of $spack with Spack's prefix.""" + return path.replace('$spack', spack.prefix) + + +def get_config(category='config'): + """Get the confguration tree for a category. 
+ + Strips off the top-level category entry from the dict + """ + category = _config_sections[category] if category.result_dict: return category.result_dict @@ -191,14 +227,18 @@ def get_config(category_name): result = _read_config_file(path) if not result: continue - if not category_name in result: - continue + + if category.strip: + if not category.name in result: + continue + result = result[category.name] + category.files_read_from.insert(0, path) - result = result[category_name] if category.merge: - category.result_dict = _merge_dicts(category.result_dict, result) + category.result_dict = _merge_yaml(category.result_dict, result) else: category.result_dict = result + return category.result_dict @@ -215,7 +255,7 @@ def get_compilers_config(arch=None): cc_config = get_config('compilers') if arch in cc_config and 'all' in cc_config: arch_compiler = dict(cc_config[arch]) - _compiler_by_arch[arch] = _merge_dict(arch_compiler, cc_config['all']) + _compiler_by_arch[arch] = _merge_yaml(arch_compiler, cc_config['all']) elif arch in cc_config: _compiler_by_arch[arch] = cc_config[arch] elif 'all' in cc_config: @@ -225,6 +265,13 @@ def get_compilers_config(arch=None): return _compiler_by_arch[arch] +def get_repos_config(): + config = get_config() + if 'repos' not in config: + return [] + return config['repos'] + + def get_mirror_config(): """Get the mirror configuration from config files""" return get_config('mirrors') @@ -232,7 +279,6 @@ def get_mirror_config(): def get_config_scope_dirname(scope): """For a scope return the config directory""" - global config_scopes for s,p in config_scopes: if s == scope: return p @@ -251,16 +297,16 @@ def get_config_scope_filename(scope, category_name): def add_to_config(category_name, addition_dict, scope=None): """Merge a new dict into a configuration tree and write the new configuration to disk""" - global _read_config_file_result get_config(category_name) category = _config_sections[category_name] - #If scope is specified, use it. Otherwise use the last config scope that - #we successfully parsed data from. + # If scope is specified, use it. Otherwise use the last config scope that + # we successfully parsed data from. 
file = None path = None if not scope and not category.files_read_from: scope = 'user' + if scope: try: dir = get_config_scope_dirname(scope) @@ -268,32 +314,37 @@ def add_to_config(category_name, addition_dict, scope=None): mkdirp(dir) path = os.path.join(dir, category.filename) file = open(path, 'w') - except exceptions.IOError, e: + except IOError, e: pass else: for p in category.files_read_from: try: file = open(p, 'w') - except exceptions.IOError, e: + except IOError, e: pass if file: path = p break; + if not file: tty.die('Unable to write to config file %s' % path) - #Merge the new information into the existing file info, then write to disk - new_dict = _read_config_file_result[path] + # Merge the new information into the existing file info, then write to disk + new_dict = _read_config_file(path) + if new_dict and category_name in new_dict: new_dict = new_dict[category_name] - new_dict = _merge_dicts(new_dict, addition_dict) + + new_dict = _merge_yaml(new_dict, addition_dict) new_dict = { category_name : new_dict } - _read_config_file_result[path] = new_dict + + # Install new dict as memoized value, and dump to disk + _read_config_file.cache[path] = new_dict yaml.dump(new_dict, stream=file, default_flow_style=False) file.close() - #Merge the new information into the cached results - category.result_dict = _merge_dicts(category.result_dict, addition_dict) + # Merge the new information into the cached results + category.result_dict = _merge_yaml(category.result_dict, addition_dict) def add_to_mirror_config(addition_dict, scope=None): @@ -311,7 +362,6 @@ def add_to_compiler_config(addition_dict, scope=None, arch=None): def remove_from_config(category_name, key_to_rm, scope=None): """Remove a configuration key and write a new configuration to disk""" - global config_scopes get_config(category_name) scopes_to_rm_from = [scope] if scope else [s for s,p in config_scopes] category = _config_sections[category_name] diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index 3b9d74dd6e..c414234386 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -30,7 +30,9 @@ import glob import imp import spack.config import re -from contextlib import closing +import itertools +import traceback +from external import yaml import llnl.util.tty as tty from llnl.util.filesystem import join_path @@ -44,7 +46,7 @@ from sets import Set from spack.repo_loader import RepoLoader, imported_packages_module, package_file_name # Filename for package repo names -packagerepo_filename = 'reponame' +repo_config = 'repo.yaml' def _autospec(function): """Decorator that automatically converts the argument of a single-arg @@ -56,56 +58,85 @@ def _autospec(function): return converter +def sliding_window(seq, n): + it = iter(seq) + result = tuple(itertools.islice(it, n)) + if len(result) == n: + yield result + for elem in it: + result = result[1:] + (elem,) + yield result + + class PackageDB(object): - def __init__(self, default_root): - """Construct a new package database from a root directory.""" - - #Collect the repos from the config file and read their names from the file system - repo_dirs = self._repo_list_from_config() - repo_dirs.append(default_root) - self.repos = [(self._read_reponame_from_directory(dir), dir) for dir in repo_dirs] - - # Check for duplicate repo names - s = set() - dups = set(r for r in self.repos if r[0] in s or s.add(r[0])) - if dups: - reponame = list(dups)[0][0] - dir1 = list(dups)[0][1] - dir2 = dict(s)[reponame] - tty.die("Package repo %s in directory %s 
has the same name as the " - "repo in directory %s" % - (reponame, dir1, dir2)) + def __init__(self, *repo_dirs): + """Construct a new package database from a list of directories. + + Args: + repo_dirs List of directories containing packages. + + If ``repo_dirs`` is empty, gets repository list from Spack configuration. + """ + if not repo_dirs: + repo_dirs = spack.config.get_repos_config() + if not repo_dirs: + tty.die("Spack configuration contains no package repositories.") + + # Collect the repos from the config file and read their names + # from the file system + repo_dirs = [spack.config.substitute_spack_prefix(rd) for rd in repo_dirs] + + self.repos = [] + for rdir in repo_dirs: + rname = self._read_reponame_from_directory(rdir) + if rname: + self.repos.append((self._read_reponame_from_directory(rdir), rdir)) + + + by_path = sorted(self.repos, key=lambda r:r[1]) + by_name = sorted(self.repos, key=lambda r:r[0]) + + for r1, r2 in by_path: + if r1[1] == r2[1]: + tty.die("Package repos are the same:", + " %20s %s" % r1, " %20s %s" % r2) + + for r1, r2 in by_name: + if r1[0] == r2[0]: + tty.die("Package repos cannot have the same name:", + " %20s %s" % r1, " %20s %s" % r2) # For each repo, create a RepoLoader - self.repo_loaders = dict([(r[0], RepoLoader(r[0], r[1])) for r in self.repos]) + self.repo_loaders = dict((name, RepoLoader(name, path)) + for name, path in self.repos) self.instances = {} self.provider_index = None def _read_reponame_from_directory(self, dir): - """For a packagerepo directory, read the repo name from the dir/reponame file""" - path = os.path.join(dir, packagerepo_filename) + """For a packagerepo directory, read the repo name from the + $root/repo.yaml file""" + path = os.path.join(dir, repo_config) try: - with closing(open(path, 'r')) as reponame_file: - name = reponame_file.read().lstrip().rstrip() - if not re.match(r'[a-zA-Z][a-zA-Z0-9]+', name): - tty.die("Package repo name '%s', read from %s, is an invalid name. " - "Repo names must began with a letter and only contain letters " - "and numbers." % (name, path)) + with open(path) as reponame_file: + yaml_data = yaml.load(reponame_file) + + if (not yaml_data or + 'repo' not in yaml_data or + 'namespace' not in yaml_data['repo']): + tty.die("Invalid %s in %s" % (repo_config, dir)) + + name = yaml_data['repo']['namespace'] + if not re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', name): + tty.die( + "Package repo name '%s', read from %s, is an invalid name. " + "Repo names must began with a letter and only contain " + "letters and numbers." 
% (name, path)) return name except exceptions.IOError, e: - tty.die("Could not read from package repo name file %s" % path) - - - - def _repo_list_from_config(self): - """Read through the spackconfig and return the list of packagerepo directories""" - config = spack.config.get_config() - if not config.has_option('packagerepo', 'directories'): return [] - dir_string = config.get('packagerepo', 'directories') - return dir_string.split(':') + tty.die("Error reading %s when opening %s" % (repo_config, dir)) @_autospec @@ -125,7 +156,7 @@ class PackageDB(object): except Exception, e: if spack.debug: sys.excepthook(*sys.exc_info()) - raise FailedConstructorError(spec.name, e) + raise FailedConstructorError(spec.name, *sys.exc_info()) return self.instances[spec] @@ -304,8 +335,10 @@ class UnknownPackageError(spack.error.SpackError): class FailedConstructorError(spack.error.SpackError): """Raised when a package's class constructor fails.""" - def __init__(self, name, reason): + def __init__(self, name, exc_type, exc_obj, exc_tb): super(FailedConstructorError, self).__init__( "Class constructor failed for package '%s'." % name, - str(reason)) + '\nCaused by:\n' + + ('%s: %s\n' % (exc_type.__name__, exc_obj)) + + ''.join(traceback.format_tb(exc_tb))) self.name = name diff --git a/lib/spack/spack/repo_loader.py b/lib/spack/spack/repo_loader.py index 6eaa1eead2..92da1cf709 100644 --- a/lib/spack/spack/repo_loader.py +++ b/lib/spack/spack/repo_loader.py @@ -1,8 +1,10 @@ -import spack -import spack.repos import re +import sys import types +import traceback + from llnl.util.lang import * +import spack # Name of module under which packages are imported imported_packages_module = 'spack.repos' @@ -13,14 +15,30 @@ package_file_name = 'package.py' import sys class LazyLoader: """The LazyLoader handles cases when repo modules or classes - are imported. It watches for 'spack.repos.*' loads, then + are imported. It watches for 'spack.repos.*' loads, then redirects the load to the appropriate module.""" def find_module(self, fullname, pathname): if not fullname.startswith(imported_packages_module): return None + + print "HERE ===" + print + for line in traceback.format_stack(): + print " ", line.strip() + print + print "full: ", fullname + print "path: ", pathname + print + partial_name = fullname[len(imported_packages_module)+1:] - repo = partial_name.split('.')[0] - module = partial_name.split('.')[1] + + print "partial: ", partial_name + print + + last_dot = partial_name.rfind('.') + repo = partial_name[:last_dot] + module = partial_name[last_dot+1:] + repo_loader = spack.db.repo_loaders.get(repo) if repo_loader: try: @@ -43,7 +61,7 @@ class RepoNamespace(types.ModuleType): def __init__(self): import sys sys.modules[imported_packages_module] = self - + def __getattr__(self, name): if name in _reponames: return _reponames[name] @@ -62,7 +80,7 @@ class RepoLoader(types.ModuleType): """Each RepoLoader is associated with a repository, and the RepoLoader is responsible for loading packages out of that repository. For example, a RepoLoader may be responsible for spack.repos.original, and when someone - references spack.repos.original.libelf that RepoLoader will load the + references spack.repos.original.libelf that RepoLoader will load the libelf package.""" def __init__(self, reponame, repopath): self.path = repopath @@ -70,7 +88,6 @@ class RepoLoader(types.ModuleType): self.module_name = imported_packages_module + '.' 
+ reponame if not reponame in _reponames: _reponames[reponame] = self - spack.repos.add_repo(reponame, self) import sys sys.modules[self.module_name] = self @@ -111,5 +128,3 @@ class RepoLoader(types.ModuleType): pkg_name, file_path, e.message)) return module - - diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index 790b22f3b0..eed182a257 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -30,45 +30,85 @@ import spack from spack.packages import PackageDB from spack.test.mock_packages_test import * +# Some sample compiler config data +a_comps = { + "gcc@4.7.3" : { + "cc" : "/gcc473", + "cxx" : "/g++473", + "f77" : None, + "f90" : None }, + "gcc@4.5.0" : { + "cc" : "/gcc450", + "cxx" : "/g++450", + "f77" : "/gfortran", + "f90" : "/gfortran" }, + "clang@3.3" : { + "cc" : "", + "cxx" : "", + "f77" : "", + "f90" : "" } +} + +b_comps = { + "icc@10.0" : { + "cc" : "/icc100", + "cxx" : "/icc100", + "f77" : None, + "f90" : None }, + "icc@11.1" : { + "cc" : "/icc111", + "cxx" : "/icp111", + "f77" : "/ifort", + "f90" : "/ifort" }, + "clang@3.3" : { + "cc" : "/clang", + "cxx" : "/clang++", + "f77" : None, + "f90" : None} +} + class ConfigTest(MockPackagesTest): def setUp(self): - self.initmock() + super(ConfigTest, self).setUp() self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-') - spack.config.config_scopes = [('test_low_priority', os.path.join(self.tmp_dir, 'low')), - ('test_high_priority', os.path.join(self.tmp_dir, 'high'))] + spack.config.config_scopes = [ + ('test_low_priority', os.path.join(self.tmp_dir, 'low')), + ('test_high_priority', os.path.join(self.tmp_dir, 'high'))] + def tearDown(self): - self.cleanmock() + super(ConfigTest, self).tearDown() shutil.rmtree(self.tmp_dir, True) - def check_config(self, comps): + + def check_config(self, comps, *compiler_names): + """Check that named compilers in comps match Spack's config.""" config = spack.config.get_compilers_config() compiler_list = ['cc', 'cxx', 'f77', 'f90'] - for key in comps: + for key in compiler_names: for c in compiler_list: - if comps[key][c] == '/bad': - continue self.assertEqual(comps[key][c], config[key][c]) - def test_write_key(self): - a_comps = {"gcc@4.7.3" : { "cc" : "/gcc473", "cxx" : "/g++473", "f77" : None, "f90" : None }, - "gcc@4.5.0" : { "cc" : "/gcc450", "cxx" : "/g++450", "f77" : "/gfortran", "f90" : "/gfortran" }, - "clang@3.3" : { "cc" : "/bad", "cxx" : "/bad", "f77" : "/bad", "f90" : "/bad" }} + def test_write_key_in_memory(self): + # Write b_comps "on top of" a_comps. + spack.config.add_to_compiler_config(a_comps, 'test_low_priority') + spack.config.add_to_compiler_config(b_comps, 'test_high_priority') + + # Make sure the config looks how we expect. + self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0') + self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3') - b_comps = {"icc@10.0" : { "cc" : "/icc100", "cxx" : "/icc100", "f77" : None, "f90" : None }, - "icc@11.1" : { "cc" : "/icc111", "cxx" : "/icp111", "f77" : "/ifort", "f90" : "/ifort" }, - "clang@3.3" : { "cc" : "/clang", "cxx" : "/clang++", "f77" : None, "f90" : None}} + def test_write_key_to_disk(self): + # Write b_comps "on top of" a_comps. spack.config.add_to_compiler_config(a_comps, 'test_low_priority') spack.config.add_to_compiler_config(b_comps, 'test_high_priority') - self.check_config(a_comps) - self.check_config(b_comps) - + # Clear caches so we're forced to read from disk. 
spack.config.clear_config_caches() - self.check_config(a_comps) - self.check_config(b_comps) - + # Same check again, to ensure consistency. + self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0') + self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3') diff --git a/var/spack/mock_packages/repo.yaml b/var/spack/mock_packages/repo.yaml new file mode 100644 index 0000000000..d065896006 --- /dev/null +++ b/var/spack/mock_packages/repo.yaml @@ -0,0 +1,2 @@ +repo: + namespace: mock diff --git a/var/spack/packages/repo.yaml b/var/spack/packages/repo.yaml new file mode 100644 index 0000000000..4a371e1cad --- /dev/null +++ b/var/spack/packages/repo.yaml @@ -0,0 +1,2 @@ +repo: + namespace: gov.llnl.spack diff --git a/var/spack/packages/reponame b/var/spack/packages/reponame deleted file mode 100644 index 4b48deed3a..0000000000 --- a/var/spack/packages/reponame +++ /dev/null @@ -1 +0,0 @@ -original -- cgit v1.2.3-70-g09d2 From 360b307f683e146151a65e1d788ce1d154c47ace Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 24 Aug 2015 09:14:16 -0700 Subject: Save progress. import gov.llnl.spack.mpich works. --- lib/spack/llnl/util/tty/colify.py | 7 + lib/spack/spack/__init__.py | 12 +- lib/spack/spack/cmd/repo.py | 17 +- lib/spack/spack/config.py | 4 +- lib/spack/spack/packages.py | 530 +++++++++++++++++++---------- lib/spack/spack/repo_loader.py | 22 +- lib/spack/spack/spec.py | 2 +- lib/spack/spack/test/directory_layout.py | 9 +- lib/spack/spack/test/mock_packages_test.py | 12 +- lib/spack/spack/test/package_sanity.py | 6 +- lib/spack/spack/test/packages.py | 6 +- lib/spack/spack/util/naming.py | 31 ++ var/spack/mock_packages/_repo.yaml | 2 + var/spack/mock_packages/repo.yaml | 2 - var/spack/packages/_repo.yaml | 2 + var/spack/packages/repo.yaml | 2 - 16 files changed, 435 insertions(+), 231 deletions(-) create mode 100644 var/spack/mock_packages/_repo.yaml delete mode 100644 var/spack/mock_packages/repo.yaml create mode 100644 var/spack/packages/_repo.yaml delete mode 100644 var/spack/packages/repo.yaml diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py index 66c52c3968..acf64c1e13 100644 --- a/lib/spack/llnl/util/tty/colify.py +++ b/lib/spack/llnl/util/tty/colify.py @@ -220,6 +220,13 @@ def colify(elts, **options): def colify_table(table, **options): + """Version of colify() for data expressed in rows, (list of lists). + + Same as regular colify but takes a list of lists, where each + sub-list must be the same length, and each is interpreted as a + row in a table. Regular colify displays a sequential list of + values in columns. 
+ """ if table is None: raise TypeError("Can't call colify_table on NoneType") elif not table or not table[0]: diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 09bc9ca52a..71e3ac3715 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -23,8 +23,10 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os +import sys import tempfile from llnl.util.filesystem import * +import llnl.util.tty as tty # This lives in $prefix/lib/spack/spack/__file__ prefix = ancestor(__file__, 4) @@ -42,6 +44,7 @@ test_path = join_path(module_path, "test") hooks_path = join_path(module_path, "hooks") var_path = join_path(prefix, "var", "spack") stage_path = join_path(var_path, "stage") +packages_path = join_path(var_path, "packages") opt_path = join_path(prefix, "opt") install_path = join_path(opt_path, "spack") share_path = join_path(prefix, "share", "spack") @@ -55,9 +58,12 @@ repos = RepoNamespace() # # Set up the default packages database. # -from spack.packages import PackageDB -packages_path = join_path(var_path, "packages") -db = PackageDB() +import spack.packages +_repo_paths = spack.config.get_repos_config() +if not _repo_paths: + tty.die("Spack configuration contains no package repositories.") +db = spack.packages.PackageFinder(*_repo_paths) +sys.meta_path.append(db) # # Paths to mock files for testing. diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py index 1261c7ada9..e290f60b7b 100644 --- a/lib/spack/spack/cmd/repo.py +++ b/lib/spack/spack/cmd/repo.py @@ -32,7 +32,7 @@ from llnl.util.filesystem import join_path, mkdirp import spack.spec import spack.config from spack.util.environment import get_path -from spack.packages import repo_config +from spack.packages import repo_config_filename import os import exceptions @@ -50,13 +50,8 @@ def setup_parser(subparser): create_parser = sp.add_parser('create', help=repo_create.__doc__) create_parser.add_argument('directory', help="Directory containing the packages.") create_parser.add_argument('name', help="Name of new package repository.") -<<<<<<< HEAD:lib/spack/spack/cmd/packagerepo.py - - remove_parser = sp.add_parser('remove', help=packagerepo_remove.__doc__) -======= remove_parser = sp.add_parser('remove', help=repo_remove.__doc__) ->>>>>>> Save changes to external repo integration:lib/spack/spack/cmd/repo.py remove_parser.add_argument('name') list_parser = sp.add_parser('list', help=repo_list.__doc__) @@ -81,7 +76,7 @@ def repo_add(args): """Add package sources to the Spack configuration.""" if not add_to_config(args.directory): tty.die('Repo directory %s already exists in the repo list' % dir) - + def repo_create(args): """Create a new package repo at a directory and name""" @@ -95,13 +90,13 @@ def repo_create(args): mkdirp(dir) except exceptions.OSError, e: tty.die('Failed to create new directory %s' % dir) - path = os.path.join(dir, repo_config) + path = os.path.join(dir, repo_config_filename) try: with closing(open(path, 'w')) as repofile: repofile.write(name + '\n') except exceptions.IOError, e: tty.die('Could not create new file %s' % path) - + if not add_to_config(args.directory): tty.warn('Repo directory %s already exists in the repo list' % dir) @@ -118,8 +113,8 @@ def repo_list(args): fmt = "%%-%ds%%s" % (max_len + 4) for root in root_names: print fmt % (root[0], root[1]) - - + + def repo(parser, args): action = { 'add' : repo_add, diff --git a/lib/spack/spack/config.py 
b/lib/spack/spack/config.py index dc59f9a5a3..66da91f629 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -269,7 +269,9 @@ def get_repos_config(): config = get_config() if 'repos' not in config: return [] - return config['repos'] + + repo_list = config['repos'] + return [substitute_spack_prefix(repo) for repo in repo_list] def get_mirror_config(): diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index c414234386..df54b12324 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -28,7 +28,6 @@ import sys import inspect import glob import imp -import spack.config import re import itertools import traceback @@ -41,149 +40,327 @@ from llnl.util.lang import * import spack.error import spack.spec from spack.virtual import ProviderIndex -from spack.util.naming import mod_to_class, validate_module_name -from sets import Set -from spack.repo_loader import RepoLoader, imported_packages_module, package_file_name +from spack.util.naming import * # Filename for package repo names -repo_config = 'repo.yaml' +repo_config_filename = '_repo.yaml' + +# Filename for packages in repos. +package_file_name = 'package.py' def _autospec(function): """Decorator that automatically converts the argument of a single-arg function to a Spec.""" - def converter(self, spec_like, **kwargs): + def converter(self, spec_like, *args, **kwargs): if not isinstance(spec_like, spack.spec.Spec): spec_like = spack.spec.Spec(spec_like) - return function(self, spec_like, **kwargs) + return function(self, spec_like, *args, **kwargs) return converter -def sliding_window(seq, n): - it = iter(seq) - result = tuple(itertools.islice(it, n)) - if len(result) == n: - yield result - for elem in it: - result = result[1:] + (elem,) - yield result +class NamespaceTrie(object): + def __init__(self): + self._elements = {} -class PackageDB(object): + def __setitem__(self, namespace, repo): + parts = namespace.split('.') + cur = self._elements + for p in parts[:-1]: + if p not in cur: + cur[p] = {} + cur = cur[p] + + cur[parts[-1]] = repo + + + def __getitem__(self, namespace): + parts = namespace.split('.') + cur = self._elements + for p in parts: + if p not in cur: + raise KeyError("Can't find namespace %s in trie" % namespace) + cur = cur[p] + return cur + + + def __contains__(self, namespace): + parts = namespace.split('.') + cur = self._elements + for p in parts: + if not isinstance(cur, dict): + return False + if p not in cur: + return False + cur = cur[p] + return True + + + +class PackageFinder(object): + """A PackageFinder is a wrapper around a list of PackageDBs. + + It functions exactly like a PackageDB, but it operates on the + combined results of the PackageDBs in its list instead of on a + single package repository. + """ def __init__(self, *repo_dirs): - """Construct a new package database from a list of directories. 
+ self.repos = [] + self.by_namespace = NamespaceTrie() + self.by_path = {} + + for root in repo_dirs: + repo = PackageDB(root) + self.put_last(repo) + + + def _check_repo(self, repo): + if repo.root in self.by_path: + raise DuplicateRepoError("Package repos are the same", + repo, self.by_path[repo.root]) + + if repo.namespace in self.by_namespace: + tty.error("Package repos cannot have the same name", + repo, self.by_namespace[repo.namespace]) + + + def _add(self, repo): + self._check_repo(repo) + self.by_namespace[repo.namespace] = repo + self.by_path[repo.root] = repo + + + def put_first(self, repo): + self._add(repo) + self.repos.insert(0, repo) + + + def put_last(self, repo): + self._add(repo) + self.repos.append(repo) + - Args: - repo_dirs List of directories containing packages. + def remove(self, repo): + if repo in self.repos: + self.repos.remove(repo) - If ``repo_dirs`` is empty, gets repository list from Spack configuration. + + def swap(self, other): + repos = self.repos + by_namespace = self.by_namespace + by_path = self.by_path + + self.repos = other.repos + self.by_namespace = other.by_namespace + self.by_pah = other.by_path + + other.repos = repos + other.by_namespace = by_namespace + other.by_path = by_path + + + def all_package_names(self): + all_pkgs = set() + for repo in self.repos: + all_pkgs.update(set(repo.all_package_names())) + return all_pkgs + + + def all_packages(self): + for name in self.all_package_names(): + yield self.get(name) + + + def providers_for(self, vpkg_name): + # TODO: USE MORE THAN FIRST REPO + return self.repos[0].providers_for(vpkg_name) + + + def _get_spack_pkg_name(self, repo, py_module_name): + """Allow users to import Spack packages using legal Python identifiers. + + A python identifier might map to many different Spack package + names due to hyphen/underscore ambiguity. 
+ + Easy example: + num3proxy -> 3proxy + + Ambiguous: + foo_bar -> foo_bar, foo-bar + + More ambiguous: + foo_bar_baz -> foo_bar_baz, foo-bar-baz, foo_bar-baz, foo-bar_baz """ - if not repo_dirs: - repo_dirs = spack.config.get_repos_config() - if not repo_dirs: - tty.die("Spack configuration contains no package repositories.") + if py_module_name in repo: + return py_module_name - # Collect the repos from the config file and read their names - # from the file system - repo_dirs = [spack.config.substitute_spack_prefix(rd) for rd in repo_dirs] + options = possible_spack_module_names(py_module_name) + options.remove(py_module_name) + for name in options: + if name in repo: + return name - self.repos = [] - for rdir in repo_dirs: - rname = self._read_reponame_from_directory(rdir) - if rname: - self.repos.append((self._read_reponame_from_directory(rdir), rdir)) + return None - by_path = sorted(self.repos, key=lambda r:r[1]) - by_name = sorted(self.repos, key=lambda r:r[0]) + def find_module(self, fullname, path=None): + if fullname in self.by_namespace: + return self - for r1, r2 in by_path: - if r1[1] == r2[1]: - tty.die("Package repos are the same:", - " %20s %s" % r1, " %20s %s" % r2) + namespace, dot, module_name = fullname.rpartition('.') + if namespace not in self.by_namespace: + return None - for r1, r2 in by_name: - if r1[0] == r2[0]: - tty.die("Package repos cannot have the same name:", - " %20s %s" % r1, " %20s %s" % r2) + repo = self.by_namespace[namespace] + name = self._get_spack_pkg_name(repo, module_name) + if not name: + return None - # For each repo, create a RepoLoader - self.repo_loaders = dict((name, RepoLoader(name, path)) - for name, path in self.repos) + return self - self.instances = {} - self.provider_index = None + def load_module(self, fullname): + if fullname in sys.modules: + return sys.modules[fullname] - def _read_reponame_from_directory(self, dir): - """For a packagerepo directory, read the repo name from the - $root/repo.yaml file""" - path = os.path.join(dir, repo_config) + if fullname in self.by_namespace: + ns = self.by_namespace[fullname] + module = imp.new_module(fullname) + module.__file__ = "" + module.__path__ = [] + module.__package__ = fullname + else: + namespace, dot, module_name = fullname.rpartition('.') + if namespace not in self.by_namespace: + raise ImportError( + "No Spack repository with namespace %s" % namespace) + + repo = self.by_namespace[namespace] + name = self._get_spack_pkg_name(repo, module_name) + if not name: + raise ImportError( + "No module %s in Spack repository %s" % (module_name, repo)) + + fullname = namespace + '.' + name + file_path = os.path.join(repo.root, name, package_file_name) + module = imp.load_source(fullname, file_path) + module.__package__ = namespace + + module.__loader__ = self + sys.modules[fullname] = module + return module + + + @_autospec + def get(self, spec, new=False): + for repo in self.repos: + if spec.name in repo: + return repo.get(spec, new) + raise UnknownPackageError(spec.name) + + + def get_repo(self, namespace): + if namespace in self.by_namespace: + repo = self.by_namespace[namespace] + if isinstance(repo, PackageDB): + return repo + return None + + + def exists(self, pkg_name): + return any(repo.exists(pkg_name) for repo in self.repos) + + + def __contains__(self, pkg_name): + return self.exists(pkg_name) + + + +class PackageDB(object): + """Class representing a package repository in the filesystem. + + Each package repository must have a top-level configuration file + called `_repo.yaml`. 
+ + Currently, `_repo.yaml` this must define: + + `namespace`: + A Python namespace where the repository's packages should live. + + """ + def __init__(self, root): + """Instantiate a package repository from a filesystem path.""" + # Root directory, containing _repo.yaml and package dirs + self.root = root + + # Config file in /_repo.yaml + self.config_file = os.path.join(self.root, repo_config_filename) + + # Read configuration from _repo.yaml + config = self._read_config() + if not 'namespace' in config: + tty.die('Package repo in %s must define a namespace in %s.' + % (self.root, repo_config_filename)) + + # Check namespace in the repository configuration. + self.namespace = config['namespace'] + if not re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace): + tty.die(("Invalid namespace '%s' in '%s'. Namespaces must be " + "valid python identifiers separated by '.'") + % (self.namespace, self.root)) + + # These are internal cache variables. + self._instances = {} + self._provider_index = None + + + def _read_config(self): + """Check for a YAML config file in this db's root directory.""" try: - with open(path) as reponame_file: + with open(self.config_file) as reponame_file: yaml_data = yaml.load(reponame_file) - if (not yaml_data or - 'repo' not in yaml_data or - 'namespace' not in yaml_data['repo']): - tty.die("Invalid %s in %s" % (repo_config, dir)) - - name = yaml_data['repo']['namespace'] - if not re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', name): - tty.die( - "Package repo name '%s', read from %s, is an invalid name. " - "Repo names must began with a letter and only contain " - "letters and numbers." % (name, path)) - return name + if (not yaml_data or 'repo' not in yaml_data or + not isinstance(yaml_data['repo'], dict)): + tty.die("Invalid %s in repository %s" + % (repo_config_filename, self.root)) + + return yaml_data['repo'] + except exceptions.IOError, e: - tty.die("Error reading %s when opening %s" % (repo_config, dir)) + tty.die("Error reading %s when opening %s" + % (self.config_file, self.root)) @_autospec - def get(self, spec, **kwargs): + def get(self, spec, new=False): if spec.virtual: raise UnknownPackageError(spec.name) - if kwargs.get('new', False): - if spec in self.instances: - del self.instances[spec] + if new: + if spec in self._instances: + del self._instances[spec] - if not spec in self.instances: + if not spec in self._instances: package_class = self.get_class_for_package_name(spec.name, spec.repo) try: copy = spec.copy() - self.instances[copy] = package_class(copy) + self._instances[copy] = package_class(copy) except Exception, e: if spack.debug: sys.excepthook(*sys.exc_info()) raise FailedConstructorError(spec.name, *sys.exc_info()) - return self.instances[spec] - - - @_autospec - def delete(self, spec): - """Force a package to be recreated.""" - del self.instances[spec] - - - def purge(self): - """Clear entire package instance cache.""" - self.instances.clear() - - - @_autospec - def get_installed(self, spec): - """Get all the installed specs that satisfy the provided spec constraint.""" - return [s for s in self.installed_package_specs() if s.satisfies(spec)] + return self._instances[spec] @_autospec def providers_for(self, vpkg_spec): - if self.provider_index is None: - self.provider_index = ProviderIndex(self.all_package_names()) + if self._provider_index is None: + self._provider_index = ProviderIndex(self.all_package_names()) - providers = self.provider_index.providers_for(vpkg_spec) + providers = self._provider_index.providers_for(vpkg_spec) if not providers: 
raise UnknownPackageError(vpkg_spec.name) return providers @@ -194,46 +371,13 @@ class PackageDB(object): return [p for p in self.all_packages() if p.extends(extendee_spec)] - @_autospec - def installed_extensions_for(self, extendee_spec): - for s in self.installed_package_specs(): - try: - if s.package.extends(extendee_spec): - yield s.package - except UnknownPackageError, e: - # Skip packages we know nothing about - continue - # TODO: add some conditional way to do this instead of - # catching exceptions. - - - def repo_for_package_name(self, pkg_name, packagerepo_name=None): - """Find the dirname for a package and the packagerepo it came from - if packagerepo_name is not None, then search for the package in the - specified packagerepo""" - #Look for an existing package under any matching packagerepos - roots = [pkgrepo for pkgrepo in self.repos - if not packagerepo_name or packagerepo_name == pkgrepo[0]] - - if not roots: - tty.die("Package repo %s does not exist" % packagerepo_name) - - for pkgrepo in roots: - path = join_path(pkgrepo[1], pkg_name) - if os.path.exists(path): - return (pkgrepo[0], path) - - repo_to_add_to = roots[-1] - return (repo_to_add_to[0], join_path(repo_to_add_to[1], pkg_name)) - - - def dirname_for_package_name(self, pkg_name, packagerepo_name=None): + def dirname_for_package_name(self, pkg_name): """Get the directory name for a particular package. This is the directory that contains its package.py file.""" - return self.repo_for_package_name(pkg_name, packagerepo_name)[1] + return join_path(self.root, pkg_name) - def filename_for_package_name(self, pkg_name, packagerepo_name=None): + def filename_for_package_name(self, pkg_name): """Get the filename for the module we should load for a particular package. Packages for a pacakge DB live in ``$root//package.py`` @@ -241,57 +385,25 @@ class PackageDB(object): This will return a proper package.py path even if the package doesn't exist yet, so callers will need to ensure the package exists before importing. - - If a packagerepo is specified, then return existing - or new paths in the specified packagerepo directory. If no - package repo is supplied, return an existing path from any - package repo, and new paths in the default package repo. """ validate_module_name(pkg_name) - pkg_dir = self.dirname_for_package_name(pkg_name, packagerepo_name) + pkg_dir = self.dirname_for_package_name(pkg_name) return join_path(pkg_dir, package_file_name) - def installed_package_specs(self): - """Read installed package names straight from the install directory - layout. - """ - # Get specs from the directory layout but ensure that they're - # all normalized properly. - installed = [] - for spec in spack.install_layout.all_specs(): - spec.normalize() - installed.append(spec) - return installed - - - def installed_known_package_specs(self): - """Read installed package names straight from the install - directory layout, but return only specs for which the - package is known to this version of spack. - """ - for spec in spack.install_layout.all_specs(): - if self.exists(spec.name): - yield spec - - @memoized def all_package_names(self): """Generator function for all packages. 
This looks for ``/package.py`` files within the repo direcotories""" - all_packages = Set() - for repo in self.repos: - dir = repo[1] - if not os.path.isdir(dir): - continue - for pkg_name in os.listdir(dir): - pkg_dir = join_path(dir, pkg_name) - pkg_file = join_path(pkg_dir, package_file_name) - if os.path.isfile(pkg_file): - all_packages.add(pkg_name) - all_package_names = list(all_packages) - all_package_names.sort() - return all_package_names + all_package_names = [] + + for pkg_name in os.listdir(self.root): + pkg_dir = join_path(self.root, pkg_name) + pkg_file = join_path(pkg_dir, package_file_name) + if os.path.isfile(pkg_file): + all_package_names.append(pkg_name) + + return sorted(all_package_names) def all_packages(self): @@ -301,19 +413,25 @@ class PackageDB(object): @memoized def exists(self, pkg_name): - """Whether a package with the supplied name exists .""" + """Whether a package with the supplied name exists.""" return os.path.exists(self.filename_for_package_name(pkg_name)) @memoized def get_class_for_package_name(self, pkg_name, reponame = None): """Get an instance of the class for a particular package.""" - (reponame, repodir) = self.repo_for_package_name(pkg_name, reponame) - module_name = imported_packages_module + '.' + reponame + '.' + pkg_name + file_path = self.filename_for_package_name(pkg_name) - module = self.repo_loaders[reponame].get_module(pkg_name) + if os.path.exists(file_path): + if not os.path.isfile(file_path): + tty.die("Something's wrong. '%s' is not a file!" % file_path) + if not os.access(file_path, os.R_OK): + tty.die("Cannot read '%s'!" % file_path) + else: + raise UnknownPackageError(pkg_name, self.namespace) class_name = mod_to_class(pkg_name) + module = __import__(self.namespace + '.' + pkg_name, fromlist=[class_name]) cls = getattr(module, class_name) if not inspect.isclass(cls): tty.die("%s.%s is not a class" % (pkg_name, class_name)) @@ -321,6 +439,63 @@ class PackageDB(object): return cls + def __str__(self): + return "" % (self.namespace, self.root) + + + def __repr__(self): + return self.__str__() + + + def __contains__(self, pkg_name): + return self.exists(pkg_name) + + + # + # Below functions deal with installed packages, and should be + # moved to some other part of Spack (conbine with + # directory_layout?) + # + @_autospec + def get_installed(self, spec): + """Get all the installed specs that satisfy the provided spec constraint.""" + return [s for s in self.installed_package_specs() if s.satisfies(spec)] + + + @_autospec + def installed_extensions_for(self, extendee_spec): + for s in self.installed_package_specs(): + try: + if s.package.extends(extendee_spec): + yield s.package + except UnknownPackageError, e: + # Skip packages we know nothing about + continue + + + def installed_package_specs(self): + """Read installed package names straight from the install directory + layout. + """ + # Get specs from the directory layout but ensure that they're + # all normalized properly. + installed = [] + for spec in spack.install_layout.all_specs(): + spec.normalize() + installed.append(spec) + return installed + + + def installed_known_package_specs(self): + """Read installed package names straight from the install + directory layout, but return only specs for which the + package is known to this version of spack. 
+ """ + for spec in spack.install_layout.all_specs(): + if self.exists(spec.name): + yield spec + + class UnknownPackageError(spack.error.SpackError): """Raised when we encounter a package spack doesn't have.""" def __init__(self, name, repo=None): @@ -333,6 +508,13 @@ class UnknownPackageError(spack.error.SpackError): self.name = name +class DuplicateRepoError(spack.error.SpackError): + """Raised when duplicate repos are added to a PackageFinder.""" + def __init__(self, msg, repo1, repo2): + super(UnknownPackageError, self).__init__( + "%s: %s, %s" % (msg, repo1, repo2)) + + class FailedConstructorError(spack.error.SpackError): """Raised when a package's class constructor fails.""" def __init__(self, name, exc_type, exc_obj, exc_tb): diff --git a/lib/spack/spack/repo_loader.py b/lib/spack/spack/repo_loader.py index 92da1cf709..441011cf98 100644 --- a/lib/spack/spack/repo_loader.py +++ b/lib/spack/spack/repo_loader.py @@ -12,7 +12,6 @@ imported_packages_module = 'spack.repos' # Name of the package file inside a package directory package_file_name = 'package.py' -import sys class LazyLoader: """The LazyLoader handles cases when repo modules or classes are imported. It watches for 'spack.repos.*' loads, then @@ -21,15 +20,6 @@ class LazyLoader: if not fullname.startswith(imported_packages_module): return None - print "HERE ===" - print - for line in traceback.format_stack(): - print " ", line.strip() - print - print "full: ", fullname - print "path: ", pathname - print - partial_name = fullname[len(imported_packages_module)+1:] print "partial: ", partial_name @@ -50,7 +40,7 @@ class LazyLoader: def load_module(self, fullname): return self.mod -sys.meta_path.append(LazyLoader()) +#sys.meta_path.append(LazyLoader()) _reponames = {} class RepoNamespace(types.ModuleType): @@ -59,7 +49,6 @@ class RepoNamespace(types.ModuleType): this class will use __getattr__ to translate the 'original' into one of spack's known repositories""" def __init__(self): - import sys sys.modules[imported_packages_module] = self def __getattr__(self, name): @@ -89,7 +78,6 @@ class RepoLoader(types.ModuleType): if not reponame in _reponames: _reponames[reponame] = self - import sys sys.modules[self.module_name] = self @@ -110,14 +98,6 @@ class RepoLoader(types.ModuleType): import imp import llnl.util.tty as tty - file_path = os.path.join(self.path, pkg_name, package_file_name) - if os.path.exists(file_path): - if not os.path.isfile(file_path): - tty.die("Something's wrong. '%s' is not a file!" % file_path) - if not os.access(file_path, os.R_OK): - tty.die("Cannot read '%s'!" % file_path) - else: - raise spack.packages.UnknownPackageError(pkg_name, self.reponame if self.reponame != 'original' else None) try: module_name = imported_packages_module + '.' + self.reponame + '.' 
+ pkg_name diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 972ba9ccbb..1666457502 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1714,7 +1714,7 @@ class SpecParser(spack.parse.Parser): spec_repo = lst[-2] else: spec_name = self.token.value - (spec_repo, repodir) = spack.db.repo_for_package_name(spec_name) + spec_repo = 'gov.llnl.spack' self.check_identifier(spec_name) diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py index b3ad8efec4..55b3f0b18f 100644 --- a/lib/spack/spack/test/directory_layout.py +++ b/lib/spack/spack/test/directory_layout.py @@ -34,7 +34,7 @@ from llnl.util.filesystem import * import spack from spack.spec import Spec -from spack.packages import PackageDB +from spack.packages import PackageFinder from spack.directory_layout import YamlDirectoryLayout # number of packages to test (to reduce test time) @@ -123,7 +123,7 @@ class DirectoryLayoutTest(unittest.TestCase): information about installed packages' specs to uninstall or query them again if the package goes away. """ - mock_db = PackageDB(spack.mock_packages_path) + mock_db = PackageFinder(spack.mock_packages_path) not_in_mock = set.difference( set(spack.db.all_package_names()), @@ -145,8 +145,7 @@ class DirectoryLayoutTest(unittest.TestCase): self.layout.create_install_directory(spec) installed_specs[spec] = self.layout.path_for_spec(spec) - tmp = spack.db - spack.db = mock_db + spack.db.swap(mock_db) # Now check that even without the package files, we know # enough to read a spec from the spec file. @@ -161,7 +160,7 @@ class DirectoryLayoutTest(unittest.TestCase): self.assertTrue(spec.eq_dag(spec_from_file)) self.assertEqual(spec.dag_hash(), spec_from_file.dag_hash()) - spack.db = tmp + spack.db.swap(mock_db) def test_find(self): diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index 00f81114af..1f46d65790 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -22,11 +22,12 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +import sys import unittest import spack import spack.config -from spack.packages import PackageDB +from spack.packages import PackageFinder from spack.spec import Spec @@ -43,8 +44,8 @@ class MockPackagesTest(unittest.TestCase): # Use the mock packages database for these tests. This allows # us to set up contrived packages that don't interfere with # real ones. 
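The directory-layout and mock-package tests above all rely on the same swap-based pattern; in sketch form (mpich is one of the mock packages, and the helper names are taken from the diffs, though the usage shown here is assumed):

    import spack
    from spack.packages import PackageFinder

    # Temporarily serve packages from the mock repository, then restore.
    mock_finder = PackageFinder(spack.mock_packages_path)
    spack.db.swap(mock_finder)          # spack.db now answers from the mock repo
    assert spack.db.exists('mpich')     # mock package lookups go through spack.db
    spack.db.swap(mock_finder)          # second swap exchanges the real contents back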
- self.real_db = spack.db - spack.db = PackageDB(spack.mock_packages_path) + self.db = PackageFinder(spack.mock_packages_path) + spack.db.swap(self.db) spack.config.clear_config_caches() self.real_scopes = spack.config.config_scopes @@ -55,7 +56,8 @@ class MockPackagesTest(unittest.TestCase): def cleanmock(self): """Restore the real packages path after any test.""" - spack.db = self.real_db + spack.db.swap(self.db) + spack.config.config_scopes = self.real_scopes spack.config.clear_config_caches() @@ -66,5 +68,3 @@ class MockPackagesTest(unittest.TestCase): def tearDown(self): self.cleanmock() - - diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py index 6222e7b5f8..70b5d6a478 100644 --- a/lib/spack/spack/test/package_sanity.py +++ b/lib/spack/spack/test/package_sanity.py @@ -47,10 +47,10 @@ class PackageSanityTest(unittest.TestCase): def test_get_all_mock_packages(self): """Get the mock packages once each too.""" - tmp = spack.db - spack.db = PackageDB(spack.mock_packages_path) + db = PackageFinder(spack.mock_packages_path) + spack.db.swap(db) self.check_db() - spack.db = tmp + spack.db.swap(db) def test_url_versions(self): diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py index a8183cf6a6..42bd91ec5c 100644 --- a/lib/spack/spack/test/packages.py +++ b/lib/spack/spack/test/packages.py @@ -44,7 +44,8 @@ class PackagesTest(MockPackagesTest): def test_package_filename(self): - filename = spack.db.filename_for_package_name('mpich') + repo = spack.db.get_repo('gov.llnl.spack.mock') + filename = repo.filename_for_package_name('mpich') self.assertEqual(filename, join_path(spack.mock_packages_path, 'mpich', 'package.py')) @@ -54,7 +55,8 @@ class PackagesTest(MockPackagesTest): def test_nonexisting_package_filename(self): - filename = spack.db.filename_for_package_name('some-nonexisting-package') + repo = spack.db.get_repo('gov.llnl.spack.mock') + filename = repo.filename_for_package_name('some-nonexisting-package') self.assertEqual(filename, join_path(spack.mock_packages_path, 'some-nonexisting-package', 'package.py')) diff --git a/lib/spack/spack/util/naming.py b/lib/spack/spack/util/naming.py index 782afbd4bb..a7b6e2b436 100644 --- a/lib/spack/spack/util/naming.py +++ b/lib/spack/spack/util/naming.py @@ -1,10 +1,14 @@ # Need this because of spack.util.string from __future__ import absolute_import import string +import itertools import re import spack +__all__ = ['mod_to_class', 'spack_module_to_python_module', 'valid_module_name', + 'validate_module_name', 'possible_spack_module_names'] + # Valid module names can contain '-' but can't start with it. _valid_module_re = r'^\w[\w-]*$' @@ -42,6 +46,33 @@ def mod_to_class(mod_name): return class_name +def spack_module_to_python_module(mod_name): + """Given a Spack module name, returns the name by which it can be + imported in Python. 
+ """ + if re.match(r'[0-9]', mod_name): + mod_name = 'num' + mod_name + + return mod_name.replace('-', '_') + + +def possible_spack_module_names(python_mod_name): + """Given a Python module name, return a list of all possible spack module + names that could correspond to it.""" + mod_name = re.sub(r'^num(\d)', r'\1', python_mod_name) + + parts = re.split(r'(_)', mod_name) + options = [['_', '-']] * mod_name.count('_') + + results = [] + for subs in itertools.product(*options): + s = list(parts) + s[1::2] = subs + results.append(''.join(s)) + + return results + + def valid_module_name(mod_name): """Return whether the mod_name is valid for use in Spack.""" return bool(re.match(_valid_module_re, mod_name)) diff --git a/var/spack/mock_packages/_repo.yaml b/var/spack/mock_packages/_repo.yaml new file mode 100644 index 0000000000..b97b978de3 --- /dev/null +++ b/var/spack/mock_packages/_repo.yaml @@ -0,0 +1,2 @@ +repo: + namespace: gov.llnl.spack.mock diff --git a/var/spack/mock_packages/repo.yaml b/var/spack/mock_packages/repo.yaml deleted file mode 100644 index d065896006..0000000000 --- a/var/spack/mock_packages/repo.yaml +++ /dev/null @@ -1,2 +0,0 @@ -repo: - namespace: mock diff --git a/var/spack/packages/_repo.yaml b/var/spack/packages/_repo.yaml new file mode 100644 index 0000000000..4a371e1cad --- /dev/null +++ b/var/spack/packages/_repo.yaml @@ -0,0 +1,2 @@ +repo: + namespace: gov.llnl.spack diff --git a/var/spack/packages/repo.yaml b/var/spack/packages/repo.yaml deleted file mode 100644 index 4a371e1cad..0000000000 --- a/var/spack/packages/repo.yaml +++ /dev/null @@ -1,2 +0,0 @@ -repo: - namespace: gov.llnl.spack -- cgit v1.2.3-70-g09d2 From 6dff42be0973c5949d836eab51cfdffb0eda0a69 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 17 Sep 2015 00:21:33 -0700 Subject: WIP for Matt's branch --- bin/spack | 4 ++-- lib/spack/spack/directives.py | 4 ++-- lib/spack/spack/package.py | 6 +++++ lib/spack/spack/packages.py | 18 ++++++++++----- lib/spack/spack/patch.py | 6 ++++- lib/spack/spack/spec.py | 21 ++++++------------ lib/spack/spack/test/mock_packages_test.py | 35 ++++++++++++++++++++++-------- lib/spack/spack/test/package_sanity.py | 2 +- lib/spack/spack/test/spec_dag.py | 12 +++++----- 9 files changed, 68 insertions(+), 40 deletions(-) diff --git a/bin/spack b/bin/spack index 5c042edd2d..baf08d1481 100755 --- a/bin/spack +++ b/bin/spack @@ -113,8 +113,8 @@ def main(): spack.spack_working_dir = working_dir if args.mock: - from spack.packages import PackageDB - spack.db = PackageDB(spack.mock_packages_path) + from spack.packages import PackageFinder + spack.db.swap(PackageFinder(spack.mock_packages_path)) # If the user asked for it, don't check ssl certs. if args.insecure: diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index 9297d6dac3..aa2cfd2846 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -240,11 +240,11 @@ def patch(pkg, url_or_filename, level=1, when=None): when_spec = parse_anonymous_spec(when, pkg.name) if when_spec not in pkg.patches: - pkg.patches[when_spec] = [Patch(pkg.name, url_or_filename, level)] + pkg.patches[when_spec] = [Patch(pkg, pkg.name, url_or_filename, level)] else: # if this spec is identical to some other, then append this # patch to the existing list. 
- pkg.patches[when_spec].append(Patch(pkg.name, url_or_filename, level)) + pkg.patches[when_spec].append(Patch(pkg, pkg.name, url_or_filename, level)) @directive('variants') diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 3507807373..090349685b 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -712,6 +712,12 @@ class Package(object): tty.msg("Patched %s" % self.name) + @property + def namespace(self): + namespace, dot, module = self.__module__.rpartition('.') + return namespace + + def do_fake_install(self): """Make a fake install directory contaiing a 'fake' file in bin.""" mkdirp(self.prefix.bin) diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index df54b12324..b21316ebf7 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -172,7 +172,7 @@ class PackageFinder(object): def providers_for(self, vpkg_name): - # TODO: USE MORE THAN FIRST REPO + # TODO: THIS IS WRONG; shoudl use more than first repo return self.repos[0].providers_for(vpkg_name) @@ -252,14 +252,22 @@ class PackageFinder(object): return module - @_autospec - def get(self, spec, new=False): + def _find_repo_for_spec(self, spec): + """Find a repo that contains the supplied spec's package. + + Raises UnknownPackageErrorfor if not found. + """ for repo in self.repos: if spec.name in repo: - return repo.get(spec, new) + return repo raise UnknownPackageError(spec.name) + @_autospec + def get(self, spec, new=False): + return self._find_repo_for_spec(spec).get(spec, new) + + def get_repo(self, namespace): if namespace in self.by_namespace: repo = self.by_namespace[namespace] @@ -343,7 +351,7 @@ class PackageDB(object): del self._instances[spec] if not spec in self._instances: - package_class = self.get_class_for_package_name(spec.name, spec.repo) + package_class = self.get_class_for_package_name(spec.name, spec.namespace) try: copy = spec.copy() self._instances[copy] = package_class(copy) diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py index b1b6e07738..e89cf11b2f 100644 --- a/lib/spack/spack/patch.py +++ b/lib/spack/spack/patch.py @@ -41,7 +41,11 @@ class Patch(object): """This class describes a patch to be applied to some expanded source code.""" - def __init__(self, pkg_name, path_or_url, level): + def __init__(self, pkg, pkg_name, path_or_url, level): + print pkg, pkg.name, type(pkg) + print "pkg:", dir(pkg.__module__) + print "NAMESPACE", pkg.namespace() + self.pkg_name = pkg_name self.path_or_url = path_or_url self.path = None diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 1666457502..0d49b1fa95 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -112,7 +112,6 @@ from spack.version import * from spack.util.string import * from spack.util.prefix import Prefix from spack.virtual import ProviderIndex -from spack.repo_loader import imported_packages_module # Valid pattern for an identifier in Spack identifier_re = r'\w[\w-]*' @@ -413,7 +412,7 @@ class Spec(object): self.dependencies = other.dependencies self.variants = other.variants self.variants.spec = self - self.repo = other.repo + self.namespace = other.namespace # Specs are by default not assumed to be normal, but in some # cases we've read them from a file want to assume normal. 
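The namespace property added to Package above, and the namespace field now carried on Spec, both come straight from the dotted module path; for example (module name assumed, consistent with the "import gov.llnl.spack.mpich works" commit message):

    # A package class imported through the finder reports a dotted __module__;
    # its namespace is simply everything before the last dot.
    module_name = 'gov.llnl.spack.mpich'            # e.g. the value of pkg.__module__
    namespace, _, pkg_module = module_name.rpartition('.')
    # namespace == 'gov.llnl.spack', pkg_module == 'mpich'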
@@ -1357,7 +1356,7 @@ class Spec(object): self.dependencies = DependencyMap() self.variants = other.variants.copy() self.variants.spec = self - self.repo = other.repo + self.namespace = other.namespace # If we copy dependencies, preserve DAG structure in the new spec if kwargs.get('deps', True): @@ -1555,7 +1554,7 @@ class Spec(object): if c == '_': out.write(fmt % self.name) elif c == '.': - longname = '%s.%s.%s' % (imported_packages_module, self.repo, self.name) if self.repo else self.name + longname = '%s.%s.%s' % (self.namespace, self.name) if self.namespace else self.name out.write(fmt % longname) elif c == '@': if self.versions and self.versions != _any_version: @@ -1706,15 +1705,9 @@ class SpecParser(spack.parse.Parser): """Parse a spec out of the input. If a spec is supplied, then initialize and return it instead of creating a new one.""" - spec_name = None - spec_repo = None - if self.token.value.startswith(imported_packages_module): - lst = self.token.value.split('.') - spec_name = lst[-1] - spec_repo = lst[-2] - else: - spec_name = self.token.value - spec_repo = 'gov.llnl.spack' + spec_namespace, dot, spec_name = self.token.value.rpartition('.') + if not spec_namespace: + spec_namespace = None self.check_identifier(spec_name) @@ -1727,7 +1720,7 @@ class SpecParser(spack.parse.Parser): spec.compiler = None spec.dependents = DependencyMap() spec.dependencies = DependencyMap() - spec.repo = spec_repo + spec.namespace = spec_namespace spec._normal = False spec._concrete = False diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index 1f46d65790..071c21b7e0 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -31,14 +31,6 @@ from spack.packages import PackageFinder from spack.spec import Spec -def set_pkg_dep(pkg, spec): - """Alters dependence information for a package. - Use this to mock up constraints. - """ - spec = Spec(spec) - spack.db.get(pkg).dependencies[spec.name] = { Spec(pkg) : spec } - - class MockPackagesTest(unittest.TestCase): def initmock(self): # Use the mock packages database for these tests. This allows @@ -53,14 +45,39 @@ class MockPackagesTest(unittest.TestCase): ('site', spack.mock_site_config), ('user', spack.mock_user_config)] + # Store changes to the package's dependencies so we can + # restore later. + self.saved_deps = {} + + + def set_pkg_dep(self, pkg_name, spec): + """Alters dependence information for a package. + + Adds a dependency on to pkg. + Use this to mock up constraints. + """ + spec = Spec(spec) + + # Save original dependencies before making any changes. 
+ pkg = spack.db.get(pkg_name) + if pkg_name not in self.saved_deps: + self.saved_deps[pkg_name] = (pkg, pkg.dependencies.copy()) + + # Change dep spec + pkg.dependencies[spec.name] = { Spec(pkg_name) : spec } + def cleanmock(self): """Restore the real packages path after any test.""" spack.db.swap(self.db) - spack.config.config_scopes = self.real_scopes spack.config.clear_config_caches() + # Restore dependency changes that happened during the test + for pkg_name, (pkg, deps) in self.saved_deps.items(): + pkg.dependencies.clear() + pkg.dependencies.update(deps) + def setUp(self): self.initmock() diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py index 70b5d6a478..a5925ea066 100644 --- a/lib/spack/spack/test/package_sanity.py +++ b/lib/spack/spack/test/package_sanity.py @@ -29,7 +29,7 @@ import unittest import spack import spack.url as url -from spack.packages import PackageDB +from spack.packages import PackageFinder class PackageSanityTest(unittest.TestCase): diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index 549f829d3e..a71026d183 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -40,8 +40,8 @@ from spack.test.mock_packages_test import * class SpecDagTest(MockPackagesTest): def test_conflicting_package_constraints(self): - set_pkg_dep('mpileaks', 'mpich@1.0') - set_pkg_dep('callpath', 'mpich@2.0') + self.set_pkg_dep('mpileaks', 'mpich@1.0') + self.set_pkg_dep('callpath', 'mpich@2.0') spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf') @@ -223,25 +223,25 @@ class SpecDagTest(MockPackagesTest): def test_unsatisfiable_version(self): - set_pkg_dep('mpileaks', 'mpich@1.0') + self.set_pkg_dep('mpileaks', 'mpich@1.0') spec = Spec('mpileaks ^mpich@2.0 ^callpath ^dyninst ^libelf ^libdwarf') self.assertRaises(spack.spec.UnsatisfiableVersionSpecError, spec.normalize) def test_unsatisfiable_compiler(self): - set_pkg_dep('mpileaks', 'mpich%gcc') + self.set_pkg_dep('mpileaks', 'mpich%gcc') spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf ^libdwarf') self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize) def test_unsatisfiable_compiler_version(self): - set_pkg_dep('mpileaks', 'mpich%gcc@4.6') + self.set_pkg_dep('mpileaks', 'mpich%gcc@4.6') spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf ^libdwarf') self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize) def test_unsatisfiable_architecture(self): - set_pkg_dep('mpileaks', 'mpich=bgqos_0') + self.set_pkg_dep('mpileaks', 'mpich=bgqos_0') spec = Spec('mpileaks ^mpich=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf') self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, spec.normalize) -- cgit v1.2.3-70-g09d2 From 72c9604bcb9f0f28d0e7d63be9500b99960d118b Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 12 Nov 2015 15:17:09 -0800 Subject: Small fix to prevent this test from interfering with others. 
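The fix is the usual save-and-restore discipline for module-level globals in unittest fixtures; in sketch form (class name and stub are illustrative stand-ins for the MockPackageDb used in this test file):

    import unittest
    import spack

    class UsesStubbedDb(unittest.TestCase):
        def setUp(self):
            self.saved_db = spack.db      # remember the real database
            spack.db = object()           # stand-in for MockPackageDb(...)

        def tearDown(self):
            spack.db = self.saved_db      # restore it so later tests are unaffected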
--- lib/spack/spack/test/unit_install.py | 39 ++++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 17 deletions(-) diff --git a/lib/spack/spack/test/unit_install.py b/lib/spack/spack/test/unit_install.py index c4b9092f05..7168272997 100644 --- a/lib/spack/spack/test/unit_install.py +++ b/lib/spack/spack/test/unit_install.py @@ -26,16 +26,16 @@ import unittest import itertools import spack -test_install = __import__("spack.cmd.test-install", +test_install = __import__("spack.cmd.test-install", fromlist=["BuildId", "create_test_output", "TestResult"]) class MockOutput(object): def __init__(self): self.results = {} - + def add_test(self, buildId, passed=True, buildInfo=None): self.results[buildId] = passed - + def write_to(self, stream): pass @@ -45,14 +45,14 @@ class MockSpec(object): self.name = name self.version = version self.hash = hashStr if hashStr else hash((name, version)) - + def traverse(self, order=None): - allDeps = itertools.chain.from_iterable(i.traverse() for i in + allDeps = itertools.chain.from_iterable(i.traverse() for i in self.dependencies.itervalues()) return set(itertools.chain([self], allDeps)) - + def dag_hash(self): - return self.hash + return self.hash def to_yaml(self): return "<<>>".format(test_install.BuildId(self).stringId()) @@ -75,47 +75,52 @@ class UnitInstallTest(unittest.TestCase): def setUp(self): super(UnitInstallTest, self).setUp() - + pkgX.installed = False pkgY.installed = False + self.saved_db = spack.db pkgDb = MockPackageDb({specX:pkgX, specY:pkgY}) spack.db = pkgDb + def tearDown(self): super(UnitInstallTest, self).tearDown() - + + spack.db = self.saved_db + def test_installing_both(self): mo = MockOutput() - + pkgX.installed = True pkgY.installed = True test_install.create_test_output(specX, [specX, specY], mo, getLogFunc=test_fetch_log) - - self.assertEqual(mo.results, - {bIdX:test_install.TestResult.PASSED, + + self.assertEqual(mo.results, + {bIdX:test_install.TestResult.PASSED, bIdY:test_install.TestResult.PASSED}) + def test_dependency_already_installed(self): mo = MockOutput() - + pkgX.installed = True pkgY.installed = True test_install.create_test_output(specX, [specX], mo, getLogFunc=test_fetch_log) - + self.assertEqual(mo.results, {bIdX:test_install.TestResult.PASSED}) #TODO: add test(s) where Y fails to install + class MockPackageDb(object): def __init__(self, init=None): self.specToPkg = {} if init: self.specToPkg.update(init) - + def get(self, spec): return self.specToPkg[spec] def test_fetch_log(path): return [] - -- cgit v1.2.3-70-g09d2 From 38fdd063d9bc90aff7d934ab2bd2f02df71d9138 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 12 Nov 2015 15:17:39 -0800 Subject: Fix and move NamespaceTrie to spack.util.naming - fix up routines in namespace trie. - trie can now hold intermediate elements. - trie now has a test case. 
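Concretely, "hold intermediate elements" means a shorter namespace can now carry its own value without disturbing deeper entries; a sketch mirroring the test case added below:

    from spack.util.naming import NamespaceTrie

    trie = NamespaceTrie()
    trie['foo.bar.baz'] = 'quux'
    trie['foo.bar'] = 'blah'            # a prefix namespace holds its own value
    assert trie['foo.bar'] == 'blah'
    assert trie['foo.bar.baz'] == 'quux'
    assert 'foo' not in trie            # nothing was ever stored at 'foo' itself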
--- lib/spack/spack/packages.py | 41 +---------------- lib/spack/spack/test/__init__.py | 3 +- lib/spack/spack/test/namespace_trie.py | 83 ++++++++++++++++++++++++++++++++++ lib/spack/spack/util/naming.py | 69 +++++++++++++++++++++++++++- 4 files changed, 154 insertions(+), 42 deletions(-) create mode 100644 lib/spack/spack/test/namespace_trie.py diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index 6005523bc0..3a74ad6790 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -58,45 +58,6 @@ def _autospec(function): return converter -class NamespaceTrie(object): - def __init__(self): - self._elements = {} - - - def __setitem__(self, namespace, repo): - parts = namespace.split('.') - cur = self._elements - for p in parts[:-1]: - if p not in cur: - cur[p] = {} - cur = cur[p] - - cur[parts[-1]] = repo - - - def __getitem__(self, namespace): - parts = namespace.split('.') - cur = self._elements - for p in parts: - if p not in cur: - raise KeyError("Can't find namespace %s in trie" % namespace) - cur = cur[p] - return cur - - - def __contains__(self, namespace): - parts = namespace.split('.') - cur = self._elements - for p in parts: - if not isinstance(cur, dict): - return False - if p not in cur: - return False - cur = cur[p] - return True - - - class PackageFinder(object): """A PackageFinder is a wrapper around a list of PackageDBs. @@ -172,7 +133,7 @@ class PackageFinder(object): def providers_for(self, vpkg_name): - # TODO: THIS IS WRONG; shoudl use more than first repo + # TODO: THIS IS WRONG; should use more than first repo return self.repos[0].providers_for(vpkg_name) diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 0f776bfea4..620d1fd362 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -59,7 +59,8 @@ test_names = ['versions', 'configure_guess', 'unit_install', 'lock', - 'database'] + 'database', + 'namespace_trie'] def list_tests(): diff --git a/lib/spack/spack/test/namespace_trie.py b/lib/spack/spack/test/namespace_trie.py new file mode 100644 index 0000000000..191abbe9e6 --- /dev/null +++ b/lib/spack/spack/test/namespace_trie.py @@ -0,0 +1,83 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import unittest +from spack.util.naming import NamespaceTrie + + +class NamespaceTrieTest(unittest.TestCase): + + def setUp(self): + self.trie = NamespaceTrie() + + + def test_add_single(self): + self.trie['foo'] = 'bar' + self.assertEqual(self.trie['foo'], 'bar') + self.assertTrue('foo' in self.trie) + + + def test_add_multiple(self): + self.trie['foo.bar'] = 'baz' + self.assertEqual(self.trie['foo.bar'], 'baz') + + self.assertFalse('foo' in self.trie) + self.assertFalse('foo.bar.baz' in self.trie) + self.assertTrue('foo.bar' in self.trie) + + + def test_add_three(self): + # add a three-level namespace + self.trie['foo.bar.baz'] = 'quux' + self.assertEqual(self.trie['foo.bar.baz'], 'quux') + + self.assertFalse('foo' in self.trie) + self.assertFalse('foo.bar' in self.trie) + self.assertTrue('foo.bar.baz' in self.trie) + + # Try to add a second element in a higher space + self.trie['foo.bar'] = 'blah' + + self.assertFalse('foo' in self.trie) + + self.assertTrue('foo.bar' in self.trie) + self.assertEqual(self.trie['foo.bar'], 'blah') + + self.assertTrue('foo.bar.baz' in self.trie) + self.assertEqual(self.trie['foo.bar.baz'], 'quux') + + + def test_add_none_single(self): + self.trie['foo'] = None + self.assertEqual(self.trie['foo'], None) + self.assertTrue('foo' in self.trie) + + + def test_add_none_multiple(self): + self.trie['foo.bar'] = None + self.assertEqual(self.trie['foo.bar'], None) + + self.assertFalse('foo' in self.trie) + self.assertFalse('foo.bar.baz' in self.trie) + self.assertTrue('foo.bar' in self.trie) diff --git a/lib/spack/spack/util/naming.py b/lib/spack/spack/util/naming.py index a7b6e2b436..475062bb38 100644 --- a/lib/spack/spack/util/naming.py +++ b/lib/spack/spack/util/naming.py @@ -3,11 +3,12 @@ from __future__ import absolute_import import string import itertools import re +from StringIO import StringIO import spack __all__ = ['mod_to_class', 'spack_module_to_python_module', 'valid_module_name', - 'validate_module_name', 'possible_spack_module_names'] + 'validate_module_name', 'possible_spack_module_names', 'NamespaceTrie'] # Valid module names can contain '-' but can't start with it. 
_valid_module_re = r'^\w[\w-]*$' @@ -90,3 +91,69 @@ class InvalidModuleNameError(spack.error.SpackError): super(InvalidModuleNameError, self).__init__( "Invalid module name: " + name) self.name = name + + +class NamespaceTrie(object): + class Element(object): + def __init__(self, value): + self.value = value + + + def __init__(self, separator='.'): + self._subspaces = {} + self._value = None + self._sep = separator + + + def __setitem__(self, namespace, value): + first, sep, rest = namespace.partition(self._sep) + + if not first: + self._value = NamespaceTrie.Element(value) + return + + if first not in self._subspaces: + self._subspaces[first] = NamespaceTrie() + + self._subspaces[first][rest] = value + + + def _get_helper(self, namespace, full_name): + first, sep, rest = namespace.partition(self._sep) + if not first: + if not self._value: + raise KeyError("Can't find namespace '%s' in trie" % full_name) + return self._value.value + elif first not in self._subspaces: + raise KeyError("Can't find namespace '%s' in trie" % full_name) + else: + return self._subspaces[first]._get_helper(rest, full_name) + + + def __getitem__(self, namespace): + return self._get_helper(namespace, namespace) + + + def __contains__(self, namespace): + first, sep, rest = namespace.partition(self._sep) + if not first: + return self._value is not None + elif first not in self._subspaces: + return False + else: + return rest in self._subspaces[first] + + + def _str_helper(self, stream, level=0): + indent = (level * ' ') + for name in sorted(self._subspaces): + stream.write(indent + name + '\n') + if self._value: + stream.write(indent + ' ' + repr(self._value.value)) + stream.write(self._subspaces[name]._str_helper(stream, level+1)) + + + def __str__(self): + stream = StringIO() + self._str_helper(stream) + return stream.getvalue() -- cgit v1.2.3-70-g09d2 From 8c06b92225699fdd85185688b88d559a54c8a794 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 22 Nov 2015 20:28:59 -0800 Subject: Working PackageFinder / Repo --- .mailmap | 2 + lib/spack/spack/__init__.py | 6 - lib/spack/spack/packages.py | 375 ++++++++++++++++++++----------- lib/spack/spack/patch.py | 2 +- lib/spack/spack/test/config.py | 1 - lib/spack/spack/test/namespace_trie.py | 61 +++-- lib/spack/spack/test/package_sanity.py | 5 +- lib/spack/spack/test/packages.py | 6 +- lib/spack/spack/test/url_substitution.py | 1 - lib/spack/spack/util/naming.py | 32 ++- 10 files changed, 331 insertions(+), 160 deletions(-) diff --git a/.mailmap b/.mailmap index 1cc13c1eb1..1b99da32b5 100644 --- a/.mailmap +++ b/.mailmap @@ -9,3 +9,5 @@ Saravan Pantham Saravan Pantham Tom Scogland Tom Scogland Tom Scogland Joachim Protze jprotze +Gregory L. Lee Gregory L. Lee +Gregory L. Lee Gregory Lee diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index eccec12d3b..549d0a9a0f 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -51,12 +51,6 @@ opt_path = join_path(prefix, "opt") install_path = join_path(opt_path, "spack") etc_path = join_path(prefix, "etc") -# -# Setup the spack.repos namespace -# -from spack.repo_loader import RepoNamespace -repos = RepoNamespace() - # # Set up the default packages database. 
# diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index 3a74ad6790..8114b7f1aa 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -31,6 +31,7 @@ import imp import re import itertools import traceback +from bisect import bisect_left from external import yaml import llnl.util.tty as tty @@ -58,11 +59,19 @@ def _autospec(function): return converter +def _make_namespace_module(ns): + module = imp.new_module(ns) + module.__file__ = "(spack namespace)" + module.__path__ = [] + module.__package__ = ns + return module + + class PackageFinder(object): - """A PackageFinder is a wrapper around a list of PackageDBs. + """A PackageFinder is a wrapper around a list of Repos. - It functions exactly like a PackageDB, but it operates on the - combined results of the PackageDBs in its list instead of on a + It functions exactly like a Repo, but it operates on the + combined results of the Repos in its list instead of on a single package repository. """ def __init__(self, *repo_dirs): @@ -70,61 +79,74 @@ class PackageFinder(object): self.by_namespace = NamespaceTrie() self.by_path = {} + self._all_package_names = [] + self._provider_index = None + for root in repo_dirs: - repo = PackageDB(root) + repo = Repo(root) self.put_last(repo) - def _check_repo(self, repo): + def swap(self, other): + """Convenience function to make swapping repostiories easier. + + This is currently used by mock tests. + TODO: Maybe there is a cleaner way. + + """ + attrs = ['repos', 'by_namespace', 'by_path', '_all_package_names', '_provider_index'] + for attr in attrs: + tmp = getattr(self, attr) + setattr(self, attr, getattr(other, attr)) + setattr(other, attr, tmp) + + + def _add(self, repo): + """Add a repository to the namespace and path indexes. + + Checks for duplicates -- two repos can't have the same root + directory, and they provide have the same namespace. 
+ + """ if repo.root in self.by_path: raise DuplicateRepoError("Package repos are the same", repo, self.by_path[repo.root]) if repo.namespace in self.by_namespace: - tty.error("Package repos cannot have the same name", - repo, self.by_namespace[repo.namespace]) + raise DuplicateRepoError("Package repos cannot have the same name", + repo, self.by_namespace[repo.namespace]) - - def _add(self, repo): - self._check_repo(repo) + # Add repo to the pkg indexes self.by_namespace[repo.namespace] = repo self.by_path[repo.root] = repo + # add names to the cached name list + new_pkgs = set(repo.all_package_names()) + new_pkgs.update(set(self._all_package_names)) + self._all_package_names = sorted(new_pkgs, key=lambda n:n.lower()) + def put_first(self, repo): + """Add repo first in the search path.""" self._add(repo) self.repos.insert(0, repo) def put_last(self, repo): + """Add repo last in the search path.""" self._add(repo) self.repos.append(repo) def remove(self, repo): + """Remove a repo from the search path.""" if repo in self.repos: self.repos.remove(repo) - def swap(self, other): - repos = self.repos - by_namespace = self.by_namespace - by_path = self.by_path - - self.repos = other.repos - self.by_namespace = other.by_namespace - self.by_pah = other.by_path - - other.repos = repos - other.by_namespace = by_namespace - other.by_path = by_path - - def all_package_names(self): - all_pkgs = set() - for repo in self.repos: - all_pkgs.update(set(repo.all_package_names())) - return all_pkgs + """Return all unique package names in all repositories.""" + return self._all_package_names def all_packages(self): @@ -132,109 +154,82 @@ class PackageFinder(object): yield self.get(name) - def providers_for(self, vpkg_name): - # TODO: THIS IS WRONG; should use more than first repo - return self.repos[0].providers_for(vpkg_name) - + @_autospec + def providers_for(self, vpkg_spec): + if self._provider_index is None: + self._provider_index = ProviderIndex(self.all_package_names()) - def _get_spack_pkg_name(self, repo, py_module_name): - """Allow users to import Spack packages using legal Python identifiers. + providers = self._provider_index.providers_for(vpkg_spec) + if not providers: + raise UnknownPackageError(vpkg_spec.name) + return providers - A python identifier might map to many different Spack package - names due to hyphen/underscore ambiguity. - Easy example: - num3proxy -> 3proxy + def find_module(self, fullname, path=None): + """Implements precedence for overlaid namespaces. - Ambiguous: - foo_bar -> foo_bar, foo-bar + Loop checks each namespace in self.repos for packages, and + also handles loading empty containing namespaces. - More ambiguous: - foo_bar_baz -> foo_bar_baz, foo-bar-baz, foo_bar-baz, foo-bar_baz """ - if py_module_name in repo: - return py_module_name - - options = possible_spack_module_names(py_module_name) - options.remove(py_module_name) - for name in options: - if name in repo: - return name - - return None + # namespaces are added to repo, and package modules are leaves. + namespace, dot, module_name = fullname.rpartition('.') + # If it's a module in some repo, or if it is the repo's + # namespace, let the repo handle it. + for repo in self.repos: + if namespace == repo.namespace: + if repo.real_name(module_name): + return repo + elif fullname == repo.namespace: + return repo - def find_module(self, fullname, path=None): - if fullname in self.by_namespace: + # No repo provides the namespace, but it is a valid prefix of + # something in the PackageFinder. 
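+        # (Background: instances of this class are appended to sys.meta_path,
+        # so Python calls find_module() for every import statement.  Returning
+        # a loader object, here self, claims the import; returning None lets
+        # the next importer on sys.meta_path try.  load_module() then builds
+        # and returns the actual module object.)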
+ if self.by_namespace.is_prefix(fullname): return self - namespace, dot, module_name = fullname.rpartition('.') - if namespace not in self.by_namespace: - return None - - repo = self.by_namespace[namespace] - name = self._get_spack_pkg_name(repo, module_name) - if not name: - return None - - return self + return None def load_module(self, fullname): + """Loads containing namespaces when necessary. + + See ``Repo`` for how actual package modules are loaded. + """ if fullname in sys.modules: return sys.modules[fullname] - if fullname in self.by_namespace: - ns = self.by_namespace[fullname] - module = imp.new_module(fullname) - module.__file__ = "" - module.__path__ = [] - module.__package__ = fullname + # partition fullname into prefix and module name. + namespace, dot, module_name = fullname.rpartition('.') - else: - namespace, dot, module_name = fullname.rpartition('.') - if namespace not in self.by_namespace: - raise ImportError( - "No Spack repository with namespace %s" % namespace) - - repo = self.by_namespace[namespace] - name = self._get_spack_pkg_name(repo, module_name) - if not name: - raise ImportError( - "No module %s in Spack repository %s" % (module_name, repo)) - - fullname = namespace + '.' + name - file_path = os.path.join(repo.root, name, package_file_name) - module = imp.load_source(fullname, file_path) - module.__package__ = namespace + if not self.by_namespace.is_prefix(fullname): + raise ImportError("No such Spack repo: %s" % fullname) + module = _make_namespace_module(namespace) module.__loader__ = self sys.modules[fullname] = module return module - def _find_repo_for_spec(self, spec): - """Find a repo that contains the supplied spec's package. - - Raises UnknownPackageErrorfor if not found. - """ + def repo_for_pkg(self, pkg_name): for repo in self.repos: - if spec.name in repo: + if pkg_name in repo: return repo - raise UnknownPackageError(spec.name) + raise UnknownPackageError(pkg_name) @_autospec def get(self, spec, new=False): - return self._find_repo_for_spec(spec).get(spec, new) + """Find a repo that contains the supplied spec's package. + Raises UnknownPackageError if not found. + """ + return self.repo_for_pkg(spec.name).get(spec) - def get_repo(self, namespace): - if namespace in self.by_namespace: - repo = self.by_namespace[namespace] - if isinstance(repo, PackageDB): - return repo - return None + + def dirname_for_package_name(self, pkg_name): + return self.repo_for_pkg(pkg_name).dirname_for_package_name(pkg_name) def exists(self, pkg_name): @@ -246,7 +241,7 @@ class PackageFinder(object): -class PackageDB(object): +class Repo(object): """Class representing a package repository in the filesystem. Each package repository must have a top-level configuration file @@ -278,10 +273,107 @@ class PackageDB(object): tty.die(("Invalid namespace '%s' in '%s'. Namespaces must be " "valid python identifiers separated by '.'") % (self.namespace, self.root)) + self._names = self.namespace.split('.') # These are internal cache variables. + self._modules = {} + self._classes = {} self._instances = {} + self._provider_index = None + self._all_package_names = None + + # make sure the namespace for packages in this repo exists. + self._create_namespace() + + + def _create_namespace(self): + """Create this repo's namespace module and insert it into sys.modules. + + Ensures that modules loaded via the repo have a home, and that + we don't get runtime warnings from Python's module system. 
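+        For example, a repo with namespace ``gov.llnl.spack.mock`` gets
+        placeholder modules for ``gov``, ``gov.llnl``, ``gov.llnl.spack``,
+        and ``gov.llnl.spack.mock`` registered in sys.modules.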
+ + """ + for l in range(1, len(self._names)+1): + ns = '.'.join(self._names[:l]) + if not ns in sys.modules: + sys.modules[ns] = _make_namespace_module(ns) + sys.modules[ns].__loader__ = self + + + def real_name(self, import_name): + """Allow users to import Spack packages using Python identifiers. + + A python identifier might map to many different Spack package + names due to hyphen/underscore ambiguity. + + Easy example: + num3proxy -> 3proxy + + Ambiguous: + foo_bar -> foo_bar, foo-bar + + More ambiguous: + foo_bar_baz -> foo_bar_baz, foo-bar-baz, foo_bar-baz, foo-bar_baz + """ + if import_name in self: + return import_name + + options = possible_spack_module_names(import_name) + options.remove(import_name) + for name in options: + if name in self: + return name + return None + + + def is_prefix(self, fullname): + """True if fullname is a prefix of this Repo's namespace.""" + parts = fullname.split('.') + return self._names[:len(parts)] == parts + + + def find_module(self, fullname, path=None): + """Python find_module import hook. + + Returns this Repo if it can load the module; None if not. + """ + if self.is_prefix(fullname): + return self + + namespace, dot, module_name = fullname.rpartition('.') + if namespace == self.namespace: + if self.real_name(module_name): + return self + + return None + + + def load_module(self, fullname): + """Python importer load hook. + + Tries to load the module; raises an ImportError if it can't. + """ + if fullname in sys.modules: + return sys.modules[fullname] + + namespace, dot, module_name = fullname.rpartition('.') + + if self.is_prefix(fullname): + module = _make_namespace_module(fullname) + + elif namespace == self.namespace: + real_name = self.real_name(module_name) + if not real_name: + raise ImportError("No module %s in repo %s" % (module_name, namespace)) + module = self._get_pkg_module(real_name) + + else: + raise ImportError("No module %s in repo %s" % (fullname, self.namespace)) + + module.__loader__ = self + sys.modules[fullname] = module + return module def _read_config(self): @@ -307,15 +399,14 @@ class PackageDB(object): if spec.virtual: raise UnknownPackageError(spec.name) - if new: - if spec in self._instances: - del self._instances[spec] + if new and spec in self._instances: + del self._instances[spec] if not spec in self._instances: - package_class = self.get_class_for_package_name(spec.name, spec.namespace) + PackageClass = self._get_pkg_class(spec.name) try: copy = spec.copy() - self._instances[copy] = package_class(copy) + self._instances[copy] = PackageClass(copy) except Exception, e: if spack.debug: sys.excepthook(*sys.exc_info()) @@ -353,7 +444,7 @@ class PackageDB(object): def filename_for_package_name(self, pkg_name): """Get the filename for the module we should load for a particular - package. Packages for a pacakge DB live in + package. Packages for a Repo live in ``$root//package.py`` This will return a proper package.py path even if the @@ -366,19 +457,20 @@ class PackageDB(object): return join_path(pkg_dir, package_file_name) - @memoized def all_package_names(self): - """Generator function for all packages. 
This looks for - ``/package.py`` files within the repo direcotories""" - all_package_names = [] + """Returns a sorted list of all package names in the Repo.""" + if self._all_package_names is None: + self._all_package_names = [] + + for pkg_name in os.listdir(self.root): + pkg_dir = join_path(self.root, pkg_name) + pkg_file = join_path(pkg_dir, package_file_name) + if os.path.isfile(pkg_file): + self._all_package_names.append(pkg_name) - for pkg_name in os.listdir(self.root): - pkg_dir = join_path(self.root, pkg_name) - pkg_file = join_path(pkg_dir, package_file_name) - if os.path.isfile(pkg_file): - all_package_names.append(pkg_name) + self._all_package_names.sort() - return sorted(all_package_names) + return self._all_package_names def all_packages(self): @@ -386,27 +478,54 @@ class PackageDB(object): yield self.get(name) - @memoized def exists(self, pkg_name): """Whether a package with the supplied name exists.""" - return os.path.exists(self.filename_for_package_name(pkg_name)) + # This does a binary search in the sorted list. + idx = bisect_left(self.all_package_names(), pkg_name) + return self._all_package_names[idx] == pkg_name - @memoized - def get_class_for_package_name(self, pkg_name, reponame = None): - """Get an instance of the class for a particular package.""" - file_path = self.filename_for_package_name(pkg_name) + def _get_pkg_module(self, pkg_name): + """Create a module for a particular package. + + This caches the module within this Repo *instance*. It does + *not* add it to ``sys.modules``. So, you can construct + multiple Repos for testing and ensure that the module will be + loaded once per repo. + + """ + if pkg_name not in self._modules: + file_path = self.filename_for_package_name(pkg_name) + + if not os.path.exists(file_path): + raise UnknownPackageError(pkg_name, self.namespace) - if os.path.exists(file_path): if not os.path.isfile(file_path): tty.die("Something's wrong. '%s' is not a file!" % file_path) + if not os.access(file_path, os.R_OK): tty.die("Cannot read '%s'!" % file_path) - else: - raise UnknownPackageError(pkg_name, self.namespace) + fullname = "%s.%s" % (self.namespace, pkg_name) + + module = imp.load_source(fullname, file_path) + module.__package__ = self.namespace + module.__loader__ = self + self._modules[pkg_name] = module + + return self._modules[pkg_name] + + + def _get_pkg_class(self, pkg_name): + """Get the class for the package out of its module. + + First loads (or fetches from cache) a module for the + package. Then extracts the package class from the module + according to Spack's naming convention. + """ class_name = mod_to_class(pkg_name) - module = __import__(self.namespace + '.' 
+ pkg_name, fromlist=[class_name]) + module = self._get_pkg_module(pkg_name) + cls = getattr(module, class_name) if not inspect.isclass(cls): tty.die("%s.%s is not a class" % (pkg_name, class_name)) @@ -415,7 +534,7 @@ class PackageDB(object): def __str__(self): - return "" % (self.namespace, self.root) + return "" % (self.namespace, self.root) def __repr__(self): diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py index da5fa1646b..fe6e0a65a3 100644 --- a/lib/spack/spack/patch.py +++ b/lib/spack/spack/patch.py @@ -54,7 +54,7 @@ class Patch(object): if '://' in path_or_url: self.url = path_or_url else: - pkg_dir = spack.db.dirname_for_package_name(pkg_name) + pkg_dir = spack.db.dirname_for_package_name(self.pkg_name) self.path = join_path(pkg_dir, path_or_url) if not os.path.isfile(self.path): raise NoSuchPatchFileError(pkg_name, self.path) diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index eed182a257..b1195dfe4e 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -27,7 +27,6 @@ import shutil import os from tempfile import mkdtemp import spack -from spack.packages import PackageDB from spack.test.mock_packages_test import * # Some sample compiler config data diff --git a/lib/spack/spack/test/namespace_trie.py b/lib/spack/spack/test/namespace_trie.py index 191abbe9e6..2fa13688e6 100644 --- a/lib/spack/spack/test/namespace_trie.py +++ b/lib/spack/spack/test/namespace_trie.py @@ -34,50 +34,81 @@ class NamespaceTrieTest(unittest.TestCase): def test_add_single(self): self.trie['foo'] = 'bar' + + self.assertTrue(self.trie.is_prefix('foo')) + self.assertTrue(self.trie.has_value('foo')) self.assertEqual(self.trie['foo'], 'bar') - self.assertTrue('foo' in self.trie) def test_add_multiple(self): self.trie['foo.bar'] = 'baz' + + self.assertFalse(self.trie.has_value('foo')) + self.assertTrue(self.trie.is_prefix('foo')) + + self.assertTrue(self.trie.is_prefix('foo.bar')) + self.assertTrue(self.trie.has_value('foo.bar')) self.assertEqual(self.trie['foo.bar'], 'baz') - self.assertFalse('foo' in self.trie) - self.assertFalse('foo.bar.baz' in self.trie) - self.assertTrue('foo.bar' in self.trie) + self.assertFalse(self.trie.is_prefix('foo.bar.baz')) + self.assertFalse(self.trie.has_value('foo.bar.baz')) def test_add_three(self): # add a three-level namespace self.trie['foo.bar.baz'] = 'quux' + + self.assertTrue(self.trie.is_prefix('foo')) + self.assertFalse(self.trie.has_value('foo')) + + self.assertTrue(self.trie.is_prefix('foo.bar')) + self.assertFalse(self.trie.has_value('foo.bar')) + + self.assertTrue(self.trie.is_prefix('foo.bar.baz')) + self.assertTrue(self.trie.has_value('foo.bar.baz')) self.assertEqual(self.trie['foo.bar.baz'], 'quux') - self.assertFalse('foo' in self.trie) - self.assertFalse('foo.bar' in self.trie) - self.assertTrue('foo.bar.baz' in self.trie) + self.assertFalse(self.trie.is_prefix('foo.bar.baz.quux')) + self.assertFalse(self.trie.has_value('foo.bar.baz.quux')) - # Try to add a second element in a higher space + # Try to add a second element in a prefix namespace self.trie['foo.bar'] = 'blah' - self.assertFalse('foo' in self.trie) + self.assertTrue(self.trie.is_prefix('foo')) + self.assertFalse(self.trie.has_value('foo')) - self.assertTrue('foo.bar' in self.trie) + self.assertTrue(self.trie.is_prefix('foo.bar')) + self.assertTrue(self.trie.has_value('foo.bar')) self.assertEqual(self.trie['foo.bar'], 'blah') - self.assertTrue('foo.bar.baz' in self.trie) + self.assertTrue(self.trie.is_prefix('foo.bar.baz')) 
+ self.assertTrue(self.trie.has_value('foo.bar.baz')) self.assertEqual(self.trie['foo.bar.baz'], 'quux') + self.assertFalse(self.trie.is_prefix('foo.bar.baz.quux')) + self.assertFalse(self.trie.has_value('foo.bar.baz.quux')) + def test_add_none_single(self): self.trie['foo'] = None + self.assertTrue(self.trie.is_prefix('foo')) + self.assertTrue(self.trie.has_value('foo')) self.assertEqual(self.trie['foo'], None) - self.assertTrue('foo' in self.trie) + + self.assertFalse(self.trie.is_prefix('foo.bar')) + self.assertFalse(self.trie.has_value('foo.bar')) + def test_add_none_multiple(self): self.trie['foo.bar'] = None + + self.assertTrue(self.trie.is_prefix('foo')) + self.assertFalse(self.trie.has_value('foo')) + + self.assertTrue(self.trie.is_prefix('foo.bar')) + self.assertTrue(self.trie.has_value('foo.bar')) self.assertEqual(self.trie['foo.bar'], None) - self.assertFalse('foo' in self.trie) - self.assertFalse('foo.bar.baz' in self.trie) - self.assertTrue('foo.bar' in self.trie) + self.assertFalse(self.trie.is_prefix('foo.bar.baz')) + self.assertFalse(self.trie.has_value('foo.bar.baz')) diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py index a5925ea066..a398568244 100644 --- a/lib/spack/spack/test/package_sanity.py +++ b/lib/spack/spack/test/package_sanity.py @@ -28,7 +28,6 @@ This test does sanity checks on Spack's builtin package database. import unittest import spack -import spack.url as url from spack.packages import PackageFinder @@ -45,7 +44,7 @@ class PackageSanityTest(unittest.TestCase): self.check_db() - def test_get_all_mock_packages(self): + def ztest_get_all_mock_packages(self): """Get the mock packages once each too.""" db = PackageFinder(spack.mock_packages_path) spack.db.swap(db) @@ -53,7 +52,7 @@ class PackageSanityTest(unittest.TestCase): spack.db.swap(db) - def test_url_versions(self): + def ztest_url_versions(self): """Check URLs for regular packages, if they are explicitly defined.""" for pkg in spack.db.all_packages(): for v, vattrs in pkg.versions.items(): diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py index 42bd91ec5c..2d19d9ddc7 100644 --- a/lib/spack/spack/test/packages.py +++ b/lib/spack/spack/test/packages.py @@ -27,7 +27,7 @@ import unittest from llnl.util.filesystem import join_path import spack -import spack.packages as packages +from spack.packages import Repo from spack.util.naming import mod_to_class from spack.test.mock_packages_test import * @@ -44,7 +44,7 @@ class PackagesTest(MockPackagesTest): def test_package_filename(self): - repo = spack.db.get_repo('gov.llnl.spack.mock') + repo = Repo(spack.mock_packages_path) filename = repo.filename_for_package_name('mpich') self.assertEqual(filename, join_path(spack.mock_packages_path, 'mpich', 'package.py')) @@ -55,7 +55,7 @@ class PackagesTest(MockPackagesTest): def test_nonexisting_package_filename(self): - repo = spack.db.get_repo('gov.llnl.spack.mock') + repo = Repo(spack.mock_packages_path) filename = repo.filename_for_package_name('some-nonexisting-package') self.assertEqual(filename, join_path(spack.mock_packages_path, 'some-nonexisting-package', 'package.py')) diff --git a/lib/spack/spack/test/url_substitution.py b/lib/spack/spack/test/url_substitution.py index db7ddd251d..3ff76f63be 100644 --- a/lib/spack/spack/test/url_substitution.py +++ b/lib/spack/spack/test/url_substitution.py @@ -29,7 +29,6 @@ import unittest import spack import spack.url as url -from spack.packages import PackageDB class 
PackageSanityTest(unittest.TestCase): diff --git a/lib/spack/spack/util/naming.py b/lib/spack/spack/util/naming.py index 475062bb38..26ca86c77f 100644 --- a/lib/spack/spack/util/naming.py +++ b/lib/spack/spack/util/naming.py @@ -134,14 +134,42 @@ class NamespaceTrie(object): return self._get_helper(namespace, namespace) - def __contains__(self, namespace): + def is_prefix(self, namespace): + """True if the namespace has a value, or if it's the prefix of one that does.""" + first, sep, rest = namespace.partition(self._sep) + if not first: + return True + elif first not in self._subspaces: + return False + else: + return self._subspaces[first].is_prefix(rest) + + + def is_leaf(self, namespace): + """True if this namespace has no children in the trie.""" + first, sep, rest = namespace.partition(self._sep) + if not first: + return bool(self._subspaces) + elif first not in self._subspaces: + return False + else: + return self._subspaces[first].is_leaf(rest) + + + def has_value(self, namespace): + """True if there is a value set for the given namespace.""" first, sep, rest = namespace.partition(self._sep) if not first: return self._value is not None elif first not in self._subspaces: return False else: - return rest in self._subspaces[first] + return self._subspaces[first].has_value(rest) + + + def __contains__(self, namespace): + """Returns whether a value has been set for the namespace.""" + return self.has_value(namespace) def _str_helper(self, stream, level=0): -- cgit v1.2.3-70-g09d2 From e6d232bfefda069d876466faaa0c8e9e4a11b4f3 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 25 Nov 2015 09:58:10 -0800 Subject: Rename packages.py to repository.py, PackageFinder to RepoPath. --- lib/spack/spack/__init__.py | 4 +- lib/spack/spack/cmd/repo.py | 2 +- lib/spack/spack/cmd/uninstall.py | 4 +- lib/spack/spack/packages.py | 620 ---------------------------- lib/spack/spack/repo_loader.py | 110 ----- lib/spack/spack/repository.py | 627 +++++++++++++++++++++++++++++ lib/spack/spack/test/directory_layout.py | 4 +- lib/spack/spack/test/mock_packages_test.py | 4 +- lib/spack/spack/test/package_sanity.py | 4 +- lib/spack/spack/test/packages.py | 2 +- 10 files changed, 639 insertions(+), 742 deletions(-) delete mode 100644 lib/spack/spack/packages.py delete mode 100644 lib/spack/spack/repo_loader.py create mode 100644 lib/spack/spack/repository.py diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 549d0a9a0f..20ae8c9272 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -54,11 +54,11 @@ etc_path = join_path(prefix, "etc") # # Set up the default packages database. 
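# The RepoPath constructed below is also appended to sys.meta_path, so it
# doubles as the import hook that resolves package modules for each repo.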
# -import spack.packages +import spack.repository _repo_paths = spack.config.get_repos_config() if not _repo_paths: tty.die("Spack configuration contains no package repositories.") -db = spack.packages.PackageFinder(*_repo_paths) +db = spack.repository.RepoPath(*_repo_paths) sys.meta_path.append(db) # diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py index e290f60b7b..87f21b833d 100644 --- a/lib/spack/spack/cmd/repo.py +++ b/lib/spack/spack/cmd/repo.py @@ -32,7 +32,7 @@ from llnl.util.filesystem import join_path, mkdirp import spack.spec import spack.config from spack.util.environment import get_path -from spack.packages import repo_config_filename +from spack.repository import repo_config_filename import os import exceptions diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index e80f2d2636..eba76ef71d 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -30,7 +30,7 @@ from llnl.util.tty.colify import colify import spack import spack.cmd -import spack.packages +import spack.repository from spack.cmd.find import display_specs from spack.package import PackageStillNeededError @@ -80,7 +80,7 @@ def uninstall(parser, args): # should work if package is known to spack pkgs.append(s.package) - except spack.packages.UnknownPackageError, e: + except spack.repository.UnknownPackageError, e: # The package.py file has gone away -- but still want to uninstall. spack.Package(s).do_uninstall(force=True) diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py deleted file mode 100644 index 8114b7f1aa..0000000000 --- a/lib/spack/spack/packages.py +++ /dev/null @@ -1,620 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -import os -import exceptions -import sys -import inspect -import glob -import imp -import re -import itertools -import traceback -from bisect import bisect_left -from external import yaml - -import llnl.util.tty as tty -from llnl.util.filesystem import join_path -from llnl.util.lang import * - -import spack.error -import spack.spec -from spack.virtual import ProviderIndex -from spack.util.naming import * - -# Filename for package repo names -repo_config_filename = '_repo.yaml' - -# Filename for packages in repos. 
-package_file_name = 'package.py' - -def _autospec(function): - """Decorator that automatically converts the argument of a single-arg - function to a Spec.""" - def converter(self, spec_like, *args, **kwargs): - if not isinstance(spec_like, spack.spec.Spec): - spec_like = spack.spec.Spec(spec_like) - return function(self, spec_like, *args, **kwargs) - return converter - - -def _make_namespace_module(ns): - module = imp.new_module(ns) - module.__file__ = "(spack namespace)" - module.__path__ = [] - module.__package__ = ns - return module - - -class PackageFinder(object): - """A PackageFinder is a wrapper around a list of Repos. - - It functions exactly like a Repo, but it operates on the - combined results of the Repos in its list instead of on a - single package repository. - """ - def __init__(self, *repo_dirs): - self.repos = [] - self.by_namespace = NamespaceTrie() - self.by_path = {} - - self._all_package_names = [] - self._provider_index = None - - for root in repo_dirs: - repo = Repo(root) - self.put_last(repo) - - - def swap(self, other): - """Convenience function to make swapping repostiories easier. - - This is currently used by mock tests. - TODO: Maybe there is a cleaner way. - - """ - attrs = ['repos', 'by_namespace', 'by_path', '_all_package_names', '_provider_index'] - for attr in attrs: - tmp = getattr(self, attr) - setattr(self, attr, getattr(other, attr)) - setattr(other, attr, tmp) - - - def _add(self, repo): - """Add a repository to the namespace and path indexes. - - Checks for duplicates -- two repos can't have the same root - directory, and they provide have the same namespace. - - """ - if repo.root in self.by_path: - raise DuplicateRepoError("Package repos are the same", - repo, self.by_path[repo.root]) - - if repo.namespace in self.by_namespace: - raise DuplicateRepoError("Package repos cannot have the same name", - repo, self.by_namespace[repo.namespace]) - - # Add repo to the pkg indexes - self.by_namespace[repo.namespace] = repo - self.by_path[repo.root] = repo - - # add names to the cached name list - new_pkgs = set(repo.all_package_names()) - new_pkgs.update(set(self._all_package_names)) - self._all_package_names = sorted(new_pkgs, key=lambda n:n.lower()) - - - def put_first(self, repo): - """Add repo first in the search path.""" - self._add(repo) - self.repos.insert(0, repo) - - - def put_last(self, repo): - """Add repo last in the search path.""" - self._add(repo) - self.repos.append(repo) - - - def remove(self, repo): - """Remove a repo from the search path.""" - if repo in self.repos: - self.repos.remove(repo) - - - def all_package_names(self): - """Return all unique package names in all repositories.""" - return self._all_package_names - - - def all_packages(self): - for name in self.all_package_names(): - yield self.get(name) - - - @_autospec - def providers_for(self, vpkg_spec): - if self._provider_index is None: - self._provider_index = ProviderIndex(self.all_package_names()) - - providers = self._provider_index.providers_for(vpkg_spec) - if not providers: - raise UnknownPackageError(vpkg_spec.name) - return providers - - - def find_module(self, fullname, path=None): - """Implements precedence for overlaid namespaces. - - Loop checks each namespace in self.repos for packages, and - also handles loading empty containing namespaces. - - """ - # namespaces are added to repo, and package modules are leaves. 
- namespace, dot, module_name = fullname.rpartition('.') - - # If it's a module in some repo, or if it is the repo's - # namespace, let the repo handle it. - for repo in self.repos: - if namespace == repo.namespace: - if repo.real_name(module_name): - return repo - elif fullname == repo.namespace: - return repo - - # No repo provides the namespace, but it is a valid prefix of - # something in the PackageFinder. - if self.by_namespace.is_prefix(fullname): - return self - - return None - - - def load_module(self, fullname): - """Loads containing namespaces when necessary. - - See ``Repo`` for how actual package modules are loaded. - """ - if fullname in sys.modules: - return sys.modules[fullname] - - # partition fullname into prefix and module name. - namespace, dot, module_name = fullname.rpartition('.') - - if not self.by_namespace.is_prefix(fullname): - raise ImportError("No such Spack repo: %s" % fullname) - - module = _make_namespace_module(namespace) - module.__loader__ = self - sys.modules[fullname] = module - return module - - - def repo_for_pkg(self, pkg_name): - for repo in self.repos: - if pkg_name in repo: - return repo - raise UnknownPackageError(pkg_name) - - - @_autospec - def get(self, spec, new=False): - """Find a repo that contains the supplied spec's package. - - Raises UnknownPackageError if not found. - """ - return self.repo_for_pkg(spec.name).get(spec) - - - def dirname_for_package_name(self, pkg_name): - return self.repo_for_pkg(pkg_name).dirname_for_package_name(pkg_name) - - - def exists(self, pkg_name): - return any(repo.exists(pkg_name) for repo in self.repos) - - - def __contains__(self, pkg_name): - return self.exists(pkg_name) - - - -class Repo(object): - """Class representing a package repository in the filesystem. - - Each package repository must have a top-level configuration file - called `_repo.yaml`. - - Currently, `_repo.yaml` this must define: - - `namespace`: - A Python namespace where the repository's packages should live. - - """ - def __init__(self, root): - """Instantiate a package repository from a filesystem path.""" - # Root directory, containing _repo.yaml and package dirs - self.root = root - - # Config file in /_repo.yaml - self.config_file = os.path.join(self.root, repo_config_filename) - - # Read configuration from _repo.yaml - config = self._read_config() - if not 'namespace' in config: - tty.die('Package repo in %s must define a namespace in %s.' - % (self.root, repo_config_filename)) - - # Check namespace in the repository configuration. - self.namespace = config['namespace'] - if not re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace): - tty.die(("Invalid namespace '%s' in '%s'. Namespaces must be " - "valid python identifiers separated by '.'") - % (self.namespace, self.root)) - self._names = self.namespace.split('.') - - # These are internal cache variables. - self._modules = {} - self._classes = {} - self._instances = {} - - self._provider_index = None - self._all_package_names = None - - # make sure the namespace for packages in this repo exists. - self._create_namespace() - - - def _create_namespace(self): - """Create this repo's namespace module and insert it into sys.modules. - - Ensures that modules loaded via the repo have a home, and that - we don't get runtime warnings from Python's module system. 
- - """ - for l in range(1, len(self._names)+1): - ns = '.'.join(self._names[:l]) - if not ns in sys.modules: - sys.modules[ns] = _make_namespace_module(ns) - sys.modules[ns].__loader__ = self - - - def real_name(self, import_name): - """Allow users to import Spack packages using Python identifiers. - - A python identifier might map to many different Spack package - names due to hyphen/underscore ambiguity. - - Easy example: - num3proxy -> 3proxy - - Ambiguous: - foo_bar -> foo_bar, foo-bar - - More ambiguous: - foo_bar_baz -> foo_bar_baz, foo-bar-baz, foo_bar-baz, foo-bar_baz - """ - if import_name in self: - return import_name - - options = possible_spack_module_names(import_name) - options.remove(import_name) - for name in options: - if name in self: - return name - return None - - - def is_prefix(self, fullname): - """True if fullname is a prefix of this Repo's namespace.""" - parts = fullname.split('.') - return self._names[:len(parts)] == parts - - - def find_module(self, fullname, path=None): - """Python find_module import hook. - - Returns this Repo if it can load the module; None if not. - """ - if self.is_prefix(fullname): - return self - - namespace, dot, module_name = fullname.rpartition('.') - if namespace == self.namespace: - if self.real_name(module_name): - return self - - return None - - - def load_module(self, fullname): - """Python importer load hook. - - Tries to load the module; raises an ImportError if it can't. - """ - if fullname in sys.modules: - return sys.modules[fullname] - - namespace, dot, module_name = fullname.rpartition('.') - - if self.is_prefix(fullname): - module = _make_namespace_module(fullname) - - elif namespace == self.namespace: - real_name = self.real_name(module_name) - if not real_name: - raise ImportError("No module %s in repo %s" % (module_name, namespace)) - module = self._get_pkg_module(real_name) - - else: - raise ImportError("No module %s in repo %s" % (fullname, self.namespace)) - - module.__loader__ = self - sys.modules[fullname] = module - return module - - - def _read_config(self): - """Check for a YAML config file in this db's root directory.""" - try: - with open(self.config_file) as reponame_file: - yaml_data = yaml.load(reponame_file) - - if (not yaml_data or 'repo' not in yaml_data or - not isinstance(yaml_data['repo'], dict)): - tty.die("Invalid %s in repository %s" - % (repo_config_filename, self.root)) - - return yaml_data['repo'] - - except exceptions.IOError, e: - tty.die("Error reading %s when opening %s" - % (self.config_file, self.root)) - - - @_autospec - def get(self, spec, new=False): - if spec.virtual: - raise UnknownPackageError(spec.name) - - if new and spec in self._instances: - del self._instances[spec] - - if not spec in self._instances: - PackageClass = self._get_pkg_class(spec.name) - try: - copy = spec.copy() - self._instances[copy] = PackageClass(copy) - except Exception, e: - if spack.debug: - sys.excepthook(*sys.exc_info()) - raise FailedConstructorError(spec.name, *sys.exc_info()) - - return self._instances[spec] - - - def purge(self): - """Clear entire package instance cache.""" - self._instances.clear() - - - @_autospec - def providers_for(self, vpkg_spec): - if self._provider_index is None: - self._provider_index = ProviderIndex(self.all_package_names()) - - providers = self._provider_index.providers_for(vpkg_spec) - if not providers: - raise UnknownPackageError(vpkg_spec.name) - return providers - - - @_autospec - def extensions_for(self, extendee_spec): - return [p for p in self.all_packages() if 
p.extends(extendee_spec)] - - - def dirname_for_package_name(self, pkg_name): - """Get the directory name for a particular package. This is the - directory that contains its package.py file.""" - return join_path(self.root, pkg_name) - - - def filename_for_package_name(self, pkg_name): - """Get the filename for the module we should load for a particular - package. Packages for a Repo live in - ``$root//package.py`` - - This will return a proper package.py path even if the - package doesn't exist yet, so callers will need to ensure - the package exists before importing. - """ - validate_module_name(pkg_name) - pkg_dir = self.dirname_for_package_name(pkg_name) - - return join_path(pkg_dir, package_file_name) - - - def all_package_names(self): - """Returns a sorted list of all package names in the Repo.""" - if self._all_package_names is None: - self._all_package_names = [] - - for pkg_name in os.listdir(self.root): - pkg_dir = join_path(self.root, pkg_name) - pkg_file = join_path(pkg_dir, package_file_name) - if os.path.isfile(pkg_file): - self._all_package_names.append(pkg_name) - - self._all_package_names.sort() - - return self._all_package_names - - - def all_packages(self): - for name in self.all_package_names(): - yield self.get(name) - - - def exists(self, pkg_name): - """Whether a package with the supplied name exists.""" - # This does a binary search in the sorted list. - idx = bisect_left(self.all_package_names(), pkg_name) - return self._all_package_names[idx] == pkg_name - - - def _get_pkg_module(self, pkg_name): - """Create a module for a particular package. - - This caches the module within this Repo *instance*. It does - *not* add it to ``sys.modules``. So, you can construct - multiple Repos for testing and ensure that the module will be - loaded once per repo. - - """ - if pkg_name not in self._modules: - file_path = self.filename_for_package_name(pkg_name) - - if not os.path.exists(file_path): - raise UnknownPackageError(pkg_name, self.namespace) - - if not os.path.isfile(file_path): - tty.die("Something's wrong. '%s' is not a file!" % file_path) - - if not os.access(file_path, os.R_OK): - tty.die("Cannot read '%s'!" % file_path) - - fullname = "%s.%s" % (self.namespace, pkg_name) - - module = imp.load_source(fullname, file_path) - module.__package__ = self.namespace - module.__loader__ = self - self._modules[pkg_name] = module - - return self._modules[pkg_name] - - - def _get_pkg_class(self, pkg_name): - """Get the class for the package out of its module. - - First loads (or fetches from cache) a module for the - package. Then extracts the package class from the module - according to Spack's naming convention. - """ - class_name = mod_to_class(pkg_name) - module = self._get_pkg_module(pkg_name) - - cls = getattr(module, class_name) - if not inspect.isclass(cls): - tty.die("%s.%s is not a class" % (pkg_name, class_name)) - - return cls - - - def __str__(self): - return "" % (self.namespace, self.root) - - - def __repr__(self): - return self.__str__() - - - def __contains__(self, pkg_name): - return self.exists(pkg_name) - - - # - # Below functions deal with installed packages, and should be - # moved to some other part of Spack (conbine with - # directory_layout?) 
- # - @_autospec - def get_installed(self, spec): - """Get all the installed specs that satisfy the provided spec constraint.""" - return [s for s in self.installed_package_specs() if s.satisfies(spec)] - - - @_autospec - def installed_extensions_for(self, extendee_spec): - for s in self.installed_package_specs(): - try: - if s.package.extends(extendee_spec): - yield s.package - except UnknownPackageError, e: - # Skip packages we know nothing about - continue - - - def installed_package_specs(self): - """Read installed package names straight from the install directory - layout. - """ - # Get specs from the directory layout but ensure that they're - # all normalized properly. - installed = [] - for spec in spack.install_layout.all_specs(): - spec.normalize() - installed.append(spec) - return installed - - - def installed_known_package_specs(self): - """Read installed package names straight from the install - directory layout, but return only specs for which the - package is known to this version of spack. - """ - for spec in spack.install_layout.all_specs(): - if self.exists(spec.name): - yield spec - - -class UnknownPackageError(spack.error.SpackError): - """Raised when we encounter a package spack doesn't have.""" - def __init__(self, name, repo=None): - msg = None - if repo: - msg = "Package %s not found in packagerepo %s." % (name, repo) - else: - msg = "Package %s not found." % name - super(UnknownPackageError, self).__init__(msg) - self.name = name - - -class DuplicateRepoError(spack.error.SpackError): - """Raised when duplicate repos are added to a PackageFinder.""" - def __init__(self, msg, repo1, repo2): - super(UnknownPackageError, self).__init__( - "%s: %s, %s" % (msg, repo1, repo2)) - - -class FailedConstructorError(spack.error.SpackError): - """Raised when a package's class constructor fails.""" - def __init__(self, name, exc_type, exc_obj, exc_tb): - super(FailedConstructorError, self).__init__( - "Class constructor failed for package '%s'." % name, - '\nCaused by:\n' + - ('%s: %s\n' % (exc_type.__name__, exc_obj)) + - ''.join(traceback.format_tb(exc_tb))) - self.name = name diff --git a/lib/spack/spack/repo_loader.py b/lib/spack/spack/repo_loader.py deleted file mode 100644 index 441011cf98..0000000000 --- a/lib/spack/spack/repo_loader.py +++ /dev/null @@ -1,110 +0,0 @@ -import re -import sys -import types -import traceback - -from llnl.util.lang import * -import spack - -# Name of module under which packages are imported -imported_packages_module = 'spack.repos' - -# Name of the package file inside a package directory -package_file_name = 'package.py' - -class LazyLoader: - """The LazyLoader handles cases when repo modules or classes - are imported. 
It watches for 'spack.repos.*' loads, then - redirects the load to the appropriate module.""" - def find_module(self, fullname, pathname): - if not fullname.startswith(imported_packages_module): - return None - - partial_name = fullname[len(imported_packages_module)+1:] - - print "partial: ", partial_name - print - - last_dot = partial_name.rfind('.') - repo = partial_name[:last_dot] - module = partial_name[last_dot+1:] - - repo_loader = spack.db.repo_loaders.get(repo) - if repo_loader: - try: - self.mod = repo_loader.get_module(module) - return self - except (ImportError, spack.packages.UnknownPackageError): - return None - - def load_module(self, fullname): - return self.mod - -#sys.meta_path.append(LazyLoader()) - -_reponames = {} -class RepoNamespace(types.ModuleType): - """The RepoNamespace holds the repository namespaces under - spack.repos. For example, when accessing spack.repos.original - this class will use __getattr__ to translate the 'original' - into one of spack's known repositories""" - def __init__(self): - sys.modules[imported_packages_module] = self - - def __getattr__(self, name): - if name in _reponames: - return _reponames[name] - raise AttributeError - - @property - def __file__(self): - return None - - @property - def __path__(self): - return [] - - -class RepoLoader(types.ModuleType): - """Each RepoLoader is associated with a repository, and the RepoLoader is - responsible for loading packages out of that repository. For example, - a RepoLoader may be responsible for spack.repos.original, and when someone - references spack.repos.original.libelf that RepoLoader will load the - libelf package.""" - def __init__(self, reponame, repopath): - self.path = repopath - self.reponame = reponame - self.module_name = imported_packages_module + '.' + reponame - if not reponame in _reponames: - _reponames[reponame] = self - - sys.modules[self.module_name] = self - - - @property - def __path__(self): - return [ self.path ] - - - def __getattr__(self, name): - if name[0] == '_': - raise AttributeError - return self.get_module(name) - - - @memoized - def get_module(self, pkg_name): - import os - import imp - import llnl.util.tty as tty - - - try: - module_name = imported_packages_module + '.' + self.reponame + '.' + pkg_name - module = imp.load_source(module_name, file_path) - - except ImportError, e: - tty.die("Error while importing %s from %s:\n%s" % ( - pkg_name, file_path, e.message)) - - return module diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py new file mode 100644 index 0000000000..c1545b3654 --- /dev/null +++ b/lib/spack/spack/repository.py @@ -0,0 +1,627 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import os +import exceptions +import sys +import inspect +import glob +import imp +import re +import itertools +import traceback +from bisect import bisect_left +from external import yaml + +import llnl.util.tty as tty +from llnl.util.filesystem import join_path +from llnl.util.lang import * + +import spack.error +import spack.spec +from spack.virtual import ProviderIndex +from spack.util.naming import * + +# Filename for package repo names +repo_config_filename = '_repo.yaml' + +# Filename for packages in repos. +package_file_name = 'package.py' + +def _autospec(function): + """Decorator that automatically converts the argument of a single-arg + function to a Spec.""" + def converter(self, spec_like, *args, **kwargs): + if not isinstance(spec_like, spack.spec.Spec): + spec_like = spack.spec.Spec(spec_like) + return function(self, spec_like, *args, **kwargs) + return converter + + +def _make_namespace_module(ns): + module = imp.new_module(ns) + module.__file__ = "(spack namespace)" + module.__path__ = [] + module.__package__ = ns + return module + + +class RepoPath(object): + """A RepoPath is a list of repos that function as one. + + It functions exactly like a Repo, but it operates on the + combined results of the Repos in its list instead of on a + single package repository. + """ + def __init__(self, *repo_dirs): + self.repos = [] + self.by_namespace = NamespaceTrie() + self.by_path = {} + + self._all_package_names = [] + self._provider_index = None + + for root in repo_dirs: + # Try to make it a repo if it's not one. + if not isinstance(root, Repo): + repo = Repo(root) + # Add the repo to the path. + self.put_last(repo) + + + def swap(self, other): + """Convenience function to make swapping repostiories easier. + + This is currently used by mock tests. + TODO: Maybe there is a cleaner way. + + """ + attrs = ['repos', + 'by_namespace', + 'by_path', + '_all_package_names', + '_provider_index'] + for attr in attrs: + tmp = getattr(self, attr) + setattr(self, attr, getattr(other, attr)) + setattr(other, attr, tmp) + + + def _add(self, repo): + """Add a repository to the namespace and path indexes. + + Checks for duplicates -- two repos can't have the same root + directory, and they provide have the same namespace. 
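+        (That is, no two repos on the path may share a root directory, and
+        no two may share a namespace.)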
+ + """ + if repo.root in self.by_path: + raise DuplicateRepoError("Package repos are the same", + repo, self.by_path[repo.root]) + + if repo.namespace in self.by_namespace: + raise DuplicateRepoError("Package repos cannot have the same name", + repo, self.by_namespace[repo.namespace]) + + # Add repo to the pkg indexes + self.by_namespace[repo.namespace] = repo + self.by_path[repo.root] = repo + + # add names to the cached name list + new_pkgs = set(repo.all_package_names()) + new_pkgs.update(set(self._all_package_names)) + self._all_package_names = sorted(new_pkgs, key=lambda n:n.lower()) + + + def put_first(self, repo): + """Add repo first in the search path.""" + self._add(repo) + self.repos.insert(0, repo) + + + def put_last(self, repo): + """Add repo last in the search path.""" + self._add(repo) + self.repos.append(repo) + + + def remove(self, repo): + """Remove a repo from the search path.""" + if repo in self.repos: + self.repos.remove(repo) + + + def all_package_names(self): + """Return all unique package names in all repositories.""" + return self._all_package_names + + + def all_packages(self): + for name in self.all_package_names(): + yield self.get(name) + + + @_autospec + def providers_for(self, vpkg_spec): + if self._provider_index is None: + self._provider_index = ProviderIndex(self.all_package_names()) + + providers = self._provider_index.providers_for(vpkg_spec) + if not providers: + raise UnknownPackageError(vpkg_spec.name) + return providers + + + def find_module(self, fullname, path=None): + """Implements precedence for overlaid namespaces. + + Loop checks each namespace in self.repos for packages, and + also handles loading empty containing namespaces. + + """ + # namespaces are added to repo, and package modules are leaves. + namespace, dot, module_name = fullname.rpartition('.') + + # If it's a module in some repo, or if it is the repo's + # namespace, let the repo handle it. + for repo in self.repos: + if namespace == repo.namespace: + if repo.real_name(module_name): + return repo + elif fullname == repo.namespace: + return repo + + # No repo provides the namespace, but it is a valid prefix of + # something in the RepoPath. + if self.by_namespace.is_prefix(fullname): + return self + + return None + + + def load_module(self, fullname): + """Loads containing namespaces when necessary. + + See ``Repo`` for how actual package modules are loaded. + """ + if fullname in sys.modules: + return sys.modules[fullname] + + # partition fullname into prefix and module name. + namespace, dot, module_name = fullname.rpartition('.') + + if not self.by_namespace.is_prefix(fullname): + raise ImportError("No such Spack repo: %s" % fullname) + + module = _make_namespace_module(namespace) + module.__loader__ = self + sys.modules[fullname] = module + return module + + + def repo_for_pkg(self, pkg_name): + for repo in self.repos: + if pkg_name in repo: + return repo + raise UnknownPackageError(pkg_name) + + + @_autospec + def get(self, spec, new=False): + """Find a repo that contains the supplied spec's package. + + Raises UnknownPackageError if not found. + """ + return self.repo_for_pkg(spec.name).get(spec) + + + def dirname_for_package_name(self, pkg_name): + return self.repo_for_pkg(pkg_name).dirname_for_package_name(pkg_name) + + + def exists(self, pkg_name): + return any(repo.exists(pkg_name) for repo in self.repos) + + + def __contains__(self, pkg_name): + return self.exists(pkg_name) + + + +class Repo(object): + """Class representing a package repository in the filesystem. 
+ + Each package repository must have a top-level configuration file + called `_repo.yaml`. + + Currently, `_repo.yaml` this must define: + + `namespace`: + A Python namespace where the repository's packages should live. + + """ + def __init__(self, root): + """Instantiate a package repository from a filesystem path.""" + # Root directory, containing _repo.yaml and package dirs + self.root = root + + # Config file in /_repo.yaml + self.config_file = os.path.join(self.root, repo_config_filename) + + # Read configuration from _repo.yaml + config = self._read_config() + if not 'namespace' in config: + tty.die('Package repo in %s must define a namespace in %s.' + % (self.root, repo_config_filename)) + + # Check namespace in the repository configuration. + self.namespace = config['namespace'] + if not re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace): + tty.die(("Invalid namespace '%s' in '%s'. Namespaces must be " + "valid python identifiers separated by '.'") + % (self.namespace, self.root)) + self._names = self.namespace.split('.') + + # These are internal cache variables. + self._modules = {} + self._classes = {} + self._instances = {} + + self._provider_index = None + self._all_package_names = None + + # make sure the namespace for packages in this repo exists. + self._create_namespace() + + + def _create_namespace(self): + """Create this repo's namespace module and insert it into sys.modules. + + Ensures that modules loaded via the repo have a home, and that + we don't get runtime warnings from Python's module system. + + """ + for l in range(1, len(self._names)+1): + ns = '.'.join(self._names[:l]) + if not ns in sys.modules: + sys.modules[ns] = _make_namespace_module(ns) + sys.modules[ns].__loader__ = self + + + def real_name(self, import_name): + """Allow users to import Spack packages using Python identifiers. + + A python identifier might map to many different Spack package + names due to hyphen/underscore ambiguity. + + Easy example: + num3proxy -> 3proxy + + Ambiguous: + foo_bar -> foo_bar, foo-bar + + More ambiguous: + foo_bar_baz -> foo_bar_baz, foo-bar-baz, foo_bar-baz, foo-bar_baz + """ + if import_name in self: + return import_name + + options = possible_spack_module_names(import_name) + options.remove(import_name) + for name in options: + if name in self: + return name + return None + + + def is_prefix(self, fullname): + """True if fullname is a prefix of this Repo's namespace.""" + parts = fullname.split('.') + return self._names[:len(parts)] == parts + + + def find_module(self, fullname, path=None): + """Python find_module import hook. + + Returns this Repo if it can load the module; None if not. + """ + if self.is_prefix(fullname): + return self + + namespace, dot, module_name = fullname.rpartition('.') + if namespace == self.namespace: + if self.real_name(module_name): + return self + + return None + + + def load_module(self, fullname): + """Python importer load hook. + + Tries to load the module; raises an ImportError if it can't. 
+ """ + if fullname in sys.modules: + return sys.modules[fullname] + + namespace, dot, module_name = fullname.rpartition('.') + + if self.is_prefix(fullname): + module = _make_namespace_module(fullname) + + elif namespace == self.namespace: + real_name = self.real_name(module_name) + if not real_name: + raise ImportError("No module %s in repo %s" % (module_name, namespace)) + module = self._get_pkg_module(real_name) + + else: + raise ImportError("No module %s in repo %s" % (fullname, self.namespace)) + + module.__loader__ = self + sys.modules[fullname] = module + return module + + + def _read_config(self): + """Check for a YAML config file in this db's root directory.""" + try: + with open(self.config_file) as reponame_file: + yaml_data = yaml.load(reponame_file) + + if (not yaml_data or 'repo' not in yaml_data or + not isinstance(yaml_data['repo'], dict)): + tty.die("Invalid %s in repository %s" + % (repo_config_filename, self.root)) + + return yaml_data['repo'] + + except exceptions.IOError, e: + tty.die("Error reading %s when opening %s" + % (self.config_file, self.root)) + + + @_autospec + def get(self, spec, new=False): + if spec.virtual: + raise UnknownPackageError(spec.name) + + if new and spec in self._instances: + del self._instances[spec] + + if not spec in self._instances: + PackageClass = self._get_pkg_class(spec.name) + try: + copy = spec.copy() + self._instances[copy] = PackageClass(copy) + except Exception, e: + if spack.debug: + sys.excepthook(*sys.exc_info()) + raise FailedConstructorError(spec.name, *sys.exc_info()) + + return self._instances[spec] + + + def purge(self): + """Clear entire package instance cache.""" + self._instances.clear() + + + @_autospec + def providers_for(self, vpkg_spec): + if self._provider_index is None: + self._provider_index = ProviderIndex(self.all_package_names()) + + providers = self._provider_index.providers_for(vpkg_spec) + if not providers: + raise UnknownPackageError(vpkg_spec.name) + return providers + + + @_autospec + def extensions_for(self, extendee_spec): + return [p for p in self.all_packages() if p.extends(extendee_spec)] + + + def dirname_for_package_name(self, pkg_name): + """Get the directory name for a particular package. This is the + directory that contains its package.py file.""" + return join_path(self.root, pkg_name) + + + def filename_for_package_name(self, pkg_name): + """Get the filename for the module we should load for a particular + package. Packages for a Repo live in + ``$root//package.py`` + + This will return a proper package.py path even if the + package doesn't exist yet, so callers will need to ensure + the package exists before importing. + """ + validate_module_name(pkg_name) + pkg_dir = self.dirname_for_package_name(pkg_name) + + return join_path(pkg_dir, package_file_name) + + + def all_package_names(self): + """Returns a sorted list of all package names in the Repo.""" + if self._all_package_names is None: + self._all_package_names = [] + + for pkg_name in os.listdir(self.root): + pkg_dir = join_path(self.root, pkg_name) + pkg_file = join_path(pkg_dir, package_file_name) + if os.path.isfile(pkg_file): + self._all_package_names.append(pkg_name) + + self._all_package_names.sort() + + return self._all_package_names + + + def all_packages(self): + for name in self.all_package_names(): + yield self.get(name) + + + def exists(self, pkg_name): + """Whether a package with the supplied name exists.""" + # This does a binary search in the sorted list. 
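+        # Note: all_package_names() caches and returns a sorted list, so
+        # bisect_left is valid here.  If pkg_name sorts after every known
+        # package, idx equals the list length and the lookup below raises
+        # IndexError instead of returning False, so a bounds check may be
+        # worth adding.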
+ idx = bisect_left(self.all_package_names(), pkg_name) + return self._all_package_names[idx] == pkg_name + + + def _get_pkg_module(self, pkg_name): + """Create a module for a particular package. + + This caches the module within this Repo *instance*. It does + *not* add it to ``sys.modules``. So, you can construct + multiple Repos for testing and ensure that the module will be + loaded once per repo. + + """ + if pkg_name not in self._modules: + file_path = self.filename_for_package_name(pkg_name) + + if not os.path.exists(file_path): + raise UnknownPackageError(pkg_name, self.namespace) + + if not os.path.isfile(file_path): + tty.die("Something's wrong. '%s' is not a file!" % file_path) + + if not os.access(file_path, os.R_OK): + tty.die("Cannot read '%s'!" % file_path) + + fullname = "%s.%s" % (self.namespace, pkg_name) + + module = imp.load_source(fullname, file_path) + module.__package__ = self.namespace + module.__loader__ = self + self._modules[pkg_name] = module + + return self._modules[pkg_name] + + + def _get_pkg_class(self, pkg_name): + """Get the class for the package out of its module. + + First loads (or fetches from cache) a module for the + package. Then extracts the package class from the module + according to Spack's naming convention. + """ + class_name = mod_to_class(pkg_name) + module = self._get_pkg_module(pkg_name) + + cls = getattr(module, class_name) + if not inspect.isclass(cls): + tty.die("%s.%s is not a class" % (pkg_name, class_name)) + + return cls + + + def __str__(self): + return "" % (self.namespace, self.root) + + + def __repr__(self): + return self.__str__() + + + def __contains__(self, pkg_name): + return self.exists(pkg_name) + + + # + # Below functions deal with installed packages, and should be + # moved to some other part of Spack (conbine with + # directory_layout?) + # + @_autospec + def get_installed(self, spec): + """Get all the installed specs that satisfy the provided spec constraint.""" + return [s for s in self.installed_package_specs() if s.satisfies(spec)] + + + @_autospec + def installed_extensions_for(self, extendee_spec): + for s in self.installed_package_specs(): + try: + if s.package.extends(extendee_spec): + yield s.package + except UnknownPackageError, e: + # Skip packages we know nothing about + continue + + + def installed_package_specs(self): + """Read installed package names straight from the install directory + layout. + """ + # Get specs from the directory layout but ensure that they're + # all normalized properly. + installed = [] + for spec in spack.install_layout.all_specs(): + spec.normalize() + installed.append(spec) + return installed + + + def installed_known_package_specs(self): + """Read installed package names straight from the install + directory layout, but return only specs for which the + package is known to this version of spack. + """ + for spec in spack.install_layout.all_specs(): + if self.exists(spec.name): + yield spec + + +class UnknownPackageError(spack.error.SpackError): + """Raised when we encounter a package spack doesn't have.""" + def __init__(self, name, repo=None): + msg = None + if repo: + msg = "Package %s not found in packagerepo %s." % (name, repo) + else: + msg = "Package %s not found." 
% name + super(UnknownPackageError, self).__init__(msg) + self.name = name + + +class DuplicateRepoError(spack.error.SpackError): + """Raised when duplicate repos are added to a RepoPath.""" + def __init__(self, msg, repo1, repo2): + super(UnknownPackageError, self).__init__( + "%s: %s, %s" % (msg, repo1, repo2)) + + +class FailedConstructorError(spack.error.SpackError): + """Raised when a package's class constructor fails.""" + def __init__(self, name, exc_type, exc_obj, exc_tb): + super(FailedConstructorError, self).__init__( + "Class constructor failed for package '%s'." % name, + '\nCaused by:\n' + + ('%s: %s\n' % (exc_type.__name__, exc_obj)) + + ''.join(traceback.format_tb(exc_tb))) + self.name = name diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py index 55b3f0b18f..580620e0e8 100644 --- a/lib/spack/spack/test/directory_layout.py +++ b/lib/spack/spack/test/directory_layout.py @@ -34,7 +34,7 @@ from llnl.util.filesystem import * import spack from spack.spec import Spec -from spack.packages import PackageFinder +from spack.repository import RepoPath from spack.directory_layout import YamlDirectoryLayout # number of packages to test (to reduce test time) @@ -123,7 +123,7 @@ class DirectoryLayoutTest(unittest.TestCase): information about installed packages' specs to uninstall or query them again if the package goes away. """ - mock_db = PackageFinder(spack.mock_packages_path) + mock_db = RepoPath(spack.mock_packages_path) not_in_mock = set.difference( set(spack.db.all_package_names()), diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index 071c21b7e0..8c6273def0 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -27,7 +27,7 @@ import unittest import spack import spack.config -from spack.packages import PackageFinder +from spack.repository import RepoPath from spack.spec import Spec @@ -36,7 +36,7 @@ class MockPackagesTest(unittest.TestCase): # Use the mock packages database for these tests. This allows # us to set up contrived packages that don't interfere with # real ones. - self.db = PackageFinder(spack.mock_packages_path) + self.db = RepoPath(spack.mock_packages_path) spack.db.swap(self.db) spack.config.clear_config_caches() diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py index a398568244..0a132fd701 100644 --- a/lib/spack/spack/test/package_sanity.py +++ b/lib/spack/spack/test/package_sanity.py @@ -28,7 +28,7 @@ This test does sanity checks on Spack's builtin package database. 
import unittest import spack -from spack.packages import PackageFinder +from spack.repository import RepoPath class PackageSanityTest(unittest.TestCase): @@ -46,7 +46,7 @@ class PackageSanityTest(unittest.TestCase): def ztest_get_all_mock_packages(self): """Get the mock packages once each too.""" - db = PackageFinder(spack.mock_packages_path) + db = RepoPath(spack.mock_packages_path) spack.db.swap(db) self.check_db() spack.db.swap(db) diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py index 2d19d9ddc7..8a786e364e 100644 --- a/lib/spack/spack/test/packages.py +++ b/lib/spack/spack/test/packages.py @@ -27,7 +27,7 @@ import unittest from llnl.util.filesystem import join_path import spack -from spack.packages import Repo +from spack.repository import Repo from spack.util.naming import mod_to_class from spack.test.mock_packages_test import * -- cgit v1.2.3-70-g09d2 From 04f032d6e397ce219a673c93277683060def52fd Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Wed, 25 Nov 2015 10:01:44 -0800 Subject: Rename spack.db -> spack.repo Making distinction between install database and package repositories clearer. --- lib/spack/spack/__init__.py | 6 ++-- lib/spack/spack/cmd/checksum.py | 2 +- lib/spack/spack/cmd/clean.py | 2 +- lib/spack/spack/cmd/create.py | 4 +-- lib/spack/spack/cmd/diy.py | 4 +-- lib/spack/spack/cmd/edit.py | 2 +- lib/spack/spack/cmd/extensions.py | 2 +- lib/spack/spack/cmd/fetch.py | 2 +- lib/spack/spack/cmd/find.py | 2 +- lib/spack/spack/cmd/info.py | 2 +- lib/spack/spack/cmd/install.py | 2 +- lib/spack/spack/cmd/list.py | 2 +- lib/spack/spack/cmd/location.py | 6 ++-- lib/spack/spack/cmd/mirror.py | 2 +- lib/spack/spack/cmd/package-list.py | 2 +- lib/spack/spack/cmd/patch.py | 2 +- lib/spack/spack/cmd/pkg.py | 2 +- lib/spack/spack/cmd/providers.py | 2 +- lib/spack/spack/cmd/repo.py | 2 +- lib/spack/spack/cmd/restage.py | 2 +- lib/spack/spack/cmd/stage.py | 2 +- lib/spack/spack/cmd/test-install.py | 8 ++--- lib/spack/spack/cmd/urls.py | 2 +- lib/spack/spack/cmd/versions.py | 2 +- lib/spack/spack/database.py | 2 +- lib/spack/spack/graph.py | 2 +- lib/spack/spack/package.py | 4 +-- lib/spack/spack/patch.py | 2 +- lib/spack/spack/spec.py | 14 ++++----- lib/spack/spack/test/concretize.py | 12 ++++---- lib/spack/spack/test/database.py | 2 +- lib/spack/spack/test/directory_layout.py | 12 ++++---- lib/spack/spack/test/git_fetch.py | 2 +- lib/spack/spack/test/hg_fetch.py | 2 +- lib/spack/spack/test/install.py | 2 +- lib/spack/spack/test/mirror.py | 2 +- lib/spack/spack/test/mock_packages_test.py | 6 ++-- lib/spack/spack/test/multimethod.py | 48 +++++++++++++++--------------- lib/spack/spack/test/package_sanity.py | 10 +++---- lib/spack/spack/test/packages.py | 6 ++-- lib/spack/spack/test/python_version.py | 4 +-- lib/spack/spack/test/svn_fetch.py | 2 +- lib/spack/spack/test/unit_install.py | 6 ++-- 43 files changed, 103 insertions(+), 103 deletions(-) diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 20ae8c9272..4f481ce937 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -58,8 +58,8 @@ import spack.repository _repo_paths = spack.config.get_repos_config() if not _repo_paths: tty.die("Spack configuration contains no package repositories.") -db = spack.repository.RepoPath(*_repo_paths) -sys.meta_path.append(db) +repo = spack.repository.RepoPath(*_repo_paths) +sys.meta_path.append(repo) # # Set up the installed packages database @@ -146,7 +146,7 @@ sys_type = None # When packages call 'from spack import *', 
this extra stuff is brought in. # # Spack internal code should call 'import spack' and accesses other -# variables (spack.db, paths, etc.) directly. +# variables (spack.repo, paths, etc.) directly. # # TODO: maybe this should be separated out and should go in build_environment.py? # TODO: it's not clear where all the stuff that needs to be included in packages diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py index 8a448450c2..a9a5d11cca 100644 --- a/lib/spack/spack/cmd/checksum.py +++ b/lib/spack/spack/cmd/checksum.py @@ -81,7 +81,7 @@ def get_checksums(versions, urls, **kwargs): def checksum(parser, args): # get the package we're going to generate checksums for - pkg = spack.db.get(args.package) + pkg = spack.repo.get(args.package) # If the user asked for specific versions, use those. if args.versions: diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py index c20136ebe5..5546060e09 100644 --- a/lib/spack/spack/cmd/clean.py +++ b/lib/spack/spack/cmd/clean.py @@ -42,5 +42,5 @@ def clean(parser, args): specs = spack.cmd.parse_specs(args.packages, concretize=True) for spec in specs: - package = spack.db.get(spec) + package = spack.repo.get(spec) package.do_clean() diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 1502942f2c..475c4993b5 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -172,7 +172,7 @@ def create(parser, args): tty.msg("Creating template for package %s" % name) # Create a directory for the new package. - pkg_path = spack.db.filename_for_package_name(name, package_repo) + pkg_path = spack.repo.filename_for_package_name(name, package_repo) if os.path.exists(pkg_path) and not args.force: tty.die("%s already exists." % pkg_path) else: @@ -214,7 +214,7 @@ def create(parser, args): name = 'py-%s' % name # Create a directory for the new package. - pkg_path = spack.db.filename_for_package_name(name) + pkg_path = spack.repo.filename_for_package_name(name) if os.path.exists(pkg_path) and not args.force: tty.die("%s already exists." % pkg_path) else: diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py index 9f8a6d39db..9b8d2e1f6f 100644 --- a/lib/spack/spack/cmd/diy.py +++ b/lib/spack/spack/cmd/diy.py @@ -61,7 +61,7 @@ def diy(self, args): # Take a write lock before checking for existence. 
with spack.installed_db.write_transaction(): spec = specs[0] - if not spack.db.exists(spec.name): + if not spack.repo.exists(spec.name): tty.warn("No such package: %s" % spec.name) create = tty.get_yes_or_no("Create this package?", default=False) if not create: @@ -76,7 +76,7 @@ def diy(self, args): tty.die("spack diy spec must have a single, concrete version.") spec.concretize() - package = spack.db.get(spec) + package = spack.repo.get(spec) if package.installed: tty.error("Already installed in %s" % package.prefix) diff --git a/lib/spack/spack/cmd/edit.py b/lib/spack/spack/cmd/edit.py index 9081d12516..cb63c6fecd 100644 --- a/lib/spack/spack/cmd/edit.py +++ b/lib/spack/spack/cmd/edit.py @@ -54,7 +54,7 @@ class ${class_name}(Package): def edit_package(name, force=False): - path = spack.db.filename_for_package_name(name) + path = spack.repo.filename_for_package_name(name) if os.path.exists(path): if not os.path.isfile(path): diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py index 7cadc424b0..39c8e1f8c0 100644 --- a/lib/spack/spack/cmd/extensions.py +++ b/lib/spack/spack/cmd/extensions.py @@ -71,7 +71,7 @@ def extensions(parser, args): args.mode = 'short' # List package names of extensions - extensions = spack.db.extensions_for(spec) + extensions = spack.repo.extensions_for(spec) if not extensions: tty.msg("%s has no extensions." % spec.cshort_spec) return diff --git a/lib/spack/spack/cmd/fetch.py b/lib/spack/spack/cmd/fetch.py index 0ccebd9486..efaa7acc13 100644 --- a/lib/spack/spack/cmd/fetch.py +++ b/lib/spack/spack/cmd/fetch.py @@ -46,5 +46,5 @@ def fetch(parser, args): specs = spack.cmd.parse_specs(args.packages, concretize=True) for spec in specs: - package = spack.db.get(spec) + package = spack.repo.get(spec) package.do_fetch() diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index 0b0dd6ef6f..5c0f2f521e 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -137,7 +137,7 @@ def find(parser, args): # Filter out specs that don't exist. query_specs = spack.cmd.parse_specs(args.query_specs) query_specs, nonexisting = partition_list( - query_specs, lambda s: spack.db.exists(s.name)) + query_specs, lambda s: spack.repo.exists(s.name)) if nonexisting: msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '') diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py index 085e4db44d..2b61850c2d 100644 --- a/lib/spack/spack/cmd/info.py +++ b/lib/spack/spack/cmd/info.py @@ -105,5 +105,5 @@ def print_text_info(pkg): def info(parser, args): - pkg = spack.db.get(args.name) + pkg = spack.repo.get(args.name) print_text_info(pkg) diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index 836a6260c8..0942a8e383 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -70,7 +70,7 @@ def install(parser, args): specs = spack.cmd.parse_specs(args.packages, concretize=True) for spec in specs: - package = spack.db.get(spec) + package = spack.repo.get(spec) with spack.installed_db.write_transaction(): package.do_install( keep_prefix=args.keep_prefix, diff --git a/lib/spack/spack/cmd/list.py b/lib/spack/spack/cmd/list.py index 1f0978a18e..d20aded6ca 100644 --- a/lib/spack/spack/cmd/list.py +++ b/lib/spack/spack/cmd/list.py @@ -43,7 +43,7 @@ def setup_parser(subparser): def list(parser, args): # Start with all package names. 
- pkgs = spack.db.all_package_names() + pkgs = spack.repo.all_package_names() # filter if a filter arg was provided if args.filter: diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py index e8e9c3f277..7475c65867 100644 --- a/lib/spack/spack/cmd/location.py +++ b/lib/spack/spack/cmd/location.py @@ -72,7 +72,7 @@ def location(parser, args): print spack.prefix elif args.packages: - print spack.db.root + print spack.repo.root elif args.stages: print spack.stage_path @@ -94,12 +94,12 @@ def location(parser, args): if args.package_dir: # This one just needs the spec name. - print join_path(spack.db.root, spec.name) + print join_path(spack.repo.root, spec.name) else: # These versions need concretized specs. spec.concretize() - pkg = spack.db.get(spec) + pkg = spack.repo.get(spec) if args.stage_dir: print pkg.stage.path diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index 2356170a9a..9a507e69db 100644 --- a/lib/spack/spack/cmd/mirror.py +++ b/lib/spack/spack/cmd/mirror.py @@ -128,7 +128,7 @@ def mirror_create(args): # If nothing is passed, use all packages. if not specs: - specs = [Spec(n) for n in spack.db.all_package_names()] + specs = [Spec(n) for n in spack.repo.all_package_names()] specs.sort(key=lambda s: s.format("$_$@").lower()) # Default name for directory is spack-mirror- diff --git a/lib/spack/spack/cmd/package-list.py b/lib/spack/spack/cmd/package-list.py index f048482845..a14b06bf7f 100644 --- a/lib/spack/spack/cmd/package-list.py +++ b/lib/spack/spack/cmd/package-list.py @@ -48,7 +48,7 @@ def rst_table(elts): def print_rst_package_list(): """Print out information on all packages in restructured text.""" - pkgs = sorted(spack.db.all_packages(), key=lambda s:s.name.lower()) + pkgs = sorted(spack.repo.all_packages(), key=lambda s:s.name.lower()) print ".. _package-list:" print diff --git a/lib/spack/spack/cmd/patch.py b/lib/spack/spack/cmd/patch.py index a6556c4828..600cad87fe 100644 --- a/lib/spack/spack/cmd/patch.py +++ b/lib/spack/spack/cmd/patch.py @@ -47,5 +47,5 @@ def patch(parser, args): specs = spack.cmd.parse_specs(args.packages, concretize=True) for spec in specs: - package = spack.db.get(spec) + package = spack.repo.get(spec) package.do_patch() diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py index 055b7c2062..5b70188941 100644 --- a/lib/spack/spack/cmd/pkg.py +++ b/lib/spack/spack/cmd/pkg.py @@ -85,7 +85,7 @@ def list_packages(rev): def pkg_add(args): for pkg_name in args.packages: - filename = spack.db.filename_for_package_name(pkg_name) + filename = spack.repo.filename_for_package_name(pkg_name) if not os.path.isfile(filename): tty.die("No such package: %s. 
Path does not exist:" % pkg_name, filename) diff --git a/lib/spack/spack/cmd/providers.py b/lib/spack/spack/cmd/providers.py index 2bcdc9fba2..872afa98d5 100644 --- a/lib/spack/spack/cmd/providers.py +++ b/lib/spack/spack/cmd/providers.py @@ -39,4 +39,4 @@ def setup_parser(subparser): def providers(parser, args): for spec in spack.cmd.parse_specs(args.vpkg_spec): - colify(sorted(spack.db.providers_for(spec)), indent=4) + colify(sorted(spack.repo.providers_for(spec)), indent=4) diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py index 87f21b833d..85cc83730c 100644 --- a/lib/spack/spack/cmd/repo.py +++ b/lib/spack/spack/cmd/repo.py @@ -108,7 +108,7 @@ def repo_remove(args): def repo_list(args): """List package sources and their mnemoics""" - root_names = spack.db.repos + root_names = spack.repo.repos max_len = max(len(s[0]) for s in root_names) fmt = "%%-%ds%%s" % (max_len + 4) for root in root_names: diff --git a/lib/spack/spack/cmd/restage.py b/lib/spack/spack/cmd/restage.py index e735a12c32..a9ee26a539 100644 --- a/lib/spack/spack/cmd/restage.py +++ b/lib/spack/spack/cmd/restage.py @@ -42,5 +42,5 @@ def restage(parser, args): specs = spack.cmd.parse_specs(args.packages, concretize=True) for spec in specs: - package = spack.db.get(spec) + package = spack.repo.get(spec) package.do_restage() diff --git a/lib/spack/spack/cmd/stage.py b/lib/spack/spack/cmd/stage.py index f3dc97be17..f734b3dd3b 100644 --- a/lib/spack/spack/cmd/stage.py +++ b/lib/spack/spack/cmd/stage.py @@ -49,5 +49,5 @@ def stage(parser, args): specs = spack.cmd.parse_specs(args.specs, concretize=True) for spec in specs: - package = spack.db.get(spec) + package = spack.repo.get(spec) package.do_stage() diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py index 68b761d5dc..7a8921d058 100644 --- a/lib/spack/spack/cmd/test-install.py +++ b/lib/spack/spack/cmd/test-install.py @@ -115,7 +115,7 @@ def fetch_log(path): def failed_dependencies(spec): return set(childSpec for childSpec in spec.dependencies.itervalues() if not - spack.db.get(childSpec).installed) + spack.repo.get(childSpec).installed) def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log): @@ -126,7 +126,7 @@ def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log): continue failedDeps = failed_dependencies(spec) - package = spack.db.get(spec) + package = spack.repo.get(spec) if failedDeps: result = TestResult.SKIPPED dep = iter(failedDeps).next() @@ -171,7 +171,7 @@ def test_install(parser, args): newInstalls = set() for spec in topSpec.traverse(): - package = spack.db.get(spec) + package = spack.repo.get(spec) if not package.installed: newInstalls.add(spec) @@ -188,7 +188,7 @@ def test_install(parser, args): # Calling do_install for the top-level package would be sufficient but # this attempts to keep going if any package fails (other packages which # are not dependents may succeed) - package = spack.db.get(spec) + package = spack.repo.get(spec) if (not failed_dependencies(spec)) and (not package.installed): try: package.do_install( diff --git a/lib/spack/spack/cmd/urls.py b/lib/spack/spack/cmd/urls.py index 417ce3ab68..14ce4c369f 100644 --- a/lib/spack/spack/cmd/urls.py +++ b/lib/spack/spack/cmd/urls.py @@ -41,7 +41,7 @@ def setup_parser(subparser): def urls(parser, args): urls = set() - for pkg in spack.db.all_packages(): + for pkg in spack.repo.all_packages(): url = getattr(pkg.__class__, 'url', None) if url: urls.add(url) diff --git a/lib/spack/spack/cmd/versions.py 
b/lib/spack/spack/cmd/versions.py index ed16728261..a5ea5e8f83 100644 --- a/lib/spack/spack/cmd/versions.py +++ b/lib/spack/spack/cmd/versions.py @@ -34,7 +34,7 @@ def setup_parser(subparser): def versions(parser, args): - pkg = spack.db.get(args.package) + pkg = spack.repo.get(args.package) safe_versions = pkg.versions fetched_versions = pkg.fetch_remote_versions() diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index e0c14a0455..8e380083f3 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -549,7 +549,7 @@ class Database(object): for key, rec in self._data.items(): if installed is not any and rec.installed != installed: continue - if known is not any and spack.db.exists(rec.spec.name) != known: + if known is not any and spack.repo.exists(rec.spec.name) != known: continue if query_spec is any or rec.spec.satisfies(query_spec): results.append(rec.spec) diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py index 5fb6a9cd23..0600016a51 100644 --- a/lib/spack/spack/graph.py +++ b/lib/spack/spack/graph.py @@ -523,7 +523,7 @@ def graph_dot(*specs, **kwargs): return '"%s"' % string if not specs: - specs = [p.name for p in spack.db.all_packages()] + specs = [p.name for p in spack.repo.all_packages()] else: roots = specs specs = set() diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 39d71bb4b9..95b74bc961 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -372,7 +372,7 @@ class Package(object): self._total_time = 0.0 if self.is_extension: - spack.db.get(self.extendee_spec)._check_extendable() + spack.repo.get(self.extendee_spec)._check_extendable() @property @@ -548,7 +548,7 @@ class Package(object): yield spec continue - for pkg in spack.db.get(name).preorder_traversal(visited, **kwargs): + for pkg in spack.repo.get(name).preorder_traversal(visited, **kwargs): yield pkg diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py index fe6e0a65a3..fef42fb5ee 100644 --- a/lib/spack/spack/patch.py +++ b/lib/spack/spack/patch.py @@ -54,7 +54,7 @@ class Patch(object): if '://' in path_or_url: self.url = path_or_url else: - pkg_dir = spack.db.dirname_for_package_name(self.pkg_name) + pkg_dir = spack.repo.dirname_for_package_name(self.pkg_name) self.path = join_path(pkg_dir, path_or_url) if not os.path.isfile(self.path): raise NoSuchPatchFileError(pkg_name, self.path) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 5e59f240a4..bb0f194c13 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -487,7 +487,7 @@ class Spec(object): @property def package(self): - return spack.db.get(self) + return spack.repo.get(self) @property @@ -505,7 +505,7 @@ class Spec(object): @staticmethod def is_virtual(name): """Test if a name is virtual without requiring a Spec.""" - return not spack.db.exists(name) + return not spack.repo.exists(name) @property @@ -798,7 +798,7 @@ class Spec(object): return changed for spec in virtuals: - providers = spack.db.providers_for(spec) + providers = spack.repo.providers_for(spec) concrete = spack.concretizer.choose_provider(spec, providers) concrete = concrete.copy() spec._replace_with(concrete) @@ -909,7 +909,7 @@ class Spec(object): the dependency. If no conditions are True (and we don't depend on it), return None. """ - pkg = spack.db.get(self.name) + pkg = spack.repo.get(self.name) conditions = pkg.dependencies[name] # evaluate when specs to figure out constraints on the dependency. 
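Taken together with the `package` property in the hunk above, most client code never touches the repository machinery directly. A minimal sketch of the renamed interface (libelf is used purely as an example of a package name known to the builtin repository):

    import spack
    from spack.spec import Spec

    pkg = spack.repo.get('libelf')     # _autospec converts the string to a Spec
    pkg = Spec('libelf').package       # equivalent: the property delegates to spack.repo.get(self)
    assert 'libelf' in spack.repo      # __contains__ -> exists(), checked across all repos in the path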
@@ -1037,7 +1037,7 @@ class Spec(object): any_change = False changed = True - pkg = spack.db.get(self.name) + pkg = spack.repo.get(self.name) while changed: changed = False for dep_name in pkg.dependencies: @@ -1115,7 +1115,7 @@ class Spec(object): for spec in self.traverse(): # Don't get a package for a virtual name. if not spec.virtual: - spack.db.get(spec.name) + spack.repo.get(spec.name) # validate compiler in addition to the package name. if spec.compiler: @@ -1247,7 +1247,7 @@ class Spec(object): # A concrete provider can satisfy a virtual dependency. if not self.virtual and other.virtual: - pkg = spack.db.get(self.name) + pkg = spack.repo.get(self.name) if pkg.provides(other.name): for provided, when_spec in pkg.provided.items(): if self.satisfies(when_spec, deps=False, strict=strict): diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index b3a77d076a..8842efe5a0 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -125,22 +125,22 @@ class ConcretizeTest(MockPackagesTest): we ask for some advanced version. """ self.assertTrue(not any(spec.satisfies('mpich2@:1.0') - for spec in spack.db.providers_for('mpi@2.1'))) + for spec in spack.repo.providers_for('mpi@2.1'))) self.assertTrue(not any(spec.satisfies('mpich2@:1.1') - for spec in spack.db.providers_for('mpi@2.2'))) + for spec in spack.repo.providers_for('mpi@2.2'))) self.assertTrue(not any(spec.satisfies('mpich2@:1.1') - for spec in spack.db.providers_for('mpi@2.2'))) + for spec in spack.repo.providers_for('mpi@2.2'))) self.assertTrue(not any(spec.satisfies('mpich@:1') - for spec in spack.db.providers_for('mpi@2'))) + for spec in spack.repo.providers_for('mpi@2'))) self.assertTrue(not any(spec.satisfies('mpich@:1') - for spec in spack.db.providers_for('mpi@3'))) + for spec in spack.repo.providers_for('mpi@3'))) self.assertTrue(not any(spec.satisfies('mpich2') - for spec in spack.db.providers_for('mpi@3'))) + for spec in spack.repo.providers_for('mpi@3'))) def test_virtual_is_fully_expanded_for_callpath(self): diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index 8416143f2d..5ce010ae8f 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -79,7 +79,7 @@ class DatabaseTest(MockPackagesTest): def _mock_install(self, spec): s = Spec(spec) - pkg = spack.db.get(s.concretized()) + pkg = spack.repo.get(s.concretized()) pkg.do_install(fake=True) diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py index 580620e0e8..ded978b818 100644 --- a/lib/spack/spack/test/directory_layout.py +++ b/lib/spack/spack/test/directory_layout.py @@ -62,7 +62,7 @@ class DirectoryLayoutTest(unittest.TestCase): finally that the directory can be removed by the directory layout. """ - packages = list(spack.db.all_packages())[:max_packages] + packages = list(spack.repo.all_packages())[:max_packages] for pkg in packages: spec = pkg.spec @@ -126,14 +126,14 @@ class DirectoryLayoutTest(unittest.TestCase): mock_db = RepoPath(spack.mock_packages_path) not_in_mock = set.difference( - set(spack.db.all_package_names()), + set(spack.repo.all_package_names()), set(mock_db.all_package_names())) packages = list(not_in_mock)[:max_packages] # Create all the packages that are not in mock. installed_specs = {} for pkg_name in packages: - spec = spack.db.get(pkg_name).spec + spec = spack.repo.get(pkg_name).spec # If a spec fails to concretize, just skip it. 
If it is a # real error, it will be caught by concretization tests. @@ -145,7 +145,7 @@ class DirectoryLayoutTest(unittest.TestCase): self.layout.create_install_directory(spec) installed_specs[spec] = self.layout.path_for_spec(spec) - spack.db.swap(mock_db) + spack.repo.swap(mock_db) # Now check that even without the package files, we know # enough to read a spec from the spec file. @@ -160,12 +160,12 @@ class DirectoryLayoutTest(unittest.TestCase): self.assertTrue(spec.eq_dag(spec_from_file)) self.assertEqual(spec.dag_hash(), spec_from_file.dag_hash()) - spack.db.swap(mock_db) + spack.repo.swap(mock_db) def test_find(self): """Test that finding specs within an install layout works.""" - packages = list(spack.db.all_packages())[:max_packages] + packages = list(spack.repo.all_packages())[:max_packages] # Create install prefixes for all packages in the list installed_specs = {} diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py index 9700bd7533..bf31bd2a5b 100644 --- a/lib/spack/spack/test/git_fetch.py +++ b/lib/spack/spack/test/git_fetch.py @@ -50,7 +50,7 @@ class GitFetchTest(MockPackagesTest): spec = Spec('git-test') spec.concretize() - self.pkg = spack.db.get(spec, new=True) + self.pkg = spack.repo.get(spec, new=True) def tearDown(self): diff --git a/lib/spack/spack/test/hg_fetch.py b/lib/spack/spack/test/hg_fetch.py index 531dfabaa1..559fc2959a 100644 --- a/lib/spack/spack/test/hg_fetch.py +++ b/lib/spack/spack/test/hg_fetch.py @@ -47,7 +47,7 @@ class HgFetchTest(MockPackagesTest): spec = Spec('hg-test') spec.concretize() - self.pkg = spack.db.get(spec, new=True) + self.pkg = spack.repo.get(spec, new=True) def tearDown(self): diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py index 5659e97a4d..b280576c77 100644 --- a/lib/spack/spack/test/install.py +++ b/lib/spack/spack/test/install.py @@ -78,7 +78,7 @@ class InstallTest(MockPackagesTest): self.assertTrue(spec.concrete) # Get the package - pkg = spack.db.get(spec) + pkg = spack.repo.get(spec) # Fake the URL for the package so it downloads from a file. pkg.fetcher = URLFetchStrategy(self.repo.url) diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index 89ab14359e..edd075a9a9 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -55,7 +55,7 @@ class MirrorTest(MockPackagesTest): spec.concretize() # Get the package and fix its fetch args to point to a mock repo - pkg = spack.db.get(spec) + pkg = spack.repo.get(spec) repo = MockRepoClass() self.repos[name] = repo diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index 8c6273def0..2150b40876 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -37,7 +37,7 @@ class MockPackagesTest(unittest.TestCase): # us to set up contrived packages that don't interfere with # real ones. self.db = RepoPath(spack.mock_packages_path) - spack.db.swap(self.db) + spack.repo.swap(self.db) spack.config.clear_config_caches() self.real_scopes = spack.config.config_scopes @@ -59,7 +59,7 @@ class MockPackagesTest(unittest.TestCase): spec = Spec(spec) # Save original dependencies before making any changes. 
- pkg = spack.db.get(pkg_name) + pkg = spack.repo.get(pkg_name) if pkg_name not in self.saved_deps: self.saved_deps[pkg_name] = (pkg, pkg.dependencies.copy()) @@ -69,7 +69,7 @@ class MockPackagesTest(unittest.TestCase): def cleanmock(self): """Restore the real packages path after any test.""" - spack.db.swap(self.db) + spack.repo.swap(self.db) spack.config.config_scopes = self.real_scopes spack.config.clear_config_caches() diff --git a/lib/spack/spack/test/multimethod.py b/lib/spack/spack/test/multimethod.py index cd5d9e625e..56e8b3f343 100644 --- a/lib/spack/spack/test/multimethod.py +++ b/lib/spack/spack/test/multimethod.py @@ -38,92 +38,92 @@ from spack.test.mock_packages_test import * class MultiMethodTest(MockPackagesTest): def test_no_version_match(self): - pkg = spack.db.get('multimethod@2.0') + pkg = spack.repo.get('multimethod@2.0') self.assertRaises(NoSuchMethodError, pkg.no_version_2) def test_one_version_match(self): - pkg = spack.db.get('multimethod@1.0') + pkg = spack.repo.get('multimethod@1.0') self.assertEqual(pkg.no_version_2(), 1) - pkg = spack.db.get('multimethod@3.0') + pkg = spack.repo.get('multimethod@3.0') self.assertEqual(pkg.no_version_2(), 3) - pkg = spack.db.get('multimethod@4.0') + pkg = spack.repo.get('multimethod@4.0') self.assertEqual(pkg.no_version_2(), 4) def test_version_overlap(self): - pkg = spack.db.get('multimethod@2.0') + pkg = spack.repo.get('multimethod@2.0') self.assertEqual(pkg.version_overlap(), 1) - pkg = spack.db.get('multimethod@5.0') + pkg = spack.repo.get('multimethod@5.0') self.assertEqual(pkg.version_overlap(), 2) def test_mpi_version(self): - pkg = spack.db.get('multimethod^mpich@3.0.4') + pkg = spack.repo.get('multimethod^mpich@3.0.4') self.assertEqual(pkg.mpi_version(), 3) - pkg = spack.db.get('multimethod^mpich2@1.2') + pkg = spack.repo.get('multimethod^mpich2@1.2') self.assertEqual(pkg.mpi_version(), 2) - pkg = spack.db.get('multimethod^mpich@1.0') + pkg = spack.repo.get('multimethod^mpich@1.0') self.assertEqual(pkg.mpi_version(), 1) def test_undefined_mpi_version(self): - pkg = spack.db.get('multimethod^mpich@0.4') + pkg = spack.repo.get('multimethod^mpich@0.4') self.assertEqual(pkg.mpi_version(), 1) - pkg = spack.db.get('multimethod^mpich@1.4') + pkg = spack.repo.get('multimethod^mpich@1.4') self.assertEqual(pkg.mpi_version(), 1) def test_default_works(self): - pkg = spack.db.get('multimethod%gcc') + pkg = spack.repo.get('multimethod%gcc') self.assertEqual(pkg.has_a_default(), 'gcc') - pkg = spack.db.get('multimethod%intel') + pkg = spack.repo.get('multimethod%intel') self.assertEqual(pkg.has_a_default(), 'intel') - pkg = spack.db.get('multimethod%pgi') + pkg = spack.repo.get('multimethod%pgi') self.assertEqual(pkg.has_a_default(), 'default') def test_architecture_match(self): - pkg = spack.db.get('multimethod=x86_64') + pkg = spack.repo.get('multimethod=x86_64') self.assertEqual(pkg.different_by_architecture(), 'x86_64') - pkg = spack.db.get('multimethod=ppc64') + pkg = spack.repo.get('multimethod=ppc64') self.assertEqual(pkg.different_by_architecture(), 'ppc64') - pkg = spack.db.get('multimethod=ppc32') + pkg = spack.repo.get('multimethod=ppc32') self.assertEqual(pkg.different_by_architecture(), 'ppc32') - pkg = spack.db.get('multimethod=arm64') + pkg = spack.repo.get('multimethod=arm64') self.assertEqual(pkg.different_by_architecture(), 'arm64') - pkg = spack.db.get('multimethod=macos') + pkg = spack.repo.get('multimethod=macos') self.assertRaises(NoSuchMethodError, pkg.different_by_architecture) def test_dependency_match(self): 
- pkg = spack.db.get('multimethod^zmpi') + pkg = spack.repo.get('multimethod^zmpi') self.assertEqual(pkg.different_by_dep(), 'zmpi') - pkg = spack.db.get('multimethod^mpich') + pkg = spack.repo.get('multimethod^mpich') self.assertEqual(pkg.different_by_dep(), 'mpich') # If we try to switch on some entirely different dep, it's ambiguous, # but should take the first option - pkg = spack.db.get('multimethod^foobar') + pkg = spack.repo.get('multimethod^foobar') self.assertEqual(pkg.different_by_dep(), 'mpich') def test_virtual_dep_match(self): - pkg = spack.db.get('multimethod^mpich2') + pkg = spack.repo.get('multimethod^mpich2') self.assertEqual(pkg.different_by_virtual_dep(), 2) - pkg = spack.db.get('multimethod^mpich@1.0') + pkg = spack.repo.get('multimethod^mpich@1.0') self.assertEqual(pkg.different_by_virtual_dep(), 1) diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py index 0a132fd701..b2d3da91b1 100644 --- a/lib/spack/spack/test/package_sanity.py +++ b/lib/spack/spack/test/package_sanity.py @@ -35,8 +35,8 @@ class PackageSanityTest(unittest.TestCase): def check_db(self): """Get all packages in a DB to make sure they work.""" - for name in spack.db.all_package_names(): - spack.db.get(name) + for name in spack.repo.all_package_names(): + spack.repo.get(name) def test_get_all_packages(self): @@ -47,14 +47,14 @@ class PackageSanityTest(unittest.TestCase): def ztest_get_all_mock_packages(self): """Get the mock packages once each too.""" db = RepoPath(spack.mock_packages_path) - spack.db.swap(db) + spack.repo.swap(db) self.check_db() - spack.db.swap(db) + spack.repo.swap(db) def ztest_url_versions(self): """Check URLs for regular packages, if they are explicitly defined.""" - for pkg in spack.db.all_packages(): + for pkg in spack.repo.all_packages(): for v, vattrs in pkg.versions.items(): if 'url' in vattrs: # If there is a url for the version check it. 
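The test changes above all share one idiom: build a RepoPath over spack.mock_packages_path and exchange it with the live spack.repo for the duration of the test. Condensed into a standalone sketch (the try/finally framing is only illustrative; the real tests do this in setUp and cleanmock as shown):

    import spack
    from spack.repository import RepoPath

    mock = RepoPath(spack.mock_packages_path)
    spack.repo.swap(mock)                 # spack.repo now serves the mock packages;
                                          # 'mock' holds the original contents
    try:
        pkg = spack.repo.get('mpich')     # resolved against the mock repository
    finally:
        spack.repo.swap(mock)             # exchange back, restoring the real repos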
diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py index 8a786e364e..e39def2ff2 100644 --- a/lib/spack/spack/test/packages.py +++ b/lib/spack/spack/test/packages.py @@ -35,11 +35,11 @@ from spack.test.mock_packages_test import * class PackagesTest(MockPackagesTest): def test_load_package(self): - pkg = spack.db.get('mpich') + pkg = spack.repo.get('mpich') def test_package_name(self): - pkg = spack.db.get('mpich') + pkg = spack.repo.get('mpich') self.assertEqual(pkg.name, 'mpich') @@ -50,7 +50,7 @@ class PackagesTest(MockPackagesTest): def test_package_name(self): - pkg = spack.db.get('mpich') + pkg = spack.repo.get('mpich') self.assertEqual(pkg.name, 'mpich') diff --git a/lib/spack/spack/test/python_version.py b/lib/spack/spack/test/python_version.py index 5779d31ed2..d2f7ea45b2 100644 --- a/lib/spack/spack/test/python_version.py +++ b/lib/spack/spack/test/python_version.py @@ -54,8 +54,8 @@ class PythonVersionTest(unittest.TestCase): def package_py_files(self): - for name in spack.db.all_package_names(): - yield spack.db.filename_for_package_name(name) + for name in spack.repo.all_package_names(): + yield spack.repo.filename_for_package_name(name) def check_python_versions(self, *files): diff --git a/lib/spack/spack/test/svn_fetch.py b/lib/spack/spack/test/svn_fetch.py index 184fe8faa1..8abd089847 100644 --- a/lib/spack/spack/test/svn_fetch.py +++ b/lib/spack/spack/test/svn_fetch.py @@ -49,7 +49,7 @@ class SvnFetchTest(MockPackagesTest): spec = Spec('svn-test') spec.concretize() - self.pkg = spack.db.get(spec, new=True) + self.pkg = spack.repo.get(spec, new=True) def tearDown(self): diff --git a/lib/spack/spack/test/unit_install.py b/lib/spack/spack/test/unit_install.py index 7168272997..477ca0436d 100644 --- a/lib/spack/spack/test/unit_install.py +++ b/lib/spack/spack/test/unit_install.py @@ -79,15 +79,15 @@ class UnitInstallTest(unittest.TestCase): pkgX.installed = False pkgY.installed = False - self.saved_db = spack.db + self.saved_db = spack.repo pkgDb = MockPackageDb({specX:pkgX, specY:pkgY}) - spack.db = pkgDb + spack.repo = pkgDb def tearDown(self): super(UnitInstallTest, self).tearDown() - spack.db = self.saved_db + spack.repo = self.saved_db def test_installing_both(self): mo = MockOutput() -- cgit v1.2.3-70-g09d2 From 89d5127900dda96b2a583c4c1a9bdac8e51c1c15 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 26 Nov 2015 14:19:27 -0800 Subject: New, cleaner package repository structure. Package repositories now look like this: top-level-dir/ repo.yaml packages/ libelf/ package.py mpich/ package.py ... This leaves room at the top level for additional metadata, source, per-repo configs, indexes, etc., and it makes it easy to see that something is a spack repo (just look for repo.yaml and packages). 
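For concreteness, the builtin.mock repository that the diffstat below moves into place would be laid out roughly as follows. The repo.yaml keys are assumed to mirror the repo:/namespace: mapping read by _read_config() in the earlier commit; this excerpt does not show the new file's actual contents:

    var/spack/repos/builtin.mock/
        repo.yaml                  # e.g.  repo:
                                   #         namespace: builtin.mock
        packages/
            libelf/
                package.py
            mpich/
                package.py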
--- lib/spack/spack/__init__.py | 15 +- lib/spack/spack/cmd/repo.py | 2 +- lib/spack/spack/repository.py | 178 ++- var/spack/mock_packages/_repo.yaml | 2 - var/spack/mock_packages/a/package.py | 12 - var/spack/mock_packages/b/package.py | 12 - var/spack/mock_packages/c/package.py | 12 - var/spack/mock_packages/callpath/package.py | 41 - var/spack/mock_packages/direct_mpich/package.py | 36 - var/spack/mock_packages/dyninst/package.py | 44 - var/spack/mock_packages/e/package.py | 12 - var/spack/mock_packages/fake/package.py | 34 - var/spack/mock_packages/git-test/package.py | 10 - var/spack/mock_packages/hg-test/package.py | 10 - var/spack/mock_packages/indirect_mpich/package.py | 41 - var/spack/mock_packages/libdwarf/package.py | 44 - var/spack/mock_packages/libelf/package.py | 43 - var/spack/mock_packages/mpich/package.py | 46 - var/spack/mock_packages/mpich2/package.py | 47 - var/spack/mock_packages/mpileaks/package.py | 43 - var/spack/mock_packages/multimethod/package.py | 143 -- .../mock_packages/optional-dep-test-2/package.py | 18 - .../mock_packages/optional-dep-test-3/package.py | 17 - .../mock_packages/optional-dep-test/package.py | 29 - var/spack/mock_packages/svn-test/package.py | 10 - .../trivial_install_test_package/package.py | 38 - var/spack/mock_packages/zmpi/package.py | 39 - var/spack/packages/ImageMagick/package.py | 37 - var/spack/packages/Mitos/package.py | 19 - var/spack/packages/R/package.py | 33 - var/spack/packages/SAMRAI/no-tool-build.patch | 20 - var/spack/packages/SAMRAI/package.py | 53 - var/spack/packages/_repo.yaml | 2 - var/spack/packages/activeharmony/package.py | 15 - var/spack/packages/adept-utils/package.py | 42 - var/spack/packages/apex/package.py | 34 - var/spack/packages/arpack/package.py | 41 - var/spack/packages/asciidoc/package.py | 18 - var/spack/packages/atk/package.py | 18 - var/spack/packages/atlas/package.py | 60 - var/spack/packages/autoconf/package.py | 14 - var/spack/packages/automaded/package.py | 51 - var/spack/packages/automake/package.py | 16 - var/spack/packages/bear/package.py | 17 - var/spack/packages/bib2xhtml/package.py | 27 - var/spack/packages/binutils/package.py | 30 - var/spack/packages/bison/package.py | 17 - var/spack/packages/boost/package.py | 66 - var/spack/packages/bowtie2/bowtie2-2.5.patch | 16 - var/spack/packages/bowtie2/package.py | 24 - var/spack/packages/boxlib/package.py | 25 - var/spack/packages/bzip2/package.py | 36 - var/spack/packages/cairo/package.py | 19 - var/spack/packages/callpath/package.py | 47 - var/spack/packages/cblas/package.py | 35 - var/spack/packages/cgm/package.py | 30 - var/spack/packages/clang/package.py | 51 - var/spack/packages/cloog/package.py | 26 - var/spack/packages/cmake/package.py | 45 - var/spack/packages/coreutils/package.py | 17 - var/spack/packages/cppcheck/package.py | 15 - var/spack/packages/cram/package.py | 15 - var/spack/packages/cscope/package.py | 17 - var/spack/packages/cube/package.py | 55 - var/spack/packages/czmq/package.py | 19 - var/spack/packages/dbus/package.py | 31 - var/spack/packages/docbook-xml/package.py | 19 - var/spack/packages/doxygen/package.py | 25 - var/spack/packages/dri2proto/package.py | 14 - var/spack/packages/dtcmp/package.py | 20 - var/spack/packages/dyninst/package.py | 68 - var/spack/packages/elfutils/package.py | 26 - var/spack/packages/extrae/package.py | 46 - var/spack/packages/exuberant-ctags/package.py | 14 - var/spack/packages/fish/package.py | 18 - var/spack/packages/flex/package.py | 15 - var/spack/packages/flux/package.py | 36 - 
var/spack/packages/fontconfig/package.py | 16 - var/spack/packages/freetype/package.py | 16 - var/spack/packages/gasnet/package.py | 35 - var/spack/packages/gcc/package.py | 122 -- var/spack/packages/gdk-pixbuf/package.py | 22 - var/spack/packages/geos/package.py | 31 - var/spack/packages/gflags/package.py | 21 - var/spack/packages/ghostscript/package.py | 17 - var/spack/packages/git/package.py | 27 - var/spack/packages/glib/package.py | 18 - var/spack/packages/glm/package.py | 19 - var/spack/packages/global/package.py | 24 - var/spack/packages/glog/package.py | 15 - var/spack/packages/gmp/package.py | 40 - var/spack/packages/gnutls/package.py | 22 - var/spack/packages/gperf/package.py | 19 - var/spack/packages/gperftools/package.py | 38 - var/spack/packages/graphlib/package.py | 14 - var/spack/packages/graphviz/package.py | 21 - var/spack/packages/gtkplus/package.py | 22 - var/spack/packages/harfbuzz/package.py | 20 - var/spack/packages/hdf5/package.py | 42 - var/spack/packages/hwloc/package.py | 25 - var/spack/packages/hypre/package.py | 32 - var/spack/packages/icu/package.py | 25 - var/spack/packages/icu4c/package.py | 17 - var/spack/packages/isl/package.py | 17 - var/spack/packages/jdk/package.py | 46 - var/spack/packages/jpeg/package.py | 14 - var/spack/packages/launchmon/package.py | 47 - .../packages/launchmon/patch.lmon_install_dir | 147 -- var/spack/packages/lcms/package.py | 19 - var/spack/packages/leveldb/package.py | 29 - var/spack/packages/libNBC/package.py | 43 - var/spack/packages/libarchive/package.py | 16 - var/spack/packages/libcircle/package.py | 18 - var/spack/packages/libdrm/package.py | 18 - var/spack/packages/libdwarf/package.py | 81 - var/spack/packages/libelf/package.py | 49 - var/spack/packages/libevent/package.py | 30 - var/spack/packages/libffi/package.py | 17 - var/spack/packages/libgcrypt/package.py | 19 - var/spack/packages/libgpg-error/package.py | 17 - var/spack/packages/libjpeg-turbo/package.py | 20 - var/spack/packages/libjson-c/package.py | 14 - var/spack/packages/libmng/package.py | 23 - var/spack/packages/libmonitor/package.py | 36 - var/spack/packages/libpciaccess/package.py | 21 - var/spack/packages/libpng/package.py | 15 - var/spack/packages/libsodium/package.py | 19 - var/spack/packages/libtiff/package.py | 18 - var/spack/packages/libtool/package.py | 14 - var/spack/packages/libunwind/package.py | 38 - var/spack/packages/libuuid/package.py | 16 - var/spack/packages/libxcb/package.py | 21 - var/spack/packages/libxml2/package.py | 20 - var/spack/packages/libxshmfence/package.py | 16 - var/spack/packages/libxslt/package.py | 24 - var/spack/packages/llvm-lld/package.py | 46 - var/spack/packages/llvm/package.py | 53 - var/spack/packages/lmdb/package.py | 39 - var/spack/packages/lua/package.py | 26 - var/spack/packages/lwgrp/package.py | 18 - var/spack/packages/lwm2/package.py | 18 - var/spack/packages/matio/package.py | 15 - var/spack/packages/memaxes/package.py | 19 - var/spack/packages/mesa/package.py | 34 - var/spack/packages/metis/package.py | 27 - var/spack/packages/mpc/package.py | 42 - var/spack/packages/mpe2/mpe2.patch | 12 - var/spack/packages/mpe2/package.py | 28 - var/spack/packages/mpfr/package.py | 41 - var/spack/packages/mpibash/mpibash-4.3.patch | 1565 -------------------- var/spack/packages/mpibash/package.py | 32 - var/spack/packages/mpich/package.py | 92 -- var/spack/packages/mpileaks/package.py | 44 - var/spack/packages/mrnet/package.py | 20 - var/spack/packages/munge/package.py | 20 - var/spack/packages/muster/package.py | 22 - 
.../mvapich2/ad_lustre_rwcontig_open_source.patch | 11 - var/spack/packages/mvapich2/package.py | 104 -- var/spack/packages/nasm/package.py | 14 - var/spack/packages/ncdu/package.py | 28 - var/spack/packages/ncurses/package.py | 33 - var/spack/packages/netcdf/netcdf-4.3.3-mpi.patch | 25 - var/spack/packages/netcdf/package.py | 27 - var/spack/packages/netgauge/package.py | 43 - var/spack/packages/netlib-blas/package.py | 46 - var/spack/packages/netlib-lapack/package.py | 59 - var/spack/packages/nettle/package.py | 17 - var/spack/packages/ompss/package.py | 50 - var/spack/packages/ompt-openmp/package.py | 23 - var/spack/packages/opari2/package.py | 65 - .../openmpi/ad_lustre_rwcontig_open_source.patch | 11 - var/spack/packages/openmpi/llnl-platforms.patch | 151 -- var/spack/packages/openmpi/package.py | 109 -- var/spack/packages/openssl/package.py | 26 - var/spack/packages/otf/package.py | 21 - var/spack/packages/otf2/package.py | 74 - var/spack/packages/pango/package.py | 19 - var/spack/packages/papi/package.py | 35 - var/spack/packages/paraver/package.py | 41 - var/spack/packages/paraview/package.py | 72 - var/spack/packages/parmetis/package.py | 26 - var/spack/packages/parpack/package.py | 43 - var/spack/packages/pcre/package.py | 15 - var/spack/packages/petsc/package.py | 40 - var/spack/packages/pidx/package.py | 21 - var/spack/packages/pixman/package.py | 18 - var/spack/packages/pkg-config/package.py | 17 - var/spack/packages/pmgr_collective/package.py | 37 - var/spack/packages/postgresql/package.py | 20 - var/spack/packages/ppl/package.py | 28 - var/spack/packages/protobuf/package.py | 16 - var/spack/packages/py-basemap/package.py | 20 - var/spack/packages/py-biopython/package.py | 15 - var/spack/packages/py-cffi/package.py | 17 - var/spack/packages/py-cython/package.py | 14 - var/spack/packages/py-dateutil/package.py | 16 - var/spack/packages/py-epydoc/package.py | 13 - var/spack/packages/py-genders/package.py | 15 - var/spack/packages/py-gnuplot/package.py | 14 - var/spack/packages/py-h5py/package.py | 19 - var/spack/packages/py-ipython/package.py | 16 - var/spack/packages/py-libxml2/package.py | 15 - var/spack/packages/py-lockfile/package.py | 23 - var/spack/packages/py-mako/package.py | 16 - var/spack/packages/py-matplotlib/package.py | 47 - var/spack/packages/py-mock/package.py | 17 - var/spack/packages/py-mpi4py/package.py | 14 - var/spack/packages/py-mx/package.py | 13 - var/spack/packages/py-nose/package.py | 17 - var/spack/packages/py-numpy/package.py | 28 - var/spack/packages/py-pandas/package.py | 25 - var/spack/packages/py-pexpect/package.py | 13 - var/spack/packages/py-pil/package.py | 14 - var/spack/packages/py-pmw/package.py | 13 - var/spack/packages/py-pychecker/package.py | 13 - var/spack/packages/py-pycparser/package.py | 15 - var/spack/packages/py-pyelftools/package.py | 13 - var/spack/packages/py-pygments/package.py | 15 - var/spack/packages/py-pylint/package.py | 17 - var/spack/packages/py-pypar/package.py | 14 - var/spack/packages/py-pyparsing/package.py | 13 - var/spack/packages/py-pyqt/package.py | 24 - var/spack/packages/py-pyside/package.py | 45 - var/spack/packages/py-python-daemon/package.py | 26 - var/spack/packages/py-pytz/package.py | 14 - var/spack/packages/py-rpy2/package.py | 17 - var/spack/packages/py-scientificpython/package.py | 16 - var/spack/packages/py-scikit-learn/package.py | 14 - var/spack/packages/py-scipy/package.py | 18 - var/spack/packages/py-setuptools/package.py | 15 - var/spack/packages/py-shiboken/package.py | 45 - 
var/spack/packages/py-sip/package.py | 21 - var/spack/packages/py-six/package.py | 14 - var/spack/packages/py-sphinx/package.py | 13 - var/spack/packages/py-sympy/package.py | 13 - var/spack/packages/py-virtualenv/package.py | 16 - var/spack/packages/py-yapf/package.py | 15 - var/spack/packages/python/package.py | 160 -- var/spack/packages/qhull/package.py | 27 - var/spack/packages/qt/package.py | 109 -- var/spack/packages/qthreads/package.py | 22 - var/spack/packages/ravel/package.py | 23 - var/spack/packages/readline/package.py | 21 - .../rose/add_spack_compiler_recognition.patch | 13 - var/spack/packages/rose/package.py | 39 - var/spack/packages/ruby/package.py | 41 - var/spack/packages/samtools/package.py | 18 - var/spack/packages/samtools/samtools1.2.patch | 20 - var/spack/packages/scalasca/package.py | 65 - var/spack/packages/scorep/package.py | 74 - var/spack/packages/scotch/package.py | 40 - var/spack/packages/scr/package.py | 44 - var/spack/packages/silo/package.py | 19 - var/spack/packages/snappy/package.py | 15 - var/spack/packages/spindle/package.py | 44 - var/spack/packages/sqlite/package.py | 40 - var/spack/packages/stat/configure_mpicxx.patch | 19 - var/spack/packages/stat/package.py | 40 - var/spack/packages/sundials/package.py | 39 - var/spack/packages/swig/package.py | 46 - var/spack/packages/task/package.py | 20 - var/spack/packages/taskd/package.py | 20 - var/spack/packages/tau/package.py | 36 - var/spack/packages/tcl/package.py | 22 - var/spack/packages/the_silver_searcher/package.py | 17 - var/spack/packages/thrift/package.py | 44 - var/spack/packages/tk/package.py | 22 - var/spack/packages/tmux/package.py | 24 - var/spack/packages/tmuxinator/package.py | 17 - var/spack/packages/trilinos/package.py | 50 - var/spack/packages/uncrustify/package.py | 14 - var/spack/packages/util-linux/package.py | 20 - var/spack/packages/vim/package.py | 83 -- var/spack/packages/vtk/package.py | 40 - var/spack/packages/wget/package.py | 21 - var/spack/packages/wx/package.py | 24 - var/spack/packages/wxpropgrid/package.py | 20 - var/spack/packages/xcb-proto/package.py | 15 - var/spack/packages/xz/package.py | 20 - var/spack/packages/yasm/package.py | 16 - var/spack/packages/zeromq/package.py | 20 - var/spack/packages/zlib/package.py | 18 - var/spack/packages/zsh/package.py | 16 - var/spack/repos/builtin.mock/packages/a/package.py | 12 + var/spack/repos/builtin.mock/packages/b/package.py | 12 + var/spack/repos/builtin.mock/packages/c/package.py | 12 + .../builtin.mock/packages/callpath/package.py | 41 + .../builtin.mock/packages/direct_mpich/package.py | 36 + .../repos/builtin.mock/packages/dyninst/package.py | 44 + var/spack/repos/builtin.mock/packages/e/package.py | 12 + .../repos/builtin.mock/packages/fake/package.py | 34 + .../builtin.mock/packages/git-test/package.py | 10 + .../repos/builtin.mock/packages/hg-test/package.py | 10 + .../packages/indirect_mpich/package.py | 41 + .../builtin.mock/packages/libdwarf/package.py | 44 + .../repos/builtin.mock/packages/libelf/package.py | 43 + .../repos/builtin.mock/packages/mpich/package.py | 46 + .../repos/builtin.mock/packages/mpich2/package.py | 47 + .../builtin.mock/packages/mpileaks/package.py | 43 + .../builtin.mock/packages/multimethod/package.py | 143 ++ .../packages/optional-dep-test-2/package.py | 18 + .../packages/optional-dep-test-3/package.py | 17 + .../packages/optional-dep-test/package.py | 29 + .../builtin.mock/packages/svn-test/package.py | 10 + .../trivial_install_test_package/package.py | 38 + 
.../repos/builtin.mock/packages/zmpi/package.py | 39 + var/spack/repos/builtin.mock/repo.yaml | 2 + .../repos/builtin/packages/ImageMagick/package.py | 37 + var/spack/repos/builtin/packages/Mitos/package.py | 19 + var/spack/repos/builtin/packages/R/package.py | 33 + .../builtin/packages/SAMRAI/no-tool-build.patch | 20 + var/spack/repos/builtin/packages/SAMRAI/package.py | 53 + .../builtin/packages/activeharmony/package.py | 15 + .../repos/builtin/packages/adept-utils/package.py | 42 + var/spack/repos/builtin/packages/apex/package.py | 34 + var/spack/repos/builtin/packages/arpack/package.py | 41 + .../repos/builtin/packages/asciidoc/package.py | 18 + var/spack/repos/builtin/packages/atk/package.py | 18 + var/spack/repos/builtin/packages/atlas/package.py | 60 + .../repos/builtin/packages/autoconf/package.py | 14 + .../repos/builtin/packages/automaded/package.py | 51 + .../repos/builtin/packages/automake/package.py | 16 + var/spack/repos/builtin/packages/bear/package.py | 17 + .../repos/builtin/packages/bib2xhtml/package.py | 27 + .../repos/builtin/packages/binutils/package.py | 30 + var/spack/repos/builtin/packages/bison/package.py | 17 + var/spack/repos/builtin/packages/boost/package.py | 66 + .../builtin/packages/bowtie2/bowtie2-2.5.patch | 16 + .../repos/builtin/packages/bowtie2/package.py | 24 + var/spack/repos/builtin/packages/boxlib/package.py | 25 + var/spack/repos/builtin/packages/bzip2/package.py | 36 + var/spack/repos/builtin/packages/cairo/package.py | 19 + .../repos/builtin/packages/callpath/package.py | 47 + var/spack/repos/builtin/packages/cblas/package.py | 35 + var/spack/repos/builtin/packages/cgm/package.py | 30 + var/spack/repos/builtin/packages/clang/package.py | 51 + var/spack/repos/builtin/packages/cloog/package.py | 26 + var/spack/repos/builtin/packages/cmake/package.py | 45 + .../repos/builtin/packages/coreutils/package.py | 17 + .../repos/builtin/packages/cppcheck/package.py | 15 + var/spack/repos/builtin/packages/cram/package.py | 15 + var/spack/repos/builtin/packages/cscope/package.py | 17 + var/spack/repos/builtin/packages/cube/package.py | 55 + var/spack/repos/builtin/packages/czmq/package.py | 19 + var/spack/repos/builtin/packages/dbus/package.py | 31 + .../repos/builtin/packages/docbook-xml/package.py | 19 + .../repos/builtin/packages/doxygen/package.py | 25 + .../repos/builtin/packages/dri2proto/package.py | 14 + var/spack/repos/builtin/packages/dtcmp/package.py | 20 + .../repos/builtin/packages/dyninst/package.py | 68 + .../repos/builtin/packages/elfutils/package.py | 26 + var/spack/repos/builtin/packages/extrae/package.py | 46 + .../builtin/packages/exuberant-ctags/package.py | 14 + var/spack/repos/builtin/packages/fish/package.py | 18 + var/spack/repos/builtin/packages/flex/package.py | 15 + var/spack/repos/builtin/packages/flux/package.py | 36 + .../repos/builtin/packages/fontconfig/package.py | 16 + .../repos/builtin/packages/freetype/package.py | 16 + var/spack/repos/builtin/packages/gasnet/package.py | 35 + var/spack/repos/builtin/packages/gcc/package.py | 122 ++ .../repos/builtin/packages/gdk-pixbuf/package.py | 22 + var/spack/repos/builtin/packages/geos/package.py | 31 + var/spack/repos/builtin/packages/gflags/package.py | 21 + .../repos/builtin/packages/ghostscript/package.py | 17 + var/spack/repos/builtin/packages/git/package.py | 27 + var/spack/repos/builtin/packages/glib/package.py | 18 + var/spack/repos/builtin/packages/glm/package.py | 19 + var/spack/repos/builtin/packages/global/package.py | 24 + var/spack/repos/builtin/packages/glog/package.py 
| 15 + var/spack/repos/builtin/packages/gmp/package.py | 40 + var/spack/repos/builtin/packages/gnutls/package.py | 22 + var/spack/repos/builtin/packages/gperf/package.py | 19 + .../repos/builtin/packages/gperftools/package.py | 38 + .../repos/builtin/packages/graphlib/package.py | 14 + .../repos/builtin/packages/graphviz/package.py | 21 + .../repos/builtin/packages/gtkplus/package.py | 22 + .../repos/builtin/packages/harfbuzz/package.py | 20 + var/spack/repos/builtin/packages/hdf5/package.py | 42 + var/spack/repos/builtin/packages/hwloc/package.py | 25 + var/spack/repos/builtin/packages/hypre/package.py | 32 + var/spack/repos/builtin/packages/icu/package.py | 25 + var/spack/repos/builtin/packages/icu4c/package.py | 17 + var/spack/repos/builtin/packages/isl/package.py | 17 + var/spack/repos/builtin/packages/jdk/package.py | 46 + var/spack/repos/builtin/packages/jpeg/package.py | 14 + .../repos/builtin/packages/launchmon/package.py | 47 + .../packages/launchmon/patch.lmon_install_dir | 147 ++ var/spack/repos/builtin/packages/lcms/package.py | 19 + .../repos/builtin/packages/leveldb/package.py | 29 + var/spack/repos/builtin/packages/libNBC/package.py | 43 + .../repos/builtin/packages/libarchive/package.py | 16 + .../repos/builtin/packages/libcircle/package.py | 18 + var/spack/repos/builtin/packages/libdrm/package.py | 18 + .../repos/builtin/packages/libdwarf/package.py | 81 + var/spack/repos/builtin/packages/libelf/package.py | 49 + .../repos/builtin/packages/libevent/package.py | 30 + var/spack/repos/builtin/packages/libffi/package.py | 17 + .../repos/builtin/packages/libgcrypt/package.py | 19 + .../repos/builtin/packages/libgpg-error/package.py | 17 + .../builtin/packages/libjpeg-turbo/package.py | 20 + .../repos/builtin/packages/libjson-c/package.py | 14 + var/spack/repos/builtin/packages/libmng/package.py | 23 + .../repos/builtin/packages/libmonitor/package.py | 36 + .../repos/builtin/packages/libpciaccess/package.py | 21 + var/spack/repos/builtin/packages/libpng/package.py | 15 + .../repos/builtin/packages/libsodium/package.py | 19 + .../repos/builtin/packages/libtiff/package.py | 18 + .../repos/builtin/packages/libtool/package.py | 14 + .../repos/builtin/packages/libunwind/package.py | 38 + .../repos/builtin/packages/libuuid/package.py | 16 + var/spack/repos/builtin/packages/libxcb/package.py | 21 + .../repos/builtin/packages/libxml2/package.py | 20 + .../repos/builtin/packages/libxshmfence/package.py | 16 + .../repos/builtin/packages/libxslt/package.py | 24 + .../repos/builtin/packages/llvm-lld/package.py | 46 + var/spack/repos/builtin/packages/llvm/package.py | 53 + var/spack/repos/builtin/packages/lmdb/package.py | 39 + var/spack/repos/builtin/packages/lua/package.py | 26 + var/spack/repos/builtin/packages/lwgrp/package.py | 18 + var/spack/repos/builtin/packages/lwm2/package.py | 18 + var/spack/repos/builtin/packages/matio/package.py | 15 + .../repos/builtin/packages/memaxes/package.py | 19 + var/spack/repos/builtin/packages/mesa/package.py | 34 + var/spack/repos/builtin/packages/metis/package.py | 27 + var/spack/repos/builtin/packages/mpc/package.py | 42 + var/spack/repos/builtin/packages/mpe2/mpe2.patch | 12 + var/spack/repos/builtin/packages/mpe2/package.py | 28 + var/spack/repos/builtin/packages/mpfr/package.py | 41 + .../builtin/packages/mpibash/mpibash-4.3.patch | 1565 ++++++++++++++++++++ .../repos/builtin/packages/mpibash/package.py | 32 + var/spack/repos/builtin/packages/mpich/package.py | 92 ++ .../repos/builtin/packages/mpileaks/package.py | 44 + 
var/spack/repos/builtin/packages/mrnet/package.py | 20 + var/spack/repos/builtin/packages/munge/package.py | 20 + var/spack/repos/builtin/packages/muster/package.py | 22 + .../mvapich2/ad_lustre_rwcontig_open_source.patch | 11 + .../repos/builtin/packages/mvapich2/package.py | 104 ++ var/spack/repos/builtin/packages/nasm/package.py | 14 + var/spack/repos/builtin/packages/ncdu/package.py | 28 + .../repos/builtin/packages/ncurses/package.py | 33 + .../builtin/packages/netcdf/netcdf-4.3.3-mpi.patch | 25 + var/spack/repos/builtin/packages/netcdf/package.py | 27 + .../repos/builtin/packages/netgauge/package.py | 43 + .../repos/builtin/packages/netlib-blas/package.py | 46 + .../builtin/packages/netlib-lapack/package.py | 59 + var/spack/repos/builtin/packages/nettle/package.py | 17 + var/spack/repos/builtin/packages/ompss/package.py | 50 + .../repos/builtin/packages/ompt-openmp/package.py | 23 + var/spack/repos/builtin/packages/opari2/package.py | 65 + .../openmpi/ad_lustre_rwcontig_open_source.patch | 11 + .../builtin/packages/openmpi/llnl-platforms.patch | 151 ++ .../repos/builtin/packages/openmpi/package.py | 109 ++ .../repos/builtin/packages/openssl/package.py | 26 + var/spack/repos/builtin/packages/otf/package.py | 21 + var/spack/repos/builtin/packages/otf2/package.py | 74 + var/spack/repos/builtin/packages/pango/package.py | 19 + var/spack/repos/builtin/packages/papi/package.py | 35 + .../repos/builtin/packages/paraver/package.py | 41 + .../repos/builtin/packages/paraview/package.py | 72 + .../repos/builtin/packages/parmetis/package.py | 26 + .../repos/builtin/packages/parpack/package.py | 43 + var/spack/repos/builtin/packages/pcre/package.py | 15 + var/spack/repos/builtin/packages/petsc/package.py | 40 + var/spack/repos/builtin/packages/pidx/package.py | 21 + var/spack/repos/builtin/packages/pixman/package.py | 18 + .../repos/builtin/packages/pkg-config/package.py | 17 + .../builtin/packages/pmgr_collective/package.py | 37 + .../repos/builtin/packages/postgresql/package.py | 20 + var/spack/repos/builtin/packages/ppl/package.py | 28 + .../repos/builtin/packages/protobuf/package.py | 16 + .../repos/builtin/packages/py-basemap/package.py | 20 + .../repos/builtin/packages/py-biopython/package.py | 15 + .../repos/builtin/packages/py-cffi/package.py | 17 + .../repos/builtin/packages/py-cython/package.py | 14 + .../repos/builtin/packages/py-dateutil/package.py | 16 + .../repos/builtin/packages/py-epydoc/package.py | 13 + .../repos/builtin/packages/py-genders/package.py | 15 + .../repos/builtin/packages/py-gnuplot/package.py | 14 + .../repos/builtin/packages/py-h5py/package.py | 19 + .../repos/builtin/packages/py-ipython/package.py | 16 + .../repos/builtin/packages/py-libxml2/package.py | 15 + .../repos/builtin/packages/py-lockfile/package.py | 23 + .../repos/builtin/packages/py-mako/package.py | 16 + .../builtin/packages/py-matplotlib/package.py | 47 + .../repos/builtin/packages/py-mock/package.py | 17 + .../repos/builtin/packages/py-mpi4py/package.py | 14 + var/spack/repos/builtin/packages/py-mx/package.py | 13 + .../repos/builtin/packages/py-nose/package.py | 17 + .../repos/builtin/packages/py-numpy/package.py | 28 + .../repos/builtin/packages/py-pandas/package.py | 25 + .../repos/builtin/packages/py-pexpect/package.py | 13 + var/spack/repos/builtin/packages/py-pil/package.py | 14 + var/spack/repos/builtin/packages/py-pmw/package.py | 13 + .../repos/builtin/packages/py-pychecker/package.py | 13 + .../repos/builtin/packages/py-pycparser/package.py | 15 + 
.../builtin/packages/py-pyelftools/package.py | 13 + .../repos/builtin/packages/py-pygments/package.py | 15 + .../repos/builtin/packages/py-pylint/package.py | 17 + .../repos/builtin/packages/py-pypar/package.py | 14 + .../repos/builtin/packages/py-pyparsing/package.py | 13 + .../repos/builtin/packages/py-pyqt/package.py | 24 + .../repos/builtin/packages/py-pyside/package.py | 45 + .../builtin/packages/py-python-daemon/package.py | 26 + .../repos/builtin/packages/py-pytz/package.py | 14 + .../repos/builtin/packages/py-rpy2/package.py | 17 + .../packages/py-scientificpython/package.py | 16 + .../builtin/packages/py-scikit-learn/package.py | 14 + .../repos/builtin/packages/py-scipy/package.py | 18 + .../builtin/packages/py-setuptools/package.py | 15 + .../repos/builtin/packages/py-shiboken/package.py | 45 + var/spack/repos/builtin/packages/py-sip/package.py | 21 + var/spack/repos/builtin/packages/py-six/package.py | 14 + .../repos/builtin/packages/py-sphinx/package.py | 13 + .../repos/builtin/packages/py-sympy/package.py | 13 + .../builtin/packages/py-virtualenv/package.py | 16 + .../repos/builtin/packages/py-yapf/package.py | 15 + var/spack/repos/builtin/packages/python/package.py | 160 ++ var/spack/repos/builtin/packages/qhull/package.py | 27 + var/spack/repos/builtin/packages/qt/package.py | 109 ++ .../repos/builtin/packages/qthreads/package.py | 22 + var/spack/repos/builtin/packages/ravel/package.py | 23 + .../repos/builtin/packages/readline/package.py | 21 + .../rose/add_spack_compiler_recognition.patch | 13 + var/spack/repos/builtin/packages/rose/package.py | 39 + var/spack/repos/builtin/packages/ruby/package.py | 41 + .../repos/builtin/packages/samtools/package.py | 18 + .../builtin/packages/samtools/samtools1.2.patch | 20 + .../repos/builtin/packages/scalasca/package.py | 65 + var/spack/repos/builtin/packages/scorep/package.py | 74 + var/spack/repos/builtin/packages/scotch/package.py | 40 + var/spack/repos/builtin/packages/scr/package.py | 44 + var/spack/repos/builtin/packages/silo/package.py | 19 + var/spack/repos/builtin/packages/snappy/package.py | 15 + .../repos/builtin/packages/spindle/package.py | 44 + var/spack/repos/builtin/packages/sqlite/package.py | 40 + .../builtin/packages/stat/configure_mpicxx.patch | 19 + var/spack/repos/builtin/packages/stat/package.py | 40 + .../repos/builtin/packages/sundials/package.py | 39 + var/spack/repos/builtin/packages/swig/package.py | 46 + var/spack/repos/builtin/packages/task/package.py | 20 + var/spack/repos/builtin/packages/taskd/package.py | 20 + var/spack/repos/builtin/packages/tau/package.py | 36 + var/spack/repos/builtin/packages/tcl/package.py | 22 + .../packages/the_silver_searcher/package.py | 17 + var/spack/repos/builtin/packages/thrift/package.py | 44 + var/spack/repos/builtin/packages/tk/package.py | 22 + var/spack/repos/builtin/packages/tmux/package.py | 24 + .../repos/builtin/packages/tmuxinator/package.py | 17 + .../repos/builtin/packages/trilinos/package.py | 50 + .../repos/builtin/packages/uncrustify/package.py | 14 + .../repos/builtin/packages/util-linux/package.py | 20 + var/spack/repos/builtin/packages/vim/package.py | 83 ++ var/spack/repos/builtin/packages/vtk/package.py | 40 + var/spack/repos/builtin/packages/wget/package.py | 21 + var/spack/repos/builtin/packages/wx/package.py | 24 + .../repos/builtin/packages/wxpropgrid/package.py | 20 + .../repos/builtin/packages/xcb-proto/package.py | 15 + var/spack/repos/builtin/packages/xz/package.py | 20 + var/spack/repos/builtin/packages/yasm/package.py | 16 + 
var/spack/repos/builtin/packages/zeromq/package.py | 20 + var/spack/repos/builtin/packages/zlib/package.py | 18 + var/spack/repos/builtin/packages/zsh/package.py | 16 + var/spack/repos/builtin/repo.yaml | 2 + 563 files changed, 10048 insertions(+), 9975 deletions(-) delete mode 100644 var/spack/mock_packages/_repo.yaml delete mode 100644 var/spack/mock_packages/a/package.py delete mode 100644 var/spack/mock_packages/b/package.py delete mode 100644 var/spack/mock_packages/c/package.py delete mode 100644 var/spack/mock_packages/callpath/package.py delete mode 100644 var/spack/mock_packages/direct_mpich/package.py delete mode 100644 var/spack/mock_packages/dyninst/package.py delete mode 100644 var/spack/mock_packages/e/package.py delete mode 100644 var/spack/mock_packages/fake/package.py delete mode 100644 var/spack/mock_packages/git-test/package.py delete mode 100644 var/spack/mock_packages/hg-test/package.py delete mode 100644 var/spack/mock_packages/indirect_mpich/package.py delete mode 100644 var/spack/mock_packages/libdwarf/package.py delete mode 100644 var/spack/mock_packages/libelf/package.py delete mode 100644 var/spack/mock_packages/mpich/package.py delete mode 100644 var/spack/mock_packages/mpich2/package.py delete mode 100644 var/spack/mock_packages/mpileaks/package.py delete mode 100644 var/spack/mock_packages/multimethod/package.py delete mode 100644 var/spack/mock_packages/optional-dep-test-2/package.py delete mode 100644 var/spack/mock_packages/optional-dep-test-3/package.py delete mode 100644 var/spack/mock_packages/optional-dep-test/package.py delete mode 100644 var/spack/mock_packages/svn-test/package.py delete mode 100644 var/spack/mock_packages/trivial_install_test_package/package.py delete mode 100644 var/spack/mock_packages/zmpi/package.py delete mode 100644 var/spack/packages/ImageMagick/package.py delete mode 100644 var/spack/packages/Mitos/package.py delete mode 100644 var/spack/packages/R/package.py delete mode 100644 var/spack/packages/SAMRAI/no-tool-build.patch delete mode 100644 var/spack/packages/SAMRAI/package.py delete mode 100644 var/spack/packages/_repo.yaml delete mode 100644 var/spack/packages/activeharmony/package.py delete mode 100644 var/spack/packages/adept-utils/package.py delete mode 100644 var/spack/packages/apex/package.py delete mode 100644 var/spack/packages/arpack/package.py delete mode 100644 var/spack/packages/asciidoc/package.py delete mode 100644 var/spack/packages/atk/package.py delete mode 100644 var/spack/packages/atlas/package.py delete mode 100644 var/spack/packages/autoconf/package.py delete mode 100644 var/spack/packages/automaded/package.py delete mode 100644 var/spack/packages/automake/package.py delete mode 100644 var/spack/packages/bear/package.py delete mode 100644 var/spack/packages/bib2xhtml/package.py delete mode 100644 var/spack/packages/binutils/package.py delete mode 100644 var/spack/packages/bison/package.py delete mode 100644 var/spack/packages/boost/package.py delete mode 100644 var/spack/packages/bowtie2/bowtie2-2.5.patch delete mode 100644 var/spack/packages/bowtie2/package.py delete mode 100644 var/spack/packages/boxlib/package.py delete mode 100644 var/spack/packages/bzip2/package.py delete mode 100644 var/spack/packages/cairo/package.py delete mode 100644 var/spack/packages/callpath/package.py delete mode 100644 var/spack/packages/cblas/package.py delete mode 100644 var/spack/packages/cgm/package.py delete mode 100644 var/spack/packages/clang/package.py delete mode 100644 var/spack/packages/cloog/package.py delete 
mode 100644 var/spack/packages/cmake/package.py delete mode 100644 var/spack/packages/coreutils/package.py delete mode 100644 var/spack/packages/cppcheck/package.py delete mode 100644 var/spack/packages/cram/package.py delete mode 100644 var/spack/packages/cscope/package.py delete mode 100644 var/spack/packages/cube/package.py delete mode 100644 var/spack/packages/czmq/package.py delete mode 100644 var/spack/packages/dbus/package.py delete mode 100644 var/spack/packages/docbook-xml/package.py delete mode 100644 var/spack/packages/doxygen/package.py delete mode 100644 var/spack/packages/dri2proto/package.py delete mode 100644 var/spack/packages/dtcmp/package.py delete mode 100644 var/spack/packages/dyninst/package.py delete mode 100644 var/spack/packages/elfutils/package.py delete mode 100644 var/spack/packages/extrae/package.py delete mode 100644 var/spack/packages/exuberant-ctags/package.py delete mode 100644 var/spack/packages/fish/package.py delete mode 100644 var/spack/packages/flex/package.py delete mode 100644 var/spack/packages/flux/package.py delete mode 100644 var/spack/packages/fontconfig/package.py delete mode 100644 var/spack/packages/freetype/package.py delete mode 100644 var/spack/packages/gasnet/package.py delete mode 100644 var/spack/packages/gcc/package.py delete mode 100644 var/spack/packages/gdk-pixbuf/package.py delete mode 100644 var/spack/packages/geos/package.py delete mode 100644 var/spack/packages/gflags/package.py delete mode 100644 var/spack/packages/ghostscript/package.py delete mode 100644 var/spack/packages/git/package.py delete mode 100644 var/spack/packages/glib/package.py delete mode 100644 var/spack/packages/glm/package.py delete mode 100644 var/spack/packages/global/package.py delete mode 100644 var/spack/packages/glog/package.py delete mode 100644 var/spack/packages/gmp/package.py delete mode 100644 var/spack/packages/gnutls/package.py delete mode 100644 var/spack/packages/gperf/package.py delete mode 100644 var/spack/packages/gperftools/package.py delete mode 100644 var/spack/packages/graphlib/package.py delete mode 100644 var/spack/packages/graphviz/package.py delete mode 100644 var/spack/packages/gtkplus/package.py delete mode 100644 var/spack/packages/harfbuzz/package.py delete mode 100644 var/spack/packages/hdf5/package.py delete mode 100644 var/spack/packages/hwloc/package.py delete mode 100644 var/spack/packages/hypre/package.py delete mode 100644 var/spack/packages/icu/package.py delete mode 100644 var/spack/packages/icu4c/package.py delete mode 100644 var/spack/packages/isl/package.py delete mode 100644 var/spack/packages/jdk/package.py delete mode 100644 var/spack/packages/jpeg/package.py delete mode 100644 var/spack/packages/launchmon/package.py delete mode 100644 var/spack/packages/launchmon/patch.lmon_install_dir delete mode 100644 var/spack/packages/lcms/package.py delete mode 100644 var/spack/packages/leveldb/package.py delete mode 100644 var/spack/packages/libNBC/package.py delete mode 100644 var/spack/packages/libarchive/package.py delete mode 100644 var/spack/packages/libcircle/package.py delete mode 100644 var/spack/packages/libdrm/package.py delete mode 100644 var/spack/packages/libdwarf/package.py delete mode 100644 var/spack/packages/libelf/package.py delete mode 100644 var/spack/packages/libevent/package.py delete mode 100644 var/spack/packages/libffi/package.py delete mode 100644 var/spack/packages/libgcrypt/package.py delete mode 100644 var/spack/packages/libgpg-error/package.py delete mode 100644 
var/spack/packages/libjpeg-turbo/package.py delete mode 100644 var/spack/packages/libjson-c/package.py delete mode 100644 var/spack/packages/libmng/package.py delete mode 100644 var/spack/packages/libmonitor/package.py delete mode 100644 var/spack/packages/libpciaccess/package.py delete mode 100644 var/spack/packages/libpng/package.py delete mode 100644 var/spack/packages/libsodium/package.py delete mode 100644 var/spack/packages/libtiff/package.py delete mode 100644 var/spack/packages/libtool/package.py delete mode 100644 var/spack/packages/libunwind/package.py delete mode 100644 var/spack/packages/libuuid/package.py delete mode 100644 var/spack/packages/libxcb/package.py delete mode 100644 var/spack/packages/libxml2/package.py delete mode 100644 var/spack/packages/libxshmfence/package.py delete mode 100644 var/spack/packages/libxslt/package.py delete mode 100644 var/spack/packages/llvm-lld/package.py delete mode 100644 var/spack/packages/llvm/package.py delete mode 100644 var/spack/packages/lmdb/package.py delete mode 100644 var/spack/packages/lua/package.py delete mode 100644 var/spack/packages/lwgrp/package.py delete mode 100644 var/spack/packages/lwm2/package.py delete mode 100644 var/spack/packages/matio/package.py delete mode 100644 var/spack/packages/memaxes/package.py delete mode 100644 var/spack/packages/mesa/package.py delete mode 100644 var/spack/packages/metis/package.py delete mode 100644 var/spack/packages/mpc/package.py delete mode 100644 var/spack/packages/mpe2/mpe2.patch delete mode 100644 var/spack/packages/mpe2/package.py delete mode 100644 var/spack/packages/mpfr/package.py delete mode 100644 var/spack/packages/mpibash/mpibash-4.3.patch delete mode 100644 var/spack/packages/mpibash/package.py delete mode 100644 var/spack/packages/mpich/package.py delete mode 100644 var/spack/packages/mpileaks/package.py delete mode 100644 var/spack/packages/mrnet/package.py delete mode 100644 var/spack/packages/munge/package.py delete mode 100644 var/spack/packages/muster/package.py delete mode 100644 var/spack/packages/mvapich2/ad_lustre_rwcontig_open_source.patch delete mode 100644 var/spack/packages/mvapich2/package.py delete mode 100644 var/spack/packages/nasm/package.py delete mode 100644 var/spack/packages/ncdu/package.py delete mode 100644 var/spack/packages/ncurses/package.py delete mode 100644 var/spack/packages/netcdf/netcdf-4.3.3-mpi.patch delete mode 100644 var/spack/packages/netcdf/package.py delete mode 100644 var/spack/packages/netgauge/package.py delete mode 100644 var/spack/packages/netlib-blas/package.py delete mode 100644 var/spack/packages/netlib-lapack/package.py delete mode 100644 var/spack/packages/nettle/package.py delete mode 100644 var/spack/packages/ompss/package.py delete mode 100644 var/spack/packages/ompt-openmp/package.py delete mode 100644 var/spack/packages/opari2/package.py delete mode 100644 var/spack/packages/openmpi/ad_lustre_rwcontig_open_source.patch delete mode 100644 var/spack/packages/openmpi/llnl-platforms.patch delete mode 100644 var/spack/packages/openmpi/package.py delete mode 100644 var/spack/packages/openssl/package.py delete mode 100644 var/spack/packages/otf/package.py delete mode 100644 var/spack/packages/otf2/package.py delete mode 100644 var/spack/packages/pango/package.py delete mode 100644 var/spack/packages/papi/package.py delete mode 100644 var/spack/packages/paraver/package.py delete mode 100644 var/spack/packages/paraview/package.py delete mode 100644 var/spack/packages/parmetis/package.py delete mode 100644 
var/spack/packages/parpack/package.py delete mode 100644 var/spack/packages/pcre/package.py delete mode 100644 var/spack/packages/petsc/package.py delete mode 100644 var/spack/packages/pidx/package.py delete mode 100644 var/spack/packages/pixman/package.py delete mode 100644 var/spack/packages/pkg-config/package.py delete mode 100644 var/spack/packages/pmgr_collective/package.py delete mode 100644 var/spack/packages/postgresql/package.py delete mode 100644 var/spack/packages/ppl/package.py delete mode 100644 var/spack/packages/protobuf/package.py delete mode 100644 var/spack/packages/py-basemap/package.py delete mode 100644 var/spack/packages/py-biopython/package.py delete mode 100644 var/spack/packages/py-cffi/package.py delete mode 100644 var/spack/packages/py-cython/package.py delete mode 100644 var/spack/packages/py-dateutil/package.py delete mode 100644 var/spack/packages/py-epydoc/package.py delete mode 100644 var/spack/packages/py-genders/package.py delete mode 100644 var/spack/packages/py-gnuplot/package.py delete mode 100644 var/spack/packages/py-h5py/package.py delete mode 100644 var/spack/packages/py-ipython/package.py delete mode 100644 var/spack/packages/py-libxml2/package.py delete mode 100644 var/spack/packages/py-lockfile/package.py delete mode 100644 var/spack/packages/py-mako/package.py delete mode 100644 var/spack/packages/py-matplotlib/package.py delete mode 100644 var/spack/packages/py-mock/package.py delete mode 100644 var/spack/packages/py-mpi4py/package.py delete mode 100644 var/spack/packages/py-mx/package.py delete mode 100644 var/spack/packages/py-nose/package.py delete mode 100644 var/spack/packages/py-numpy/package.py delete mode 100644 var/spack/packages/py-pandas/package.py delete mode 100644 var/spack/packages/py-pexpect/package.py delete mode 100644 var/spack/packages/py-pil/package.py delete mode 100644 var/spack/packages/py-pmw/package.py delete mode 100644 var/spack/packages/py-pychecker/package.py delete mode 100644 var/spack/packages/py-pycparser/package.py delete mode 100644 var/spack/packages/py-pyelftools/package.py delete mode 100644 var/spack/packages/py-pygments/package.py delete mode 100644 var/spack/packages/py-pylint/package.py delete mode 100644 var/spack/packages/py-pypar/package.py delete mode 100644 var/spack/packages/py-pyparsing/package.py delete mode 100644 var/spack/packages/py-pyqt/package.py delete mode 100644 var/spack/packages/py-pyside/package.py delete mode 100644 var/spack/packages/py-python-daemon/package.py delete mode 100644 var/spack/packages/py-pytz/package.py delete mode 100644 var/spack/packages/py-rpy2/package.py delete mode 100644 var/spack/packages/py-scientificpython/package.py delete mode 100644 var/spack/packages/py-scikit-learn/package.py delete mode 100644 var/spack/packages/py-scipy/package.py delete mode 100644 var/spack/packages/py-setuptools/package.py delete mode 100644 var/spack/packages/py-shiboken/package.py delete mode 100644 var/spack/packages/py-sip/package.py delete mode 100644 var/spack/packages/py-six/package.py delete mode 100644 var/spack/packages/py-sphinx/package.py delete mode 100644 var/spack/packages/py-sympy/package.py delete mode 100644 var/spack/packages/py-virtualenv/package.py delete mode 100644 var/spack/packages/py-yapf/package.py delete mode 100644 var/spack/packages/python/package.py delete mode 100644 var/spack/packages/qhull/package.py delete mode 100644 var/spack/packages/qt/package.py delete mode 100644 var/spack/packages/qthreads/package.py delete mode 100644 
var/spack/packages/ravel/package.py delete mode 100644 var/spack/packages/readline/package.py delete mode 100644 var/spack/packages/rose/add_spack_compiler_recognition.patch delete mode 100644 var/spack/packages/rose/package.py delete mode 100644 var/spack/packages/ruby/package.py delete mode 100644 var/spack/packages/samtools/package.py delete mode 100644 var/spack/packages/samtools/samtools1.2.patch delete mode 100644 var/spack/packages/scalasca/package.py delete mode 100644 var/spack/packages/scorep/package.py delete mode 100644 var/spack/packages/scotch/package.py delete mode 100644 var/spack/packages/scr/package.py delete mode 100644 var/spack/packages/silo/package.py delete mode 100644 var/spack/packages/snappy/package.py delete mode 100644 var/spack/packages/spindle/package.py delete mode 100644 var/spack/packages/sqlite/package.py delete mode 100644 var/spack/packages/stat/configure_mpicxx.patch delete mode 100644 var/spack/packages/stat/package.py delete mode 100644 var/spack/packages/sundials/package.py delete mode 100644 var/spack/packages/swig/package.py delete mode 100644 var/spack/packages/task/package.py delete mode 100644 var/spack/packages/taskd/package.py delete mode 100644 var/spack/packages/tau/package.py delete mode 100644 var/spack/packages/tcl/package.py delete mode 100644 var/spack/packages/the_silver_searcher/package.py delete mode 100644 var/spack/packages/thrift/package.py delete mode 100644 var/spack/packages/tk/package.py delete mode 100644 var/spack/packages/tmux/package.py delete mode 100644 var/spack/packages/tmuxinator/package.py delete mode 100644 var/spack/packages/trilinos/package.py delete mode 100644 var/spack/packages/uncrustify/package.py delete mode 100644 var/spack/packages/util-linux/package.py delete mode 100644 var/spack/packages/vim/package.py delete mode 100644 var/spack/packages/vtk/package.py delete mode 100644 var/spack/packages/wget/package.py delete mode 100644 var/spack/packages/wx/package.py delete mode 100644 var/spack/packages/wxpropgrid/package.py delete mode 100644 var/spack/packages/xcb-proto/package.py delete mode 100644 var/spack/packages/xz/package.py delete mode 100644 var/spack/packages/yasm/package.py delete mode 100644 var/spack/packages/zeromq/package.py delete mode 100644 var/spack/packages/zlib/package.py delete mode 100644 var/spack/packages/zsh/package.py create mode 100644 var/spack/repos/builtin.mock/packages/a/package.py create mode 100644 var/spack/repos/builtin.mock/packages/b/package.py create mode 100644 var/spack/repos/builtin.mock/packages/c/package.py create mode 100644 var/spack/repos/builtin.mock/packages/callpath/package.py create mode 100644 var/spack/repos/builtin.mock/packages/direct_mpich/package.py create mode 100644 var/spack/repos/builtin.mock/packages/dyninst/package.py create mode 100644 var/spack/repos/builtin.mock/packages/e/package.py create mode 100644 var/spack/repos/builtin.mock/packages/fake/package.py create mode 100644 var/spack/repos/builtin.mock/packages/git-test/package.py create mode 100644 var/spack/repos/builtin.mock/packages/hg-test/package.py create mode 100644 var/spack/repos/builtin.mock/packages/indirect_mpich/package.py create mode 100644 var/spack/repos/builtin.mock/packages/libdwarf/package.py create mode 100644 var/spack/repos/builtin.mock/packages/libelf/package.py create mode 100644 var/spack/repos/builtin.mock/packages/mpich/package.py create mode 100644 var/spack/repos/builtin.mock/packages/mpich2/package.py create mode 100644 
var/spack/repos/builtin.mock/packages/mpileaks/package.py create mode 100644 var/spack/repos/builtin.mock/packages/multimethod/package.py create mode 100644 var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py create mode 100644 var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py create mode 100644 var/spack/repos/builtin.mock/packages/optional-dep-test/package.py create mode 100644 var/spack/repos/builtin.mock/packages/svn-test/package.py create mode 100644 var/spack/repos/builtin.mock/packages/trivial_install_test_package/package.py create mode 100644 var/spack/repos/builtin.mock/packages/zmpi/package.py create mode 100644 var/spack/repos/builtin.mock/repo.yaml create mode 100644 var/spack/repos/builtin/packages/ImageMagick/package.py create mode 100644 var/spack/repos/builtin/packages/Mitos/package.py create mode 100644 var/spack/repos/builtin/packages/R/package.py create mode 100644 var/spack/repos/builtin/packages/SAMRAI/no-tool-build.patch create mode 100644 var/spack/repos/builtin/packages/SAMRAI/package.py create mode 100644 var/spack/repos/builtin/packages/activeharmony/package.py create mode 100644 var/spack/repos/builtin/packages/adept-utils/package.py create mode 100644 var/spack/repos/builtin/packages/apex/package.py create mode 100644 var/spack/repos/builtin/packages/arpack/package.py create mode 100644 var/spack/repos/builtin/packages/asciidoc/package.py create mode 100644 var/spack/repos/builtin/packages/atk/package.py create mode 100644 var/spack/repos/builtin/packages/atlas/package.py create mode 100644 var/spack/repos/builtin/packages/autoconf/package.py create mode 100644 var/spack/repos/builtin/packages/automaded/package.py create mode 100644 var/spack/repos/builtin/packages/automake/package.py create mode 100644 var/spack/repos/builtin/packages/bear/package.py create mode 100644 var/spack/repos/builtin/packages/bib2xhtml/package.py create mode 100644 var/spack/repos/builtin/packages/binutils/package.py create mode 100644 var/spack/repos/builtin/packages/bison/package.py create mode 100644 var/spack/repos/builtin/packages/boost/package.py create mode 100644 var/spack/repos/builtin/packages/bowtie2/bowtie2-2.5.patch create mode 100644 var/spack/repos/builtin/packages/bowtie2/package.py create mode 100644 var/spack/repos/builtin/packages/boxlib/package.py create mode 100644 var/spack/repos/builtin/packages/bzip2/package.py create mode 100644 var/spack/repos/builtin/packages/cairo/package.py create mode 100644 var/spack/repos/builtin/packages/callpath/package.py create mode 100644 var/spack/repos/builtin/packages/cblas/package.py create mode 100644 var/spack/repos/builtin/packages/cgm/package.py create mode 100644 var/spack/repos/builtin/packages/clang/package.py create mode 100644 var/spack/repos/builtin/packages/cloog/package.py create mode 100644 var/spack/repos/builtin/packages/cmake/package.py create mode 100644 var/spack/repos/builtin/packages/coreutils/package.py create mode 100644 var/spack/repos/builtin/packages/cppcheck/package.py create mode 100644 var/spack/repos/builtin/packages/cram/package.py create mode 100644 var/spack/repos/builtin/packages/cscope/package.py create mode 100644 var/spack/repos/builtin/packages/cube/package.py create mode 100644 var/spack/repos/builtin/packages/czmq/package.py create mode 100644 var/spack/repos/builtin/packages/dbus/package.py create mode 100644 var/spack/repos/builtin/packages/docbook-xml/package.py create mode 100644 var/spack/repos/builtin/packages/doxygen/package.py create mode 100644 
var/spack/repos/builtin/packages/dri2proto/package.py create mode 100644 var/spack/repos/builtin/packages/dtcmp/package.py create mode 100644 var/spack/repos/builtin/packages/dyninst/package.py create mode 100644 var/spack/repos/builtin/packages/elfutils/package.py create mode 100644 var/spack/repos/builtin/packages/extrae/package.py create mode 100644 var/spack/repos/builtin/packages/exuberant-ctags/package.py create mode 100644 var/spack/repos/builtin/packages/fish/package.py create mode 100644 var/spack/repos/builtin/packages/flex/package.py create mode 100644 var/spack/repos/builtin/packages/flux/package.py create mode 100644 var/spack/repos/builtin/packages/fontconfig/package.py create mode 100644 var/spack/repos/builtin/packages/freetype/package.py create mode 100644 var/spack/repos/builtin/packages/gasnet/package.py create mode 100644 var/spack/repos/builtin/packages/gcc/package.py create mode 100644 var/spack/repos/builtin/packages/gdk-pixbuf/package.py create mode 100644 var/spack/repos/builtin/packages/geos/package.py create mode 100644 var/spack/repos/builtin/packages/gflags/package.py create mode 100644 var/spack/repos/builtin/packages/ghostscript/package.py create mode 100644 var/spack/repos/builtin/packages/git/package.py create mode 100644 var/spack/repos/builtin/packages/glib/package.py create mode 100644 var/spack/repos/builtin/packages/glm/package.py create mode 100644 var/spack/repos/builtin/packages/global/package.py create mode 100644 var/spack/repos/builtin/packages/glog/package.py create mode 100644 var/spack/repos/builtin/packages/gmp/package.py create mode 100644 var/spack/repos/builtin/packages/gnutls/package.py create mode 100644 var/spack/repos/builtin/packages/gperf/package.py create mode 100644 var/spack/repos/builtin/packages/gperftools/package.py create mode 100644 var/spack/repos/builtin/packages/graphlib/package.py create mode 100644 var/spack/repos/builtin/packages/graphviz/package.py create mode 100644 var/spack/repos/builtin/packages/gtkplus/package.py create mode 100644 var/spack/repos/builtin/packages/harfbuzz/package.py create mode 100644 var/spack/repos/builtin/packages/hdf5/package.py create mode 100644 var/spack/repos/builtin/packages/hwloc/package.py create mode 100644 var/spack/repos/builtin/packages/hypre/package.py create mode 100644 var/spack/repos/builtin/packages/icu/package.py create mode 100644 var/spack/repos/builtin/packages/icu4c/package.py create mode 100644 var/spack/repos/builtin/packages/isl/package.py create mode 100644 var/spack/repos/builtin/packages/jdk/package.py create mode 100644 var/spack/repos/builtin/packages/jpeg/package.py create mode 100644 var/spack/repos/builtin/packages/launchmon/package.py create mode 100644 var/spack/repos/builtin/packages/launchmon/patch.lmon_install_dir create mode 100644 var/spack/repos/builtin/packages/lcms/package.py create mode 100644 var/spack/repos/builtin/packages/leveldb/package.py create mode 100644 var/spack/repos/builtin/packages/libNBC/package.py create mode 100644 var/spack/repos/builtin/packages/libarchive/package.py create mode 100644 var/spack/repos/builtin/packages/libcircle/package.py create mode 100644 var/spack/repos/builtin/packages/libdrm/package.py create mode 100644 var/spack/repos/builtin/packages/libdwarf/package.py create mode 100644 var/spack/repos/builtin/packages/libelf/package.py create mode 100644 var/spack/repos/builtin/packages/libevent/package.py create mode 100644 var/spack/repos/builtin/packages/libffi/package.py create mode 100644 
var/spack/repos/builtin/packages/libgcrypt/package.py create mode 100644 var/spack/repos/builtin/packages/libgpg-error/package.py create mode 100644 var/spack/repos/builtin/packages/libjpeg-turbo/package.py create mode 100644 var/spack/repos/builtin/packages/libjson-c/package.py create mode 100644 var/spack/repos/builtin/packages/libmng/package.py create mode 100644 var/spack/repos/builtin/packages/libmonitor/package.py create mode 100644 var/spack/repos/builtin/packages/libpciaccess/package.py create mode 100644 var/spack/repos/builtin/packages/libpng/package.py create mode 100644 var/spack/repos/builtin/packages/libsodium/package.py create mode 100644 var/spack/repos/builtin/packages/libtiff/package.py create mode 100644 var/spack/repos/builtin/packages/libtool/package.py create mode 100644 var/spack/repos/builtin/packages/libunwind/package.py create mode 100644 var/spack/repos/builtin/packages/libuuid/package.py create mode 100644 var/spack/repos/builtin/packages/libxcb/package.py create mode 100644 var/spack/repos/builtin/packages/libxml2/package.py create mode 100644 var/spack/repos/builtin/packages/libxshmfence/package.py create mode 100644 var/spack/repos/builtin/packages/libxslt/package.py create mode 100644 var/spack/repos/builtin/packages/llvm-lld/package.py create mode 100644 var/spack/repos/builtin/packages/llvm/package.py create mode 100644 var/spack/repos/builtin/packages/lmdb/package.py create mode 100644 var/spack/repos/builtin/packages/lua/package.py create mode 100644 var/spack/repos/builtin/packages/lwgrp/package.py create mode 100644 var/spack/repos/builtin/packages/lwm2/package.py create mode 100644 var/spack/repos/builtin/packages/matio/package.py create mode 100644 var/spack/repos/builtin/packages/memaxes/package.py create mode 100644 var/spack/repos/builtin/packages/mesa/package.py create mode 100644 var/spack/repos/builtin/packages/metis/package.py create mode 100644 var/spack/repos/builtin/packages/mpc/package.py create mode 100644 var/spack/repos/builtin/packages/mpe2/mpe2.patch create mode 100644 var/spack/repos/builtin/packages/mpe2/package.py create mode 100644 var/spack/repos/builtin/packages/mpfr/package.py create mode 100644 var/spack/repos/builtin/packages/mpibash/mpibash-4.3.patch create mode 100644 var/spack/repos/builtin/packages/mpibash/package.py create mode 100644 var/spack/repos/builtin/packages/mpich/package.py create mode 100644 var/spack/repos/builtin/packages/mpileaks/package.py create mode 100644 var/spack/repos/builtin/packages/mrnet/package.py create mode 100644 var/spack/repos/builtin/packages/munge/package.py create mode 100644 var/spack/repos/builtin/packages/muster/package.py create mode 100644 var/spack/repos/builtin/packages/mvapich2/ad_lustre_rwcontig_open_source.patch create mode 100644 var/spack/repos/builtin/packages/mvapich2/package.py create mode 100644 var/spack/repos/builtin/packages/nasm/package.py create mode 100644 var/spack/repos/builtin/packages/ncdu/package.py create mode 100644 var/spack/repos/builtin/packages/ncurses/package.py create mode 100644 var/spack/repos/builtin/packages/netcdf/netcdf-4.3.3-mpi.patch create mode 100644 var/spack/repos/builtin/packages/netcdf/package.py create mode 100644 var/spack/repos/builtin/packages/netgauge/package.py create mode 100644 var/spack/repos/builtin/packages/netlib-blas/package.py create mode 100644 var/spack/repos/builtin/packages/netlib-lapack/package.py create mode 100644 var/spack/repos/builtin/packages/nettle/package.py create mode 100644 
var/spack/repos/builtin/packages/ompss/package.py create mode 100644 var/spack/repos/builtin/packages/ompt-openmp/package.py create mode 100644 var/spack/repos/builtin/packages/opari2/package.py create mode 100644 var/spack/repos/builtin/packages/openmpi/ad_lustre_rwcontig_open_source.patch create mode 100644 var/spack/repos/builtin/packages/openmpi/llnl-platforms.patch create mode 100644 var/spack/repos/builtin/packages/openmpi/package.py create mode 100644 var/spack/repos/builtin/packages/openssl/package.py create mode 100644 var/spack/repos/builtin/packages/otf/package.py create mode 100644 var/spack/repos/builtin/packages/otf2/package.py create mode 100644 var/spack/repos/builtin/packages/pango/package.py create mode 100644 var/spack/repos/builtin/packages/papi/package.py create mode 100644 var/spack/repos/builtin/packages/paraver/package.py create mode 100644 var/spack/repos/builtin/packages/paraview/package.py create mode 100644 var/spack/repos/builtin/packages/parmetis/package.py create mode 100644 var/spack/repos/builtin/packages/parpack/package.py create mode 100644 var/spack/repos/builtin/packages/pcre/package.py create mode 100644 var/spack/repos/builtin/packages/petsc/package.py create mode 100644 var/spack/repos/builtin/packages/pidx/package.py create mode 100644 var/spack/repos/builtin/packages/pixman/package.py create mode 100644 var/spack/repos/builtin/packages/pkg-config/package.py create mode 100644 var/spack/repos/builtin/packages/pmgr_collective/package.py create mode 100644 var/spack/repos/builtin/packages/postgresql/package.py create mode 100644 var/spack/repos/builtin/packages/ppl/package.py create mode 100644 var/spack/repos/builtin/packages/protobuf/package.py create mode 100644 var/spack/repos/builtin/packages/py-basemap/package.py create mode 100644 var/spack/repos/builtin/packages/py-biopython/package.py create mode 100644 var/spack/repos/builtin/packages/py-cffi/package.py create mode 100644 var/spack/repos/builtin/packages/py-cython/package.py create mode 100644 var/spack/repos/builtin/packages/py-dateutil/package.py create mode 100644 var/spack/repos/builtin/packages/py-epydoc/package.py create mode 100644 var/spack/repos/builtin/packages/py-genders/package.py create mode 100644 var/spack/repos/builtin/packages/py-gnuplot/package.py create mode 100644 var/spack/repos/builtin/packages/py-h5py/package.py create mode 100644 var/spack/repos/builtin/packages/py-ipython/package.py create mode 100644 var/spack/repos/builtin/packages/py-libxml2/package.py create mode 100644 var/spack/repos/builtin/packages/py-lockfile/package.py create mode 100644 var/spack/repos/builtin/packages/py-mako/package.py create mode 100644 var/spack/repos/builtin/packages/py-matplotlib/package.py create mode 100644 var/spack/repos/builtin/packages/py-mock/package.py create mode 100644 var/spack/repos/builtin/packages/py-mpi4py/package.py create mode 100644 var/spack/repos/builtin/packages/py-mx/package.py create mode 100644 var/spack/repos/builtin/packages/py-nose/package.py create mode 100644 var/spack/repos/builtin/packages/py-numpy/package.py create mode 100644 var/spack/repos/builtin/packages/py-pandas/package.py create mode 100644 var/spack/repos/builtin/packages/py-pexpect/package.py create mode 100644 var/spack/repos/builtin/packages/py-pil/package.py create mode 100644 var/spack/repos/builtin/packages/py-pmw/package.py create mode 100644 var/spack/repos/builtin/packages/py-pychecker/package.py create mode 100644 var/spack/repos/builtin/packages/py-pycparser/package.py create mode 
100644 var/spack/repos/builtin/packages/py-pyelftools/package.py create mode 100644 var/spack/repos/builtin/packages/py-pygments/package.py create mode 100644 var/spack/repos/builtin/packages/py-pylint/package.py create mode 100644 var/spack/repos/builtin/packages/py-pypar/package.py create mode 100644 var/spack/repos/builtin/packages/py-pyparsing/package.py create mode 100644 var/spack/repos/builtin/packages/py-pyqt/package.py create mode 100644 var/spack/repos/builtin/packages/py-pyside/package.py create mode 100644 var/spack/repos/builtin/packages/py-python-daemon/package.py create mode 100644 var/spack/repos/builtin/packages/py-pytz/package.py create mode 100644 var/spack/repos/builtin/packages/py-rpy2/package.py create mode 100644 var/spack/repos/builtin/packages/py-scientificpython/package.py create mode 100644 var/spack/repos/builtin/packages/py-scikit-learn/package.py create mode 100644 var/spack/repos/builtin/packages/py-scipy/package.py create mode 100644 var/spack/repos/builtin/packages/py-setuptools/package.py create mode 100644 var/spack/repos/builtin/packages/py-shiboken/package.py create mode 100644 var/spack/repos/builtin/packages/py-sip/package.py create mode 100644 var/spack/repos/builtin/packages/py-six/package.py create mode 100644 var/spack/repos/builtin/packages/py-sphinx/package.py create mode 100644 var/spack/repos/builtin/packages/py-sympy/package.py create mode 100644 var/spack/repos/builtin/packages/py-virtualenv/package.py create mode 100644 var/spack/repos/builtin/packages/py-yapf/package.py create mode 100644 var/spack/repos/builtin/packages/python/package.py create mode 100644 var/spack/repos/builtin/packages/qhull/package.py create mode 100644 var/spack/repos/builtin/packages/qt/package.py create mode 100644 var/spack/repos/builtin/packages/qthreads/package.py create mode 100644 var/spack/repos/builtin/packages/ravel/package.py create mode 100644 var/spack/repos/builtin/packages/readline/package.py create mode 100644 var/spack/repos/builtin/packages/rose/add_spack_compiler_recognition.patch create mode 100644 var/spack/repos/builtin/packages/rose/package.py create mode 100644 var/spack/repos/builtin/packages/ruby/package.py create mode 100644 var/spack/repos/builtin/packages/samtools/package.py create mode 100644 var/spack/repos/builtin/packages/samtools/samtools1.2.patch create mode 100644 var/spack/repos/builtin/packages/scalasca/package.py create mode 100644 var/spack/repos/builtin/packages/scorep/package.py create mode 100644 var/spack/repos/builtin/packages/scotch/package.py create mode 100644 var/spack/repos/builtin/packages/scr/package.py create mode 100644 var/spack/repos/builtin/packages/silo/package.py create mode 100644 var/spack/repos/builtin/packages/snappy/package.py create mode 100644 var/spack/repos/builtin/packages/spindle/package.py create mode 100644 var/spack/repos/builtin/packages/sqlite/package.py create mode 100644 var/spack/repos/builtin/packages/stat/configure_mpicxx.patch create mode 100644 var/spack/repos/builtin/packages/stat/package.py create mode 100644 var/spack/repos/builtin/packages/sundials/package.py create mode 100644 var/spack/repos/builtin/packages/swig/package.py create mode 100644 var/spack/repos/builtin/packages/task/package.py create mode 100644 var/spack/repos/builtin/packages/taskd/package.py create mode 100644 var/spack/repos/builtin/packages/tau/package.py create mode 100644 var/spack/repos/builtin/packages/tcl/package.py create mode 100644 var/spack/repos/builtin/packages/the_silver_searcher/package.py create 
mode 100644 var/spack/repos/builtin/packages/thrift/package.py create mode 100644 var/spack/repos/builtin/packages/tk/package.py create mode 100644 var/spack/repos/builtin/packages/tmux/package.py create mode 100644 var/spack/repos/builtin/packages/tmuxinator/package.py create mode 100644 var/spack/repos/builtin/packages/trilinos/package.py create mode 100644 var/spack/repos/builtin/packages/uncrustify/package.py create mode 100644 var/spack/repos/builtin/packages/util-linux/package.py create mode 100644 var/spack/repos/builtin/packages/vim/package.py create mode 100644 var/spack/repos/builtin/packages/vtk/package.py create mode 100644 var/spack/repos/builtin/packages/wget/package.py create mode 100644 var/spack/repos/builtin/packages/wx/package.py create mode 100644 var/spack/repos/builtin/packages/wxpropgrid/package.py create mode 100644 var/spack/repos/builtin/packages/xcb-proto/package.py create mode 100644 var/spack/repos/builtin/packages/xz/package.py create mode 100644 var/spack/repos/builtin/packages/yasm/package.py create mode 100644 var/spack/repos/builtin/packages/zeromq/package.py create mode 100644 var/spack/repos/builtin/packages/zlib/package.py create mode 100644 var/spack/repos/builtin/packages/zsh/package.py create mode 100644 var/spack/repos/builtin/repo.yaml

diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 4f481ce937..aab20cb260 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -43,7 +43,7 @@ test_path = join_path(module_path, "test")
 hooks_path = join_path(module_path, "hooks")
 var_path = join_path(spack_root, "var", "spack")
 stage_path = join_path(var_path, "stage")
-packages_path = join_path(var_path, "packages")
+repos_path = join_path(var_path, "repos")
 share_path = join_path(spack_root, "share", "spack")
 
 prefix = spack_root
@@ -58,8 +58,12 @@ import spack.repository
 _repo_paths = spack.config.get_repos_config()
 if not _repo_paths:
     tty.die("Spack configuration contains no package repositories.")
-repo = spack.repository.RepoPath(*_repo_paths)
-sys.meta_path.append(repo)
+
+try:
+    repo = spack.repository.RepoPath(*_repo_paths)
+    sys.meta_path.append(repo)
+except spack.repository.BadRepoError, e:
+    tty.die('Bad repository. %s' % e.message)
 
 #
 # Set up the installed packages database
@@ -68,9 +72,10 @@ from spack.database import Database
 installed_db = Database(install_path)
 
 #
-# Paths to mock files for testing.
+# Paths to built-in Spack repositories.
# -mock_packages_path = join_path(var_path, "mock_packages") +packages_path = join_path(repos_path, "builtin") +mock_packages_path = join_path(repos_path, "builtin.mock") mock_config_path = join_path(var_path, "mock_configs") mock_site_config = join_path(mock_config_path, "site_spackconfig") diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py index 85cc83730c..395aa90bed 100644 --- a/lib/spack/spack/cmd/repo.py +++ b/lib/spack/spack/cmd/repo.py @@ -32,7 +32,7 @@ from llnl.util.filesystem import join_path, mkdirp import spack.spec import spack.config from spack.util.environment import get_path -from spack.repository import repo_config_filename +from spack.repository import repo_config_name import os import exceptions diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index c1545b3654..a2c0bbe147 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -26,28 +26,32 @@ import os import exceptions import sys import inspect -import glob import imp import re -import itertools import traceback from bisect import bisect_left from external import yaml import llnl.util.tty as tty from llnl.util.filesystem import join_path -from llnl.util.lang import * import spack.error import spack.spec from spack.virtual import ProviderIndex from spack.util.naming import * -# Filename for package repo names -repo_config_filename = '_repo.yaml' +# +# Super-namespace for all packages. +# Package modules are imported as spack.pkg... +# +repo_namespace = 'spack.pkg' -# Filename for packages in repos. -package_file_name = 'package.py' +# +# These names describe how repos should be laid out in the filesystem. +# +repo_config_name = 'repo.yaml' # Top-level filename for repo config. +packages_dir_name = 'packages' # Top-level repo directory containing pkgs. +package_file_name = 'package.py' # Filename for packages in a repository. def _autospec(function): """Decorator that automatically converts the argument of a single-arg @@ -74,7 +78,10 @@ class RepoPath(object): combined results of the Repos in its list instead of on a single package repository. """ - def __init__(self, *repo_dirs): + def __init__(self, *repo_dirs, **kwargs): + # super-namespace for all packages in the RepoPath + self.super_namespace = kwargs.get('namespace', repo_namespace) + self.repos = [] self.by_namespace = NamespaceTrie() self.by_path = {} @@ -82,11 +89,9 @@ class RepoPath(object): self._all_package_names = [] self._provider_index = None + # Add each repo to this path. for root in repo_dirs: - # Try to make it a repo if it's not one. - if not isinstance(root, Repo): - repo = Repo(root) - # Add the repo to the path. + repo = Repo(root, self.super_namespace) self.put_last(repo) @@ -120,11 +125,11 @@ class RepoPath(object): repo, self.by_path[repo.root]) if repo.namespace in self.by_namespace: - raise DuplicateRepoError("Package repos cannot have the same name", + raise DuplicateRepoError("Package repos cannot provide the same namespace", repo, self.by_namespace[repo.namespace]) # Add repo to the pkg indexes - self.by_namespace[repo.namespace] = repo + self.by_namespace[repo.full_namespace] = repo self.by_path[repo.root] = repo # add names to the cached name list @@ -185,10 +190,10 @@ class RepoPath(object): # If it's a module in some repo, or if it is the repo's # namespace, let the repo handle it. 
for repo in self.repos: - if namespace == repo.namespace: + if namespace == repo.full_namespace: if repo.real_name(module_name): return repo - elif fullname == repo.namespace: + elif fullname == repo.full_namespace: return repo # No repo provides the namespace, but it is a valid prefix of @@ -200,13 +205,14 @@ class RepoPath(object): def load_module(self, fullname): - """Loads containing namespaces when necessary. + """Handles loading container namespaces when necessary. See ``Repo`` for how actual package modules are loaded. """ if fullname in sys.modules: return sys.modules[fullname] + # partition fullname into prefix and module name. namespace, dot, module_name = fullname.rpartition('.') @@ -252,41 +258,67 @@ class Repo(object): """Class representing a package repository in the filesystem. Each package repository must have a top-level configuration file - called `_repo.yaml`. + called `repo.yaml`. - Currently, `_repo.yaml` this must define: + Currently, `repo.yaml` must define: `namespace`: A Python namespace where the repository's packages should live. """ - def __init__(self, root): - """Instantiate a package repository from a filesystem path.""" + def __init__(self, root, namespace=repo_namespace): + """Instantiate a package repository from a filesystem path. + + Arguments: + root The root directory of the repository. + + namespace A super-namespace that will contain the repo-defined + namespace (this is generally just `spack.pkg`). The + super-namespace is Spack's way of separating repositories + from other python namespaces. + + """ # Root directory, containing _repo.yaml and package dirs self.root = root - # Config file in /_repo.yaml - self.config_file = os.path.join(self.root, repo_config_filename) + # super-namespace for all packages in the Repo + self.super_namespace = namespace - # Read configuration from _repo.yaml + # check and raise BadRepoError on fail. + def check(condition, msg): + if not condition: raise BadRepoError(msg) + + # Validate repository layout. + self.config_file = join_path(self.root, repo_config_name) + check(os.path.isfile(self.config_file), + "No %s found in '%s'" % (repo_config_name, root)) + self.packages_path = join_path(self.root, packages_dir_name) + check(os.path.isdir(self.packages_path), + "No directory '%s' found in '%s'" % (packages_dir_name, root)) + + # Read configuration and validate namespace config = self._read_config() - if not 'namespace' in config: - tty.die('Package repo in %s must define a namespace in %s.' - % (self.root, repo_config_filename)) + check('namespace' in config, '%s must define a namespace.' % join_path(self.root, repo_config_name)) - # Check namespace in the repository configuration. self.namespace = config['namespace'] - if not re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace): - tty.die(("Invalid namespace '%s' in '%s'. Namespaces must be " - "valid python identifiers separated by '.'") - % (self.namespace, self.root)) - self._names = self.namespace.split('.') + check(re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace), + ("Invalid namespace '%s' in repo '%s'. " % (self.namespace, self.root)) + + "Namespaces must be valid python identifiers separated by '.'") + + # Set up 'full_namespace' to include the super-namespace + if self.super_namespace: + self.full_namespace = "%s.%s" % (self.super_namespace, self.namespace) + else: + self.full_namespace = self.namespace + + # Keep name components around for checking prefixes. + self._names = self.full_namespace.split('.') # These are internal cache variables.
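Concretely, a repository that passes the layout checks above looks roughly like the sketch below; `myrepo` and `libelf` are placeholder names, and the repo.yaml keys follow the same `repo:`/`namespace:` format as the `_repo.yaml` files removed later in this patch:

    myrepo/
        repo.yaml            # repo_config_name: top-level repo configuration
        packages/            # packages_dir_name: one subdirectory per package
            libelf/
                package.py   # package_file_name

    # repo.yaml
    repo:
      namespace: myrepo      # packages then import as spack.pkg.myrepo.libelf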
self._modules = {} self._classes = {} self._instances = {} - self._provider_index = None self._all_package_names = None @@ -301,11 +333,27 @@ class Repo(object): we don't get runtime warnings from Python's module system. """ + parent = None for l in range(1, len(self._names)+1): ns = '.'.join(self._names[:l]) if not ns in sys.modules: - sys.modules[ns] = _make_namespace_module(ns) - sys.modules[ns].__loader__ = self + module = _make_namespace_module(ns) + module.__loader__ = self + sys.modules[ns] = module + + # Ensure the namespace is an attribute of its parent, + # if it has not been set by something else already. + # + # This ensures that we can do things like: + # import spack.pkg.builtin.mpich as mpich + if parent: + modname = self._names[l-1] + if not hasattr(parent, modname): + setattr(parent, modname, module) + else: + # no need to set up a module, but keep track of the parent. + module = sys.modules[ns] + parent = module def real_name(self, import_name): @@ -349,7 +397,7 @@ class Repo(object): return self namespace, dot, module_name = fullname.rpartition('.') - if namespace == self.namespace: + if namespace == self.full_namespace: if self.real_name(module_name): return self @@ -369,14 +417,14 @@ class Repo(object): if self.is_prefix(fullname): module = _make_namespace_module(fullname) - elif namespace == self.namespace: + elif namespace == self.full_namespace: real_name = self.real_name(module_name) if not real_name: - raise ImportError("No module %s in repo %s" % (module_name, namespace)) + raise ImportError("No module %s in %s" % (module_name, self)) module = self._get_pkg_module(real_name) else: - raise ImportError("No module %s in repo %s" % (fullname, self.namespace)) + raise ImportError("No module %s in %s" % (fullname, self)) module.__loader__ = self sys.modules[fullname] = module @@ -392,7 +440,7 @@ class Repo(object): if (not yaml_data or 'repo' not in yaml_data or not isinstance(yaml_data['repo'], dict)): tty.die("Invalid %s in repository %s" - % (repo_config_filename, self.root)) + % (repo_config_name, self.root)) return yaml_data['repo'] @@ -446,7 +494,7 @@ class Repo(object): def dirname_for_package_name(self, pkg_name): """Get the directory name for a particular package. This is the directory that contains its package.py file.""" - return join_path(self.root, pkg_name) + return join_path(self.packages_path, pkg_name) def filename_for_package_name(self, pkg_name): @@ -460,7 +508,6 @@ class Repo(object): """ validate_module_name(pkg_name) pkg_dir = self.dirname_for_package_name(pkg_name) - return join_path(pkg_dir, package_file_name) @@ -469,12 +516,25 @@ class Repo(object): if self._all_package_names is None: self._all_package_names = [] - for pkg_name in os.listdir(self.root): - pkg_dir = join_path(self.root, pkg_name) - pkg_file = join_path(pkg_dir, package_file_name) - if os.path.isfile(pkg_file): - self._all_package_names.append(pkg_name) - + for pkg_name in os.listdir(self.packages_path): + # Skip non-directories in the package root. + pkg_dir = join_path(self.packages_path, pkg_name) + if not os.path.isdir(pkg_dir): + continue + + # Skip directories without a package.py in them. + pkg_file = join_path(self.packages_path, pkg_name, package_file_name) + if not os.path.isfile(pkg_file): + continue + + # Warn about invalid names that look like packages. + if not valid_module_name(pkg_name): + tty.warn("Skipping package at %s. '%s' is not a valid Spack module name." + % (pkg_dir, pkg_name)) + continue + + # All checks passed. Add it to the list.
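The setattr on the parent module above is what makes the dotted 'as' form of import work: on Python 2, `import a.b as x` resolves `b` by attribute lookup on module `a` rather than through sys.modules. A small self-contained illustration of that behavior, using hypothetical module names rather than Spack's real ones:

    import sys
    import types

    parent = types.ModuleType('demo_pkgs')
    child  = types.ModuleType('demo_pkgs.libelf')
    sys.modules['demo_pkgs'] = parent
    sys.modules['demo_pkgs.libelf'] = child

    # Without this line, 'import demo_pkgs.libelf as libelf' raises
    # AttributeError on Python 2, because the 'as' form walks attributes
    # on the parent module after the import itself succeeds.
    setattr(parent, 'libelf', child)

    import demo_pkgs.libelf as libelf
    assert libelf is child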
+ self._all_package_names.append(pkg_name) self._all_package_names.sort() return self._all_package_names @@ -489,7 +549,8 @@ class Repo(object): """Whether a package with the supplied name exists.""" # This does a binary search in the sorted list. idx = bisect_left(self.all_package_names(), pkg_name) - return self._all_package_names[idx] == pkg_name + return (idx < len(self._all_package_names) and + self._all_package_names[idx] == pkg_name) def _get_pkg_module(self, pkg_name): @@ -505,7 +566,7 @@ class Repo(object): file_path = self.filename_for_package_name(pkg_name) if not os.path.exists(file_path): - raise UnknownPackageError(pkg_name, self.namespace) + raise UnknownPackageError(pkg_name, self) if not os.path.isfile(file_path): tty.die("Something's wrong. '%s' is not a file!" % file_path) @@ -513,10 +574,11 @@ class Repo(object): if not os.access(file_path, os.R_OK): tty.die("Cannot read '%s'!" % file_path) - fullname = "%s.%s" % (self.namespace, pkg_name) + # e.g., spack.pkg.builtin.mpich + fullname = "%s.%s" % (self.full_namespace, pkg_name) module = imp.load_source(fullname, file_path) - module.__package__ = self.namespace + module.__package__ = self.full_namespace module.__loader__ = self self._modules[pkg_name] = module @@ -541,7 +603,7 @@ class Repo(object): def __str__(self): - return "<Repo '%s' at '%s'>" % (self.namespace, self.root) + return "[Repo '%s' at '%s']" % (self.namespace, self.root) def __repr__(self): @@ -597,12 +659,18 @@ class Repo(object): yield spec +class BadRepoError(spack.error.SpackError): + """Raised when repo layout is invalid.""" + def __init__(self, msg): + super(BadRepoError, self).__init__(msg) + + class UnknownPackageError(spack.error.SpackError): """Raised when we encounter a package spack doesn't have.""" def __init__(self, name, repo=None): msg = None if repo: - msg = "Package %s not found in packagerepo %s." % (name, repo) + msg = "Package %s not found in repository %s." % (name, repo) else: msg = "Package %s not found."
% name super(UnknownPackageError, self).__init__(msg) diff --git a/var/spack/mock_packages/_repo.yaml b/var/spack/mock_packages/_repo.yaml deleted file mode 100644 index b97b978de3..0000000000 --- a/var/spack/mock_packages/_repo.yaml +++ /dev/null @@ -1,2 +0,0 @@ -repo: - namespace: gov.llnl.spack.mock diff --git a/var/spack/mock_packages/a/package.py b/var/spack/mock_packages/a/package.py deleted file mode 100644 index fa63c08df0..0000000000 --- a/var/spack/mock_packages/a/package.py +++ /dev/null @@ -1,12 +0,0 @@ -from spack import * - -class A(Package): - """Simple package with no dependencies""" - - homepage = "http://www.example.com" - url = "http://www.example.com/a-1.0.tar.gz" - - version('1.0', '0123456789abcdef0123456789abcdef') - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/b/package.py b/var/spack/mock_packages/b/package.py deleted file mode 100644 index cb88aa2157..0000000000 --- a/var/spack/mock_packages/b/package.py +++ /dev/null @@ -1,12 +0,0 @@ -from spack import * - -class B(Package): - """Simple package with no dependencies""" - - homepage = "http://www.example.com" - url = "http://www.example.com/b-1.0.tar.gz" - - version('1.0', '0123456789abcdef0123456789abcdef') - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/c/package.py b/var/spack/mock_packages/c/package.py deleted file mode 100644 index f51b913fa9..0000000000 --- a/var/spack/mock_packages/c/package.py +++ /dev/null @@ -1,12 +0,0 @@ -from spack import * - -class C(Package): - """Simple package with no dependencies""" - - homepage = "http://www.example.com" - url = "http://www.example.com/c-1.0.tar.gz" - - version('1.0', '0123456789abcdef0123456789abcdef') - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/callpath/package.py b/var/spack/mock_packages/callpath/package.py deleted file mode 100644 index 5b6b70ba2a..0000000000 --- a/var/spack/mock_packages/callpath/package.py +++ /dev/null @@ -1,41 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Callpath(Package): - homepage = "https://github.com/tgamblin/callpath" - url = "http://github.com/tgamblin/callpath-1.0.tar.gz" - - version(0.8, 'foobarbaz') - version(0.9, 'foobarbaz') - version(1.0, 'foobarbaz') - - depends_on("dyninst") - depends_on("mpi") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/mock_packages/direct_mpich/package.py b/var/spack/mock_packages/direct_mpich/package.py deleted file mode 100644 index 2ced82521b..0000000000 --- a/var/spack/mock_packages/direct_mpich/package.py +++ /dev/null @@ -1,36 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class DirectMpich(Package): - homepage = "http://www.example.com" - url = "http://www.example.com/direct_mpich-1.0.tar.gz" - - version('1.0', 'foobarbaz') - - depends_on('mpich') - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/dyninst/package.py b/var/spack/mock_packages/dyninst/package.py deleted file mode 100644 index 7998578da1..0000000000 --- a/var/spack/mock_packages/dyninst/package.py +++ /dev/null @@ -1,44 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Dyninst(Package): - homepage = "https://paradyn.org" - url = "http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz" - - version('8.2', 'cxyzab', - url='http://www.paradyn.org/release8.2/DyninstAPI-8.2.tgz') - version('8.1.2', 'bcxyza', - url='http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz') - version('8.1.1', 'abcxyz', - url='http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz') - - depends_on("libelf") - depends_on("libdwarf") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/mock_packages/e/package.py b/var/spack/mock_packages/e/package.py deleted file mode 100644 index 76c6b64c7f..0000000000 --- a/var/spack/mock_packages/e/package.py +++ /dev/null @@ -1,12 +0,0 @@ -from spack import * - -class E(Package): - """Simple package with no dependencies""" - - homepage = "http://www.example.com" - url = "http://www.example.com/e-1.0.tar.gz" - - version('1.0', '0123456789abcdef0123456789abcdef') - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/fake/package.py b/var/spack/mock_packages/fake/package.py deleted file mode 100644 index fb3c2bdd2e..0000000000 --- a/var/spack/mock_packages/fake/package.py +++ /dev/null @@ -1,34 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Fake(Package): - homepage = "http://www.fake-spack-example.org" - url = "http://www.fake-spack-example.org/downloads/fake-1.0.tar.gz" - - version('1.0', 'foobarbaz') - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/git-test/package.py b/var/spack/mock_packages/git-test/package.py deleted file mode 100644 index 689185463c..0000000000 --- a/var/spack/mock_packages/git-test/package.py +++ /dev/null @@ -1,10 +0,0 @@ -from spack import * - -class GitTest(Package): - """Mock package that uses git for fetching.""" - homepage = "http://www.git-fetch-example.com" - - version('git', git='to-be-filled-in-by-test') - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/hg-test/package.py b/var/spack/mock_packages/hg-test/package.py deleted file mode 100644 index 462f1e4c3a..0000000000 --- a/var/spack/mock_packages/hg-test/package.py +++ /dev/null @@ -1,10 +0,0 @@ -from spack import * - -class HgTest(Package): - """Test package that does fetching with mercurial.""" - homepage = "http://www.hg-fetch-example.com" - - version('hg', hg='to-be-filled-in-by-test') - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/indirect_mpich/package.py b/var/spack/mock_packages/indirect_mpich/package.py deleted file mode 100644 index daf8b4b166..0000000000 --- a/var/spack/mock_packages/indirect_mpich/package.py +++ /dev/null @@ -1,41 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class IndirectMpich(Package): - """Test case for a package that depends on MPI and one of its - dependencies requires a *particular version* of MPI. 
- """ - - homepage = "http://www.example.com" - url = "http://www.example.com/indirect_mpich-1.0.tar.gz" - - version(1.0, 'foobarbaz') - - depends_on('mpi') - depends_on('direct_mpich') - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/libdwarf/package.py b/var/spack/mock_packages/libdwarf/package.py deleted file mode 100644 index 0b8df04cfb..0000000000 --- a/var/spack/mock_packages/libdwarf/package.py +++ /dev/null @@ -1,44 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * -import os - -# Only build certain parts of dwarf because the other ones break. -dwarf_dirs = ['libdwarf', 'dwarfdump2'] - -class Libdwarf(Package): - homepage = "http://www.prevanders.net/dwarf.html" - url = "http://www.prevanders.net/libdwarf-20130729.tar.gz" - list_url = homepage - - version(20130729, "64b42692e947d5180e162e46c689dfbf") - version(20130207, 'foobarbaz') - version(20111030, 'foobarbaz') - version(20070703, 'foobarbaz') - - depends_on("libelf") - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/libelf/package.py b/var/spack/mock_packages/libelf/package.py deleted file mode 100644 index 94c8f942cd..0000000000 --- a/var/spack/mock_packages/libelf/package.py +++ /dev/null @@ -1,43 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Libelf(Package): - homepage = "http://www.mr511.de/software/english.html" - url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz" - - version('0.8.13', '4136d7b4c04df68b686570afa26988ac') - version('0.8.12', 'e21f8273d9f5f6d43a59878dc274fec7') - version('0.8.10', '9db4d36c283d9790d8fa7df1f4d7b4d9') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--enable-shared", - "--disable-dependency-tracking", - "--disable-debug") - make() - - # The mkdir commands in libelf's intsall can fail in parallel - make("install", parallel=False) diff --git a/var/spack/mock_packages/mpich/package.py b/var/spack/mock_packages/mpich/package.py deleted file mode 100644 index f77d3efc5d..0000000000 --- a/var/spack/mock_packages/mpich/package.py +++ /dev/null @@ -1,46 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Mpich(Package): - homepage = "http://www.mpich.org" - url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz" - list_url = "http://www.mpich.org/static/downloads/" - list_depth = 2 - - variant('debug', default=False, - description="Compile MPICH with debug flags.") - - version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0') - version('3.0.3', 'foobarbaz') - version('3.0.2', 'foobarbaz') - version('3.0.1', 'foobarbaz') - version('3.0', 'foobarbaz') - - provides('mpi@:3', when='@3:') - provides('mpi@:1', when='@:1') - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/mpich2/package.py b/var/spack/mock_packages/mpich2/package.py deleted file mode 100644 index 827b94c8a4..0000000000 --- a/var/spack/mock_packages/mpich2/package.py +++ /dev/null @@ -1,47 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
-# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Mpich2(Package): - homepage = "http://www.mpich.org" - url = "http://www.mpich.org/static/downloads/1.5/mpich2-1.5.tar.gz" - list_url = "http://www.mpich.org/static/downloads/" - list_depth = 2 - - version('1.5', '9c5d5d4fe1e17dd12153f40bc5b6dbc0') - version('1.4', 'foobarbaz') - version('1.3', 'foobarbaz') - version('1.2', 'foobarbaz') - version('1.1', 'foobarbaz') - version('1.0', 'foobarbaz') - - provides('mpi@:2.0') - provides('mpi@:2.1', when='@1.1:') - provides('mpi@:2.2', when='@1.2:') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/mock_packages/mpileaks/package.py b/var/spack/mock_packages/mpileaks/package.py deleted file mode 100644 index 3989f1b452..0000000000 --- a/var/spack/mock_packages/mpileaks/package.py +++ /dev/null @@ -1,43 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Mpileaks(Package): - homepage = "http://www.llnl.gov" - url = "http://www.llnl.gov/mpileaks-1.0.tar.gz" - - version(1.0, 'foobarbaz') - version(2.1, 'foobarbaz') - version(2.2, 'foobarbaz') - version(2.3, 'foobarbaz') - - variant('debug', default=False, description='Debug variant') - variant('opt', default=False, description='Optimized variant') - - depends_on("mpi") - depends_on("callpath") - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/multimethod/package.py b/var/spack/mock_packages/multimethod/package.py deleted file mode 100644 index 75b1606ffc..0000000000 --- a/var/spack/mock_packages/multimethod/package.py +++ /dev/null @@ -1,143 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - - -class Multimethod(Package): - """This package is designed for use with Spack's multimethod test. - It has a bunch of test cases for the @when decorator that the - test uses. - """ - - homepage = 'http://www.example.com/' - url = 'http://www.example.com/example-1.0.tar.gz' - - # - # These functions are only valid for versions 1, 2, and 3. - # - @when('@1.0') - def no_version_2(self): - return 1 - - @when('@3.0') - def no_version_2(self): - return 3 - - @when('@4.0') - def no_version_2(self): - return 4 - - - # - # These functions overlap, so there is ambiguity, but we'll take - # the first one. - # - @when('@:4') - def version_overlap(self): - return 1 - - @when('@2:') - def version_overlap(self): - return 2 - - - # - # More complicated case with cascading versions. - # - def mpi_version(self): - return 0 - - @when('^mpi@3:') - def mpi_version(self): - return 3 - - @when('^mpi@2:') - def mpi_version(self): - return 2 - - @when('^mpi@1:') - def mpi_version(self): - return 1 - - - # - # Use these to test whether the default method is called when no - # match is found. 
This also tests whether we can switch methods - # on compilers - # - def has_a_default(self): - return 'default' - - @when('%gcc') - def has_a_default(self): - return 'gcc' - - @when('%intel') - def has_a_default(self): - return 'intel' - - - - # - # Make sure we can switch methods on different architectures - # - @when('=x86_64') - def different_by_architecture(self): - return 'x86_64' - - @when('=ppc64') - def different_by_architecture(self): - return 'ppc64' - - @when('=ppc32') - def different_by_architecture(self): - return 'ppc32' - - @when('=arm64') - def different_by_architecture(self): - return 'arm64' - - - # - # Make sure we can switch methods on different dependencies - # - @when('^mpich') - def different_by_dep(self): - return 'mpich' - - @when('^zmpi') - def different_by_dep(self): - return 'zmpi' - - - # - # Make sure we can switch on virtual dependencies - # - def different_by_virtual_dep(self): - return 1 - - @when('^mpi@2:') - def different_by_virtual_dep(self): - return 2 diff --git a/var/spack/mock_packages/optional-dep-test-2/package.py b/var/spack/mock_packages/optional-dep-test-2/package.py deleted file mode 100644 index ef0587588e..0000000000 --- a/var/spack/mock_packages/optional-dep-test-2/package.py +++ /dev/null @@ -1,18 +0,0 @@ -from spack import * - -class OptionalDepTest2(Package): - """Depends on the optional-dep-test package""" - - homepage = "http://www.example.com" - url = "http://www.example.com/optional-dep-test-2-1.0.tar.gz" - - version('1.0', '0123456789abcdef0123456789abcdef') - - variant('odt', default=False) - variant('mpi', default=False) - - depends_on('optional-dep-test', when='+odt') - depends_on('optional-dep-test+mpi', when='+mpi') - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/optional-dep-test-3/package.py b/var/spack/mock_packages/optional-dep-test-3/package.py deleted file mode 100644 index e6cb3bd6e7..0000000000 --- a/var/spack/mock_packages/optional-dep-test-3/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class OptionalDepTest3(Package): - """Depends on the optional-dep-test package""" - - homepage = "http://www.example.com" - url = "http://www.example.com/optional-dep-test-3-1.0.tar.gz" - - version('1.0', '0123456789abcdef0123456789abcdef') - - variant('var', default=False) - - depends_on('a', when='~var') - depends_on('b', when='+var') - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/optional-dep-test/package.py b/var/spack/mock_packages/optional-dep-test/package.py deleted file mode 100644 index bb57576ca9..0000000000 --- a/var/spack/mock_packages/optional-dep-test/package.py +++ /dev/null @@ -1,29 +0,0 @@ -from spack import * - -class OptionalDepTest(Package): - """Description""" - - homepage = "http://www.example.com" - url = "http://www.example.com/optional_dep_test-1.0.tar.gz" - - version('1.0', '0123456789abcdef0123456789abcdef') - version('1.1', '0123456789abcdef0123456789abcdef') - - variant('a', default=False) - variant('f', default=False) - variant('mpi', default=False) - - depends_on('a', when='+a') - depends_on('b', when='@1.1') - depends_on('c', when='%intel') - depends_on('d', when='%intel@64.1') - depends_on('e', when='%clang@34:40') - - depends_on('f', when='+f') - depends_on('g', when='^f') - depends_on('mpi', when='^g') - - depends_on('mpi', when='+mpi') - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/svn-test/package.py b/var/spack/mock_packages/svn-test/package.py deleted file mode 100644 index 
ba4d5522b4..0000000000 --- a/var/spack/mock_packages/svn-test/package.py +++ /dev/null @@ -1,10 +0,0 @@ -from spack import * - -class SvnTest(Package): - """Mock package that uses svn for fetching.""" - url = "http://www.example.com/svn-test-1.0.tar.gz" - - version('svn', 'to-be-filled-in-by-test') - - def install(self, spec, prefix): - pass diff --git a/var/spack/mock_packages/trivial_install_test_package/package.py b/var/spack/mock_packages/trivial_install_test_package/package.py deleted file mode 100644 index c4db9f5f07..0000000000 --- a/var/spack/mock_packages/trivial_install_test_package/package.py +++ /dev/null @@ -1,38 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class TrivialInstallTestPackage(Package): - """This package is a stub with a trivial install method. It allows us - to test the install and uninstall logic of spack.""" - homepage = "http://www.example.com/trivial_install" - url = "http://www.unit-test-should-replace-this-url/trivial_install-1.0.tar.gz" - - version('1.0', 'foobarbaz') - - def install(self, spec, prefix): - configure('--prefix=%s' % prefix) - make() - make('install') diff --git a/var/spack/mock_packages/zmpi/package.py b/var/spack/mock_packages/zmpi/package.py deleted file mode 100644 index 8c6ceda6d3..0000000000 --- a/var/spack/mock_packages/zmpi/package.py +++ /dev/null @@ -1,39 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Zmpi(Package): - """This is a fake MPI package used to demonstrate virtual package providers - with dependencies.""" - homepage = "http://www.spack-fake-zmpi.org" - url = "http://www.spack-fake-zmpi.org/downloads/zmpi-1.0.tar.gz" - - version('1.0', 'foobarbaz') - - provides('mpi@:10.0') - depends_on('fake') - - def install(self, spec, prefix): - pass diff --git a/var/spack/packages/ImageMagick/package.py b/var/spack/packages/ImageMagick/package.py deleted file mode 100644 index 753ea80ca6..0000000000 --- a/var/spack/packages/ImageMagick/package.py +++ /dev/null @@ -1,37 +0,0 @@ -from spack import * - -class Imagemagick(Package): - """ImageMagick is a image processing library""" - homepage = "http://www.imagemagic.org" - - #------------------------------------------------------------------------- - # ImageMagick does not keep around anything but *-10 versions, so - # this URL may change. If you want the bleeding edge, you can - # uncomment it and see if it works but you may need to try to - # fetch a newer version (-6, -7, -8, -9, etc.) or you can stick - # wtih the older, stable, archived -10 versions below. - # - # TODO: would be nice if spack had a way to recommend avoiding a - # TODO: bleeding edge version, but not comment it out. - # ------------------------------------------------------------------------- - # version('6.9.0-6', 'c1bce7396c22995b8bdb56b7797b4a1b', - # url="http://www.imagemagick.org/download/ImageMagick-6.9.0-6.tar.bz2") - - #------------------------------------------------------------------------- - # *-10 versions are archived, so these versions should fetch reliably. 
- # ------------------------------------------------------------------------- - version('6.8.9-10', 'aa050bf9785e571c956c111377bbf57c', - url="http://sourceforge.net/projects/imagemagick/files/old-sources/6.x/6.8/ImageMagick-6.8.9-10.tar.gz/download") - - depends_on('libtool') - depends_on('jpeg') - depends_on('libpng') - depends_on('freetype') - depends_on('fontconfig') - depends_on('libtiff') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/Mitos/package.py b/var/spack/packages/Mitos/package.py deleted file mode 100644 index e312da3ffc..0000000000 --- a/var/spack/packages/Mitos/package.py +++ /dev/null @@ -1,19 +0,0 @@ -from spack import * - -class Mitos(Package): - """Mitos is a library and a tool for collecting sampled memory - performance data to view with MemAxes""" - - homepage = "https://github.com/scalability-llnl/Mitos" - url = "https://github.com/scalability-llnl/Mitos" - - version('0.9.1', 'c6cb57f3cae54f5157affd97ef7ef79e', git='https://github.com/scalability-llnl/Mitos.git', tag='v0.9.1') - - depends_on('dyninst@8.2.1:') - depends_on('hwloc') - - def install(self, spec, prefix): - with working_dir('spack-build', create=True): - cmake('..', *std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/R/package.py b/var/spack/packages/R/package.py deleted file mode 100644 index 2e6f65a742..0000000000 --- a/var/spack/packages/R/package.py +++ /dev/null @@ -1,33 +0,0 @@ -from spack import * - -class R(Package): - """R is 'GNU S', a freely available language and environment for - statistical computing and graphics which provides a wide va - riety of statistical and graphical techniques: linear and - nonlinear modelling, statistical tests, time series analysis, - classification, clustering, etc. Please consult the R project - homepage for further information.""" - homepage = "http://www.example.com" - url = "http://cran.cnr.berkeley.edu/src/base/R-3/R-3.1.2.tar.gz" - - version('3.1.2', '3af29ec06704cbd08d4ba8d69250ae74') - - depends_on("readline") - depends_on("ncurses") - depends_on("icu") - depends_on("glib") - depends_on("zlib") - depends_on("libtiff") - depends_on("jpeg") - depends_on("cairo") - depends_on("pango") - depends_on("freetype") - depends_on("tcl") - depends_on("tk") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--enable-R-shlib", - "--enable-BLAS-shlib") - make() - make("install") diff --git a/var/spack/packages/SAMRAI/no-tool-build.patch b/var/spack/packages/SAMRAI/no-tool-build.patch deleted file mode 100644 index 1adf0cf721..0000000000 --- a/var/spack/packages/SAMRAI/no-tool-build.patch +++ /dev/null @@ -1,20 +0,0 @@ ---- SAMRAI/Makefile.in 2013-05-31 11:04:32.000000000 -0700 -+++ SAMRAI/Makefile.in.notools 2014-05-30 10:31:15.135979900 -0700 -@@ -8,7 +8,7 @@ - ## - ######################################################################### - --default: library tools -+default: library - - SAMRAI = @top_srcdir@ - SUBDIR = . 
-@@ -135,7 +135,7 @@ - done - $(MAKE) archive_remove_obj_names - --install: library tools -+install: library - $(INSTALL) -d -m 755 $(INSTDIR)/config - $(INSTALL) -d -m 755 $(INSTDIR)/lib - $(INSTALL) -d -m 755 $(INSTDIR)/bin diff --git a/var/spack/packages/SAMRAI/package.py b/var/spack/packages/SAMRAI/package.py deleted file mode 100644 index eef041f0d5..0000000000 --- a/var/spack/packages/SAMRAI/package.py +++ /dev/null @@ -1,53 +0,0 @@ -from spack import * - -class Samrai(Package): - """SAMRAI (Structured Adaptive Mesh Refinement Application Infrastructure) - is an object-oriented C++ software library enables exploration of numerical, - algorithmic, parallel computing, and software issues associated with applying - structured adaptive mesh refinement (SAMR) technology in large-scale parallel - application development. - """ - homepage = "https://computation.llnl.gov/project/SAMRAI/" - url = "https://computation.llnl.gov/project/SAMRAI/download/SAMRAI-v3.9.1.tar.gz" - list_url = homepage - - version('3.9.1', '232d04d0c995f5abf20d94350befd0b2') - version('3.7.3', '12d574eacadf8c9a70f1bb4cd1a69df6') - version('3.7.2', 'f6a716f171c9fdbf3cb12f71fa6e2737') - version('3.6.3-beta', 'ef0510bf2893042daedaca434e5ec6ce') - version('3.5.2-beta', 'd072d9d681eeb9ada15ce91bea784274') - version('3.5.0-beta', '1ad18a319fc573e12e2b1fbb6f6b0a19') - version('3.4.1-beta', '00814cbee2cb76bf8302aff56bbb385b') - version('3.3.3-beta', '1db3241d3e1cab913dc310d736c34388') - version('3.3.2-beta', 'e598a085dab979498fcb6c110c4dd26c') - version('2.4.4', '04fb048ed0efe7c531ac10c81cc5f6ac') - - depends_on("mpi") - depends_on("zlib") - depends_on("hdf5") - depends_on("boost") - - # don't build tools with gcc - patch('no-tool-build.patch', when='%gcc') - - # TODO: currently hard-coded to use openmpi - be careful! 
- def install(self, spec, prefix): - mpi = next(m for m in ('openmpi', 'mpich', 'mvapich') - if m in spec) - - configure( - "--prefix=%s" % prefix, - "--with-CXX=%s" % spec[mpi].prefix.bin + "/mpic++", - "--with-CC=%s" % spec[mpi].prefix.bin + "/mpicc", - "--with-hdf5=%s" % spec['hdf5'].prefix, - "--with-boost=%s" % spec['boost'].prefix, - "--with-zlib=%s" % spec['zlib'].prefix, - "--without-blas", - "--without-lapack", - "--with-hypre=no", - "--with-petsc=no", - "--enable-opt", - "--disable-debug") - - make() - make("install") diff --git a/var/spack/packages/_repo.yaml b/var/spack/packages/_repo.yaml deleted file mode 100644 index 4a371e1cad..0000000000 --- a/var/spack/packages/_repo.yaml +++ /dev/null @@ -1,2 +0,0 @@ -repo: - namespace: gov.llnl.spack diff --git a/var/spack/packages/activeharmony/package.py b/var/spack/packages/activeharmony/package.py deleted file mode 100644 index 45dcc7c0e8..0000000000 --- a/var/spack/packages/activeharmony/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - -class Activeharmony(Package): - """Active Harmony: a framework for auto-tuning (the automated search for values to improve the performance of a target application).""" - homepage = "http://www.dyninst.org/harmony" - url = "http://www.dyninst.org/sites/default/files/downloads/harmony/ah-4.5.tar.gz" - - version('4.5', 'caee5b864716d376e2c25d739251b2a9') - - def install(self, spec, prefix): - make("CFLAGS=-O3") - make("install", 'PREFIX=%s' % prefix) - -from spack import * - diff --git a/var/spack/packages/adept-utils/package.py b/var/spack/packages/adept-utils/package.py deleted file mode 100644 index e4a2e1523f..0000000000 --- a/var/spack/packages/adept-utils/package.py +++ /dev/null @@ -1,42 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class AdeptUtils(Package): - """Utility libraries for LLNL performance tools.""" - - homepage = "https://github.com/scalability-llnl/adept-utils" - url = "https://github.com/scalability-llnl/adept-utils/archive/v1.0.tar.gz" - - version('1.0.1', '731a310717adcb004d9d195130efee7d') - version('1.0', '5c6cd9badce56c945ac8551e34804397') - - depends_on("boost") - depends_on("mpi") - - def install(self, spec, prefix): - cmake(*std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/apex/package.py b/var/spack/packages/apex/package.py deleted file mode 100644 index 6404d5208a..0000000000 --- a/var/spack/packages/apex/package.py +++ /dev/null @@ -1,34 +0,0 @@ -from spack import * -from spack.util.environment import * - -class Apex(Package): - homepage = "http://github.com/khuck/xpress-apex" - #url = "http://github.com/khuck/xpress-apex/archive/v0.1-release-candidate.tar.gz" - url = "http://github.com/khuck/xpress-apex" - - #version('0.1', '6e039c224387348296739f6bf360d081') - #version('master', branch='master', git='https://github.com/khuck/xpress-apex.git') - version('2015-10-21', git='https://github.com/khuck/xpress-apex.git', commit='d2e66ddde689120472fc57fc546d8cd80aab745c') - - depends_on("binutils+libiberty") - depends_on("boost@1.54:") - depends_on("cmake@2.8.12:") - depends_on("activeharmony@4.5:") - depends_on("ompt-openmp") - - def install(self, spec, prefix): - - path=get_path("PATH") - path.remove(spec["binutils"].prefix.bin) - path_set("PATH", path) - with working_dir("build", create=True): - cmake('-DBOOST_ROOT=%s' % spec['boost'].prefix, - '-DUSE_BFD=TRUE', - '-DBFD_ROOT=%s' % spec['binutils'].prefix, - '-DUSE_ACTIVEHARMONY=TRUE', - '-DACTIVEHARMONY_ROOT=%s' % spec['activeharmony'].prefix, - '-DUSE_OMPT=TRUE', - '-DOMPT_ROOT=%s' % spec['ompt-openmp'].prefix, - '..', *std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/arpack/package.py b/var/spack/packages/arpack/package.py deleted file mode 100644 index 8c67c536f3..0000000000 --- a/var/spack/packages/arpack/package.py +++ /dev/null @@ -1,41 +0,0 @@ -from spack import * -import os -import shutil - -class Arpack(Package): - """A collection of Fortran77 subroutines designed to solve large scale - eigenvalue problems. - """ - homepage = "http://www.caam.rice.edu/software/ARPACK/" - url = "http://www.caam.rice.edu/software/ARPACK/SRC/arpack96.tar.gz" - - version('96', 'fffaa970198b285676f4156cebc8626e') - - depends_on('blas') - depends_on('lapack') - - def patch(self): - # Filter the cray makefile to make a spack one. - shutil.move('ARMAKES/ARmake.CRAY', 'ARmake.inc') - makefile = FileFilter('ARmake.inc') - - # Be sure to use Spack F77 wrapper - makefile.filter('^FC.*', 'FC = f77') - makefile.filter('^FFLAGS.*', 'FFLAGS = -O2 -g') - - # Set up some variables. - makefile.filter('^PLAT.*', 'PLAT = ') - makefile.filter('^home.*', 'home = %s' % os.getcwd()) - makefile.filter('^BLASdir.*', 'BLASdir = %s' % self.spec['blas'].prefix) - makefile.filter('^LAPACKdir.*', 'LAPACKdir = %s' % self.spec['lapack'].prefix) - - # build the library in our own prefix. 
- makefile.filter('^ARPACKLIB.*', 'ARPACKLIB = %s/libarpack.a' % os.getcwd()) - - - def install(self, spec, prefix): - with working_dir('SRC'): - make('all') - - mkdirp(prefix.lib) - install('libarpack.a', prefix.lib) diff --git a/var/spack/packages/asciidoc/package.py b/var/spack/packages/asciidoc/package.py deleted file mode 100644 index 828f3b3f4f..0000000000 --- a/var/spack/packages/asciidoc/package.py +++ /dev/null @@ -1,18 +0,0 @@ -from spack import * - -class Asciidoc(Package): - """ A presentable text document format for writing articles, UNIX man - pages and other small to medium sized documents.""" - homepage = "http://asciidoc.org" - url = "http://downloads.sourceforge.net/project/asciidoc/asciidoc/8.6.9/asciidoc-8.6.9.tar.gz" - - version('8.6.9', 'c59018f105be8d022714b826b0be130a') - - depends_on('libxml2') - depends_on('libxslt') - - def install(self, spec, prefix): - configure('--prefix=%s' % prefix) - - make() - make("install") diff --git a/var/spack/packages/atk/package.py b/var/spack/packages/atk/package.py deleted file mode 100644 index 769805b227..0000000000 --- a/var/spack/packages/atk/package.py +++ /dev/null @@ -1,18 +0,0 @@ -from spack import * - -class Atk(Package): - """ATK provides the set of accessibility interfaces that are - implemented by other toolkits and applications. Using the ATK - interfaces, accessibility tools have full access to view and - control running applications.""" - homepage = "https://developer.gnome.org/atk/" - url = "http://ftp.gnome.org/pub/gnome/sources/atk/2.14/atk-2.14.0.tar.xz" - - version('2.14.0', 'ecb7ca8469a5650581b1227d78051b8b') - - depends_on("glib") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/atlas/package.py b/var/spack/packages/atlas/package.py deleted file mode 100644 index fc683363a7..0000000000 --- a/var/spack/packages/atlas/package.py +++ /dev/null @@ -1,60 +0,0 @@ -from spack import * -from spack.util.executable import Executable -import os - -class Atlas(Package): - """ - Automatically Tuned Linear Algebra Software, generic shared - ATLAS is an approach for the automatic generation and optimization of - numerical software. Currently ATLAS supplies optimized versions for the - complete set of linear algebra kernels known as the Basic Linear Algebra - Subroutines (BLAS), and a subset of the linear algebra routines in the - LAPACK library. - """ - homepage = "http://math-atlas.sourceforge.net/" - - version('3.11.34', '0b6c5389c095c4c8785fd0f724ec6825', - url='http://sourceforge.net/projects/math-atlas/files/Developer%20%28unstable%29/3.11.34/atlas3.11.34.tar.bz2/download') - version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da', - url='http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2') - - # TODO: make this provide BLAS once it works better. Create a way - # TODO: to mark "beta" packages and require explicit invocation. - - # provides('blas') - - - def patch(self): - # Disable thraed check. LLNL's environment does not allow - # disabling of CPU throttling in a way that ATLAS actually - # understands. - filter_file(r'^\s+if \(thrchk\) exit\(1\);', 'if (0) exit(1);', - 'CONFIG/src/config.c') - # TODO: investigate a better way to add the check back in - # TODO: using, say, MSRs. Or move this to a variant. 
- - @when('@:3.10') - def install(self, spec, prefix): - with working_dir('ATLAS-Build', create=True): - configure = Executable('../configure') - configure('--prefix=%s' % prefix, '-C', 'ic', 'cc', '-C', 'if', 'f77', "--dylibs") - make() - make('check') - make('ptcheck') - make('time') - make("install") - - - def install(self, spec, prefix): - with working_dir('ATLAS-Build', create=True): - configure = Executable('../configure') - configure('--incdir=%s' % prefix.include, - '--libdir=%s' % prefix.lib, - '--cc=cc', - "--shared") - - make() - make('check') - make('ptcheck') - make('time') - make("install") diff --git a/var/spack/packages/autoconf/package.py b/var/spack/packages/autoconf/package.py deleted file mode 100644 index 5189faf054..0000000000 --- a/var/spack/packages/autoconf/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class Autoconf(Package): - """Autoconf -- system configuration part of autotools""" - homepage = "https://www.gnu.org/software/autoconf/" - url = "http://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz" - - version('2.69', '82d05e03b93e45f5a39b828dc9c6c29b') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/automaded/package.py b/var/spack/packages/automaded/package.py deleted file mode 100644 index 9fbd93e3b3..0000000000 --- a/var/spack/packages/automaded/package.py +++ /dev/null @@ -1,51 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Automaded(Package): - """AutomaDeD (Automata-based Debugging for Dissimilar parallel - tasks) is a tool for automatic diagnosis of performance and - correctness problems in MPI applications. It creates - control-flow models of each MPI process and, when a failure - occurs, these models are leveraged to find the origin of - problems automatically. MPI calls are intercepted (using - wrappers) to create the models. When an MPI application hangs, - AutomaDeD creates a progress-dependence graph that helps - finding the process (or group of processes) that caused the hang. 
- """ - - homepage = "https://github.com/scalability-llnl/AutomaDeD" - url = "https://github.com/scalability-llnl/AutomaDeD/archive/v1.0.tar.gz" - - version('1.0', '16a3d4def2c4c77d0bc4b21de8b3ab03') - - depends_on('mpi') - depends_on('boost') - depends_on('callpath') - - def install(self, spec, prefix): - cmake("-DSTATE_TRACKER_WITH_CALLPATH=ON", *std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/automake/package.py b/var/spack/packages/automake/package.py deleted file mode 100644 index 9115822730..0000000000 --- a/var/spack/packages/automake/package.py +++ /dev/null @@ -1,16 +0,0 @@ -from spack import * - -class Automake(Package): - """Automake -- make file builder part of autotools""" - homepage = "http://www.gnu.org/software/automake/" - url = "http://ftp.gnu.org/gnu/automake/automake-1.14.tar.gz" - - version('1.14.1', 'd052a3e884631b9c7892f2efce542d75') - - depends_on('autoconf') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/bear/package.py b/var/spack/packages/bear/package.py deleted file mode 100644 index 0d4436fccc..0000000000 --- a/var/spack/packages/bear/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class Bear(Package): - """Bear is a tool that generates a compilation database for clang tooling from non-cmake build systems.""" - homepage = "https://github.com/rizsotto/Bear" - url = "https://github.com/rizsotto/Bear/archive/2.0.4.tar.gz" - - version('2.0.4', 'fd8afb5e8e18f8737ba06f90bd77d011') - - depends_on("cmake") - depends_on("python") - - def install(self, spec, prefix): - cmake('.', *std_cmake_args) - - make("all") - make("install") diff --git a/var/spack/packages/bib2xhtml/package.py b/var/spack/packages/bib2xhtml/package.py deleted file mode 100644 index 7f8e0cfe5a..0000000000 --- a/var/spack/packages/bib2xhtml/package.py +++ /dev/null @@ -1,27 +0,0 @@ -from spack import * -from glob import glob - -class Bib2xhtml(Package): - """bib2xhtml is a program that converts BibTeX files into HTML.""" - homepage = "http://www.spinellis.gr/sw/textproc/bib2xhtml/" - url='http://www.spinellis.gr/sw/textproc/bib2xhtml/bib2xhtml-v3.0-15-gf506.tar.gz' - - version('3.0-15-gf506', 'a26ba02fe0053bbbf2277bdf0acf8645') - - def url_for_version(self, v): - return ('http://www.spinellis.gr/sw/textproc/bib2xhtml/bib2xhtml-v%s.tar.gz' % v) - - def install(self, spec, prefix): - # Add the bst include files to the install directory - bst_include = join_path(prefix.share, 'bib2xhtml') - mkdirp(bst_include) - for bstfile in glob('html-*bst'): - install(bstfile, bst_include) - - # Install the script and point it at the user's favorite perl - # and the bst include directory. 
- mkdirp(prefix.bin) - install('bib2xhtml', prefix.bin) - filter_file(r'#!/usr/bin/perl', - '#!/usr/bin/env BSTINPUTS=%s perl' % bst_include, - join_path(prefix.bin, 'bib2xhtml')) diff --git a/var/spack/packages/binutils/package.py b/var/spack/packages/binutils/package.py deleted file mode 100644 index cac0a0407f..0000000000 --- a/var/spack/packages/binutils/package.py +++ /dev/null @@ -1,30 +0,0 @@ -from spack import * - -class Binutils(Package): - """GNU binutils, which contain the linker, assembler, objdump and others""" - homepage = "http://www.gnu.org/software/binutils/" - url = "ftp://ftp.gnu.org/gnu/binutils/binutils-2.25.tar.bz2" - - version('2.25', 'd9f3303f802a5b6b0bb73a335ab89d66') - version('2.24', 'e0f71a7b2ddab0f8612336ac81d9636b') - version('2.23.2', '4f8fa651e35ef262edc01d60fb45702e') - version('2.20.1', '2b9dc8f2b7dbd5ec5992c6e29de0b764') - - variant('libiberty', default=False, description='Also install libiberty.') - - def install(self, spec, prefix): - configure_args = [ - '--prefix=%s' % prefix, - '--disable-dependency-tracking', - '--enable-interwork', - '--enable-multilib', - '--enable-shared', - '--enable-64-bit-bfd', - '--enable-targets=all'] - - if '+libiberty' in spec: - configure_args.append('--enable-install-libiberty') - - configure(*configure_args) - make() - make("install") diff --git a/var/spack/packages/bison/package.py b/var/spack/packages/bison/package.py deleted file mode 100644 index 7c526fb958..0000000000 --- a/var/spack/packages/bison/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class Bison(Package): - """Bison is a general-purpose parser generator that converts - an annotated context-free grammar into a deterministic LR or - generalized LR (GLR) parser employing LALR(1) parser tables.""" - - homepage = "http://www.gnu.org/software/bison/" - url = "http://ftp.gnu.org/gnu/bison/bison-3.0.tar.gz" - - version('3.0.4', 'a586e11cd4aff49c3ff6d3b6a4c9ccf8') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/boost/package.py b/var/spack/packages/boost/package.py deleted file mode 100644 index 35824d53a2..0000000000 --- a/var/spack/packages/boost/package.py +++ /dev/null @@ -1,66 +0,0 @@ -from spack import * - -class Boost(Package): - """Boost provides free peer-reviewed portable C++ source - libraries, emphasizing libraries that work well with the C++ - Standard Library. - - Boost libraries are intended to be widely useful, and usable - across a broad spectrum of applications. The Boost license - encourages both commercial and non-commercial use. 
- """ - homepage = "http://www.boost.org" - url = "http://downloads.sourceforge.net/project/boost/boost/1.55.0/boost_1_55_0.tar.bz2" - list_url = "http://sourceforge.net/projects/boost/files/boost/" - list_depth = 2 - - version('1.59.0', '6aa9a5c6a4ca1016edd0ed1178e3cb87') - version('1.58.0', 'b8839650e61e9c1c0a89f371dd475546') - version('1.57.0', '1be49befbdd9a5ce9def2983ba3e7b76') - version('1.56.0', 'a744cf167b05d72335f27c88115f211d') - version('1.55.0', 'd6eef4b4cacb2183f2bf265a5a03a354') - version('1.54.0', '15cb8c0803064faef0c4ddf5bc5ca279') - version('1.53.0', 'a00d22605d5dbcfb4c9936a9b35bc4c2') - version('1.52.0', '3a855e0f919107e0ca4de4d84ad3f750') - version('1.51.0', '4b6bd483b692fd138aef84ed2c8eb679') - version('1.50.0', '52dd00be775e689f55a987baebccc462') - version('1.49.0', '0d202cb811f934282dea64856a175698') - version('1.48.0', 'd1e9a7a7f532bb031a3c175d86688d95') - version('1.47.0', 'a2dc343f7bc7f83f8941e47ed4a18200') - version('1.46.1', '7375679575f4c8db605d426fc721d506') - version('1.46.0', '37b12f1702319b73876b0097982087e0') - version('1.45.0', 'd405c606354789d0426bc07bea617e58') - version('1.44.0', 'f02578f5218f217a9f20e9c30e119c6a') - version('1.43.0', 'dd49767bfb726b0c774f7db0cef91ed1') - version('1.42.0', '7bf3b4eb841b62ffb0ade2b82218ebe6') - version('1.41.0', '8bb65e133907db727a2a825c5400d0a6') - version('1.40.0', 'ec3875caeac8c52c7c129802a8483bd7') - version('1.39.0', 'a17281fd88c48e0d866e1a12deecbcc0') - version('1.38.0', '5eca2116d39d61382b8f8235915cb267') - version('1.37.0', '8d9f990bfb7e83769fa5f1d6f065bc92') - version('1.36.0', '328bfec66c312150e4c2a78dcecb504b') - version('1.35.0', 'dce952a7214e72d6597516bcac84048b') - version('1.34.1', '2d938467e8a448a2c9763e0a9f8ca7e5') - version('1.34.0', 'ed5b9291ffad776f8757a916e1726ad0') - - - def url_for_version(self, version): - """Handle Boost's weird URLs, which write the version two different ways.""" - parts = [str(p) for p in Version(version)] - dots = ".".join(parts) - underscores = "_".join(parts) - return "http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2" % ( - dots, underscores) - - - def install(self, spec, prefix): - bootstrap = Executable('./bootstrap.sh') - bootstrap() - - # b2 used to be called bjam, before 1.47 (sigh) - b2name = './b2' if spec.satisfies('@1.47:') else './bjam' - - b2 = Executable(b2name) - b2('install', - '-j %s' % make_jobs, - '--prefix=%s' % prefix) diff --git a/var/spack/packages/bowtie2/bowtie2-2.5.patch b/var/spack/packages/bowtie2/bowtie2-2.5.patch deleted file mode 100644 index 290be39c73..0000000000 --- a/var/spack/packages/bowtie2/bowtie2-2.5.patch +++ /dev/null @@ -1,16 +0,0 @@ ---- Makefile 2015-02-26 10:50:00.000000000 -0800 -+++ Makefile.new 2015-07-29 18:03:59.891357399 -0700 -@@ -22,10 +22,10 @@ - # - - INC = --GCC_PREFIX = $(shell dirname `which gcc`) -+GCC_PREFIX = - GCC_SUFFIX = --CC = $(GCC_PREFIX)/gcc$(GCC_SUFFIX) --CPP = $(GCC_PREFIX)/g++$(GCC_SUFFIX) -+CC = cc -+CPP = c++ - CXX = $(CPP) - HEADERS = $(wildcard *.h) - BOWTIE_MM = 1 diff --git a/var/spack/packages/bowtie2/package.py b/var/spack/packages/bowtie2/package.py deleted file mode 100644 index 339aab6598..0000000000 --- a/var/spack/packages/bowtie2/package.py +++ /dev/null @@ -1,24 +0,0 @@ -from spack import * -from glob import glob -class Bowtie2(Package): - """Description""" - homepage = "bowtie-bio.sourceforge.net/bowtie2/index.shtml" - version('2.2.5','51fa97a862d248d7ee660efc1147c75f', url = 
"http://downloads.sourceforge.net/project/bowtie-bio/bowtie2/2.2.5/bowtie2-2.2.5-source.zip") - - patch('bowtie2-2.5.patch',when='@2.2.5', level=0) - - def install(self, spec, prefix): - make() - mkdirp(prefix.bin) - for bow in glob("bowtie2*"): - install(bow, prefix.bin) - # install('bowtie2',prefix.bin) - # install('bowtie2-align-l',prefix.bin) - # install('bowtie2-align-s',prefix.bin) - # install('bowtie2-build',prefix.bin) - # install('bowtie2-build-l',prefix.bin) - # install('bowtie2-build-s',prefix.bin) - # install('bowtie2-inspect',prefix.bin) - # install('bowtie2-inspect-l',prefix.bin) - # install('bowtie2-inspect-s',prefix.bin) - diff --git a/var/spack/packages/boxlib/package.py b/var/spack/packages/boxlib/package.py deleted file mode 100644 index 4f1b71132f..0000000000 --- a/var/spack/packages/boxlib/package.py +++ /dev/null @@ -1,25 +0,0 @@ -from spack import * - -class Boxlib(Package): - """BoxLib, a software framework for massively parallel - block-structured adaptive mesh refinement (AMR) codes.""" - - homepage = "https://ccse.lbl.gov/BoxLib/" - url = "https://ccse.lbl.gov/pub/Downloads/BoxLib.git"; - - # TODO: figure out how best to version this. No tags in the repo! - version('master', git='https://ccse.lbl.gov/pub/Downloads/BoxLib.git') - - depends_on('mpi') - - def install(self, spec, prefix): - args = std_cmake_args - args += ['-DCCSE_ENABLE_MPI=1', - '-DCMAKE_C_COMPILER=%s' % which('mpicc'), - '-DCMAKE_CXX_COMPILER=%s' % which('mpicxx'), - '-DCMAKE_Fortran_COMPILER=%s' % which('mpif90')] - - cmake('.', *args) - make() - make("install") - diff --git a/var/spack/packages/bzip2/package.py b/var/spack/packages/bzip2/package.py deleted file mode 100644 index d88336664d..0000000000 --- a/var/spack/packages/bzip2/package.py +++ /dev/null @@ -1,36 +0,0 @@ -from spack import * -from glob import glob - -class Bzip2(Package): - """bzip2 is a freely available, patent free high-quality data - compressor. It typically compresses files to within 10% to 15% - of the best available techniques (the PPM family of statistical - compressors), whilst being around twice as fast at compression - and six times faster at decompression.""" - homepage = "http://www.bzip.org" - url = "http://www.bzip.org/1.0.6/bzip2-1.0.6.tar.gz" - - version('1.0.6', '00b516f4704d4a7cb50a1d97e6e8e15b') - - def install(self, spec, prefix): - # No configure system -- have to filter the makefile for this package. 
- filter_file(r'CC=gcc', 'CC=cc', 'Makefile', string=True) - - make('-f', 'Makefile-libbz2_so') - make('clean') - make("install", "PREFIX=%s" % prefix) - - bzip2_exe = join_path(prefix.bin, 'bzip2') - install('bzip2-shared', bzip2_exe) - for i, libfile in enumerate(glob('libbz2.so*')): - install(libfile, prefix.lib) - if i == 0: - symlink(join_path(prefix.lib, libfile), join_path(prefix.lib, 'libbz2.so')) - - bunzip2 = join_path(prefix.bin, 'bunzip2') - remove(bunzip2) - symlink(bzip2_exe, bunzip2) - - bzcat = join_path(prefix.bin, 'bzcat') - remove(bzcat) - symlink(bzip2_exe, bzcat) diff --git a/var/spack/packages/cairo/package.py b/var/spack/packages/cairo/package.py deleted file mode 100644 index e1ac8aaa7d..0000000000 --- a/var/spack/packages/cairo/package.py +++ /dev/null @@ -1,19 +0,0 @@ -from spack import * - -class Cairo(Package): - """Cairo is a 2D graphics library with support for multiple output devices.""" - homepage = "http://cairographics.org" - url = "http://cairographics.org/releases/cairo-1.14.0.tar.xz" - - version('1.14.0', 'fc3a5edeba703f906f2241b394f0cced') - - depends_on("libpng") - depends_on("glib") - depends_on("pixman") - depends_on("fontconfig@2.10.91:") # Require newer version of fontconfig. - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--enable-tee") - make() - make("install") diff --git a/var/spack/packages/callpath/package.py b/var/spack/packages/callpath/package.py deleted file mode 100644 index f8a1eab9f7..0000000000 --- a/var/spack/packages/callpath/package.py +++ /dev/null @@ -1,47 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Callpath(Package): - """Library for representing callpaths consistently in - distributed-memory performance tools.""" - - homepage = "https://github.com/scalability-llnl/callpath" - url = "https://github.com/scalability-llnl/callpath/archive/v1.0.1.tar.gz" - - version('1.0.2', 'b1994d5ee7c7db9d27586fc2dcf8f373') - version('1.0.1', '0047983d2a52c5c335f8ba7f5bab2325') - - depends_on("libelf") - depends_on("libdwarf") - depends_on("dyninst") - depends_on("adept-utils") - depends_on("mpi") - - def install(self, spec, prefix): - # TODO: offer options for the walker used. 
- cmake('.', "-DCALLPATH_WALKER=dyninst", *std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/cblas/package.py b/var/spack/packages/cblas/package.py deleted file mode 100644 index 3cfe5ee588..0000000000 --- a/var/spack/packages/cblas/package.py +++ /dev/null @@ -1,35 +0,0 @@ -from spack import * -import os - -class Cblas(Package): - """The BLAS (Basic Linear Algebra Subprograms) are routines that - provide standard building blocks for performing basic vector and - matrix operations.""" - - homepage = "http://www.netlib.org/blas/_cblas/" - - # tarball has no version, but on the date below, this MD5 was correct. - version('2015-06-06', '1e8830f622d2112239a4a8a83b84209a', - url='http://www.netlib.org/blas/blast-forum/cblas.tgz') - - depends_on('blas') - parallel = False - - def patch(self): - mf = FileFilter('Makefile.in') - - mf.filter('^BLLIB =.*', 'BLLIB = %s/libblas.a' % self.spec['blas'].prefix.lib) - mf.filter('^CC =.*', 'CC = cc') - mf.filter('^FC =.*', 'FC = f90') - - - def install(self, spec, prefix): - make('all') - mkdirp(prefix.lib) - mkdirp(prefix.include) - - # Rename the generated lib file to libcblas.a - install('./lib/cblas_LINUX.a', '%s/libcblas.a' % prefix.lib) - install('./include/cblas.h','%s' % prefix.include) - install('./include/cblas_f77.h','%s' % prefix.include) - diff --git a/var/spack/packages/cgm/package.py b/var/spack/packages/cgm/package.py deleted file mode 100644 index 05d6395c5a..0000000000 --- a/var/spack/packages/cgm/package.py +++ /dev/null @@ -1,30 +0,0 @@ -from spack import * - -class Cgm(Package): - """The Common Geometry Module, Argonne (CGMA) is a code library - which provides geometry functionality used for mesh generation and - other applications.""" - homepage = "http://trac.mcs.anl.gov/projects/ITAPS/wiki/CGM" - url = "http://ftp.mcs.anl.gov/pub/fathom/cgm13.1.1.tar.gz" - - version('13.1.1', '4e8dbc4ba8f65767b29f985f7a23b01f') - version('13.1.0', 'a6c7b22660f164ce893fb974f9cb2028') - version('13.1' , '95f724bda04919fc76818a5b7bc0b4ed') - - depends_on("mpi") - - def patch(self): - filter_file('^(#include "CGMParallelConventions.h")', - '//\1', - 'geom/parallel/CGMReadParallel.cpp') - - - def install(self, spec, prefix): - configure("--with-mpi", - "--prefix=%s" % prefix, - "CFLAGS=-static", - "CXXFLAGS=-static", - "FCFLAGS=-static") - - make() - make("install") diff --git a/var/spack/packages/clang/package.py b/var/spack/packages/clang/package.py deleted file mode 100644 index 4f977bf9a4..0000000000 --- a/var/spack/packages/clang/package.py +++ /dev/null @@ -1,51 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Clang(Package): - """The goal of the Clang project is to create a new C, C++, - Objective C and Objective C++ front-end for the LLVM compiler. - """ - homepage = 'http://clang.llvm.org' - url = 'http://llvm.org/releases/3.7.0/cfe-3.7.0.src.tar.xz' - - depends_on('llvm@3.7.0', when='@3.7.0') - depends_on('llvm@3.6.2', when='@3.6.2') - depends_on('llvm@3.5.1', when='@3.5.1') - - version('3.7.0', '8f9d27335e7331cf0a4711e952f21f01', url='http://llvm.org/releases/3.7.0/cfe-3.7.0.src.tar.xz') - version('3.6.2', 'ff862793682f714bb7862325b9c06e20', url='http://llvm.org/releases/3.6.2/cfe-3.6.2.src.tar.xz') - version('3.5.1', '93f9532f8f7e6f1d8e5c1116907051cb', url='http://llvm.org/releases/3.5.1/cfe-3.5.1.src.tar.xz') - - def install(self, spec, prefix): - env['CXXFLAGS'] = self.compiler.cxx11_flag - - with working_dir('spack-build', create=True): - cmake('..', - '-DCLANG_PATH_TO_LLVM_BUILD=%s' % spec['llvm'].prefix, - '-DLLVM_MAIN_SRC_DIR=%s' % spec['llvm'].prefix, - *std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/cloog/package.py b/var/spack/packages/cloog/package.py deleted file mode 100644 index 814a33c76c..0000000000 --- a/var/spack/packages/cloog/package.py +++ /dev/null @@ -1,26 +0,0 @@ -from spack import * - -class Cloog(Package): - """CLooG is a free software and library to generate code for - scanning Z-polyhedra. That is, it finds a code (e.g. in C, - FORTRAN...) that reaches each integral point of one or more - parameterized polyhedra.""" - - homepage = "http://www.cloog.org" - url = "http://www.bastoul.net/cloog/pages/download/count.php3?url=./cloog-0.18.1.tar.gz" - list_url = "http://www.bastoul.net/cloog/pages/download" - - version('0.18.1', 'e34fca0540d840e5d0f6427e98c92252') - version('0.18.0', 'be78a47bd82523250eb3e91646db5b3d') - version('0.17.0', '0aa3302c81f65ca62c114e5264f8a802') - - depends_on("gmp") - depends_on("isl") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--with-osl=no", - "--with-isl=%s" % spec['isl'].prefix, - "--with-gmp=%s" % spec['gmp'].prefix) - make() - make("install") diff --git a/var/spack/packages/cmake/package.py b/var/spack/packages/cmake/package.py deleted file mode 100644 index 9efa370c8b..0000000000 --- a/var/spack/packages/cmake/package.py +++ /dev/null @@ -1,45 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Cmake(Package): - """A cross-platform, open-source build system. CMake is a family of - tools designed to build, test and package software.""" - homepage = 'https://www.cmake.org' - - version('2.8.10.2', '097278785da7182ec0aea8769d06860c', - url = 'http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz') - - version('3.0.2', 'db4c687a31444a929d2fdc36c4dfb95f', - url = 'http://www.cmake.org/files/v3.0/cmake-3.0.2.tar.gz') - -# version('3.0.1', 'e2e05d84cb44a42f1371d9995631dcf5') -# version('3.0.0', '21a1c85e1a3b803c4b48e7ff915a863e') - - def install(self, spec, prefix): - configure('--prefix=' + prefix, - '--parallel=' + str(make_jobs)) - make() - make('install') diff --git a/var/spack/packages/coreutils/package.py b/var/spack/packages/coreutils/package.py deleted file mode 100644 index 78c608d8eb..0000000000 --- a/var/spack/packages/coreutils/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class Coreutils(Package): - """The GNU Core Utilities are the basic file, shell and text - manipulation utilities of the GNU operating system. These are - the core utilities which are expected to exist on every - operating system. - """ - homepage = "http://www.gnu.org/software/coreutils/" - url = "http://ftp.gnu.org/gnu/coreutils/coreutils-8.23.tar.xz" - - version('8.23', 'abed135279f87ad6762ce57ff6d89c41') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/cppcheck/package.py b/var/spack/packages/cppcheck/package.py deleted file mode 100644 index 8e98f457ee..0000000000 --- a/var/spack/packages/cppcheck/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - -class Cppcheck(Package): - """A tool for static C/C++ code analysis.""" - homepage = "http://cppcheck.sourceforge.net/" - url = "http://downloads.sourceforge.net/project/cppcheck/cppcheck/1.68/cppcheck-1.68.tar.bz2" - - version('1.68', 'c015195f5d61a542f350269030150708') - - def install(self, spec, prefix): - # cppcheck does not have a configure script - make() - # manually install the final cppcheck binary - mkdirp(prefix.bin) - install('cppcheck', prefix.bin) diff --git a/var/spack/packages/cram/package.py b/var/spack/packages/cram/package.py deleted file mode 100644 index 4b8ec56f25..0000000000 --- a/var/spack/packages/cram/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - -class Cram(Package): - """Cram runs many small MPI jobs inside one large MPI job.""" - homepage = "https://github.com/scalability-llnl/cram" - url = "http://github.com/scalability-llnl/cram/archive/v1.0.1.tar.gz" - - version('1.0.1', 'c73711e945cf5dc603e44395f6647f5e') - - depends_on("mpi") - - def install(self, spec, prefix): - cmake(".", *std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/cscope/package.py b/var/spack/packages/cscope/package.py deleted file mode 100644 index 9aac0f7304..0000000000 --- a/var/spack/packages/cscope/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class Cscope(Package): - """Cscope is a developer's tool for browsing source code.""" - homepage = 
"http://http://cscope.sourceforge.net/" - url = "http://downloads.sourceforge.net/project/cscope/cscope/15.8b/cscope-15.8b.tar.gz" - - version('15.8b', '8f9409a238ee313a96f9f87fe0f3b176') - - # Can be configured to use flex (not necessary) - # ./configure --with-flex - - def install(self, spec, prefix): - configure('--prefix=%s' % prefix) - - make() - make("install") diff --git a/var/spack/packages/cube/package.py b/var/spack/packages/cube/package.py deleted file mode 100644 index d97cd25636..0000000000 --- a/var/spack/packages/cube/package.py +++ /dev/null @@ -1,55 +0,0 @@ -# FIXME: Add copyright statement -# -from spack import * -from contextlib import closing - -class Cube(Package): - """Cube the profile viewer for Score-P and Scalasca profiles. It - displays a multi-dimensional performance space consisting - of the dimensions (i) performance metric, (ii) call path, - and (iii) system resource.""" - - homepage = "http://www.scalasca.org/software/cube-4.x/download.html" - url = "http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz" - - version('4.2.3', '8f95b9531f5a8f8134f279c2767c9b20') - - version('4.3TP1', 'a2090fbc7b2ba394bd5c09ba971e237f', - url = 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz') - - # Using CC as C++ compiler provides quirky workaround for a Score-P build system attempt - # to guess a matching C compiler when configuring scorep-score - backend_user_provided = """\ -CC=cc -CXX=CC -F77=f77 -FC=f90 -#CFLAGS=-fPIC -#CXXFLAGS=-fPIC -""" - frontend_user_provided = """\ -CC_FOR_BUILD=cc -CXX_FOR_BUILD=CC -F77_FOR_BUILD=f70 -FC_FOR_BUILD=f90 -""" - - def install(self, spec, prefix): - # Use a custom compiler configuration, otherwise the score-p - # build system messes with spack's compiler settings. - # Create these three files in the build directory - - with closing(open("vendor/common/build-config/platforms/platform-backend-user-provided", "w")) as backend_file: - backend_file.write(self.backend_user_provided) - with closing(open("vendor/common/build-config/platforms/platform-frontend-user-provided", "w")) as frontend_file: - frontend_file.write(self.frontend_user_provided) - - configure_args = ["--prefix=%s" % prefix, - "--with-custom-compilers", - "--without-paraver", - "--without-gui"] - - configure(*configure_args) - - make(parallel=False) - make("install", parallel=False) diff --git a/var/spack/packages/czmq/package.py b/var/spack/packages/czmq/package.py deleted file mode 100644 index a2f1947554..0000000000 --- a/var/spack/packages/czmq/package.py +++ /dev/null @@ -1,19 +0,0 @@ -from spack import * - -class Czmq(Package): - """ A C interface to the ZMQ library """ - homepage = "http://czmq.zeromq.org" - url = "https://github.com/zeromq/czmq/archive/v3.0.2.tar.gz" - - version('3.0.2', '23e9885f7ee3ce88d99d0425f52e9be1', url='https://github.com/zeromq/czmq/archive/v3.0.2.tar.gz') - - depends_on('zeromq') - - def install(self, spec, prefix): - bash = which("bash") - bash("./autogen.sh") - configure("--prefix=%s" % prefix) - - make() - make("install") - diff --git a/var/spack/packages/dbus/package.py b/var/spack/packages/dbus/package.py deleted file mode 100644 index f7c302d611..0000000000 --- a/var/spack/packages/dbus/package.py +++ /dev/null @@ -1,31 +0,0 @@ -from spack import * - -class Dbus(Package): - """D-Bus is a message bus system, a simple way for applications to - talk to one another. 
D-Bus supplies both a system daemon (for - events such new hardware device printer queue ) and a - per-user-login-session daemon (for general IPC needs among user - applications). Also, the message bus is built on top of a - general one-to-one message passing framework, which can be used - by any two applications to communicate directly (without going - through the message bus daemon).""" - - homepage = "http://dbus.freedesktop.org/" - url = "http://dbus.freedesktop.org/releases/dbus/dbus-1.8.8.tar.gz" - - version('1.9.0', 'ec6895a4d5c0637b01f0d0e7689e2b36') - version('1.8.8', 'b9f4a18ee3faa1e07c04aa1d83239c43') - version('1.8.6', '6a08ba555d340e9dfe2d623b83c0eea8') - version('1.8.4', '4717cb8ab5b80978fcadf2b4f2f72e1b') - version('1.8.2', 'd6f709bbec0a022a1847c7caec9d6068') - - def install(self, spec, prefix): - configure( - "--prefix=%s" % prefix, - "--disable-systemd") - make() - make("install") - - # dbus needs a machine id generated after install - dbus_uuidgen = Executable(join_path(prefix.bin, 'dbus-uuidgen')) - dbus_uuidgen('--ensure') diff --git a/var/spack/packages/docbook-xml/package.py b/var/spack/packages/docbook-xml/package.py deleted file mode 100644 index fce1de7deb..0000000000 --- a/var/spack/packages/docbook-xml/package.py +++ /dev/null @@ -1,19 +0,0 @@ -import os -import glob -from spack import * - - -class DocbookXml(Package): - """Docbook DTD XML files.""" - homepage = "http://www.oasis-open.org/docbook" - url = "http://www.oasis-open.org/docbook/xml/4.5/docbook-xml-4.5.zip" - - version('4.5', '03083e288e87a7e829e437358da7ef9e') - - def install(self, spec, prefix): - cp = which('cp') - - install_args = ['-a', '-t', prefix] - install_args.extend(glob.glob('*')) - - cp(*install_args) diff --git a/var/spack/packages/doxygen/package.py b/var/spack/packages/doxygen/package.py deleted file mode 100644 index 3d4a4e47a7..0000000000 --- a/var/spack/packages/doxygen/package.py +++ /dev/null @@ -1,25 +0,0 @@ -#------------------------------------------------------------------------------ -# Author: Justin Too -# Date: September 11, 2015 -#------------------------------------------------------------------------------ - -from spack import * - -class Doxygen(Package): - """Doxygen is the de facto standard tool for generating documentation - from annotated C++ sources, but it also supports other popular programming - languages such as C, Objective-C, C#, PHP, Java, Python, IDL (Corba, - Microsoft, and UNO/OpenOffice flavors), Fortran, VHDL, Tcl, and to some extent D.. 
- """ - homepage = "http://www.stack.nl/~dimitri/doxygen/" - url = "http://ftp.stack.nl/pub/users/dimitri/doxygen-1.8.10.src.tar.gz" - - version('1.8.10', '79767ccd986f12a0f949015efb5f058f') - - depends_on("cmake@2.8.12:") - - def install(self, spec, prefix): - cmake('.', *std_cmake_args) - - make() - make("install") diff --git a/var/spack/packages/dri2proto/package.py b/var/spack/packages/dri2proto/package.py deleted file mode 100644 index 11dfa568e2..0000000000 --- a/var/spack/packages/dri2proto/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class Dri2proto(Package): - """DRI2 Protocol Headers.""" - homepage = "http://http://cgit.freedesktop.org/xorg/proto/dri2proto/" - url = "http://xorg.freedesktop.org/releases/individual/proto/dri2proto-2.8.tar.gz" - - version('2.8', '19ea18f63d8ae8053c9fa84b60365b77') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/dtcmp/package.py b/var/spack/packages/dtcmp/package.py deleted file mode 100644 index 9d940583c1..0000000000 --- a/var/spack/packages/dtcmp/package.py +++ /dev/null @@ -1,20 +0,0 @@ -import os -from spack import * - -class Dtcmp(Package): - """The Datatype Comparison Library provides comparison operations and - parallel sort algorithms for MPI applications.""" - - homepage = "https://github.com/hpc/dtcmp" - url = "https://github.com/hpc/dtcmp/releases/download/v1.0.3/dtcmp-1.0.3.tar.gz" - - version('1.0.3', 'cdd8ccf71e8ff67de2558594a7fcd317') - - depends_on('mpi') - depends_on('lwgrp') - - def install(self, spec, prefix): - configure("--prefix=" + prefix, - "--with-lwgrp=" + spec['lwgrp'].prefix) - make() - make("install") diff --git a/var/spack/packages/dyninst/package.py b/var/spack/packages/dyninst/package.py deleted file mode 100644 index 41ec57dd2f..0000000000 --- a/var/spack/packages/dyninst/package.py +++ /dev/null @@ -1,68 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Dyninst(Package): - """API for dynamic binary instrumentation. 
Modify programs while they - are executing without recompiling, re-linking, or re-executing.""" - homepage = "https://paradyn.org" - url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz" - list_url = "http://www.dyninst.org/downloads/dyninst-8.x" - - version('8.2.1', 'abf60b7faabe7a2e4b54395757be39c7', - url="http://www.paradyn.org/release8.2/DyninstAPI-8.2.1.tgz") - version('8.1.2', 'bf03b33375afa66fe0efa46ce3f4b17a', - url="http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz") - version('8.1.1', 'd1a04e995b7aa70960cd1d1fac8bd6ac', - url="http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz") - - depends_on("libelf") - depends_on("libdwarf") - depends_on("boost@1.42:") - - # new version uses cmake - def install(self, spec, prefix): - libelf = spec['libelf'].prefix - libdwarf = spec['libdwarf'].prefix - - with working_dir('spack-build', create=True): - cmake('..', - '-DBoost_INCLUDE_DIR=%s' % spec['boost'].prefix.include, - '-DBoost_LIBRARY_DIR=%s' % spec['boost'].prefix.lib, - '-DBoost_NO_SYSTEM_PATHS=TRUE', - '-DLIBELF_INCLUDE_DIR=%s' % join_path(libelf.include, 'libelf'), - '-DLIBELF_LIBRARIES=%s' % join_path(libelf.lib, 'libelf.so'), - '-DLIBDWARF_INCLUDE_DIR=%s' % libdwarf.include, - '-DLIBDWARF_LIBRARIES=%s' % join_path(libdwarf.lib, 'libdwarf.so'), - *std_cmake_args) - make() - make("install") - - - @when('@:8.1') - def install(self, spec, prefix): - configure("--prefix=" + prefix) - make() - make("install") diff --git a/var/spack/packages/elfutils/package.py b/var/spack/packages/elfutils/package.py deleted file mode 100644 index 926d234584..0000000000 --- a/var/spack/packages/elfutils/package.py +++ /dev/null @@ -1,26 +0,0 @@ -from spack import * - -class Elfutils(Package): - """elfutils is a collection of various binary tools such as - eu-objdump, eu-readelf, and other utilities that allow you to - inspect and manipulate ELF files. Refer to Table 5.Tools Included - in elfutils for Red Hat Developer for a complete list of binary - tools that are distributed with the Red Hat Developer Toolset - version of elfutils.""" - - homepage = "https://fedorahosted.org/elfutils/" - - version('0.163', - git='git://git.fedorahosted.org/git/elfutils.git', - tag='elfutils-0.163') - - provides('elf') - - def install(self, spec, prefix): - autoreconf = which('autoreconf') - autoreconf('-if') - - configure('--prefix=%s' % prefix, '--enable-maintainer-mode') - make() - make("install") - diff --git a/var/spack/packages/extrae/package.py b/var/spack/packages/extrae/package.py deleted file mode 100644 index 3ad4cbaf86..0000000000 --- a/var/spack/packages/extrae/package.py +++ /dev/null @@ -1,46 +0,0 @@ -from spack import * - -# typical working line with extrae 3.0.1 -# ./configure --prefix=/usr/local --with-mpi=/usr/lib64/mpi/gcc/openmpi --with-unwind=/usr/local --with-papi=/usr --with-dwarf=/usr --with-elf=/usr --with-dyninst=/usr --with-binutils=/usr --with-xml-prefix=/usr --enable-openmp --enable-nanos --enable-pthread --disable-parallel-merge LDFLAGS=-pthread - -class Extrae(Package): - """Extrae is the package devoted to generate tracefiles which can - be analyzed later by Paraver. Extrae is a tool that uses - different interposition mechanisms to inject probes into the - target application so as to gather information regarding the - application performance. 
The Extrae instrumentation package can - instrument the MPI programin model, and the following parallel - programming models either alone or in conjunction with MPI : - OpenMP, CUDA, OpenCL, pthread, OmpSs""" - homepage = "http://www.bsc.es/computer-sciences/extrae" - url = "http://www.bsc.es/ssl/apps/performanceTools/files/extrae-3.0.1.tar.bz2" - version('3.0.1', 'a6a8ca96cd877723cd8cc5df6bdb922b') - - depends_on("mpi") - depends_on("dyninst") - depends_on("libunwind") - depends_on("boost") - depends_on("libdwarf") - depends_on("papi") - - def install(self, spec, prefix): - if 'openmpi' in spec: - mpi = spec['openmpi'] - elif 'mpich' in spec: - mpi = spec['mpich'] - elif 'mvapich2' in spec: - mpi = spec['mvapich2'] - - configure("--prefix=%s" % prefix, - "--with-mpi=%s" % mpi.prefix, - "--with-unwind=%s" % spec['libunwind'].prefix, - "--with-dyninst=%s" % spec['dyninst'].prefix, - "--with-boost=%s" % spec['boost'].prefix, - "--with-dwarf=%s" % spec['libdwarf'].prefix, - "--with-papi=%s" % spec['papi'].prefix, - "--with-dyninst-headers=%s" % spec['dyninst'].prefix.include, - "--with-dyninst-libs=%s" % spec['dyninst'].prefix.lib) - - make() - make("install", parallel=False) - diff --git a/var/spack/packages/exuberant-ctags/package.py b/var/spack/packages/exuberant-ctags/package.py deleted file mode 100644 index efd2b541b2..0000000000 --- a/var/spack/packages/exuberant-ctags/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class ExuberantCtags(Package): - """The canonical ctags generator""" - homepage = "ctags.sourceforge.net" - url = "http://downloads.sourceforge.net/project/ctags/ctags/5.8/ctags-5.8.tar.gz" - - version('5.8', 'c00f82ecdcc357434731913e5b48630d') - - def install(self, spec, prefix): - configure('--prefix=%s' % prefix) - - make() - make("install") diff --git a/var/spack/packages/fish/package.py b/var/spack/packages/fish/package.py deleted file mode 100644 index 1225558705..0000000000 --- a/var/spack/packages/fish/package.py +++ /dev/null @@ -1,18 +0,0 @@ -from spack import * - -class Fish(Package): - """fish is a smart and user-friendly command line shell for OS X, Linux, and - the rest of the family. 
- """ - - homepage = "http://fishshell.com/" - url = "http://fishshell.com/files/2.2.0/fish-2.2.0.tar.gz" - list_url = homepage - - version('2.2.0', 'a76339fd14ce2ec229283c53e805faac48c3e99d9e3ede9d82c0554acfc7b77a') - - def install(self, spec, prefix): - configure('--prefix=%s' % prefix) - - make() - make("install") diff --git a/var/spack/packages/flex/package.py b/var/spack/packages/flex/package.py deleted file mode 100644 index b065904912..0000000000 --- a/var/spack/packages/flex/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - -class Flex(Package): - """Flex is a tool for generating scanners.""" - - homepage = "http://flex.sourceforge.net/" - url = "http://download.sourceforge.net/flex/flex-2.5.39.tar.gz" - - version('2.5.39', 'e133e9ead8ec0a58d81166b461244fde') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/flux/package.py b/var/spack/packages/flux/package.py deleted file mode 100644 index c128f46be8..0000000000 --- a/var/spack/packages/flux/package.py +++ /dev/null @@ -1,36 +0,0 @@ -from spack import * -import os - -class Flux(Package): - """ A next-generation resource manager (pre-alpha) """ - - homepage = "https://github.com/flux-framework/flux-core" - url = "https://github.com/flux-framework/flux-core" - - version('master', branch='master', git='https://github.com/flux-framework/flux-core') - - # Also needs autotools, but should use the system version if available - depends_on("zeromq@4.0.4:") - depends_on("czmq@2.2:") - depends_on("lua@5.1:5.1.99") - depends_on("munge") - depends_on("libjson-c") - depends_on("libxslt") - # TODO: This provides a catalog, hacked with environment below for now - depends_on("docbook-xml") - depends_on("asciidoc") - depends_on("python") - depends_on("py-cffi") - - def install(self, spec, prefix): - # Bootstrap with autotools - bash = which('bash') - bash('./autogen.sh') - - # Fix asciidoc dependency on xml style sheets and whatnot - os.environ['XML_CATALOG_FILES'] = os.path.join(spec['docbook-xml'].prefix, - 'catalog.xml') - # Configure, compile & install - configure("--prefix=" + prefix) - make("install", "V=1") - diff --git a/var/spack/packages/fontconfig/package.py b/var/spack/packages/fontconfig/package.py deleted file mode 100644 index 89b13604e8..0000000000 --- a/var/spack/packages/fontconfig/package.py +++ /dev/null @@ -1,16 +0,0 @@ -from spack import * - -class Fontconfig(Package): - """Fontconfig customizing font access""" - homepage = "http://www.freedesktop.org/wiki/Software/fontconfig/" - url = "http://www.freedesktop.org/software/fontconfig/release/fontconfig-2.11.1.tar.gz" - - version('2.11.1' , 'e75e303b4f7756c2b16203a57ac87eba') - - depends_on('freetype') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/freetype/package.py b/var/spack/packages/freetype/package.py deleted file mode 100644 index 0309b858a1..0000000000 --- a/var/spack/packages/freetype/package.py +++ /dev/null @@ -1,16 +0,0 @@ -from spack import * - -class Freetype(Package): - """Font package""" - homepage = "http://http://www.freetype.org" - url = "http://download.savannah.gnu.org/releases/freetype/freetype-2.5.3.tar.gz" - - version('2.5.3' , 'cafe9f210e45360279c730d27bf071e9') - - depends_on('libpng') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/gasnet/package.py 
b/var/spack/packages/gasnet/package.py deleted file mode 100644 index 705961d1de..0000000000 --- a/var/spack/packages/gasnet/package.py +++ /dev/null @@ -1,35 +0,0 @@ -from spack import * - -class Gasnet(Package): - """GASNet is a language-independent, low-level networking layer - that provides network-independent, high-performance communication - primitives tailored for implementing parallel global address space - SPMD languages and libraries such as UPC, Co-Array Fortran, SHMEM, - Cray Chapel, and Titanium. - """ - homepage = "http://gasnet.lbl.gov" - url = "http://gasnet.lbl.gov/GASNet-1.24.0.tar.gz" - - version('1.24.0', 'c8afdf48381e8b5a7340bdb32ca0f41a') - - - def install(self, spec, prefix): - # TODO: don't use paths with @ in them. - change_sed_delimiter('@', ';', 'configure') - - configure("--prefix=%s" % prefix, - # TODO: factor IB suport out into architecture description. - "--enable-ibv", - "--enable-udp", - "--disable-mpi", - "--enable-par", - "--enable-mpi-compat", - "--enable-segment-fast", - "--disable-aligned-segments", - # TODO: make an option so that Legion can request builds with/without this. - # See the Legion webpage for details on when to/not to use. - "--disable-pshm", - "--with-segment-mmap-max=64MB") - - make() - make("install") diff --git a/var/spack/packages/gcc/package.py b/var/spack/packages/gcc/package.py deleted file mode 100644 index a49a1348aa..0000000000 --- a/var/spack/packages/gcc/package.py +++ /dev/null @@ -1,122 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -from contextlib import closing -from glob import glob - -class Gcc(Package): - """The GNU Compiler Collection includes front ends for C, C++, - Objective-C, Fortran, and Java.""" - homepage = "https://gcc.gnu.org" - - url = "http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2" - list_url = 'http://open-source-box.org/gcc/' - list_depth = 2 - - DEPENDS_ON_ISL_PREDICATE = '@5.0:' - - version('5.2.0', 'a51bcfeb3da7dd4c623e27207ed43467') - version('4.9.3', '6f831b4d251872736e8e9cc09746f327') - version('4.9.2', '4df8ee253b7f3863ad0b86359cd39c43') - version('4.9.1', 'fddf71348546af523353bd43d34919c1') - version('4.8.5', '80d2c2982a3392bb0b89673ff136e223') - version('4.8.4', '5a84a30839b2aca22a2d723de2a626ec') - version('4.7.4', '4c696da46297de6ae77a82797d2abe28') - version('4.6.4', 'b407a3d1480c11667f293bfb1f17d1a4') - version('4.5.4', '27e459c2566b8209ab064570e1b378f7') - - depends_on("mpfr") - depends_on("gmp") - depends_on("mpc") # when @4.5: - depends_on("binutils~libiberty") - - # Save these until we can do optional deps. - depends_on("isl", when=DEPENDS_ON_ISL_PREDICATE) - #depends_on("ppl") - #depends_on("cloog") - - def install(self, spec, prefix): - # libjava/configure needs a minor fix to install into spack paths. - filter_file(r"'@.*@'", "'@[[:alnum:]]*@'", 'libjava/configure', string=True) - - enabled_languages = set(('c', 'c++', 'fortran', 'java', 'objc')) - if spec.satisfies("@4.7.1:"): - enabled_languages.add('go') - - # Generic options to compile GCC - options = ["--prefix=%s" % prefix, - "--libdir=%s/lib64" % prefix, - "--disable-multilib", - "--enable-languages=" + ','.join(enabled_languages), - "--with-mpc=%s" % spec['mpc'].prefix, - "--with-mpfr=%s" % spec['mpfr'].prefix, - "--with-gmp=%s" % spec['gmp'].prefix, - "--enable-lto", - "--with-gnu-ld", - "--with-gnu-as", - "--with-quad"] - # Binutils - binutils_options = ["--with-stage1-ldflags=%s" % self.rpath_args, - "--with-boot-ldflags=%s" % self.rpath_args, - "--with-ld=%s/bin/ld" % spec['binutils'].prefix, - "--with-as=%s/bin/as" % spec['binutils'].prefix] - options.extend(binutils_options) - # Isl - if spec.satisfies(Gcc.DEPENDS_ON_ISL_PREDICATE): - isl_options = ["--with-isl=%s" % spec['isl'].prefix] - options.extend(isl_options) - - # Rest of install is straightforward. - configure(*options) - make() - make("install") - - self.write_rpath_specs() - - - @property - def spec_dir(self): - # e.g. lib64/gcc/x86_64-unknown-linux-gnu/4.9.2 - spec_dir = glob("%s/lib64/gcc/*/*" % self.prefix) - return spec_dir[0] if spec_dir else None - - - def write_rpath_specs(self): - """Generate a spec file so the linker adds a rpath to the libs - the compiler used to build the executable.""" - if not self.spec_dir: - tty.warn("Could not install specs for %s." 
% self.spec.format('$_$@')) - return - - gcc = Executable(join_path(self.prefix.bin, 'gcc')) - lines = gcc('-dumpspecs', return_output=True).strip().split("\n") - specs_file = join_path(self.spec_dir, 'specs') - with closing(open(specs_file, 'w')) as out: - for line in lines: - out.write(line + "\n") - if line.startswith("*link:"): - out.write("-rpath %s/lib:%s/lib64 \\\n"% (self.prefix, self.prefix)) - set_install_permissions(specs_file) diff --git a/var/spack/packages/gdk-pixbuf/package.py b/var/spack/packages/gdk-pixbuf/package.py deleted file mode 100644 index 14a5569984..0000000000 --- a/var/spack/packages/gdk-pixbuf/package.py +++ /dev/null @@ -1,22 +0,0 @@ -from spack import * - -class GdkPixbuf(Package): - """The Gdk Pixbuf is a toolkit for image loading and pixel buffer - manipulation. It is used by GTK+ 2 and GTK+ 3 to load and - manipulate images. In the past it was distributed as part of - GTK+ 2 but it was split off into a separate package in - preparation for the change to GTK+ 3.""" - homepage = "https://developer.gnome.org/gdk-pixbuf/" - url = "http://ftp.gnome.org/pub/gnome/sources/gdk-pixbuf/2.31/gdk-pixbuf-2.31.1.tar.xz" - - version('2.31.2', '6be6bbc4f356d4b79ab4226860ab8523') - - depends_on("glib") - depends_on("jpeg") - depends_on("libpng") - depends_on("libtiff") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/geos/package.py b/var/spack/packages/geos/package.py deleted file mode 100644 index 4a2657e32f..0000000000 --- a/var/spack/packages/geos/package.py +++ /dev/null @@ -1,31 +0,0 @@ -from spack import * - -class Geos(Package): - """GEOS (Geometry Engine - Open Source) is a C++ port of the Java - Topology Suite (JTS). As such, it aims to contain the complete - functionality of JTS in C++. This includes all the OpenGIS - Simple Features for SQL spatial predicate functions and spatial - operators, as well as specific JTS enhanced topology functions.""" - - homepage = "http://trac.osgeo.org/geos/" - url = "http://download.osgeo.org/geos/geos-3.4.2.tar.bz2" - - version('3.4.2', 'fc5df2d926eb7e67f988a43a92683bae') - version('3.4.1', '4c930dec44c45c49cd71f3e0931ded7e') - version('3.4.0', 'e41318fc76b5dc764a69d43ac6b18488') - version('3.3.9', '4794c20f07721d5011c93efc6ccb8e4e') - version('3.3.8', '75be476d0831a2d14958fed76ca266de') - version('3.3.7', '95ab996d22672b067d92c7dee2170460') - version('3.3.6', '6fadfb941541875f4976f75fb0bbc800') - version('3.3.5', '2ba61afb7fe2c5ddf642d82d7b16e75b') - version('3.3.4', '1bb9f14d57ef06ffa41cb1d67acb55a1') - version('3.3.3', '8454e653d7ecca475153cc88fd1daa26') - - extends('python') - depends_on('swig') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--enable-python") - make() - make("install") diff --git a/var/spack/packages/gflags/package.py b/var/spack/packages/gflags/package.py deleted file mode 100644 index 62dd80a094..0000000000 --- a/var/spack/packages/gflags/package.py +++ /dev/null @@ -1,21 +0,0 @@ -import os -from spack import * - -class Gflags(Package): - """The gflags package contains a C++ library that implements - commandline flags processing. It includes built-in support for - standard types such as string and the ability to define flags - in the source file in which they are used. 
Online documentation - available at: https://gflags.github.io/gflags/""" - - homepage = "https://gflags.github.io/gflags" - url = "https://github.com/gflags/gflags/archive/v2.1.2.tar.gz" - - version('2.1.2', 'ac432de923f9de1e9780b5254884599f') - - def install(self, spec, prefix): - cmake("-DCMAKE_INSTALL_PREFIX=" + prefix, - "-DBUILD_SHARED_LIBS=ON") - make() - make("test") - make("install") diff --git a/var/spack/packages/ghostscript/package.py b/var/spack/packages/ghostscript/package.py deleted file mode 100644 index 0ab49d425f..0000000000 --- a/var/spack/packages/ghostscript/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class Ghostscript(Package): - """an interpreter for the PostScript language and for PDF. """ - homepage = "http://ghostscript.com/" - url = "http://downloads.ghostscript.com/public/ghostscript-9.16.tar.gz" - - version('9.16', '829319325bbdb83f5c81379a8f86f38f') - - parallel = False - - def install(self, spec, prefix): - configure("--prefix=%s" %prefix, "--enable-shared") - - make() - make("install") - diff --git a/var/spack/packages/git/package.py b/var/spack/packages/git/package.py deleted file mode 100644 index 0f1a3ba05b..0000000000 --- a/var/spack/packages/git/package.py +++ /dev/null @@ -1,27 +0,0 @@ -from spack import * - -class Git(Package): - """Git is a free and open source distributed version control - system designed to handle everything from small to very large - projects with speed and efficiency.""" - homepage = "http://git-scm.com" - url = "https://www.kernel.org/pub/software/scm/git/git-2.2.1.tar.xz" - - version('2.2.1', '43e01f9d96ba8c11611e0eef0d9f9f28') - - # Use system openssl. - # depends_on("openssl") - - # Use system perl for now. - # depends_on("perl") - # depends_on("pcre") - - depends_on("zlib") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--without-pcre", - "--without-python") - - make() - make("install") diff --git a/var/spack/packages/glib/package.py b/var/spack/packages/glib/package.py deleted file mode 100644 index 178f0b9df5..0000000000 --- a/var/spack/packages/glib/package.py +++ /dev/null @@ -1,18 +0,0 @@ -from spack import * - -class Glib(Package): - """The GLib package contains a low-level libraries useful for - providing data structure handling for C, portability wrappers - and interfaces for such runtime functionality as an event loop, - threads, dynamic loading and an object system.""" - homepage = "https://developer.gnome.org/glib/" - url = "http://ftp.gnome.org/pub/gnome/sources/glib/2.42/glib-2.42.1.tar.xz" - - version('2.42.1', '89c4119e50e767d3532158605ee9121a') - - depends_on("libffi") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/glm/package.py b/var/spack/packages/glm/package.py deleted file mode 100644 index d00c301b4c..0000000000 --- a/var/spack/packages/glm/package.py +++ /dev/null @@ -1,19 +0,0 @@ -from spack import * - - -class Glm(Package): - """ - OpenGL Mathematics (GLM) is a header only C++ mathematics library for graphics software based on - the OpenGL Shading Language (GLSL) specification. 
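gflags above drives CMake in the source tree, while glm just below builds in a scratch directory instead. A sketch of the out-of-source variant, assuming only the cmake, std_cmake_args, and working_dir helpers used elsewhere in this patch; the name, URL, and checksum are placeholders:

from spack import *

class Examplecmake(Package):
    """Hypothetical CMake-based package."""
    homepage = "http://www.example.com"                           # placeholder
    url      = "http://www.example.com/examplecmake-1.0.tar.gz"   # placeholder

    version('1.0', '00000000000000000000000000000000')            # placeholder checksum

    def install(self, spec, prefix):
        # Configure and build in a throwaway directory so the source tree stays clean.
        with working_dir('spack-build', create=True):
            cmake('..',
                  '-DBUILD_SHARED_LIBS=ON',
                  *std_cmake_args)       # std_cmake_args supplies the install prefix
            make()
            make("install")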
- """ - - homepage = "https://github.com/g-truc/glm" - url = "https://github.com/g-truc/glm/archive/0.9.7.1.tar.gz" - - version('0.9.7.1', '61af6639cdf652d1cdd7117190afced8') - - def install(self, spec, prefix): - with working_dir('spack-build', create=True): - cmake('..', *std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/global/package.py b/var/spack/packages/global/package.py deleted file mode 100644 index a77b1bdc09..0000000000 --- a/var/spack/packages/global/package.py +++ /dev/null @@ -1,24 +0,0 @@ -from spack import * -import os - - -class Global(Package): - """ The Gnu Global tagging system """ - # FIXME: add a proper url for your package's homepage here. - homepage = "http://www.gnu.org/software/global" - url = "http://tamacom.com/global/global-6.5.tar.gz" - - version('6.5', 'dfec818b4f53d91721e247cf7b218078') - - depends_on('exuberant-ctags') - - def install(self, spec, prefix): - config_args = ['--prefix={}'.format(prefix)] - - config_args.append('--with-exuberant-ctags={}'.format( - os.path.join(spec['exuberant-ctags'].prefix.bin, 'ctags'))) - - configure(*config_args) - - make() - make("install") diff --git a/var/spack/packages/glog/package.py b/var/spack/packages/glog/package.py deleted file mode 100644 index d73386b394..0000000000 --- a/var/spack/packages/glog/package.py +++ /dev/null @@ -1,15 +0,0 @@ -import os -from spack import * - -class Glog(Package): - """C++ implementation of the Google logging module.""" - - homepage = "https://github.com/google/glog" - url = "https://github.com/google/glog/archive/v0.3.3.tar.gz" - - version('0.3.3', 'c1f86af27bd9c73186730aa957607ed0') - - def install(self, spec, prefix): - configure("--prefix=" + prefix) - make() - make("install") diff --git a/var/spack/packages/gmp/package.py b/var/spack/packages/gmp/package.py deleted file mode 100644 index d6af821b34..0000000000 --- a/var/spack/packages/gmp/package.py +++ /dev/null @@ -1,40 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Gmp(Package): - """GMP is a free library for arbitrary precision arithmetic, - operating on signed integers, rational numbers, and - floating-point numbers.""" - homepage = "https://gmplib.org" - url = "https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2" - - version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470') - version('6.0.0' , '6ef5869ae735db9995619135bd856b84') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/gnutls/package.py b/var/spack/packages/gnutls/package.py deleted file mode 100644 index cf57a24a6d..0000000000 --- a/var/spack/packages/gnutls/package.py +++ /dev/null @@ -1,22 +0,0 @@ -from spack import * - -class Gnutls(Package): - """GnuTLS is a secure communications library implementing the SSL, - TLS and DTLS protocols and technologies around them. It - provides a simple C language application programming interface - (API) to access the secure communications protocols as well as - APIs to parse and write X.509, PKCS #12, OpenPGP and other - required structures. It is aimed to be portable and efficient - with focus on security and interoperability.""" - - homepage = "http://www.gnutls.org" - url = "ftp://ftp.gnutls.org/gcrypt/gnutls/v3.3/gnutls-3.3.9.tar.xz" - - version('3.3.9', 'ff61b77e39d09f1140ab5a9cf52c58b6') - - depends_on("nettle") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/gperf/package.py b/var/spack/packages/gperf/package.py deleted file mode 100644 index 32551b67b4..0000000000 --- a/var/spack/packages/gperf/package.py +++ /dev/null @@ -1,19 +0,0 @@ -from spack import * - -class Gperf(Package): - """GNU gperf is a perfect hash function generator. For a given - list of strings, it produces a hash function and hash table, in - form of C or C++ code, for looking up a value depending on the - input string. The hash function is perfect, which means that the - hash table has no collisions, and the hash table lookup needs a - single string comparison only.""" - - homepage = "https://www.gnu.org/software/gperf/" - url = "http://ftp.gnu.org/pub/gnu/gperf/gperf-3.0.4.tar.gz" - - version('3.0.4', 'c1f1db32fb6598d6a93e6e88796a8632') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/gperftools/package.py b/var/spack/packages/gperftools/package.py deleted file mode 100644 index 8900462324..0000000000 --- a/var/spack/packages/gperftools/package.py +++ /dev/null @@ -1,38 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. 
-# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Gperftools(Package): - """Google's fast malloc/free implementation, especially for multi-threaded applications. - Contains tcmalloc, heap-checker, heap-profiler, and cpu-profiler.""" - homepage = "https://code.google.com/p/gperftools" - url = "https://googledrive.com/host/0B6NtGsLhIcf7MWxMMF9JdTN3UVk/gperftools-2.3.tar.gz" - - version('2.3', 'f54dd119f0e46ac1f13264f8d97adf90', url="https://googledrive.com/host/0B6NtGsLhIcf7MWxMMF9JdTN3UVk/gperftools-2.3.tar.gz") - - def install(self, spec, prefix): - configure("--prefix=" + prefix) - make() - make("install") diff --git a/var/spack/packages/graphlib/package.py b/var/spack/packages/graphlib/package.py deleted file mode 100644 index ddac0b2b66..0000000000 --- a/var/spack/packages/graphlib/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class Graphlib(Package): - """Library to create, manipulate, and export graphs Graphlib.""" - homepage = "http://https://github.com/lee218llnl/graphlib" - url = "https://github.com/lee218llnl/graphlib/archive/v2.0.0.tar.gz" - - version('2.0.0', '43c6df84f1d38ba5a5dce0ae19371a70') - - def install(self, spec, prefix): - cmake(".", *std_cmake_args) - - make() - make("install") diff --git a/var/spack/packages/graphviz/package.py b/var/spack/packages/graphviz/package.py deleted file mode 100644 index 7af7da1881..0000000000 --- a/var/spack/packages/graphviz/package.py +++ /dev/null @@ -1,21 +0,0 @@ -from spack import * - -class Graphviz(Package): - """Graph Visualization Software""" - homepage = "http://www.graphviz.org" - url = "http://www.graphviz.org/pub/graphviz/stable/SOURCES/graphviz-2.38.0.tar.gz" - - version('2.38.0', '5b6a829b2ac94efcd5fa3c223ed6d3ae') - - parallel = False - - depends_on("swig") - depends_on("python") - depends_on("ghostscript") - - def install(self, spec, prefix): - configure("--prefix=%s" %prefix) - - make() - make("install") - diff --git a/var/spack/packages/gtkplus/package.py b/var/spack/packages/gtkplus/package.py deleted file mode 100644 index 0ebc7100de..0000000000 --- a/var/spack/packages/gtkplus/package.py +++ /dev/null @@ -1,22 +0,0 @@ -from spack import * - -class Gtkplus(Package): - """The GTK+ 2 package contains libraries used for creating graphical user interfaces for applications.""" - homepage = "http://www.gtk.org" - - version('2.24.25', '612350704dd3aacb95355a4981930c6f', - url="http://ftp.gnome.org/pub/gnome/sources/gtk+/2.24/gtk+-2.24.25.tar.xz") - - depends_on("atk") - depends_on("gdk-pixbuf") - depends_on("pango") - - def patch(self): - # remove disable deprecated flag. 
- filter_file(r'CFLAGS="-DGDK_PIXBUF_DISABLE_DEPRECATED $CFLAGS"', - '', 'configure', string=True) - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/harfbuzz/package.py b/var/spack/packages/harfbuzz/package.py deleted file mode 100644 index ed7c42a909..0000000000 --- a/var/spack/packages/harfbuzz/package.py +++ /dev/null @@ -1,20 +0,0 @@ -from spack import * - -class Harfbuzz(Package): - """The Harfbuzz package contains an OpenType text shaping engine.""" - homepage = "http://www.freedesktop.org/wiki/Software/HarfBuzz/" - url = "http://www.freedesktop.org/software/harfbuzz/release/harfbuzz-0.9.37.tar.bz2" - - version('0.9.37', 'bfe733250e34629a188d82e3b971bc1e') - - depends_on("glib") - depends_on("icu") - depends_on("freetype") - - def patch(self): - change_sed_delimiter('@', ';', 'src/Makefile.in') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/hdf5/package.py b/var/spack/packages/hdf5/package.py deleted file mode 100644 index 15e0ef9338..0000000000 --- a/var/spack/packages/hdf5/package.py +++ /dev/null @@ -1,42 +0,0 @@ -from spack import * - -class Hdf5(Package): - """HDF5 is a data model, library, and file format for storing and managing - data. It supports an unlimited variety of datatypes, and is designed for - flexible and efficient I/O and for high volume and complex data. - """ - - homepage = "http://www.hdfgroup.org/HDF5/" - url = "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8.13/src/hdf5-1.8.13.tar.gz" - list_url = "http://www.hdfgroup.org/ftp/HDF5/releases" - list_depth = 3 - - version('1.8.15', '03cccb5b33dbe975fdcd8ae9dc021f24') - version('1.8.13', 'c03426e9e77d7766944654280b467289') - - depends_on("mpi") - depends_on("zlib") - - # TODO: currently hard-coded to use OpenMPI - def install(self, spec, prefix): - - configure( - "--prefix=%s" % prefix, - "--with-zlib=%s" % spec['zlib'].prefix, - "--enable-parallel", - "--enable-shared", - "CC=%s" % spec['mpich'].prefix.bin + "/mpicc", - "CXX=%s" % spec['mpich'].prefix.bin + "/mpic++") - - make() - make("install") - - def url_for_version(self, version): - v = str(version) - - if version == Version("1.2.2"): - return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + ".tar.gz" - elif version < Version("1.7"): - return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + version.up_to(2) + "/hdf5-" + v + ".tar.gz" - else: - return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + "/src/hdf5-" + v + ".tar.gz" diff --git a/var/spack/packages/hwloc/package.py b/var/spack/packages/hwloc/package.py deleted file mode 100644 index 31a31f376a..0000000000 --- a/var/spack/packages/hwloc/package.py +++ /dev/null @@ -1,25 +0,0 @@ -from spack import * - -class Hwloc(Package): - """The Portable Hardware Locality (hwloc) software package - provides a portable abstraction (across OS, versions, - architectures, ...) of the hierarchical topology of modern - architectures, including NUMA memory nodes, sockets, shared - caches, cores and simultaneous multithreading. It also gathers - various system attributes such as cache and memory information - as well as the locality of I/O devices such as network - interfaces, InfiniBand HCAs or GPUs. 
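Several recipes in this patch (gtkplus and harfbuzz above, libmng further on) repair shipped sources in a patch() hook before configure runs. A minimal sketch of the hook, assuming only filter_file as used above; the file name and the flag being removed are placeholders:

from spack import *

class Examplepatched(Package):
    """Hypothetical package that edits its configure script before building."""
    homepage = "http://www.example.com"                             # placeholder
    url      = "http://www.example.com/examplepatched-1.0.tar.gz"   # placeholder

    version('1.0', '00000000000000000000000000000000')              # placeholder checksum

    def patch(self):
        # Runs after the source is staged and before install(); string=True
        # treats the pattern as literal text rather than a regular expression.
        filter_file('-Werror', '', 'configure', string=True)

    def install(self, spec, prefix):
        configure("--prefix=%s" % prefix)
        make()
        make("install")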
It primarily aims at - helping applications with gathering information about modern - computing hardware so as to exploit it accordingly and - efficiently.""" - homepage = "http://www.open-mpi.org/projects/hwloc/" - url = "http://www.open-mpi.org/software/hwloc/v1.9/downloads/hwloc-1.9.tar.gz" - - version('1.9', '1f9f9155682fe8946a97c08896109508') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") - diff --git a/var/spack/packages/hypre/package.py b/var/spack/packages/hypre/package.py deleted file mode 100644 index 198b3f00dc..0000000000 --- a/var/spack/packages/hypre/package.py +++ /dev/null @@ -1,32 +0,0 @@ -from spack import * - -class Hypre(Package): - """Hypre is a library of high performance preconditioners that - features parallel multigrid methods for both structured and - unstructured grid problems.""" - - homepage = "https://computation.llnl.gov/project/linear_solvers/software.php" - url = "https://computation.llnl.gov/project/linear_solvers/download/hypre-2.10.0b.tar.gz" - - version('2.10.0b', '768be38793a35bb5d055905b271f5b8e') - - depends_on("mpi") - depends_on("blas") - depends_on("lapack") - - def install(self, spec, prefix): - blas_dir = spec['blas'].prefix - lapack_dir = spec['lapack'].prefix - - # Hypre's source is staged under ./src so we'll have to manually - # cd into it. - with working_dir("src"): - configure( - "--prefix=%s" % prefix, - "--with-blas-libs=blas", - "--with-blas-lib-dirs=%s/lib" % blas_dir, - "--with-lapack-libs=\"lapack blas\"", - "--with-lapack-lib-dirs=%s/lib" % lapack_dir, - "--with-MPI") - make() - make("install") diff --git a/var/spack/packages/icu/package.py b/var/spack/packages/icu/package.py deleted file mode 100644 index f256ec5712..0000000000 --- a/var/spack/packages/icu/package.py +++ /dev/null @@ -1,25 +0,0 @@ -from spack import * - -class Icu(Package): - """The International Components for Unicode (ICU) package is a - mature, widely used set of C/C++ libraries providing Unicode and - Globalization support for software applications. ICU is widely - portable and gives applications the same results on all - platforms.""" - # FIXME: add a proper url for your package's homepage here. 
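hypre above threads its BLAS/LAPACK install locations into configure through spec['name'].prefix, and hdf5 a little earlier does the same for its compiler wrappers (though hdf5 declares the mpi virtual yet queries spec['mpich'], so it only resolves when MPICH is the chosen provider). The pattern in isolation, with placeholder names and flags:

from spack import *

class Examplesolver(Package):
    """Hypothetical package that points configure at its dependencies."""
    homepage = "http://www.example.com"                              # placeholder
    url      = "http://www.example.com/examplesolver-1.0.tar.gz"     # placeholder

    version('1.0', '00000000000000000000000000000000')               # placeholder checksum

    depends_on("zlib")
    depends_on("mpi")

    def install(self, spec, prefix):
        configure("--prefix=%s" % prefix,
                  # Each dependency's install prefix is reachable through the spec.
                  "--with-zlib=%s" % spec['zlib'].prefix,
                  "CC=%s" % join_path(spec['mpi'].prefix.bin, 'mpicc'))
        make()
        make("install")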
- homepage = "http://www.example.com" - url = "http://download.icu-project.org/files/icu4c/54.1/icu4c-54_1-src.tgz" - - version('54.1', 'e844caed8f2ca24c088505b0d6271bc0') - - - def url_for_version(self, version): - return "http://download.icu-project.org/files/icu4c/%s/icu4c-%s-src.tgz" % ( - version, str(version).replace('.', '_')) - - - def install(self, spec, prefix): - with working_dir("source"): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/icu4c/package.py b/var/spack/packages/icu4c/package.py deleted file mode 100644 index 55b44463b2..0000000000 --- a/var/spack/packages/icu4c/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class Icu4c(Package): - """ICU is a mature, widely used set of C/C++ and Java libraries - providing Unicode and Globalization support for software applications.""" - - homepage = "http://site.icu-project.org/" - url = "http://downloads.sourceforge.net/project/icu/ICU4C/54.1/icu4c-54_1-src.tgz" - - version('54_1', 'e844caed8f2ca24c088505b0d6271bc0') - - def install(self, spec, prefix): - cd("source") - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/isl/package.py b/var/spack/packages/isl/package.py deleted file mode 100644 index 836ef3ea40..0000000000 --- a/var/spack/packages/isl/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class Isl(Package): - """isl is a thread-safe C library for manipulating sets and - relations of integer points bounded by affine constraints.""" - homepage = "http://isl.gforge.inria.fr" - url = "http://isl.gforge.inria.fr/isl-0.14.tar.bz2" - - version('0.14', 'acd347243fca5609e3df37dba47fd0bb') - - depends_on("gmp") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--with-gmp-prefix=%s" % spec['gmp'].prefix) - make() - make("install") diff --git a/var/spack/packages/jdk/package.py b/var/spack/packages/jdk/package.py deleted file mode 100644 index 8f8076dd14..0000000000 --- a/var/spack/packages/jdk/package.py +++ /dev/null @@ -1,46 +0,0 @@ -#------------------------------------------------------------------------------ -# Author: Justin Too -#------------------------------------------------------------------------------ -import distutils -from distutils import dir_util -from subprocess import call - -import spack -from spack import * -import llnl.util.tty as tty - -class Jdk(Package): - """The Java Development Kit (JDK) released by Oracle Corporation - in the form of a binary product aimed at Java developers.""" - homepage = "http://www.oracle.com/technetwork/java/javase/downloads/index.html" - - version('8u25-linux-x64', 'e145c03a7edc845215092786bcfba77e', - url="http://download.oracle.com/otn-pub/java/jdk/8u25-b17/jdk-8u25-linux-x64.tar.gz") - - # Oracle requires that you accept their License Agreement in order - # to access the Java packages in download.oracle.com. In order to - # automate this process, we need to utilize these additional curl - # commandline options. 
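hdf5 earlier in this patch and the icu/icu4c recipes just above each override url_for_version so a single template can cover a mirror layout that embeds the version in more than one place. A sketch of the hook with a placeholder URL scheme:

from spack import *

class Exampleurl(Package):
    """Hypothetical package whose download path repeats the version."""
    homepage = "http://www.example.com"                                # placeholder
    url      = "http://www.example.com/pub/1.0/exampleurl-1.0.tar.gz"  # placeholder

    version('1.0', '00000000000000000000000000000000')                 # placeholder checksums
    version('0.9', '11111111111111111111111111111111')

    def url_for_version(self, version):
        # Build the download URL for a specific version instead of relying
        # on pattern-matching the `url` attribute.
        return "http://www.example.com/pub/%s/exampleurl-%s.tar.gz" % (version, version)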
- # - # See http://stackoverflow.com/questions/10268583/how-to-automate-download-and-installation-of-java-jdk-on-linux - curl_options=[ - '-j', # junk cookies - '-H', # specify required License Agreement cookie - 'Cookie: oraclelicense=accept-securebackup-cookie'] - - def do_fetch(self): - # Add our custom curl commandline options - tty.msg( - "[Jdk] Adding required commandline options to curl " + - "before performing fetch: %s" % - (self.curl_options)) - - for option in self.curl_options: - spack.curl.add_default_arg(option) - - # Now perform the actual fetch - super(Jdk, self).do_fetch() - - - def install(self, spec, prefix): - distutils.dir_util.copy_tree(".", prefix) diff --git a/var/spack/packages/jpeg/package.py b/var/spack/packages/jpeg/package.py deleted file mode 100644 index 87820467db..0000000000 --- a/var/spack/packages/jpeg/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class Jpeg(Package): - """jpeg library""" - homepage = "http://www.ijg.org" - url = "http://www.ijg.org/files/jpegsrc.v9a.tar.gz" - - version('9a', '3353992aecaee1805ef4109aadd433e7') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/launchmon/package.py b/var/spack/packages/launchmon/package.py deleted file mode 100644 index 6fbe6a68d0..0000000000 --- a/var/spack/packages/launchmon/package.py +++ /dev/null @@ -1,47 +0,0 @@ -############################################################################## -# Copyright (c) 2014, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Matthew LeGendre, legendre1@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
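The jdk recipe above cannot fetch its tarball without a license-acceptance cookie, so it overrides do_fetch() to hand extra arguments to Spack's shared curl before delegating to the base class. A pared-down sketch of that override; the URL, cookie, and checksum are placeholders:

import spack
from spack import *

class Examplegated(Package):
    """Hypothetical package hosted behind a click-through license page."""
    homepage = "http://www.example.com"                               # placeholder

    version('1.0', '00000000000000000000000000000000',                # placeholder
            url="http://www.example.com/dl/examplegated-1.0.tar.gz")

    curl_options = ['-H', 'Cookie: accept-license=yes']               # placeholder cookie

    def do_fetch(self):
        # Teach the shared curl instance the extra arguments, then fetch as usual.
        for option in self.curl_options:
            spack.curl.add_default_arg(option)
        super(Examplegated, self).do_fetch()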
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Launchmon(Package): - """Software infrastructure that enables HPC run-time tools to - co-locate tool daemons with a parallel job.""" - homepage = "http://sourceforge.net/projects/launchmon" - url = "http://downloads.sourceforge.net/project/launchmon/launchmon/1.0.1%20release/launchmon-1.0.1.tar.gz" - - version('1.0.1', '2f12465803409fd07f91174a4389eb2b') - version('1.0.1-2', git='https://github.com/scalability-llnl/launchmon.git', commit='ff7e22424b8f375318951eb1c9282fcbbfa8aadf') - - depends_on('autoconf') - depends_on('automake') - depends_on('libtool') - - def install(self, spec, prefix): - configure( - "--prefix=" + prefix, - "--with-bootfabric=cobo", - "--with-rm=slurm") - - make() - make("install") diff --git a/var/spack/packages/launchmon/patch.lmon_install_dir b/var/spack/packages/launchmon/patch.lmon_install_dir deleted file mode 100644 index 8a1d93fdc9..0000000000 --- a/var/spack/packages/launchmon/patch.lmon_install_dir +++ /dev/null @@ -1,147 +0,0 @@ -Index: launchmon/src/linux/lmon_api/Makefile.am -=================================================================== ---- launchmon/src/linux/lmon_api/Makefile.am (revision 481) -+++ launchmon/src/linux/lmon_api/Makefile.am (working copy) -@@ -80,13 +80,10 @@ - libmonfeapi_la_CFLAGS = $(AM_CFLAGS) - libmonfeapi_la_CXXFLAGS = $(AM_CXXFLAGS) - --libmonfeapi_la_LDFLAGS = -L$(top_srcdir)/@COMMLOC@ \ -- -L$(top_srcdir)/@GCRYPTLOC@ \ -- -L$(top_srcdir)/@GPGERRLOC@ \ -- $(AM_LDFLAGS) \ -- -version-info @LMON_CURRENT@:@LMON_REVISION@:@LMON_AGE@ -+libmonfeapi_la_LDFLAGS = $(AM_LDFLAGS) \ -+ -version-info @LMON_CURRENT@:@LMON_REVISION@:@LMON_AGE@ - --libmonfeapi_la_LIBADD = @LIBPTHREAD@ @LIBCOMM@ @LIBGCRYPT@ @LIBGPGERR@ @LIBRT@ -+libmonfeapi_la_LIBADD = @LIBPTHREAD@ $(top_builddir)/@COMMLOC@/@LIBCOMM@ $(top_builddir)/@GCRYPTLOC@/@LIBGCRYPT@ $(top_builddir)/@GPGERRLOC@/@LIBGPGERR@ @LIBRT@ - - libmonbeapi_la_SOURCES = lmon_be.cxx \ - lmon_daemon_internal.cxx \ -@@ -113,13 +110,10 @@ - libmonbeapi_la_CFLAGS = $(AM_CFLAGS) - libmonbeapi_la_CXXFLAGS = $(AM_CXXFLAGS) - --libmonbeapi_la_LDFLAGS = -L$(top_srcdir)/@COMMLOC@ \ -- -L$(top_srcdir)/@GCRYPTLOC@ \ -- -L$(top_srcdir)/@GPGERRLOC@ \ -- $(AM_LDFLAGS) \ -+libmonbeapi_la_LDFLAGS = $(AM_LDFLAGS) \ - -version-info @LMON_CURRENT@:@LMON_REVISION@:@LMON_AGE@ - --libmonbeapi_la_LIBADD = @LIBCOMM@ @LIBGCRYPT@ @LIBGPGERR@ -+libmonbeapi_la_LIBADD = $(top_builddir)/@COMMLOC@/@LIBCOMM@ $(top_builddir)/@GCRYPTLOC@/@LIBGCRYPT@ $(top_builddir)/@GPGERRLOC@/@LIBGPGERR@ - - - # -@@ -146,10 +140,8 @@ - - libmonmwapi_la_CXXFLAGS = $(AM_CXXFLAGS) - --libmonmwapi_la_LDFLAGS = -L$(top_srcdir)/@COMMLOC@ \ -- -L$(top_srcdir)/@GCRYPTLOC@ \ -- -L$(top_srcdir)/@GPGERRLOC@ \ -- $(AM_LDFLAGS) \ -+libmonmwapi_la_LDFLAGS = $(AM_LDFLAGS) \ - -version-info @LMON_CURRENT@:@LMON_REVISION@:@LMON_AGE@ - --libmonmwapi_la_LIBADD = @LIBCOMM@ @LIBGCRYPT@ @LIBGPGERR@ -+ -+libmonmwapi_la_LIBADD = $(top_builddir)/@COMMLOC@/@LIBCOMM@ $(top_builddir)/@GCRYPTLOC@/@LIBGCRYPT@ $(top_builddir)/@GPGERRLOC@/@LIBGPGERR@ -Index: tools/cobo/test/Makefile.am -=================================================================== ---- tools/cobo/test/Makefile.am (revision 481) -+++ tools/cobo/test/Makefile.am (working copy) -@@ 
-37,12 +37,12 @@ - - client_SOURCES = client.c - --client_LDFLAGS = -L$(top_srcdir)/@COMMLOC@ -+client_LDFLAGS = - --client_LDADD = @LIBCOMM@ -+client_LDADD = $(top_srcdir)/@COMMLOC@/@LIBCOMM@ - - server_rsh_SOURCES = server_rsh.c - --server_rsh_LDFLAGS = -L$(top_srcdir)/@COMMLOC@ -+server_rsh_LDFLAGS = - --server_rsh_LDADD = @LIBCOMM@ -+server_rsh_LDADD = $(top_srcdir)/@COMMLOC@/@LIBCOMM@ -Index: tools/pmgr_collective/test/Makefile.am -=================================================================== ---- tools/pmgr_collective/test/Makefile.am (revision 481) -+++ tools/pmgr_collective/test/Makefile.am (working copy) -@@ -31,18 +31,18 @@ - ## Jun 10 2008 DHA: Copied from the old Makefile. - ## - --INCLUDES = -I$(top_srcdir)/@COMMLOC@ -+INCLUDES = - - noinst_PROGRAMS = client mpirun_rsh - - client_SOURCES = client.c - --client_LDFLAGS = -L$(top_srcdir)/@COMMLOC@ -+client_LDFLAGS = - --client_LDADD = @LIBCOMM@ -+client_LDADD = @COMMLOC@/@LIBCOMM@ - - mpirun_rsh_SOURCES = mpirun_rsh.c - --mpirun_rsh_LDFLAGS = -L$(top_srcdir)/@COMMLOC@ -+mpirun_rsh_LDFLAGS = - --mpirun_rsh_LDADD = @LIBCOMM@ -+mpirun_rsh_LDADD = @COMMLOC@/@LIBCOMM@ -Index: config/x_ac_bootfabric.m4 -=================================================================== ---- config/x_ac_bootfabric.m4 (revision 481) -+++ config/x_ac_bootfabric.m4 (working copy) -@@ -63,7 +63,7 @@ - #AC_DEFINE(TOOL_SS_ENV, "LMON_SHARED_SECRET", [Define TOOL_SS_ENV]) - #AC_DEFINE(TOOL_SCH_ENV, "LMON_SEC_CHK", [Define TOOL_SCH_ENV]) - #AC_SUBST(COMMLOC, tools/pmgr_collective/src) -- #AC_SUBST(LIBCOMM, -lpmgr_collective) -+ #AC_SUBST(LIBCOMM, libcobo.la) - #else - commfab_found="no" - AC_MSG_ERROR([--with-bootfabric=pmgr is given, but pmgr_collective has been deprecated]) -@@ -87,7 +87,7 @@ - AC_DEFINE(TOOL_SS_ENV, "LMON_SHARED_SECRET", [Define TOOL_SS_ENV]) - AC_DEFINE(TOOL_SCH_ENV, "LMON_SEC_CHK", [Define TOOL_SCH_ENV]) - AC_SUBST(COMMLOC, tools/cobo/src) -- AC_SUBST(LIBCOMM, -lcobo) -+ AC_SUBST(LIBCOMM, libcobo.la) - - if test "x$with_cobo_port" != "xcheck" -a "x$with_cobo_port" != "xyes"; then - AC_DEFINE(COBO_BEGIN_PORT, $with_cobo_port, [Define a beginning port for COBO_BASED]) -@@ -117,7 +117,7 @@ - AC_DEFINE(TOOL_SS_ENV, "LMON_SHARED_SECRET", [Define TOOL_SS_ENV]) - AC_DEFINE(TOOL_SCH_ENV, "LMON_SEC_CHK", [Define TOOL_SCH_ENV]) - AC_SUBST(COMMLOC, tools/cobo/src) -- AC_SUBST(LIBCOMM, -lcobo) -+ AC_SUBST(LIBCOMM, libcobo.la) - - if test "x$with_cobo_port" != "xcheck" -a "x$with_cobo_port" != "xyes"; then - AC_DEFINE(COBO_BEGIN_PORT, $with_cobo_port, [Define a beginning port for COBO_BASED]) -Index: config/x_ac_gcrpyt.m4 -=================================================================== ---- config/x_ac_gcrypt.m4 2011-10-22 00:50:38.000000000 -0700 -+++ config/x_ac_gcrypt.patched.m4 2014-03-14 11:33:59.189220000 -0700 -@@ -55,8 +55,8 @@ - AC_CONFIG_SUBDIRS([tools/libgpg-error]) - AC_SUBST(GPGERRLOC, [tools/libgpg-error/src]) - AC_SUBST(GCRYPTLOC, [tools/libgcrypt/src]) -- AC_SUBST(LIBGCRYPT, [-lgcrypt]) -- AC_SUBST(LIBGPGERR, [-lgpg-error]) -+ AC_SUBST(LIBGCRYPT, [libgcrypt.la]) -+ AC_SUBST(LIBGPGERR, [libgpg-error.la]) - gcrypt_configured="yes" - else - AC_MSG_ERROR([tools/libgpg-error or tools/libgcrypt not found]) - diff --git a/var/spack/packages/lcms/package.py b/var/spack/packages/lcms/package.py deleted file mode 100644 index a53c2f997a..0000000000 --- a/var/spack/packages/lcms/package.py +++ /dev/null @@ -1,19 +0,0 @@ -from spack import * - -class Lcms(Package): - """Little cms is a color management library. 
Implements fast - transforms between ICC profiles. It is focused on speed, and is - portable across several platforms (MIT license).""" - homepage = "http://www.littlecms.com" - url = "http://downloads.sourceforge.net/project/lcms/lcms/2.6/lcms2-2.6.tar.gz" - - version('2.6', 'f4c08d38ceade4a664ebff7228910a33') - - depends_on("jpeg") - depends_on("libtiff") - depends_on("zlib") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/leveldb/package.py b/var/spack/packages/leveldb/package.py deleted file mode 100644 index da68a9cbcb..0000000000 --- a/var/spack/packages/leveldb/package.py +++ /dev/null @@ -1,29 +0,0 @@ -import os -import glob -from spack import * - -class Leveldb(Package): - """LevelDB is a fast key-value storage library written at Google - that provides an ordered mapping from string keys to string values.""" - - homepage = "https://github.com/google/leveldb" - url = "https://github.com/google/leveldb/archive/v1.18.tar.gz" - - version('1.18', '73770de34a2a5ab34498d2e05b2b7fa0') - - depends_on("snappy") - - def install(self, spec, prefix): - make() - - mkdirp(prefix.include) - mkdirp(prefix.lib) - - cp = which('cp') - - # cp --preserve=links libleveldb.* prefix/lib - args = glob.glob('libleveldb.*') - args.append(prefix + '/lib') - cp('--preserve=links', *args) - - cp('-r', 'include/leveldb', prefix + '/include') diff --git a/var/spack/packages/libNBC/package.py b/var/spack/packages/libNBC/package.py deleted file mode 100644 index 6d08f3219c..0000000000 --- a/var/spack/packages/libNBC/package.py +++ /dev/null @@ -1,43 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Libnbc(Package): - """LibNBC is a prototypic implementation of a nonblocking - interface for MPI collective operations. Based on ANSI C and - MPI-1, it supports all MPI-1 collective operations in a - nonblocking manner. LibNBC is distributed under the BSD license. 
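leveldb above has no install target, so its recipe runs make and then copies artifacts by hand with mkdirp and which; libdwarf further on does the same job with Spack's install() helper. A compact sketch of the manual-copy approach; the library and header names are placeholders:

import glob
from spack import *

class Examplemanual(Package):
    """Hypothetical package whose Makefile lacks an install target."""
    homepage = "http://www.example.com"                               # placeholder
    url      = "http://www.example.com/examplemanual-1.0.tar.gz"      # placeholder

    version('1.0', '00000000000000000000000000000000')                # placeholder checksum

    def install(self, spec, prefix):
        make()

        # Create the destination layout, then copy what the build produced.
        mkdirp(prefix.lib)
        mkdirp(prefix.include)

        cp = which('cp')
        for lib in glob.glob('libexample.*'):                         # placeholder name
            cp('--preserve=links', lib, prefix.lib)
        cp('-r', 'include/example', prefix.include)                   # placeholder name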
- """ - homepage = "http://unixer.de/research/nbcoll/libnbc/" - url = "http://unixer.de/research/nbcoll/libnbc/libNBC-1.1.1.tar.gz" - - version('1.1.1', 'ece5c94992591a9fa934a90e5dbe50ce') - - depends_on("mpi") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/libarchive/package.py b/var/spack/packages/libarchive/package.py deleted file mode 100644 index cbd4b89cd0..0000000000 --- a/var/spack/packages/libarchive/package.py +++ /dev/null @@ -1,16 +0,0 @@ -from spack import * - -class Libarchive(Package): - """libarchive: C library and command-line tools for reading and - writing tar, cpio, zip, ISO, and other archive formats.""" - homepage = "http://www.libarchive.org" - url = "http://www.libarchive.org/downloads/libarchive-3.1.2.tar.gz" - - version('3.1.2', 'efad5a503f66329bb9d2f4308b5de98a') - version('3.1.1', '1f3d883daf7161a0065e42a15bbf168f') - version('3.1.0', '095a287bb1fd687ab50c85955692bf3a') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/libcircle/package.py b/var/spack/packages/libcircle/package.py deleted file mode 100644 index 3f7c996fb0..0000000000 --- a/var/spack/packages/libcircle/package.py +++ /dev/null @@ -1,18 +0,0 @@ -import os -from spack import * - -class Libcircle(Package): - """libcircle provides an efficient distributed queue on a cluster, - using self-stabilizing work stealing.""" - - homepage = "https://github.com/hpc/libcircle" - - version('0.2.1-rc.1', '2b1369a5736457239f908abf88143ec2', - url='https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz') - - depends_on('mpi') - - def install(self, spec, prefix): - configure("--prefix=" + prefix) - make() - make("install") diff --git a/var/spack/packages/libdrm/package.py b/var/spack/packages/libdrm/package.py deleted file mode 100644 index 00736b7811..0000000000 --- a/var/spack/packages/libdrm/package.py +++ /dev/null @@ -1,18 +0,0 @@ -from spack import * - -class Libdrm(Package): - """A userspace library for accessing the DRM, direct - rendering manager, on Linux, BSD and other operating - systems that support the ioctl interface.""" - - homepage = "http://dri.freedesktop.org/libdrm/" # no real website... - url = "http://dri.freedesktop.org/libdrm/libdrm-2.4.59.tar.gz" - - version('2.4.59', '105ac7af1afcd742d402ca7b4eb168b6') - version('2.4.33', '86e4e3debe7087d5404461e0032231c8') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/libdwarf/package.py b/var/spack/packages/libdwarf/package.py deleted file mode 100644 index 099a974e93..0000000000 --- a/var/spack/packages/libdwarf/package.py +++ /dev/null @@ -1,81 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. 
-# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * -import os - -# Only build certain parts of dwarf because the other ones break. -dwarf_dirs = ['libdwarf', 'dwarfdump2'] - -class Libdwarf(Package): - """The DWARF Debugging Information Format is of interest to - programmers working on compilers and debuggers (and any one - interested in reading or writing DWARF information). It was - developed by a committee (known as the PLSIG at the time) - starting around 1991. Starting around 1991 SGI developed the - libdwarf and dwarfdump tools for internal use and as part of - SGI IRIX developer tools. Since that time dwarfdump and - libdwarf have been shipped (as an executable and archive - respectively, not source) with every release of the SGI - MIPS/IRIX C compiler.""" - - homepage = "http://www.prevanders.net/dwarf.html" - url = "http://www.prevanders.net/libdwarf-20130729.tar.gz" - list_url = homepage - - version('20130729', '4cc5e48693f7b93b7aa0261e63c0e21d') - version('20130207', '64b42692e947d5180e162e46c689dfbf') - version('20130126', 'ded74a5e90edb5a12aac3c29d260c5db') - - depends_on("libelf") - - parallel = False - - - def install(self, spec, prefix): - # dwarf build does not set arguments for ar properly - make.add_default_arg('ARFLAGS=rcs') - - # Dwarf doesn't provide an install, so we have to do it. - mkdirp(prefix.bin, prefix.include, prefix.lib, prefix.man1) - - with working_dir('libdwarf'): - configure("--prefix=" + prefix, "--enable-shared") - make() - - install('libdwarf.a', prefix.lib) - install('libdwarf.so', prefix.lib) - install('libdwarf.h', prefix.include) - install('dwarf.h', prefix.include) - - with working_dir('dwarfdump2'): - configure("--prefix=" + prefix) - - # This makefile has strings of copy commands that - # cause a race in parallel - make(parallel=False) - - install('dwarfdump', prefix.bin) - install('dwarfdump.conf', prefix.lib) - install('dwarfdump.1', prefix.man1) diff --git a/var/spack/packages/libelf/package.py b/var/spack/packages/libelf/package.py deleted file mode 100644 index 9338b8f393..0000000000 --- a/var/spack/packages/libelf/package.py +++ /dev/null @@ -1,49 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Libelf(Package): - """libelf lets you read, modify or create ELF object files in an - architecture-independent way. The library takes care of size - and endian issues, e.g. you can process a file for SPARC - processors on an Intel-based system.""" - - homepage = "http://www.mr511.de/software/english.html" - url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz" - - version('0.8.13', '4136d7b4c04df68b686570afa26988ac') - version('0.8.12', 'e21f8273d9f5f6d43a59878dc274fec7') - - provides('elf') - - def install(self, spec, prefix): - configure("--prefix=" + prefix, - "--enable-shared", - "--disable-dependency-tracking", - "--disable-debug") - make() - - # The mkdir commands in libelf's install can fail in parallel - make("install", parallel=False) diff --git a/var/spack/packages/libevent/package.py b/var/spack/packages/libevent/package.py deleted file mode 100644 index 11b1083d67..0000000000 --- a/var/spack/packages/libevent/package.py +++ /dev/null @@ -1,30 +0,0 @@ -from spack import * - -class Libevent(Package): - """The libevent API provides a mechanism to execute a callback function - when a specific event occurs on a file descriptor or after a timeout has been - reached. Furthermore, libevent also support callbacks due to signals or regular - timeouts. - """ - - homepage = "http://libevent.org" - url = "https://github.com/downloads/libevent/libevent/libevent-2.0.21-stable.tar.gz" - list_url = "http://libevent.org/old-releases.html" - - version('2.0.21', 'b2405cc9ebf264aa47ff615d9de527a2') - version('2.0.20', '94270cdee32c0cd0aa9f4ee6ede27e8e') - version('2.0.19', '91111579769f46055b0a438f5cc59572') - version('2.0.18', 'aa1ce9bc0dee7b8084f6855765f2c86a') - version('2.0.17', 'dad64aaaaff16b5fbec25160c06fee9a') - version('2.0.16', '899efcffccdb3d5111419df76e7dc8df') - version('2.0.15', '2643abe7ba242df15c08b2cc14ec8759') - version('2.0.14', 'cac0f379da35d3b98f83ac16fcfe1df4') - version('2.0.13', 'af786b4b3f790c9d3279792edf7867fc') - version('2.0.12', '42986228baf95e325778ed328a93e070') - - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/libffi/package.py b/var/spack/packages/libffi/package.py deleted file mode 100644 index acec031717..0000000000 --- a/var/spack/packages/libffi/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class Libffi(Package): - """The libffi library provides a portable, high level programming - interface to various calling conventions. 
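libelf above declares provides('elf'), while recipes such as hdf5 and hypre depend on the mpi/blas/lapack virtuals rather than on a concrete implementation. Both sides of that contract, sketched with placeholder names (in a real repo each class would live in its own package.py, as everywhere else in this patch):

from spack import *

class Examplefoo(Package):
    """Hypothetical provider of the 'foo' virtual package."""
    homepage = "http://www.example.com"                               # placeholder
    url      = "http://www.example.com/examplefoo-1.0.tar.gz"         # placeholder

    version('1.0', '00000000000000000000000000000000')                # placeholder checksum

    provides('foo')      # any spec that needs 'foo' may be satisfied by this package

    def install(self, spec, prefix):
        configure("--prefix=%s" % prefix)
        make()
        make("install")

class Examplefooclient(Package):
    """Hypothetical consumer that only cares about the 'foo' interface."""
    homepage = "http://www.example.com"                               # placeholder
    url      = "http://www.example.com/examplefooclient-1.0.tar.gz"   # placeholder

    version('1.0', '00000000000000000000000000000000')                # placeholder checksum

    depends_on('foo')    # resolved to whichever provider the spec selects

    def install(self, spec, prefix):
        configure("--prefix=%s" % prefix,
                  "--with-foo=%s" % spec['foo'].prefix)
        make()
        make("install")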
This allows a programmer - to call any function specified by a call interface description at - run time.""" - homepage = "https://sourceware.org/libffi/" - - version('3.2.1','83b89587607e3eb65c70d361f13bab43',url = "ftp://sourceware.org/pub/libffi/libffi-3.2.1.tar.gz") - #version('3.1', 'f5898b29bbfd70502831a212d9249d10',url = "ftp://sourceware.org/pub/libffi/libffi-3.1.tar.gz") # Has a bug $(lib64) instead of ${lib64} in libffi.pc - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") - diff --git a/var/spack/packages/libgcrypt/package.py b/var/spack/packages/libgcrypt/package.py deleted file mode 100644 index 1d0a57f317..0000000000 --- a/var/spack/packages/libgcrypt/package.py +++ /dev/null @@ -1,19 +0,0 @@ -from spack import * - -class Libgcrypt(Package): - """Libgcrypt is a general purpose cryptographic library based on - the code from GnuPG. It provides functions for all cryptographic - building blocks: symmetric ciphers, hash algorithms, MACs, public - key algorithms, large integer functions, random numbers and a lot - of supporting functions. """ - homepage = "http://www.gnu.org/software/libgcrypt/" - url = "ftp://ftp.gnupg.org/gcrypt/libgcrypt/libgcrypt-1.6.2.tar.bz2" - - version('1.6.2', 'b54395a93cb1e57619943c082da09d5f') - - depends_on("libgpg-error") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/libgpg-error/package.py b/var/spack/packages/libgpg-error/package.py deleted file mode 100644 index 6c1d1a10a7..0000000000 --- a/var/spack/packages/libgpg-error/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class LibgpgError(Package): - """Libgpg-error is a small library that defines common error - values for all GnuPG components. Among these are GPG, GPGSM, - GPGME, GPG-Agent, libgcrypt, Libksba, DirMngr, Pinentry, - SmartCard Daemon and possibly more in the future. """ - - homepage = "https://www.gnupg.org/related_software/libgpg-error" - url = "ftp://ftp.gnupg.org/gcrypt/libgpg-error/libgpg-error-1.18.tar.bz2" - - version('1.18', '12312802d2065774b787cbfc22cc04e9') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/libjpeg-turbo/package.py b/var/spack/packages/libjpeg-turbo/package.py deleted file mode 100644 index 07ee183947..0000000000 --- a/var/spack/packages/libjpeg-turbo/package.py +++ /dev/null @@ -1,20 +0,0 @@ -from spack import * - -class LibjpegTurbo(Package): - """libjpeg-turbo is a fork of the original IJG libjpeg which uses - SIMD to accelerate baseline JPEG compression and - decompression. libjpeg is a library that implements JPEG image - encoding, decoding and transcoding.""" - homepage = "http://libjpeg-turbo.virtualgl.org" - url = "http://downloads.sourceforge.net/libjpeg-turbo/libjpeg-turbo-1.3.1.tar.gz" - - version('1.3.1', '2c3a68129dac443a72815ff5bb374b05') - - # Can use either of these. 
- depends_on("yasm") - depends_on("nasm") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/libjson-c/package.py b/var/spack/packages/libjson-c/package.py deleted file mode 100644 index c0801cce9c..0000000000 --- a/var/spack/packages/libjson-c/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class LibjsonC(Package): - """ A JSON implementation in C """ - homepage = "https://github.com/json-c/json-c/wiki" - url = "https://s3.amazonaws.com/json-c_releases/releases/json-c-0.11.tar.gz" - - version('0.11', 'aa02367d2f7a830bf1e3376f77881e98') - - def install(self, spec, prefix): - configure('--prefix=%s' % prefix) - - make() - make("install") diff --git a/var/spack/packages/libmng/package.py b/var/spack/packages/libmng/package.py deleted file mode 100644 index e5336ea2c2..0000000000 --- a/var/spack/packages/libmng/package.py +++ /dev/null @@ -1,23 +0,0 @@ -from spack import * - -class Libmng(Package): - """libmng -THE reference library for reading, displaying, writing - and examining Multiple-Image Network Graphics. MNG is the animation - extension to the popular PNG image-format.""" - homepage = "http://sourceforge.net/projects/libmng/" - url = "http://downloads.sourceforge.net/project/libmng/libmng-devel/2.0.2/libmng-2.0.2.tar.gz" - - version('2.0.2', '1ffefaed4aac98475ee6267422cbca55') - - depends_on("jpeg") - depends_on("zlib") - depends_on("lcms") - - def patch(self): - # jpeg requires stdio to beincluded before its headrs. - filter_file(r'^(\#include \)', '#include\n\\1', 'libmng_types.h') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/libmonitor/package.py b/var/spack/packages/libmonitor/package.py deleted file mode 100644 index 3b95b86ddf..0000000000 --- a/var/spack/packages/libmonitor/package.py +++ /dev/null @@ -1,36 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Libmonitor(Package): - """Libmonitor is a library for process and thread control.""" - homepage = "http://hpctoolkit.org" - - version('20130218', svn='http://libmonitor.googlecode.com/svn/trunk/', revision=146) - - def install(self, spec, prefix): - configure("--prefix=" + prefix) - make() - make("install") diff --git a/var/spack/packages/libpciaccess/package.py b/var/spack/packages/libpciaccess/package.py deleted file mode 100644 index 6022fc34a3..0000000000 --- a/var/spack/packages/libpciaccess/package.py +++ /dev/null @@ -1,21 +0,0 @@ -from spack import * - -class Libpciaccess(Package): - """Generic PCI access library.""" - - homepage = "http://cgit.freedesktop.org/xorg/lib/libpciaccess/" - url = "http://cgit.freedesktop.org/xorg/lib/libpciaccess/" - - version('0.13.4', git='http://anongit.freedesktop.org/git/xorg/lib/libpciaccess.git', - tag='libpciaccess-0.13.4') - - depends_on('autoconf') - depends_on('libtool') - - def install(self, spec, prefix): - from subprocess import call - call(["./autogen.sh"]) - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/libpng/package.py b/var/spack/packages/libpng/package.py deleted file mode 100644 index e02b08663e..0000000000 --- a/var/spack/packages/libpng/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - -class Libpng(Package): - """libpng graphics file format""" - homepage = "http://www.libpng.org/pub/png/libpng.html" - url = "http://download.sourceforge.net/libpng/libpng-1.6.16.tar.gz" - - version('1.6.16', '1a4ad377919ab15b54f6cb6a3ae2622d') - version('1.6.15', '829a256f3de9307731d4f52dc071916d') - version('1.6.14', '2101b3de1d5f348925990f9aa8405660') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/libsodium/package.py b/var/spack/packages/libsodium/package.py deleted file mode 100644 index 1c8a16d998..0000000000 --- a/var/spack/packages/libsodium/package.py +++ /dev/null @@ -1,19 +0,0 @@ -from spack import * - -class Libsodium(Package): - """Sodium is a modern, easy-to-use software library for encryption, - decryption, signatures, password hashing and more.""" - homepage = "https://download.libsodium.org/doc/" - url = "https://download.libsodium.org/libsodium/releases/libsodium-1.0.3.tar.gz" - - version('1.0.3', 'b3bcc98e34d3250f55ae196822307fab') - version('1.0.2', 'dc40eb23e293448c6fc908757738003f') - version('1.0.1', '9a221b49fba7281ceaaf5e278d0f4430') - version('1.0.0', '3093dabe4e038d09f0d150cef064b2f7') - version('0.7.1', 'c224fe3923d1dcfe418c65c8a7246316') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/libtiff/package.py b/var/spack/packages/libtiff/package.py deleted file mode 100644 index 63c6704cb8..0000000000 --- a/var/spack/packages/libtiff/package.py +++ /dev/null @@ -1,18 +0,0 @@ -from spack import * - -class Libtiff(Package): - """libtiff graphics format library""" - homepage = "http://www.remotesensing.org/libtiff/" - url = "http://download.osgeo.org/libtiff/tiff-4.0.3.tar.gz" - - version('4.0.3', '051c1068e6a0627f461948c365290410') - - depends_on('jpeg') - depends_on('zlib') - 
depends_on('xz') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/libtool/package.py b/var/spack/packages/libtool/package.py deleted file mode 100644 index a07daf9781..0000000000 --- a/var/spack/packages/libtool/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class Libtool(Package): - """libtool -- library building part of autotools""" - homepage = "https://www.gnu.org/software/libtool/" - url = "http://ftpmirror.gnu.org/libtool/libtool-2.4.2.tar.gz" - - version('2.4.2' , 'd2f3b7d4627e69e13514a40e72a24d50') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/libunwind/package.py b/var/spack/packages/libunwind/package.py deleted file mode 100644 index 239fcbcfd5..0000000000 --- a/var/spack/packages/libunwind/package.py +++ /dev/null @@ -1,38 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Libunwind(Package): - """A portable and efficient C programming interface (API) to determine - the call-chain of a program.""" - homepage = "http://www.nongnu.org/libunwind/" - url = "http://download.savannah.gnu.org/releases/libunwind/libunwind-1.1.tar.gz" - - version('1.1', 'fb4ea2f6fbbe45bf032cd36e586883ce') - - def install(self, spec, prefix): - configure("--prefix=" + prefix) - make() - make("install") diff --git a/var/spack/packages/libuuid/package.py b/var/spack/packages/libuuid/package.py deleted file mode 100644 index 373c5bfcac..0000000000 --- a/var/spack/packages/libuuid/package.py +++ /dev/null @@ -1,16 +0,0 @@ -from spack import * - -class Libuuid(Package): - """Portable uuid C library""" - # FIXME: add a proper url for your package's homepage here. 
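libmonitor and libpciaccess above pin their sources to an svn revision and a git tag respectively, instead of a tarball checksum, and libpciaccess regenerates configure via autogen.sh. A sketch combining those version forms; the repository URLs and labels are placeholders:

from subprocess import call
from spack import *

class Examplevcs(Package):
    """Hypothetical package fetched straight from version control."""
    homepage = "http://www.example.com"                                 # placeholder
    url      = "http://www.example.com"       # not used for the VCS versions below

    version('1.2.0', git='http://git.example.com/examplevcs.git',       # placeholder repo
            tag='v1.2.0')
    version('20150101', svn='http://svn.example.com/examplevcs/trunk',  # placeholder repo
            revision=100)

    depends_on('autoconf')
    depends_on('automake')
    depends_on('libtool')

    def install(self, spec, prefix):
        # A checkout ships no configure script, so generate one first.
        call(["./autogen.sh"])
        configure("--prefix=%s" % prefix)
        make()
        make("install")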
- homepage = "http://sourceforge.net/projects/libuuid/" - url = "http://downloads.sourceforge.net/project/libuuid/libuuid-1.0.3.tar.gz?r=http%3A%2F%2Fsourceforge.net%2Fprojects%2Flibuuid%2F&ts=1433881396&use_mirror=iweb" - - version('1.0.3', 'd44d866d06286c08ba0846aba1086d68') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - # FIXME: Add logic to build and install here - make() - make("install") diff --git a/var/spack/packages/libxcb/package.py b/var/spack/packages/libxcb/package.py deleted file mode 100644 index 16a5525c0d..0000000000 --- a/var/spack/packages/libxcb/package.py +++ /dev/null @@ -1,21 +0,0 @@ -from spack import * - -class Libxcb(Package): - """The X protocol C-language Binding (XCB) is a replacement - for Xlib featuring a small footprint, latency hiding, direct - access to the protocol, improved threading support, and - extensibility.""" - - homepage = "http://xcb.freedesktop.org/" - url = "http://xcb.freedesktop.org/dist/libxcb-1.11.tar.gz" - - version('1.11', '1698dd837d7e6e94d029dbe8b3a82deb') - version('1.11.1', '118623c15a96b08622603a71d8789bf3') - depends_on("python") - depends_on("xcb-proto") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/libxml2/package.py b/var/spack/packages/libxml2/package.py deleted file mode 100644 index 3a0af6b368..0000000000 --- a/var/spack/packages/libxml2/package.py +++ /dev/null @@ -1,20 +0,0 @@ -from spack import * - -class Libxml2(Package): - """Libxml2 is the XML C parser and toolkit developed for the Gnome - project (but usable outside of the Gnome platform), it is free - software available under the MIT License.""" - homepage = "http://xmlsoft.org" - url = "http://xmlsoft.org/sources/libxml2-2.9.2.tar.gz" - - version('2.9.2', '9e6a9aca9d155737868b3dc5fd82f788') - - extends('python') - depends_on('zlib') - depends_on('xz') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/libxshmfence/package.py b/var/spack/packages/libxshmfence/package.py deleted file mode 100644 index 3aa2448b46..0000000000 --- a/var/spack/packages/libxshmfence/package.py +++ /dev/null @@ -1,16 +0,0 @@ -from spack import * - -class Libxshmfence(Package): - """This is a tiny library that exposes a event API on top of Linux - futexes.""" - - homepage = "http://keithp.com/blogs/dri3_extension/" # not really... - url = "http://xorg.freedesktop.org/archive/individual/lib/libxshmfence-1.2.tar.gz" - - version('1.2', 'f0b30c0fc568b22ec524859ee28556f1') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/libxslt/package.py b/var/spack/packages/libxslt/package.py deleted file mode 100644 index f97332d020..0000000000 --- a/var/spack/packages/libxslt/package.py +++ /dev/null @@ -1,24 +0,0 @@ -from spack import * - -class Libxslt(Package): - """Libxslt is the XSLT C library developed for the GNOME - project. XSLT itself is a an XML language to define - transformation for XML. Libxslt is based on libxml2 the XML C - library developed for the GNOME project. 
It also implements - most of the EXSLT set of processor-portable extensions - functions and some of Saxon's evaluate and expressions - extensions.""" - homepage = "http://www.xmlsoft.org/XSLT/index.html" - url = "http://xmlsoft.org/sources/libxslt-1.1.28.tar.gz" - - version('1.1.28', '9667bf6f9310b957254fdcf6596600b7') - - depends_on("libxml2") - depends_on("xz") - depends_on("zlib") - depends_on("libgcrypt") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/llvm-lld/package.py b/var/spack/packages/llvm-lld/package.py deleted file mode 100644 index f229211396..0000000000 --- a/var/spack/packages/llvm-lld/package.py +++ /dev/null @@ -1,46 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class LlvmLld(Package): - """lld - The LLVM Linker - lld is a new set of modular code for creating linker tools.""" - homepage = "http://lld.llvm.org" - url = "http://llvm.org/releases/3.4/lld-3.4.src.tar.gz" - - depends_on('llvm') - - version('3.4', '3b6a17e58c8416c869c14dd37682f78e') - - def install(self, spec, prefix): - env['CXXFLAGS'] = self.compiler.cxx11_flag - - with working_dir('spack-build', create=True): - cmake('..', - '-DLLD_PATH_TO_LLVM_BUILD=%s' % spec['llvm'].prefix, - '-DLLVM_MAIN_SRC_DIR=%s' % spec['llvm'].prefix, - *std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/llvm/package.py b/var/spack/packages/llvm/package.py deleted file mode 100644 index a6759c3033..0000000000 --- a/var/spack/packages/llvm/package.py +++ /dev/null @@ -1,53 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by David Beckingsale, david@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999.
-# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - - -class Llvm(Package): - """The LLVM Project is a collection of modular and reusable compiler and - toolchain technologies. Despite its name, LLVM has little to do with - traditional virtual machines, though it does provide helpful libraries - that can be used to build them. The name "LLVM" itself is not an acronym; - it is the full name of the project. - """ - homepage = 'http://llvm.org/' - url = 'http://llvm.org/releases/3.7.0/llvm-3.7.0.src.tar.xz' - - version('3.7.0', 'b98b9495e5655a672d6cb83e1a180f8e', url='http://llvm.org/releases/3.7.0/llvm-3.7.0.src.tar.xz') - version('3.6.2', '0c1ee3597d75280dee603bae9cbf5cc2', url='http://llvm.org/releases/3.6.2/llvm-3.6.2.src.tar.xz') - version('3.5.1', '2d3d8004f38852aa679e5945b8ce0b14', url='http://llvm.org/releases/3.5.1/llvm-3.5.1.src.tar.xz') - - depends_on('python@2.7:') - - def install(self, spec, prefix): - env['CXXFLAGS'] = self.compiler.cxx11_flag - - with working_dir('spack-build', create=True): - cmake('..', - '-DLLVM_REQUIRES_RTTI=1', - '-DPYTHON_EXECUTABLE=%s/bin/python' % spec['python'].prefix, - *std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/lmdb/package.py b/var/spack/packages/lmdb/package.py deleted file mode 100644 index 875b8100c5..0000000000 --- a/var/spack/packages/lmdb/package.py +++ /dev/null @@ -1,39 +0,0 @@ -import os -from spack import * - -class Lmdb(Package): - """Read-only mirror of official repo on openldap.org. Issues and - pull requests here are ignored. Use OpenLDAP ITS for issues. 
- http://www.openldap.org/software/repo.html""" - - - homepage = "http://www.openldap.org/software/repo.html" - url = "https://github.com/LMDB/lmdb/archive/LMDB_0.9.16.tar.gz" - - version('0.9.16', '0de89730b8f3f5711c2b3a4ba517b648') - - def install(self, spec, prefix): - os.chdir('libraries/liblmdb') - - make() - - mkdirp(prefix.bin) - mkdirp(prefix + '/man/man1') - mkdirp(prefix.lib) - mkdirp(prefix.include) - - bins = ['mdb_stat', 'mdb_copy', 'mdb_dump', 'mdb_load'] - for f in bins: - install(f, prefix.bin) - - mans = ['mdb_stat.1', 'mdb_copy.1', 'mdb_dump.1', 'mdb_load.1'] - for f in mans: - install(f, prefix + '/man/man1') - - libs = ['liblmdb.a', 'liblmdb.so'] - for f in libs: - install(f, prefix.lib) - - includes = ['lmdb.h'] - for f in includes: - install(f, prefix.include) diff --git a/var/spack/packages/lua/package.py b/var/spack/packages/lua/package.py deleted file mode 100644 index 57c443cc2d..0000000000 --- a/var/spack/packages/lua/package.py +++ /dev/null @@ -1,26 +0,0 @@ -from spack import * -import os - -class Lua(Package): - """ The Lua programming language interpreter and library """ - homepage = "http://www.lua.org" - url = "http://www.lua.org/ftp/lua-5.1.5.tar.gz" - - version('5.3.1', '797adacada8d85761c079390ff1d9961') - version('5.3.0', 'a1b0a7e92d0c85bbff7a8d27bf29f8af') - version('5.2.4', '913fdb32207046b273fdb17aad70be13') - version('5.2.3', 'dc7f94ec6ff15c985d2d6ad0f1b35654') - version('5.2.2', 'efbb645e897eae37cad4344ce8b0a614') - version('5.2.1', 'ae08f641b45d737d12d30291a5e5f6e3') - version('5.2.0', 'f1ea831f397214bae8a265995ab1a93e') - version('5.1.5', '2e115fe26e435e33b0d5c022e4490567') - version('5.1.4', 'd0870f2de55d59c1c8419f36e8fac150') - version('5.1.3', 'a70a8dfaa150e047866dc01a46272599') - - depends_on('ncurses') - - def install(self, spec, prefix): - make('INSTALL_TOP=%s' % prefix, - 'MYLDFLAGS=-L%s/lib' % spec['ncurses'].prefix, - 'linux', - 'install') diff --git a/var/spack/packages/lwgrp/package.py b/var/spack/packages/lwgrp/package.py deleted file mode 100644 index 5963382b92..0000000000 --- a/var/spack/packages/lwgrp/package.py +++ /dev/null @@ -1,18 +0,0 @@ -import os -from spack import * - -class Lwgrp(Package): - """The light-weight group library provides process group - representations using O(log N) space and time.""" - - homepage = "https://github.com/hpc/lwgrp" - url = "https://github.com/hpc/lwgrp/releases/download/v1.0.2/lwgrp-1.0.2.tar.gz" - - version('1.0.2', 'ab7ba3bdd8534a651da5076f47f27d8a') - - depends_on('mpi') - - def install(self, spec, prefix): - configure("--prefix=" + prefix) - make() - make("install") diff --git a/var/spack/packages/lwm2/package.py b/var/spack/packages/lwm2/package.py deleted file mode 100644 index 31afff8816..0000000000 --- a/var/spack/packages/lwm2/package.py +++ /dev/null @@ -1,18 +0,0 @@ -from spack import * - -class Lwm2(Package): - """LWM2: Light Weight Measurement Module. This is a PMPI module - that can collect a number of time-sliced MPI and POSIX I/O - measurements from a program.
- """ - homepage = "https://jay.grs.rwth-aachen.de/redmine/projects/lwm2" - - version('torus', hg='https://jay.grs.rwth-aachen.de/hg/lwm2', revision='torus') - - depends_on("papi") - depends_on("mpi") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/matio/package.py b/var/spack/packages/matio/package.py deleted file mode 100644 index 12cfb80926..0000000000 --- a/var/spack/packages/matio/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - - -class Matio(Package): - """matio is an C library for reading and writing Matlab MAT files""" - homepage = "http://sourceforge.net/projects/matio/" - url = "http://downloads.sourceforge.net/project/matio/matio/1.5.2/matio-1.5.2.tar.gz" - - version('1.5.2', '85b007b99916c63791f28398f6a4c6f1') - - def install(self, spec, prefix): - configure('--prefix=%s' % prefix) - - make() - make("install") diff --git a/var/spack/packages/memaxes/package.py b/var/spack/packages/memaxes/package.py deleted file mode 100644 index 76d5d3f831..0000000000 --- a/var/spack/packages/memaxes/package.py +++ /dev/null @@ -1,19 +0,0 @@ -from spack import * - -class Memaxes(Package): - """MemAxes is a visualizer for sampled memory trace data.""" - - homepage = "https://github.com/scalability-llnl/MemAxes" - - version('0.5', '5874f3fda9fd2d313c0ff9684f915ab5', - url='https://github.com/scalability-llnl/MemAxes/archive/v0.5.tar.gz') - - depends_on("cmake@2.8.9:") - depends_on("qt@5:") - - def install(self, spec, prefix): - with working_dir('spack-build', create=True): - cmake('..', *std_cmake_args) - make() - make("install") - diff --git a/var/spack/packages/mesa/package.py b/var/spack/packages/mesa/package.py deleted file mode 100644 index 2a04a8fd51..0000000000 --- a/var/spack/packages/mesa/package.py +++ /dev/null @@ -1,34 +0,0 @@ -from spack import * - -class Mesa(Package): - """Mesa is an open-source implementation of the OpenGL - specification - a system for rendering interactive 3D graphics.""" - - homepage = "http://www.mesa3d.org" - url = "ftp://ftp.freedesktop.org/pub/mesa/older-versions/8.x/8.0.5/MesaLib-8.0.5.tar.gz" - # url = "ftp://ftp.freedesktop.org/pub/mesa/10.4.4/MesaLib-10.4.4.tar.gz" - - # version('10.4.4', '8d863a3c209bf5116b2babfccccc68ce') - version('8.0.5', 'cda5d101f43b8784fa60bdeaca4056f2') - - # mesa 7.x, 8.x, 9.x - depends_on("libdrm@2.4.33") - depends_on("llvm@3.0") - depends_on("libxml2") - - # patch("llvm-fixes.patch") # using newer llvm - - # mesa 10.x - # depends_on("py-mako") - # depends_on("flex") - # depends_on("bison") - # depends_on("dri2proto") - # depends_on("libxcb") - # depends_on("libxshmfence") - - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/metis/package.py b/var/spack/packages/metis/package.py deleted file mode 100644 index 7ce5ae1925..0000000000 --- a/var/spack/packages/metis/package.py +++ /dev/null @@ -1,27 +0,0 @@ -from spack import * - -class Metis(Package): - """METIS is a set of serial programs for partitioning graphs, - partitioning finite element meshes, and producing fill reducing - orderings for sparse matrices. 
The algorithms implemented in - METIS are based on the multilevel recursive-bisection, - multilevel k-way, and multi-constraint partitioning schemes.""" - - homepage = "http://glaros.dtc.umn.edu/gkhome/metis/metis/overview" - url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/metis-5.1.0.tar.gz" - - version('5.1.0', '5465e67079419a69e0116de24fce58fe') - - depends_on('mpi') - - def install(self, spec, prefix): - cmake(".", - '-DGKLIB_PATH=%s/GKlib' % pwd(), - '-DSHARED=1', - '-DCMAKE_C_COMPILER=mpicc', - '-DCMAKE_CXX_COMPILER=mpicxx', - '-DSHARED=1', - *std_cmake_args) - - make() - make("install") diff --git a/var/spack/packages/mpc/package.py b/var/spack/packages/mpc/package.py deleted file mode 100644 index 6fbfca3007..0000000000 --- a/var/spack/packages/mpc/package.py +++ /dev/null @@ -1,42 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Mpc(Package): - """Gnu Mpc is a C library for the arithmetic of complex numbers - with arbitrarily high precision and correct rounding of the - result.""" - homepage = "http://www.multiprecision.org" - url = "ftp://ftp.gnu.org/gnu/mpc/mpc-1.0.2.tar.gz" - - version('1.0.2', '68fadff3358fb3e7976c7a398a0af4c3') - - depends_on("gmp") - depends_on("mpfr") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/mpe2/mpe2.patch b/var/spack/packages/mpe2/mpe2.patch deleted file mode 100644 index 3ade1f04f4..0000000000 --- a/var/spack/packages/mpe2/mpe2.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -rupN mpe2-1.3.0/src/graphics/src/mpe_graphics.c mpe2-1.3.0.new/src/graphics/src/mpe_graphics.c ---- mpe2-1.3.0/src/graphics/src/mpe_graphics.c 2009-06-15 10:36:22.000000000 -0600 -+++ mpe2-1.3.0.new/src/graphics/src/mpe_graphics.c 2014-10-25 00:11:22.000000000 -0600 -@@ -982,7 +982,7 @@ char *string; - return MPE_ERR_BAD_ARGS; - } - -- printf("color = %d, string = %s\n",(int) color, string); -+//printf("color = %d, string = %s\n",(int) color, string); - - XBSetPixVal( graph->xwin, graph->xwin->cmapping[color] ); - returnVal = XDrawString( graph->xwin->disp, XBDrawable(graph->xwin), diff --git a/var/spack/packages/mpe2/package.py b/var/spack/packages/mpe2/package.py deleted file mode 100644 index 27295172cc..0000000000 --- a/var/spack/packages/mpe2/package.py +++ /dev/null @@ -1,28 
+0,0 @@ -from spack import * - -class Mpe2(Package): - """Message Passing Extensions (MPE) -- Parallel, shared X window graphics""" - - homepage = "http://www.mcs.anl.gov/research/projects/perfvis/software/MPE/" - url = "ftp://ftp.mcs.anl.gov/pub/mpi/mpe/mpe2-1.3.0.tar.gz" - - version('1.3.0', '67bf0c7b2e573df3ba0d2059a96c2f7b') - - patch('mpe2.patch') - - depends_on("mpi") - - provides("mpe") - - def install(self, spec, prefix): - configure("--prefix=" + prefix, - "--x-includes=/usr/X11R6/include", - "--x-libraries=/usr/X11R6/lib", - "--enable-mpe_graphics=yes", - "--disable-f77", - "--enable-viewers=no", - "--enable-slog2=no", - "--with-mpicc=mpicc") - - make() - make("install") diff --git a/var/spack/packages/mpfr/package.py b/var/spack/packages/mpfr/package.py deleted file mode 100644 index 9c744a22df..0000000000 --- a/var/spack/packages/mpfr/package.py +++ /dev/null @@ -1,41 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Mpfr(Package): - """The MPFR library is a C library for multiple-precision - floating-point computations with correct rounding.""" - homepage = "http://www.mpfr.org" - url = "http://www.mpfr.org/mpfr-current/mpfr-3.1.3.tar.bz2" - - version('3.1.3', '5fdfa3cfa5c86514ee4a241a1affa138') - # version('3.1.2', 'ee2c3ac63bf0c2359bf08fc3ee094c19') - - depends_on('gmp') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/mpibash/mpibash-4.3.patch b/var/spack/packages/mpibash/mpibash-4.3.patch deleted file mode 100644 index 17e285b0bf..0000000000 --- a/var/spack/packages/mpibash/mpibash-4.3.patch +++ /dev/null @@ -1,1565 +0,0 @@ -diff -Naur bash-4.3/builtins/circle.def mpibash-4.3/builtins/circle.def ---- bash-4.3/builtins/circle.def 1969-12-31 17:00:00.000000000 -0700 -+++ mpibash-4.3/builtins/circle.def 2014-05-13 11:27:37.314100671 -0600 -@@ -0,0 +1,620 @@ -+This file is circle.def, from which is created circle.c. -+It implements all of the "circle_*" builtins in Bash. 
-+ -+$PRODUCES circle.c -+ -+#include -+ -+#include -+#if defined (HAVE_UNISTD_H) -+# ifdef _MINIX -+# include -+# endif -+# include -+#endif -+ -+#include "../bashintl.h" -+#include "../shell.h" -+#include "common.h" -+#include "bashgetopt.h" -+#include -+ -+extern int running_trap, trap_saved_exit_value; -+ -+static int circle_rank; /* Rank in the Libcircle job */ -+static SHELL_VAR *create_func = NULL; /* User-defined callback function for CIRCLE_cb_create. */ -+static SHELL_VAR *process_func = NULL; /* User-defined callback function for CIRCLE_cb_process. */ -+static SHELL_VAR *reduce_init_func = NULL; /* User-defined callback function for CIRCLE_cb_reduce_init. */ -+static SHELL_VAR *reduce_fini_func = NULL; /* User-defined callback function for CIRCLE_cb_reduce_fini. */ -+static SHELL_VAR *reduce_op_func = NULL; /* User-defined callback function for CIRCLE_cb_reduce_op. */ -+static CIRCLE_handle *current_handle = NULL; /* Active handle within a callback or NULL if not within a callback */ -+static int within_reduction = 0; /* 1=within a reduction callback; 0=not */ -+ -+/* Return with a usage message if no arguments remain. */ -+#define YES_ARGS(LIST) \ -+ if ((LIST) == 0) \ -+ { \ -+ builtin_usage (); \ -+ return (EX_USAGE); \ -+ } -+ -+/* Perform the same operation as bind_variable, but with VALUE being a -+ * number, not a string. */ -+static SHELL_VAR * -+bind_variable_number (name, value, flags) -+ const char *name; -+ long value; -+ int flags; -+{ -+ char numstr[25]; /* String version of VALUE */ -+ -+ sprintf (numstr, "%ld", value); -+ return bind_variable (name, numstr, flags); -+} -+ -+/* Invoke the user-defined creation-callback function (create_func). */ -+static void -+internal_create_func (handle) -+ CIRCLE_handle *handle; -+{ -+ WORD_LIST *funcargs; -+ -+ if (create_func == NULL) -+ return; -+ current_handle = handle; -+ funcargs = make_word_list (make_word ("cb_create"), NULL); -+ execute_shell_function (create_func, funcargs); -+ dispose_words (funcargs); -+ current_handle = NULL; -+} -+ -+/* Invoke the user-defined process-callback function (process_func). */ -+static void -+internal_process_func (handle) -+ CIRCLE_handle *handle; -+{ -+ WORD_LIST *funcargs; -+ -+ if (process_func == NULL) -+ return; -+ current_handle = handle; -+ funcargs = make_word_list (make_word ("cb_process"), NULL); -+ execute_shell_function (process_func, funcargs); -+ dispose_words (funcargs); -+ current_handle = NULL; -+} -+ -+/* Invoke the user-defined reduction-initiation callback function -+ * (reduce_init_func). */ -+static void -+internal_reduce_init_func (void) -+{ -+ WORD_LIST *funcargs; -+ -+ if (reduce_init_func == NULL) -+ return; -+ within_reduction = 1; -+ funcargs = make_word_list (make_word ("cb_reduce_init"), NULL); -+ execute_shell_function (reduce_init_func, funcargs); -+ dispose_words (funcargs); -+ within_reduction = 0; -+} -+ -+/* Invoke the user-defined reduction callback function -+ * (reduce_op_func). 
*/ -+static void -+internal_reduce_op_func (buf1, size1, buf2, size2) -+ const void* buf1; -+ size_t size1; -+ const void* buf2; -+ size_t size2; -+{ -+ WORD_LIST *funcargs; -+ -+ if (reduce_op_func == NULL) -+ return; -+ within_reduction = 1; -+ funcargs = make_word_list (make_word (buf2), NULL); -+ funcargs = make_word_list (make_word (buf1), funcargs); -+ funcargs = make_word_list (make_word ("cb_reduce_op"), funcargs); -+ execute_shell_function (reduce_op_func, funcargs); -+ dispose_words (funcargs); -+ within_reduction = 0; -+} -+ -+/* Invoke the user-defined reduction-finalization callback function -+ * (reduce_fini_func). */ -+static void -+internal_reduce_fini_func (buf, size) -+ const void* buf; -+ size_t size; -+{ -+ WORD_LIST *funcargs; -+ -+ if (reduce_fini_func == NULL) -+ return; -+ funcargs = make_word_list (make_word (buf), NULL); -+ funcargs = make_word_list (make_word ("cb_reduce_fini"), funcargs); -+ execute_shell_function (reduce_fini_func, funcargs); -+ dispose_words (funcargs); -+} -+ -+/* Look up a user-provided callback function. */ -+static int -+find_callback_function (list, user_func) -+ WORD_LIST *list; -+ SHELL_VAR **user_func; -+{ -+ char *funcname; /* Name of the user-defined function. */ -+ -+ /* If no argument was provided, nullify the callback function. */ -+ if (list == NULL) -+ { -+ *user_func = NULL; -+ return EXECUTION_SUCCESS; -+ } -+ -+ /* Get the callback function. */ -+ funcname = list->word->word; -+ list = list->next; -+ no_args (list); -+ *user_func = find_function (funcname); -+ if (*user_func == NULL) -+ { -+ builtin_error (_("function %s not found"), funcname); -+ return EXECUTION_FAILURE; -+ } -+ return EXECUTION_SUCCESS; -+} -+ -+/* Initialize Libcircle. */ -+void -+initialize_libcircle (argc, argv) -+ int argc; -+ char **argv; -+{ -+ circle_rank = CIRCLE_init (argc, argv, CIRCLE_DEFAULT_FLAGS); -+ bind_variable_number ("circle_rank", circle_rank, 0); -+ CIRCLE_enable_logging (CIRCLE_LOG_WARN); -+ CIRCLE_cb_create (internal_create_func); -+ CIRCLE_cb_process (internal_process_func); -+ CIRCLE_cb_reduce_init (internal_reduce_init_func); -+ CIRCLE_cb_reduce_op (internal_reduce_op_func); -+ CIRCLE_cb_reduce_fini (internal_reduce_fini_func); -+} -+ -+/* Finalize Libcircle. */ -+void -+finalize_libcircle (void) -+{ -+ CIRCLE_finalize (); -+} -+ -+/* ---------------------------------------------------------------------- */ -+ -+$BUILTIN circle_set_options -+$FUNCTION circle_set_options_builtin -+$SHORT_DOC circle_set_options [flag]... -+Change Libcircle's run-time behavior. -+ -+Arguments: -+ FLAG "split_random", "split_equal", or "create_global" -+ -+Multiple flags can be provided. If no flags are provided, Libcircle -+reverts to its default options. -+ -+Exit Status: -+Returns 0 unless an invalid option is given. -+$END -+/*'*/ -+ -+/* Here is the circle_set_options builtin. 
*/ -+int -+circle_set_options_builtin (list) -+ WORD_LIST *list; -+{ -+ char *word; /* One argument */ -+ int flags = 0; /* Flags to pass to CIRCLE_set_options */ -+ -+ if (list == NULL) -+ flags = CIRCLE_DEFAULT_FLAGS; -+ else -+ while (list != NULL) -+ { -+ word = list->word->word; -+ if (!strcmp (word, "split_random")) -+ flags |= CIRCLE_SPLIT_RANDOM; -+ else if (!strcmp (word, "split_equal")) -+ flags |= CIRCLE_SPLIT_EQUAL; -+ else if (!strcmp (word, "create_global")) -+ flags |= CIRCLE_CREATE_GLOBAL; -+ else -+ { -+ builtin_error (_("invalid flag \"%s\""), word); -+ return (EXECUTION_FAILURE); -+ } -+ list = list->next; -+ } -+ CIRCLE_set_options (flags); -+ return EXECUTION_SUCCESS; -+} -+ -+$BUILTIN circle_cb_create -+$FUNCTION circle_cb_create_builtin -+$SHORT_DOC circle_cb_create [func] -+Register a function that will create work when asked. -+ -+Arguments: -+ FUNC User-defined callback function that will invoke -+ circle_enqueue when called -+ -+If FUNC is omitted, no function will be associated with work creation. -+This can be used to nullify a previous circle_cb_create invocation. -+ -+Exit Status: -+Returns 0 unless an invalid function is given or an error occurs. -+$END -+ -+/* Here is the circle_cb_create builtin. */ -+int -+circle_cb_create_builtin (list) -+ WORD_LIST *list; -+{ -+ return find_callback_function (list, &create_func); -+} -+ -+$BUILTIN circle_cb_process -+$FUNCTION circle_cb_process_builtin -+$SHORT_DOC circle_cb_process [func] -+Register a function that will process work when asked. -+ -+Arguments: -+ FUNC User-defined callback function that will invoke -+ circle_enqueue when called -+ -+If FUNC is omitted, no function will be associated with work processing. -+This can be used to nullify a previous circle_cb_process invocation. -+ -+Exit Status: -+Returns 0 unless an invalid function is given or an error occurs. -+$END -+ -+/* Here is the circle_cb_process builtin. */ -+int -+circle_cb_process_builtin (list) -+ WORD_LIST *list; -+{ -+ return find_callback_function (list, &process_func); -+} -+ -+$BUILTIN circle_begin -+$FUNCTION circle_begin_builtin -+$SHORT_DOC circle_begin -+Begin creation and processing of the distributed work queue. -+ -+Exit Status: -+Returns 0 unless an error occurs. -+$END -+ -+/* Here is the circle_begin builtin. */ -+int -+circle_begin_builtin (list) -+ WORD_LIST *list; -+{ -+ no_args (list); -+ CIRCLE_begin (); -+ return EXECUTION_SUCCESS; -+} -+ -+$BUILTIN circle_enqueue -+$FUNCTION circle_enqueue_builtin -+$SHORT_DOC circle_enqueue work -+Enqueue work onto the distributed queue. -+ -+Arguments: -+ WORK "Work" as represented by an arbitrary string of limited -+ size (generally around 4KB) -+ -+Exit Status: -+Returns 0 unless an error occurs. -+$END -+ -+/* Here is the circle_enqueue builtin. */ -+int -+circle_enqueue_builtin (list) -+ WORD_LIST *list; -+{ -+ char *work; /* Work to perform */ -+ -+ /* Extract the work argument. */ -+ YES_ARGS (list); -+ work = list->word->word; -+ list = list->next; -+ no_args (list); -+ -+ /* Complain if we're not within a proper callback function. */ -+ if (current_handle == NULL) -+ { -+ builtin_error (_("not within a Libcircle \"create\" or \"process\" callback function")); -+ return EXECUTION_FAILURE; -+ } -+ -+ /* Enqueue the work. 
*/ -+ if (current_handle->enqueue (work) == -1) -+ return EXECUTION_FAILURE; -+ return EXECUTION_SUCCESS; -+} -+ -+$BUILTIN circle_dequeue -+$FUNCTION circle_dequeue_builtin -+$SHORT_DOC circle_dequeue var -+Dequeue work from the distributed queue into a variable. -+ -+Arguments: -+ VAR Variable in which to receive previously enqueued "work" -+ -+Exit Status: -+Returns 0 unless an error occurs. -+$END -+ -+/* Here is the circle_dequeue builtin. */ -+int -+circle_dequeue_builtin (list) -+ WORD_LIST *list; -+{ -+ char *varname; /* Variable in which to store the work string */ -+ char work[CIRCLE_MAX_STRING_LEN+1]; /* Work to perform */ -+ -+ /* Extract the variable-name argument. */ -+ YES_ARGS (list); -+ varname = list->word->word; -+ list = list->next; -+ no_args (list); -+ -+ /* Complain if we're not within a callback function. */ -+ if (current_handle == NULL) -+ { -+ builtin_error (_("not within a Libcircle callback function")); -+ return EXECUTION_FAILURE; -+ } -+ -+ /* Dequeue the work and bind it to the given variable. */ -+ if (current_handle->dequeue (work) == -1) -+ return EXECUTION_FAILURE; -+ bind_variable (varname, work, 0); -+ return EXECUTION_SUCCESS; -+} -+ -+$BUILTIN circle_enable_logging -+$FUNCTION circle_enable_logging_builtin -+$SHORT_DOC circle_enable_logging log_level -+Change Libcircle's logging verbosity -+ -+Arguments: -+ LOG_LEVEL "fatal", "error", "warning", "info", or "debug" -+ -+Exit Status: -+Returns 0 unless an invalid option is given. -+$END -+/*'*/ -+ -+/* Here is the circle_enable_logging builtin. */ -+int -+circle_enable_logging_builtin (list) -+ WORD_LIST *list; -+{ -+ char *word; /* One argument */ -+ CIRCLE_loglevel loglevel; /* Level to set */ -+ -+ /* Parse the log level. */ -+ YES_ARGS (list); -+ word = list->word->word; -+ if (!strcmp (word, "fatal")) -+ loglevel = CIRCLE_LOG_FATAL; -+ else if (!strcmp (word, "error")) -+ loglevel = CIRCLE_LOG_ERR; -+ else if (!strcmp (word, "warning")) -+ loglevel = CIRCLE_LOG_WARN; -+ else if (!strcmp (word, "info")) -+ loglevel = CIRCLE_LOG_INFO; -+ else if (!strcmp (word, "debug")) -+ loglevel = CIRCLE_LOG_DBG; -+ else -+ { -+ builtin_error (_("invalid log level \"%s\""), word); -+ return (EXECUTION_FAILURE); -+ } -+ -+ /* Set the log level. */ -+ CIRCLE_enable_logging (loglevel); -+ return EXECUTION_SUCCESS; -+} -+ -+$BUILTIN circle_abort -+$FUNCTION circle_abort_builtin -+$SHORT_DOC circle_abort -+Terminate queue processing. -+ -+Exit Status: -+Returns 0 unless an error occurs. -+$END -+ -+/* Here is the circle_abort builtin. */ -+int -+circle_abort_builtin (list) -+ WORD_LIST *list; -+{ -+ no_args (list); -+ CIRCLE_abort (); -+ return EXECUTION_SUCCESS; -+} -+ -+$BUILTIN circle_checkpoint -+$FUNCTION circle_checkpoint_builtin -+$SHORT_DOC circle_checkpoint -+Checkpoint a work queue to disk. -+ -+Write a file called circle${circle_rank}.txt containing the current -+queue state of rank ${circle_rank}. On a later run, a worker can -+invoke circle_read_restarts to repopulate its queue from such a -+checkpoint file. -+ -+Exit Status: -+Returns 0 unless an error occurs. -+$END -+/*'*/ -+ -+/* Here is the circle_checkpoint builtin. */ -+int -+circle_checkpoint_builtin (list) -+ WORD_LIST *list; -+{ -+ no_args (list); -+ CIRCLE_checkpoint (); -+ return EXECUTION_SUCCESS; -+} -+ -+$BUILTIN circle_read_restarts -+$FUNCTION circle_read_restarts_builtin -+$SHORT_DOC circle_read_restarts -+Repopulate a work queue from a disk checkpoint. 
-+ -+Read queue contents from a file called circle${circle_rank}.txt, which -+was previously produced by circle_checkpoint. -+ -+Exit Status: -+Returns 0 unless an error occurs. -+$END -+/*'*/ -+ -+/* Here is the circle_read_restarts builtin. */ -+int -+circle_read_restarts_builtin (list) -+ WORD_LIST *list; -+{ -+ no_args (list); -+ CIRCLE_read_restarts (); -+ return EXECUTION_SUCCESS; -+} -+ -+$BUILTIN circle_cb_reduce_init -+$FUNCTION circle_cb_reduce_init_builtin -+$SHORT_DOC circle_cb_reduce_init [func] -+Register a function that will initiate a reduction operation. -+ -+Arguments: -+ FUNC User-defined callback function that will invoke -+ circle_reduce when called -+ -+FUNC will be invoked on all ranks. -+ -+If FUNC is omitted, no function will be associated with reduction -+initialization. This can be used to nullify a previous -+circle_cb_reduce_init invocation. -+ -+Exit Status: -+Returns 0 unless an invalid function is given or an error occurs. -+$END -+ -+/* Here is the circle_cb_reduce_init builtin. */ -+int -+circle_cb_reduce_init_builtin (list) -+ WORD_LIST *list; -+{ -+ return find_callback_function (list, &reduce_init_func); -+} -+ -+$BUILTIN circle_cb_reduce_op -+$FUNCTION circle_cb_reduce_op_builtin -+$SHORT_DOC circle_cb_reduce_op [func] -+Register a function that will complete a reduction operation. -+ -+Arguments: -+ FUNC User-defined callback function that will receive -+ two items to reduce and invoke circle_reduce on -+ the reduced value -+ -+If FUNC is omitted, no function will be associated with reduction -+execution. This can be used to nullify a previous circle_cb_reduce_op -+invocation. -+ -+Exit Status: -+Returns 0 unless an invalid function is given or an error occurs. -+$END -+ -+/* Here is the circle_cb_reduce_op builtin. */ -+int -+circle_cb_reduce_op_builtin (list) -+ WORD_LIST *list; -+{ -+ return find_callback_function (list, &reduce_op_func); -+} -+ -+$BUILTIN circle_cb_reduce_fini -+$FUNCTION circle_cb_reduce_fini_builtin -+$SHORT_DOC circle_cb_reduce_fini [func] -+Register a function that will complete a reduction operation. -+ -+Arguments: -+ FUNC User-defined callback function that will receive -+ the final reduced data -+ -+If FUNC is omitted, no function will be associated with reduction -+completion. This can be used to nullify a previous -+circle_cb_reduce_fini invocation. -+ -+Libcircle guarantees that FUNC will be invoked only on rank 0. -+ -+Exit Status: -+Returns 0 unless an invalid function is given or an error occurs. -+$END -+ -+/* Here is the circle_cb_reduce_fini builtin. */ -+int -+circle_cb_reduce_fini_builtin (list) -+ WORD_LIST *list; -+{ -+ return find_callback_function (list, &reduce_fini_func); -+} -+ -+$BUILTIN circle_reduce -+$FUNCTION circle_reduce_builtin -+$SHORT_DOC circle_reduce work -+Seed the next phase of a reduction operation -+ -+Arguments: -+ WORK "Work" as represented by an arbitrary string of limited -+ size (generally around 4KB) -+ -+This function should be called both by the callback function -+registered with circle_reduce_init and the callback function -+registered with circle_reduce_op. -+ -+Exit Status: -+Returns 0 unless an error occurs. -+$END -+ -+/* Here is the circle_reduce builtin. */ -+int -+circle_reduce_builtin (list) -+ WORD_LIST *list; -+{ -+ char *work; /* Work to perform */ -+ -+ /* Extract the work argument. */ -+ YES_ARGS (list); -+ work = list->word->word; -+ list = list->next; -+ no_args (list); -+ -+ /* Complain if we're not within a proper callback function. 
*/ -+ if (!within_reduction) -+ { -+ builtin_error (_("not within a Libcircle \"reduce_init\" or \"reduce_op\" callback function")); -+ return EXECUTION_FAILURE; -+ } -+ -+ /* Reduce the work. */ -+ CIRCLE_reduce (work, strlen (work)); -+ return EXECUTION_SUCCESS; -+} -diff -Naur bash-4.3/builtins/Makefile.in mpibash-4.3/builtins/Makefile.in ---- bash-4.3/builtins/Makefile.in 2012-05-25 07:29:19.000000000 -0600 -+++ mpibash-4.3/builtins/Makefile.in 2014-05-13 11:27:37.314100671 -0600 -@@ -141,7 +141,9 @@ - $(srcdir)/times.def $(srcdir)/trap.def $(srcdir)/type.def \ - $(srcdir)/ulimit.def $(srcdir)/umask.def $(srcdir)/wait.def \ - $(srcdir)/reserved.def $(srcdir)/pushd.def $(srcdir)/shopt.def \ -- $(srcdir)/printf.def $(srcdir)/complete.def $(srcdir)/mapfile.def -+ $(srcdir)/printf.def $(srcdir)/complete.def $(srcdir)/mapfile.def \ -+ $(srcdir)/mpi.def \ -+@CIRCLE@ $(srcdir)/circle.def - - STATIC_SOURCE = common.c evalstring.c evalfile.c getopt.c bashgetopt.c \ - getopt.h -@@ -153,7 +155,9 @@ - jobs.o kill.o let.o mapfile.o \ - pushd.o read.o return.o set.o setattr.o shift.o source.o \ - suspend.o test.o times.o trap.o type.o ulimit.o umask.o \ -- wait.o getopts.o shopt.o printf.o getopt.o bashgetopt.o complete.o -+ wait.o getopts.o shopt.o printf.o getopt.o bashgetopt.o complete.o \ -+ mpi.o \ -+@CIRCLE@ circle.o - - CREATED_FILES = builtext.h builtins.c psize.aux pipesize.h tmpbuiltins.c \ - tmpbuiltins.h -@@ -317,6 +321,8 @@ - getopts.o: getopts.def - reserved.o: reserved.def - complete.o: complete.def -+@CIRCLE@ circle.o: circle.def -+mpi.o: mpi.def - - # C files - bashgetopt.o: ../config.h $(topdir)/bashansi.h $(BASHINCDIR)/ansi_stdlib.h -@@ -644,6 +650,19 @@ - mapfile.o: $(topdir)/subst.h $(topdir)/externs.h $(BASHINCDIR)/maxpath.h - mapfile.o: $(topdir)/shell.h $(topdir)/syntax.h $(topdir)/variables.h $(topdir)/conftypes.h - mapfile.o: $(topdir)/arrayfunc.h ../pathnames.h -+@CIRCLE@ circle.o: $(topdir)/command.h ../config.h $(BASHINCDIR)/memalloc.h $(topdir)/error.h -+@CIRCLE@ circle.o: $(topdir)/general.h $(topdir)/xmalloc.h $(topdir)/subst.h $(topdir)/externs.h -+@CIRCLE@ circle.o: $(topdir)/quit.h $(topdir)/dispose_cmd.h $(topdir)/make_cmd.h -+@CIRCLE@ circle.o: $(topdir)/shell.h $(topdir)/syntax.h $(topdir)/unwind_prot.h $(topdir)/variables.h $(topdir)/conftypes.h -+@CIRCLE@ circle.o: $(BASHINCDIR)/maxpath.h ../pathnames.h -+mpi.o: ../config.h ../config-top.h ../config-bot.h ../bashintl.h -+mpi.o: ../include/gettext.h ../shell.h ../config.h ../bashjmp.h -+mpi.o: ../include/posixjmp.h ../command.h ../syntax.h ../general.h -+mpi.o: ../bashtypes.h ../include/chartypes.h ../xmalloc.h ../bashansi.h -+mpi.o: ../error.h ../variables.h ../array.h ../assoc.h ../hashlib.h -+mpi.o: ../conftypes.h ../arrayfunc.h ../quit.h ../sig.h ../include/maxpath.h -+mpi.o: ../unwind_prot.h ../dispose_cmd.h ../make_cmd.h ../include/ocache.h -+mpi.o: ../subst.h ../pathnames.h ../externs.h common.h bashgetopt.h - - #bind.o: $(RL_LIBSRC)chardefs.h $(RL_LIBSRC)readline.h $(RL_LIBSRC)keymaps.h - -diff -Naur bash-4.3/builtins/mpi.def mpibash-4.3/builtins/mpi.def ---- bash-4.3/builtins/mpi.def 1969-12-31 17:00:00.000000000 -0700 -+++ mpibash-4.3/builtins/mpi.def 2014-05-13 11:27:37.314100671 -0600 -@@ -0,0 +1,744 @@ -+This file is mpi.def, from which is created mpi.c. -+It implements all of the "mpi_*" builtins in Bash. 
-+ -+$PRODUCES mpi.c -+ -+#include -+ -+#include -+#if defined (HAVE_UNISTD_H) -+# ifdef _MINIX -+# include -+# endif -+# include -+#endif -+ -+#include "../bashintl.h" -+#include "../shell.h" -+#include "common.h" -+#include "bashgetopt.h" -+#include -+ -+extern int running_trap, trap_saved_exit_value; -+ -+/* Keep track of who we are within MPI_COMM_WORLD. */ -+static int mpi_rank; -+static int mpi_num_ranks; -+ -+/* Try an MPI operation. Return with an error message on failure. */ -+#define MPI_TRY(STMT) \ -+ do \ -+ { \ -+ int mpierr; \ -+ mpierr = STMT; \ -+ if (mpierr != MPI_SUCCESS) \ -+ return report_mpi_error (mpierr); \ -+ } \ -+ while (0) -+ -+/* Return with a usage message if no arguments remain. */ -+#define YES_ARGS(LIST) \ -+ if ((LIST) == 0) \ -+ { \ -+ builtin_usage (); \ -+ return (EX_USAGE); \ -+ } -+ -+/* Return with an error message if a given variable is read-only or if -+ * we can't write to it for any other reason (e.g., it's defined as a -+ * function). */ -+#define REQUIRE_WRITABLE(NAME) \ -+ do \ -+ { \ -+ SHELL_VAR *bindvar = find_shell_variable (NAME); \ -+ if (bindvar) \ -+ { \ -+ if (readonly_p (bindvar)) \ -+ { \ -+ err_readonly (NAME); \ -+ return (EXECUTION_FAILURE); \ -+ } \ -+ if (unbind_variable (NAME) == -1) \ -+ { \ -+ builtin_error ("Failed to write to variable %s", NAME); \ -+ return (EXECUTION_FAILURE); \ -+ } \ -+ } \ -+ } \ -+ while (0) -+ -+/* Initialize MPI. */ -+void -+initialize_mpi (argc, argv) -+ int argc; -+ char **argv; -+{ -+ int init_done; -+ -+ MPI_Initialized (&init_done); -+ if (!init_done) -+ MPI_Init (&argc, &argv); -+ MPI_Errhandler_set (MPI_COMM_WORLD, MPI_ERRORS_RETURN); -+ MPI_Comm_rank (MPI_COMM_WORLD, &mpi_rank); -+ MPI_Comm_size (MPI_COMM_WORLD, &mpi_num_ranks); -+} -+ -+/* Finalize MPI. */ -+void -+finalize_mpi () -+{ -+ MPI_Finalize (); -+} -+ -+/* Parse an operation name into an MPI_Op. Return 1 on success, 0 on -+ * failure. */ -+static int -+parse_operation (char *name, MPI_Op *op) -+{ -+ /* Define a mapping from operator names to MPI_Op values. */ -+ typedef struct { -+ char *name; /* Operation name (e.g., "sum") */ -+ MPI_Op value; /* Operation value (e.g., MPI_SUM) */ -+ } opname2value_t; -+ static opname2value_t oplist[] = { -+ {"max", MPI_MAX}, -+ {"min", MPI_MIN}, -+ {"sum", MPI_SUM}, -+ {"prod", MPI_PROD}, -+ {"land", MPI_LAND}, -+ {"band", MPI_BAND}, -+ {"lor", MPI_LOR}, -+ {"bor", MPI_BOR}, -+ {"lxor", MPI_LXOR}, -+ {"bxor", MPI_BXOR}, -+ {"maxloc", MPI_MAXLOC}, -+ {"minloc", MPI_MINLOC} -+ }; -+ size_t i; -+ -+ for (i = 0; i < sizeof(oplist)/sizeof(opname2value_t); i++) -+ if (!strcmp(name, oplist[i].name)) -+ { -+ *op = oplist[i].value; -+ if (i > 0) -+ { -+ /* As a performance optimization, bubble up the value we -+ * just found. */ -+ opname2value_t prev = oplist[i - 1]; -+ oplist[i - 1] = oplist[i]; -+ oplist[i] = prev; -+ } -+ return 1; -+ } -+ return 0; -+} -+ -+/* Report an error to the user and return EXECUTION_FAILURE. */ -+static int -+report_mpi_error (mpierr) -+ int mpierr; -+{ -+ char errstr[MPI_MAX_ERROR_STRING]; -+ int errstrlen; -+ -+ MPI_Error_string (mpierr, errstr, &errstrlen); -+ builtin_error ("%s", errstr); -+ return EXECUTION_FAILURE; -+} -+ -+/* Perform the same operation as bind_variable, but with VALUE being a -+ * number, not a string. 
*/ -+static SHELL_VAR * -+bind_variable_number (name, value, flags) -+ const char *name; -+ long value; -+ int flags; -+{ -+ char numstr[25]; /* String version of VALUE */ -+ -+ sprintf (numstr, "%ld", value); -+ return bind_variable (name, numstr, flags); -+} -+ -+/* Perform the same operation as bind_array_variable, but with VALUE -+ * being a number, not a string. */ -+static SHELL_VAR * -+bind_array_variable_number (name, ind, value, flags) -+ char *name; -+ arrayind_t ind; -+ long value; -+ int flags; -+{ -+ char numstr[25]; /* String version of VALUE */ -+ -+ sprintf (numstr, "%ld", value); -+ return bind_array_variable (name, ind, numstr, flags); -+} -+ -+/* Define a reduction-type function (allreduce, scan, exscan, etc.). */ -+typedef int (*reduction_func_t)(void *, void *, int, MPI_Datatype, MPI_Op, MPI_Comm); -+ -+/* Perform any reduction-type operation (allreduce, scan, exscan, etc.). */ -+static int -+reduction_like (list, funcname, func) -+ WORD_LIST *list; -+ char *funcname; -+ reduction_func_t func; -+{ -+ char *word; /* One argument */ -+ struct { -+ long int value; /* Reduced value */ -+ int rank; /* Rank associated with the above */ -+ } number, result; -+ MPI_Op operation = MPI_SUM; /* Operation to perform */ -+ char *varname; /* Name of the variable to bind the results to */ -+ intmax_t n; -+ int i; -+ -+ /* Parse "-O OPERATION" (optional), where OPERATION is a reduction -+ * operation. */ -+ YES_ARGS (list); -+ word = list->word->word; -+ if (ISOPTION (word, 'O')) -+ { -+ list = list->next; -+ if (list == 0) -+ { -+ sh_needarg (funcname); -+ return (EX_USAGE); -+ } -+ word = list->word->word; -+ if (!parse_operation (word, &operation)) -+ { -+ sh_invalidopt ("-O"); -+ return (EX_USAGE); -+ } -+ list = list->next; -+ } -+ -+ /* Parse the argument, which must be a number. */ -+ YES_ARGS (list); -+ word = list->word->word; -+ if (!legal_number (word, &n)) -+ { -+ sh_neednumarg (funcname); -+ return (EX_USAGE); -+ } -+ number.value = (long int) n; -+ number.rank = mpi_rank; -+ list = list->next; -+ -+ /* Parse the target variable, which must not be read-only. */ -+ YES_ARGS (list); -+ varname = list->word->word; -+ if (mpi_rank != 0 || func != MPI_Exscan) -+ REQUIRE_WRITABLE (varname); -+ list = list->next; -+ no_args (list); -+ -+ /* Perform the reduction operation. Bind the given array variable -+ * to the result and, for minloc/maxloc, the associated rank. */ -+ if (mpi_rank != 0 || func != MPI_Exscan) { -+ bind_array_variable (varname, 0, "", 0); -+ bind_array_variable (varname, 1, "", 0); -+ } -+ if (operation == MPI_MINLOC || operation == MPI_MAXLOC) -+ { -+ MPI_TRY (func (&number, &result, 1, MPI_LONG_INT, operation, MPI_COMM_WORLD)); -+ if (mpi_rank != 0 || func != MPI_Exscan) -+ bind_array_variable_number (varname, 1, result.rank, 0); -+ } -+ else -+ MPI_TRY (func (&number.value, &result.value, 1, MPI_LONG, operation, MPI_COMM_WORLD)); -+ if (mpi_rank != 0 || func != MPI_Exscan) -+ bind_array_variable_number (varname, 0, result.value, 0); -+ return EXECUTION_SUCCESS; -+} -+ -+$BUILTIN mpi_comm_rank -+$FUNCTION mpi_comm_rank_builtin -+$SHORT_DOC mpi_comm_rank name -+Return the process's rank in the MPI job. -+ -+Arguments: -+ NAME Scalar variable in which to receive the rank -+ -+Exit Status: -+Returns 0 unless an invalid option is given. -+$END -+/*'*/ -+ -+/* Here is the mpi_comm_rank builtin. 
*/ -+int -+mpi_comm_rank_builtin (list) -+ WORD_LIST *list; -+{ -+ char *varname; /* Name of the variable to bind the results to */ -+ -+ YES_ARGS (list); -+ varname = list->word->word; -+ REQUIRE_WRITABLE (varname); -+ list = list->next; -+ no_args (list); -+ bind_variable_number (varname, mpi_rank, 0); -+ return EXECUTION_SUCCESS; -+} -+ -+$BUILTIN mpi_comm_size -+$FUNCTION mpi_comm_size_builtin -+$SHORT_DOC mpi_comm_size name -+Return the total number of ranks in the MPI job. -+ -+Arguments: -+ NAME Scalar variable in which to receive the number of ranks -+ -+Exit Status: -+Returns 0 unless an invalid option is given. -+$END -+ -+/* Here is the mpi_comm_size builtin. */ -+int -+mpi_comm_size_builtin (list) -+ WORD_LIST *list; -+{ -+ char *varname; /* Name of the variable to bind the results to */ -+ -+ YES_ARGS (list); -+ varname = list->word->word; -+ REQUIRE_WRITABLE (varname); -+ list = list->next; -+ no_args (list); -+ bind_variable_number (varname, mpi_num_ranks, 0); -+ return EXECUTION_SUCCESS; -+} -+ -+$BUILTIN mpi_abort -+$FUNCTION mpi_abort_builtin -+$SHORT_DOC mpi_abort [n] -+Abort all processes in the MPI job and exit the shell. -+ -+Exits not only the caller's shell (with a status of N) but also all -+remote shells that are part of the same MPI job. If N is omitted, the -+exit status is that of the last command executed. -+ -+This command should be used only in extreme circumstances. It is -+better for each process to exit normally on its own. -+$END -+/*'*/ -+ -+/* Here is the mpi_abort builtin. */ -+int -+mpi_abort_builtin (list) -+ WORD_LIST *list; -+{ -+ int exit_value; -+ -+ exit_value = (running_trap == 1 && list == 0) ? trap_saved_exit_value : get_exitstat (list); /* Copied from exit.def */ -+ MPI_TRY (MPI_Abort (MPI_COMM_WORLD, exit_value)); -+ return EXECUTION_FAILURE; -+} -+ -+$BUILTIN mpi_send -+$FUNCTION mpi_send_builtin -+$SHORT_DOC mpi_send [-t tag] rank message -+Send a message to a remote process in the same MPI job. -+ -+Options: -+ -t TAG Send the message using tag TAG (default: 0). TAG must -+ be a nonnegative integer. -+ -+Arguments: -+ RANK Whom to send the message to. RANK must be an integer in -+ the range [0, $(mpi_comm_size)-1]. -+ -+ MESSAGE String to send to rank RANK. -+ -+Exit Status: -+Returns 0 unless an invalid option is given or an error occurs. -+$END -+ -+/* Here is the mpi_send builtin. */ -+int -+mpi_send_builtin (list) -+ WORD_LIST *list; -+{ -+ char *word; /* One argument */ -+ intmax_t target_rank; /* MPI target rank */ -+ char *message; /* Message to send to rank target_rank */ -+ intmax_t tag = 0; /* Message tag to use */ -+ -+ /* Parse "-t TAG" (optional), where TAG is a number or "any". */ -+ YES_ARGS (list); -+ word = list->word->word; -+ if (ISOPTION (word, 't')) -+ { -+ list = list->next; -+ if (list == 0) -+ { -+ sh_needarg ("mpi_recv"); -+ return (EX_USAGE); -+ } -+ word = list->word->word; -+ if (!legal_number (word, &tag)) -+ { -+ sh_neednumarg ("-t"); -+ return (EX_USAGE); -+ } -+ list = list->next; -+ } -+ else if (*word == '-') -+ { -+ sh_invalidopt (word); -+ builtin_usage (); -+ return (EX_USAGE); -+ } -+ -+ /* Parse the target rank, which must be a number. */ -+ YES_ARGS (list); -+ word = list->word->word; -+ if (!legal_number (word, &target_rank)) -+ { -+ builtin_error (_("mpi_send: numeric rank required")); -+ return (EX_USAGE); -+ } -+ list = list->next; -+ -+ /* Parse the message to send. */ -+ YES_ARGS (list); -+ message = list->word->word; -+ list = list->next; -+ no_args (list); -+ -+ /* Send the message. 
*/ -+ MPI_TRY (MPI_Send (message, strlen(message)+1, MPI_BYTE, (int)target_rank, (int)tag, MPI_COMM_WORLD)); -+ return EXECUTION_SUCCESS; -+} -+ -+ -+$BUILTIN mpi_recv -+$FUNCTION mpi_recv_builtin -+$SHORT_DOC mpi_recv [-t tag] rank name -+Receive a message from a remote process in the same MPI job. -+ -+Options: -+ -t TAG Receive only messages sent using tag TAG (default: 0). -+ TAG must be either a nonnegative integer or the string -+ "any" to receive messages sent using any tag. -+ -+Arguments: -+ RANK Receive only messages sent from sender RANK. RANK -+ must either be in the range [0, $(mpi_comm_size)-1] or -+ be the string "any" to receive messages from any sender. -+ -+ NAME Array variable in which to receive the message, sender -+ rank, and tag. -+ -+Exit Status: -+Returns 0 unless an invalid option is given or an error occurs. -+$END -+ -+/* Here is the mpi_recv builtin. */ -+int -+mpi_recv_builtin (list) -+ WORD_LIST *list; -+{ -+ char *word; /* One argument */ -+ intmax_t source_rank; /* MPI source rank */ -+ char *endptr; /* Used for parsing strings into numbers */ -+ MPI_Status status; /* Status of an MPI operation */ -+ int count; /* Message length in bytes */ -+ intmax_t tag = 0; /* Message tag to use */ -+ char *varname; /* Name of the variable to bind the results to */ -+ static char *message = NULL; /* Message received from MPI */ -+ static size_t alloced = 0; /* Number of bytes allocated for the above */ -+ int opt; /* Parsed option */ -+ -+ /* Parse any options provided. */ -+ reset_internal_getopt (); -+ while ((opt = internal_getopt (list, "t:")) != -1) -+ { -+ switch (opt) -+ { -+ case 't': -+ if (!strcmp (list_optarg, "any")) -+ tag = MPI_ANY_TAG; -+ else if (!legal_number (list_optarg, &tag)) -+ { -+ builtin_error (_("-t: numeric argument or \"any\" required")); -+ return (EX_USAGE); -+ } -+ break; -+ -+ default: -+ sh_invalidopt (word); -+ builtin_usage (); -+ return (EX_USAGE); -+ } -+ } -+ list = loptend; -+ -+ /* Parse the source rank, which must be a number or "any". */ -+ YES_ARGS (list); -+ word = list->word->word; -+ if (!legal_number (word, &source_rank)) -+ { -+ if (!strcmp (word, "any")) -+ source_rank = MPI_ANY_SOURCE; -+ else -+ { -+ builtin_error (_("mpi_recv: numeric rank or \"any\" required")); -+ return (EX_USAGE); -+ } -+ } -+ list = list->next; -+ -+ /* Parse the target variable, which must not be read-only. */ -+ YES_ARGS (list); -+ varname = list->word->word; -+ REQUIRE_WRITABLE (varname); -+ list = list->next; -+ no_args (list); -+ -+ /* Receive a message. Because we don't know long the message will -+ * be, we first probe to get the length. */ -+ MPI_TRY (MPI_Probe ((int)source_rank, (int)tag, MPI_COMM_WORLD, &status)); -+ MPI_TRY (MPI_Get_count (&status, MPI_BYTE, &count)); -+ if (alloced < count) -+ { -+ message = xrealloc (message, count); -+ alloced = count; -+ } -+ MPI_TRY (MPI_Recv (message, count, MPI_BYTE, status.MPI_SOURCE, status.MPI_TAG, MPI_COMM_WORLD, &status)); -+ bind_array_variable (varname, 0, message, 0); -+ bind_array_variable_number (varname, 1, status.MPI_SOURCE, 0); -+ bind_array_variable_number (varname, 2, status.MPI_TAG, 0); -+ return EXECUTION_SUCCESS; -+} -+ -+$BUILTIN mpi_barrier -+$FUNCTION mpi_barrier_builtin -+$SHORT_DOC mpi_barrier -+Synchronizes all of the processes in the MPI job. -+ -+No process will return from mpi_barrier until all processes have -+called mpi_barrier. -+ -+Exit Status: -+Returns 0 unless an invalid option is given or an error occurs. -+$END -+ -+/* Here is the mpi_barrier builtin. 
*/ -+int -+mpi_barrier_builtin (list) -+ WORD_LIST *list; -+{ -+ no_args (list); -+ MPI_TRY (MPI_Barrier (MPI_COMM_WORLD)); -+ return EXECUTION_SUCCESS; -+} -+ -+$BUILTIN mpi_bcast -+$FUNCTION mpi_bcast_builtin -+$SHORT_DOC mpi_bcast [message] name -+Broadcast a message to all processes in the same MPI job. -+ -+Arguments: -+ MESSAGE String to broadcast from one process to all the others. -+ -+ NAME Scalar variable in which to receive the broadcast message. -+ -+Exactly one process in the MPI job must specify a message to -+broadcast. No process will return from mpi_bcast until all processes -+have called mpi_bcast. -+ -+Exit Status: -+Returns 0 unless an invalid option is given or an error occurs. -+$END -+ -+/* Here is the mpi_bcast builtin. */ -+int -+mpi_bcast_builtin (list) -+ WORD_LIST *list; -+{ -+ char *word; /* One argument */ -+ int root; /* MPI root rank */ -+ char *root_message; /* Message to broadcast */ -+ int msglen; /* Length in bytes of the above (including the NULL byte) */ -+ char *varname; /* Name of the variable to bind the results to */ -+ static int *all_lengths = NULL; /* List of every rank's msglen */ -+ static char *message = NULL; /* Message received from the root */ -+ static int alloced = 0; /* Bytes allocated for the above */ -+ int i; -+ -+ /* Parse the optional message and target variable, which must not be -+ * read-only. */ -+ YES_ARGS (list); -+ if (list->next == NULL) -+ { -+ /* Non-root */ -+ root_message = NULL; -+ msglen = -1; -+ } -+ else -+ { -+ /* Root */ -+ root_message = list->word->word; -+ msglen = (int) strlen(root_message) + 1; -+ list = list->next; -+ } -+ varname = list->word->word; -+ REQUIRE_WRITABLE (varname); -+ list = list->next; -+ no_args (list); -+ -+ /* Acquire global agreement on the root and the message size. */ -+ if (all_lengths == NULL) -+ all_lengths = xmalloc (mpi_num_ranks*sizeof(int)); -+ MPI_TRY (MPI_Allgather (&msglen, 1, MPI_INT, all_lengths, 1, MPI_INT, MPI_COMM_WORLD)); -+ root = -1; -+ for (i = 0; i < mpi_num_ranks; i++) -+ { -+ if (all_lengths[i] == -1) -+ continue; -+ if (root != -1) -+ { -+ builtin_error (_("mpi_bcast: more than one process specified a message")); -+ return (EXECUTION_FAILURE); -+ } -+ root = i; -+ msglen = all_lengths[i]; -+ } -+ if (root == -1) -+ { -+ builtin_error (_("mpi_bcast: no process specified a message")); -+ return (EXECUTION_FAILURE); -+ } -+ -+ /* Broadcast the message. */ -+ if (mpi_rank == root) -+ { -+ MPI_TRY (MPI_Bcast (root_message, msglen, MPI_BYTE, root, MPI_COMM_WORLD)); -+ bind_variable (varname, root_message, 0); -+ } -+ else -+ { -+ if (alloced < msglen) -+ { -+ message = xrealloc (message, msglen); -+ alloced = msglen; -+ } -+ MPI_TRY (MPI_Bcast (message, msglen, MPI_BYTE, root, MPI_COMM_WORLD)); -+ bind_variable (varname, message, 0); -+ } -+ return EXECUTION_SUCCESS; -+} -+ -+$BUILTIN mpi_scan -+$FUNCTION mpi_scan_builtin -+$SHORT_DOC mpi_scan number name -+Perform an inclusive scan across all processes in the same MPI job. -+ -+ -O OPERATION Operation to perform. Must be one of "max", "min", -+ "sum", "prod", "land", "band", "lor", "bor", "lxor", -+ "bxor", "maxloc", or "minloc" (default: "sum"). -+ -+Arguments: -+ NUMBER Integer to use in the scan operation. -+ -+ NAME Array variable in which to receive the result and, in -+ the case of maxloc and minloc, the associated rank. -+ -+In an inclusive-scan operation, each process i presents a number, -+a[i]. 
Once all processes in the MPI job have presented their number, -+the command returns a[0] to rank 0, a[0]+a[1] to rank 1, -+a[0]+a[1]+a[2] to rank 2, and so forth. The -O option enables "+" to -+be replaced with other operations. -+ -+Inclusive scans can be useful for assigning a unique index to each -+process in the MPI job. -+ -+Exit Status: -+Returns 0 unless an invalid option is given or an error occurs. -+$END -+ -+/* Here is the mpi_scan builtin. */ -+int -+mpi_scan_builtin (list) -+ WORD_LIST *list; -+{ -+ return reduction_like (list, "mpi_scan", MPI_Scan); -+} -+ -+$BUILTIN mpi_exscan -+$FUNCTION mpi_exscan_builtin -+$SHORT_DOC mpi_exscan number name -+Perform an exclusive scan across all processes in the same MPI job. -+ -+ -O OPERATION Operation to perform. Must be one of "max", "min", -+ "sum", "prod", "land", "band", "lor", "bor", "lxor", -+ "bxor", "maxloc", or "minloc" (default: "sum"). -+ -+Arguments: -+ NUMBER Integer to use in the scan operation. -+ -+ NAME Array variable in which to receive the result and, in -+ the case of maxloc and minloc, the associated rank. -+ -+In a exclusive-scan operation, each process i presents a number, a[i]. -+Once all processes in the MPI job have presented their number, the -+command assigns a[0] to NAME on rank 1, a[0]+a[1] to NAME on rank 2, -+a[0]+a[1]+a[2] to NAME on rank 3, and so forth. No assignment is -+performed on rank 0. The -O option enables "+" to be replaced with -+other operations. -+ -+Exclusive scans can be useful for assigning a unique index to each -+process in the MPI job. -+ -+Exit Status: -+Returns 0 unless an invalid option is given or an error occurs. -+$END -+ -+/* Here is the mpi_exscan builtin. */ -+int -+mpi_exscan_builtin (list) -+ WORD_LIST *list; -+{ -+ return reduction_like (list, "mpi_exscan", MPI_Exscan); -+} -+ -+$BUILTIN mpi_allreduce -+$FUNCTION mpi_allreduce_builtin -+$SHORT_DOC mpi_allreduce number name -+Reduce numbers from all processes in an MPI job to a single number. -+ -+Options: -+ -+ -O OPERATION Operation to perform. Must be one of "max", "min", -+ "sum", "prod", "land", "band", "lor", "bor", "lxor", -+ "bxor", "maxloc", or "minloc" (default: "sum"). -+ -+Arguments: -+ NUMBER Integer to use in the allreduce operation. -+ -+ NAME Array variable in which to receive the result and, in -+ the case of maxloc and minloc, the associated rank. -+ -+In an all-reduce operation, each process i presents a number, a[i]. -+Once all processes in the MPI job have presented their number, the -+command returns a[0]+a[1]+...+a[n-1] to all ranks. The -O option -+enables "+" to be replaced with other operations. -+ -+All-reduces can be useful for reaching global agreement (e.g., of a -+termination condition). -+ -+Exit Status: -+Returns 0 unless an invalid option is given or an error occurs. -+$END -+ -+/* Here is the mpi_allreduce builtin. */ -+int -+mpi_allreduce_builtin (list) -+ WORD_LIST *list; -+{ -+ return reduction_like (list, "mpi_allreduce", MPI_Allreduce); -+} -diff -Naur bash-4.3/config.h.in mpibash-4.3/config.h.in ---- bash-4.3/config.h.in 2013-06-29 15:35:33.000000000 -0600 -+++ mpibash-4.3/config.h.in 2014-05-13 11:27:37.314100671 -0600 -@@ -1147,6 +1147,12 @@ - /* Define if you have the `__argz_stringify' function. */ - #undef HAVE___ARGZ_STRINGIFY - -+/* Define if you have both the header file and the libcircle library. */ -+#undef HAVE_LIBCIRCLE -+ -+/* Define if you have the `CIRCLE_cb_reduce_op' function. 
-+#undef HAVE_CIRCLE_CB_REDUCE_OP
-+
- /* End additions for lib/intl */
-
- #include "config-bot.h"
-diff -Naur bash-4.3/configure.ac mpibash-4.3/configure.ac
---- bash-4.3/configure.ac 2014-02-11 08:37:53.000000000 -0700
-+++ mpibash-4.3/configure.ac 2014-05-13 11:27:37.302100179 -0600
-@@ -24,7 +24,7 @@
- AC_REVISION([for Bash 4.3, version 4.063])dnl
-
- define(bashvers, 4.3)
--define(relstatus, release)
-+define(relstatus, MPI)
-
- AC_INIT([bash], bashvers-relstatus, [bug-bash@gnu.org])
-
-@@ -813,6 +813,21 @@
- fi
- ])
-
-+dnl Ensure that we can find an MPI library.
-+AC_CHECK_FUNCS([MPI_Init], [], [
-+  AC_MSG_ERROR([Cannot continue without MPI.  Consider specifying CC=mpicc.])])
-+
-+dnl If we have Libcircle, use it, too.
-+AC_SEARCH_LIBS([CIRCLE_cb_create], [circle], [AC_CHECK_HEADERS([libcircle.h])])
-+if test "x$ac_cv_header_libcircle_h" = xyes; then
-+  libcircle_make_prefix=""
-+  AC_DEFINE([HAVE_LIBCIRCLE], [1], [Define if you have the Libcircle header and library.])
-+  AC_CHECK_FUNCS([CIRCLE_cb_reduce_op])
-+else
-+  libcircle_make_prefix="#"
-+fi
-+AC_SUBST([CIRCLE], [$libcircle_make_prefix])
-+
- BASH_CHECK_DECL(strtoimax)
- BASH_CHECK_DECL(strtol)
- BASH_CHECK_DECL(strtoll)
-diff -Naur bash-4.3/Makefile.in mpibash-4.3/Makefile.in
---- bash-4.3/Makefile.in 2014-01-25 14:27:30.000000000 -0700
-+++ mpibash-4.3/Makefile.in 2014-05-13 11:27:37.314100671 -0600
-@@ -104,7 +104,7 @@
- VERSPROG = bashversion$(EXEEXT)
- VERSOBJ = bashversion.$(OBJEXT)
-
--Program = bash$(EXEEXT)
-+Program = mpibash$(EXEEXT)
- Version = @BASHVERS@
- PatchLevel = `$(BUILD_DIR)/$(VERSPROG) -p`
- RELSTATUS = @RELSTATUS@
-diff -Naur bash-4.3/shell.c mpibash-4.3/shell.c
---- bash-4.3/shell.c 2014-01-14 06:04:32.000000000 -0700
-+++ mpibash-4.3/shell.c 2014-05-13 11:27:37.314100671 -0600
-@@ -107,6 +107,13 @@
- extern char *primary_prompt, *secondary_prompt;
- extern char *this_command_name;
-
-+extern void initialize_mpi __P((int, char **));
-+extern void finalize_mpi __P((void));
-+#ifdef HAVE_LIBCIRCLE
-+extern void initialize_libcircle __P((int, char **));
-+extern void finalize_libcircle __P((void));
-+#endif
-+
- /* Non-zero means that this shell has already been run; i.e. you should
-    call shell_reinitialize () if you need to start afresh. */
- int shell_initialized = 0;
-@@ -324,7 +331,7 @@
- static void init_interactive_script __P((void));
-
- static void set_shell_name __P((char *));
--static void shell_initialize __P((void));
-+static void shell_initialize __P((int, char **));
- static void shell_reinitialize __P((void));
-
- static void show_shell_usage __P((FILE *, int));
-@@ -561,7 +568,7 @@
-
- /* From here on in, the shell must be a normal functioning shell.
-    Variables from the environment are expected to be set, etc. */
--  shell_initialize ();
-+  shell_initialize (argc, argv);
-
-   set_default_lang ();
-   set_default_locale_vars ();
-@@ -941,6 +948,12 @@
-   end_job_control ();
- #endif /* JOB_CONTROL */
-
-+#ifdef HAVE_LIBCIRCLE
-+  finalize_libcircle ();
-+#else
-+  finalize_mpi ();
-+#endif
-+
- /* Always return the exit status of the last command to our parent. */
-   sh_exit (s);
- }
-@@ -1691,7 +1704,9 @@
- /* Do whatever is necessary to initialize the shell.
-    Put new initializations in here.
*/ - static void --shell_initialize () -+shell_initialize (argc, argv) -+ int argc; -+ char **argv; - { - char hostname[256]; - -@@ -1760,6 +1775,17 @@ - initialize_shell_options (privileged_mode||running_setuid); - initialize_bashopts (privileged_mode||running_setuid); - #endif -+ -+ /* Initialize Libcircle and MPI. */ -+#ifdef HAVE_LIBCIRCLE -+ initialize_libcircle (argc, argv); -+ initialize_mpi (argc, argv); -+ bind_variable ("libcircle", "yes", 0); -+#else -+ initialize_mpi (argc, argv); -+ bind_variable ("libcircle", "no", 0); -+#endif -+ bind_variable ("mpibash", "yes", 0); - } - - /* Function called by main () when it appears that the shell has already diff --git a/var/spack/packages/mpibash/package.py b/var/spack/packages/mpibash/package.py deleted file mode 100644 index d0f6dafed6..0000000000 --- a/var/spack/packages/mpibash/package.py +++ /dev/null @@ -1,32 +0,0 @@ -import os -from spack import * - -class Mpibash(Package): - """Parallel scripting right from the Bourne-Again Shell (Bash)""" - homepage = "http://www.ccs3.lanl.gov/~pakin/software/mpibash-4.3.html" - - version('4.3', '81348932d5da294953e15d4814c74dd1', - url="http://ftp.gnu.org/gnu/bash/bash-4.3.tar.gz") - - # patch -p1 < ../mpibash-4.3.patch - patch('mpibash-4.3.patch', level=1, when='@4.3') - - # above patch modifies configure.ac - depends_on('autoconf') - - # uses MPI_Exscan which is in MPI-1.2 and later - depends_on('mpi@1.2:') - - depends_on('libcircle') - - def install(self, spec, prefix): - # run autoconf to rebuild configure - autoconf = which('autoconf') - autoconf() - - configure("--prefix=" + prefix, - "CC=mpicc") - - make(parallel=False) - - make("install") diff --git a/var/spack/packages/mpich/package.py b/var/spack/packages/mpich/package.py deleted file mode 100644 index d48bf878f6..0000000000 --- a/var/spack/packages/mpich/package.py +++ /dev/null @@ -1,92 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * -import os - -class Mpich(Package): - """MPICH is a high performance and widely portable implementation of - the Message Passing Interface (MPI) standard.""" - homepage = "http://www.mpich.org" - url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz" - list_url = "http://www.mpich.org/static/downloads/" - list_depth = 2 - - version('3.1.4', '2ab544607986486562e076b83937bba2') - version('3.1.3', '93cb17f91ac758cbf9174ecb03563778') - version('3.1.2', '7fbf4b81dcb74b07ae85939d1ceee7f1') - version('3.1.1', '40dc408b1e03cc36d80209baaa2d32b7') - version('3.1', '5643dd176499bfb7d25079aaff25f2ec') - version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0') - - provides('mpi@:3.0', when='@3:') - provides('mpi@:1.3', when='@1:') - - def setup_dependent_environment(self, module, spec, dep_spec): - """For dependencies, make mpicc's use spack wrapper.""" - os.environ['MPICH_CC'] = 'cc' - os.environ['MPICH_CXX'] = 'c++' - os.environ['MPICH_F77'] = 'f77' - os.environ['MPICH_F90'] = 'f90' - - - def install(self, spec, prefix): - config_args = ["--prefix=" + prefix, - "--enable-shared"] - - # TODO: Spack should make it so that you can't actually find - # these compilers if they're "disabled" for the current - # compiler configuration. - if not self.compiler.f77: - config_args.append("--disable-f77") - - if not self.compiler.fc: - config_args.append("--disable-fc") - - configure(*config_args) - make() - make("install") - - self.filter_compilers() - - - def filter_compilers(self): - """Run after install to make the MPI compilers use the - compilers that Spack built the package with. - - If this isn't done, they'll have CC, CXX, F77, and FC set - to Spack's generic cc, c++, f77, and f90. We want them to - be bound to whatever compiler they were built with. - """ - bin = self.prefix.bin - mpicc = os.path.join(bin, 'mpicc') - mpicxx = os.path.join(bin, 'mpicxx') - mpif77 = os.path.join(bin, 'mpif77') - mpif90 = os.path.join(bin, 'mpif90') - - kwargs = { 'ignore_absent' : True, 'backup' : False, 'string' : True } - filter_file('CC="cc"', 'CC="%s"' % self.compiler.cc, mpicc, **kwargs) - filter_file('CXX="c++"', 'CXX="%s"' % self.compiler.cxx, mpicxx, **kwargs) - filter_file('F77="f77"', 'F77="%s"' % self.compiler.f77, mpif77, **kwargs) - filter_file('FC="f90"', 'FC="%s"' % self.compiler.fc, mpif90, **kwargs) diff --git a/var/spack/packages/mpileaks/package.py b/var/spack/packages/mpileaks/package.py deleted file mode 100644 index 4ef866588c..0000000000 --- a/var/spack/packages/mpileaks/package.py +++ /dev/null @@ -1,44 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. 
-# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Mpileaks(Package): - """Tool to detect and report leaked MPI objects like MPI_Requests and MPI_Datatypes.""" - - homepage = "https://github.com/hpc/mpileaks" - url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz" - - version('1.0', '8838c574b39202a57d7c2d68692718aa') - - depends_on("mpi") - depends_on("adept-utils") - depends_on("callpath") - - def install(self, spec, prefix): - configure("--prefix=" + prefix, - "--with-adept-utils=" + spec['adept-utils'].prefix, - "--with-callpath=" + spec['callpath'].prefix) - make() - make("install") diff --git a/var/spack/packages/mrnet/package.py b/var/spack/packages/mrnet/package.py deleted file mode 100644 index 6e9766f275..0000000000 --- a/var/spack/packages/mrnet/package.py +++ /dev/null @@ -1,20 +0,0 @@ -from spack import * - -class Mrnet(Package): - """The MRNet Multi-Cast Reduction Network.""" - homepage = "http://paradyn.org/mrnet" - url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz" - - version('4.0.0', 'd00301c078cba57ef68613be32ceea2f') - version('4.1.0', '5a248298b395b329e2371bf25366115c') - - parallel = False - - depends_on("boost") - - def install(self, spec, prefix): - configure("--prefix=%s" %prefix, "--enable-shared") - - make() - make("install") - diff --git a/var/spack/packages/munge/package.py b/var/spack/packages/munge/package.py deleted file mode 100644 index c737ca0354..0000000000 --- a/var/spack/packages/munge/package.py +++ /dev/null @@ -1,20 +0,0 @@ -from spack import * -import os - -class Munge(Package): - """ MUNGE Uid 'N' Gid Emporium """ - homepage = "https://code.google.com/p/munge/" - url = "https://github.com/dun/munge/releases/download/munge-0.5.11/munge-0.5.11.tar.bz2" - - version('0.5.11', 'bd8fca8d5f4c1fcbef1816482d49ee01', url='https://github.com/dun/munge/releases/download/munge-0.5.11/munge-0.5.11.tar.bz2') - - depends_on('openssl') - depends_on('libgcrypt') - - def install(self, spec, prefix): - os.makedirs(os.path.join(prefix, "lib/systemd/system")) - configure("--prefix=%s" % prefix) - - make() - make("install") - diff --git a/var/spack/packages/muster/package.py b/var/spack/packages/muster/package.py deleted file mode 100644 index 722daf3d7f..0000000000 --- a/var/spack/packages/muster/package.py +++ /dev/null @@ -1,22 +0,0 @@ -from spack import * - -class Muster(Package): - """The Muster library provides implementations of sequential and - parallel K-Medoids clustering algorithms. It is intended as a - general framework for parallel cluster analysis, particularly - for performance data analysis on systems with very large - numbers of processes. 
- """ - homepage = "https://github.com/scalability-llnl/muster" - url = "https://github.com/scalability-llnl/muster/archive/v1.0.tar.gz" - - version('1.0.1', 'd709787db7e080447afb6571ac17723c') - version('1.0', '2eec6979a4a36d3a65a792d12969be16') - - depends_on("boost") - depends_on("mpi") - - def install(self, spec, prefix): - cmake(".", *std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/mvapich2/ad_lustre_rwcontig_open_source.patch b/var/spack/packages/mvapich2/ad_lustre_rwcontig_open_source.patch deleted file mode 100644 index ff85845cf8..0000000000 --- a/var/spack/packages/mvapich2/ad_lustre_rwcontig_open_source.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- a/src/mpi/romio/adio/ad_lustre/ad_lustre_rwcontig.c 2013-12-10 12:05:44.806417000 -0800 -+++ b/src/mpi/romio/adio/ad_lustre/ad_lustre_rwcontig.c 2013-12-10 11:53:03.295622000 -0800 -@@ -8,7 +8,7 @@ - * Copyright (C) 2008 Sun Microsystems, Lustre group - */ - --#define _XOPEN_SOURCE 600 -+//#define _XOPEN_SOURCE 600 - #include - #include - #include "ad_lustre.h" diff --git a/var/spack/packages/mvapich2/package.py b/var/spack/packages/mvapich2/package.py deleted file mode 100644 index ca0b1287c1..0000000000 --- a/var/spack/packages/mvapich2/package.py +++ /dev/null @@ -1,104 +0,0 @@ -import os -from spack import * - -class Mvapich2(Package): - """mvapich2 is an MPI implmenetation for infiniband networks.""" - homepage = "http://mvapich.cse.ohio-state.edu/" - - version('1.9', '5dc58ed08fd3142c260b70fe297e127c', - url="http://mvapich.cse.ohio-state.edu/download/mvapich2/mv2/mvapich2-1.9.tgz") - patch('ad_lustre_rwcontig_open_source.patch', when='@1.9') - - version('2.0', '9fbb68a4111a8b6338e476dc657388b4', - url='http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz') - - provides('mpi@:2.2', when='@1.9') # MVAPICH2-1.9 supports MPI 2.2 - provides('mpi@:3.0', when='@2.0') # MVAPICH2-2.0 supports MPI 3.0 - - - def install(self, spec, prefix): - # we'll set different configure flags depending on our environment - configure_args = [] - - # TODO: The MPICH*_FLAGS have a different name for 1.9 - - if '+debug' in spec: - # set configure flags for debug build - configure_args.append("--disable-fast") - configure_args.append("--enable-g=dbg") - configure_args.append("--enable-error-checking=runtime") - configure_args.append("--enable-error-messages=all") - configure_args.append("--enable-nmpi-as-mpi") - - if "%gnu" in spec: - # set variables for GNU compilers - os.environ['MPICHLIB_CFLAGS'] = "-g -O0" - os.environ['MPICHLIB_CXXFLAGS'] = "-g -O0" - os.environ['MPICHLIB_FFLAGS'] = "-g -O0 -fno-second-underscore" - os.environ['MPICHLIB_F90FLAGS'] = "-g -O0 -fno-second-underscore" - elif "%intel" in spec: - # set variables for Inel compilers - os.environ['MPICHLIB_CFLAGS'] = "-g -O0" - os.environ['MPICHLIB_CXXFLAGS'] = "-g -O0" - os.environ['MPICHLIB_FFLAGS'] = "-g -O0" - os.environ['MPICHLIB_F90FLAGS'] = "-g -O0" - elif "%pgi" in spec: - # set variables for PGI compilers - os.environ['MPICHLIB_CFLAGS'] = "-g -O0 -fPIC" - os.environ['MPICHLIB_CXXFLAGS'] = "-g -O0 -fPIC" - os.environ['MPICHLIB_FFLAGS'] = "-g -O0 -fPIC" - os.environ['MPICHLIB_F90FLAGS'] = "-g -O0 -fPIC" - - else: - # set configure flags for normal optimizations - configure_args.append("--enable-fast=all") - configure_args.append("--enable-g=dbg") - configure_args.append("--enable-nmpi-as-mpi") - - if "%gnu" in spec: - # set variables for what compilers - os.environ['MPICHLIB_CFLAGS'] = "-g -O2" - os.environ['MPICHLIB_CXXFLAGS'] = "-g -O2" - 
os.environ['MPICHLIB_FFLAGS'] = "-g -O2 -fno-second-underscore" - os.environ['MPICHLIB_F90FLAGS'] = "-g -O2 -fno-second-underscore" - elif "%intel" in spec: - # set variables for Inel compilers - os.environ['MPICHLIB_CFLAGS'] = "-g -O2" - os.environ['MPICHLIB_CXXFLAGS'] = "-g -O2" - os.environ['MPICHLIB_FFLAGS'] = "-g -O2" - os.environ['MPICHLIB_F90FLAGS'] = "-g -O2" - elif "%pgi" in spec: - # set variables for PGI compilers - os.environ['MPICHLIB_CFLAGS'] = "-g -O2 -fPIC" - os.environ['MPICHLIB_CXXFLAGS'] = "-g -O2 -fPIC" - os.environ['MPICHLIB_FFLAGS'] = "-g -O2 -fPIC" - os.environ['MPICHLIB_F90FLAGS'] = "-g -O2 -fPIC" - - # determine network type by variant - if "+psm" in spec: - # throw this flag on QLogic systems to use PSM - configure_args.append("--with-device=ch3:psm") - else: - # throw this flag on IB systems - configure_args.append("--with-device=ch3:mrail", "--with-rdma=gen2") - - # TODO: shared-memory build - - # TODO: CUDA - - # TODO: other file systems like panasis - - configure( - "--prefix=" + prefix, - "--enable-f77", "--enable-fc", "--enable-cxx", - "--enable-shared", "--enable-sharedlibs=gcc", - "--enable-debuginfo", - "--with-pm=no", "--with-pmi=slurm", - "--enable-romio", "--with-file-system=lustre+nfs+ufs", - "--disable-mpe", "--without-mpe", - "--disable-silent-rules", - *configure_args) - - make() - - make("install") diff --git a/var/spack/packages/nasm/package.py b/var/spack/packages/nasm/package.py deleted file mode 100644 index 933b6a62c5..0000000000 --- a/var/spack/packages/nasm/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class Nasm(Package): - """NASM (Netwide Assembler) is an 80x86 assembler designed for - portability and modularity. It includes a disassembler as well.""" - homepage = "http://www.nasm.us" - url = "http://www.nasm.us/pub/nasm/releasebuilds/2.11.06/nasm-2.11.06.tar.xz" - - version('2.11.06', '2b958e9f5d200641e6fc9564977aecc5') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/ncdu/package.py b/var/spack/packages/ncdu/package.py deleted file mode 100644 index 234f9730d6..0000000000 --- a/var/spack/packages/ncdu/package.py +++ /dev/null @@ -1,28 +0,0 @@ -from spack import * - -class Ncdu(Package): - """ - Ncdu is a disk usage analyzer with an ncurses interface. It is designed - to find space hogs on a remote server where you don't have an entire - gaphical setup available, but it is a useful tool even on regular desktop - systems. Ncdu aims to be fast, simple and easy to use, and should be able - to run in any minimal POSIX-like environment with ncurses installed. 
- """ - - homepage = "http://dev.yorhel.nl/ncdu" - url = "http://dev.yorhel.nl/download/ncdu-1.11.tar.gz" - - version('1.11', '9e44240a5356b029f05f0e70a63c4d12') - version('1.10', '7535decc8d54eca811493e82d4bfab2d') - version('1.9' , '93258079db897d28bb8890e2db89b1fb') - version('1.8' , '94d7a821f8a0d7ba8ef3dd926226f7d5') - version('1.7' , '172047c29d232724cc62e773e82e592a') - - depends_on("ncurses") - - def install(self, spec, prefix): - configure('--prefix=%s' % prefix, - '--with-ncurses=%s' % spec['ncurses']) - - make() - make("install") diff --git a/var/spack/packages/ncurses/package.py b/var/spack/packages/ncurses/package.py deleted file mode 100644 index cc180bbae1..0000000000 --- a/var/spack/packages/ncurses/package.py +++ /dev/null @@ -1,33 +0,0 @@ -from spack import * - -class Ncurses(Package): - """The ncurses (new curses) library is a free software emulation of curses - in System V Release 4.0, and more. It uses terminfo format, supports pads and - color and multiple highlights and forms characters and function-key mapping, - and has all the other SYSV-curses enhancements over BSD curses. - """ - - homepage = "http://invisible-island.net/ncurses/ncurses.html" - - version('5.9', '8cb9c412e5f2d96bc6f459aa8c6282a1', - url='http://ftp.gnu.org/pub/gnu/ncurses/ncurses-5.9.tar.gz') - version('6.0', 'ee13d052e1ead260d7c28071f46eefb1', - url='http://ftp.gnu.org/pub/gnu/ncurses/ncurses-6.0.tar.gz') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--with-shared", - "--enable-widec", - "--disable-pc-files", - "--without-ada") - make() - make("install") - - configure("--prefix=%s" % prefix, - "--with-shared", - "--disable-widec", - "--disable-pc-files", - "--without-ada") - make() - make("install") - diff --git a/var/spack/packages/netcdf/netcdf-4.3.3-mpi.patch b/var/spack/packages/netcdf/netcdf-4.3.3-mpi.patch deleted file mode 100644 index 46dda5fc9d..0000000000 --- a/var/spack/packages/netcdf/netcdf-4.3.3-mpi.patch +++ /dev/null @@ -1,25 +0,0 @@ -diff -Nur netcdf-4.3.3/CMakeLists.txt netcdf-4.3.3.mpi/CMakeLists.txt ---- netcdf-4.3.3/CMakeLists.txt 2015-02-12 16:44:35.000000000 -0500 -+++ netcdf-4.3.3.mpi/CMakeLists.txt 2015-10-14 16:44:41.176300658 -0400 -@@ -753,6 +753,7 @@ - SET(USE_PARALLEL OFF CACHE BOOL "") - MESSAGE(STATUS "Cannot find HDF5 library built with parallel support. 
Disabling parallel build.") - ELSE() -+ FIND_PACKAGE(MPI REQUIRED) - SET(USE_PARALLEL ON CACHE BOOL "") - SET(STATUS_PARALLEL "ON") - ENDIF() -diff -Nur netcdf-4.3.3/liblib/CMakeLists.txt netcdf-4.3.3.mpi/liblib/CMakeLists.txt ---- netcdf-4.3.3/liblib/CMakeLists.txt 2015-02-12 16:44:35.000000000 -0500 -+++ netcdf-4.3.3.mpi/liblib/CMakeLists.txt 2015-10-14 16:44:57.757793634 -0400 -@@ -71,6 +71,10 @@ - SET(TLL_LIBS ${TLL_LIBS} ${CURL_LIBRARY}) - ENDIF() - -+IF(USE_PARALLEL) -+ SET(TLL_LIBS ${TLL_LIBS} ${MPI_C_LIBRARIES}) -+ENDIF() -+ - IF(USE_HDF4) - SET(TLL_LIBS ${TLL_LIBS} ${HDF4_LIBRARIES}) - ENDIF() diff --git a/var/spack/packages/netcdf/package.py b/var/spack/packages/netcdf/package.py deleted file mode 100644 index e1e0d836c6..0000000000 --- a/var/spack/packages/netcdf/package.py +++ /dev/null @@ -1,27 +0,0 @@ -from spack import * - -class Netcdf(Package): - """NetCDF is a set of software libraries and self-describing, machine-independent - data formats that support the creation, access, and sharing of array-oriented - scientific data.""" - - homepage = "http://www.unidata.ucar.edu/software/netcdf/" - url = "ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4.3.3.tar.gz" - - version('4.3.3', '5fbd0e108a54bd82cb5702a73f56d2ae') - - patch('netcdf-4.3.3-mpi.patch') - - # Dependencies: - # >HDF5 - depends_on("hdf5") - - def install(self, spec, prefix): - with working_dir('spack-build', create=True): - cmake('..', - "-DCMAKE_INSTALL_PREFIX:PATH=%s" % prefix, - "-DENABLE_DAP:BOOL=OFF", # Disable DAP. - "-DBUILD_SHARED_LIBS:BOOL=OFF") # Don't build shared libraries (use static libs). - - make() - make("install") diff --git a/var/spack/packages/netgauge/package.py b/var/spack/packages/netgauge/package.py deleted file mode 100644 index c2378b0718..0000000000 --- a/var/spack/packages/netgauge/package.py +++ /dev/null @@ -1,43 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Netgauge(Package): - """Netgauge is a high-precision network parameter measurement - tool. It supports benchmarking of many different network protocols - and communication patterns. The main focus lies on accuracy, - statistical analysis and easy extensibility. 
- """ - homepage = "http://unixer.de/research/netgauge/" - url = "http://unixer.de/research/netgauge/netgauge-2.4.6.tar.gz" - - version('2.4.6', 'e0e040ec6452e93ca21ccc54deac1d7f') - - depends_on("mpi") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/netlib-blas/package.py b/var/spack/packages/netlib-blas/package.py deleted file mode 100644 index 85e97323d3..0000000000 --- a/var/spack/packages/netlib-blas/package.py +++ /dev/null @@ -1,46 +0,0 @@ -from spack import * -import os - - -class NetlibBlas(Package): - """Netlib reference BLAS""" - homepage = "http://www.netlib.org/lapack/" - url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz" - - version('3.5.0', 'b1d3e3e425b2e44a06760ff173104bdf') - - variant('fpic', default=False, description="Build with -fpic compiler option") - - # virtual dependency - provides('blas') - - # Doesn't always build correctly in parallel - parallel = False - - def patch(self): - os.symlink('make.inc.example', 'make.inc') - - mf = FileFilter('make.inc') - mf.filter('^FORTRAN.*', 'FORTRAN = f90') - mf.filter('^LOADER.*', 'LOADER = f90') - mf.filter('^CC =.*', 'CC = cc') - - if '+fpic' in self.spec: - mf.filter('^OPTS.*=.*', 'OPTS = -O2 -frecursive -fpic') - mf.filter('^CFLAGS =.*', 'CFLAGS = -O3 -fpic') - - - def install(self, spec, prefix): - make('blaslib') - - # Tests that blas builds correctly - make('blas_testing') - - # No install provided - mkdirp(prefix.lib) - install('librefblas.a', prefix.lib) - - # Blas virtual package should provide blas.a and libblas.a - with working_dir(prefix.lib): - symlink('librefblas.a', 'blas.a') - symlink('librefblas.a', 'libblas.a') diff --git a/var/spack/packages/netlib-lapack/package.py b/var/spack/packages/netlib-lapack/package.py deleted file mode 100644 index fb6b99e27c..0000000000 --- a/var/spack/packages/netlib-lapack/package.py +++ /dev/null @@ -1,59 +0,0 @@ -from spack import * - -class NetlibLapack(Package): - """ - LAPACK version 3.X is a comprehensive FORTRAN library that does - linear algebra operations including matrix inversions, least - squared solutions to linear sets of equations, eigenvector - analysis, singular value decomposition, etc. It is a very - comprehensive and reputable package that has found extensive - use in the scientific community. - """ - homepage = "http://www.netlib.org/lapack/" - url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz" - - version('3.5.0', 'b1d3e3e425b2e44a06760ff173104bdf') - version('3.4.2', '61bf1a8a4469d4bdb7604f5897179478') - version('3.4.1', '44c3869c38c8335c2b9c2a8bb276eb55') - version('3.4.0', '02d5706ec03ba885fc246e5fa10d8c70') - version('3.3.1', 'd0d533ec9a5b74933c2a1e84eedc58b4') - - variant('shared', default=False, description="Build shared library version") - - # virtual dependency - provides('lapack') - - # blas is a virtual dependency. 
- depends_on('blas') - - depends_on('cmake') - - # Doesn't always build correctly in parallel - parallel = False - - @when('^netlib-blas') - def get_blas_libs(self): - blas = self.spec['netlib-blas'] - return [join_path(blas.prefix.lib, 'blas.a')] - - - @when('^atlas') - def get_blas_libs(self): - blas = self.spec['atlas'] - return [join_path(blas.prefix.lib, l) - for l in ('libf77blas.a', 'libatlas.a')] - - - def install(self, spec, prefix): - blas_libs = ";".join(self.get_blas_libs()) - cmake_args = [".", '-DBLAS_LIBRARIES=' + blas_libs] - - if '+shared' in spec: - cmake_args.append('-DBUILD_SHARED_LIBS=ON') - - cmake_args += std_cmake_args - - cmake(*cmake_args) - make() - make("install") - diff --git a/var/spack/packages/nettle/package.py b/var/spack/packages/nettle/package.py deleted file mode 100644 index cd600b0b87..0000000000 --- a/var/spack/packages/nettle/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class Nettle(Package): - """The Nettle package contains the low-level cryptographic library - that is designed to fit easily in many contexts.""" - - homepage = "http://www.example.com" - url = "http://ftp.gnu.org/gnu/nettle/nettle-2.7.1.tar.gz" - - version('2.7', '2caa1bd667c35db71becb93c5d89737f') - - depends_on('gmp') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/ompss/package.py b/var/spack/packages/ompss/package.py deleted file mode 100644 index e09e0a624f..0000000000 --- a/var/spack/packages/ompss/package.py +++ /dev/null @@ -1,50 +0,0 @@ -from spack import * -import os -import glob - -# working config lines for ompss 14.06 : -#./nanox-0.7/config.log: $ ./configure --prefix=/usr/gapps/exmatex/ompss --with-mcc=/usr/gapps/exmatex/ompss/ --with-hwloc=/usr -#./mcxx-1.99.2/config.log: $ ./configure --prefix=/usr/gapps/exmatex/ompss --with-nanox=/usr/gapps/exmatex/ompss --enable-ompss --with-mpi=/opt/mvapich2-intel-shmem-1.7 --enable-tl-openmp-profile --enable-tl-openmp-intel - -class Ompss(Package): - """OmpSs is an effort to integrate features from the StarSs - programming model developed by BSC into a single programming - model. In particular, our objective is to extend OpenMP with - new directives to support asynchronous parallelism and - heterogeneity (devices like GPUs). However, it can also be - understood as new directives extending other accelerator based - APIs like CUDA or OpenCL. 
Our OmpSs environment is built on top - of our Mercurium compiler and Nanos++ runtime system.""" - homepage = "http://pm.bsc.es/" - url = "http://pm.bsc.es/sites/default/files/ftp/ompss/releases/ompss-14.10.tar.gz" - list_url = 'http://pm.bsc.es/ompss-downloads' - - version('14.10', '404d161265748f2f96bb35fd8c7e79ee') - - # all dependencies are optional, really - depends_on("mpi") - #depends_on("openmp") - depends_on("hwloc") - depends_on("extrae") - - def install(self, spec, prefix): - if 'openmpi' in spec: - mpi = spec['openmpi'] - elif 'mpich' in spec: - mpi = spec['mpich'] - elif 'mvapich' in spec: - mpi = spec['mvapich'] - - openmp_options = ["--enable-tl-openmp-profile"] - if spec.satisfies('%intel'): - openmp_options.append( "--enable-tl-openmp-intel" ) - - os.chdir(glob.glob('./nanox-*').pop()) - configure("--prefix=%s" % prefix, "--with-mcc=%s" % prefix, "--with-extrae=%s" % spec['extrae'].prefix, "--with-hwloc=%s" % spec['hwloc'].prefix) - make() - make("install") - - os.chdir(glob.glob('../mcxx-*').pop()) - configure("--prefix=%s" % prefix, "--with-nanox=%s" % prefix, "--enable-ompss", "--with-mpi=%s" % mpi.prefix, *openmp_options) - make() - make("install") diff --git a/var/spack/packages/ompt-openmp/package.py b/var/spack/packages/ompt-openmp/package.py deleted file mode 100644 index 5d380ebd77..0000000000 --- a/var/spack/packages/ompt-openmp/package.py +++ /dev/null @@ -1,23 +0,0 @@ -from spack import * - -class OmptOpenmp(Package): - """LLVM/Clang OpenMP runtime with OMPT support. This is a fork of the OpenMPToolsInterface/LLVM-openmp fork of the official LLVM OpenMP mirror. This library provides a drop-in replacement of the OpenMP runtimes for GCC, Intel and LLVM/Clang.""" - homepage = "https://github.com/OpenMPToolsInterface/LLVM-openmp" - url = "http://github.com/khuck/LLVM-openmp/archive/v0.1-spack.tar.gz" - - version('spack', '35227b2726e377faa433fc841226e036') - - # depends_on("foo") - - def install(self, spec, prefix): - with working_dir("runtime/build", create=True): - - # FIXME: Modify the configure line to suit your build system here. - cmake('-DCMAKE_C_COMPILER=%s' % self.compiler.cc, - '-DCMAKE_CXX_COMPILER=%s' % self.compiler.cxx, - '-DCMAKE_INSTALL_PREFIX=%s' % prefix, - '..', *std_cmake_args) - - # FIXME: Add logic to build and install here - make() - make("install") diff --git a/var/spack/packages/opari2/package.py b/var/spack/packages/opari2/package.py deleted file mode 100644 index daaee61e3a..0000000000 --- a/var/spack/packages/opari2/package.py +++ /dev/null @@ -1,65 +0,0 @@ -# FIXME: Add copyright statement here - -from spack import * -from contextlib import closing - -class Opari2(Package): - """OPARI2 is a source-to-source instrumentation tool for OpenMP and - hybrid codes. It surrounds OpenMP directives and runtime library - calls with calls to the POMP2 measurement interface. - OPARI2 will provide you with a new initialization method that allows - for multi-directory and parallel builds as well as the usage of - pre-instrumented libraries. Furthermore, an efficient way of - tracking parent-child relationships was added. Additionally, we - extended OPARI2 to support instrumentation of OpenMP 3.0 - tied tasks. 
""" - - homepage = "http://www.vi-hps.org/projects/score-p" - url = "http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.2.tar.gz" - - version('1.1.2', '9a262c7ca05ff0ab5f7775ae96f3539e') - - backend_user_provided = """\ -CC=cc -CXX=c++ -F77=f77 -FC=f90 -CFLAGS=-fPIC -CXXFLAGS=-fPIC -""" - frontend_user_provided = """\ -CC_FOR_BUILD=cc -CXX_FOR_BUILD=c++ -F77_FOR_BUILD=f70 -FC_FOR_BUILD=f90 -CFLAGS_FOR_BUILD=-fPIC -CXXFLAGS_FOR_BUILD=-fPIC -""" - mpi_user_provided = """\ -MPICC=mpicc -MPICXX=mpicxx -MPIF77=mpif77 -MPIFC=mpif90 -MPI_CFLAGS=-fPIC -MPI_CXXFLAGS=-fPIC -""" - - def install(self, spec, prefix): - # Use a custom compiler configuration, otherwise the score-p - # build system messes with spack's compiler settings. - # Create these three files in the build directory - with closing(open("platform-backend-user-provided", "w")) as backend_file: - backend_file.write(self.backend_user_provided) - with closing(open("platform-frontend-user-provided", "w")) as frontend_file: - frontend_file.write(self.frontend_user_provided) - with closing(open("platform-mpi-user-provided", "w")) as mpi_file: - mpi_file.write(self.mpi_user_provided) - - # FIXME: Modify the configure line to suit your build system here. - configure("--prefix=%s" % prefix, - "--with-custom-compilers", - "--enable-shared") - - # FIXME: Add logic to build and install here - make() - make("install") diff --git a/var/spack/packages/openmpi/ad_lustre_rwcontig_open_source.patch b/var/spack/packages/openmpi/ad_lustre_rwcontig_open_source.patch deleted file mode 100644 index daa825ccbe..0000000000 --- a/var/spack/packages/openmpi/ad_lustre_rwcontig_open_source.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- a/ompi/mca/io/romio/romio/adio/ad_lustre/ad_lustre_rwcontig.c 2013-12-10 12:05:44.806417000 -0800 -+++ b/ompi/mca/io/romio/romio/adio/ad_lustre/ad_lustre_rwcontig.c 2013-12-10 11:53:03.295622000 -0800 -@@ -8,7 +8,7 @@ - * Copyright (C) 2008 Sun Microsystems, Lustre group - */ - --#define _XOPEN_SOURCE 600 -+//#define _XOPEN_SOURCE 600 - #include - #include - #include "ad_lustre.h" diff --git a/var/spack/packages/openmpi/llnl-platforms.patch b/var/spack/packages/openmpi/llnl-platforms.patch deleted file mode 100644 index f515743c4d..0000000000 --- a/var/spack/packages/openmpi/llnl-platforms.patch +++ /dev/null @@ -1,151 +0,0 @@ -diff -Nuar openmpi-1.6.5.orig/contrib/platform/llnl/optimized openmpi-1.6.5.llnl/contrib/platform/llnl/optimized ---- openmpi-1.6.5.orig/contrib/platform/llnl/optimized 1969-12-31 16:00:00.000000000 -0800 -+++ openmpi-1.6.5.llnl/contrib/platform/llnl/optimized 2013-08-08 23:47:12.704029000 -0700 -@@ -0,0 +1,29 @@ -+enable_dlopen=no -+enable_mem_debug=no -+enable_mem_profile=no -+enable_debug_symbols=no -+enable_binaries=yes -+enable_heterogeneous=no -+enable_debug=no -+enable_shared=yes -+enable_static=yes -+enable_memchecker=no -+enable_ipv6=no -+enable_mpi_f77=yes -+enable_mpi_f90=yes -+enable_mpi_cxx=yes -+enable_mpi_cxx_seek=yes -+enable_cxx_exceptions=no -+enable_ft_thread=no -+enable_per_user_config_files=no -+enable_mca_no_build=carto,crs,filem,routed-linear,snapc,pml-dr,pml-crcp2,pml-crcpw,pml-v,pml-example,crcp,btl-tcp -+enable_contrib_no_build=libnbc,vt -+with_slurm=yes -+with_pmi=yes -+with_tm=no -+with_openib=yes -+with_psm=yes -+with_devel_headers=yes -+with_io_romio_flags=--with-file-system=ufs+nfs+lustre -+with_memory_manager=ptmalloc2 -+with_valgrind=no -diff -Nuar openmpi-1.6.5.orig/contrib/platform/llnl/optimized.conf openmpi-1.6.5.llnl/contrib/platform/llnl/optimized.conf ---- 
openmpi-1.6.5.orig/contrib/platform/llnl/optimized.conf 1969-12-31 16:00:00.000000000 -0800 -+++ openmpi-1.6.5.llnl/contrib/platform/llnl/optimized.conf 2013-08-08 23:43:52.907553000 -0700 -@@ -0,0 +1,114 @@ -+# -+# Copyright (c) 2004-2005 The Trustees of Indiana University and Indiana -+# University Research and Technology -+# Corporation. All rights reserved. -+# Copyright (c) 2004-2005 The University of Tennessee and The University -+# of Tennessee Research Foundation. All rights -+# reserved. -+# Copyright (c) 2004-2005 High Performance Computing Center Stuttgart, -+# University of Stuttgart. All rights reserved. -+# Copyright (c) 2004-2005 The Regents of the University of California. -+# All rights reserved. -+# Copyright (c) 2006 Cisco Systems, Inc. All rights reserved. -+# Copyright (c) 2011 Los Alamos National Security, LLC. -+# All rights reserved. -+# $COPYRIGHT$ -+# -+# Additional copyrights may follow -+# -+# $HEADER$ -+# -+ -+# This is the default system-wide MCA parameters defaults file. -+# Specifically, the MCA parameter "mca_param_files" defaults to a -+# value of -+# "$HOME/.openmpi/mca-params.conf:$sysconf/openmpi-mca-params.conf" -+# (this file is the latter of the two). So if the default value of -+# mca_param_files is not changed, this file is used to set system-wide -+# MCA parameters. This file can therefore be used to set system-wide -+# default MCA parameters for all users. Of course, users can override -+# these values if they want, but this file is an excellent location -+# for setting system-specific MCA parameters for those users who don't -+# know / care enough to investigate the proper values for them. -+ -+# Note that this file is only applicable where it is visible (in a -+# filesystem sense). Specifically, MPI processes each read this file -+# during their startup to determine what default values for MCA -+# parameters should be used. mpirun does not bundle up the values in -+# this file from the node where it was run and send them to all nodes; -+# the default value decisions are effectively distributed. Hence, -+# these values are only applicable on nodes that "see" this file. If -+# $sysconf is a directory on a local disk, it is likely that changes -+# to this file will need to be propagated to other nodes. If $sysconf -+# is a directory that is shared via a networked filesystem, changes to -+# this file will be visible to all nodes that share this $sysconf. -+ -+# The format is straightforward: one per line, mca_param_name = -+# rvalue. Quoting is ignored (so if you use quotes or escape -+# characters, they'll be included as part of the value). For example: -+ -+# Disable run-time MPI parameter checking -+# mpi_param_check = 0 -+ -+# Note that the value "~/" will be expanded to the current user's home -+# directory. For example: -+ -+# Change component loading path -+# component_path = /usr/local/lib/openmpi:~/my_openmpi_components -+ -+# See "ompi_info --param all all" for a full listing of Open MPI MCA -+# parameters available and their default values. 
-+# -+ -+# Basic behavior to smooth startup -+mca_component_show_load_errors = 0 -+orte_abort_timeout = 10 -+opal_set_max_sys_limits = 1 -+orte_report_launch_progress = 1 -+ -+# Define timeout for daemons to report back during launch -+orte_startup_timeout = 10000 -+ -+## Protect the shared file systems -+orte_no_session_dirs = /p,/usr/local,/usr/global,/nfs/tmp1,/nfs/tmp2 -+orte_tmpdir_base = /tmp -+ -+## Require an allocation to run - protects the frontend -+## from inadvertent job executions -+orte_allocation_required = 1 -+ -+## MPI behavior -+## Do NOT specify mpi_leave_pinned so system -+## can figure out for itself whether or not -+## it is supported and usable -+orte_notifier = syslog -+ -+## Add the interface for out-of-band communication -+## and set it up -+oob_tcp_if_include=ib0 -+oob_tcp_peer_retries = 1000 -+oob_tcp_disable_family = IPv6 -+oob_tcp_listen_mode = listen_thread -+oob_tcp_sndbuf = 32768 -+oob_tcp_rcvbuf = 32768 -+ -+## Define the MPI interconnects -+btl = sm,openib,self -+ -+## We are using the PSM MTL by default -+## There can only be one! -+pml = cm -+ -+## Setup OpenIB - just in case -+btl_openib_want_fork_support = 0 -+btl_openib_cpc_include = oob -+btl_openib_receive_queues = S,4096,1024:S,12288,512:S,65536,512 -+ -+## Enable cpu affinity -+opal_paffinity_alone = 1 -+ -+## Setup MPI options -+mpi_show_handle_leaks = 0 -+mpi_warn_on_fork = 1 -+mpi_abort_print_stack = 0 -+ diff --git a/var/spack/packages/openmpi/package.py b/var/spack/packages/openmpi/package.py deleted file mode 100644 index 5e429dedf5..0000000000 --- a/var/spack/packages/openmpi/package.py +++ /dev/null @@ -1,109 +0,0 @@ -import os - -from spack import * - - -class Openmpi(Package): - """Open MPI is a project combining technologies and resources from - several other projects (FT-MPI, LA-MPI, LAM/MPI, and PACX-MPI) - in order to build the best MPI library available. A completely - new MPI-2 compliant implementation, Open MPI offers advantages - for system and software vendors, application developers and - computer science researchers. - """ - - homepage = "http://www.open-mpi.org" - - version('1.10.0', '280cf952de68369cebaca886c5ce0304', - url = "http://www.open-mpi.org/software/ompi/v1.10/downloads/openmpi-1.10.0.tar.bz2") - version('1.8.8', '0dab8e602372da1425e9242ae37faf8c', - url = 'http://www.open-mpi.org/software/ompi/v1.8/downloads/openmpi-1.8.8.tar.bz2') - version('1.6.5', '03aed2a4aa4d0b27196962a2a65fc475', - url = "http://www.open-mpi.org/software/ompi/v1.6/downloads/openmpi-1.6.5.tar.bz2") - - patch('ad_lustre_rwcontig_open_source.patch', when="@1.6.5") - patch('llnl-platforms.patch', when="@1.6.5") - - provides('mpi@:2.2', when='@1.6.5') # Open MPI 1.6.5 supports MPI-2.2 - provides('mpi@:3.0', when='@1.8.8') # Open MPI 1.8.8 supports MPI-3.0 - provides('mpi@:3.0', when='@1.10.0') # Open MPI 1.10.0 supports MPI-3.0 - - - def setup_dependent_environment(self, module, spec, dep_spec): - """For dependencies, make mpicc's use spack wrapper.""" - os.environ['OMPI_CC'] = 'cc' - os.environ['OMPI_CXX'] = 'c++' - os.environ['OMPI_FC'] = 'f90' - os.environ['OMPI_F77'] = 'f77' - - - def install(self, spec, prefix): - config_args = ["--prefix=%s" % prefix] - - # TODO: use variants for this, e.g. +lanl, +llnl, etc. 
- # use this for LANL builds, but for LLNL builds, we need: - # "--with-platform=contrib/platform/llnl/optimized" - if self.version == ver("1.6.5") and '+lanl' in spec: - config_args.append("--with-platform=contrib/platform/lanl/tlcc2/optimized-nopanasas") - - # TODO: Spack should make it so that you can't actually find - # these compilers if they're "disabled" for the current - # compiler configuration. - if not self.compiler.f77 and not self.compiler.fc: - config_args.append("--enable-mpi-fortran=no") - - configure(*config_args) - make() - make("install") - - self.filter_compilers() - - - def filter_compilers(self): - """Run after install to make the MPI compilers use the - compilers that Spack built the package with. - - If this isn't done, they'll have CC, CXX and FC set - to Spack's generic cc, c++ and f90. We want them to - be bound to whatever compiler they were built with. - """ - kwargs = { 'ignore_absent' : True, 'backup' : False, 'string' : False } - dir = os.path.join(self.prefix, 'share/openmpi/') - - cc_wrappers = ['mpicc-vt-wrapper-data.txt', 'mpicc-wrapper-data.txt', - 'ortecc-wrapper-data.txt', 'shmemcc-wrapper-data.txt'] - - cxx_wrappers = ['mpic++-vt-wrapper-data.txt', 'mpic++-wrapper-data.txt', - 'ortec++-wrapper-data.txt'] - - fc_wrappers = ['mpifort-vt-wrapper-data.txt', - 'mpifort-wrapper-data.txt', 'shmemfort-wrapper-data.txt'] - - for wrapper in cc_wrappers: - filter_file('compiler=.*', 'compiler=%s' % self.compiler.cc, - os.path.join(dir, wrapper), **kwargs) - - for wrapper in cxx_wrappers: - filter_file('compiler=.*', 'compiler=%s' % self.compiler.cxx, - os.path.join(dir, wrapper), **kwargs) - - for wrapper in fc_wrappers: - filter_file('compiler=.*', 'compiler=%s' % self.compiler.fc, - os.path.join(dir, wrapper), **kwargs) - - # These are symlinks in newer versions, so check that here - f77_wrappers = ['mpif77-vt-wrapper-data.txt', 'mpif77-wrapper-data.txt'] - f90_wrappers = ['mpif90-vt-wrapper-data.txt', 'mpif90-wrapper-data.txt'] - - for wrapper in f77_wrappers: - path = os.path.join(dir, wrapper) - if not os.path.islink(path): - filter_file('compiler=.*', 'compiler=%s' % self.compiler.f77, - path, **kwargs) - for wrapper in f90_wrappers: - path = os.path.join(dir, wrapper) - if not os.path.islink(path): - filter_file('compiler=.*', 'compiler=%s' % self.compiler.fc, - path, **kwargs) - - diff --git a/var/spack/packages/openssl/package.py b/var/spack/packages/openssl/package.py deleted file mode 100644 index c5a8aeb9dc..0000000000 --- a/var/spack/packages/openssl/package.py +++ /dev/null @@ -1,26 +0,0 @@ -from spack import * - -class Openssl(Package): - """The OpenSSL Project is a collaborative effort to develop a - robust, commercial-grade, full-featured, and Open Source - toolkit implementing the Secure Sockets Layer (SSL v2/v3) and - Transport Layer Security (TLS v1) protocols as well as a - full-strength general purpose cryptography library.""" - homepage = "http://www.openssl.org" - url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz" - - version('1.0.1h', '8d6d684a9430d5cc98a62a5d8fbda8cf') - - depends_on("zlib") - parallel = False - - def install(self, spec, prefix): - config = Executable("./config") - config("--prefix=%s" % prefix, - "--openssldir=%s/etc/openssl" % prefix, - "zlib", - "no-krb5", - "shared") - - make() - make("install") diff --git a/var/spack/packages/otf/package.py b/var/spack/packages/otf/package.py deleted file mode 100644 index 52893dd265..0000000000 --- a/var/spack/packages/otf/package.py +++ /dev/null @@ -1,21 +0,0 @@ -from 
spack import * - -class Otf(Package): - """To improve scalability for very large and massively parallel - traces the Open Trace Format (OTF) is developed at ZIH as a - successor format to the Vampir Trace Format (VTF3).""" - - homepage = "http://tu-dresden.de/die_tu_dresden/zentrale_einrichtungen/zih/forschung/projekte/otf/index_html/document_view?set_language=en" - url = "http://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz" - - version('1.12.5salmon', 'bf260198633277031330e3356dcb4eec') - - depends_on('zlib') - - def install(self, spec, prefix): - configure('--prefix=%s' % prefix, - '--without-vtf3', - '--with-zlib', - '--with-zlibsymbols') - make() - make("install") diff --git a/var/spack/packages/otf2/package.py b/var/spack/packages/otf2/package.py deleted file mode 100644 index fa0a5898b6..0000000000 --- a/var/spack/packages/otf2/package.py +++ /dev/null @@ -1,74 +0,0 @@ -# FIXME: Add copyright - -from spack import * -from contextlib import closing -import os - -class Otf2(Package): - """The Open Trace Format 2 is a highly scalable, memory efficient event - trace data format plus support library.""" - - homepage = "http://www.vi-hps.org/score-p" - url = "http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz" - - version('1.4', 'a23c42e936eb9209c4e08b61c3cf5092', - url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz") - version('1.3.1', 'd0ffc4e858455ace4f596f910e68c9f2', - url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.3.1.tar.gz") - version('1.2.1', '8fb3e11fb7489896596ae2c7c83d7fc8', - url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.2.1.tar.gz") - - backend_user_provided = """\ -CC=cc -CXX=c++ -F77=f77 -FC=f90 -CFLAGS=-fPIC -CXXFLAGS=-fPIC -""" - frontend_user_provided = """\ -CC_FOR_BUILD=cc -CXX_FOR_BUILD=c++ -F77_FOR_BUILD=f70 -FC_FOR_BUILD=f90 -CFLAGS_FOR_BUILD=-fPIC -CXXFLAGS_FOR_BUILD=-fPIC -""" - mpi_user_provided = """\ -MPICC=cc -MPICXX=c++ -MPIF77=f77 -MPIFC=f90 -MPI_CFLAGS=-fPIC -MPI_CXXFLAGS=-fPIC -""" - - @when('@:1.2.1') - def version_specific_args(self): - return ["--with-platform=disabled", "CC=cc", "CXX=c++", "F77=f77", "F90=f90", "CFLAGS=-fPIC", "CXXFLAGS=-fPIC"] - - @when('@1.3:') - def version_specific_args(self): - # TODO: figure out what scorep's build does as of otf2 1.3 - return ["--with-custom-compilers"] - - def install(self, spec, prefix): - # Use a custom compiler configuration, otherwise the score-p - # build system messes with spack's compiler settings. - # Create these three files in the build directory - with closing(open("platform-backend-user-provided", "w")) as backend_file: - backend_file.write(self.backend_user_provided) - with closing(open("platform-frontend-user-provided", "w")) as frontend_file: - frontend_file.write(self.frontend_user_provided) - with closing(open("platform-mpi-user-provided", "w")) as mpi_file: - mpi_file.write(self.mpi_user_provided) - - configure_args=["--prefix=%s" % prefix, - "--enable-shared"] - - configure_args.extend(self.version_specific_args()) - - configure(*configure_args) - - make() - make("install") diff --git a/var/spack/packages/pango/package.py b/var/spack/packages/pango/package.py deleted file mode 100644 index df43625bf5..0000000000 --- a/var/spack/packages/pango/package.py +++ /dev/null @@ -1,19 +0,0 @@ -from spack import * - -class Pango(Package): - """Pango is a library for laying out and rendering of text, with - an emphasis on internationalization. 
It can be used anywhere - that text layout is needed, though most of the work on Pango so - far has been done in the context of the GTK+ widget toolkit.""" - homepage = "http://www.pango.org" - url = "http://ftp.gnome.org/pub/gnome/sources/pango/1.36/pango-1.36.8.tar.xz" - - version('1.36.8', '217a9a753006275215fa9fa127760ece') - - depends_on("harfbuzz") - depends_on("cairo") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/papi/package.py b/var/spack/packages/papi/package.py deleted file mode 100644 index 596f7114d6..0000000000 --- a/var/spack/packages/papi/package.py +++ /dev/null @@ -1,35 +0,0 @@ -from spack import * -import os - -class Papi(Package): - """PAPI provides the tool designer and application engineer with a - consistent interface and methodology for use of the performance - counter hardware found in most major microprocessors. PAPI - enables software engineers to see, in near real time, the - relation between software performance and processor events. In - addition Component PAPI provides access to a collection of - components that expose performance measurement opportunites - across the hardware and software stack.""" - homepage = "http://icl.cs.utk.edu/papi/index.html" - url = "http://icl.cs.utk.edu/projects/papi/downloads/papi-5.3.0.tar.gz" - - version('5.3.0', '367961dd0ab426e5ae367c2713924ffb') - - def install(self, spec, prefix): - os.chdir("src/") - - configure_args=["--prefix=%s" % prefix] - - # need to force consistency in the use of compilers - if spec.satisfies('%gcc'): - configure_args.append('CC=gcc') - configure_args.append('MPICH_CC=gcc') - if spec.satisfies('%intel'): - configure_args.append('CC=icc') - configure_args.append('MPICH_CC=icc') - - configure(*configure_args) - - make() - make("install") - diff --git a/var/spack/packages/paraver/package.py b/var/spack/packages/paraver/package.py deleted file mode 100644 index 5f8a153d4c..0000000000 --- a/var/spack/packages/paraver/package.py +++ /dev/null @@ -1,41 +0,0 @@ -from spack import * -import os - -class Paraver(Package): - """"A very powerful performance visualization and analysis tool - based on traces that can be used to analyse any information that - is expressed on its input trace format. 
Traces for parallel MPI, - OpenMP and other programs can be genereated with Extrae.""" - homepage = "http://www.bsc.es/computer-sciences/performance-tools/paraver" - url = "http://www.bsc.es/ssl/apps/performanceTools/files/paraver-sources-4.5.3.tar.gz" - - version('4.5.3', '625de9ec0d639acd18d1aaa644b38f72') - - depends_on("boost") - #depends_on("extrae") - depends_on("wx") - depends_on("wxpropgrid") - - def install(self, spec, prefix): - os.chdir("ptools_common_files") - configure("--prefix=%s" % prefix) - make() - make("install") - - os.chdir("../paraver-kernel") - #"--with-extrae=%s" % spec['extrae'].prefix, - configure("--prefix=%s" % prefix, "--with-ptools-common-files=%s" % prefix, "--with-boost=%s" % spec['boost'].prefix, "--with-boost-serialization=boost_serialization") - make() - make("install") - - os.chdir("../paraver-toolset") - configure("--prefix=%s" % prefix) - make() - make("install") - - os.chdir("../wxparaver") - #"--with-extrae=%s" % spec['extrae'].prefix, - configure("--prefix=%s" % prefix, "--with-paraver=%s" % prefix, "--with-boost=%s" % spec['boost'].prefix, "--with-boost-serialization=boost_serialization", "--with-wxdir=%s" % spec['wx'].prefix.bin) - make() - make("install") - diff --git a/var/spack/packages/paraview/package.py b/var/spack/packages/paraview/package.py deleted file mode 100644 index a0ff812ca2..0000000000 --- a/var/spack/packages/paraview/package.py +++ /dev/null @@ -1,72 +0,0 @@ -from spack import * - -class Paraview(Package): - homepage = 'http://www.paraview.org' - url = 'http://www.paraview.org/files/v4.4/ParaView-v4.4.0-source.tar.gz' - - version('4.4.0', 'fa1569857dd680ebb4d7ff89c2227378', url='http://www.paraview.org/files/v4.4/ParaView-v4.4.0-source.tar.gz') - - variant('python', default=False, description='Enable Python support') - variant('matplotlib', default=False, description='Enable Matplotlib support') - variant('numpy', default=False, description='Enable NumPy support') - - variant('tcl', default=False, description='Enable TCL support') - - variant('mpi', default=False, description='Enable MPI support') - - variant('osmesa', default=False, description='Enable OSMesa support') - variant('qt', default=False, description='Enable Qt support') - - depends_on('python', when='+python') - depends_on('py-numpy', when='+python+numpy') - depends_on('py-matplotlib', when='+python+matplotlib') - depends_on('tcl', when='+tcl') - depends_on('mpi', when='+mpi') - depends_on('qt', when='+qt') - - depends_on('bzip2') - depends_on('freetype') - depends_on('hdf5') # drags in mpi - depends_on('jpeg') - depends_on('libpng') - depends_on('libtiff') - #depends_on('libxml2') # drags in python - depends_on('netcdf') - #depends_on('protobuf') # version mismatches? 
- #depends_on('sqlite') # external version not supported - depends_on('zlib') - - def install(self, spec, prefix): - with working_dir('spack-build', create=True): - def feature_to_bool(feature, on='ON', off='OFF'): - if feature in spec: - return on - return off - - def nfeature_to_bool(feature): - return feature_to_bool(feature, on='OFF', off='ON') - - feature_args = std_cmake_args[:] - feature_args.append('-DPARAVIEW_BUILD_QT_GUI:BOOL=%s' % feature_to_bool('+qt')) - feature_args.append('-DPARAVIEW_ENABLE_PYTHON:BOOL=%s' % feature_to_bool('+python')) - feature_args.append('-DPARAVIEW_USE_MPI:BOOL=%s' % feature_to_bool('+mpi')) - feature_args.append('-DVTK_ENABLE_TCL_WRAPPING:BOOL=%s' % feature_to_bool('+tcl')) - feature_args.append('-DVTK_OPENGL_HAS_OSMESA:BOOL=%s' % feature_to_bool('+osmesa')) - feature_args.append('-DVTK_USE_X:BOOL=%s' % nfeature_to_bool('+osmesa')) - feature_args.append('-DVTK_RENDERING_BACKEND:STRING=%s' % feature_to_bool('+opengl2', 'OpenGL2', 'OpenGL')) - - feature_args.extend(std_cmake_args) - - cmake('..', - '-DCMAKE_INSTALL_PREFIX:PATH=%s' % prefix, - '-DBUILD_TESTING:BOOL=OFF', - '-DVTK_USER_SYSTEM_FREETYPE:BOOL=ON', - '-DVTK_USER_SYSTEM_HDF5:BOOL=ON', - '-DVTK_USER_SYSTEM_JPEG:BOOL=ON', - #'-DVTK_USER_SYSTEM_LIBXML2:BOOL=ON', - '-DVTK_USER_SYSTEM_NETCDF:BOOL=ON', - '-DVTK_USER_SYSTEM_TIFF:BOOL=ON', - '-DVTK_USER_SYSTEM_ZLIB:BOOL=ON', - *feature_args) - make() - make('install') diff --git a/var/spack/packages/parmetis/package.py b/var/spack/packages/parmetis/package.py deleted file mode 100644 index d8cd337304..0000000000 --- a/var/spack/packages/parmetis/package.py +++ /dev/null @@ -1,26 +0,0 @@ -from spack import * - -class Parmetis(Package): - """ParMETIS is an MPI-based parallel library that implements a - variety of algorithms for partitioning unstructured graphs, - meshes, and for computing fill-reducing orderings of sparse - matrices.""" - homepage = "http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview" - url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/parmetis-4.0.3.tar.gz" - - version('4.0.3', 'f69c479586bf6bb7aff6a9bc0c739628') - - depends_on('mpi') - - def install(self, spec, prefix): - cmake(".", - '-DGKLIB_PATH=%s/metis/GKlib' % pwd(), - '-DMETIS_PATH=%s/metis' % pwd(), - '-DSHARED=1', - '-DCMAKE_C_COMPILER=mpicc', - '-DCMAKE_CXX_COMPILER=mpicxx', - '-DSHARED=1', - *std_cmake_args) - - make() - make("install") diff --git a/var/spack/packages/parpack/package.py b/var/spack/packages/parpack/package.py deleted file mode 100644 index 622aceca04..0000000000 --- a/var/spack/packages/parpack/package.py +++ /dev/null @@ -1,43 +0,0 @@ -from spack import * -import os -import shutil - -class Parpack(Package): - """ARPACK is a collection of Fortran77 subroutines designed to solve large - scale eigenvalue problems.""" - - homepage = "http://www.caam.rice.edu/software/ARPACK/download.html" - url = "http://www.caam.rice.edu/software/ARPACK/SRC/parpack96.tar.Z" - - version('96', 'a175f70ff71837a33ff7e4b0b6054f43') - - depends_on('mpi') - depends_on('blas') - depends_on('lapack') - - def patch(self): - # Filter the CJ makefile to make a spack one. - shutil.move('ARMAKES/ARmake.CJ', 'ARmake.inc') - mf = FileFilter('ARmake.inc') - - # Be sure to use Spack F77 wrapper - mf.filter('^FC.*', 'FC = f77') - mf.filter('^FFLAGS.*', 'FFLAGS = -O2 -g') - - # Set up some variables. 
- mf.filter('^PLAT.*', 'PLAT = ') - mf.filter('^home.*', 'home = %s' % os.getcwd()) - mf.filter('^BLASdir.*', 'BLASdir = %s' % self.spec['blas'].prefix) - mf.filter('^LAPACKdir.*', 'LAPACKdir = %s' % self.spec['lapack'].prefix) - mf.filter('^MAKE.*', 'MAKE = make') - - # build the library in our own prefix. - mf.filter('^ARPACKLIB.*', 'PARPACKLIB = %s/libparpack.a' % os.getcwd()) - - - def install(self, spec, prefix): - with working_dir('PARPACK/SRC/MPI'): - make('all') - - mkdirp(prefix.lib) - install('libparpack.a', prefix.lib) diff --git a/var/spack/packages/pcre/package.py b/var/spack/packages/pcre/package.py deleted file mode 100644 index 3424048a6c..0000000000 --- a/var/spack/packages/pcre/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - -class Pcre(Package): - """The PCRE package contains Perl Compatible Regular Expression - libraries. These are useful for implementing regular expression - pattern matching using the same syntax and semantics as Perl 5.""" - homepage = "http://www.pcre.org""" - url = "ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/pcre-8.36.tar.bz2" - - version('8.36', 'b767bc9af0c20bc9c1fe403b0d41ad97') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/petsc/package.py b/var/spack/packages/petsc/package.py deleted file mode 100644 index 4864e39bf1..0000000000 --- a/var/spack/packages/petsc/package.py +++ /dev/null @@ -1,40 +0,0 @@ -from spack import * - -class Petsc(Package): - """PETSc is a suite of data structures and routines for the - scalable (parallel) solution of scientific applications modeled by - partial differential equations.""" - - homepage = "http://www.mcs.anl.gov/petsc/index.html" - url = "http://ftp.mcs.anl.gov/pub/petsc/release-snapshots/petsc-3.5.3.tar.gz" - - version('3.5.3', 'd4fd2734661e89f18ac6014b5dd1ef2f') - version('3.5.2', 'ad170802b3b058b5deb9cd1f968e7e13') - version('3.5.1', 'a557e029711ebf425544e117ffa44d8f') - - depends_on("boost") - depends_on("blas") - depends_on("lapack") - depends_on("hypre") - depends_on("parmetis") - depends_on("metis") - depends_on("hdf5") - depends_on("mpi") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "CC=cc", - "CXX=c++", - "FC=f90", - "--with-blas-lib=%s/libblas.a" % spec['blas'].prefix.lib, - "--with-lapack-lib=%s/liblapack.a" % spec['lapack'].prefix.lib, - "--with-boost-dir=%s" % spec['boost'].prefix, - "--with-hypre-dir=%s" % spec['hypre'].prefix, - "--with-parmetis-dir=%s" % spec['parmetis'].prefix, - "--with-metis-dir=%s" % spec['metis'].prefix, - "--with-hdf5-dir=%s" % spec['hdf5'].prefix, - "--with-shared-libraries=0") - - # PETSc has its own way of doing parallel make. - make('MAKE_NP=%s' % make_jobs, parallel=False) - make("install") diff --git a/var/spack/packages/pidx/package.py b/var/spack/packages/pidx/package.py deleted file mode 100644 index 81aed62fb1..0000000000 --- a/var/spack/packages/pidx/package.py +++ /dev/null @@ -1,21 +0,0 @@ -from spack import * - -class Pidx(Package): - """PIDX Parallel I/O Library. - - PIDX is an efficient parallel I/O library that reads and writes - multiresolution IDX data files. 
- """ - - homepage = "http://www.cedmav.com/pidx" - - version('1.0', git='https://github.com/sci-visus/PIDX.git', - commit='6afa1cf71d1c41263296dc049c8fabaf73c296da') - - depends_on("mpi") - - def install(self, spec, prefix): - with working_dir('spack-build', create=True): - cmake('..', *std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/pixman/package.py b/var/spack/packages/pixman/package.py deleted file mode 100644 index 895cbdbca5..0000000000 --- a/var/spack/packages/pixman/package.py +++ /dev/null @@ -1,18 +0,0 @@ -from spack import * - -class Pixman(Package): - """The Pixman package contains a library that provides low-level - pixel manipulation features such as image compositing and - trapezoid rasterization.""" - homepage = "http://www.pixman.org" - url = "http://cairographics.org/releases/pixman-0.32.6.tar.gz" - - version('0.32.6', '3a30859719a41bd0f5cccffbfefdd4c2') - - depends_on("libpng") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--disable-gtk") - make() - make("install") diff --git a/var/spack/packages/pkg-config/package.py b/var/spack/packages/pkg-config/package.py deleted file mode 100644 index 9964c6ce34..0000000000 --- a/var/spack/packages/pkg-config/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class PkgConfig(Package): - """pkg-config is a helper tool used when compiling applications and libraries""" - homepage = "http://www.freedesktop.org/wiki/Software/pkg-config/" - url = "http://pkgconfig.freedesktop.org/releases/pkg-config-0.28.tar.gz" - - version('0.28', 'aa3c86e67551adc3ac865160e34a2a0d') - - parallel = False - - def install(self, spec, prefix): - configure("--prefix=%s" %prefix, "--enable-shared") - - make() - make("install") - diff --git a/var/spack/packages/pmgr_collective/package.py b/var/spack/packages/pmgr_collective/package.py deleted file mode 100644 index 5d9b02acc3..0000000000 --- a/var/spack/packages/pmgr_collective/package.py +++ /dev/null @@ -1,37 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class PmgrCollective(Package): - """PMGR_COLLECTIVE provides a scalable network for bootstrapping - MPI jobs.""" - homepage = "http://www.sourceforge.net/projects/pmgrcollective" - url = "http://downloads.sourceforge.net/project/pmgrcollective/pmgrcollective/PMGR_COLLECTIVE-1.0/pmgr_collective-1.0.tgz" - - version('1.0', '0384d008774274cc3fc7b4d810dfd07e') - - def install(self, spec, prefix): - make('PREFIX="' + prefix + '"') - make('PREFIX="' + prefix + '"', "install") diff --git a/var/spack/packages/postgresql/package.py b/var/spack/packages/postgresql/package.py deleted file mode 100644 index 46922b7b71..0000000000 --- a/var/spack/packages/postgresql/package.py +++ /dev/null @@ -1,20 +0,0 @@ -from spack import * - -class Postgresql(Package): - """PostgreSQL is a powerful, open source object-relational - database system. It has more than 15 years of active - development and a proven architecture that has earned it a - strong reputation for reliability, data integrity, and - correctness.""" - homepage = "http://www.postgresql.org/" - url = "http://ftp.postgresql.org/pub/source/v9.3.4/postgresql-9.3.4.tar.bz2" - - version('9.3.4', 'd0a41f54c377b2d2fab4a003b0dac762') - - depends_on("openssl") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--with-openssl") - make() - make("install") diff --git a/var/spack/packages/ppl/package.py b/var/spack/packages/ppl/package.py deleted file mode 100644 index 018d5c523d..0000000000 --- a/var/spack/packages/ppl/package.py +++ /dev/null @@ -1,28 +0,0 @@ -from spack import * - -class Ppl(Package): - """The Parma Polyhedra Library (PPL) provides numerical - abstractions especially targeted at applications in the field of - analysis and verification of complex systems. These abstractions - include convex polyhedra, some special classes of polyhedra shapes - that offer interesting complexity/precision tradeoffs, and grids - which represent regularly spaced points that satisfy a set of - linear congruence relations. 
The library also supports finite - powersets and products of polyhedra and grids, a mixed integer - linear programming problem solver using an exact-arithmetic - version of the simplex algorithm, a parametric integer programming - solver, and primitives for termination analysis via the automatic - synthesis of linear ranking functions.""" - - homepage = "http://bugseng.com/products/ppl/" - url = "http://bugseng.com/products/ppl/download/ftp/releases/1.1/ppl-1.1.tar.gz" - - version('1.1', '4f2422c0ef3f409707af32108deb30a7') - - depends_on("gmp") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--with-gmp=%s" % spec['gmp'].prefix) - make() - make("install") diff --git a/var/spack/packages/protobuf/package.py b/var/spack/packages/protobuf/package.py deleted file mode 100644 index 34085c7ce9..0000000000 --- a/var/spack/packages/protobuf/package.py +++ /dev/null @@ -1,16 +0,0 @@ -import os -from spack import * - -class Protobuf(Package): - """Google's data interchange format.""" - - homepage = "https://developers.google.com/protocol-buffers" - url = "https://github.com/google/protobuf/releases/download/v2.5.0/protobuf-2.5.0.tar.bz2" - - version('2.5.0', 'a72001a9067a4c2c4e0e836d0f92ece4') - - def install(self, spec, prefix): - configure("--prefix=" + prefix) - make() - make("check") - make("install") diff --git a/var/spack/packages/py-basemap/package.py b/var/spack/packages/py-basemap/package.py deleted file mode 100644 index 45f1085ba1..0000000000 --- a/var/spack/packages/py-basemap/package.py +++ /dev/null @@ -1,20 +0,0 @@ -from spack import * -import os - -class PyBasemap(Package): - """The matplotlib basemap toolkit is a library for plotting 2D data on maps in Python.""" - homepage = "http://matplotlib.org/basemap/" - url = "https://downloads.sourceforge.net/project/matplotlib/matplotlib-toolkits/basemap-1.0.7/basemap-1.0.7.tar.gz" - - version('1.0.7', '48c0557ced9e2c6e440b28b3caff2de8') - - extends('python') - depends_on('py-setuptools') - depends_on('py-numpy') - depends_on('py-matplotlib') - depends_on('py-pil') - depends_on("geos") - - def install(self, spec, prefix): - env['GEOS_DIR'] = spec['geos'].prefix - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-biopython/package.py b/var/spack/packages/py-biopython/package.py deleted file mode 100644 index 8ecaf48626..0000000000 --- a/var/spack/packages/py-biopython/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - -class PyBiopython(Package): - """It is a distributed collaborative effort to develop Python libraries and applications which address the needs of current and future work in bioinformatics.""" - homepage = "http://biopython.org/wiki/Main_Page" - url = "http://biopython.org/DIST/biopython-1.65.tar.gz" - - version('1.65', '143e7861ade85c0a8b5e2bbdd1da1f67') - - extends('python') - depends_on('py-mx') - depends_on('py-numpy') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-cffi/package.py b/var/spack/packages/py-cffi/package.py deleted file mode 100644 index a4d37483fe..0000000000 --- a/var/spack/packages/py-cffi/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class PyCffi(Package): - """Foreign Function Interface for Python calling C code""" - homepage = "http://cffi.readthedocs.org/en/latest/" - # base https://pypi.python.org/pypi/cffi - url = "https://pypi.python.org/packages/source/c/cffi/cffi-1.1.2.tar.gz#md5=" - - version('1.1.2', 
'ca6e6c45b45caa87aee9adc7c796eaea') - extends('python') - depends_on('py-setuptools') - depends_on('py-pycparser') - depends_on('libffi') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-cython/package.py b/var/spack/packages/py-cython/package.py deleted file mode 100644 index 68eb735ad9..0000000000 --- a/var/spack/packages/py-cython/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class PyCython(Package): - """The Cython compiler for writing C extensions for the Python language.""" - homepage = "https://pypi.python.org/pypi/cython" - url = "https://pypi.python.org/packages/source/C/Cython/cython-0.22.tar.gz" - - version('0.21.2', 'd21adb870c75680dc857cd05d41046a4') - version('0.22', '1ae25add4ef7b63ee9b4af697300d6b6') - - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-dateutil/package.py b/var/spack/packages/py-dateutil/package.py deleted file mode 100644 index 0a17f2f2d2..0000000000 --- a/var/spack/packages/py-dateutil/package.py +++ /dev/null @@ -1,16 +0,0 @@ -from spack import * - -class PyDateutil(Package): - """Extensions to the standard Python datetime module.""" - homepage = "https://pypi.python.org/pypi/dateutil" - url = "https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.4.0.tar.gz" - - version('2.4.0', '75714163bb96bedd07685cdb2071b8bc') - version('2.4.2', '4ef68e1c485b09e9f034e10473e5add2') - - extends('python') - depends_on('py-setuptools') - depends_on('py-six') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-epydoc/package.py b/var/spack/packages/py-epydoc/package.py deleted file mode 100644 index af05510504..0000000000 --- a/var/spack/packages/py-epydoc/package.py +++ /dev/null @@ -1,13 +0,0 @@ -from spack import * - -class PyEpydoc(Package): - """Epydoc is a tool for generating API documentation for Python modules, based on their docstrings.""" - homepage = "https://pypi.python.org/pypi/epydoc" - url = "https://pypi.python.org/packages/source/e/epydoc/epydoc-3.0.1.tar.gz" - - version('3.0.1', '36407974bd5da2af00bf90ca27feeb44') - - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-genders/package.py b/var/spack/packages/py-genders/package.py deleted file mode 100644 index c49c8fd5b2..0000000000 --- a/var/spack/packages/py-genders/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - -class PyGenders(Package): - """Genders is a static cluster configuration database used for cluster configuration management.
It is used by a variety of tools and scripts for management of large clusters.""" - homepage = "https://github.com/chaos/genders" - url = "https://github.com/chaos/genders/releases/download/genders-1-22-1/genders-1.22.tar.gz" - - version('1.22', '9ea59a024dcbddb85b0ed25ddca9bc8e', url='https://github.com/chaos/genders/releases/download/genders-1-22-1/genders-1.22.tar.gz') - extends('python') - - def install(self, spec, prefix): - configure("--prefix=%s" %prefix) - make(parallel=False) - make("install") - diff --git a/var/spack/packages/py-gnuplot/package.py b/var/spack/packages/py-gnuplot/package.py deleted file mode 100644 index ede4472c03..0000000000 --- a/var/spack/packages/py-gnuplot/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class PyGnuplot(Package): - """Gnuplot.py is a Python package that allows you to create graphs from within Python using the gnuplot plotting program.""" - homepage = "http://gnuplot-py.sourceforge.net/" - url = "http://downloads.sourceforge.net/project/gnuplot-py/Gnuplot-py/1.8/gnuplot-py-1.8.tar.gz" - - version('1.8', 'abd6f571e7aec68ae7db90a5217cd5b1') - - extends('python') - depends_on('py-numpy') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-h5py/package.py b/var/spack/packages/py-h5py/package.py deleted file mode 100644 index 6293da5407..0000000000 --- a/var/spack/packages/py-h5py/package.py +++ /dev/null @@ -1,19 +0,0 @@ -from spack import * -import re - -class PyH5py(Package): - """The h5py package provides both a high- and low-level interface to the HDF5 library from Python.""" - homepage = "https://pypi.python.org/pypi/h5py" - url = "https://pypi.python.org/packages/source/h/h5py/h5py-2.4.0.tar.gz" - - version('2.4.0', '80c9a94ae31f84885cc2ebe1323d6758') - version('2.5.0', '6e4301b5ad5da0d51b0a1e5ac19e3b74') - - extends('python', ignore=lambda f: re.match(r'bin/cy*', f)) - depends_on('hdf5') - depends_on('py-numpy') - depends_on('py-cython') - - def install(self, spec, prefix): - python('setup.py', 'configure', '--hdf5=%s' % spec['hdf5'].prefix) - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-ipython/package.py b/var/spack/packages/py-ipython/package.py deleted file mode 100644 index 8d0e64a07f..0000000000 --- a/var/spack/packages/py-ipython/package.py +++ /dev/null @@ -1,16 +0,0 @@ -from spack import * - -class PyIpython(Package): - """IPython provides a rich toolkit to help you make the most out of using Python interactively.""" - homepage = "https://pypi.python.org/pypi/ipython" - url = "https://pypi.python.org/packages/source/i/ipython/ipython-2.3.1.tar.gz" - - version('2.3.1', '2b7085525dac11190bfb45bb8ec8dcbf') - version('3.1.0', 'a749d90c16068687b0ec45a27e72ef8f') - - extends('python') - depends_on('py-pygments') - depends_on('py-setuptools') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-libxml2/package.py b/var/spack/packages/py-libxml2/package.py deleted file mode 100644 index 59005428e4..0000000000 --- a/var/spack/packages/py-libxml2/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - -class PyLibxml2(Package): - """A Python wrapper around libxml2.""" - homepage = "https://xmlsoft.org/python.html" - url = "ftp://xmlsoft.org/libxml2/python/libxml2-python-2.6.21.tar.gz" - - version('2.6.21', '229dd2b3d110a77defeeaa73af83f7f3') - - extends('python') - depends_on('libxml2') - depends_on('libxslt') - - def 
install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-lockfile/package.py b/var/spack/packages/py-lockfile/package.py deleted file mode 100644 index 8722914d94..0000000000 --- a/var/spack/packages/py-lockfile/package.py +++ /dev/null @@ -1,23 +0,0 @@ -from spack import * - -class PyLockfile(Package): - """The lockfile package exports a LockFile class which provides a - simple API for locking files. Unlike the Windows msvcrt.locking - function, the fcntl.lockf and flock functions, and the - deprecated posixfile module, the API is identical across both - Unix (including Linux and Mac) and Windows platforms. The lock - mechanism relies on the atomic nature of the link (on Unix) and - mkdir (on Windows) system calls. An implementation based on - SQLite is also provided, more as a demonstration of the - possibilities it provides than as production-quality code. - """ - homepage = "https://pypi.python.org/pypi/lockfile" - url = "https://pypi.python.org/packages/source/l/lockfile/lockfile-0.10.2.tar.gz" - - version('0.10.2', '1aa6175a6d57f082cd12e7ac6102ab15') - - extends("python") - depends_on("py-setuptools") - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-mako/package.py b/var/spack/packages/py-mako/package.py deleted file mode 100644 index 3e91ffd8e5..0000000000 --- a/var/spack/packages/py-mako/package.py +++ /dev/null @@ -1,16 +0,0 @@ -from spack import * - -class PyMako(Package): - """A super-fast templating language that borrows the best - ideas from the existing templating languages.""" - - homepage = "https://pypi.python.org/pypi/mako" - url = "https://pypi.python.org/packages/source/M/Mako/Mako-1.0.1.tar.gz" - - version('1.0.1', '9f0aafd177b039ef67b90ea350497a54') - - depends_on('py-setuptools') - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-matplotlib/package.py b/var/spack/packages/py-matplotlib/package.py deleted file mode 100644 index e7ce3dfd24..0000000000 --- a/var/spack/packages/py-matplotlib/package.py +++ /dev/null @@ -1,47 +0,0 @@ -from spack import * -import os - -class PyMatplotlib(Package): - """Python plotting package.""" - homepage = "https://pypi.python.org/pypi/matplotlib" - url = "https://pypi.python.org/packages/source/m/matplotlib/matplotlib-1.4.2.tar.gz" - - version('1.4.2', '7d22efb6cce475025733c50487bd8898') - version('1.4.3', '86af2e3e3c61849ac7576a6f5ca44267') - - extends('python', ignore=r'bin/nosetests.*$') - - depends_on('py-pyside') - depends_on('py-ipython') - depends_on('py-pyparsing') - depends_on('py-six') - depends_on('py-dateutil') - depends_on('py-pytz') - depends_on('py-nose') - depends_on('py-numpy') - - depends_on('qt') - depends_on('bzip2') - depends_on('tcl') - depends_on('tk') - depends_on('qhull') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) - - if str(self.version) in ['1.4.2', '1.4.3']: - # hack to fix configuration file - config_file = None - for p,d,f in os.walk(prefix.lib): - for file in f: - if file.find('matplotlibrc') != -1: - config_file = join_path(p, 'matplotlibrc') - print config_file - if config_file == None: - raise InstallError('could not find config file') - filter_file(r'backend : pyside', - 'backend : Qt4Agg', - config_file) - filter_file(r'#backend.qt4 : PyQt4', - 'backend.qt4 : PySide', - config_file) diff --git 
a/var/spack/packages/py-mock/package.py b/var/spack/packages/py-mock/package.py deleted file mode 100644 index 3b08428ba0..0000000000 --- a/var/spack/packages/py-mock/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class PyMock(Package): - """mock is a library for testing in Python. It allows you to replace parts - of your system under test with mock objects and make assertions about how - they have been used.""" - - homepage = "https://github.com/testing-cabal/mock" - url = "https://pypi.python.org/packages/source/m/mock/mock-1.3.0.tar.gz" - - version('1.3.0', '73ee8a4afb3ff4da1b4afa287f39fdeb') - - extends('python') - depends_on('py-setuptools@17.1:') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-mpi4py/package.py b/var/spack/packages/py-mpi4py/package.py deleted file mode 100644 index 8001689a18..0000000000 --- a/var/spack/packages/py-mpi4py/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class PyMpi4py(Package): - """This package provides Python bindings for the Message Passing Interface (MPI) standard. It is implemented on top of the MPI-1/MPI-2 specification and exposes an API which grounds on the standard MPI-2 C++ bindings.""" - homepage = "https://pypi.python.org/pypi/mpi4py" - url = "https://pypi.python.org/packages/source/m/mpi4py/mpi4py-1.3.1.tar.gz" - - version('1.3.1', 'dbe9d22bdc8ed965c23a7ceb6f32fc3c') - extends('python') - depends_on('py-setuptools') - depends_on('mpi') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-mx/package.py b/var/spack/packages/py-mx/package.py deleted file mode 100644 index 717ee0562b..0000000000 --- a/var/spack/packages/py-mx/package.py +++ /dev/null @@ -1,13 +0,0 @@ -from spack import * - -class PyMx(Package): - """The eGenix.com mx Base Distribution for Python is a collection of professional quality software tools which enhance Python's usability in many important areas such as fast text searching, date/time processing and high speed data types.""" - homepage = "http://www.egenix.com/products/python/mxBase/" - url = "https://downloads.egenix.com/python/egenix-mx-base-3.2.8.tar.gz" - - version('3.2.8', '9d9d3a25f9dc051a15e97f452413423b') - - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-nose/package.py b/var/spack/packages/py-nose/package.py deleted file mode 100644 index e7c6cf0264..0000000000 --- a/var/spack/packages/py-nose/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class PyNose(Package): - """nose extends the test loading and running features of unittest, - making it easier to write, find and run tests.""" - - homepage = "https://pypi.python.org/pypi/nose" - url = "https://pypi.python.org/packages/source/n/nose/nose-1.3.4.tar.gz" - - version('1.3.4', '6ed7169887580ddc9a8e16048d38274d') - version('1.3.6', '0ca546d81ca8309080fc80cb389e7a16') - - extends('python', ignore=r'bin/nosetests.*$') - depends_on('py-setuptools') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-numpy/package.py b/var/spack/packages/py-numpy/package.py deleted file mode 100644 index efa109a3e9..0000000000 --- a/var/spack/packages/py-numpy/package.py +++ /dev/null @@ -1,28 +0,0 @@ -from spack import * - -class PyNumpy(Package): - """array processing for numbers, strings, records, 
and objects.""" - homepage = "https://pypi.python.org/pypi/numpy" - url = "https://pypi.python.org/packages/source/n/numpy/numpy-1.9.1.tar.gz" - - version('1.9.1', '78842b73560ec378142665e712ae4ad9') - version('1.9.2', 'a1ed53432dbcd256398898d35bc8e645') - - extends('python') - depends_on('py-nose') - depends_on('netlib-blas+fpic') - depends_on('netlib-lapack+shared') - - def patch(self): - filter_file( - "possible_executables = \['(gfortran|g77|ifort|efl)", - "possible_executables = ['fc", - "numpy/distutils/fcompiler/gnu.py", - "numpy/distutils/fcompiler/intel.py") - - def install(self, spec, prefix): - with open('site.cfg', 'w') as f: - f.write('[DEFAULT]\n') - f.write('libraries=lapack,blas\n') - f.write('library_dirs=%s/lib:%s/lib\n' % (spec['blas'].prefix, spec['lapack'].prefix)) - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pandas/package.py b/var/spack/packages/py-pandas/package.py deleted file mode 100644 index 5b9997faa9..0000000000 --- a/var/spack/packages/py-pandas/package.py +++ /dev/null @@ -1,25 +0,0 @@ -from spack import * -import os - -class PyPandas(Package): - """pandas is a Python package providing fast, flexible, and expressive data structures designed to make working with relational or labeled data both easy and intuitive. It aims to be the fundamental high-level building block for doing practical, real world data analysis in Python. Additionally, it has the broader goal of becoming the most powerful and flexible open source data analysis / manipulation tool available in any language.""" - homepage = "http://pandas.pydata.org/" - url = "https://pypi.python.org/packages/source/p/pandas/pandas-0.16.0.tar.gz#md5=bfe311f05dc0c351f8955fbd1e296e73" - - version('0.16.0', 'bfe311f05dc0c351f8955fbd1e296e73') - version('0.16.1', 'fac4f25748f9610a3e00e765474bdea8') - - extends('python') - depends_on('py-dateutil') - depends_on('py-numpy') - depends_on('py-matplotlib') - depends_on('py-scipy') - depends_on('py-setuptools') - depends_on('py-pytz') - depends_on('libdrm') - depends_on('libpciaccess') - depends_on('llvm') - depends_on('mesa') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pexpect/package.py b/var/spack/packages/py-pexpect/package.py deleted file mode 100644 index ff5fac84e0..0000000000 --- a/var/spack/packages/py-pexpect/package.py +++ /dev/null @@ -1,13 +0,0 @@ -from spack import * - -class PyPexpect(Package): - """Pexpect allows easy control of interactive console applications.""" - homepage = "https://pypi.python.org/pypi/pexpect" - url = "https://pypi.python.org/packages/source/p/pexpect/pexpect-3.3.tar.gz" - - version('3.3', '0de72541d3f1374b795472fed841dce8') - - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pil/package.py b/var/spack/packages/py-pil/package.py deleted file mode 100644 index 743b761981..0000000000 --- a/var/spack/packages/py-pil/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class PyPil(Package): - """The Python Imaging Library (PIL) adds image processing capabilities to your Python interpreter. 
This library supports many file formats, and provides powerful image processing and graphics capabilities.""" - - homepage = "http://www.pythonware.com/products/pil/" - url = "http://effbot.org/media/downloads/Imaging-1.1.7.tar.gz" - - version('1.1.7', 'fc14a54e1ce02a0225be8854bfba478e') - - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pmw/package.py b/var/spack/packages/py-pmw/package.py deleted file mode 100644 index 56131811e9..0000000000 --- a/var/spack/packages/py-pmw/package.py +++ /dev/null @@ -1,13 +0,0 @@ -from spack import * - -class PyPmw(Package): - """Pmw is a toolkit for building high-level compound widgets, or megawidgets, constructed using other widgets as component parts.""" - homepage = "https://pypi.python.org/pypi/Pmw" - url = "https://pypi.python.org/packages/source/P/Pmw/Pmw-2.0.0.tar.gz" - - version('2.0.0', 'c7c3f26c4f5abaa99807edefee578fc0') - - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pychecker/package.py b/var/spack/packages/py-pychecker/package.py deleted file mode 100644 index bda5a746aa..0000000000 --- a/var/spack/packages/py-pychecker/package.py +++ /dev/null @@ -1,13 +0,0 @@ -from spack import * - -class PyPychecker(Package): - """""" - homepage = "http://pychecker.sourceforge.net/" - url = "http://sourceforge.net/projects/pychecker/files/pychecker/0.8.19/pychecker-0.8.19.tar.gz" - - version('0.8.19', 'c37182863dfb09209d6ba4f38fce9d2b') - - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pycparser/package.py b/var/spack/packages/py-pycparser/package.py deleted file mode 100644 index f2bb679d25..0000000000 --- a/var/spack/packages/py-pycparser/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - -class PyPycparser(Package): - """pycparser is a complete parser of the C language, written in pure python""" - homepage = "https://github.com/eliben/pycparser" - url = "https://pypi.python.org/packages/source/p/pycparser/pycparser-2.13.tar.gz" - - version('2.13', 'e4fe1a2d341b22e25da0d22f034ef32f') - - - extends('python') - depends_on('py-setuptools') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pyelftools/package.py b/var/spack/packages/py-pyelftools/package.py deleted file mode 100644 index d5ad32e624..0000000000 --- a/var/spack/packages/py-pyelftools/package.py +++ /dev/null @@ -1,13 +0,0 @@ -from spack import * - -class PyPyelftools(Package): - """A pure-Python library for parsing and analyzing ELF files and DWARF debugging information""" - homepage = "https://pypi.python.org/pypi/pyelftools" - url = "https://pypi.python.org/packages/source/p/pyelftools/pyelftools-0.23.tar.gz" - - version('0.23', 'aa7cefa8bd2f63d7b017440c9084f310') - - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pygments/package.py b/var/spack/packages/py-pygments/package.py deleted file mode 100644 index 7e07bf6869..0000000000 --- a/var/spack/packages/py-pygments/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - -class PyPygments(Package): - """Pygments is a syntax highlighting package written in Python.""" - homepage = "https://pypi.python.org/pypi/pygments" - url = 
"https://pypi.python.org/packages/source/P/Pygments/Pygments-2.0.1.tar.gz" - - version('2.0.1', 'e0daf4c14a4fe5b630da765904de4d6c') - version('2.0.2', '238587a1370d62405edabd0794b3ec4a') - - extends('python') - depends_on('py-setuptools') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pylint/package.py b/var/spack/packages/py-pylint/package.py deleted file mode 100644 index 9579708c29..0000000000 --- a/var/spack/packages/py-pylint/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * -import re - -class PyPylint(Package): - """array processing for numbers, strings, records, and objects.""" - homepage = "https://pypi.python.org/pypi/pylint" - url = "https://pypi.python.org/packages/source/p/pylint/pylint-1.4.1.tar.gz" - - version('1.4.1', 'df7c679bdcce5019389038847e4de622') - version('1.4.3', '5924c1c7ca5ca23647812f5971d0ea44') - - extends('python') - depends_on('py-nose') - depends_on('py-setuptools') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pypar/package.py b/var/spack/packages/py-pypar/package.py deleted file mode 100644 index af9c76ccd8..0000000000 --- a/var/spack/packages/py-pypar/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class PyPypar(Package): - """Pypar is an efficient but easy-to-use module that allows programs written in Python to run in parallel on multiple processors and communicate using MPI.""" - homepage = "http://code.google.com/p/pypar/" - url = "https://pypar.googlecode.com/files/pypar-2.1.5_108.tgz" - - version('2.1.5_108', '7a1f28327d2a3b679f9455c843d850b8', url='https://pypar.googlecode.com/files/pypar-2.1.5_108.tgz') - extends('python') - depends_on('mpi') - - def install(self, spec, prefix): - with working_dir('source'): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pyparsing/package.py b/var/spack/packages/py-pyparsing/package.py deleted file mode 100644 index a6e50ad139..0000000000 --- a/var/spack/packages/py-pyparsing/package.py +++ /dev/null @@ -1,13 +0,0 @@ -from spack import * - -class PyPyparsing(Package): - """A Python Parsing Module.""" - homepage = "https://pypi.python.org/pypi/pyparsing" - url = "https://pypi.python.org/packages/source/p/pyparsing/pyparsing-2.0.3.tar.gz" - - version('2.0.3', '0fe479be09fc2cf005f753d3acc35939') - - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-pyqt/package.py b/var/spack/packages/py-pyqt/package.py deleted file mode 100644 index 8edca105bb..0000000000 --- a/var/spack/packages/py-pyqt/package.py +++ /dev/null @@ -1,24 +0,0 @@ -from spack import * - -class PyPyqt(Package): - """PyQt is a set of Python v2 and v3 bindings for Digia's Qt - application framework and runs on all platforms supported by Qt - including Windows, MacOS/X and Linux.""" - homepage = "http://www.riverbankcomputing.com/software/pyqt/intro" - url = "http://sourceforge.net/projects/pyqt/files/PyQt4/PyQt-4.11.3/PyQt-x11-gpl-4.11.3.tar.gz" - - version('4.11.3', '997c3e443165a89a559e0d96b061bf70') - - extends('python') - depends_on('py-sip') - - # TODO: allow qt5 when conditional deps are supported. 
- # TODO: Fix version matching so that @4 works like @:4 - depends_on('qt@:4') - - def install(self, spec, prefix): - python('configure.py', - '--confirm-license', - '--destdir=%s' % site_packages_dir) - make() - make('install') diff --git a/var/spack/packages/py-pyside/package.py b/var/spack/packages/py-pyside/package.py deleted file mode 100644 index bb5da44d02..0000000000 --- a/var/spack/packages/py-pyside/package.py +++ /dev/null @@ -1,45 +0,0 @@ -from spack import * -import os - -class PyPyside(Package): - """array processing for numbers, strings, records, and objects.""" - homepage = "https://pypi.python.org/pypi/pyside" - url = "https://pypi.python.org/packages/source/P/PySide/PySide-1.2.2.tar.gz" - - version('1.2.2', 'c45bc400c8a86d6b35f34c29e379e44d') - - # TODO: make build dependency - # depends_on("cmake") - - extends('python') - depends_on('py-setuptools') - depends_on('qt@:4') - - def patch(self): - """Undo PySide RPATH handling and add Spack RPATH.""" - # Figure out the special RPATH - pypkg = self.spec['python'].package - rpath = self.rpath - rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'PySide')) - - # Add Spack's standard CMake args to the sub-builds. - # They're called BY setup.py so we have to patch it. - filter_file( - r'OPTION_CMAKE,', - r'OPTION_CMAKE, ' + ( - '"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", ' - '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(rpath)), - 'setup.py') - - # PySide tries to patch ELF files to remove RPATHs - # Disable this and go with the one we set. - filter_file( - r'^\s*rpath_cmd\(pyside_path, srcpath\)', - r'#rpath_cmd(pyside_path, srcpath)', - 'pyside_postinstall.py') - - - def install(self, spec, prefix): - python('setup.py', 'install', - '--prefix=%s' % prefix, - '--jobs=%s' % make_jobs) diff --git a/var/spack/packages/py-python-daemon/package.py b/var/spack/packages/py-python-daemon/package.py deleted file mode 100644 index 12cbe9101c..0000000000 --- a/var/spack/packages/py-python-daemon/package.py +++ /dev/null @@ -1,26 +0,0 @@ -from spack import * - -class PyPythonDaemon(Package): - """Library to implement a well-behaved Unix daemon process. - - This library implements the well-behaved daemon specification of - PEP Standard daemon process. - - A well-behaved Unix daemon process is tricky to get right, but the - required steps are much the same for every daemon program. A - DaemonContext instance holds the behaviour and configured process - environment for the program; use the instance as a context manager - to enter a daemon state. 
- """ - homepage = "https://pypi.python.org/pypi/python-daemon/" - url = "https://pypi.python.org/packages/source/p/python-daemon/python-daemon-2.0.5.tar.gz" - - version('2.0.5', '73e7f49f525c51fa4a995aea4d80de41') - - extends("python") - depends_on("py-setuptools") - depends_on("py-lockfile") - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) - diff --git a/var/spack/packages/py-pytz/package.py b/var/spack/packages/py-pytz/package.py deleted file mode 100644 index da6311a784..0000000000 --- a/var/spack/packages/py-pytz/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class PyPytz(Package): - """World timezone definitions, modern and historical.""" - homepage = "https://pypi.python.org/pypi/pytz" - url = "https://pypi.python.org/packages/source/p/pytz/pytz-2014.10.tar.gz" - - version('2014.10', 'eb1cb941a20c5b751352c52486aa1dd7') - version('2015.4', '417a47b1c432d90333e42084a605d3d8') - - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-rpy2/package.py b/var/spack/packages/py-rpy2/package.py deleted file mode 100644 index a0b03d03e3..0000000000 --- a/var/spack/packages/py-rpy2/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class PyRpy2(Package): - """rpy2 is a redesign and rewrite of rpy. It is providing a low-level interface to R from Python, a proposed high-level interface, including wrappers to graphical libraries, as well as R-like structures and functions.""" - homepage = "https://pypi.python.org/pypi/rpy2" - url = "https://pypi.python.org/packages/source/r/rpy2/rpy2-2.5.4.tar.gz" - - version('2.5.4', '115a20ac30883f096da2bdfcab55196d') - version('2.5.6', 'a36e758b633ce6aec6a5f450bfee980f') - - extends('python') - depends_on('py-setuptools') - - depends_on('R') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-scientificpython/package.py b/var/spack/packages/py-scientificpython/package.py deleted file mode 100644 index df2c86caac..0000000000 --- a/var/spack/packages/py-scientificpython/package.py +++ /dev/null @@ -1,16 +0,0 @@ -from spack import * - -class PyScientificpython(Package): - """ScientificPython is a collection of Python modules for - scientific computing. 
It contains support for geometry, - mathematical functions, statistics, physical units, IO, - visualization, and parallelization.""" - - homepage = "https://sourcesup.renater.fr/projects/scientific-py/" - url = "https://sourcesup.renater.fr/frs/download.php/file/4411/ScientificPython-2.8.1.tar.gz" - version('2.8.1', '73ee0df19c7b58cdf2954261f0763c77') - - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-scikit-learn/package.py b/var/spack/packages/py-scikit-learn/package.py deleted file mode 100644 index 5b078ce901..0000000000 --- a/var/spack/packages/py-scikit-learn/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class PyScikitLearn(Package): - """""" - homepage = "https://pypi.python.org/pypi/scikit-learn" - url = "https://pypi.python.org/packages/source/s/scikit-learn/scikit-learn-0.15.2.tar.gz" - - version('0.15.2', 'd9822ad0238e17b382a3c756ea94fe0d') - version('0.16.1', '363ddda501e3b6b61726aa40b8dbdb7e') - - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-scipy/package.py b/var/spack/packages/py-scipy/package.py deleted file mode 100644 index 3a1124cc15..0000000000 --- a/var/spack/packages/py-scipy/package.py +++ /dev/null @@ -1,18 +0,0 @@ -from spack import * - -class PyScipy(Package): - """Scientific Library for Python.""" - homepage = "https://pypi.python.org/pypi/scipy" - url = "https://pypi.python.org/packages/source/s/scipy/scipy-0.15.0.tar.gz" - - version('0.15.0', '639112f077f0aeb6d80718dc5019dc7a') - version('0.15.1', 'be56cd8e60591d6332aac792a5880110') - - extends('python') - depends_on('py-nose') - depends_on('py-numpy') - depends_on('blas') - depends_on('lapack') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-setuptools/package.py b/var/spack/packages/py-setuptools/package.py deleted file mode 100644 index 760ad4d6db..0000000000 --- a/var/spack/packages/py-setuptools/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - -class PySetuptools(Package): - """Easily download, build, install, upgrade, and uninstall Python packages.""" - homepage = "https://pypi.python.org/pypi/setuptools" - url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz" - - version('11.3.1', '01f69212e019a2420c1693fb43593930') - version('16.0', '0ace0b96233516fc5f7c857d086aa3ad') - version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06') - - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-shiboken/package.py b/var/spack/packages/py-shiboken/package.py deleted file mode 100644 index e4bf4ce07e..0000000000 --- a/var/spack/packages/py-shiboken/package.py +++ /dev/null @@ -1,45 +0,0 @@ -from spack import * -import os - -class PyShiboken(Package): - """Shiboken generates bindings for C++ libraries using CPython source code.""" - homepage = "https://shiboken.readthedocs.org/" - url = "https://pypi.python.org/packages/source/S/Shiboken/Shiboken-1.2.2.tar.gz" - - version('1.2.2', '345cfebda221f525842e079a6141e555') - - # TODO: make build dependency - # depends_on("cmake") - - extends('python') - depends_on("py-setuptools") - depends_on("libxml2") - depends_on("qt@:4.8") - - def patch(self): - """Undo Shiboken RPATH handling and add Spack RPATH.""" - # Add Spack's standard CMake args to the 
sub-builds. - # They're called BY setup.py so we have to patch it. - pypkg = self.spec['python'].package - rpath = self.rpath - rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'Shiboken')) - - filter_file( - r'OPTION_CMAKE,', - r'OPTION_CMAKE, ' + ( - '"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", ' - '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(rpath)), - 'setup.py') - - # Shiboken tries to patch ELF files to remove RPATHs - # Disable this and go with the one we set. - filter_file( - r'^\s*rpath_cmd\(shiboken_path, srcpath\)', - r'#rpath_cmd(shiboken_path, srcpath)', - 'shiboken_postinstall.py') - - - def install(self, spec, prefix): - python('setup.py', 'install', - '--prefix=%s' % prefix, - '--jobs=%s' % make_jobs) diff --git a/var/spack/packages/py-sip/package.py b/var/spack/packages/py-sip/package.py deleted file mode 100644 index e4a6fb6961..0000000000 --- a/var/spack/packages/py-sip/package.py +++ /dev/null @@ -1,21 +0,0 @@ -from spack import * -import os - -class PySip(Package): - """SIP is a tool that makes it very easy to create Python bindings for C and C++ libraries.""" - homepage = "http://www.riverbankcomputing.com/software/sip/intro" - url = "http://sourceforge.net/projects/pyqt/files/sip/sip-4.16.5/sip-4.16.5.tar.gz" - - version('4.16.5', '6d01ea966a53e4c7ae5c5e48c40e49e5') - version('4.16.7', '32abc003980599d33ffd789734de4c36') - - extends('python') - - def install(self, spec, prefix): - python('configure.py', - '--destdir=%s' % site_packages_dir, - '--bindir=%s' % spec.prefix.bin, - '--incdir=%s' % python_include_dir, - '--sipdir=%s' % os.path.join(spec.prefix.share, 'sip')) - make() - make('install') diff --git a/var/spack/packages/py-six/package.py b/var/spack/packages/py-six/package.py deleted file mode 100644 index 05c5bd00a9..0000000000 --- a/var/spack/packages/py-six/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class PySix(Package): - """Python 2 and 3 compatibility utilities.""" - homepage = "https://pypi.python.org/pypi/six" - url = "https://pypi.python.org/packages/source/s/six/six-1.9.0.tar.gz" - - version('1.9.0', '476881ef4012262dfc8adc645ee786c4') - - extends('python') - depends_on('py-setuptools') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-sphinx/package.py b/var/spack/packages/py-sphinx/package.py deleted file mode 100644 index ec2e89a098..0000000000 --- a/var/spack/packages/py-sphinx/package.py +++ /dev/null @@ -1,13 +0,0 @@ -from spack import * - -class PySphinx(Package): - """Sphinx Documentation Generator.""" - homepage = "http://sphinx-doc.org" - url = "https://pypi.python.org/packages/source/S/Sphinx/Sphinx-1.3.1.tar.gz" - - version('1.3.1', '8786a194acf9673464c5455b11fd4332') - - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-sympy/package.py b/var/spack/packages/py-sympy/package.py deleted file mode 100644 index c17e35b95f..0000000000 --- a/var/spack/packages/py-sympy/package.py +++ /dev/null @@ -1,13 +0,0 @@ -from spack import * - -class PySympy(Package): - """SymPy is a Python library for symbolic mathematics.""" - homepage = "https://pypi.python.org/pypi/sympy" - url = "https://pypi.python.org/packages/source/s/sympy/sympy-0.7.6.tar.gz" - - version('0.7.6', '3d04753974306d8a13830008e17babca') - - extends('python') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git 
a/var/spack/packages/py-virtualenv/package.py b/var/spack/packages/py-virtualenv/package.py deleted file mode 100644 index 037a6fc59f..0000000000 --- a/var/spack/packages/py-virtualenv/package.py +++ /dev/null @@ -1,16 +0,0 @@ -from spack import * -import shutil - -class PyVirtualenv(Package): - """virtualenv is a tool to create isolated Python environments.""" - homepage = "http://virtualenv.readthedocs.org/projects/virtualenv/" - url = "https://pypi.python.org/packages/source/v/virtualenv/virtualenv-1.11.6.tar.gz" - - version('1.11.6', 'f61cdd983d2c4e6aeabb70b1060d6f49') - version('13.0.1', '1ffc011bde6667f0e37ecd976f4934db') - - extends('python') - depends_on('py-setuptools') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-yapf/package.py b/var/spack/packages/py-yapf/package.py deleted file mode 100644 index 12ef191515..0000000000 --- a/var/spack/packages/py-yapf/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - -class PyYapf(Package): - """ Yet Another Python Formatter """ - homepage = "https://github.com/google/yapf" - # base https://pypi.python.org/pypi/cffi - url = "https://github.com/google/yapf/archive/v0.2.1.tar.gz" - - version('0.2.1', '348ccf86cf2057872e4451b204fb914c') - - extends('python') - depends_on('py-setuptools') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py deleted file mode 100644 index 000881a846..0000000000 --- a/var/spack/packages/python/package.py +++ /dev/null @@ -1,160 +0,0 @@ -import os -import re -from contextlib import closing -from llnl.util.lang import match_predicate - -from spack import * -import spack - - -class Python(Package): - """The Python programming language.""" - homepage = "http://www.python.org" - url = "http://www.python.org/ftp/python/2.7.8/Python-2.7.8.tar.xz" - - extendable = True - - version('2.7.8', 'd235bdfa75b8396942e360a70487ee00') - version('2.7.10', 'c685ef0b8e9f27b5e3db5db12b268ac6') - - depends_on("openssl") - depends_on("bzip2") - depends_on("readline") - depends_on("ncurses") - depends_on("sqlite") - - def install(self, spec, prefix): - # Need this to allow python build to find the Python installation. - env['PYTHONHOME'] = prefix - - # Rest of install is pretty standard. - configure("--prefix=%s" % prefix, - "--with-threads", - "--enable-shared") - make() - make("install") - - - # ======================================================================== - # Set up environment to make install easy for python extensions. - # ======================================================================== - - @property - def python_lib_dir(self): - return os.path.join('lib', 'python%d.%d' % self.version[:2]) - - - @property - def python_include_dir(self): - return os.path.join('include', 'python%d.%d' % self.version[:2]) - - - @property - def site_packages_dir(self): - return os.path.join(self.python_lib_dir, 'site-packages') - - - def setup_dependent_environment(self, module, spec, ext_spec): - """Called before python modules' install() methods. - - In most cases, extensions will only need to have one line:: - - python('setup.py', 'install', '--prefix=%s' % prefix) - """ - # Python extension builds can have a global python executable function - module.python = Executable(join_path(spec.prefix.bin, 'python')) - - # Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs. 
- module.python_lib_dir = os.path.join(ext_spec.prefix, self.python_lib_dir) - module.python_include_dir = os.path.join(ext_spec.prefix, self.python_include_dir) - module.site_packages_dir = os.path.join(ext_spec.prefix, self.site_packages_dir) - - # Make the site packages directory if it does not exist already. - mkdirp(module.site_packages_dir) - - # Set PYTHONPATH to include site-packages dir for the - # extension and any other python extensions it depends on. - python_paths = [] - for d in ext_spec.traverse(): - if d.package.extends(self.spec): - python_paths.append(os.path.join(d.prefix, self.site_packages_dir)) - os.environ['PYTHONPATH'] = ':'.join(python_paths) - - - # ======================================================================== - # Handle specifics of activating and deactivating python modules. - # ======================================================================== - - def python_ignore(self, ext_pkg, args): - """Add some ignore files to activate/deactivate args.""" - ignore_arg = args.get('ignore', lambda f: False) - - # Always ignore easy-install.pth, as it needs to be merged. - patterns = [r'easy-install\.pth$'] - - # Ignore pieces of setuptools installed by other packages. - if ext_pkg.name != 'py-setuptools': - patterns.append(r'/site\.pyc?$') - patterns.append(r'setuptools\.pth') - patterns.append(r'bin/easy_install[^/]*$') - patterns.append(r'setuptools.*egg$') - - return match_predicate(ignore_arg, patterns) - - - def write_easy_install_pth(self, exts): - paths = [] - for ext in sorted(exts.values()): - ext_site_packages = os.path.join(ext.prefix, self.site_packages_dir) - easy_pth = "%s/easy-install.pth" % ext_site_packages - - if not os.path.isfile(easy_pth): - continue - - with closing(open(easy_pth)) as f: - for line in f: - line = line.rstrip() - - # Skip lines matching these criteria - if not line: continue - if re.search(r'^(import|#)', line): continue - if (ext.name != 'py-setuptools' and - re.search(r'setuptools.*egg$', line)): continue - - paths.append(line) - - site_packages = os.path.join(self.prefix, self.site_packages_dir) - main_pth = "%s/easy-install.pth" % site_packages - - if not paths: - if os.path.isfile(main_pth): - os.remove(main_pth) - - else: - with closing(open(main_pth, 'w')) as f: - f.write("import sys; sys.__plen = len(sys.path)\n") - for path in paths: - f.write("%s\n" % path) - f.write("import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; " - "p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new)\n") - - - def activate(self, ext_pkg, **args): - ignore=self.python_ignore(ext_pkg, args) - args.update(ignore=ignore) - - super(Python, self).activate(ext_pkg, **args) - - exts = spack.install_layout.extension_map(self.spec) - exts[ext_pkg.name] = ext_pkg.spec - self.write_easy_install_pth(exts) - - - def deactivate(self, ext_pkg, **args): - args.update(ignore=self.python_ignore(ext_pkg, args)) - super(Python, self).deactivate(ext_pkg, **args) - - exts = spack.install_layout.extension_map(self.spec) - if ext_pkg.name in exts: # Make deactivate idempotent. 
- del exts[ext_pkg.name] - self.write_easy_install_pth(exts) diff --git a/var/spack/packages/qhull/package.py b/var/spack/packages/qhull/package.py deleted file mode 100644 index 9da4078a70..0000000000 --- a/var/spack/packages/qhull/package.py +++ /dev/null @@ -1,27 +0,0 @@ -from spack import * - -class Qhull(Package): - """Qhull computes the convex hull, Delaunay triangulation, Voronoi - diagram, halfspace intersection about a point, furthest-site - Delaunay triangulation, and furthest-site Voronoi diagram. The - source code runs in 2-d, 3-d, 4-d, and higher dimensions. Qhull - implements the Quickhull algorithm for computing the convex - hull. It handles roundoff errors from floating point - arithmetic. It computes volumes, surface areas, and - approximations to the convex hull. - - Qhull does not support triangulation of non-convex surfaces, - mesh generation of non-convex objects, medium-sized inputs in - 9-D and higher, alpha shapes, weighted Voronoi diagrams, - Voronoi volumes, or constrained Delaunay triangulations.""" - - homepage = "http://www.qhull.org" - - version('1.0', 'd0f978c0d8dfb2e919caefa56ea2953c', - url="http://www.qhull.org/download/qhull-2012.1-src.tgz") - - def install(self, spec, prefix): - with working_dir('spack-build', create=True): - cmake('..', *std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/qt/package.py b/var/spack/packages/qt/package.py deleted file mode 100644 index 0e4abe3b1d..0000000000 --- a/var/spack/packages/qt/package.py +++ /dev/null @@ -1,109 +0,0 @@ -import os -from spack import * -import os - -class Qt(Package): - """Qt is a comprehensive cross-platform C++ application framework.""" - homepage = "http://qt.io" - list_url = 'http://download.qt-project.org/official_releases/qt/' - list_depth = 2 - - version('5.4.0', 'e8654e4b37dd98039ba20da7a53877e6', - url='http://download.qt-project.org/official_releases/qt/5.4/5.4.0/single/qt-everywhere-opensource-src-5.4.0.tar.gz') - version('5.3.2', 'febb001129927a70174467ecb508a682', - url='http://download.qt.io/archive/qt/5.3/5.3.2/single/qt-everywhere-opensource-src-5.3.2.tar.gz') - - version('5.2.1', 'a78408c887c04c34ce615da690e0b4c8', - url='http://download.qt.io/archive/qt/5.2/5.2.1/single/qt-everywhere-opensource-src-5.2.1.tar.gz') - version('4.8.6', '2edbe4d6c2eff33ef91732602f3518eb', - url="http://download.qt-project.org/official_releases/qt/4.8/4.8.6/qt-everywhere-opensource-src-4.8.6.tar.gz") - - # Use system openssl for security.
- #depends_on("openssl") - - depends_on("glib") - depends_on("gtkplus") - depends_on("libxml2") - depends_on("zlib") - depends_on("dbus") - depends_on("libtiff") - depends_on("libpng") - depends_on("libmng") - depends_on("jpeg") - - # Webkit - # depends_on("gperf") - # depends_on("flex") - # depends_on("bison") - # depends_on("ruby") - # depends_on("icu4c") - - # OpenGL hardware acceleration - depends_on("mesa") - depends_on("libxcb") - - - def setup_dependent_environment(self, module, spec, dep_spec): - """Dependencies of Qt find it using the QTDIR environment variable.""" - os.environ['QTDIR'] = self.prefix - - - def patch(self): - if self.spec.satisfies('@4'): - qmake_conf = 'mkspecs/common/g++-base.conf' - qmake_unix_conf = 'mkspecs/common/g++-unix.conf' - elif self.spec.satisfies('@5'): - qmake_conf = 'qtbase/mkspecs/common/g++-base.conf' - qmake_unix_conf = 'qtbase/mkspecs/common/g++-unix.conf' - else: - return - - # Fix qmake compilers in the default mkspec - filter_file(r'^QMAKE_COMPILER *=.*$', 'QMAKE_COMPILER = cc', qmake_conf) - filter_file(r'^QMAKE_CC *=.*$', 'QMAKE_CC = cc', qmake_conf) - filter_file(r'^QMAKE_CXX *=.*$', 'QMAKE_CXX = c++', qmake_conf) - filter_file(r'^QMAKE_LFLAGS_NOUNDEF *\+?=.*$', 'QMAKE_LFLAGS_NOUNDEF =', qmake_unix_conf) - - - @property - def common_config_args(self): - return [ - '-prefix', self.prefix, - '-v', - '-opensource', - '-opengl', - "-release", - '-shared', - '-confirm-license', - '-openssl-linked', - '-dbus-linked', - '-optimized-qmake', - '-no-openvg', - '-no-pch', - # NIS is deprecated in more recent glibc - "-no-nis"] - # Don't disable all the database drivers, but should - # really get them into spack at some point. - - - @when('@4') - def configure(self): - configure('-fast', - '-no-webkit', - *self.common_config_args) - - - @when('@5') - def configure(self): - configure('-no-eglfs', - '-no-directfb', - '-qt-xcb', - # If someone wants to get a webkit build working, be my guest! - '-skip', 'qtwebkit', - *self.common_config_args) - - - def install(self, spec, prefix): - self.configure() - make() - make("install") diff --git a/var/spack/packages/qthreads/package.py b/var/spack/packages/qthreads/package.py deleted file mode 100644 index dacdb71524..0000000000 --- a/var/spack/packages/qthreads/package.py +++ /dev/null @@ -1,22 +0,0 @@ -from spack import * - -class Qthreads(Package): - """The qthreads API is designed to make using large numbers of - threads convenient and easy, and to allow portable access to - threading constructs used in massively parallel shared memory - environments. The API maps well to both MTA-style threading and - PIM-style threading, and we provide an implementation of this - interface in both a standard SMP context as well as the SST - context. 
The qthreads API provides access to full/empty-bit - (FEB) semantics, where every word of memory can be marked - either full or empty, and a thread can wait for any word to - attain either state.""" - homepage = "http://www.cs.sandia.gov/qthreads/" - url = "https://qthreads.googlecode.com/files/qthread-1.10.tar.bz2" - - version('1.10', '5af8c8bbe88c2a6d45361643780d1671') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/ravel/package.py b/var/spack/packages/ravel/package.py deleted file mode 100644 index 01fa941cfe..0000000000 --- a/var/spack/packages/ravel/package.py +++ /dev/null @@ -1,23 +0,0 @@ -from spack import * - -class Ravel(Package): - """Ravel is a parallel communication trace visualization tool that - orders events according to logical time.""" - - homepage = "https://github.com/scalability-llnl/ravel" - url = 'https://github.com/scalability-llnl/ravel/archive/v1.0.0.tar.gz' - - version('1.0.0', 'b25fece58331c2adfcce76c5036485c2') - - # TODO: make this a build dependency - depends_on('cmake@2.8.9:') - - depends_on('muster@1.0.1:') - depends_on('otf') - depends_on('otf2') - depends_on('qt@5:') - - def install(self, spec, prefix): - cmake('-Wno-dev', *std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/readline/package.py b/var/spack/packages/readline/package.py deleted file mode 100644 index 1b870e0e7f..0000000000 --- a/var/spack/packages/readline/package.py +++ /dev/null @@ -1,21 +0,0 @@ -from spack import * - -class Readline(Package): - """The GNU Readline library provides a set of functions for use by - applications that allow users to edit command lines as they - are typed in. Both Emacs and vi editing modes are - available. The Readline library includes additional functions - to maintain a list of previously-entered command lines, to - recall and perhaps reedit those lines, and perform csh-like - history expansion on previous commands. """ - homepage = "http://cnswww.cns.cwru.edu/php/chet/readline/rltop.html" - url = "ftp://ftp.cwru.edu/pub/bash/readline-6.3.tar.gz" - - version('6.3', '33c8fb279e981274f485fd91da77e94a') - - depends_on("ncurses") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make("SHLIB_LIBS=-lncurses") - make("install") diff --git a/var/spack/packages/rose/add_spack_compiler_recognition.patch b/var/spack/packages/rose/add_spack_compiler_recognition.patch deleted file mode 100644 index ce61ae4e4c..0000000000 --- a/var/spack/packages/rose/add_spack_compiler_recognition.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/config/compiler-defs.m4 b/config/compiler-defs.m4 -index d7d85d2..780c8de 100644 ---- a/config/compiler-defs.m4 -+++ b/config/compiler-defs.m4 -@@ -28,7 +28,7 @@ dnl predefined by a specific compiler - # g++|gcc|mpicc|mpic++|mpicxx|mpiCC) - # TOO (2/16/2011): added support for tensilica compilers, assuming they are - # like GCC (they use a GCC front-end) -- g++*|gcc*|mpicc|mpic++|mpicxx|mpiCC|xt-xc++|xt-xcc) -+ cc*|c++*|g++*|gcc*|mpicc|mpic++|mpicxx|mpiCC|xt-xc++|xt-xcc) - BACKEND_GCC_MAJOR=`echo|$BACKEND_CXX_COMPILER -dumpversion | cut -d\. -f1` - BACKEND_GCC_MINOR=`echo|$BACKEND_CXX_COMPILER -dumpversion | cut -d\. -f2` - BACKEND_GCC_PATCHLEVEL=`echo|$BACKEND_CXX_COMPILER -dumpversion | cut -d\.
-f3` diff --git a/var/spack/packages/rose/package.py b/var/spack/packages/rose/package.py deleted file mode 100644 index 1d7294acab..0000000000 --- a/var/spack/packages/rose/package.py +++ /dev/null @@ -1,39 +0,0 @@ -#------------------------------------------------------------------------------ -# Author: Justin Too -#------------------------------------------------------------------------------ - -from spack import * - -class Rose(Package): - """A compiler infrastructure to build source-to-source program - transformation and analysis tools. - (Developed at Lawrence Livermore National Lab)""" - - homepage = "http://rosecompiler.org/" - url = "https://github.com/rose-compiler/edg4x-rose" - - version('master', branch='master', git='https://github.com/rose-compiler/edg4x-rose.git') - - patch('add_spack_compiler_recognition.patch') - - depends_on("autoconf@2.69") - depends_on("automake@1.14") - depends_on("libtool@2.4") - depends_on("boost@1.54.0") - depends_on("jdk@8u25-linux-x64") - - def install(self, spec, prefix): - # Bootstrap with autotools - bash = which('bash') - bash('build') - - # Configure, compile & install - with working_dir('rose-build', create=True): - boost = spec['boost'] - - configure = Executable('../configure') - configure("--prefix=" + prefix, - "--with-boost=" + boost.prefix, - "--disable-boost-version-check") - make("install-core") - diff --git a/var/spack/packages/ruby/package.py b/var/spack/packages/ruby/package.py deleted file mode 100644 index 6b6242362c..0000000000 --- a/var/spack/packages/ruby/package.py +++ /dev/null @@ -1,41 +0,0 @@ -from spack import * -import spack -import os - -class Ruby(Package): - """A dynamic, open source programming language with a focus on - simplicity and productivity.""" - - homepage = "https://www.ruby-lang.org/" - url = "http://cache.ruby-lang.org/pub/ruby/2.2/ruby-2.2.0.tar.gz" - - extendable = True - - version('2.2.0', 'cd03b28fd0b555970f5c4fd481700852') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") - - def setup_dependent_environment(self, module, spec, ext_spec): - """Called before ruby modules' install() methods. Sets GEM_HOME - and GEM_PATH to values appropriate for the package being built. 
- - In most cases, extensions will only need to have one line:: - - gem('install', '<gem-name>.gem') - """ - # Ruby extension builds have global ruby and gem functions - module.ruby = Executable(join_path(spec.prefix.bin, 'ruby')) - module.gem = Executable(join_path(spec.prefix.bin, 'gem')) - - # Set GEM_PATH to include dependent gem directories - ruby_paths = [] - for d in ext_spec.traverse(): - if d.package.extends(self.spec): - ruby_paths.append(d.prefix) - os.environ['GEM_PATH'] = ':'.join(ruby_paths) - # The actual installation path for this gem - os.environ['GEM_HOME'] = ext_spec.prefix diff --git a/var/spack/packages/samtools/package.py b/var/spack/packages/samtools/package.py deleted file mode 100644 index 72900398d8..0000000000 --- a/var/spack/packages/samtools/package.py +++ /dev/null @@ -1,18 +0,0 @@ -from spack import * - -class Samtools(Package): - """SAM Tools provide various utilities for manipulating alignments in the SAM format, - including sorting, merging, indexing and generating - alignments in a per-position format""" - - homepage = "www.htslib.org" - version('1.2','988ec4c3058a6ceda36503eebecd4122',url = "https://github.com/samtools/samtools/releases/download/1.2/samtools-1.2.tar.bz2") - - depends_on("zlib") - depends_on("mpc") - parallel=False - patch("samtools1.2.patch",level=0) - - def install(self, spec, prefix): - make("prefix=%s" % prefix, "install") - diff --git a/var/spack/packages/samtools/samtools1.2.patch b/var/spack/packages/samtools/samtools1.2.patch deleted file mode 100644 index ead3ab4e2c..0000000000 --- a/var/spack/packages/samtools/samtools1.2.patch +++ /dev/null @@ -1,20 +0,0 @@ ---- Makefile 2015-02-03 08:27:34.000000000 -0800 -+++ Makefile.new 2015-07-21 10:38:27.881406892 -0700 -@@ -26,7 +26,7 @@ - CFLAGS = -g -Wall -O2 - LDFLAGS = - LDLIBS = --DFLAGS= -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_CURSES_LIB=1 -+DFLAGS= -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_CURSES_LIB=0 - LOBJS= bam_aux.o bam.o bam_import.o sam.o \ - sam_header.o bam_plbuf.o - AOBJS= bam_index.o bam_plcmd.o sam_view.o \ -@@ -37,7 +37,7 @@ - faidx.o stats.o stats_isize.o bam_flags.o bam_split.o \ - bam_tview.o bam_tview_curses.o bam_tview_html.o bam_lpileup.o - INCLUDES= -I. -I$(HTSDIR) --LIBCURSES= -lcurses # -lXCurses -+#LIBCURSES= -lcurses # -lXCurses - - prefix = /usr/local - exec_prefix = $(prefix) diff --git a/var/spack/packages/scalasca/package.py b/var/spack/packages/scalasca/package.py deleted file mode 100644 index cf7a40c1f5..0000000000 --- a/var/spack/packages/scalasca/package.py +++ /dev/null @@ -1,65 +0,0 @@ -# FIXME: Add copyright - -from spack import * - -class Scalasca(Package): - """Scalasca is a software tool that supports the performance optimization - of parallel programs by measuring and analyzing their runtime behavior. - The analysis identifies potential performance bottlenecks - in - particular those concerning communication and synchronization - and - offers guidance in exploring their causes.""" - - # FIXME: add a proper url for your package's homepage here.
- homepage = "http://www.scalasca.org" - url = "http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz" - - version('2.1', 'bab9c2b021e51e2ba187feec442b96e6', - url = 'http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz' ) - - depends_on("mpi") - depends_on("otf2@1.4") - depends_on("cube@4.2.3") - - backend_user_provided = """\ -CC=cc -CXX=c++ -F77=f77 -FC=f90 -CFLAGS=-fPIC -CXXFLAGS=-fPIC -""" - frontend_user_provided = """\ -CC_FOR_BUILD=cc -CXX_FOR_BUILD=c++ -F77_FOR_BUILD=f70 -FC_FOR_BUILD=f90 -CFLAGS_FOR_BUILD=-fPIC -CXXFLAGS_FOR_BUILD=-fPIC -""" - mpi_user_provided = """\ -MPICC=mpicc -MPICXX=mpicxx -MPIF77=mpif77 -MPIFC=mpif90 -MPI_CFLAGS=-fPIC -MPI_CXXFLAGS=-fPIC -""" - - def install(self, spec, prefix): - configure_args = ["--prefix=%s" % prefix, - "--with-custom-compilers", - "--with-otf2=%s" % spec['otf2'].prefix.bin, - "--with-cube=%s" % spec['cube'].prefix.bin, - "--enable-shared"] - - configure(*configure_args) - - make() - make("install") - - # FIXME: Modify the configure line to suit your build system here. - configure("--prefix=%s" % prefix) - - # FIXME: Add logic to build and install here - make() - make("install") diff --git a/var/spack/packages/scorep/package.py b/var/spack/packages/scorep/package.py deleted file mode 100644 index f013bd1cbb..0000000000 --- a/var/spack/packages/scorep/package.py +++ /dev/null @@ -1,74 +0,0 @@ -# FIXME: Add copyright statement - -from spack import * - -class Scorep(Package): - """The Score-P measurement infrastructure is a highly scalable and - easy-to-use tool suite for profiling, event tracing, and online - analysis of HPC applications.""" - - # FIXME: add a proper url for your package's homepage here. - homepage = "http://www.vi-hps.org/projects/score-p" - url = "http://www.vi-hps.org/upload/packages/scorep/scorep-1.2.3.tar.gz" - - version('1.3', '9db6f957b7f51fa01377a9537867a55c', - url = 'http://www.vi-hps.org/upload/packages/scorep/scorep-1.3.tar.gz') - - version('1.2.3', '4978084e7cbd05b94517aa8beaea0817') - - depends_on("mpi") - depends_on("papi") - # depends_on("otf2@1.2:1.2.1") # only Score-P 1.2.x - depends_on("otf2") - depends_on("opari2") - depends_on("cube@4.2:4.2.3") - - backend_user_provided = """\ -CC=cc -CXX=c++ -F77=f77 -FC=f90 -CFLAGS=-fPIC -CXXFLAGS=-fPIC -""" - frontend_user_provided = """\ -CC_FOR_BUILD=cc -CXX_FOR_BUILD=c++ -F77_FOR_BUILD=f70 -FC_FOR_BUILD=f90 -CFLAGS_FOR_BUILD=-fPIC -CXXFLAGS_FOR_BUILD=-fPIC -""" - mpi_user_provided = """\ -MPICC=mpicc -MPICXX=mpicxx -MPIF77=mpif77 -MPIFC=mpif90 -MPI_CFLAGS=-fPIC -MPI_CXXFLAGS=-fPIC -""" - - def install(self, spec, prefix): - # Use a custom compiler configuration, otherwise the score-p - # build system messes with spack's compiler settings. 
- # Create these three files in the build directory - with open("platform-backend-user-provided", "w") as backend_file: - backend_file.write(self.backend_user_provided) - with open("platform-frontend-user-provided", "w") as frontend_file: - frontend_file.write(self.frontend_user_provided) - with open("platform-mpi-user-provided", "w") as mpi_file: - mpi_file.write(self.mpi_user_provided) - - configure_args = ["--prefix=%s" % prefix, - "--with-custom-compilers", - "--with-otf2=%s" % spec['otf2'].prefix.bin, - "--with-opari2=%s" % spec['opari2'].prefix.bin, - "--with-cube=%s" % spec['cube'].prefix.bin, - "--with-papi-header=%s" % spec['papi'].prefix.include, - "--with-papi-lib=%s" % spec['papi'].prefix.lib, - "--enable-shared"] - - configure(*configure_args) - - make() - make("install") diff --git a/var/spack/packages/scotch/package.py b/var/spack/packages/scotch/package.py deleted file mode 100644 index 79289ff2ad..0000000000 --- a/var/spack/packages/scotch/package.py +++ /dev/null @@ -1,40 +0,0 @@ -from spack import * -import glob -import os - -class Scotch(Package): - """Scotch is a software package for graph and mesh/hypergraph - partitioning, graph clustering, and sparse matrix ordering.""" - homepage = "http://www.labri.fr/perso/pelegrin/scotch/" - url = "http://gforge.inria.fr/frs/download.php/file/34099/scotch_6.0.3.tar.gz" - list_url = "http://gforge.inria.fr/frs/?group_id=248" - - version('6.0.3', '10b0cc0f184de2de99859eafaca83cfc') - - depends_on('mpi') - - - def patch(self): - with working_dir('src/Make.inc'): - makefiles = glob.glob('Makefile.inc.x86-64_pc_linux2*') - filter_file(r'^CCS\s*=.*$', 'CCS = cc', *makefiles) - filter_file(r'^CCD\s*=.*$', 'CCD = cc', *makefiles) - - - def install(self, spec, prefix): - # Currently support gcc and icc on x86_64 (maybe others with - # vanilla makefile) - makefile = 'Make.inc/Makefile.inc.x86-64_pc_linux2' - if spec.satisfies('%icc'): - makefile += '.icc' - - with working_dir('src'): - force_symlink(makefile, 'Makefile.inc') - for app in ('scotch', 'ptscotch'): - make(app) - - install_tree('bin', prefix.bin) - install_tree('lib', prefix.lib) - install_tree('include', prefix.include) - install_tree('man/man1', prefix.share_man1) - diff --git a/var/spack/packages/scr/package.py b/var/spack/packages/scr/package.py deleted file mode 100644 index 9fb758f072..0000000000 --- a/var/spack/packages/scr/package.py +++ /dev/null @@ -1,44 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Scr(Package): - """SCR caches checkpoint data in storage on the compute nodes of a - Linux cluster to provide a fast, scalable checkpoint/restart - capability for MPI codes""" - - homepage = "https://computation.llnl.gov/project/scr/" - - depends_on("mpi") -# depends_on("dtcmp") - - version('1.1-7', 'a5930e9ab27d1b7049447c2fd7734ebd', url='http://downloads.sourceforge.net/project/scalablecr/releases/scr-1.1-7.tar.gz') - version('1.1.8', '6a0f11ad18e27fcfc00a271ff587b06e', url='https://github.com/hpc/scr/releases/download/v1.1.8/scr-1.1.8.tar.gz') - - def install(self, spec, prefix): - configure("--prefix=" + prefix, - "--with-scr-config-file=" + prefix + "/etc/scr.conf") - make() - make("install") diff --git a/var/spack/packages/silo/package.py b/var/spack/packages/silo/package.py deleted file mode 100644 index 9eda11df15..0000000000 --- a/var/spack/packages/silo/package.py +++ /dev/null @@ -1,19 +0,0 @@ -from spack import * - -class Silo(Package): - """Silo is a library for reading and writing a wide variety of scientific data to binary, disk files.""" - - homepage = "http://wci.llnl.gov/simulation/computer-codes/silo" - url = "https://wci.llnl.gov/content/assets/docs/simulation/computer-codes/silo/silo-4.8/silo-4.8.tar.gz" - - #version('4.9', 'a83eda4f06761a86726e918fc55e782a') - version('4.8', 'b1cbc0e7ec435eb656dc4b53a23663c9') - - depends_on("hdf5@:1.8.12") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--with-hdf5=%s" %spec['hdf5'].prefix) - - make() - make("install") diff --git a/var/spack/packages/snappy/package.py b/var/spack/packages/snappy/package.py deleted file mode 100644 index c8f9ceef7d..0000000000 --- a/var/spack/packages/snappy/package.py +++ /dev/null @@ -1,15 +0,0 @@ -import os -from spack import * - -class Snappy(Package): - """A fast compressor/decompressor: https://code.google.com/p/snappy""" - - homepage = "https://code.google.com/p/snappy" - url = "https://github.com/google/snappy/releases/download/1.1.3/snappy-1.1.3.tar.gz" - - version('1.1.3', '7358c82f133dc77798e4c2062a749b73') - - def install(self, spec, prefix): - configure("--prefix=" + prefix) - make() - make("install") diff --git a/var/spack/packages/spindle/package.py b/var/spack/packages/spindle/package.py deleted file mode 100644 index 06a1e14284..0000000000 --- a/var/spack/packages/spindle/package.py +++ /dev/null @@ -1,44 +0,0 @@ -############################################################################## -# Copyright (c) 2014, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Matthew LeGendre, legendre1@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. 
-# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Spindle(Package): - """Spindle improves the library-loading performance of dynamically - linked HPC applications. Without Spindle large MPI jobs can - overload on a shared file system when loading dynamically - linked libraries, causing site-wide performance problems. - """ - homepage = "https://computation.llnl.gov/project/spindle/" - url = "https://github.com/hpc/Spindle/archive/v0.8.1.tar.gz" - list_url = "https://github.com/hpc/Spindle/releases" - - version('0.8.1', 'f11793a6b9d8df2cd231fccb2857d912') - - depends_on("launchmon") - - def install(self, spec, prefix): - configure("--prefix=" + prefix) - make() - make("install") diff --git a/var/spack/packages/sqlite/package.py b/var/spack/packages/sqlite/package.py deleted file mode 100644 index 734b0b6cb6..0000000000 --- a/var/spack/packages/sqlite/package.py +++ /dev/null @@ -1,40 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Sqlite(Package): - """SQLite3 is an SQL database engine in a C library. Programs that - link the SQLite3 library can have SQL database access without - running a separate RDBMS process. - """ - homepage = "www.sqlite.org" - - version('3.8.5', '0544ef6d7afd8ca797935ccc2685a9ed', - url='http://www.sqlite.org/2014/sqlite-autoconf-3080500.tar.gz') - - def install(self, spec, prefix): - configure("--prefix=" + prefix) - make() - make("install") diff --git a/var/spack/packages/stat/configure_mpicxx.patch b/var/spack/packages/stat/configure_mpicxx.patch deleted file mode 100644 index e09056d95c..0000000000 --- a/var/spack/packages/stat/configure_mpicxx.patch +++ /dev/null @@ -1,19 +0,0 @@ -commit 07ab6e565f939c54fff6580fc8463ea61662871a -Author: Gregory L. 
Lee -Date: Tue May 20 14:53:35 2014 -0700 - - re-boostrap to update configure - -diff --git a/configure b/configure -index 6c4af7d..30901ea 100755 ---- a/configure -+++ b/configure -@@ -15529,7 +15529,7 @@ fi - done - test -n "$MPICC" || MPICC="$CC" - -- for ac_prog in mpig++ mpiicpc mpxlC mpixlC -+ for ac_prog in mpig++ mpiCC mpicxx mpiicpc mpxlC mpixlC - do - # Extract the first word of "$ac_prog", so it can be a program name with args. - set dummy $ac_prog; ac_word=$2 diff --git a/var/spack/packages/stat/package.py b/var/spack/packages/stat/package.py deleted file mode 100644 index 5d81e62731..0000000000 --- a/var/spack/packages/stat/package.py +++ /dev/null @@ -1,40 +0,0 @@ -from spack import * - -class Stat(Package): - """Library to create, manipulate, and export graphs Graphlib.""" - homepage = "http://paradyn.org/STAT/STAT.html" - url = "https://github.com/lee218llnl/stat/archive/v2.0.0.tar.gz" - - version('2.2.0', '26bd69dd57a15afdd5d0ebdb0b7fb6fc') - version('2.1.0', 'ece26beaf057aa9134d62adcdda1ba91') - version('2.0.0', 'c7494210b0ba26b577171b92838e1a9b') - - variant('dysect', default=False, description="enable DySectAPI") - - depends_on('libelf') - depends_on('libdwarf') - depends_on('dyninst') - depends_on('graphlib') - depends_on('graphviz') - depends_on('launchmon') - depends_on('mrnet') - - patch('configure_mpicxx.patch', when='@2.1.0') - - def install(self, spec, prefix): - configure_args = [ - "--enable-gui", - "--prefix=%s" % prefix, - "--disable-examples", # Examples require MPI: avoid this dependency. - "--with-launchmon=%s" % spec['launchmon'].prefix, - "--with-mrnet=%s" % spec['mrnet'].prefix, - "--with-graphlib=%s" % spec['graphlib'].prefix, - "--with-stackwalker=%s" % spec['dyninst'].prefix, - "--with-libdwarf=%s" % spec['libdwarf'].prefix - ] - if '+dysect' in spec: - configure_args.append('--enable-dysectapi') - configure(*configure_args) - - make(parallel=False) - make("install") diff --git a/var/spack/packages/sundials/package.py b/var/spack/packages/sundials/package.py deleted file mode 100644 index 8b784c8c3c..0000000000 --- a/var/spack/packages/sundials/package.py +++ /dev/null @@ -1,39 +0,0 @@ -############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Sundials(Package): - """SUNDIALS (SUite of Nonlinear and DIfferential/ALgebraic equation Solvers)""" - homepage = "http://computation.llnl.gov/casc/sundials/" - url = "http://computation.llnl.gov/casc/sundials/download/code/sundials-2.5.0.tar.gz" - - version('2.5.0', 'aba8b56eec600de3109cfb967aa3ba0f') - - depends_on("mpi") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/swig/package.py b/var/spack/packages/swig/package.py deleted file mode 100644 index ee536d7063..0000000000 --- a/var/spack/packages/swig/package.py +++ /dev/null @@ -1,46 +0,0 @@ -############################################################################## -# Copyright (c) 2014, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Written by Matthew LeGendre, legendre1@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://scalability-llnl.github.io/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License (as published by -# the Free Software Foundation) version 2.1 dated February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -from spack import * - -class Swig(Package): - """SWIG is an interface compiler that connects programs written in - C and C++ with scripting languages such as Perl, Python, Ruby, - and Tcl. It works by taking the declarations found in C/C++ - header files and using them to generate the wrapper code that - scripting languages need to access the underlying C/C++ - code. 
In addition, SWIG provides a variety of customization - features that let you tailor the wrapping process to suit your - application.""" - homepage = "http://www.swig.org" - url = "http://prdownloads.sourceforge.net/swig/swig-3.0.2.tar.gz" - - version('3.0.2', '62f9b0d010cef36a13a010dc530d0d41') - - depends_on('pcre') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/task/package.py b/var/spack/packages/task/package.py deleted file mode 100644 index 07f44cc45b..0000000000 --- a/var/spack/packages/task/package.py +++ /dev/null @@ -1,20 +0,0 @@ -from spack import * - -class Task(Package): - """Feature-rich console based todo list manager""" - homepage = "http://www.taskwarrior.org" - url = "http://taskwarrior.org/download/task-2.4.4.tar.gz" - - version('2.4.4', '517450c4a23a5842df3e9905b38801b3') - - depends_on("gnutls") - depends_on("libuuid") - # depends_on("gcc@4.8:") - - def install(self, spec, prefix): - with working_dir('spack-build', create=True): - cmake('-DCMAKE_BUILD_TYPE=release', - '..', - *std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/taskd/package.py b/var/spack/packages/taskd/package.py deleted file mode 100644 index 66bc0cb484..0000000000 --- a/var/spack/packages/taskd/package.py +++ /dev/null @@ -1,20 +0,0 @@ -from spack import * - -class Taskd(Package): - """TaskWarrior task synchronization daemon""" - # FIXME: add a proper url for your package's homepage here. - homepage = "http://www.taskwarrior.org" - url = "http://taskwarrior.org/download/taskd-1.1.0.tar.gz" - - version('1.1.0', 'ac855828c16f199bdbc45fbc227388d0') - - depends_on("libuuid") - depends_on("gnutls") - - def install(self, spec, prefix): - with working_dir('spack-build', create=True): - cmake('-DCMAKE_BUILD_TYPE=release', - '..', - *std_cmake_args) - make() - make("install") diff --git a/var/spack/packages/tau/package.py b/var/spack/packages/tau/package.py deleted file mode 100644 index 048fac80aa..0000000000 --- a/var/spack/packages/tau/package.py +++ /dev/null @@ -1,36 +0,0 @@ -from spack import * - -import os -from llnl.util.filesystem import join_path - -class Tau(Package): - """A portable profiling and tracing toolkit for performance - analysis of parallel programs written in Fortran, C, C++, UPC, - Java, Python.""" - homepage = "http://www.cs.uoregon.edu/research/tau" - url = "http://www.cs.uoregon.edu/research/paracomp/tau/tauprofile/dist/tau-2.23.1.tar.gz" - - version('2.23.1', '6593b47ae1e7a838e632652f0426fe72') - - def install(self, spec, prefix): - # TAU isn't happy with directories that have '@' in the path. Sigh. - change_sed_delimiter('@', ';', 'configure') - change_sed_delimiter('@', ';', 'utils/FixMakefile') - change_sed_delimiter('@', ';', 'utils/FixMakefile.sed.default') - - # After that, it's relatively standard. - configure("-prefix=%s" % prefix) - make("install") - - # Link arch-specific directories into prefix since there is - # only one arch per prefix the way spack installs. 
- self.link_tau_arch_dirs() - - - def link_tau_arch_dirs(self): - for subdir in os.listdir(self.prefix): - for d in ('bin', 'lib'): - src = join_path(self.prefix, subdir, d) - dest = join_path(self.prefix, d) - if os.path.isdir(src) and not os.path.exists(dest): - os.symlink(join_path(subdir, d), dest) diff --git a/var/spack/packages/tcl/package.py b/var/spack/packages/tcl/package.py deleted file mode 100644 index 529adf7788..0000000000 --- a/var/spack/packages/tcl/package.py +++ /dev/null @@ -1,22 +0,0 @@ -from spack import * - -class Tcl(Package): - """Tcl (Tool Command Language) is a very powerful but easy to - learn dynamic programming language, suitable for a very wide - range of uses, including web and desktop applications, - networking, administration, testing and many more. Open source - and business-friendly, Tcl is a mature yet evolving language - that is truly cross platform, easily deployed and highly - extensible.""" - homepage = "http://www.tcl.tk" - - version('8.6.3', 'db382feca91754b7f93da16dc4cdad1f', - url="http://prdownloads.sourceforge.net/tcl/tcl8.6.3-src.tar.gz") - - depends_on('zlib') - - def install(self, spec, prefix): - with working_dir('unix'): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/the_silver_searcher/package.py b/var/spack/packages/the_silver_searcher/package.py deleted file mode 100644 index e4020b6766..0000000000 --- a/var/spack/packages/the_silver_searcher/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class TheSilverSearcher(Package): - """Fast recursive grep alternative""" - homepage = "http://geoff.greer.fm/ag/" - url = "http://geoff.greer.fm/ag/releases/the_silver_searcher-0.30.0.tar.gz" - - version('0.30.0', '95e2e7859fab1156c835aff7413481db') - - depends_on('pcre') - depends_on('xz') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/thrift/package.py b/var/spack/packages/thrift/package.py deleted file mode 100644 index 0e15052f64..0000000000 --- a/var/spack/packages/thrift/package.py +++ /dev/null @@ -1,44 +0,0 @@ -from spack import * - -class Thrift(Package): - """The Apache Thrift software framework, for scalable cross-language services - development, combines a software stack with a code generation engine to build - services that work efficiently and seamlessly between C++, Java, Python, PHP, - Ruby, Erlang, Perl, Haskell, C#, Cocoa, JavaScript, Node.js, Smalltalk, OCaml - and Delphi and other languages.""" - - homepage = "http://thrift.apache.org" - url = "http://apache.mirrors.ionfish.org/thrift/0.9.2/thrift-0.9.2.tar.gz" - - version('0.9.2', '89f63cc4d0100912f4a1f8a9dee63678') - - extends("python") - - depends_on("autoconf") - depends_on("automake") - depends_on("bison") - depends_on("boost") - depends_on("flex") - depends_on("jdk") - depends_on("libtool") - depends_on("openssl") - depends_on("python") - - # Compilation fails for most languages, fortunately cpp installs fine - # All other languages (yes, including C) are omitted until someone needs them - def install(self, spec, prefix): - env["PY_PREFIX"] = prefix - env["JAVA_PREFIX"] = prefix - - configure("--prefix=%s" % prefix, - "--with-boost=%s" % spec['boost'].prefix, - "--with-c=no", - "--with-go=no", - "--with-python=yes", - "--with-lua=no", - "--with-php=no", - "--with-qt4=no", - "--enable-tests=no") - - make() - make("install") diff --git a/var/spack/packages/tk/package.py b/var/spack/packages/tk/package.py deleted file mode 
100644 index 96736f6f95..0000000000 --- a/var/spack/packages/tk/package.py +++ /dev/null @@ -1,22 +0,0 @@ -from spack import * - -class Tk(Package): - """Tk is a graphical user interface toolkit that takes developing - desktop applications to a higher level than conventional - approaches. Tk is the standard GUI not only for Tcl, but for - many other dynamic languages, and can produce rich, native - applications that run unchanged across Windows, Mac OS X, Linux - and more.""" - homepage = "http://www.tcl.tk" - url = "http://prdownloads.sourceforge.net/tcl/tk8.6.3-src.tar.gz" - - version('src', '85ca4dbf4dcc19777fd456f6ee5d0221') - - depends_on("tcl") - - def install(self, spec, prefix): - with working_dir('unix'): - configure("--prefix=%s" % prefix, - "--with-tcl=%s" % spec['tcl'].prefix.lib) - make() - make("install") diff --git a/var/spack/packages/tmux/package.py b/var/spack/packages/tmux/package.py deleted file mode 100644 index 23d36db427..0000000000 --- a/var/spack/packages/tmux/package.py +++ /dev/null @@ -1,24 +0,0 @@ -from spack import * - -class Tmux(Package): - """tmux is a terminal multiplexer. What is a terminal multiplexer? It lets - you switch easily between several programs in one terminal, detach them (they - keep running in the background) and reattach them to a different terminal. And - do a lot more. - """ - - homepage = "http://tmux.sourceforge.net" - url = "http://downloads.sourceforge.net/project/tmux/tmux/tmux-1.9/tmux-1.9a.tar.gz" - - version('1.9a', 'b07601711f96f1d260b390513b509a2d') - - depends_on('libevent') - depends_on('ncurses') - - def install(self, spec, prefix): - configure( - "--prefix=%s" % prefix, - "PKG_CONFIG_PATH=%s:%s" % (spec['libevent'].prefix, spec['ncurses'].prefix)) - - make() - make("install") diff --git a/var/spack/packages/tmuxinator/package.py b/var/spack/packages/tmuxinator/package.py deleted file mode 100644 index 26c061cbd6..0000000000 --- a/var/spack/packages/tmuxinator/package.py +++ /dev/null @@ -1,17 +0,0 @@ -from spack import * - -class Tmuxinator(Package): - """A session configuration creator and manager for tmux""" - homepage = "https://github.com/tmuxinator/tmuxinator" - url = "https://github.com/tmuxinator/tmuxinator" - - version('0.6.11', - git='https://github.com/tmuxinator/tmuxinator', - tag='v0.6.11') - - extends('ruby') - - def install(self, spec, prefix): - gem('build', 'tmuxinator.gemspec') - gem('install', 'tmuxinator-{}.gem'.format(self.version)) - diff --git a/var/spack/packages/trilinos/package.py b/var/spack/packages/trilinos/package.py deleted file mode 100644 index 7c43f796a4..0000000000 --- a/var/spack/packages/trilinos/package.py +++ /dev/null @@ -1,50 +0,0 @@ -from spack import * - - -class Trilinos(Package): - """ - The Trilinos Project is an effort to develop algorithms and enabling technologies within an object-oriented - software framework for the solution of large-scale, complex multi-physics engineering and scientific problems. - A unique design feature of Trilinos is its focus on packages. 
- """ - homepage = "https://trilinos.org/" - url = "http://trilinos.csbsju.edu/download/files/trilinos-12.2.1-Source.tar.gz" - - version('12.2.1', '6161926ea247863c690e927687f83be9') - version('12.0.1', 'bd99741d047471e127b8296b2ec08017') - version('11.14.3', '2f4f83f8333e4233c57d0f01c4b57426') - version('11.14.2', 'a43590cf896c677890d75bfe75bc6254') - version('11.14.1', '40febc57f76668be8b6a77b7607bb67f') - - variant('mpi', default=True, description='Add a dependency on MPI and enables MPI dependent packages') - - # Everything should be compiled with -fpic - depends_on('blas') - depends_on('lapack') - depends_on('boost') - depends_on('netcdf') - depends_on('matio') - depends_on('glm') - depends_on('swig') - depends_on('mpi', when='+mpi') - - def install(self, spec, prefix): - - options = [ - '-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON', - '-DTrilinos_ENABLE_TESTS:BOOL=OFF', - '-DTrilinos_ENABLE_EXAMPLES:BOOL=OFF', - '-DBUILD_SHARED_LIBS:BOOL=ON', - '-DBLAS_LIBRARY_DIRS:PATH=%s' % spec['blas'].prefix, - '-DLAPACK_LIBRARY_DIRS:PATH=%s' % spec['lapack'].prefix - ] - if '+mpi' in spec: - mpi_options = ['-DTPL_ENABLE_MPI:BOOL=ON'] - options.extend(mpi_options) - - # -DCMAKE_INSTALL_PREFIX and all the likes... - options.extend(std_cmake_args) - with working_dir('spack-build', create=True): - cmake('..', *options) - make() - make('install') diff --git a/var/spack/packages/uncrustify/package.py b/var/spack/packages/uncrustify/package.py deleted file mode 100644 index d3f2d1b473..0000000000 --- a/var/spack/packages/uncrustify/package.py +++ /dev/null @@ -1,14 +0,0 @@ -from spack import * - -class Uncrustify(Package): - """Source Code Beautifier for C, C++, C#, ObjectiveC, D, Java, Pawn and VALA""" - - homepage = "http://uncrustify.sourceforge.net/" - url = "http://downloads.sourceforge.net/project/uncrustify/uncrustify/uncrustify-0.61/uncrustify-0.61.tar.gz" - - version('0.61', 'b6140106e74c64e831d0b1c4b6cf7727') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/util-linux/package.py b/var/spack/packages/util-linux/package.py deleted file mode 100644 index cb7ceabf57..0000000000 --- a/var/spack/packages/util-linux/package.py +++ /dev/null @@ -1,20 +0,0 @@ -from spack import * -import os - -class UtilLinux(Package): - """Util-linux is a suite of essential utilities for any Linux system.""" - - homepage = "http://freecode.com/projects/util-linux" - url = "https://www.kernel.org/pub/linux/utils/util-linux/v2.25/util-linux-2.25.tar.gz" - - version('2.25', 'f6d7fc6952ec69c4dc62c8d7c59c1d57') - - depends_on("python@2.7:") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "PKG_CONFIG_PATH=%s/pkgconfig" % spec['python'].prefix.lib, - "--disable-use-tty-group") - - make() - make("install") diff --git a/var/spack/packages/vim/package.py b/var/spack/packages/vim/package.py deleted file mode 100644 index 4099b3257f..0000000000 --- a/var/spack/packages/vim/package.py +++ /dev/null @@ -1,83 +0,0 @@ -from spack import * - -class Vim(Package): - """Vim is a highly configurable text editor built to enable efficient text - editing. It is an improved version of the vi editor distributed with most - UNIX systems. Vim is often called a "programmer's editor," and so useful - for programming that many consider it an entire IDE. It's not just for - programmers, though. Vim is perfect for all kinds of text editing, from - composing email to editing configuration files. 
- """ - - homepage = "http://www.vim.org" - url = "ftp://ftp.vim.org/pub/vim/unix/vim-7.4.tar.bz2" - list_url = "http://ftp.vim.org/pub/vim/unix/" - - version('7.4', '607e135c559be642f210094ad023dc65') - version('7.3', '5b9510a17074e2b37d8bb38ae09edbf2') - version('7.2', 'f0901284b338e448bfd79ccca0041254') - version('7.1', '44c6b4914f38d6f9aa959640b89da329') - version('7.0', '4ca69757678272f718b1041c810d82d8') - version('6.4', '774c14d93ce58674b3b2c880edd12d77') - version('6.3', '821fda8f14d674346b87e3ef9cb96389') - version('6.2', 'c49d360bbd069d00e2a57804f2a123d9') - version('6.1.405', 'd220ff58f2c72ed606e6d0297c2f2a7c') - version('6.1', '7fd0f915adc7c0dab89772884268b030') - version('6.0', '9d9ca84d489af6b3f54639dd97af3774') - - feature_sets = ('huge', 'big', 'normal', 'small', 'tiny') - for fs in feature_sets: - variant(fs, default=False, description="Use '%s' feature set" % fs) - - variant('python', default=False, description="build with Python") - depends_on('python', when='+python') - - variant('ruby', default=False, description="build with Ruby") - depends_on('ruby', when='+ruby') - - variant('cscope', default=False, description="build with cscope support") - depends_on('cscope', when='+cscope') - - variant('gui', default=False, description="build with gui (gvim)") - # virtual dependency? - - def install(self, spec, prefix): - feature_set = None - for fs in self.feature_sets: - if "+" + fs in spec: - if feature_set is not None: - tty.error("Only one feature set allowed, both %s and %s specified" - % (feature_set, fs)) - feature_set = fs - if '+gui' in spec: - if feature_set is not None: - if feature_set is not 'huge': - tty.error("+gui variant requires 'huge' feature set, %s was specified" - % feature_set) - feature_set = 'huge' - if feature_set is None: - feature_set = 'normal' - - configure_args = [] - configure_args.append("--with-features=" + feature_set) - - if '+python' in spec: - configure_args.append("--enable-pythoninterp=yes") - else: - configure_args.append("--enable-pythoninterp=dynamic") - - if '+ruby' in spec: - configure_args.append("--enable-rubyinterp=yes") - else: - configure_args.append("--enable-rubyinterp=dynamic") - - if '+gui' in spec: - configure_args.append("--enable-gui=auto") - - if '+cscope' in spec: - configure_args.append("--enable-cscope") - - configure("--prefix=%s" % prefix, *configure_args) - - make() - make("install") diff --git a/var/spack/packages/vtk/package.py b/var/spack/packages/vtk/package.py deleted file mode 100644 index 4a27a8fedb..0000000000 --- a/var/spack/packages/vtk/package.py +++ /dev/null @@ -1,40 +0,0 @@ -from spack import * - -class Vtk(Package): - """The Visualization Toolkit (VTK) is an open-source, freely - available software system for 3D computer graphics, image - processing and visualization. """ - homepage = "http://www.vtk.org" - url = "http://www.vtk.org/files/release/6.1/VTK-6.1.0.tar.gz" - - version('6.1.0', '25e4dfb3bad778722dcaec80cd5dab7d') - - depends_on("qt") - - def install(self, spec, prefix): - with working_dir('spack-build', create=True): - cmake_args = [ - "..", - "-DBUILD_SHARED_LIBS=ON", - # Disable wrappers for other languages. - "-DVTK_WRAP_PYTHON=OFF", - "-DVTK_WRAP_JAVA=OFF", - "-DVTK_WRAP_TCL=OFF"] - cmake_args.extend(std_cmake_args) - - # Enable Qt support here. 
- cmake_args.extend([ - "-DQT_QMAKE_EXECUTABLE:PATH=%s/qmake" % spec['qt'].prefix.bin, - "-DVTK_Group_Qt:BOOL=ON", - # Ignore webkit because it's hard to build w/Qt - "-DVTK_Group_Qt=OFF", - "-DModule_vtkGUISupportQt:BOOL=ON", - "-DModule_vtkGUISupportQtOpenGL:BOOL=ON" - ]) - - if spec['qt'].satisfies('@5'): - cmake_args.append("-DVTK_QT_VERSION:STRING=5") - - cmake(*cmake_args) - make() - make("install") diff --git a/var/spack/packages/wget/package.py b/var/spack/packages/wget/package.py deleted file mode 100644 index c8fd025122..0000000000 --- a/var/spack/packages/wget/package.py +++ /dev/null @@ -1,21 +0,0 @@ -from spack import * - -class Wget(Package): - """GNU Wget is a free software package for retrieving files using - HTTP, HTTPS and FTP, the most widely-used Internet protocols. It - is a non-interactive commandline tool, so it may easily be called - from scripts, cron jobs, terminals without X-Windows support, - etc.""" - - homepage = "http://www.gnu.org/software/wget/" - url = "http://ftp.gnu.org/gnu/wget/wget-1.16.tar.xz" - - version('1.16', 'fe102975ab3a6c049777883f1bb9ad07') - - depends_on("openssl") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--with-ssl=openssl") - make() - make("install") diff --git a/var/spack/packages/wx/package.py b/var/spack/packages/wx/package.py deleted file mode 100644 index 1813a8c8a5..0000000000 --- a/var/spack/packages/wx/package.py +++ /dev/null @@ -1,24 +0,0 @@ -from spack import * - -class Wx(Package): - """wxWidgets is a C++ library that lets developers create - applications for Windows, Mac OS X, Linux and other platforms - with a single code base. It has popular language bindings for - Python, Perl, Ruby and many other languages, and unlike other - cross-platform toolkits, wxWidgets gives applications a truly - native look and feel because it uses the platform's native API - rather than emulating the GUI. It's also extensive, free, - open-source and mature.""" - homepage = "http://www.wxwidgets.org/" - - version('2.8.12', '2fa39da14bc06ea86fe902579fedc5b1', - url="https://sourceforge.net/projects/wxwindows/files/2.8.12/wxWidgets-2.8.12.tar.gz") - version('3.0.1', 'dad1f1cd9d4c370cbc22700dc492da31', - url="https://sourceforge.net/projects/wxwindows/files/3.0.1/wxWidgets-3.0.1.tar.bz2") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, "--enable-unicode", "--disable-precomp-headers") - - make(parallel=False) - make("install") - diff --git a/var/spack/packages/wxpropgrid/package.py b/var/spack/packages/wxpropgrid/package.py deleted file mode 100644 index 790cead517..0000000000 --- a/var/spack/packages/wxpropgrid/package.py +++ /dev/null @@ -1,20 +0,0 @@ -from spack import * - -class Wxpropgrid(Package): - """wxPropertyGrid is a property sheet control for wxWidgets. 
In - other words, it is a specialized two-column grid for editing - properties such as strings, numbers, flagsets, string arrays, - and colours.""" - homepage = "http://wxpropgrid.sourceforge.net/" - url = "http://prdownloads.sourceforge.net/wxpropgrid/wxpropgrid-1.4.15-src.tar.gz" - - version('1.4.15', 'f44b5cd6fd60718bacfabbf7994f1e93') - - depends_on("wx") - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix, "--with-wxdir=%s" % spec['wx'].prefix.bin, "--enable-unicode") - - make() - make("install") - diff --git a/var/spack/packages/xcb-proto/package.py b/var/spack/packages/xcb-proto/package.py deleted file mode 100644 index 17a94bd892..0000000000 --- a/var/spack/packages/xcb-proto/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - -class XcbProto(Package): - """Protocol for libxcb""" - - homepage = "http://xcb.freedesktop.org/" - url = "http://xcb.freedesktop.org/dist/xcb-proto-1.11.tar.gz" - - version('1.11', 'c8c6cb72c84f58270f4db1f39607f66a') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/xz/package.py b/var/spack/packages/xz/package.py deleted file mode 100644 index ba6c9733a7..0000000000 --- a/var/spack/packages/xz/package.py +++ /dev/null @@ -1,20 +0,0 @@ -from spack import * - -class Xz(Package): - """XZ Utils is free general-purpose data compression software with - high compression ratio. XZ Utils were written for POSIX-like - systems, but also work on some not-so-POSIX systems. XZ Utils are - the successor to LZMA Utils.""" - homepage = "http://tukaani.org/xz/" - url = "http://tukaani.org/xz/xz-5.2.0.tar.bz2" - - version('5.2.0', '867cc8611760240ebf3440bd6e170bb9', - url = 'http://tukaani.org/xz/xz-5.2.0.tar.bz2') - version('5.2.2', 'f90c9a0c8b259aee2234c4e0d7fd70af', - url = 'http://tukaani.org/xz/xz-5.2.2.tar.bz2') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") - diff --git a/var/spack/packages/yasm/package.py b/var/spack/packages/yasm/package.py deleted file mode 100644 index d3a695b16d..0000000000 --- a/var/spack/packages/yasm/package.py +++ /dev/null @@ -1,16 +0,0 @@ -from spack import * - -class Yasm(Package): - """Yasm is a complete rewrite of the NASM-2.11.06 assembler. 
It - supports the x86 and AMD64 instruction sets, accepts NASM and - GAS assembler syntaxes and outputs binary, ELF32 and ELF64 - object formats.""" - homepage = "http://yasm.tortall.net" - url = "http://www.tortall.net/projects/yasm/releases/yasm-1.3.0.tar.gz" - - version('1.3.0', 'fc9e586751ff789b34b1f21d572d96af') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - make() - make("install") diff --git a/var/spack/packages/zeromq/package.py b/var/spack/packages/zeromq/package.py deleted file mode 100644 index b5a1e3d4cd..0000000000 --- a/var/spack/packages/zeromq/package.py +++ /dev/null @@ -1,20 +0,0 @@ -from spack import * - -class Zeromq(Package): - """ The ZMQ networking/concurrency library and core API """ - homepage = "http://zguide.zeromq.org/" - url = "http://download.zeromq.org/zeromq-4.1.2.tar.gz" - - version('4.1.2', '159c0c56a895472f02668e692d122685') - version('4.1.1', '0a4b44aa085644f25c177f79dc13f253') - version('4.0.7', '9b46f7e7b0704b83638ef0d461fd59ab') - version('4.0.6', 'd47dd09ed7ae6e7fd6f9a816d7f5fdf6') - version('4.0.5', '73c39f5eb01b9d7eaf74a5d899f1d03d') - - depends_on("libsodium") - - def install(self, spec, prefix): - configure("--with-libsodium","--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/zlib/package.py b/var/spack/packages/zlib/package.py deleted file mode 100644 index 2770f781ac..0000000000 --- a/var/spack/packages/zlib/package.py +++ /dev/null @@ -1,18 +0,0 @@ -from spack import * - -class Zlib(Package): - """zlib is designed to be a free, general-purpose, legally unencumbered -- - that is, not covered by any patents -- lossless data-compression library for - use on virtually any computer hardware and operating system. - """ - - homepage = "http://zlib.net" - url = "http://zlib.net/zlib-1.2.8.tar.gz" - - version('1.2.8', '44d667c142d7cda120332623eab69f40') - - def install(self, spec, prefix): - configure("--prefix=%s" % prefix) - - make() - make("install") diff --git a/var/spack/packages/zsh/package.py b/var/spack/packages/zsh/package.py deleted file mode 100644 index 99ef9de2e5..0000000000 --- a/var/spack/packages/zsh/package.py +++ /dev/null @@ -1,16 +0,0 @@ -from spack import * - -class Zsh(Package): - """ The ZSH shell """ - homepage = "http://www.zsh.org" - url = "http://www.zsh.org/pub/zsh-5.0.8.tar.bz2" - - version('5.0.8', 'e6759e8dd7b714d624feffd0a73ba0fe') - - depends_on("pcre") - - def install(self, spec, prefix): - configure('--prefix=%s' % prefix) - - make() - make("install") diff --git a/var/spack/repos/builtin.mock/packages/a/package.py b/var/spack/repos/builtin.mock/packages/a/package.py new file mode 100644 index 0000000000..fa63c08df0 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/a/package.py @@ -0,0 +1,12 @@ +from spack import * + +class A(Package): + """Simple package with no dependencies""" + + homepage = "http://www.example.com" + url = "http://www.example.com/a-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + + def install(self, spec, prefix): + pass diff --git a/var/spack/repos/builtin.mock/packages/b/package.py b/var/spack/repos/builtin.mock/packages/b/package.py new file mode 100644 index 0000000000..cb88aa2157 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/b/package.py @@ -0,0 +1,12 @@ +from spack import * + +class B(Package): + """Simple package with no dependencies""" + + homepage = "http://www.example.com" + url = "http://www.example.com/b-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + + def 
install(self, spec, prefix): + pass diff --git a/var/spack/repos/builtin.mock/packages/c/package.py b/var/spack/repos/builtin.mock/packages/c/package.py new file mode 100644 index 0000000000..f51b913fa9 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/c/package.py @@ -0,0 +1,12 @@ +from spack import * + +class C(Package): + """Simple package with no dependencies""" + + homepage = "http://www.example.com" + url = "http://www.example.com/c-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + + def install(self, spec, prefix): + pass diff --git a/var/spack/repos/builtin.mock/packages/callpath/package.py b/var/spack/repos/builtin.mock/packages/callpath/package.py new file mode 100644 index 0000000000..5b6b70ba2a --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/callpath/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Callpath(Package): + homepage = "https://github.com/tgamblin/callpath" + url = "http://github.com/tgamblin/callpath-1.0.tar.gz" + + version(0.8, 'foobarbaz') + version(0.9, 'foobarbaz') + version(1.0, 'foobarbaz') + + depends_on("dyninst") + depends_on("mpi") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin.mock/packages/direct_mpich/package.py b/var/spack/repos/builtin.mock/packages/direct_mpich/package.py new file mode 100644 index 0000000000..2ced82521b --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/direct_mpich/package.py @@ -0,0 +1,36 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. 
+# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class DirectMpich(Package): + homepage = "http://www.example.com" + url = "http://www.example.com/direct_mpich-1.0.tar.gz" + + version('1.0', 'foobarbaz') + + depends_on('mpich') + + def install(self, spec, prefix): + pass diff --git a/var/spack/repos/builtin.mock/packages/dyninst/package.py b/var/spack/repos/builtin.mock/packages/dyninst/package.py new file mode 100644 index 0000000000..7998578da1 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/dyninst/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Dyninst(Package): + homepage = "https://paradyn.org" + url = "http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz" + + version('8.2', 'cxyzab', + url='http://www.paradyn.org/release8.2/DyninstAPI-8.2.tgz') + version('8.1.2', 'bcxyza', + url='http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz') + version('8.1.1', 'abcxyz', + url='http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz') + + depends_on("libelf") + depends_on("libdwarf") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin.mock/packages/e/package.py b/var/spack/repos/builtin.mock/packages/e/package.py new file mode 100644 index 0000000000..76c6b64c7f --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/e/package.py @@ -0,0 +1,12 @@ +from spack import * + +class E(Package): + """Simple package with no dependencies""" + + homepage = "http://www.example.com" + url = "http://www.example.com/e-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + + def install(self, spec, prefix): + pass diff --git a/var/spack/repos/builtin.mock/packages/fake/package.py b/var/spack/repos/builtin.mock/packages/fake/package.py new file mode 100644 index 0000000000..fb3c2bdd2e --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/fake/package.py @@ -0,0 +1,34 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Fake(Package): + homepage = "http://www.fake-spack-example.org" + url = "http://www.fake-spack-example.org/downloads/fake-1.0.tar.gz" + + version('1.0', 'foobarbaz') + + def install(self, spec, prefix): + pass diff --git a/var/spack/repos/builtin.mock/packages/git-test/package.py b/var/spack/repos/builtin.mock/packages/git-test/package.py new file mode 100644 index 0000000000..689185463c --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/git-test/package.py @@ -0,0 +1,10 @@ +from spack import * + +class GitTest(Package): + """Mock package that uses git for fetching.""" + homepage = "http://www.git-fetch-example.com" + + version('git', git='to-be-filled-in-by-test') + + def install(self, spec, prefix): + pass diff --git a/var/spack/repos/builtin.mock/packages/hg-test/package.py b/var/spack/repos/builtin.mock/packages/hg-test/package.py new file mode 100644 index 0000000000..462f1e4c3a --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/hg-test/package.py @@ -0,0 +1,10 @@ +from spack import * + +class HgTest(Package): + """Test package that does fetching with mercurial.""" + homepage = "http://www.hg-fetch-example.com" + + version('hg', hg='to-be-filled-in-by-test') + + def install(self, spec, prefix): + pass diff --git a/var/spack/repos/builtin.mock/packages/indirect_mpich/package.py b/var/spack/repos/builtin.mock/packages/indirect_mpich/package.py new file mode 100644 index 0000000000..daf8b4b166 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/indirect_mpich/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class IndirectMpich(Package): + """Test case for a package that depends on MPI and one of its + dependencies requires a *particular version* of MPI. 
+ """ + + homepage = "http://www.example.com" + url = "http://www.example.com/indirect_mpich-1.0.tar.gz" + + version(1.0, 'foobarbaz') + + depends_on('mpi') + depends_on('direct_mpich') + + def install(self, spec, prefix): + pass diff --git a/var/spack/repos/builtin.mock/packages/libdwarf/package.py b/var/spack/repos/builtin.mock/packages/libdwarf/package.py new file mode 100644 index 0000000000..0b8df04cfb --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/libdwarf/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * +import os + +# Only build certain parts of dwarf because the other ones break. +dwarf_dirs = ['libdwarf', 'dwarfdump2'] + +class Libdwarf(Package): + homepage = "http://www.prevanders.net/dwarf.html" + url = "http://www.prevanders.net/libdwarf-20130729.tar.gz" + list_url = homepage + + version(20130729, "64b42692e947d5180e162e46c689dfbf") + version(20130207, 'foobarbaz') + version(20111030, 'foobarbaz') + version(20070703, 'foobarbaz') + + depends_on("libelf") + + def install(self, spec, prefix): + pass diff --git a/var/spack/repos/builtin.mock/packages/libelf/package.py b/var/spack/repos/builtin.mock/packages/libelf/package.py new file mode 100644 index 0000000000..94c8f942cd --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/libelf/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+class Libelf(Package):
+    homepage = "http://www.mr511.de/software/english.html"
+    url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
+
+    version('0.8.13', '4136d7b4c04df68b686570afa26988ac')
+    version('0.8.12', 'e21f8273d9f5f6d43a59878dc274fec7')
+    version('0.8.10', '9db4d36c283d9790d8fa7df1f4d7b4d9')
+
+    def install(self, spec, prefix):
+        configure("--prefix=%s" % prefix,
+                  "--enable-shared",
+                  "--disable-dependency-tracking",
+                  "--disable-debug")
+        make()
+
+        # The mkdir commands in libelf's install can fail in parallel
+        make("install", parallel=False)
diff --git a/var/spack/repos/builtin.mock/packages/mpich/package.py b/var/spack/repos/builtin.mock/packages/mpich/package.py
new file mode 100644
index 0000000000..f77d3efc5d
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/mpich/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+class Mpich(Package):
+    homepage = "http://www.mpich.org"
+    url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
+    list_url = "http://www.mpich.org/static/downloads/"
+    list_depth = 2
+
+    variant('debug', default=False,
+            description="Compile MPICH with debug flags.")
+
+    version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0')
+    version('3.0.3', 'foobarbaz')
+    version('3.0.2', 'foobarbaz')
+    version('3.0.1', 'foobarbaz')
+    version('3.0', 'foobarbaz')
+
+    provides('mpi@:3', when='@3:')
+    provides('mpi@:1', when='@:1')
+
+    def install(self, spec, prefix):
+        pass
diff --git a/var/spack/repos/builtin.mock/packages/mpich2/package.py b/var/spack/repos/builtin.mock/packages/mpich2/package.py
new file mode 100644
index 0000000000..827b94c8a4
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/mpich2/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Mpich2(Package): + homepage = "http://www.mpich.org" + url = "http://www.mpich.org/static/downloads/1.5/mpich2-1.5.tar.gz" + list_url = "http://www.mpich.org/static/downloads/" + list_depth = 2 + + version('1.5', '9c5d5d4fe1e17dd12153f40bc5b6dbc0') + version('1.4', 'foobarbaz') + version('1.3', 'foobarbaz') + version('1.2', 'foobarbaz') + version('1.1', 'foobarbaz') + version('1.0', 'foobarbaz') + + provides('mpi@:2.0') + provides('mpi@:2.1', when='@1.1:') + provides('mpi@:2.2', when='@1.2:') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin.mock/packages/mpileaks/package.py b/var/spack/repos/builtin.mock/packages/mpileaks/package.py new file mode 100644 index 0000000000..3989f1b452 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/mpileaks/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Mpileaks(Package): + homepage = "http://www.llnl.gov" + url = "http://www.llnl.gov/mpileaks-1.0.tar.gz" + + version(1.0, 'foobarbaz') + version(2.1, 'foobarbaz') + version(2.2, 'foobarbaz') + version(2.3, 'foobarbaz') + + variant('debug', default=False, description='Debug variant') + variant('opt', default=False, description='Optimized variant') + + depends_on("mpi") + depends_on("callpath") + + def install(self, spec, prefix): + pass diff --git a/var/spack/repos/builtin.mock/packages/multimethod/package.py b/var/spack/repos/builtin.mock/packages/multimethod/package.py new file mode 100644 index 0000000000..75b1606ffc --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/multimethod/package.py @@ -0,0 +1,143 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Multimethod(Package): + """This package is designed for use with Spack's multimethod test. + It has a bunch of test cases for the @when decorator that the + test uses. + """ + + homepage = 'http://www.example.com/' + url = 'http://www.example.com/example-1.0.tar.gz' + + # + # These functions are only valid for versions 1, 2, and 3. + # + @when('@1.0') + def no_version_2(self): + return 1 + + @when('@3.0') + def no_version_2(self): + return 3 + + @when('@4.0') + def no_version_2(self): + return 4 + + + # + # These functions overlap, so there is ambiguity, but we'll take + # the first one. + # + @when('@:4') + def version_overlap(self): + return 1 + + @when('@2:') + def version_overlap(self): + return 2 + + + # + # More complicated case with cascading versions. + # + def mpi_version(self): + return 0 + + @when('^mpi@3:') + def mpi_version(self): + return 3 + + @when('^mpi@2:') + def mpi_version(self): + return 2 + + @when('^mpi@1:') + def mpi_version(self): + return 1 + + + # + # Use these to test whether the default method is called when no + # match is found. 
This also tests whether we can switch methods + # on compilers + # + def has_a_default(self): + return 'default' + + @when('%gcc') + def has_a_default(self): + return 'gcc' + + @when('%intel') + def has_a_default(self): + return 'intel' + + + + # + # Make sure we can switch methods on different architectures + # + @when('=x86_64') + def different_by_architecture(self): + return 'x86_64' + + @when('=ppc64') + def different_by_architecture(self): + return 'ppc64' + + @when('=ppc32') + def different_by_architecture(self): + return 'ppc32' + + @when('=arm64') + def different_by_architecture(self): + return 'arm64' + + + # + # Make sure we can switch methods on different dependencies + # + @when('^mpich') + def different_by_dep(self): + return 'mpich' + + @when('^zmpi') + def different_by_dep(self): + return 'zmpi' + + + # + # Make sure we can switch on virtual dependencies + # + def different_by_virtual_dep(self): + return 1 + + @when('^mpi@2:') + def different_by_virtual_dep(self): + return 2 diff --git a/var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py b/var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py new file mode 100644 index 0000000000..ef0587588e --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py @@ -0,0 +1,18 @@ +from spack import * + +class OptionalDepTest2(Package): + """Depends on the optional-dep-test package""" + + homepage = "http://www.example.com" + url = "http://www.example.com/optional-dep-test-2-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + + variant('odt', default=False) + variant('mpi', default=False) + + depends_on('optional-dep-test', when='+odt') + depends_on('optional-dep-test+mpi', when='+mpi') + + def install(self, spec, prefix): + pass diff --git a/var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py b/var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py new file mode 100644 index 0000000000..e6cb3bd6e7 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py @@ -0,0 +1,17 @@ +from spack import * + +class OptionalDepTest3(Package): + """Depends on the optional-dep-test package""" + + homepage = "http://www.example.com" + url = "http://www.example.com/optional-dep-test-3-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + + variant('var', default=False) + + depends_on('a', when='~var') + depends_on('b', when='+var') + + def install(self, spec, prefix): + pass diff --git a/var/spack/repos/builtin.mock/packages/optional-dep-test/package.py b/var/spack/repos/builtin.mock/packages/optional-dep-test/package.py new file mode 100644 index 0000000000..bb57576ca9 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/optional-dep-test/package.py @@ -0,0 +1,29 @@ +from spack import * + +class OptionalDepTest(Package): + """Description""" + + homepage = "http://www.example.com" + url = "http://www.example.com/optional_dep_test-1.0.tar.gz" + + version('1.0', '0123456789abcdef0123456789abcdef') + version('1.1', '0123456789abcdef0123456789abcdef') + + variant('a', default=False) + variant('f', default=False) + variant('mpi', default=False) + + depends_on('a', when='+a') + depends_on('b', when='@1.1') + depends_on('c', when='%intel') + depends_on('d', when='%intel@64.1') + depends_on('e', when='%clang@34:40') + + depends_on('f', when='+f') + depends_on('g', when='^f') + depends_on('mpi', when='^g') + + depends_on('mpi', when='+mpi') + + def install(self, spec, prefix): + pass diff --git 
a/var/spack/repos/builtin.mock/packages/svn-test/package.py b/var/spack/repos/builtin.mock/packages/svn-test/package.py new file mode 100644 index 0000000000..ba4d5522b4 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/svn-test/package.py @@ -0,0 +1,10 @@ +from spack import * + +class SvnTest(Package): + """Mock package that uses svn for fetching.""" + url = "http://www.example.com/svn-test-1.0.tar.gz" + + version('svn', 'to-be-filled-in-by-test') + + def install(self, spec, prefix): + pass diff --git a/var/spack/repos/builtin.mock/packages/trivial_install_test_package/package.py b/var/spack/repos/builtin.mock/packages/trivial_install_test_package/package.py new file mode 100644 index 0000000000..c4db9f5f07 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/trivial_install_test_package/package.py @@ -0,0 +1,38 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class TrivialInstallTestPackage(Package): + """This package is a stub with a trivial install method. It allows us + to test the install and uninstall logic of spack.""" + homepage = "http://www.example.com/trivial_install" + url = "http://www.unit-test-should-replace-this-url/trivial_install-1.0.tar.gz" + + version('1.0', 'foobarbaz') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + make() + make('install') diff --git a/var/spack/repos/builtin.mock/packages/zmpi/package.py b/var/spack/repos/builtin.mock/packages/zmpi/package.py new file mode 100644 index 0000000000..8c6ceda6d3 --- /dev/null +++ b/var/spack/repos/builtin.mock/packages/zmpi/package.py @@ -0,0 +1,39 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. 
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+class Zmpi(Package):
+    """This is a fake MPI package used to demonstrate virtual package providers
+       with dependencies."""
+    homepage = "http://www.spack-fake-zmpi.org"
+    url = "http://www.spack-fake-zmpi.org/downloads/zmpi-1.0.tar.gz"
+
+    version('1.0', 'foobarbaz')
+
+    provides('mpi@:10.0')
+    depends_on('fake')
+
+    def install(self, spec, prefix):
+        pass
diff --git a/var/spack/repos/builtin.mock/repo.yaml b/var/spack/repos/builtin.mock/repo.yaml
new file mode 100644
index 0000000000..30b068da13
--- /dev/null
+++ b/var/spack/repos/builtin.mock/repo.yaml
@@ -0,0 +1,2 @@
+repo:
+  namespace: builtin.mock
diff --git a/var/spack/repos/builtin/packages/ImageMagick/package.py b/var/spack/repos/builtin/packages/ImageMagick/package.py
new file mode 100644
index 0000000000..753ea80ca6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ImageMagick/package.py
@@ -0,0 +1,37 @@
+from spack import *
+
+class Imagemagick(Package):
+    """ImageMagick is an image processing library"""
+    homepage = "http://www.imagemagic.org"
+
+    #-------------------------------------------------------------------------
+    # ImageMagick does not keep around anything but *-10 versions, so
+    # this URL may change. If you want the bleeding edge, you can
+    # uncomment it and see if it works but you may need to try to
+    # fetch a newer version (-6, -7, -8, -9, etc.) or you can stick
+    # with the older, stable, archived -10 versions below.
+    #
+    # TODO: would be nice if spack had a way to recommend avoiding a
+    # TODO: bleeding edge version, but not comment it out.
+    # -------------------------------------------------------------------------
+    # version('6.9.0-6', 'c1bce7396c22995b8bdb56b7797b4a1b',
+    #         url="http://www.imagemagick.org/download/ImageMagick-6.9.0-6.tar.bz2")
+
+    #-------------------------------------------------------------------------
+    # *-10 versions are archived, so these versions should fetch reliably.
+ # ------------------------------------------------------------------------- + version('6.8.9-10', 'aa050bf9785e571c956c111377bbf57c', + url="http://sourceforge.net/projects/imagemagick/files/old-sources/6.x/6.8/ImageMagick-6.8.9-10.tar.gz/download") + + depends_on('libtool') + depends_on('jpeg') + depends_on('libpng') + depends_on('freetype') + depends_on('fontconfig') + depends_on('libtiff') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/Mitos/package.py b/var/spack/repos/builtin/packages/Mitos/package.py new file mode 100644 index 0000000000..e312da3ffc --- /dev/null +++ b/var/spack/repos/builtin/packages/Mitos/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Mitos(Package): + """Mitos is a library and a tool for collecting sampled memory + performance data to view with MemAxes""" + + homepage = "https://github.com/scalability-llnl/Mitos" + url = "https://github.com/scalability-llnl/Mitos" + + version('0.9.1', 'c6cb57f3cae54f5157affd97ef7ef79e', git='https://github.com/scalability-llnl/Mitos.git', tag='v0.9.1') + + depends_on('dyninst@8.2.1:') + depends_on('hwloc') + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake('..', *std_cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/R/package.py b/var/spack/repos/builtin/packages/R/package.py new file mode 100644 index 0000000000..2e6f65a742 --- /dev/null +++ b/var/spack/repos/builtin/packages/R/package.py @@ -0,0 +1,33 @@ +from spack import * + +class R(Package): + """R is 'GNU S', a freely available language and environment for + statistical computing and graphics which provides a wide va + riety of statistical and graphical techniques: linear and + nonlinear modelling, statistical tests, time series analysis, + classification, clustering, etc. Please consult the R project + homepage for further information.""" + homepage = "http://www.example.com" + url = "http://cran.cnr.berkeley.edu/src/base/R-3/R-3.1.2.tar.gz" + + version('3.1.2', '3af29ec06704cbd08d4ba8d69250ae74') + + depends_on("readline") + depends_on("ncurses") + depends_on("icu") + depends_on("glib") + depends_on("zlib") + depends_on("libtiff") + depends_on("jpeg") + depends_on("cairo") + depends_on("pango") + depends_on("freetype") + depends_on("tcl") + depends_on("tk") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--enable-R-shlib", + "--enable-BLAS-shlib") + make() + make("install") diff --git a/var/spack/repos/builtin/packages/SAMRAI/no-tool-build.patch b/var/spack/repos/builtin/packages/SAMRAI/no-tool-build.patch new file mode 100644 index 0000000000..1adf0cf721 --- /dev/null +++ b/var/spack/repos/builtin/packages/SAMRAI/no-tool-build.patch @@ -0,0 +1,20 @@ +--- SAMRAI/Makefile.in 2013-05-31 11:04:32.000000000 -0700 ++++ SAMRAI/Makefile.in.notools 2014-05-30 10:31:15.135979900 -0700 +@@ -8,7 +8,7 @@ + ## + ######################################################################### + +-default: library tools ++default: library + + SAMRAI = @top_srcdir@ + SUBDIR = . 
+@@ -135,7 +135,7 @@
+ done
+ $(MAKE) archive_remove_obj_names
+
+-install: library tools
++install: library
+ $(INSTALL) -d -m 755 $(INSTDIR)/config
+ $(INSTALL) -d -m 755 $(INSTDIR)/lib
+ $(INSTALL) -d -m 755 $(INSTDIR)/bin
diff --git a/var/spack/repos/builtin/packages/SAMRAI/package.py b/var/spack/repos/builtin/packages/SAMRAI/package.py
new file mode 100644
index 0000000000..eef041f0d5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/SAMRAI/package.py
@@ -0,0 +1,53 @@
+from spack import *
+
+class Samrai(Package):
+    """SAMRAI (Structured Adaptive Mesh Refinement Application Infrastructure)
+       is an object-oriented C++ software library that enables exploration of numerical,
+       algorithmic, parallel computing, and software issues associated with applying
+       structured adaptive mesh refinement (SAMR) technology in large-scale parallel
+       application development.
+    """
+    homepage = "https://computation.llnl.gov/project/SAMRAI/"
+    url = "https://computation.llnl.gov/project/SAMRAI/download/SAMRAI-v3.9.1.tar.gz"
+    list_url = homepage
+
+    version('3.9.1', '232d04d0c995f5abf20d94350befd0b2')
+    version('3.7.3', '12d574eacadf8c9a70f1bb4cd1a69df6')
+    version('3.7.2', 'f6a716f171c9fdbf3cb12f71fa6e2737')
+    version('3.6.3-beta', 'ef0510bf2893042daedaca434e5ec6ce')
+    version('3.5.2-beta', 'd072d9d681eeb9ada15ce91bea784274')
+    version('3.5.0-beta', '1ad18a319fc573e12e2b1fbb6f6b0a19')
+    version('3.4.1-beta', '00814cbee2cb76bf8302aff56bbb385b')
+    version('3.3.3-beta', '1db3241d3e1cab913dc310d736c34388')
+    version('3.3.2-beta', 'e598a085dab979498fcb6c110c4dd26c')
+    version('2.4.4', '04fb048ed0efe7c531ac10c81cc5f6ac')
+
+    depends_on("mpi")
+    depends_on("zlib")
+    depends_on("hdf5")
+    depends_on("boost")
+
+    # don't build tools with gcc
+    patch('no-tool-build.patch', when='%gcc')
+
+    # TODO: currently hard-coded to expect one of openmpi, mpich, or mvapich - be careful!
+    def install(self, spec, prefix):
+        mpi = next(m for m in ('openmpi', 'mpich', 'mvapich')
+                   if m in spec)
+
+        configure(
+            "--prefix=%s" % prefix,
+            "--with-CXX=%s" % spec[mpi].prefix.bin + "/mpic++",
+            "--with-CC=%s" % spec[mpi].prefix.bin + "/mpicc",
+            "--with-hdf5=%s" % spec['hdf5'].prefix,
+            "--with-boost=%s" % spec['boost'].prefix,
+            "--with-zlib=%s" % spec['zlib'].prefix,
+            "--without-blas",
+            "--without-lapack",
+            "--with-hypre=no",
+            "--with-petsc=no",
+            "--enable-opt",
+            "--disable-debug")
+
+        make()
+        make("install")
diff --git a/var/spack/repos/builtin/packages/activeharmony/package.py b/var/spack/repos/builtin/packages/activeharmony/package.py
new file mode 100644
index 0000000000..45dcc7c0e8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/activeharmony/package.py
@@ -0,0 +1,12 @@
+from spack import *
+
+class Activeharmony(Package):
+    """Active Harmony: a framework for auto-tuning (the automated search for values to improve the performance of a target application)."""
+    homepage = "http://www.dyninst.org/harmony"
+    url = "http://www.dyninst.org/sites/default/files/downloads/harmony/ah-4.5.tar.gz"
+
+    version('4.5', 'caee5b864716d376e2c25d739251b2a9')
+
+    def install(self, spec, prefix):
+        make("CFLAGS=-O3")
+        make("install", 'PREFIX=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/adept-utils/package.py b/var/spack/repos/builtin/packages/adept-utils/package.py
new file mode 100644
index 0000000000..e4a2e1523f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/adept-utils/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class AdeptUtils(Package): + """Utility libraries for LLNL performance tools.""" + + homepage = "https://github.com/scalability-llnl/adept-utils" + url = "https://github.com/scalability-llnl/adept-utils/archive/v1.0.tar.gz" + + version('1.0.1', '731a310717adcb004d9d195130efee7d') + version('1.0', '5c6cd9badce56c945ac8551e34804397') + + depends_on("boost") + depends_on("mpi") + + def install(self, spec, prefix): + cmake(*std_cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/apex/package.py b/var/spack/repos/builtin/packages/apex/package.py new file mode 100644 index 0000000000..6404d5208a --- /dev/null +++ b/var/spack/repos/builtin/packages/apex/package.py @@ -0,0 +1,34 @@ +from spack import * +from spack.util.environment import * + +class Apex(Package): + homepage = "http://github.com/khuck/xpress-apex" + #url = "http://github.com/khuck/xpress-apex/archive/v0.1-release-candidate.tar.gz" + url = "http://github.com/khuck/xpress-apex" + + #version('0.1', '6e039c224387348296739f6bf360d081') + #version('master', branch='master', git='https://github.com/khuck/xpress-apex.git') + version('2015-10-21', git='https://github.com/khuck/xpress-apex.git', commit='d2e66ddde689120472fc57fc546d8cd80aab745c') + + depends_on("binutils+libiberty") + depends_on("boost@1.54:") + depends_on("cmake@2.8.12:") + depends_on("activeharmony@4.5:") + depends_on("ompt-openmp") + + def install(self, spec, prefix): + + path=get_path("PATH") + path.remove(spec["binutils"].prefix.bin) + path_set("PATH", path) + with working_dir("build", create=True): + cmake('-DBOOST_ROOT=%s' % spec['boost'].prefix, + '-DUSE_BFD=TRUE', + '-DBFD_ROOT=%s' % spec['binutils'].prefix, + '-DUSE_ACTIVEHARMONY=TRUE', + '-DACTIVEHARMONY_ROOT=%s' % spec['activeharmony'].prefix, + '-DUSE_OMPT=TRUE', + '-DOMPT_ROOT=%s' % spec['ompt-openmp'].prefix, + '..', *std_cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/arpack/package.py b/var/spack/repos/builtin/packages/arpack/package.py new file mode 100644 index 0000000000..8c67c536f3 --- /dev/null +++ b/var/spack/repos/builtin/packages/arpack/package.py @@ -0,0 +1,41 @@ +from spack import * +import os +import shutil + +class Arpack(Package): + """A collection of Fortran77 subroutines designed to solve large scale + eigenvalue problems. + """ + homepage = "http://www.caam.rice.edu/software/ARPACK/" + url = "http://www.caam.rice.edu/software/ARPACK/SRC/arpack96.tar.gz" + + version('96', 'fffaa970198b285676f4156cebc8626e') + + depends_on('blas') + depends_on('lapack') + + def patch(self): + # Filter the cray makefile to make a spack one. + shutil.move('ARMAKES/ARmake.CRAY', 'ARmake.inc') + makefile = FileFilter('ARmake.inc') + + # Be sure to use Spack F77 wrapper + makefile.filter('^FC.*', 'FC = f77') + makefile.filter('^FFLAGS.*', 'FFLAGS = -O2 -g') + + # Set up some variables. + makefile.filter('^PLAT.*', 'PLAT = ') + makefile.filter('^home.*', 'home = %s' % os.getcwd()) + makefile.filter('^BLASdir.*', 'BLASdir = %s' % self.spec['blas'].prefix) + makefile.filter('^LAPACKdir.*', 'LAPACKdir = %s' % self.spec['lapack'].prefix) + + # build the library in our own prefix. 
+ makefile.filter('^ARPACKLIB.*', 'ARPACKLIB = %s/libarpack.a' % os.getcwd()) + + + def install(self, spec, prefix): + with working_dir('SRC'): + make('all') + + mkdirp(prefix.lib) + install('libarpack.a', prefix.lib) diff --git a/var/spack/repos/builtin/packages/asciidoc/package.py b/var/spack/repos/builtin/packages/asciidoc/package.py new file mode 100644 index 0000000000..828f3b3f4f --- /dev/null +++ b/var/spack/repos/builtin/packages/asciidoc/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Asciidoc(Package): + """ A presentable text document format for writing articles, UNIX man + pages and other small to medium sized documents.""" + homepage = "http://asciidoc.org" + url = "http://downloads.sourceforge.net/project/asciidoc/asciidoc/8.6.9/asciidoc-8.6.9.tar.gz" + + version('8.6.9', 'c59018f105be8d022714b826b0be130a') + + depends_on('libxml2') + depends_on('libxslt') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/atk/package.py b/var/spack/repos/builtin/packages/atk/package.py new file mode 100644 index 0000000000..769805b227 --- /dev/null +++ b/var/spack/repos/builtin/packages/atk/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Atk(Package): + """ATK provides the set of accessibility interfaces that are + implemented by other toolkits and applications. Using the ATK + interfaces, accessibility tools have full access to view and + control running applications.""" + homepage = "https://developer.gnome.org/atk/" + url = "http://ftp.gnome.org/pub/gnome/sources/atk/2.14/atk-2.14.0.tar.xz" + + version('2.14.0', 'ecb7ca8469a5650581b1227d78051b8b') + + depends_on("glib") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py new file mode 100644 index 0000000000..fc683363a7 --- /dev/null +++ b/var/spack/repos/builtin/packages/atlas/package.py @@ -0,0 +1,60 @@ +from spack import * +from spack.util.executable import Executable +import os + +class Atlas(Package): + """ + Automatically Tuned Linear Algebra Software, generic shared + ATLAS is an approach for the automatic generation and optimization of + numerical software. Currently ATLAS supplies optimized versions for the + complete set of linear algebra kernels known as the Basic Linear Algebra + Subroutines (BLAS), and a subset of the linear algebra routines in the + LAPACK library. + """ + homepage = "http://math-atlas.sourceforge.net/" + + version('3.11.34', '0b6c5389c095c4c8785fd0f724ec6825', + url='http://sourceforge.net/projects/math-atlas/files/Developer%20%28unstable%29/3.11.34/atlas3.11.34.tar.bz2/download') + version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da', + url='http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2') + + # TODO: make this provide BLAS once it works better. Create a way + # TODO: to mark "beta" packages and require explicit invocation. + + # provides('blas') + + + def patch(self): + # Disable thraed check. LLNL's environment does not allow + # disabling of CPU throttling in a way that ATLAS actually + # understands. + filter_file(r'^\s+if \(thrchk\) exit\(1\);', 'if (0) exit(1);', + 'CONFIG/src/config.c') + # TODO: investigate a better way to add the check back in + # TODO: using, say, MSRs. Or move this to a variant. 
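Both arpack and atlas above skip a configure script and instead rewrite vendored build files during the patch() stage. The standalone sketch below illustrates that same idiom in isolation; it is not part of this commit, and it assumes Spack's FileFilter helper (re-exported by "from spack import *", exactly as the arpack and cblas packages here already use it) together with a hypothetical package name, URL, checksum, and Makefile.inc file:

    from spack import *

    class MakefileOnlyExample(Package):
        """Hypothetical package whose upstream ships only a hand-written makefile."""
        homepage = "http://www.example.com"
        url = "http://www.example.com/example-1.0.tar.gz"

        version('1.0', '0123456789abcdef0123456789abcdef')

        def patch(self):
            # Rewrite the vendored makefile in place so it picks up Spack's
            # compiler wrapper and installs into this package's prefix.
            makefile = FileFilter('Makefile.inc')
            makefile.filter(r'^CC\s*=.*', 'CC = cc')
            makefile.filter(r'^PREFIX\s*=.*', 'PREFIX = %s' % self.prefix)

        def install(self, spec, prefix):
            make('all')
            make('install')

Because cc resolves to Spack's compiler wrapper in the build environment, the filtered makefile still builds with the compiler and flags chosen for the spec.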
+ + @when('@:3.10') + def install(self, spec, prefix): + with working_dir('ATLAS-Build', create=True): + configure = Executable('../configure') + configure('--prefix=%s' % prefix, '-C', 'ic', 'cc', '-C', 'if', 'f77', "--dylibs") + make() + make('check') + make('ptcheck') + make('time') + make("install") + + + def install(self, spec, prefix): + with working_dir('ATLAS-Build', create=True): + configure = Executable('../configure') + configure('--incdir=%s' % prefix.include, + '--libdir=%s' % prefix.lib, + '--cc=cc', + "--shared") + + make() + make('check') + make('ptcheck') + make('time') + make("install") diff --git a/var/spack/repos/builtin/packages/autoconf/package.py b/var/spack/repos/builtin/packages/autoconf/package.py new file mode 100644 index 0000000000..5189faf054 --- /dev/null +++ b/var/spack/repos/builtin/packages/autoconf/package.py @@ -0,0 +1,14 @@ +from spack import * + +class Autoconf(Package): + """Autoconf -- system configuration part of autotools""" + homepage = "https://www.gnu.org/software/autoconf/" + url = "http://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz" + + version('2.69', '82d05e03b93e45f5a39b828dc9c6c29b') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/automaded/package.py b/var/spack/repos/builtin/packages/automaded/package.py new file mode 100644 index 0000000000..9fbd93e3b3 --- /dev/null +++ b/var/spack/repos/builtin/packages/automaded/package.py @@ -0,0 +1,51 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Automaded(Package): + """AutomaDeD (Automata-based Debugging for Dissimilar parallel + tasks) is a tool for automatic diagnosis of performance and + correctness problems in MPI applications. It creates + control-flow models of each MPI process and, when a failure + occurs, these models are leveraged to find the origin of + problems automatically. MPI calls are intercepted (using + wrappers) to create the models. When an MPI application hangs, + AutomaDeD creates a progress-dependence graph that helps + finding the process (or group of processes) that caused the hang. 
+ """ + + homepage = "https://github.com/scalability-llnl/AutomaDeD" + url = "https://github.com/scalability-llnl/AutomaDeD/archive/v1.0.tar.gz" + + version('1.0', '16a3d4def2c4c77d0bc4b21de8b3ab03') + + depends_on('mpi') + depends_on('boost') + depends_on('callpath') + + def install(self, spec, prefix): + cmake("-DSTATE_TRACKER_WITH_CALLPATH=ON", *std_cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/automake/package.py b/var/spack/repos/builtin/packages/automake/package.py new file mode 100644 index 0000000000..9115822730 --- /dev/null +++ b/var/spack/repos/builtin/packages/automake/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Automake(Package): + """Automake -- make file builder part of autotools""" + homepage = "http://www.gnu.org/software/automake/" + url = "http://ftp.gnu.org/gnu/automake/automake-1.14.tar.gz" + + version('1.14.1', 'd052a3e884631b9c7892f2efce542d75') + + depends_on('autoconf') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/bear/package.py b/var/spack/repos/builtin/packages/bear/package.py new file mode 100644 index 0000000000..0d4436fccc --- /dev/null +++ b/var/spack/repos/builtin/packages/bear/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Bear(Package): + """Bear is a tool that generates a compilation database for clang tooling from non-cmake build systems.""" + homepage = "https://github.com/rizsotto/Bear" + url = "https://github.com/rizsotto/Bear/archive/2.0.4.tar.gz" + + version('2.0.4', 'fd8afb5e8e18f8737ba06f90bd77d011') + + depends_on("cmake") + depends_on("python") + + def install(self, spec, prefix): + cmake('.', *std_cmake_args) + + make("all") + make("install") diff --git a/var/spack/repos/builtin/packages/bib2xhtml/package.py b/var/spack/repos/builtin/packages/bib2xhtml/package.py new file mode 100644 index 0000000000..7f8e0cfe5a --- /dev/null +++ b/var/spack/repos/builtin/packages/bib2xhtml/package.py @@ -0,0 +1,27 @@ +from spack import * +from glob import glob + +class Bib2xhtml(Package): + """bib2xhtml is a program that converts BibTeX files into HTML.""" + homepage = "http://www.spinellis.gr/sw/textproc/bib2xhtml/" + url='http://www.spinellis.gr/sw/textproc/bib2xhtml/bib2xhtml-v3.0-15-gf506.tar.gz' + + version('3.0-15-gf506', 'a26ba02fe0053bbbf2277bdf0acf8645') + + def url_for_version(self, v): + return ('http://www.spinellis.gr/sw/textproc/bib2xhtml/bib2xhtml-v%s.tar.gz' % v) + + def install(self, spec, prefix): + # Add the bst include files to the install directory + bst_include = join_path(prefix.share, 'bib2xhtml') + mkdirp(bst_include) + for bstfile in glob('html-*bst'): + install(bstfile, bst_include) + + # Install the script and point it at the user's favorite perl + # and the bst include directory. 
+ mkdirp(prefix.bin) + install('bib2xhtml', prefix.bin) + filter_file(r'#!/usr/bin/perl', + '#!/usr/bin/env BSTINPUTS=%s perl' % bst_include, + join_path(prefix.bin, 'bib2xhtml')) diff --git a/var/spack/repos/builtin/packages/binutils/package.py b/var/spack/repos/builtin/packages/binutils/package.py new file mode 100644 index 0000000000..cac0a0407f --- /dev/null +++ b/var/spack/repos/builtin/packages/binutils/package.py @@ -0,0 +1,30 @@ +from spack import * + +class Binutils(Package): + """GNU binutils, which contain the linker, assembler, objdump and others""" + homepage = "http://www.gnu.org/software/binutils/" + url = "ftp://ftp.gnu.org/gnu/binutils/binutils-2.25.tar.bz2" + + version('2.25', 'd9f3303f802a5b6b0bb73a335ab89d66') + version('2.24', 'e0f71a7b2ddab0f8612336ac81d9636b') + version('2.23.2', '4f8fa651e35ef262edc01d60fb45702e') + version('2.20.1', '2b9dc8f2b7dbd5ec5992c6e29de0b764') + + variant('libiberty', default=False, description='Also install libiberty.') + + def install(self, spec, prefix): + configure_args = [ + '--prefix=%s' % prefix, + '--disable-dependency-tracking', + '--enable-interwork', + '--enable-multilib', + '--enable-shared', + '--enable-64-bit-bfd', + '--enable-targets=all'] + + if '+libiberty' in spec: + configure_args.append('--enable-install-libiberty') + + configure(*configure_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/bison/package.py b/var/spack/repos/builtin/packages/bison/package.py new file mode 100644 index 0000000000..7c526fb958 --- /dev/null +++ b/var/spack/repos/builtin/packages/bison/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Bison(Package): + """Bison is a general-purpose parser generator that converts + an annotated context-free grammar into a deterministic LR or + generalized LR (GLR) parser employing LALR(1) parser tables.""" + + homepage = "http://www.gnu.org/software/bison/" + url = "http://ftp.gnu.org/gnu/bison/bison-3.0.tar.gz" + + version('3.0.4', 'a586e11cd4aff49c3ff6d3b6a4c9ccf8') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py new file mode 100644 index 0000000000..35824d53a2 --- /dev/null +++ b/var/spack/repos/builtin/packages/boost/package.py @@ -0,0 +1,66 @@ +from spack import * + +class Boost(Package): + """Boost provides free peer-reviewed portable C++ source + libraries, emphasizing libraries that work well with the C++ + Standard Library. + + Boost libraries are intended to be widely useful, and usable + across a broad spectrum of applications. The Boost license + encourages both commercial and non-commercial use. 
+ """ + homepage = "http://www.boost.org" + url = "http://downloads.sourceforge.net/project/boost/boost/1.55.0/boost_1_55_0.tar.bz2" + list_url = "http://sourceforge.net/projects/boost/files/boost/" + list_depth = 2 + + version('1.59.0', '6aa9a5c6a4ca1016edd0ed1178e3cb87') + version('1.58.0', 'b8839650e61e9c1c0a89f371dd475546') + version('1.57.0', '1be49befbdd9a5ce9def2983ba3e7b76') + version('1.56.0', 'a744cf167b05d72335f27c88115f211d') + version('1.55.0', 'd6eef4b4cacb2183f2bf265a5a03a354') + version('1.54.0', '15cb8c0803064faef0c4ddf5bc5ca279') + version('1.53.0', 'a00d22605d5dbcfb4c9936a9b35bc4c2') + version('1.52.0', '3a855e0f919107e0ca4de4d84ad3f750') + version('1.51.0', '4b6bd483b692fd138aef84ed2c8eb679') + version('1.50.0', '52dd00be775e689f55a987baebccc462') + version('1.49.0', '0d202cb811f934282dea64856a175698') + version('1.48.0', 'd1e9a7a7f532bb031a3c175d86688d95') + version('1.47.0', 'a2dc343f7bc7f83f8941e47ed4a18200') + version('1.46.1', '7375679575f4c8db605d426fc721d506') + version('1.46.0', '37b12f1702319b73876b0097982087e0') + version('1.45.0', 'd405c606354789d0426bc07bea617e58') + version('1.44.0', 'f02578f5218f217a9f20e9c30e119c6a') + version('1.43.0', 'dd49767bfb726b0c774f7db0cef91ed1') + version('1.42.0', '7bf3b4eb841b62ffb0ade2b82218ebe6') + version('1.41.0', '8bb65e133907db727a2a825c5400d0a6') + version('1.40.0', 'ec3875caeac8c52c7c129802a8483bd7') + version('1.39.0', 'a17281fd88c48e0d866e1a12deecbcc0') + version('1.38.0', '5eca2116d39d61382b8f8235915cb267') + version('1.37.0', '8d9f990bfb7e83769fa5f1d6f065bc92') + version('1.36.0', '328bfec66c312150e4c2a78dcecb504b') + version('1.35.0', 'dce952a7214e72d6597516bcac84048b') + version('1.34.1', '2d938467e8a448a2c9763e0a9f8ca7e5') + version('1.34.0', 'ed5b9291ffad776f8757a916e1726ad0') + + + def url_for_version(self, version): + """Handle Boost's weird URLs, which write the version two different ways.""" + parts = [str(p) for p in Version(version)] + dots = ".".join(parts) + underscores = "_".join(parts) + return "http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2" % ( + dots, underscores) + + + def install(self, spec, prefix): + bootstrap = Executable('./bootstrap.sh') + bootstrap() + + # b2 used to be called bjam, before 1.47 (sigh) + b2name = './b2' if spec.satisfies('@1.47:') else './bjam' + + b2 = Executable(b2name) + b2('install', + '-j %s' % make_jobs, + '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/bowtie2/bowtie2-2.5.patch b/var/spack/repos/builtin/packages/bowtie2/bowtie2-2.5.patch new file mode 100644 index 0000000000..290be39c73 --- /dev/null +++ b/var/spack/repos/builtin/packages/bowtie2/bowtie2-2.5.patch @@ -0,0 +1,16 @@ +--- Makefile 2015-02-26 10:50:00.000000000 -0800 ++++ Makefile.new 2015-07-29 18:03:59.891357399 -0700 +@@ -22,10 +22,10 @@ + # + + INC = +-GCC_PREFIX = $(shell dirname `which gcc`) ++GCC_PREFIX = + GCC_SUFFIX = +-CC = $(GCC_PREFIX)/gcc$(GCC_SUFFIX) +-CPP = $(GCC_PREFIX)/g++$(GCC_SUFFIX) ++CC = cc ++CPP = c++ + CXX = $(CPP) + HEADERS = $(wildcard *.h) + BOWTIE_MM = 1 diff --git a/var/spack/repos/builtin/packages/bowtie2/package.py b/var/spack/repos/builtin/packages/bowtie2/package.py new file mode 100644 index 0000000000..339aab6598 --- /dev/null +++ b/var/spack/repos/builtin/packages/bowtie2/package.py @@ -0,0 +1,24 @@ +from spack import * +from glob import glob +class Bowtie2(Package): + """Description""" + homepage = "bowtie-bio.sourceforge.net/bowtie2/index.shtml" + version('2.2.5','51fa97a862d248d7ee660efc1147c75f', url = 
"http://downloads.sourceforge.net/project/bowtie-bio/bowtie2/2.2.5/bowtie2-2.2.5-source.zip") + + patch('bowtie2-2.5.patch',when='@2.2.5', level=0) + + def install(self, spec, prefix): + make() + mkdirp(prefix.bin) + for bow in glob("bowtie2*"): + install(bow, prefix.bin) + # install('bowtie2',prefix.bin) + # install('bowtie2-align-l',prefix.bin) + # install('bowtie2-align-s',prefix.bin) + # install('bowtie2-build',prefix.bin) + # install('bowtie2-build-l',prefix.bin) + # install('bowtie2-build-s',prefix.bin) + # install('bowtie2-inspect',prefix.bin) + # install('bowtie2-inspect-l',prefix.bin) + # install('bowtie2-inspect-s',prefix.bin) + diff --git a/var/spack/repos/builtin/packages/boxlib/package.py b/var/spack/repos/builtin/packages/boxlib/package.py new file mode 100644 index 0000000000..4f1b71132f --- /dev/null +++ b/var/spack/repos/builtin/packages/boxlib/package.py @@ -0,0 +1,25 @@ +from spack import * + +class Boxlib(Package): + """BoxLib, a software framework for massively parallel + block-structured adaptive mesh refinement (AMR) codes.""" + + homepage = "https://ccse.lbl.gov/BoxLib/" + url = "https://ccse.lbl.gov/pub/Downloads/BoxLib.git"; + + # TODO: figure out how best to version this. No tags in the repo! + version('master', git='https://ccse.lbl.gov/pub/Downloads/BoxLib.git') + + depends_on('mpi') + + def install(self, spec, prefix): + args = std_cmake_args + args += ['-DCCSE_ENABLE_MPI=1', + '-DCMAKE_C_COMPILER=%s' % which('mpicc'), + '-DCMAKE_CXX_COMPILER=%s' % which('mpicxx'), + '-DCMAKE_Fortran_COMPILER=%s' % which('mpif90')] + + cmake('.', *args) + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/bzip2/package.py b/var/spack/repos/builtin/packages/bzip2/package.py new file mode 100644 index 0000000000..d88336664d --- /dev/null +++ b/var/spack/repos/builtin/packages/bzip2/package.py @@ -0,0 +1,36 @@ +from spack import * +from glob import glob + +class Bzip2(Package): + """bzip2 is a freely available, patent free high-quality data + compressor. It typically compresses files to within 10% to 15% + of the best available techniques (the PPM family of statistical + compressors), whilst being around twice as fast at compression + and six times faster at decompression.""" + homepage = "http://www.bzip.org" + url = "http://www.bzip.org/1.0.6/bzip2-1.0.6.tar.gz" + + version('1.0.6', '00b516f4704d4a7cb50a1d97e6e8e15b') + + def install(self, spec, prefix): + # No configure system -- have to filter the makefile for this package. 
+ filter_file(r'CC=gcc', 'CC=cc', 'Makefile', string=True) + + make('-f', 'Makefile-libbz2_so') + make('clean') + make("install", "PREFIX=%s" % prefix) + + bzip2_exe = join_path(prefix.bin, 'bzip2') + install('bzip2-shared', bzip2_exe) + for i, libfile in enumerate(glob('libbz2.so*')): + install(libfile, prefix.lib) + if i == 0: + symlink(join_path(prefix.lib, libfile), join_path(prefix.lib, 'libbz2.so')) + + bunzip2 = join_path(prefix.bin, 'bunzip2') + remove(bunzip2) + symlink(bzip2_exe, bunzip2) + + bzcat = join_path(prefix.bin, 'bzcat') + remove(bzcat) + symlink(bzip2_exe, bzcat) diff --git a/var/spack/repos/builtin/packages/cairo/package.py b/var/spack/repos/builtin/packages/cairo/package.py new file mode 100644 index 0000000000..e1ac8aaa7d --- /dev/null +++ b/var/spack/repos/builtin/packages/cairo/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Cairo(Package): + """Cairo is a 2D graphics library with support for multiple output devices.""" + homepage = "http://cairographics.org" + url = "http://cairographics.org/releases/cairo-1.14.0.tar.xz" + + version('1.14.0', 'fc3a5edeba703f906f2241b394f0cced') + + depends_on("libpng") + depends_on("glib") + depends_on("pixman") + depends_on("fontconfig@2.10.91:") # Require newer version of fontconfig. + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--enable-tee") + make() + make("install") diff --git a/var/spack/repos/builtin/packages/callpath/package.py b/var/spack/repos/builtin/packages/callpath/package.py new file mode 100644 index 0000000000..f8a1eab9f7 --- /dev/null +++ b/var/spack/repos/builtin/packages/callpath/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Callpath(Package): + """Library for representing callpaths consistently in + distributed-memory performance tools.""" + + homepage = "https://github.com/scalability-llnl/callpath" + url = "https://github.com/scalability-llnl/callpath/archive/v1.0.1.tar.gz" + + version('1.0.2', 'b1994d5ee7c7db9d27586fc2dcf8f373') + version('1.0.1', '0047983d2a52c5c335f8ba7f5bab2325') + + depends_on("libelf") + depends_on("libdwarf") + depends_on("dyninst") + depends_on("adept-utils") + depends_on("mpi") + + def install(self, spec, prefix): + # TODO: offer options for the walker used. 
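        # A hedged sketch of one way to act on the TODO above -- the variant
        # name and the alternative walker value are assumptions, not part of
        # this package:
        #
        #     variant('dyninst', default=True,
        #             description='Walk stacks with Dyninst instead of backtrace')
        #     walker = 'dyninst' if '+dyninst' in spec else 'backtrace'
        #     cmake('.', '-DCALLPATH_WALKER=%s' % walker, *std_cmake_args)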
+ cmake('.', "-DCALLPATH_WALKER=dyninst", *std_cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/cblas/package.py b/var/spack/repos/builtin/packages/cblas/package.py new file mode 100644 index 0000000000..3cfe5ee588 --- /dev/null +++ b/var/spack/repos/builtin/packages/cblas/package.py @@ -0,0 +1,35 @@ +from spack import * +import os + +class Cblas(Package): + """The BLAS (Basic Linear Algebra Subprograms) are routines that + provide standard building blocks for performing basic vector and + matrix operations.""" + + homepage = "http://www.netlib.org/blas/_cblas/" + + # tarball has no version, but on the date below, this MD5 was correct. + version('2015-06-06', '1e8830f622d2112239a4a8a83b84209a', + url='http://www.netlib.org/blas/blast-forum/cblas.tgz') + + depends_on('blas') + parallel = False + + def patch(self): + mf = FileFilter('Makefile.in') + + mf.filter('^BLLIB =.*', 'BLLIB = %s/libblas.a' % self.spec['blas'].prefix.lib) + mf.filter('^CC =.*', 'CC = cc') + mf.filter('^FC =.*', 'FC = f90') + + + def install(self, spec, prefix): + make('all') + mkdirp(prefix.lib) + mkdirp(prefix.include) + + # Rename the generated lib file to libcblas.a + install('./lib/cblas_LINUX.a', '%s/libcblas.a' % prefix.lib) + install('./include/cblas.h','%s' % prefix.include) + install('./include/cblas_f77.h','%s' % prefix.include) + diff --git a/var/spack/repos/builtin/packages/cgm/package.py b/var/spack/repos/builtin/packages/cgm/package.py new file mode 100644 index 0000000000..05d6395c5a --- /dev/null +++ b/var/spack/repos/builtin/packages/cgm/package.py @@ -0,0 +1,30 @@ +from spack import * + +class Cgm(Package): + """The Common Geometry Module, Argonne (CGMA) is a code library + which provides geometry functionality used for mesh generation and + other applications.""" + homepage = "http://trac.mcs.anl.gov/projects/ITAPS/wiki/CGM" + url = "http://ftp.mcs.anl.gov/pub/fathom/cgm13.1.1.tar.gz" + + version('13.1.1', '4e8dbc4ba8f65767b29f985f7a23b01f') + version('13.1.0', 'a6c7b22660f164ce893fb974f9cb2028') + version('13.1' , '95f724bda04919fc76818a5b7bc0b4ed') + + depends_on("mpi") + + def patch(self): + filter_file('^(#include "CGMParallelConventions.h")', + '//\1', + 'geom/parallel/CGMReadParallel.cpp') + + + def install(self, spec, prefix): + configure("--with-mpi", + "--prefix=%s" % prefix, + "CFLAGS=-static", + "CXXFLAGS=-static", + "FCFLAGS=-static") + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/clang/package.py b/var/spack/repos/builtin/packages/clang/package.py new file mode 100644 index 0000000000..4f977bf9a4 --- /dev/null +++ b/var/spack/repos/builtin/packages/clang/package.py @@ -0,0 +1,51 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. 
+# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Clang(Package): + """The goal of the Clang project is to create a new C, C++, + Objective C and Objective C++ front-end for the LLVM compiler. + """ + homepage = 'http://clang.llvm.org' + url = 'http://llvm.org/releases/3.7.0/cfe-3.7.0.src.tar.xz' + + depends_on('llvm@3.7.0', when='@3.7.0') + depends_on('llvm@3.6.2', when='@3.6.2') + depends_on('llvm@3.5.1', when='@3.5.1') + + version('3.7.0', '8f9d27335e7331cf0a4711e952f21f01', url='http://llvm.org/releases/3.7.0/cfe-3.7.0.src.tar.xz') + version('3.6.2', 'ff862793682f714bb7862325b9c06e20', url='http://llvm.org/releases/3.6.2/cfe-3.6.2.src.tar.xz') + version('3.5.1', '93f9532f8f7e6f1d8e5c1116907051cb', url='http://llvm.org/releases/3.5.1/cfe-3.5.1.src.tar.xz') + + def install(self, spec, prefix): + env['CXXFLAGS'] = self.compiler.cxx11_flag + + with working_dir('spack-build', create=True): + cmake('..', + '-DCLANG_PATH_TO_LLVM_BUILD=%s' % spec['llvm'].prefix, + '-DLLVM_MAIN_SRC_DIR=%s' % spec['llvm'].prefix, + *std_cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/cloog/package.py b/var/spack/repos/builtin/packages/cloog/package.py new file mode 100644 index 0000000000..814a33c76c --- /dev/null +++ b/var/spack/repos/builtin/packages/cloog/package.py @@ -0,0 +1,26 @@ +from spack import * + +class Cloog(Package): + """CLooG is a free software and library to generate code for + scanning Z-polyhedra. That is, it finds a code (e.g. in C, + FORTRAN...) that reaches each integral point of one or more + parameterized polyhedra.""" + + homepage = "http://www.cloog.org" + url = "http://www.bastoul.net/cloog/pages/download/count.php3?url=./cloog-0.18.1.tar.gz" + list_url = "http://www.bastoul.net/cloog/pages/download" + + version('0.18.1', 'e34fca0540d840e5d0f6427e98c92252') + version('0.18.0', 'be78a47bd82523250eb3e91646db5b3d') + version('0.17.0', '0aa3302c81f65ca62c114e5264f8a802') + + depends_on("gmp") + depends_on("isl") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--with-osl=no", + "--with-isl=%s" % spec['isl'].prefix, + "--with-gmp=%s" % spec['gmp'].prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py new file mode 100644 index 0000000000..9efa370c8b --- /dev/null +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -0,0 +1,45 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Cmake(Package): + """A cross-platform, open-source build system. CMake is a family of + tools designed to build, test and package software.""" + homepage = 'https://www.cmake.org' + + version('2.8.10.2', '097278785da7182ec0aea8769d06860c', + url = 'http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz') + + version('3.0.2', 'db4c687a31444a929d2fdc36c4dfb95f', + url = 'http://www.cmake.org/files/v3.0/cmake-3.0.2.tar.gz') + +# version('3.0.1', 'e2e05d84cb44a42f1371d9995631dcf5') +# version('3.0.0', '21a1c85e1a3b803c4b48e7ff915a863e') + + def install(self, spec, prefix): + configure('--prefix=' + prefix, + '--parallel=' + str(make_jobs)) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/coreutils/package.py b/var/spack/repos/builtin/packages/coreutils/package.py new file mode 100644 index 0000000000..78c608d8eb --- /dev/null +++ b/var/spack/repos/builtin/packages/coreutils/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Coreutils(Package): + """The GNU Core Utilities are the basic file, shell and text + manipulation utilities of the GNU operating system. These are + the core utilities which are expected to exist on every + operating system. 
+ """ + homepage = "http://www.gnu.org/software/coreutils/" + url = "http://ftp.gnu.org/gnu/coreutils/coreutils-8.23.tar.xz" + + version('8.23', 'abed135279f87ad6762ce57ff6d89c41') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/cppcheck/package.py b/var/spack/repos/builtin/packages/cppcheck/package.py new file mode 100644 index 0000000000..8e98f457ee --- /dev/null +++ b/var/spack/repos/builtin/packages/cppcheck/package.py @@ -0,0 +1,15 @@ +from spack import * + +class Cppcheck(Package): + """A tool for static C/C++ code analysis.""" + homepage = "http://cppcheck.sourceforge.net/" + url = "http://downloads.sourceforge.net/project/cppcheck/cppcheck/1.68/cppcheck-1.68.tar.bz2" + + version('1.68', 'c015195f5d61a542f350269030150708') + + def install(self, spec, prefix): + # cppcheck does not have a configure script + make() + # manually install the final cppcheck binary + mkdirp(prefix.bin) + install('cppcheck', prefix.bin) diff --git a/var/spack/repos/builtin/packages/cram/package.py b/var/spack/repos/builtin/packages/cram/package.py new file mode 100644 index 0000000000..4b8ec56f25 --- /dev/null +++ b/var/spack/repos/builtin/packages/cram/package.py @@ -0,0 +1,15 @@ +from spack import * + +class Cram(Package): + """Cram runs many small MPI jobs inside one large MPI job.""" + homepage = "https://github.com/scalability-llnl/cram" + url = "http://github.com/scalability-llnl/cram/archive/v1.0.1.tar.gz" + + version('1.0.1', 'c73711e945cf5dc603e44395f6647f5e') + + depends_on("mpi") + + def install(self, spec, prefix): + cmake(".", *std_cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/cscope/package.py b/var/spack/repos/builtin/packages/cscope/package.py new file mode 100644 index 0000000000..9aac0f7304 --- /dev/null +++ b/var/spack/repos/builtin/packages/cscope/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Cscope(Package): + """Cscope is a developer's tool for browsing source code.""" + homepage = "http://http://cscope.sourceforge.net/" + url = "http://downloads.sourceforge.net/project/cscope/cscope/15.8b/cscope-15.8b.tar.gz" + + version('15.8b', '8f9409a238ee313a96f9f87fe0f3b176') + + # Can be configured to use flex (not necessary) + # ./configure --with-flex + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/cube/package.py b/var/spack/repos/builtin/packages/cube/package.py new file mode 100644 index 0000000000..d97cd25636 --- /dev/null +++ b/var/spack/repos/builtin/packages/cube/package.py @@ -0,0 +1,55 @@ +# FIXME: Add copyright statement +# +from spack import * +from contextlib import closing + +class Cube(Package): + """Cube the profile viewer for Score-P and Scalasca profiles. 
It + displays a multi-dimensional performance space consisting + of the dimensions (i) performance metric, (ii) call path, + and (iii) system resource.""" + + homepage = "http://www.scalasca.org/software/cube-4.x/download.html" + url = "http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz" + + version('4.2.3', '8f95b9531f5a8f8134f279c2767c9b20') + + version('4.3TP1', 'a2090fbc7b2ba394bd5c09ba971e237f', + url = 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz') + + # Using CC as C++ compiler provides quirky workaround for a Score-P build system attempt + # to guess a matching C compiler when configuring scorep-score + backend_user_provided = """\ +CC=cc +CXX=CC +F77=f77 +FC=f90 +#CFLAGS=-fPIC +#CXXFLAGS=-fPIC +""" + frontend_user_provided = """\ +CC_FOR_BUILD=cc +CXX_FOR_BUILD=CC +F77_FOR_BUILD=f70 +FC_FOR_BUILD=f90 +""" + + def install(self, spec, prefix): + # Use a custom compiler configuration, otherwise the score-p + # build system messes with spack's compiler settings. + # Create these three files in the build directory + + with closing(open("vendor/common/build-config/platforms/platform-backend-user-provided", "w")) as backend_file: + backend_file.write(self.backend_user_provided) + with closing(open("vendor/common/build-config/platforms/platform-frontend-user-provided", "w")) as frontend_file: + frontend_file.write(self.frontend_user_provided) + + configure_args = ["--prefix=%s" % prefix, + "--with-custom-compilers", + "--without-paraver", + "--without-gui"] + + configure(*configure_args) + + make(parallel=False) + make("install", parallel=False) diff --git a/var/spack/repos/builtin/packages/czmq/package.py b/var/spack/repos/builtin/packages/czmq/package.py new file mode 100644 index 0000000000..a2f1947554 --- /dev/null +++ b/var/spack/repos/builtin/packages/czmq/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Czmq(Package): + """ A C interface to the ZMQ library """ + homepage = "http://czmq.zeromq.org" + url = "https://github.com/zeromq/czmq/archive/v3.0.2.tar.gz" + + version('3.0.2', '23e9885f7ee3ce88d99d0425f52e9be1', url='https://github.com/zeromq/czmq/archive/v3.0.2.tar.gz') + + depends_on('zeromq') + + def install(self, spec, prefix): + bash = which("bash") + bash("./autogen.sh") + configure("--prefix=%s" % prefix) + + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/dbus/package.py b/var/spack/repos/builtin/packages/dbus/package.py new file mode 100644 index 0000000000..f7c302d611 --- /dev/null +++ b/var/spack/repos/builtin/packages/dbus/package.py @@ -0,0 +1,31 @@ +from spack import * + +class Dbus(Package): + """D-Bus is a message bus system, a simple way for applications to + talk to one another. D-Bus supplies both a system daemon (for + events such new hardware device printer queue ) and a + per-user-login-session daemon (for general IPC needs among user + applications). 
Also, the message bus is built on top of a + general one-to-one message passing framework, which can be used + by any two applications to communicate directly (without going + through the message bus daemon).""" + + homepage = "http://dbus.freedesktop.org/" + url = "http://dbus.freedesktop.org/releases/dbus/dbus-1.8.8.tar.gz" + + version('1.9.0', 'ec6895a4d5c0637b01f0d0e7689e2b36') + version('1.8.8', 'b9f4a18ee3faa1e07c04aa1d83239c43') + version('1.8.6', '6a08ba555d340e9dfe2d623b83c0eea8') + version('1.8.4', '4717cb8ab5b80978fcadf2b4f2f72e1b') + version('1.8.2', 'd6f709bbec0a022a1847c7caec9d6068') + + def install(self, spec, prefix): + configure( + "--prefix=%s" % prefix, + "--disable-systemd") + make() + make("install") + + # dbus needs a machine id generated after install + dbus_uuidgen = Executable(join_path(prefix.bin, 'dbus-uuidgen')) + dbus_uuidgen('--ensure') diff --git a/var/spack/repos/builtin/packages/docbook-xml/package.py b/var/spack/repos/builtin/packages/docbook-xml/package.py new file mode 100644 index 0000000000..fce1de7deb --- /dev/null +++ b/var/spack/repos/builtin/packages/docbook-xml/package.py @@ -0,0 +1,19 @@ +import os +import glob +from spack import * + + +class DocbookXml(Package): + """Docbook DTD XML files.""" + homepage = "http://www.oasis-open.org/docbook" + url = "http://www.oasis-open.org/docbook/xml/4.5/docbook-xml-4.5.zip" + + version('4.5', '03083e288e87a7e829e437358da7ef9e') + + def install(self, spec, prefix): + cp = which('cp') + + install_args = ['-a', '-t', prefix] + install_args.extend(glob.glob('*')) + + cp(*install_args) diff --git a/var/spack/repos/builtin/packages/doxygen/package.py b/var/spack/repos/builtin/packages/doxygen/package.py new file mode 100644 index 0000000000..3d4a4e47a7 --- /dev/null +++ b/var/spack/repos/builtin/packages/doxygen/package.py @@ -0,0 +1,25 @@ +#------------------------------------------------------------------------------ +# Author: Justin Too +# Date: September 11, 2015 +#------------------------------------------------------------------------------ + +from spack import * + +class Doxygen(Package): + """Doxygen is the de facto standard tool for generating documentation + from annotated C++ sources, but it also supports other popular programming + languages such as C, Objective-C, C#, PHP, Java, Python, IDL (Corba, + Microsoft, and UNO/OpenOffice flavors), Fortran, VHDL, Tcl, and to some extent D.. 
+ """ + homepage = "http://www.stack.nl/~dimitri/doxygen/" + url = "http://ftp.stack.nl/pub/users/dimitri/doxygen-1.8.10.src.tar.gz" + + version('1.8.10', '79767ccd986f12a0f949015efb5f058f') + + depends_on("cmake@2.8.12:") + + def install(self, spec, prefix): + cmake('.', *std_cmake_args) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/dri2proto/package.py b/var/spack/repos/builtin/packages/dri2proto/package.py new file mode 100644 index 0000000000..11dfa568e2 --- /dev/null +++ b/var/spack/repos/builtin/packages/dri2proto/package.py @@ -0,0 +1,14 @@ +from spack import * + +class Dri2proto(Package): + """DRI2 Protocol Headers.""" + homepage = "http://http://cgit.freedesktop.org/xorg/proto/dri2proto/" + url = "http://xorg.freedesktop.org/releases/individual/proto/dri2proto-2.8.tar.gz" + + version('2.8', '19ea18f63d8ae8053c9fa84b60365b77') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/dtcmp/package.py b/var/spack/repos/builtin/packages/dtcmp/package.py new file mode 100644 index 0000000000..9d940583c1 --- /dev/null +++ b/var/spack/repos/builtin/packages/dtcmp/package.py @@ -0,0 +1,20 @@ +import os +from spack import * + +class Dtcmp(Package): + """The Datatype Comparison Library provides comparison operations and + parallel sort algorithms for MPI applications.""" + + homepage = "https://github.com/hpc/dtcmp" + url = "https://github.com/hpc/dtcmp/releases/download/v1.0.3/dtcmp-1.0.3.tar.gz" + + version('1.0.3', 'cdd8ccf71e8ff67de2558594a7fcd317') + + depends_on('mpi') + depends_on('lwgrp') + + def install(self, spec, prefix): + configure("--prefix=" + prefix, + "--with-lwgrp=" + spec['lwgrp'].prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/dyninst/package.py b/var/spack/repos/builtin/packages/dyninst/package.py new file mode 100644 index 0000000000..41ec57dd2f --- /dev/null +++ b/var/spack/repos/builtin/packages/dyninst/package.py @@ -0,0 +1,68 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Dyninst(Package): + """API for dynamic binary instrumentation. 
Modify programs while they + are executing without recompiling, re-linking, or re-executing.""" + homepage = "https://paradyn.org" + url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz" + list_url = "http://www.dyninst.org/downloads/dyninst-8.x" + + version('8.2.1', 'abf60b7faabe7a2e4b54395757be39c7', + url="http://www.paradyn.org/release8.2/DyninstAPI-8.2.1.tgz") + version('8.1.2', 'bf03b33375afa66fe0efa46ce3f4b17a', + url="http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz") + version('8.1.1', 'd1a04e995b7aa70960cd1d1fac8bd6ac', + url="http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz") + + depends_on("libelf") + depends_on("libdwarf") + depends_on("boost@1.42:") + + # new version uses cmake + def install(self, spec, prefix): + libelf = spec['libelf'].prefix + libdwarf = spec['libdwarf'].prefix + + with working_dir('spack-build', create=True): + cmake('..', + '-DBoost_INCLUDE_DIR=%s' % spec['boost'].prefix.include, + '-DBoost_LIBRARY_DIR=%s' % spec['boost'].prefix.lib, + '-DBoost_NO_SYSTEM_PATHS=TRUE', + '-DLIBELF_INCLUDE_DIR=%s' % join_path(libelf.include, 'libelf'), + '-DLIBELF_LIBRARIES=%s' % join_path(libelf.lib, 'libelf.so'), + '-DLIBDWARF_INCLUDE_DIR=%s' % libdwarf.include, + '-DLIBDWARF_LIBRARIES=%s' % join_path(libdwarf.lib, 'libdwarf.so'), + *std_cmake_args) + make() + make("install") + + + @when('@:8.1') + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/elfutils/package.py b/var/spack/repos/builtin/packages/elfutils/package.py new file mode 100644 index 0000000000..926d234584 --- /dev/null +++ b/var/spack/repos/builtin/packages/elfutils/package.py @@ -0,0 +1,26 @@ +from spack import * + +class Elfutils(Package): + """elfutils is a collection of various binary tools such as + eu-objdump, eu-readelf, and other utilities that allow you to + inspect and manipulate ELF files. Refer to Table 5.Tools Included + in elfutils for Red Hat Developer for a complete list of binary + tools that are distributed with the Red Hat Developer Toolset + version of elfutils.""" + + homepage = "https://fedorahosted.org/elfutils/" + + version('0.163', + git='git://git.fedorahosted.org/git/elfutils.git', + tag='elfutils-0.163') + + provides('elf') + + def install(self, spec, prefix): + autoreconf = which('autoreconf') + autoreconf('-if') + + configure('--prefix=%s' % prefix, '--enable-maintainer-mode') + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/extrae/package.py b/var/spack/repos/builtin/packages/extrae/package.py new file mode 100644 index 0000000000..3ad4cbaf86 --- /dev/null +++ b/var/spack/repos/builtin/packages/extrae/package.py @@ -0,0 +1,46 @@ +from spack import * + +# typical working line with extrae 3.0.1 +# ./configure --prefix=/usr/local --with-mpi=/usr/lib64/mpi/gcc/openmpi --with-unwind=/usr/local --with-papi=/usr --with-dwarf=/usr --with-elf=/usr --with-dyninst=/usr --with-binutils=/usr --with-xml-prefix=/usr --enable-openmp --enable-nanos --enable-pthread --disable-parallel-merge LDFLAGS=-pthread + +class Extrae(Package): + """Extrae is the package devoted to generate tracefiles which can + be analyzed later by Paraver. Extrae is a tool that uses + different interposition mechanisms to inject probes into the + target application so as to gather information regarding the + application performance. 
The Extrae instrumentation package can + instrument the MPI programin model, and the following parallel + programming models either alone or in conjunction with MPI : + OpenMP, CUDA, OpenCL, pthread, OmpSs""" + homepage = "http://www.bsc.es/computer-sciences/extrae" + url = "http://www.bsc.es/ssl/apps/performanceTools/files/extrae-3.0.1.tar.bz2" + version('3.0.1', 'a6a8ca96cd877723cd8cc5df6bdb922b') + + depends_on("mpi") + depends_on("dyninst") + depends_on("libunwind") + depends_on("boost") + depends_on("libdwarf") + depends_on("papi") + + def install(self, spec, prefix): + if 'openmpi' in spec: + mpi = spec['openmpi'] + elif 'mpich' in spec: + mpi = spec['mpich'] + elif 'mvapich2' in spec: + mpi = spec['mvapich2'] + + configure("--prefix=%s" % prefix, + "--with-mpi=%s" % mpi.prefix, + "--with-unwind=%s" % spec['libunwind'].prefix, + "--with-dyninst=%s" % spec['dyninst'].prefix, + "--with-boost=%s" % spec['boost'].prefix, + "--with-dwarf=%s" % spec['libdwarf'].prefix, + "--with-papi=%s" % spec['papi'].prefix, + "--with-dyninst-headers=%s" % spec['dyninst'].prefix.include, + "--with-dyninst-libs=%s" % spec['dyninst'].prefix.lib) + + make() + make("install", parallel=False) + diff --git a/var/spack/repos/builtin/packages/exuberant-ctags/package.py b/var/spack/repos/builtin/packages/exuberant-ctags/package.py new file mode 100644 index 0000000000..efd2b541b2 --- /dev/null +++ b/var/spack/repos/builtin/packages/exuberant-ctags/package.py @@ -0,0 +1,14 @@ +from spack import * + +class ExuberantCtags(Package): + """The canonical ctags generator""" + homepage = "ctags.sourceforge.net" + url = "http://downloads.sourceforge.net/project/ctags/ctags/5.8/ctags-5.8.tar.gz" + + version('5.8', 'c00f82ecdcc357434731913e5b48630d') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/fish/package.py b/var/spack/repos/builtin/packages/fish/package.py new file mode 100644 index 0000000000..1225558705 --- /dev/null +++ b/var/spack/repos/builtin/packages/fish/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Fish(Package): + """fish is a smart and user-friendly command line shell for OS X, Linux, and + the rest of the family. 
+ """ + + homepage = "http://fishshell.com/" + url = "http://fishshell.com/files/2.2.0/fish-2.2.0.tar.gz" + list_url = homepage + + version('2.2.0', 'a76339fd14ce2ec229283c53e805faac48c3e99d9e3ede9d82c0554acfc7b77a') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/flex/package.py b/var/spack/repos/builtin/packages/flex/package.py new file mode 100644 index 0000000000..b065904912 --- /dev/null +++ b/var/spack/repos/builtin/packages/flex/package.py @@ -0,0 +1,15 @@ +from spack import * + +class Flex(Package): + """Flex is a tool for generating scanners.""" + + homepage = "http://flex.sourceforge.net/" + url = "http://download.sourceforge.net/flex/flex-2.5.39.tar.gz" + + version('2.5.39', 'e133e9ead8ec0a58d81166b461244fde') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/flux/package.py b/var/spack/repos/builtin/packages/flux/package.py new file mode 100644 index 0000000000..c128f46be8 --- /dev/null +++ b/var/spack/repos/builtin/packages/flux/package.py @@ -0,0 +1,36 @@ +from spack import * +import os + +class Flux(Package): + """ A next-generation resource manager (pre-alpha) """ + + homepage = "https://github.com/flux-framework/flux-core" + url = "https://github.com/flux-framework/flux-core" + + version('master', branch='master', git='https://github.com/flux-framework/flux-core') + + # Also needs autotools, but should use the system version if available + depends_on("zeromq@4.0.4:") + depends_on("czmq@2.2:") + depends_on("lua@5.1:5.1.99") + depends_on("munge") + depends_on("libjson-c") + depends_on("libxslt") + # TODO: This provides a catalog, hacked with environment below for now + depends_on("docbook-xml") + depends_on("asciidoc") + depends_on("python") + depends_on("py-cffi") + + def install(self, spec, prefix): + # Bootstrap with autotools + bash = which('bash') + bash('./autogen.sh') + + # Fix asciidoc dependency on xml style sheets and whatnot + os.environ['XML_CATALOG_FILES'] = os.path.join(spec['docbook-xml'].prefix, + 'catalog.xml') + # Configure, compile & install + configure("--prefix=" + prefix) + make("install", "V=1") + diff --git a/var/spack/repos/builtin/packages/fontconfig/package.py b/var/spack/repos/builtin/packages/fontconfig/package.py new file mode 100644 index 0000000000..89b13604e8 --- /dev/null +++ b/var/spack/repos/builtin/packages/fontconfig/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Fontconfig(Package): + """Fontconfig customizing font access""" + homepage = "http://www.freedesktop.org/wiki/Software/fontconfig/" + url = "http://www.freedesktop.org/software/fontconfig/release/fontconfig-2.11.1.tar.gz" + + version('2.11.1' , 'e75e303b4f7756c2b16203a57ac87eba') + + depends_on('freetype') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/freetype/package.py b/var/spack/repos/builtin/packages/freetype/package.py new file mode 100644 index 0000000000..0309b858a1 --- /dev/null +++ b/var/spack/repos/builtin/packages/freetype/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Freetype(Package): + """Font package""" + homepage = "http://http://www.freetype.org" + url = "http://download.savannah.gnu.org/releases/freetype/freetype-2.5.3.tar.gz" + + version('2.5.3' , 'cafe9f210e45360279c730d27bf071e9') + + depends_on('libpng') + + def install(self, spec, 
prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/gasnet/package.py b/var/spack/repos/builtin/packages/gasnet/package.py new file mode 100644 index 0000000000..705961d1de --- /dev/null +++ b/var/spack/repos/builtin/packages/gasnet/package.py @@ -0,0 +1,35 @@ +from spack import * + +class Gasnet(Package): + """GASNet is a language-independent, low-level networking layer + that provides network-independent, high-performance communication + primitives tailored for implementing parallel global address space + SPMD languages and libraries such as UPC, Co-Array Fortran, SHMEM, + Cray Chapel, and Titanium. + """ + homepage = "http://gasnet.lbl.gov" + url = "http://gasnet.lbl.gov/GASNet-1.24.0.tar.gz" + + version('1.24.0', 'c8afdf48381e8b5a7340bdb32ca0f41a') + + + def install(self, spec, prefix): + # TODO: don't use paths with @ in them. + change_sed_delimiter('@', ';', 'configure') + + configure("--prefix=%s" % prefix, + # TODO: factor IB suport out into architecture description. + "--enable-ibv", + "--enable-udp", + "--disable-mpi", + "--enable-par", + "--enable-mpi-compat", + "--enable-segment-fast", + "--disable-aligned-segments", + # TODO: make an option so that Legion can request builds with/without this. + # See the Legion webpage for details on when to/not to use. + "--disable-pshm", + "--with-segment-mmap-max=64MB") + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/gcc/package.py b/var/spack/repos/builtin/packages/gcc/package.py new file mode 100644 index 0000000000..a49a1348aa --- /dev/null +++ b/var/spack/repos/builtin/packages/gcc/package.py @@ -0,0 +1,122 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +from contextlib import closing +from glob import glob + +class Gcc(Package): + """The GNU Compiler Collection includes front ends for C, C++, + Objective-C, Fortran, and Java.""" + homepage = "https://gcc.gnu.org" + + url = "http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2" + list_url = 'http://open-source-box.org/gcc/' + list_depth = 2 + + DEPENDS_ON_ISL_PREDICATE = '@5.0:' + + version('5.2.0', 'a51bcfeb3da7dd4c623e27207ed43467') + version('4.9.3', '6f831b4d251872736e8e9cc09746f327') + version('4.9.2', '4df8ee253b7f3863ad0b86359cd39c43') + version('4.9.1', 'fddf71348546af523353bd43d34919c1') + version('4.8.5', '80d2c2982a3392bb0b89673ff136e223') + version('4.8.4', '5a84a30839b2aca22a2d723de2a626ec') + version('4.7.4', '4c696da46297de6ae77a82797d2abe28') + version('4.6.4', 'b407a3d1480c11667f293bfb1f17d1a4') + version('4.5.4', '27e459c2566b8209ab064570e1b378f7') + + depends_on("mpfr") + depends_on("gmp") + depends_on("mpc") # when @4.5: + depends_on("binutils~libiberty") + + # Save these until we can do optional deps. + depends_on("isl", when=DEPENDS_ON_ISL_PREDICATE) + #depends_on("ppl") + #depends_on("cloog") + + def install(self, spec, prefix): + # libjava/configure needs a minor fix to install into spack paths. + filter_file(r"'@.*@'", "'@[[:alnum:]]*@'", 'libjava/configure', string=True) + + enabled_languages = set(('c', 'c++', 'fortran', 'java', 'objc')) + if spec.satisfies("@4.7.1:"): + enabled_languages.add('go') + + # Generic options to compile GCC + options = ["--prefix=%s" % prefix, + "--libdir=%s/lib64" % prefix, + "--disable-multilib", + "--enable-languages=" + ','.join(enabled_languages), + "--with-mpc=%s" % spec['mpc'].prefix, + "--with-mpfr=%s" % spec['mpfr'].prefix, + "--with-gmp=%s" % spec['gmp'].prefix, + "--enable-lto", + "--with-gnu-ld", + "--with-gnu-as", + "--with-quad"] + # Binutils + binutils_options = ["--with-stage1-ldflags=%s" % self.rpath_args, + "--with-boot-ldflags=%s" % self.rpath_args, + "--with-ld=%s/bin/ld" % spec['binutils'].prefix, + "--with-as=%s/bin/as" % spec['binutils'].prefix] + options.extend(binutils_options) + # Isl + if spec.satisfies(Gcc.DEPENDS_ON_ISL_PREDICATE): + isl_options = ["--with-isl=%s" % spec['isl'].prefix] + options.extend(isl_options) + + # Rest of install is straightforward. + configure(*options) + make() + make("install") + + self.write_rpath_specs() + + + @property + def spec_dir(self): + # e.g. lib64/gcc/x86_64-unknown-linux-gnu/4.9.2 + spec_dir = glob("%s/lib64/gcc/*/*" % self.prefix) + return spec_dir[0] if spec_dir else None + + + def write_rpath_specs(self): + """Generate a spec file so the linker adds a rpath to the libs + the compiler used to build the executable.""" + if not self.spec_dir: + tty.warn("Could not install specs for %s." 
% self.spec.format('$_$@')) + return + + gcc = Executable(join_path(self.prefix.bin, 'gcc')) + lines = gcc('-dumpspecs', return_output=True).strip().split("\n") + specs_file = join_path(self.spec_dir, 'specs') + with closing(open(specs_file, 'w')) as out: + for line in lines: + out.write(line + "\n") + if line.startswith("*link:"): + out.write("-rpath %s/lib:%s/lib64 \\\n"% (self.prefix, self.prefix)) + set_install_permissions(specs_file) diff --git a/var/spack/repos/builtin/packages/gdk-pixbuf/package.py b/var/spack/repos/builtin/packages/gdk-pixbuf/package.py new file mode 100644 index 0000000000..14a5569984 --- /dev/null +++ b/var/spack/repos/builtin/packages/gdk-pixbuf/package.py @@ -0,0 +1,22 @@ +from spack import * + +class GdkPixbuf(Package): + """The Gdk Pixbuf is a toolkit for image loading and pixel buffer + manipulation. It is used by GTK+ 2 and GTK+ 3 to load and + manipulate images. In the past it was distributed as part of + GTK+ 2 but it was split off into a separate package in + preparation for the change to GTK+ 3.""" + homepage = "https://developer.gnome.org/gdk-pixbuf/" + url = "http://ftp.gnome.org/pub/gnome/sources/gdk-pixbuf/2.31/gdk-pixbuf-2.31.1.tar.xz" + + version('2.31.2', '6be6bbc4f356d4b79ab4226860ab8523') + + depends_on("glib") + depends_on("jpeg") + depends_on("libpng") + depends_on("libtiff") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/geos/package.py b/var/spack/repos/builtin/packages/geos/package.py new file mode 100644 index 0000000000..4a2657e32f --- /dev/null +++ b/var/spack/repos/builtin/packages/geos/package.py @@ -0,0 +1,31 @@ +from spack import * + +class Geos(Package): + """GEOS (Geometry Engine - Open Source) is a C++ port of the Java + Topology Suite (JTS). As such, it aims to contain the complete + functionality of JTS in C++. This includes all the OpenGIS + Simple Features for SQL spatial predicate functions and spatial + operators, as well as specific JTS enhanced topology functions.""" + + homepage = "http://trac.osgeo.org/geos/" + url = "http://download.osgeo.org/geos/geos-3.4.2.tar.bz2" + + version('3.4.2', 'fc5df2d926eb7e67f988a43a92683bae') + version('3.4.1', '4c930dec44c45c49cd71f3e0931ded7e') + version('3.4.0', 'e41318fc76b5dc764a69d43ac6b18488') + version('3.3.9', '4794c20f07721d5011c93efc6ccb8e4e') + version('3.3.8', '75be476d0831a2d14958fed76ca266de') + version('3.3.7', '95ab996d22672b067d92c7dee2170460') + version('3.3.6', '6fadfb941541875f4976f75fb0bbc800') + version('3.3.5', '2ba61afb7fe2c5ddf642d82d7b16e75b') + version('3.3.4', '1bb9f14d57ef06ffa41cb1d67acb55a1') + version('3.3.3', '8454e653d7ecca475153cc88fd1daa26') + + extends('python') + depends_on('swig') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--enable-python") + make() + make("install") diff --git a/var/spack/repos/builtin/packages/gflags/package.py b/var/spack/repos/builtin/packages/gflags/package.py new file mode 100644 index 0000000000..62dd80a094 --- /dev/null +++ b/var/spack/repos/builtin/packages/gflags/package.py @@ -0,0 +1,21 @@ +import os +from spack import * + +class Gflags(Package): + """The gflags package contains a C++ library that implements + commandline flags processing. It includes built-in support for + standard types such as string and the ability to define flags + in the source file in which they are used. 
Online documentation + available at: https://gflags.github.io/gflags/""" + + homepage = "https://gflags.github.io/gflags" + url = "https://github.com/gflags/gflags/archive/v2.1.2.tar.gz" + + version('2.1.2', 'ac432de923f9de1e9780b5254884599f') + + def install(self, spec, prefix): + cmake("-DCMAKE_INSTALL_PREFIX=" + prefix, + "-DBUILD_SHARED_LIBS=ON") + make() + make("test") + make("install") diff --git a/var/spack/repos/builtin/packages/ghostscript/package.py b/var/spack/repos/builtin/packages/ghostscript/package.py new file mode 100644 index 0000000000..0ab49d425f --- /dev/null +++ b/var/spack/repos/builtin/packages/ghostscript/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Ghostscript(Package): + """an interpreter for the PostScript language and for PDF. """ + homepage = "http://ghostscript.com/" + url = "http://downloads.ghostscript.com/public/ghostscript-9.16.tar.gz" + + version('9.16', '829319325bbdb83f5c81379a8f86f38f') + + parallel = False + + def install(self, spec, prefix): + configure("--prefix=%s" %prefix, "--enable-shared") + + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py new file mode 100644 index 0000000000..0f1a3ba05b --- /dev/null +++ b/var/spack/repos/builtin/packages/git/package.py @@ -0,0 +1,27 @@ +from spack import * + +class Git(Package): + """Git is a free and open source distributed version control + system designed to handle everything from small to very large + projects with speed and efficiency.""" + homepage = "http://git-scm.com" + url = "https://www.kernel.org/pub/software/scm/git/git-2.2.1.tar.xz" + + version('2.2.1', '43e01f9d96ba8c11611e0eef0d9f9f28') + + # Use system openssl. + # depends_on("openssl") + + # Use system perl for now. + # depends_on("perl") + # depends_on("pcre") + + depends_on("zlib") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--without-pcre", + "--without-python") + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py new file mode 100644 index 0000000000..178f0b9df5 --- /dev/null +++ b/var/spack/repos/builtin/packages/glib/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Glib(Package): + """The GLib package contains a low-level libraries useful for + providing data structure handling for C, portability wrappers + and interfaces for such runtime functionality as an event loop, + threads, dynamic loading and an object system.""" + homepage = "https://developer.gnome.org/glib/" + url = "http://ftp.gnome.org/pub/gnome/sources/glib/2.42/glib-2.42.1.tar.xz" + + version('2.42.1', '89c4119e50e767d3532158605ee9121a') + + depends_on("libffi") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/glm/package.py b/var/spack/repos/builtin/packages/glm/package.py new file mode 100644 index 0000000000..d00c301b4c --- /dev/null +++ b/var/spack/repos/builtin/packages/glm/package.py @@ -0,0 +1,19 @@ +from spack import * + + +class Glm(Package): + """ + OpenGL Mathematics (GLM) is a header only C++ mathematics library for graphics software based on + the OpenGL Shading Language (GLSL) specification. 
+ """ + + homepage = "https://github.com/g-truc/glm" + url = "https://github.com/g-truc/glm/archive/0.9.7.1.tar.gz" + + version('0.9.7.1', '61af6639cdf652d1cdd7117190afced8') + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake('..', *std_cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/global/package.py b/var/spack/repos/builtin/packages/global/package.py new file mode 100644 index 0000000000..a77b1bdc09 --- /dev/null +++ b/var/spack/repos/builtin/packages/global/package.py @@ -0,0 +1,24 @@ +from spack import * +import os + + +class Global(Package): + """ The Gnu Global tagging system """ + # FIXME: add a proper url for your package's homepage here. + homepage = "http://www.gnu.org/software/global" + url = "http://tamacom.com/global/global-6.5.tar.gz" + + version('6.5', 'dfec818b4f53d91721e247cf7b218078') + + depends_on('exuberant-ctags') + + def install(self, spec, prefix): + config_args = ['--prefix={}'.format(prefix)] + + config_args.append('--with-exuberant-ctags={}'.format( + os.path.join(spec['exuberant-ctags'].prefix.bin, 'ctags'))) + + configure(*config_args) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/glog/package.py b/var/spack/repos/builtin/packages/glog/package.py new file mode 100644 index 0000000000..d73386b394 --- /dev/null +++ b/var/spack/repos/builtin/packages/glog/package.py @@ -0,0 +1,15 @@ +import os +from spack import * + +class Glog(Package): + """C++ implementation of the Google logging module.""" + + homepage = "https://github.com/google/glog" + url = "https://github.com/google/glog/archive/v0.3.3.tar.gz" + + version('0.3.3', 'c1f86af27bd9c73186730aa957607ed0') + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/gmp/package.py b/var/spack/repos/builtin/packages/gmp/package.py new file mode 100644 index 0000000000..d6af821b34 --- /dev/null +++ b/var/spack/repos/builtin/packages/gmp/package.py @@ -0,0 +1,40 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Gmp(Package): + """GMP is a free library for arbitrary precision arithmetic, + operating on signed integers, rational numbers, and + floating-point numbers.""" + homepage = "https://gmplib.org" + url = "https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2" + + version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470') + version('6.0.0' , '6ef5869ae735db9995619135bd856b84') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/gnutls/package.py b/var/spack/repos/builtin/packages/gnutls/package.py new file mode 100644 index 0000000000..cf57a24a6d --- /dev/null +++ b/var/spack/repos/builtin/packages/gnutls/package.py @@ -0,0 +1,22 @@ +from spack import * + +class Gnutls(Package): + """GnuTLS is a secure communications library implementing the SSL, + TLS and DTLS protocols and technologies around them. It + provides a simple C language application programming interface + (API) to access the secure communications protocols as well as + APIs to parse and write X.509, PKCS #12, OpenPGP and other + required structures. It is aimed to be portable and efficient + with focus on security and interoperability.""" + + homepage = "http://www.gnutls.org" + url = "ftp://ftp.gnutls.org/gcrypt/gnutls/v3.3/gnutls-3.3.9.tar.xz" + + version('3.3.9', 'ff61b77e39d09f1140ab5a9cf52c58b6') + + depends_on("nettle") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/gperf/package.py b/var/spack/repos/builtin/packages/gperf/package.py new file mode 100644 index 0000000000..32551b67b4 --- /dev/null +++ b/var/spack/repos/builtin/packages/gperf/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Gperf(Package): + """GNU gperf is a perfect hash function generator. For a given + list of strings, it produces a hash function and hash table, in + form of C or C++ code, for looking up a value depending on the + input string. The hash function is perfect, which means that the + hash table has no collisions, and the hash table lookup needs a + single string comparison only.""" + + homepage = "https://www.gnu.org/software/gperf/" + url = "http://ftp.gnu.org/pub/gnu/gperf/gperf-3.0.4.tar.gz" + + version('3.0.4', 'c1f1db32fb6598d6a93e6e88796a8632') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/gperftools/package.py b/var/spack/repos/builtin/packages/gperftools/package.py new file mode 100644 index 0000000000..8900462324 --- /dev/null +++ b/var/spack/repos/builtin/packages/gperftools/package.py @@ -0,0 +1,38 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Gperftools(Package): + """Google's fast malloc/free implementation, especially for multi-threaded applications. + Contains tcmalloc, heap-checker, heap-profiler, and cpu-profiler.""" + homepage = "https://code.google.com/p/gperftools" + url = "https://googledrive.com/host/0B6NtGsLhIcf7MWxMMF9JdTN3UVk/gperftools-2.3.tar.gz" + + version('2.3', 'f54dd119f0e46ac1f13264f8d97adf90', url="https://googledrive.com/host/0B6NtGsLhIcf7MWxMMF9JdTN3UVk/gperftools-2.3.tar.gz") + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/graphlib/package.py b/var/spack/repos/builtin/packages/graphlib/package.py new file mode 100644 index 0000000000..ddac0b2b66 --- /dev/null +++ b/var/spack/repos/builtin/packages/graphlib/package.py @@ -0,0 +1,14 @@ +from spack import * + +class Graphlib(Package): + """Library to create, manipulate, and export graphs Graphlib.""" + homepage = "http://https://github.com/lee218llnl/graphlib" + url = "https://github.com/lee218llnl/graphlib/archive/v2.0.0.tar.gz" + + version('2.0.0', '43c6df84f1d38ba5a5dce0ae19371a70') + + def install(self, spec, prefix): + cmake(".", *std_cmake_args) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/graphviz/package.py b/var/spack/repos/builtin/packages/graphviz/package.py new file mode 100644 index 0000000000..7af7da1881 --- /dev/null +++ b/var/spack/repos/builtin/packages/graphviz/package.py @@ -0,0 +1,21 @@ +from spack import * + +class Graphviz(Package): + """Graph Visualization Software""" + homepage = "http://www.graphviz.org" + url = "http://www.graphviz.org/pub/graphviz/stable/SOURCES/graphviz-2.38.0.tar.gz" + + version('2.38.0', '5b6a829b2ac94efcd5fa3c223ed6d3ae') + + parallel = False + + depends_on("swig") + depends_on("python") + depends_on("ghostscript") + + def install(self, spec, prefix): + configure("--prefix=%s" %prefix) + + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/gtkplus/package.py b/var/spack/repos/builtin/packages/gtkplus/package.py new file mode 100644 index 0000000000..0ebc7100de --- /dev/null +++ b/var/spack/repos/builtin/packages/gtkplus/package.py @@ -0,0 +1,22 @@ +from spack import * + +class Gtkplus(Package): + """The GTK+ 2 package contains libraries used for creating graphical user interfaces for applications.""" + homepage = "http://www.gtk.org" + + version('2.24.25', '612350704dd3aacb95355a4981930c6f', + url="http://ftp.gnome.org/pub/gnome/sources/gtk+/2.24/gtk+-2.24.25.tar.xz") + + depends_on("atk") + depends_on("gdk-pixbuf") + depends_on("pango") + + def patch(self): + # remove disable deprecated flag. 
+ filter_file(r'CFLAGS="-DGDK_PIXBUF_DISABLE_DEPRECATED $CFLAGS"', + '', 'configure', string=True) + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/harfbuzz/package.py b/var/spack/repos/builtin/packages/harfbuzz/package.py new file mode 100644 index 0000000000..ed7c42a909 --- /dev/null +++ b/var/spack/repos/builtin/packages/harfbuzz/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Harfbuzz(Package): + """The Harfbuzz package contains an OpenType text shaping engine.""" + homepage = "http://www.freedesktop.org/wiki/Software/HarfBuzz/" + url = "http://www.freedesktop.org/software/harfbuzz/release/harfbuzz-0.9.37.tar.bz2" + + version('0.9.37', 'bfe733250e34629a188d82e3b971bc1e') + + depends_on("glib") + depends_on("icu") + depends_on("freetype") + + def patch(self): + change_sed_delimiter('@', ';', 'src/Makefile.in') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py new file mode 100644 index 0000000000..15e0ef9338 --- /dev/null +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -0,0 +1,42 @@ +from spack import * + +class Hdf5(Package): + """HDF5 is a data model, library, and file format for storing and managing + data. It supports an unlimited variety of datatypes, and is designed for + flexible and efficient I/O and for high volume and complex data. + """ + + homepage = "http://www.hdfgroup.org/HDF5/" + url = "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8.13/src/hdf5-1.8.13.tar.gz" + list_url = "http://www.hdfgroup.org/ftp/HDF5/releases" + list_depth = 3 + + version('1.8.15', '03cccb5b33dbe975fdcd8ae9dc021f24') + version('1.8.13', 'c03426e9e77d7766944654280b467289') + + depends_on("mpi") + depends_on("zlib") + + # TODO: currently hard-coded to use OpenMPI + def install(self, spec, prefix): + + configure( + "--prefix=%s" % prefix, + "--with-zlib=%s" % spec['zlib'].prefix, + "--enable-parallel", + "--enable-shared", + "CC=%s" % spec['mpich'].prefix.bin + "/mpicc", + "CXX=%s" % spec['mpich'].prefix.bin + "/mpic++") + + make() + make("install") + + def url_for_version(self, version): + v = str(version) + + if version == Version("1.2.2"): + return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + ".tar.gz" + elif version < Version("1.7"): + return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + version.up_to(2) + "/hdf5-" + v + ".tar.gz" + else: + return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + "/src/hdf5-" + v + ".tar.gz" diff --git a/var/spack/repos/builtin/packages/hwloc/package.py b/var/spack/repos/builtin/packages/hwloc/package.py new file mode 100644 index 0000000000..31a31f376a --- /dev/null +++ b/var/spack/repos/builtin/packages/hwloc/package.py @@ -0,0 +1,25 @@ +from spack import * + +class Hwloc(Package): + """The Portable Hardware Locality (hwloc) software package + provides a portable abstraction (across OS, versions, + architectures, ...) of the hierarchical topology of modern + architectures, including NUMA memory nodes, sockets, shared + caches, cores and simultaneous multithreading. It also gathers + various system attributes such as cache and memory information + as well as the locality of I/O devices such as network + interfaces, InfiniBand HCAs or GPUs. 
It primarily aims at + helping applications with gathering information about modern + computing hardware so as to exploit it accordingly and + efficiently.""" + homepage = "http://www.open-mpi.org/projects/hwloc/" + url = "http://www.open-mpi.org/software/hwloc/v1.9/downloads/hwloc-1.9.tar.gz" + + version('1.9', '1f9f9155682fe8946a97c08896109508') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py new file mode 100644 index 0000000000..198b3f00dc --- /dev/null +++ b/var/spack/repos/builtin/packages/hypre/package.py @@ -0,0 +1,32 @@ +from spack import * + +class Hypre(Package): + """Hypre is a library of high performance preconditioners that + features parallel multigrid methods for both structured and + unstructured grid problems.""" + + homepage = "https://computation.llnl.gov/project/linear_solvers/software.php" + url = "https://computation.llnl.gov/project/linear_solvers/download/hypre-2.10.0b.tar.gz" + + version('2.10.0b', '768be38793a35bb5d055905b271f5b8e') + + depends_on("mpi") + depends_on("blas") + depends_on("lapack") + + def install(self, spec, prefix): + blas_dir = spec['blas'].prefix + lapack_dir = spec['lapack'].prefix + + # Hypre's source is staged under ./src so we'll have to manually + # cd into it. + with working_dir("src"): + configure( + "--prefix=%s" % prefix, + "--with-blas-libs=blas", + "--with-blas-lib-dirs=%s/lib" % blas_dir, + "--with-lapack-libs=\"lapack blas\"", + "--with-lapack-lib-dirs=%s/lib" % lapack_dir, + "--with-MPI") + make() + make("install") diff --git a/var/spack/repos/builtin/packages/icu/package.py b/var/spack/repos/builtin/packages/icu/package.py new file mode 100644 index 0000000000..f256ec5712 --- /dev/null +++ b/var/spack/repos/builtin/packages/icu/package.py @@ -0,0 +1,25 @@ +from spack import * + +class Icu(Package): + """The International Components for Unicode (ICU) package is a + mature, widely used set of C/C++ libraries providing Unicode and + Globalization support for software applications. ICU is widely + portable and gives applications the same results on all + platforms.""" + # FIXME: add a proper url for your package's homepage here. 
+ homepage = "http://www.example.com" + url = "http://download.icu-project.org/files/icu4c/54.1/icu4c-54_1-src.tgz" + + version('54.1', 'e844caed8f2ca24c088505b0d6271bc0') + + + def url_for_version(self, version): + return "http://download.icu-project.org/files/icu4c/%s/icu4c-%s-src.tgz" % ( + version, str(version).replace('.', '_')) + + + def install(self, spec, prefix): + with working_dir("source"): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/icu4c/package.py b/var/spack/repos/builtin/packages/icu4c/package.py new file mode 100644 index 0000000000..55b44463b2 --- /dev/null +++ b/var/spack/repos/builtin/packages/icu4c/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Icu4c(Package): + """ICU is a mature, widely used set of C/C++ and Java libraries + providing Unicode and Globalization support for software applications.""" + + homepage = "http://site.icu-project.org/" + url = "http://downloads.sourceforge.net/project/icu/ICU4C/54.1/icu4c-54_1-src.tgz" + + version('54_1', 'e844caed8f2ca24c088505b0d6271bc0') + + def install(self, spec, prefix): + cd("source") + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/isl/package.py b/var/spack/repos/builtin/packages/isl/package.py new file mode 100644 index 0000000000..836ef3ea40 --- /dev/null +++ b/var/spack/repos/builtin/packages/isl/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Isl(Package): + """isl is a thread-safe C library for manipulating sets and + relations of integer points bounded by affine constraints.""" + homepage = "http://isl.gforge.inria.fr" + url = "http://isl.gforge.inria.fr/isl-0.14.tar.bz2" + + version('0.14', 'acd347243fca5609e3df37dba47fd0bb') + + depends_on("gmp") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--with-gmp-prefix=%s" % spec['gmp'].prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/jdk/package.py b/var/spack/repos/builtin/packages/jdk/package.py new file mode 100644 index 0000000000..8f8076dd14 --- /dev/null +++ b/var/spack/repos/builtin/packages/jdk/package.py @@ -0,0 +1,46 @@ +#------------------------------------------------------------------------------ +# Author: Justin Too +#------------------------------------------------------------------------------ +import distutils +from distutils import dir_util +from subprocess import call + +import spack +from spack import * +import llnl.util.tty as tty + +class Jdk(Package): + """The Java Development Kit (JDK) released by Oracle Corporation + in the form of a binary product aimed at Java developers.""" + homepage = "http://www.oracle.com/technetwork/java/javase/downloads/index.html" + + version('8u25-linux-x64', 'e145c03a7edc845215092786bcfba77e', + url="http://download.oracle.com/otn-pub/java/jdk/8u25-b17/jdk-8u25-linux-x64.tar.gz") + + # Oracle requires that you accept their License Agreement in order + # to access the Java packages in download.oracle.com. In order to + # automate this process, we need to utilize these additional curl + # commandline options. 
+ # + # See http://stackoverflow.com/questions/10268583/how-to-automate-download-and-installation-of-java-jdk-on-linux + curl_options=[ + '-j', # junk cookies + '-H', # specify required License Agreement cookie + 'Cookie: oraclelicense=accept-securebackup-cookie'] + + def do_fetch(self): + # Add our custom curl commandline options + tty.msg( + "[Jdk] Adding required commandline options to curl " + + "before performing fetch: %s" % + (self.curl_options)) + + for option in self.curl_options: + spack.curl.add_default_arg(option) + + # Now perform the actual fetch + super(Jdk, self).do_fetch() + + + def install(self, spec, prefix): + distutils.dir_util.copy_tree(".", prefix) diff --git a/var/spack/repos/builtin/packages/jpeg/package.py b/var/spack/repos/builtin/packages/jpeg/package.py new file mode 100644 index 0000000000..87820467db --- /dev/null +++ b/var/spack/repos/builtin/packages/jpeg/package.py @@ -0,0 +1,14 @@ +from spack import * + +class Jpeg(Package): + """jpeg library""" + homepage = "http://www.ijg.org" + url = "http://www.ijg.org/files/jpegsrc.v9a.tar.gz" + + version('9a', '3353992aecaee1805ef4109aadd433e7') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/launchmon/package.py b/var/spack/repos/builtin/packages/launchmon/package.py new file mode 100644 index 0000000000..6fbe6a68d0 --- /dev/null +++ b/var/spack/repos/builtin/packages/launchmon/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2014, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Matthew LeGendre, legendre1@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Launchmon(Package): + """Software infrastructure that enables HPC run-time tools to + co-locate tool daemons with a parallel job.""" + homepage = "http://sourceforge.net/projects/launchmon" + url = "http://downloads.sourceforge.net/project/launchmon/launchmon/1.0.1%20release/launchmon-1.0.1.tar.gz" + + version('1.0.1', '2f12465803409fd07f91174a4389eb2b') + version('1.0.1-2', git='https://github.com/scalability-llnl/launchmon.git', commit='ff7e22424b8f375318951eb1c9282fcbbfa8aadf') + + depends_on('autoconf') + depends_on('automake') + depends_on('libtool') + + def install(self, spec, prefix): + configure( + "--prefix=" + prefix, + "--with-bootfabric=cobo", + "--with-rm=slurm") + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/launchmon/patch.lmon_install_dir b/var/spack/repos/builtin/packages/launchmon/patch.lmon_install_dir new file mode 100644 index 0000000000..8a1d93fdc9 --- /dev/null +++ b/var/spack/repos/builtin/packages/launchmon/patch.lmon_install_dir @@ -0,0 +1,147 @@ +Index: launchmon/src/linux/lmon_api/Makefile.am +=================================================================== +--- launchmon/src/linux/lmon_api/Makefile.am (revision 481) ++++ launchmon/src/linux/lmon_api/Makefile.am (working copy) +@@ -80,13 +80,10 @@ + libmonfeapi_la_CFLAGS = $(AM_CFLAGS) + libmonfeapi_la_CXXFLAGS = $(AM_CXXFLAGS) + +-libmonfeapi_la_LDFLAGS = -L$(top_srcdir)/@COMMLOC@ \ +- -L$(top_srcdir)/@GCRYPTLOC@ \ +- -L$(top_srcdir)/@GPGERRLOC@ \ +- $(AM_LDFLAGS) \ +- -version-info @LMON_CURRENT@:@LMON_REVISION@:@LMON_AGE@ ++libmonfeapi_la_LDFLAGS = $(AM_LDFLAGS) \ ++ -version-info @LMON_CURRENT@:@LMON_REVISION@:@LMON_AGE@ + +-libmonfeapi_la_LIBADD = @LIBPTHREAD@ @LIBCOMM@ @LIBGCRYPT@ @LIBGPGERR@ @LIBRT@ ++libmonfeapi_la_LIBADD = @LIBPTHREAD@ $(top_builddir)/@COMMLOC@/@LIBCOMM@ $(top_builddir)/@GCRYPTLOC@/@LIBGCRYPT@ $(top_builddir)/@GPGERRLOC@/@LIBGPGERR@ @LIBRT@ + + libmonbeapi_la_SOURCES = lmon_be.cxx \ + lmon_daemon_internal.cxx \ +@@ -113,13 +110,10 @@ + libmonbeapi_la_CFLAGS = $(AM_CFLAGS) + libmonbeapi_la_CXXFLAGS = $(AM_CXXFLAGS) + +-libmonbeapi_la_LDFLAGS = -L$(top_srcdir)/@COMMLOC@ \ +- -L$(top_srcdir)/@GCRYPTLOC@ \ +- -L$(top_srcdir)/@GPGERRLOC@ \ +- $(AM_LDFLAGS) \ ++libmonbeapi_la_LDFLAGS = $(AM_LDFLAGS) \ + -version-info @LMON_CURRENT@:@LMON_REVISION@:@LMON_AGE@ + +-libmonbeapi_la_LIBADD = @LIBCOMM@ @LIBGCRYPT@ @LIBGPGERR@ ++libmonbeapi_la_LIBADD = $(top_builddir)/@COMMLOC@/@LIBCOMM@ $(top_builddir)/@GCRYPTLOC@/@LIBGCRYPT@ $(top_builddir)/@GPGERRLOC@/@LIBGPGERR@ + + + # +@@ -146,10 +140,8 @@ + + libmonmwapi_la_CXXFLAGS = $(AM_CXXFLAGS) + +-libmonmwapi_la_LDFLAGS = -L$(top_srcdir)/@COMMLOC@ \ +- -L$(top_srcdir)/@GCRYPTLOC@ \ +- -L$(top_srcdir)/@GPGERRLOC@ \ +- $(AM_LDFLAGS) \ ++libmonmwapi_la_LDFLAGS = $(AM_LDFLAGS) \ + -version-info @LMON_CURRENT@:@LMON_REVISION@:@LMON_AGE@ + +-libmonmwapi_la_LIBADD = @LIBCOMM@ @LIBGCRYPT@ @LIBGPGERR@ ++ ++libmonmwapi_la_LIBADD = $(top_builddir)/@COMMLOC@/@LIBCOMM@ $(top_builddir)/@GCRYPTLOC@/@LIBGCRYPT@ $(top_builddir)/@GPGERRLOC@/@LIBGPGERR@ +Index: tools/cobo/test/Makefile.am +=================================================================== +--- tools/cobo/test/Makefile.am (revision 481) ++++ 
tools/cobo/test/Makefile.am (working copy) +@@ -37,12 +37,12 @@ + + client_SOURCES = client.c + +-client_LDFLAGS = -L$(top_srcdir)/@COMMLOC@ ++client_LDFLAGS = + +-client_LDADD = @LIBCOMM@ ++client_LDADD = $(top_srcdir)/@COMMLOC@/@LIBCOMM@ + + server_rsh_SOURCES = server_rsh.c + +-server_rsh_LDFLAGS = -L$(top_srcdir)/@COMMLOC@ ++server_rsh_LDFLAGS = + +-server_rsh_LDADD = @LIBCOMM@ ++server_rsh_LDADD = $(top_srcdir)/@COMMLOC@/@LIBCOMM@ +Index: tools/pmgr_collective/test/Makefile.am +=================================================================== +--- tools/pmgr_collective/test/Makefile.am (revision 481) ++++ tools/pmgr_collective/test/Makefile.am (working copy) +@@ -31,18 +31,18 @@ + ## Jun 10 2008 DHA: Copied from the old Makefile. + ## + +-INCLUDES = -I$(top_srcdir)/@COMMLOC@ ++INCLUDES = + + noinst_PROGRAMS = client mpirun_rsh + + client_SOURCES = client.c + +-client_LDFLAGS = -L$(top_srcdir)/@COMMLOC@ ++client_LDFLAGS = + +-client_LDADD = @LIBCOMM@ ++client_LDADD = @COMMLOC@/@LIBCOMM@ + + mpirun_rsh_SOURCES = mpirun_rsh.c + +-mpirun_rsh_LDFLAGS = -L$(top_srcdir)/@COMMLOC@ ++mpirun_rsh_LDFLAGS = + +-mpirun_rsh_LDADD = @LIBCOMM@ ++mpirun_rsh_LDADD = @COMMLOC@/@LIBCOMM@ +Index: config/x_ac_bootfabric.m4 +=================================================================== +--- config/x_ac_bootfabric.m4 (revision 481) ++++ config/x_ac_bootfabric.m4 (working copy) +@@ -63,7 +63,7 @@ + #AC_DEFINE(TOOL_SS_ENV, "LMON_SHARED_SECRET", [Define TOOL_SS_ENV]) + #AC_DEFINE(TOOL_SCH_ENV, "LMON_SEC_CHK", [Define TOOL_SCH_ENV]) + #AC_SUBST(COMMLOC, tools/pmgr_collective/src) +- #AC_SUBST(LIBCOMM, -lpmgr_collective) ++ #AC_SUBST(LIBCOMM, libcobo.la) + #else + commfab_found="no" + AC_MSG_ERROR([--with-bootfabric=pmgr is given, but pmgr_collective has been deprecated]) +@@ -87,7 +87,7 @@ + AC_DEFINE(TOOL_SS_ENV, "LMON_SHARED_SECRET", [Define TOOL_SS_ENV]) + AC_DEFINE(TOOL_SCH_ENV, "LMON_SEC_CHK", [Define TOOL_SCH_ENV]) + AC_SUBST(COMMLOC, tools/cobo/src) +- AC_SUBST(LIBCOMM, -lcobo) ++ AC_SUBST(LIBCOMM, libcobo.la) + + if test "x$with_cobo_port" != "xcheck" -a "x$with_cobo_port" != "xyes"; then + AC_DEFINE(COBO_BEGIN_PORT, $with_cobo_port, [Define a beginning port for COBO_BASED]) +@@ -117,7 +117,7 @@ + AC_DEFINE(TOOL_SS_ENV, "LMON_SHARED_SECRET", [Define TOOL_SS_ENV]) + AC_DEFINE(TOOL_SCH_ENV, "LMON_SEC_CHK", [Define TOOL_SCH_ENV]) + AC_SUBST(COMMLOC, tools/cobo/src) +- AC_SUBST(LIBCOMM, -lcobo) ++ AC_SUBST(LIBCOMM, libcobo.la) + + if test "x$with_cobo_port" != "xcheck" -a "x$with_cobo_port" != "xyes"; then + AC_DEFINE(COBO_BEGIN_PORT, $with_cobo_port, [Define a beginning port for COBO_BASED]) +Index: config/x_ac_gcrpyt.m4 +=================================================================== +--- config/x_ac_gcrypt.m4 2011-10-22 00:50:38.000000000 -0700 ++++ config/x_ac_gcrypt.patched.m4 2014-03-14 11:33:59.189220000 -0700 +@@ -55,8 +55,8 @@ + AC_CONFIG_SUBDIRS([tools/libgpg-error]) + AC_SUBST(GPGERRLOC, [tools/libgpg-error/src]) + AC_SUBST(GCRYPTLOC, [tools/libgcrypt/src]) +- AC_SUBST(LIBGCRYPT, [-lgcrypt]) +- AC_SUBST(LIBGPGERR, [-lgpg-error]) ++ AC_SUBST(LIBGCRYPT, [libgcrypt.la]) ++ AC_SUBST(LIBGPGERR, [libgpg-error.la]) + gcrypt_configured="yes" + else + AC_MSG_ERROR([tools/libgpg-error or tools/libgcrypt not found]) + diff --git a/var/spack/repos/builtin/packages/lcms/package.py b/var/spack/repos/builtin/packages/lcms/package.py new file mode 100644 index 0000000000..a53c2f997a --- /dev/null +++ b/var/spack/repos/builtin/packages/lcms/package.py @@ -0,0 +1,19 @@ +from spack import * + +class 
Lcms(Package): + """Little cms is a color management library. Implements fast + transforms between ICC profiles. It is focused on speed, and is + portable across several platforms (MIT license).""" + homepage = "http://www.littlecms.com" + url = "http://downloads.sourceforge.net/project/lcms/lcms/2.6/lcms2-2.6.tar.gz" + + version('2.6', 'f4c08d38ceade4a664ebff7228910a33') + + depends_on("jpeg") + depends_on("libtiff") + depends_on("zlib") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/leveldb/package.py b/var/spack/repos/builtin/packages/leveldb/package.py new file mode 100644 index 0000000000..da68a9cbcb --- /dev/null +++ b/var/spack/repos/builtin/packages/leveldb/package.py @@ -0,0 +1,29 @@ +import os +import glob +from spack import * + +class Leveldb(Package): + """LevelDB is a fast key-value storage library written at Google + that provides an ordered mapping from string keys to string values.""" + + homepage = "https://github.com/google/leveldb" + url = "https://github.com/google/leveldb/archive/v1.18.tar.gz" + + version('1.18', '73770de34a2a5ab34498d2e05b2b7fa0') + + depends_on("snappy") + + def install(self, spec, prefix): + make() + + mkdirp(prefix.include) + mkdirp(prefix.lib) + + cp = which('cp') + + # cp --preserve=links libleveldb.* prefix/lib + args = glob.glob('libleveldb.*') + args.append(prefix + '/lib') + cp('--preserve=links', *args) + + cp('-r', 'include/leveldb', prefix + '/include') diff --git a/var/spack/repos/builtin/packages/libNBC/package.py b/var/spack/repos/builtin/packages/libNBC/package.py new file mode 100644 index 0000000000..6d08f3219c --- /dev/null +++ b/var/spack/repos/builtin/packages/libNBC/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Libnbc(Package): + """LibNBC is a prototypic implementation of a nonblocking + interface for MPI collective operations. Based on ANSI C and + MPI-1, it supports all MPI-1 collective operations in a + nonblocking manner. LibNBC is distributed under the BSD license. 
+ """ + homepage = "http://unixer.de/research/nbcoll/libnbc/" + url = "http://unixer.de/research/nbcoll/libnbc/libNBC-1.1.1.tar.gz" + + version('1.1.1', 'ece5c94992591a9fa934a90e5dbe50ce') + + depends_on("mpi") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libarchive/package.py b/var/spack/repos/builtin/packages/libarchive/package.py new file mode 100644 index 0000000000..cbd4b89cd0 --- /dev/null +++ b/var/spack/repos/builtin/packages/libarchive/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Libarchive(Package): + """libarchive: C library and command-line tools for reading and + writing tar, cpio, zip, ISO, and other archive formats.""" + homepage = "http://www.libarchive.org" + url = "http://www.libarchive.org/downloads/libarchive-3.1.2.tar.gz" + + version('3.1.2', 'efad5a503f66329bb9d2f4308b5de98a') + version('3.1.1', '1f3d883daf7161a0065e42a15bbf168f') + version('3.1.0', '095a287bb1fd687ab50c85955692bf3a') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libcircle/package.py b/var/spack/repos/builtin/packages/libcircle/package.py new file mode 100644 index 0000000000..3f7c996fb0 --- /dev/null +++ b/var/spack/repos/builtin/packages/libcircle/package.py @@ -0,0 +1,18 @@ +import os +from spack import * + +class Libcircle(Package): + """libcircle provides an efficient distributed queue on a cluster, + using self-stabilizing work stealing.""" + + homepage = "https://github.com/hpc/libcircle" + + version('0.2.1-rc.1', '2b1369a5736457239f908abf88143ec2', + url='https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz') + + depends_on('mpi') + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libdrm/package.py b/var/spack/repos/builtin/packages/libdrm/package.py new file mode 100644 index 0000000000..00736b7811 --- /dev/null +++ b/var/spack/repos/builtin/packages/libdrm/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Libdrm(Package): + """A userspace library for accessing the DRM, direct + rendering manager, on Linux, BSD and other operating + systems that support the ioctl interface.""" + + homepage = "http://dri.freedesktop.org/libdrm/" # no real website... + url = "http://dri.freedesktop.org/libdrm/libdrm-2.4.59.tar.gz" + + version('2.4.59', '105ac7af1afcd742d402ca7b4eb168b6') + version('2.4.33', '86e4e3debe7087d5404461e0032231c8') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libdwarf/package.py b/var/spack/repos/builtin/packages/libdwarf/package.py new file mode 100644 index 0000000000..099a974e93 --- /dev/null +++ b/var/spack/repos/builtin/packages/libdwarf/package.py @@ -0,0 +1,81 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * +import os + +# Only build certain parts of dwarf because the other ones break. +dwarf_dirs = ['libdwarf', 'dwarfdump2'] + +class Libdwarf(Package): + """The DWARF Debugging Information Format is of interest to + programmers working on compilers and debuggers (and any one + interested in reading or writing DWARF information). It was + developed by a committee (known as the PLSIG at the time) + starting around 1991. Starting around 1991 SGI developed the + libdwarf and dwarfdump tools for internal use and as part of + SGI IRIX developer tools. Since that time dwarfdump and + libdwarf have been shipped (as an executable and archive + respectively, not source) with every release of the SGI + MIPS/IRIX C compiler.""" + + homepage = "http://www.prevanders.net/dwarf.html" + url = "http://www.prevanders.net/libdwarf-20130729.tar.gz" + list_url = homepage + + version('20130729', '4cc5e48693f7b93b7aa0261e63c0e21d') + version('20130207', '64b42692e947d5180e162e46c689dfbf') + version('20130126', 'ded74a5e90edb5a12aac3c29d260c5db') + + depends_on("libelf") + + parallel = False + + + def install(self, spec, prefix): + # dwarf build does not set arguments for ar properly + make.add_default_arg('ARFLAGS=rcs') + + # Dwarf doesn't provide an install, so we have to do it. + mkdirp(prefix.bin, prefix.include, prefix.lib, prefix.man1) + + with working_dir('libdwarf'): + configure("--prefix=" + prefix, "--enable-shared") + make() + + install('libdwarf.a', prefix.lib) + install('libdwarf.so', prefix.lib) + install('libdwarf.h', prefix.include) + install('dwarf.h', prefix.include) + + with working_dir('dwarfdump2'): + configure("--prefix=" + prefix) + + # This makefile has strings of copy commands that + # cause a race in parallel + make(parallel=False) + + install('dwarfdump', prefix.bin) + install('dwarfdump.conf', prefix.lib) + install('dwarfdump.1', prefix.man1) diff --git a/var/spack/repos/builtin/packages/libelf/package.py b/var/spack/repos/builtin/packages/libelf/package.py new file mode 100644 index 0000000000..9338b8f393 --- /dev/null +++ b/var/spack/repos/builtin/packages/libelf/package.py @@ -0,0 +1,49 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Libelf(Package): + """libelf lets you read, modify or create ELF object files in an + architecture-independent way. The library takes care of size + and endian issues, e.g. you can process a file for SPARC + processors on an Intel-based system.""" + + homepage = "http://www.mr511.de/software/english.html" + url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz" + + version('0.8.13', '4136d7b4c04df68b686570afa26988ac') + version('0.8.12', 'e21f8273d9f5f6d43a59878dc274fec7') + + provides('elf') + + def install(self, spec, prefix): + configure("--prefix=" + prefix, + "--enable-shared", + "--disable-dependency-tracking", + "--disable-debug") + make() + + # The mkdir commands in libelf's install can fail in parallel + make("install", parallel=False) diff --git a/var/spack/repos/builtin/packages/libevent/package.py b/var/spack/repos/builtin/packages/libevent/package.py new file mode 100644 index 0000000000..11b1083d67 --- /dev/null +++ b/var/spack/repos/builtin/packages/libevent/package.py @@ -0,0 +1,30 @@ +from spack import * + +class Libevent(Package): + """The libevent API provides a mechanism to execute a callback function + when a specific event occurs on a file descriptor or after a timeout has been + reached. Furthermore, libevent also support callbacks due to signals or regular + timeouts. + """ + + homepage = "http://libevent.org" + url = "https://github.com/downloads/libevent/libevent/libevent-2.0.21-stable.tar.gz" + list_url = "http://libevent.org/old-releases.html" + + version('2.0.21', 'b2405cc9ebf264aa47ff615d9de527a2') + version('2.0.20', '94270cdee32c0cd0aa9f4ee6ede27e8e') + version('2.0.19', '91111579769f46055b0a438f5cc59572') + version('2.0.18', 'aa1ce9bc0dee7b8084f6855765f2c86a') + version('2.0.17', 'dad64aaaaff16b5fbec25160c06fee9a') + version('2.0.16', '899efcffccdb3d5111419df76e7dc8df') + version('2.0.15', '2643abe7ba242df15c08b2cc14ec8759') + version('2.0.14', 'cac0f379da35d3b98f83ac16fcfe1df4') + version('2.0.13', 'af786b4b3f790c9d3279792edf7867fc') + version('2.0.12', '42986228baf95e325778ed328a93e070') + + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libffi/package.py b/var/spack/repos/builtin/packages/libffi/package.py new file mode 100644 index 0000000000..acec031717 --- /dev/null +++ b/var/spack/repos/builtin/packages/libffi/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Libffi(Package): + """The libffi library provides a portable, high level programming + interface to various calling conventions. 
This allows a programmer + to call any function specified by a call interface description at + run time.""" + homepage = "https://sourceware.org/libffi/" + + version('3.2.1','83b89587607e3eb65c70d361f13bab43',url = "ftp://sourceware.org/pub/libffi/libffi-3.2.1.tar.gz") + #version('3.1', 'f5898b29bbfd70502831a212d9249d10',url = "ftp://sourceware.org/pub/libffi/libffi-3.1.tar.gz") # Has a bug $(lib64) instead of ${lib64} in libffi.pc + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/libgcrypt/package.py b/var/spack/repos/builtin/packages/libgcrypt/package.py new file mode 100644 index 0000000000..1d0a57f317 --- /dev/null +++ b/var/spack/repos/builtin/packages/libgcrypt/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Libgcrypt(Package): + """Libgcrypt is a general purpose cryptographic library based on + the code from GnuPG. It provides functions for all cryptographic + building blocks: symmetric ciphers, hash algorithms, MACs, public + key algorithms, large integer functions, random numbers and a lot + of supporting functions. """ + homepage = "http://www.gnu.org/software/libgcrypt/" + url = "ftp://ftp.gnupg.org/gcrypt/libgcrypt/libgcrypt-1.6.2.tar.bz2" + + version('1.6.2', 'b54395a93cb1e57619943c082da09d5f') + + depends_on("libgpg-error") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libgpg-error/package.py b/var/spack/repos/builtin/packages/libgpg-error/package.py new file mode 100644 index 0000000000..6c1d1a10a7 --- /dev/null +++ b/var/spack/repos/builtin/packages/libgpg-error/package.py @@ -0,0 +1,17 @@ +from spack import * + +class LibgpgError(Package): + """Libgpg-error is a small library that defines common error + values for all GnuPG components. Among these are GPG, GPGSM, + GPGME, GPG-Agent, libgcrypt, Libksba, DirMngr, Pinentry, + SmartCard Daemon and possibly more in the future. """ + + homepage = "https://www.gnupg.org/related_software/libgpg-error" + url = "ftp://ftp.gnupg.org/gcrypt/libgpg-error/libgpg-error-1.18.tar.bz2" + + version('1.18', '12312802d2065774b787cbfc22cc04e9') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libjpeg-turbo/package.py b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py new file mode 100644 index 0000000000..07ee183947 --- /dev/null +++ b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py @@ -0,0 +1,20 @@ +from spack import * + +class LibjpegTurbo(Package): + """libjpeg-turbo is a fork of the original IJG libjpeg which uses + SIMD to accelerate baseline JPEG compression and + decompression. libjpeg is a library that implements JPEG image + encoding, decoding and transcoding.""" + homepage = "http://libjpeg-turbo.virtualgl.org" + url = "http://downloads.sourceforge.net/libjpeg-turbo/libjpeg-turbo-1.3.1.tar.gz" + + version('1.3.1', '2c3a68129dac443a72815ff5bb374b05') + + # Can use either of these. 
+    depends_on("yasm")
+    depends_on("nasm")
+
+    def install(self, spec, prefix):
+        configure("--prefix=%s" % prefix)
+        make()
+        make("install")
diff --git a/var/spack/repos/builtin/packages/libjson-c/package.py b/var/spack/repos/builtin/packages/libjson-c/package.py
new file mode 100644
index 0000000000..c0801cce9c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libjson-c/package.py
@@ -0,0 +1,14 @@
+from spack import *
+
+class LibjsonC(Package):
+    """ A JSON implementation in C """
+    homepage = "https://github.com/json-c/json-c/wiki"
+    url = "https://s3.amazonaws.com/json-c_releases/releases/json-c-0.11.tar.gz"
+
+    version('0.11', 'aa02367d2f7a830bf1e3376f77881e98')
+
+    def install(self, spec, prefix):
+        configure('--prefix=%s' % prefix)
+
+        make()
+        make("install")
diff --git a/var/spack/repos/builtin/packages/libmng/package.py b/var/spack/repos/builtin/packages/libmng/package.py
new file mode 100644
index 0000000000..e5336ea2c2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libmng/package.py
@@ -0,0 +1,23 @@
+from spack import *
+
+class Libmng(Package):
+    """libmng - THE reference library for reading, displaying, writing
+       and examining Multiple-Image Network Graphics. MNG is the animation
+       extension to the popular PNG image-format."""
+    homepage = "http://sourceforge.net/projects/libmng/"
+    url = "http://downloads.sourceforge.net/project/libmng/libmng-devel/2.0.2/libmng-2.0.2.tar.gz"
+
+    version('2.0.2', '1ffefaed4aac98475ee6267422cbca55')
+
+    depends_on("jpeg")
+    depends_on("zlib")
+    depends_on("lcms")
+
+    def patch(self):
+        # jpeg requires stdio.h to be included before its headers.
+        filter_file(r'^(\#include \<jpeglib\.h\>)', '#include<stdio.h>\n\\1', 'libmng_types.h')
+
+    def install(self, spec, prefix):
+        configure("--prefix=%s" % prefix)
+        make()
+        make("install")
diff --git a/var/spack/repos/builtin/packages/libmonitor/package.py b/var/spack/repos/builtin/packages/libmonitor/package.py
new file mode 100644
index 0000000000..3b95b86ddf
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libmonitor/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Libmonitor(Package): + """Libmonitor is a library for process and thread control.""" + homepage = "http://hpctoolkit.org" + + version('20130218', svn='http://libmonitor.googlecode.com/svn/trunk/', revision=146) + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libpciaccess/package.py b/var/spack/repos/builtin/packages/libpciaccess/package.py new file mode 100644 index 0000000000..6022fc34a3 --- /dev/null +++ b/var/spack/repos/builtin/packages/libpciaccess/package.py @@ -0,0 +1,21 @@ +from spack import * + +class Libpciaccess(Package): + """Generic PCI access library.""" + + homepage = "http://cgit.freedesktop.org/xorg/lib/libpciaccess/" + url = "http://cgit.freedesktop.org/xorg/lib/libpciaccess/" + + version('0.13.4', git='http://anongit.freedesktop.org/git/xorg/lib/libpciaccess.git', + tag='libpciaccess-0.13.4') + + depends_on('autoconf') + depends_on('libtool') + + def install(self, spec, prefix): + from subprocess import call + call(["./autogen.sh"]) + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libpng/package.py b/var/spack/repos/builtin/packages/libpng/package.py new file mode 100644 index 0000000000..e02b08663e --- /dev/null +++ b/var/spack/repos/builtin/packages/libpng/package.py @@ -0,0 +1,15 @@ +from spack import * + +class Libpng(Package): + """libpng graphics file format""" + homepage = "http://www.libpng.org/pub/png/libpng.html" + url = "http://download.sourceforge.net/libpng/libpng-1.6.16.tar.gz" + + version('1.6.16', '1a4ad377919ab15b54f6cb6a3ae2622d') + version('1.6.15', '829a256f3de9307731d4f52dc071916d') + version('1.6.14', '2101b3de1d5f348925990f9aa8405660') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libsodium/package.py b/var/spack/repos/builtin/packages/libsodium/package.py new file mode 100644 index 0000000000..1c8a16d998 --- /dev/null +++ b/var/spack/repos/builtin/packages/libsodium/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Libsodium(Package): + """Sodium is a modern, easy-to-use software library for encryption, + decryption, signatures, password hashing and more.""" + homepage = "https://download.libsodium.org/doc/" + url = "https://download.libsodium.org/libsodium/releases/libsodium-1.0.3.tar.gz" + + version('1.0.3', 'b3bcc98e34d3250f55ae196822307fab') + version('1.0.2', 'dc40eb23e293448c6fc908757738003f') + version('1.0.1', '9a221b49fba7281ceaaf5e278d0f4430') + version('1.0.0', '3093dabe4e038d09f0d150cef064b2f7') + version('0.7.1', 'c224fe3923d1dcfe418c65c8a7246316') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libtiff/package.py b/var/spack/repos/builtin/packages/libtiff/package.py new file mode 100644 index 0000000000..63c6704cb8 --- /dev/null +++ b/var/spack/repos/builtin/packages/libtiff/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Libtiff(Package): + """libtiff graphics format library""" + homepage = "http://www.remotesensing.org/libtiff/" + url = 
"http://download.osgeo.org/libtiff/tiff-4.0.3.tar.gz" + + version('4.0.3', '051c1068e6a0627f461948c365290410') + + depends_on('jpeg') + depends_on('zlib') + depends_on('xz') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libtool/package.py b/var/spack/repos/builtin/packages/libtool/package.py new file mode 100644 index 0000000000..a07daf9781 --- /dev/null +++ b/var/spack/repos/builtin/packages/libtool/package.py @@ -0,0 +1,14 @@ +from spack import * + +class Libtool(Package): + """libtool -- library building part of autotools""" + homepage = "https://www.gnu.org/software/libtool/" + url = "http://ftpmirror.gnu.org/libtool/libtool-2.4.2.tar.gz" + + version('2.4.2' , 'd2f3b7d4627e69e13514a40e72a24d50') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libunwind/package.py b/var/spack/repos/builtin/packages/libunwind/package.py new file mode 100644 index 0000000000..239fcbcfd5 --- /dev/null +++ b/var/spack/repos/builtin/packages/libunwind/package.py @@ -0,0 +1,38 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Libunwind(Package): + """A portable and efficient C programming interface (API) to determine + the call-chain of a program.""" + homepage = "http://www.nongnu.org/libunwind/" + url = "http://download.savannah.gnu.org/releases/libunwind/libunwind-1.1.tar.gz" + + version('1.1', 'fb4ea2f6fbbe45bf032cd36e586883ce') + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libuuid/package.py b/var/spack/repos/builtin/packages/libuuid/package.py new file mode 100644 index 0000000000..373c5bfcac --- /dev/null +++ b/var/spack/repos/builtin/packages/libuuid/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Libuuid(Package): + """Portable uuid C library""" + # FIXME: add a proper url for your package's homepage here. 
+ homepage = "http://sourceforge.net/projects/libuuid/" + url = "http://downloads.sourceforge.net/project/libuuid/libuuid-1.0.3.tar.gz?r=http%3A%2F%2Fsourceforge.net%2Fprojects%2Flibuuid%2F&ts=1433881396&use_mirror=iweb" + + version('1.0.3', 'd44d866d06286c08ba0846aba1086d68') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + # FIXME: Add logic to build and install here + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libxcb/package.py b/var/spack/repos/builtin/packages/libxcb/package.py new file mode 100644 index 0000000000..16a5525c0d --- /dev/null +++ b/var/spack/repos/builtin/packages/libxcb/package.py @@ -0,0 +1,21 @@ +from spack import * + +class Libxcb(Package): + """The X protocol C-language Binding (XCB) is a replacement + for Xlib featuring a small footprint, latency hiding, direct + access to the protocol, improved threading support, and + extensibility.""" + + homepage = "http://xcb.freedesktop.org/" + url = "http://xcb.freedesktop.org/dist/libxcb-1.11.tar.gz" + + version('1.11', '1698dd837d7e6e94d029dbe8b3a82deb') + version('1.11.1', '118623c15a96b08622603a71d8789bf3') + depends_on("python") + depends_on("xcb-proto") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libxml2/package.py b/var/spack/repos/builtin/packages/libxml2/package.py new file mode 100644 index 0000000000..3a0af6b368 --- /dev/null +++ b/var/spack/repos/builtin/packages/libxml2/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Libxml2(Package): + """Libxml2 is the XML C parser and toolkit developed for the Gnome + project (but usable outside of the Gnome platform), it is free + software available under the MIT License.""" + homepage = "http://xmlsoft.org" + url = "http://xmlsoft.org/sources/libxml2-2.9.2.tar.gz" + + version('2.9.2', '9e6a9aca9d155737868b3dc5fd82f788') + + extends('python') + depends_on('zlib') + depends_on('xz') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libxshmfence/package.py b/var/spack/repos/builtin/packages/libxshmfence/package.py new file mode 100644 index 0000000000..3aa2448b46 --- /dev/null +++ b/var/spack/repos/builtin/packages/libxshmfence/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Libxshmfence(Package): + """This is a tiny library that exposes a event API on top of Linux + futexes.""" + + homepage = "http://keithp.com/blogs/dri3_extension/" # not really... + url = "http://xorg.freedesktop.org/archive/individual/lib/libxshmfence-1.2.tar.gz" + + version('1.2', 'f0b30c0fc568b22ec524859ee28556f1') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/libxslt/package.py b/var/spack/repos/builtin/packages/libxslt/package.py new file mode 100644 index 0000000000..f97332d020 --- /dev/null +++ b/var/spack/repos/builtin/packages/libxslt/package.py @@ -0,0 +1,24 @@ +from spack import * + +class Libxslt(Package): + """Libxslt is the XSLT C library developed for the GNOME + project. XSLT itself is a an XML language to define + transformation for XML. Libxslt is based on libxml2 the XML C + library developed for the GNOME project. 
It also implements
+       most of the EXSLT set of processor-portable extensions
+       functions and some of Saxon's evaluate and expressions
+       extensions."""
+    homepage = "http://www.xmlsoft.org/XSLT/index.html"
+    url = "http://xmlsoft.org/sources/libxslt-1.1.28.tar.gz"
+
+    version('1.1.28', '9667bf6f9310b957254fdcf6596600b7')
+
+    depends_on("libxml2")
+    depends_on("xz")
+    depends_on("zlib")
+    depends_on("libgcrypt")
+
+    def install(self, spec, prefix):
+        configure("--prefix=%s" % prefix)
+        make()
+        make("install")
diff --git a/var/spack/repos/builtin/packages/llvm-lld/package.py b/var/spack/repos/builtin/packages/llvm-lld/package.py
new file mode 100644
index 0000000000..f229211396
--- /dev/null
+++ b/var/spack/repos/builtin/packages/llvm-lld/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+class LlvmLld(Package):
+    """lld - The LLVM Linker
+       lld is a new set of modular code for creating linker tools."""
+    homepage = "http://lld.llvm.org"
+    url = "http://llvm.org/releases/3.4/lld-3.4.src.tar.gz"
+
+    depends_on('llvm')
+
+    version('3.4', '3b6a17e58c8416c869c14dd37682f78e')
+
+    def install(self, spec, prefix):
+        env['CXXFLAGS'] = self.compiler.cxx11_flag
+
+        with working_dir('spack-build', create=True):
+            cmake('..',
+                  '-DLLD_PATH_TO_LLVM_BUILD=%s' % spec['llvm'].prefix,
+                  '-DLLVM_MAIN_SRC_DIR=%s' % spec['llvm'].prefix,
+                  *std_cmake_args)
+            make()
+            make("install")
diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py
new file mode 100644
index 0000000000..a6759c3033
--- /dev/null
+++ b/var/spack/repos/builtin/packages/llvm/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by David Beckingsale, david@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Llvm(Package): + """The LLVM Project is a collection of modular and reusable compiler and + toolchain technologies. Despite its name, LLVM has little to do with + traditional virtual machines, though it does provide helpful libraries + that can be used to build them. The name "LLVM" itself is not an acronym; + it is the full name of the project. + """ + homepage = 'http://llvm.org/' + url = 'http://llvm.org/releases/3.7.0/llvm-3.7.0.src.tar.xz' + + version('3.7.0', 'b98b9495e5655a672d6cb83e1a180f8e', url='http://llvm.org/releases/3.7.0/llvm-3.7.0.src.tar.xz') + version('3.6.2', '0c1ee3597d75280dee603bae9cbf5cc2', url='http://llvm.org/releases/3.6.2/llvm-3.6.2.src.tar.xz') + version('3.5.1', '2d3d8004f38852aa679e5945b8ce0b14', url='http://llvm.org/releases/3.5.1/llvm-3.5.1.src.tar.xz') + + depends_on('python@2.7:') + + def install(self, spec, prefix): + env['CXXFLAGS'] = self.compiler.cxx11_flag + + with working_dir('spack-build', create=True): + cmake('..', + '-DLLVM_REQUIRES_RTTI=1', + '-DPYTHON_EXECUTABLE=%s/bin/python' % spec['python'].prefix, + *std_cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/lmdb/package.py b/var/spack/repos/builtin/packages/lmdb/package.py new file mode 100644 index 0000000000..875b8100c5 --- /dev/null +++ b/var/spack/repos/builtin/packages/lmdb/package.py @@ -0,0 +1,39 @@ +import os +from spack import * + +class Lmdb(Package): + """Read-only mirror of official repo on openldap.org. Issues and + pull requests here are ignored. Use OpenLDAP ITS for issues. 
+ http://www.openldap.org/software/repo.html""" + + + homepage = "http://www.openldap.org/software/repo.html" + url = "https://github.com/LMDB/lmdb/archive/LMDB_0.9.16.tar.gz" + + version('0.9.16', '0de89730b8f3f5711c2b3a4ba517b648') + + def install(self, spec, prefix): + os.chdir('libraries/liblmdb') + + make() + + mkdirp(prefix.bin) + mkdirp(prefix + '/man/man1') + mkdirp(prefix.lib) + mkdirp(prefix.include) + + bins = ['mdb_stat', 'mdb_copy', 'mdb_dump', 'mdb_load'] + for f in bins: + install(f, prefix.bin) + + mans = ['mdb_stat.1', 'mdb_copy.1', 'mdb_dump.1', 'mdb_load.1'] + for f in mans: + install(f, prefix + '/man/man1') + + libs = ['liblmdb.a', 'liblmdb.so'] + for f in libs: + install(f, prefix.lib) + + includes = ['lmdb.h'] + for f in includes: + install(f, prefix.include) diff --git a/var/spack/repos/builtin/packages/lua/package.py b/var/spack/repos/builtin/packages/lua/package.py new file mode 100644 index 0000000000..57c443cc2d --- /dev/null +++ b/var/spack/repos/builtin/packages/lua/package.py @@ -0,0 +1,26 @@ +from spack import * +import os + +class Lua(Package): + """ The Lua programming language interpreter and library """ + homepage = "http://www.lua.org" + url = "http://www.lua.org/ftp/lua-5.1.5.tar.gz" + + version('5.3.1', '797adacada8d85761c079390ff1d9961') + version('5.3.0', 'a1b0a7e92d0c85bbff7a8d27bf29f8af') + version('5.2.4', '913fdb32207046b273fdb17aad70be13') + version('5.2.3', 'dc7f94ec6ff15c985d2d6ad0f1b35654') + version('5.2.2', 'efbb645e897eae37cad4344ce8b0a614') + version('5.2.1', 'ae08f641b45d737d12d30291a5e5f6e3') + version('5.2.0', 'f1ea831f397214bae8a265995ab1a93e') + version('5.1.5', '2e115fe26e435e33b0d5c022e4490567') + version('5.1.4', 'd0870f2de55d59c1c8419f36e8fac150') + version('5.1.3', 'a70a8dfaa150e047866dc01a46272599') + + depends_on('ncurses') + + def install(self, spec, prefix): + make('INSTALL_TOP=%s' % prefix, + 'MYLDFLAGS=-L%s/lib' % spec['ncurses'].prefix, + 'linux', + 'install') diff --git a/var/spack/repos/builtin/packages/lwgrp/package.py b/var/spack/repos/builtin/packages/lwgrp/package.py new file mode 100644 index 0000000000..5963382b92 --- /dev/null +++ b/var/spack/repos/builtin/packages/lwgrp/package.py @@ -0,0 +1,18 @@ +import os +from spack import * + +class Lwgrp(Package): + """Thie light-weight group library provides process group + representations using O(log N) space and time.""" + + homepage = "https://github.com/hpc/lwgrp" + url = "https://github.com/hpc/lwgrp/releases/download/v1.0.2/lwgrp-1.0.2.tar.gz" + + version('1.0.2', 'ab7ba3bdd8534a651da5076f47f27d8a') + + depends_on('mpi') + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/lwm2/package.py b/var/spack/repos/builtin/packages/lwm2/package.py new file mode 100644 index 0000000000..31afff8816 --- /dev/null +++ b/var/spack/repos/builtin/packages/lwm2/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Lwm2(Package): + """LWM2: Light Weight Measurement Module. This is a PMPI module + that can collect a number of time-sliced MPI and POSIX I/O + measurements from a program. 
+ """ + homepage = "https://jay.grs.rwth-aachen.de/redmine/projects/lwm2" + + version('torus', hg='https://jay.grs.rwth-aachen.de/hg/lwm2', revision='torus') + + depends_on("papi") + depends_on("mpi") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/matio/package.py b/var/spack/repos/builtin/packages/matio/package.py new file mode 100644 index 0000000000..12cfb80926 --- /dev/null +++ b/var/spack/repos/builtin/packages/matio/package.py @@ -0,0 +1,15 @@ +from spack import * + + +class Matio(Package): + """matio is an C library for reading and writing Matlab MAT files""" + homepage = "http://sourceforge.net/projects/matio/" + url = "http://downloads.sourceforge.net/project/matio/matio/1.5.2/matio-1.5.2.tar.gz" + + version('1.5.2', '85b007b99916c63791f28398f6a4c6f1') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/memaxes/package.py b/var/spack/repos/builtin/packages/memaxes/package.py new file mode 100644 index 0000000000..76d5d3f831 --- /dev/null +++ b/var/spack/repos/builtin/packages/memaxes/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Memaxes(Package): + """MemAxes is a visualizer for sampled memory trace data.""" + + homepage = "https://github.com/scalability-llnl/MemAxes" + + version('0.5', '5874f3fda9fd2d313c0ff9684f915ab5', + url='https://github.com/scalability-llnl/MemAxes/archive/v0.5.tar.gz') + + depends_on("cmake@2.8.9:") + depends_on("qt@5:") + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake('..', *std_cmake_args) + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/mesa/package.py b/var/spack/repos/builtin/packages/mesa/package.py new file mode 100644 index 0000000000..2a04a8fd51 --- /dev/null +++ b/var/spack/repos/builtin/packages/mesa/package.py @@ -0,0 +1,34 @@ +from spack import * + +class Mesa(Package): + """Mesa is an open-source implementation of the OpenGL + specification - a system for rendering interactive 3D graphics.""" + + homepage = "http://www.mesa3d.org" + url = "ftp://ftp.freedesktop.org/pub/mesa/older-versions/8.x/8.0.5/MesaLib-8.0.5.tar.gz" + # url = "ftp://ftp.freedesktop.org/pub/mesa/10.4.4/MesaLib-10.4.4.tar.gz" + + # version('10.4.4', '8d863a3c209bf5116b2babfccccc68ce') + version('8.0.5', 'cda5d101f43b8784fa60bdeaca4056f2') + + # mesa 7.x, 8.x, 9.x + depends_on("libdrm@2.4.33") + depends_on("llvm@3.0") + depends_on("libxml2") + + # patch("llvm-fixes.patch") # using newer llvm + + # mesa 10.x + # depends_on("py-mako") + # depends_on("flex") + # depends_on("bison") + # depends_on("dri2proto") + # depends_on("libxcb") + # depends_on("libxshmfence") + + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/metis/package.py b/var/spack/repos/builtin/packages/metis/package.py new file mode 100644 index 0000000000..7ce5ae1925 --- /dev/null +++ b/var/spack/repos/builtin/packages/metis/package.py @@ -0,0 +1,27 @@ +from spack import * + +class Metis(Package): + """METIS is a set of serial programs for partitioning graphs, + partitioning finite element meshes, and producing fill reducing + orderings for sparse matrices. 
+       METIS are based on the multilevel recursive-bisection,
+       multilevel k-way, and multi-constraint partitioning schemes."""
+
+    homepage = "http://glaros.dtc.umn.edu/gkhome/metis/metis/overview"
+    url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/metis-5.1.0.tar.gz"
+
+    version('5.1.0', '5465e67079419a69e0116de24fce58fe')
+
+    depends_on('mpi')
+
+    def install(self, spec, prefix):
+        cmake(".",
+              '-DGKLIB_PATH=%s/GKlib' % pwd(),
+              '-DSHARED=1',
+              '-DCMAKE_C_COMPILER=mpicc',
+              '-DCMAKE_CXX_COMPILER=mpicxx',
+              *std_cmake_args)
+
+        make()
+        make("install")
diff --git a/var/spack/repos/builtin/packages/mpc/package.py b/var/spack/repos/builtin/packages/mpc/package.py
new file mode 100644
index 0000000000..6fbfca3007
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mpc/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Mpc(Package): + """Gnu Mpc is a C library for the arithmetic of complex numbers + with arbitrarily high precision and correct rounding of the + result.""" + homepage = "http://www.multiprecision.org" + url = "ftp://ftp.gnu.org/gnu/mpc/mpc-1.0.2.tar.gz" + + version('1.0.2', '68fadff3358fb3e7976c7a398a0af4c3') + + depends_on("gmp") + depends_on("mpfr") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/mpe2/mpe2.patch b/var/spack/repos/builtin/packages/mpe2/mpe2.patch new file mode 100644 index 0000000000..3ade1f04f4 --- /dev/null +++ b/var/spack/repos/builtin/packages/mpe2/mpe2.patch @@ -0,0 +1,12 @@ +diff -rupN mpe2-1.3.0/src/graphics/src/mpe_graphics.c mpe2-1.3.0.new/src/graphics/src/mpe_graphics.c +--- mpe2-1.3.0/src/graphics/src/mpe_graphics.c 2009-06-15 10:36:22.000000000 -0600 ++++ mpe2-1.3.0.new/src/graphics/src/mpe_graphics.c 2014-10-25 00:11:22.000000000 -0600 +@@ -982,7 +982,7 @@ char *string; + return MPE_ERR_BAD_ARGS; + } + +- printf("color = %d, string = %s\n",(int) color, string); ++//printf("color = %d, string = %s\n",(int) color, string); + + XBSetPixVal( graph->xwin, graph->xwin->cmapping[color] ); + returnVal = XDrawString( graph->xwin->disp, XBDrawable(graph->xwin), diff --git a/var/spack/repos/builtin/packages/mpe2/package.py b/var/spack/repos/builtin/packages/mpe2/package.py new file mode 100644 index 0000000000..27295172cc --- /dev/null +++ b/var/spack/repos/builtin/packages/mpe2/package.py @@ -0,0 +1,28 @@ +from spack import * + +class Mpe2(Package): + """Message Passing Extensions (MPE) -- Parallel, shared X window graphics""" + + homepage = "http://www.mcs.anl.gov/research/projects/perfvis/software/MPE/" + url = "ftp://ftp.mcs.anl.gov/pub/mpi/mpe/mpe2-1.3.0.tar.gz" + + version('1.3.0', '67bf0c7b2e573df3ba0d2059a96c2f7b') + + patch('mpe2.patch') + + depends_on("mpi") + + provides("mpe") + + def install(self, spec, prefix): + configure("--prefix=" + prefix, + "--x-includes=/usr/X11R6/include", + "--x-libraries=/usr/X11R6/lib", + "--enable-mpe_graphics=yes", + "--disable-f77", + "--enable-viewers=no", + "--enable-slog2=no", + "--with-mpicc=mpicc") + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/mpfr/package.py b/var/spack/repos/builtin/packages/mpfr/package.py new file mode 100644 index 0000000000..9c744a22df --- /dev/null +++ b/var/spack/repos/builtin/packages/mpfr/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. 
+# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Mpfr(Package): + """The MPFR library is a C library for multiple-precision + floating-point computations with correct rounding.""" + homepage = "http://www.mpfr.org" + url = "http://www.mpfr.org/mpfr-current/mpfr-3.1.3.tar.bz2" + + version('3.1.3', '5fdfa3cfa5c86514ee4a241a1affa138') + # version('3.1.2', 'ee2c3ac63bf0c2359bf08fc3ee094c19') + + depends_on('gmp') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/mpibash/mpibash-4.3.patch b/var/spack/repos/builtin/packages/mpibash/mpibash-4.3.patch new file mode 100644 index 0000000000..17e285b0bf --- /dev/null +++ b/var/spack/repos/builtin/packages/mpibash/mpibash-4.3.patch @@ -0,0 +1,1565 @@ +diff -Naur bash-4.3/builtins/circle.def mpibash-4.3/builtins/circle.def +--- bash-4.3/builtins/circle.def 1969-12-31 17:00:00.000000000 -0700 ++++ mpibash-4.3/builtins/circle.def 2014-05-13 11:27:37.314100671 -0600 +@@ -0,0 +1,620 @@ ++This file is circle.def, from which is created circle.c. ++It implements all of the "circle_*" builtins in Bash. ++ ++$PRODUCES circle.c ++ ++#include ++ ++#include ++#if defined (HAVE_UNISTD_H) ++# ifdef _MINIX ++# include ++# endif ++# include ++#endif ++ ++#include "../bashintl.h" ++#include "../shell.h" ++#include "common.h" ++#include "bashgetopt.h" ++#include ++ ++extern int running_trap, trap_saved_exit_value; ++ ++static int circle_rank; /* Rank in the Libcircle job */ ++static SHELL_VAR *create_func = NULL; /* User-defined callback function for CIRCLE_cb_create. */ ++static SHELL_VAR *process_func = NULL; /* User-defined callback function for CIRCLE_cb_process. */ ++static SHELL_VAR *reduce_init_func = NULL; /* User-defined callback function for CIRCLE_cb_reduce_init. */ ++static SHELL_VAR *reduce_fini_func = NULL; /* User-defined callback function for CIRCLE_cb_reduce_fini. */ ++static SHELL_VAR *reduce_op_func = NULL; /* User-defined callback function for CIRCLE_cb_reduce_op. */ ++static CIRCLE_handle *current_handle = NULL; /* Active handle within a callback or NULL if not within a callback */ ++static int within_reduction = 0; /* 1=within a reduction callback; 0=not */ ++ ++/* Return with a usage message if no arguments remain. */ ++#define YES_ARGS(LIST) \ ++ if ((LIST) == 0) \ ++ { \ ++ builtin_usage (); \ ++ return (EX_USAGE); \ ++ } ++ ++/* Perform the same operation as bind_variable, but with VALUE being a ++ * number, not a string. */ ++static SHELL_VAR * ++bind_variable_number (name, value, flags) ++ const char *name; ++ long value; ++ int flags; ++{ ++ char numstr[25]; /* String version of VALUE */ ++ ++ sprintf (numstr, "%ld", value); ++ return bind_variable (name, numstr, flags); ++} ++ ++/* Invoke the user-defined creation-callback function (create_func). 
*/ ++static void ++internal_create_func (handle) ++ CIRCLE_handle *handle; ++{ ++ WORD_LIST *funcargs; ++ ++ if (create_func == NULL) ++ return; ++ current_handle = handle; ++ funcargs = make_word_list (make_word ("cb_create"), NULL); ++ execute_shell_function (create_func, funcargs); ++ dispose_words (funcargs); ++ current_handle = NULL; ++} ++ ++/* Invoke the user-defined process-callback function (process_func). */ ++static void ++internal_process_func (handle) ++ CIRCLE_handle *handle; ++{ ++ WORD_LIST *funcargs; ++ ++ if (process_func == NULL) ++ return; ++ current_handle = handle; ++ funcargs = make_word_list (make_word ("cb_process"), NULL); ++ execute_shell_function (process_func, funcargs); ++ dispose_words (funcargs); ++ current_handle = NULL; ++} ++ ++/* Invoke the user-defined reduction-initiation callback function ++ * (reduce_init_func). */ ++static void ++internal_reduce_init_func (void) ++{ ++ WORD_LIST *funcargs; ++ ++ if (reduce_init_func == NULL) ++ return; ++ within_reduction = 1; ++ funcargs = make_word_list (make_word ("cb_reduce_init"), NULL); ++ execute_shell_function (reduce_init_func, funcargs); ++ dispose_words (funcargs); ++ within_reduction = 0; ++} ++ ++/* Invoke the user-defined reduction callback function ++ * (reduce_op_func). */ ++static void ++internal_reduce_op_func (buf1, size1, buf2, size2) ++ const void* buf1; ++ size_t size1; ++ const void* buf2; ++ size_t size2; ++{ ++ WORD_LIST *funcargs; ++ ++ if (reduce_op_func == NULL) ++ return; ++ within_reduction = 1; ++ funcargs = make_word_list (make_word (buf2), NULL); ++ funcargs = make_word_list (make_word (buf1), funcargs); ++ funcargs = make_word_list (make_word ("cb_reduce_op"), funcargs); ++ execute_shell_function (reduce_op_func, funcargs); ++ dispose_words (funcargs); ++ within_reduction = 0; ++} ++ ++/* Invoke the user-defined reduction-finalization callback function ++ * (reduce_fini_func). */ ++static void ++internal_reduce_fini_func (buf, size) ++ const void* buf; ++ size_t size; ++{ ++ WORD_LIST *funcargs; ++ ++ if (reduce_fini_func == NULL) ++ return; ++ funcargs = make_word_list (make_word (buf), NULL); ++ funcargs = make_word_list (make_word ("cb_reduce_fini"), funcargs); ++ execute_shell_function (reduce_fini_func, funcargs); ++ dispose_words (funcargs); ++} ++ ++/* Look up a user-provided callback function. */ ++static int ++find_callback_function (list, user_func) ++ WORD_LIST *list; ++ SHELL_VAR **user_func; ++{ ++ char *funcname; /* Name of the user-defined function. */ ++ ++ /* If no argument was provided, nullify the callback function. */ ++ if (list == NULL) ++ { ++ *user_func = NULL; ++ return EXECUTION_SUCCESS; ++ } ++ ++ /* Get the callback function. */ ++ funcname = list->word->word; ++ list = list->next; ++ no_args (list); ++ *user_func = find_function (funcname); ++ if (*user_func == NULL) ++ { ++ builtin_error (_("function %s not found"), funcname); ++ return EXECUTION_FAILURE; ++ } ++ return EXECUTION_SUCCESS; ++} ++ ++/* Initialize Libcircle. */ ++void ++initialize_libcircle (argc, argv) ++ int argc; ++ char **argv; ++{ ++ circle_rank = CIRCLE_init (argc, argv, CIRCLE_DEFAULT_FLAGS); ++ bind_variable_number ("circle_rank", circle_rank, 0); ++ CIRCLE_enable_logging (CIRCLE_LOG_WARN); ++ CIRCLE_cb_create (internal_create_func); ++ CIRCLE_cb_process (internal_process_func); ++ CIRCLE_cb_reduce_init (internal_reduce_init_func); ++ CIRCLE_cb_reduce_op (internal_reduce_op_func); ++ CIRCLE_cb_reduce_fini (internal_reduce_fini_func); ++} ++ ++/* Finalize Libcircle. 
*/ ++void ++finalize_libcircle (void) ++{ ++ CIRCLE_finalize (); ++} ++ ++/* ---------------------------------------------------------------------- */ ++ ++$BUILTIN circle_set_options ++$FUNCTION circle_set_options_builtin ++$SHORT_DOC circle_set_options [flag]... ++Change Libcircle's run-time behavior. ++ ++Arguments: ++ FLAG "split_random", "split_equal", or "create_global" ++ ++Multiple flags can be provided. If no flags are provided, Libcircle ++reverts to its default options. ++ ++Exit Status: ++Returns 0 unless an invalid option is given. ++$END ++/*'*/ ++ ++/* Here is the circle_set_options builtin. */ ++int ++circle_set_options_builtin (list) ++ WORD_LIST *list; ++{ ++ char *word; /* One argument */ ++ int flags = 0; /* Flags to pass to CIRCLE_set_options */ ++ ++ if (list == NULL) ++ flags = CIRCLE_DEFAULT_FLAGS; ++ else ++ while (list != NULL) ++ { ++ word = list->word->word; ++ if (!strcmp (word, "split_random")) ++ flags |= CIRCLE_SPLIT_RANDOM; ++ else if (!strcmp (word, "split_equal")) ++ flags |= CIRCLE_SPLIT_EQUAL; ++ else if (!strcmp (word, "create_global")) ++ flags |= CIRCLE_CREATE_GLOBAL; ++ else ++ { ++ builtin_error (_("invalid flag \"%s\""), word); ++ return (EXECUTION_FAILURE); ++ } ++ list = list->next; ++ } ++ CIRCLE_set_options (flags); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_cb_create ++$FUNCTION circle_cb_create_builtin ++$SHORT_DOC circle_cb_create [func] ++Register a function that will create work when asked. ++ ++Arguments: ++ FUNC User-defined callback function that will invoke ++ circle_enqueue when called ++ ++If FUNC is omitted, no function will be associated with work creation. ++This can be used to nullify a previous circle_cb_create invocation. ++ ++Exit Status: ++Returns 0 unless an invalid function is given or an error occurs. ++$END ++ ++/* Here is the circle_cb_create builtin. */ ++int ++circle_cb_create_builtin (list) ++ WORD_LIST *list; ++{ ++ return find_callback_function (list, &create_func); ++} ++ ++$BUILTIN circle_cb_process ++$FUNCTION circle_cb_process_builtin ++$SHORT_DOC circle_cb_process [func] ++Register a function that will process work when asked. ++ ++Arguments: ++ FUNC User-defined callback function that will invoke ++ circle_enqueue when called ++ ++If FUNC is omitted, no function will be associated with work processing. ++This can be used to nullify a previous circle_cb_process invocation. ++ ++Exit Status: ++Returns 0 unless an invalid function is given or an error occurs. ++$END ++ ++/* Here is the circle_cb_process builtin. */ ++int ++circle_cb_process_builtin (list) ++ WORD_LIST *list; ++{ ++ return find_callback_function (list, &process_func); ++} ++ ++$BUILTIN circle_begin ++$FUNCTION circle_begin_builtin ++$SHORT_DOC circle_begin ++Begin creation and processing of the distributed work queue. ++ ++Exit Status: ++Returns 0 unless an error occurs. ++$END ++ ++/* Here is the circle_begin builtin. */ ++int ++circle_begin_builtin (list) ++ WORD_LIST *list; ++{ ++ no_args (list); ++ CIRCLE_begin (); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_enqueue ++$FUNCTION circle_enqueue_builtin ++$SHORT_DOC circle_enqueue work ++Enqueue work onto the distributed queue. ++ ++Arguments: ++ WORK "Work" as represented by an arbitrary string of limited ++ size (generally around 4KB) ++ ++Exit Status: ++Returns 0 unless an error occurs. ++$END ++ ++/* Here is the circle_enqueue builtin. */ ++int ++circle_enqueue_builtin (list) ++ WORD_LIST *list; ++{ ++ char *work; /* Work to perform */ ++ ++ /* Extract the work argument. 
*/ ++ YES_ARGS (list); ++ work = list->word->word; ++ list = list->next; ++ no_args (list); ++ ++ /* Complain if we're not within a proper callback function. */ ++ if (current_handle == NULL) ++ { ++ builtin_error (_("not within a Libcircle \"create\" or \"process\" callback function")); ++ return EXECUTION_FAILURE; ++ } ++ ++ /* Enqueue the work. */ ++ if (current_handle->enqueue (work) == -1) ++ return EXECUTION_FAILURE; ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_dequeue ++$FUNCTION circle_dequeue_builtin ++$SHORT_DOC circle_dequeue var ++Dequeue work from the distributed queue into a variable. ++ ++Arguments: ++ VAR Variable in which to receive previously enqueued "work" ++ ++Exit Status: ++Returns 0 unless an error occurs. ++$END ++ ++/* Here is the circle_dequeue builtin. */ ++int ++circle_dequeue_builtin (list) ++ WORD_LIST *list; ++{ ++ char *varname; /* Variable in which to store the work string */ ++ char work[CIRCLE_MAX_STRING_LEN+1]; /* Work to perform */ ++ ++ /* Extract the variable-name argument. */ ++ YES_ARGS (list); ++ varname = list->word->word; ++ list = list->next; ++ no_args (list); ++ ++ /* Complain if we're not within a callback function. */ ++ if (current_handle == NULL) ++ { ++ builtin_error (_("not within a Libcircle callback function")); ++ return EXECUTION_FAILURE; ++ } ++ ++ /* Dequeue the work and bind it to the given variable. */ ++ if (current_handle->dequeue (work) == -1) ++ return EXECUTION_FAILURE; ++ bind_variable (varname, work, 0); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_enable_logging ++$FUNCTION circle_enable_logging_builtin ++$SHORT_DOC circle_enable_logging log_level ++Change Libcircle's logging verbosity ++ ++Arguments: ++ LOG_LEVEL "fatal", "error", "warning", "info", or "debug" ++ ++Exit Status: ++Returns 0 unless an invalid option is given. ++$END ++/*'*/ ++ ++/* Here is the circle_enable_logging builtin. */ ++int ++circle_enable_logging_builtin (list) ++ WORD_LIST *list; ++{ ++ char *word; /* One argument */ ++ CIRCLE_loglevel loglevel; /* Level to set */ ++ ++ /* Parse the log level. */ ++ YES_ARGS (list); ++ word = list->word->word; ++ if (!strcmp (word, "fatal")) ++ loglevel = CIRCLE_LOG_FATAL; ++ else if (!strcmp (word, "error")) ++ loglevel = CIRCLE_LOG_ERR; ++ else if (!strcmp (word, "warning")) ++ loglevel = CIRCLE_LOG_WARN; ++ else if (!strcmp (word, "info")) ++ loglevel = CIRCLE_LOG_INFO; ++ else if (!strcmp (word, "debug")) ++ loglevel = CIRCLE_LOG_DBG; ++ else ++ { ++ builtin_error (_("invalid log level \"%s\""), word); ++ return (EXECUTION_FAILURE); ++ } ++ ++ /* Set the log level. */ ++ CIRCLE_enable_logging (loglevel); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_abort ++$FUNCTION circle_abort_builtin ++$SHORT_DOC circle_abort ++Terminate queue processing. ++ ++Exit Status: ++Returns 0 unless an error occurs. ++$END ++ ++/* Here is the circle_abort builtin. */ ++int ++circle_abort_builtin (list) ++ WORD_LIST *list; ++{ ++ no_args (list); ++ CIRCLE_abort (); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_checkpoint ++$FUNCTION circle_checkpoint_builtin ++$SHORT_DOC circle_checkpoint ++Checkpoint a work queue to disk. ++ ++Write a file called circle${circle_rank}.txt containing the current ++queue state of rank ${circle_rank}. On a later run, a worker can ++invoke circle_read_restarts to repopulate its queue from such a ++checkpoint file. ++ ++Exit Status: ++Returns 0 unless an error occurs. ++$END ++/*'*/ ++ ++/* Here is the circle_checkpoint builtin. 
*/ ++int ++circle_checkpoint_builtin (list) ++ WORD_LIST *list; ++{ ++ no_args (list); ++ CIRCLE_checkpoint (); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_read_restarts ++$FUNCTION circle_read_restarts_builtin ++$SHORT_DOC circle_read_restarts ++Repopulate a work queue from a disk checkpoint. ++ ++Read queue contents from a file called circle${circle_rank}.txt, which ++was previously produced by circle_checkpoint. ++ ++Exit Status: ++Returns 0 unless an error occurs. ++$END ++/*'*/ ++ ++/* Here is the circle_read_restarts builtin. */ ++int ++circle_read_restarts_builtin (list) ++ WORD_LIST *list; ++{ ++ no_args (list); ++ CIRCLE_read_restarts (); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN circle_cb_reduce_init ++$FUNCTION circle_cb_reduce_init_builtin ++$SHORT_DOC circle_cb_reduce_init [func] ++Register a function that will initiate a reduction operation. ++ ++Arguments: ++ FUNC User-defined callback function that will invoke ++ circle_reduce when called ++ ++FUNC will be invoked on all ranks. ++ ++If FUNC is omitted, no function will be associated with reduction ++initialization. This can be used to nullify a previous ++circle_cb_reduce_init invocation. ++ ++Exit Status: ++Returns 0 unless an invalid function is given or an error occurs. ++$END ++ ++/* Here is the circle_cb_reduce_init builtin. */ ++int ++circle_cb_reduce_init_builtin (list) ++ WORD_LIST *list; ++{ ++ return find_callback_function (list, &reduce_init_func); ++} ++ ++$BUILTIN circle_cb_reduce_op ++$FUNCTION circle_cb_reduce_op_builtin ++$SHORT_DOC circle_cb_reduce_op [func] ++Register a function that will complete a reduction operation. ++ ++Arguments: ++ FUNC User-defined callback function that will receive ++ two items to reduce and invoke circle_reduce on ++ the reduced value ++ ++If FUNC is omitted, no function will be associated with reduction ++execution. This can be used to nullify a previous circle_cb_reduce_op ++invocation. ++ ++Exit Status: ++Returns 0 unless an invalid function is given or an error occurs. ++$END ++ ++/* Here is the circle_cb_reduce_op builtin. */ ++int ++circle_cb_reduce_op_builtin (list) ++ WORD_LIST *list; ++{ ++ return find_callback_function (list, &reduce_op_func); ++} ++ ++$BUILTIN circle_cb_reduce_fini ++$FUNCTION circle_cb_reduce_fini_builtin ++$SHORT_DOC circle_cb_reduce_fini [func] ++Register a function that will complete a reduction operation. ++ ++Arguments: ++ FUNC User-defined callback function that will receive ++ the final reduced data ++ ++If FUNC is omitted, no function will be associated with reduction ++completion. This can be used to nullify a previous ++circle_cb_reduce_fini invocation. ++ ++Libcircle guarantees that FUNC will be invoked only on rank 0. ++ ++Exit Status: ++Returns 0 unless an invalid function is given or an error occurs. ++$END ++ ++/* Here is the circle_cb_reduce_fini builtin. */ ++int ++circle_cb_reduce_fini_builtin (list) ++ WORD_LIST *list; ++{ ++ return find_callback_function (list, &reduce_fini_func); ++} ++ ++$BUILTIN circle_reduce ++$FUNCTION circle_reduce_builtin ++$SHORT_DOC circle_reduce work ++Seed the next phase of a reduction operation ++ ++Arguments: ++ WORK "Work" as represented by an arbitrary string of limited ++ size (generally around 4KB) ++ ++This function should be called both by the callback function ++registered with circle_reduce_init and the callback function ++registered with circle_reduce_op. ++ ++Exit Status: ++Returns 0 unless an error occurs. ++$END ++ ++/* Here is the circle_reduce builtin. 
*/ ++int ++circle_reduce_builtin (list) ++ WORD_LIST *list; ++{ ++ char *work; /* Work to perform */ ++ ++ /* Extract the work argument. */ ++ YES_ARGS (list); ++ work = list->word->word; ++ list = list->next; ++ no_args (list); ++ ++ /* Complain if we're not within a proper callback function. */ ++ if (!within_reduction) ++ { ++ builtin_error (_("not within a Libcircle \"reduce_init\" or \"reduce_op\" callback function")); ++ return EXECUTION_FAILURE; ++ } ++ ++ /* Reduce the work. */ ++ CIRCLE_reduce (work, strlen (work)); ++ return EXECUTION_SUCCESS; ++} +diff -Naur bash-4.3/builtins/Makefile.in mpibash-4.3/builtins/Makefile.in +--- bash-4.3/builtins/Makefile.in 2012-05-25 07:29:19.000000000 -0600 ++++ mpibash-4.3/builtins/Makefile.in 2014-05-13 11:27:37.314100671 -0600 +@@ -141,7 +141,9 @@ + $(srcdir)/times.def $(srcdir)/trap.def $(srcdir)/type.def \ + $(srcdir)/ulimit.def $(srcdir)/umask.def $(srcdir)/wait.def \ + $(srcdir)/reserved.def $(srcdir)/pushd.def $(srcdir)/shopt.def \ +- $(srcdir)/printf.def $(srcdir)/complete.def $(srcdir)/mapfile.def ++ $(srcdir)/printf.def $(srcdir)/complete.def $(srcdir)/mapfile.def \ ++ $(srcdir)/mpi.def \ ++@CIRCLE@ $(srcdir)/circle.def + + STATIC_SOURCE = common.c evalstring.c evalfile.c getopt.c bashgetopt.c \ + getopt.h +@@ -153,7 +155,9 @@ + jobs.o kill.o let.o mapfile.o \ + pushd.o read.o return.o set.o setattr.o shift.o source.o \ + suspend.o test.o times.o trap.o type.o ulimit.o umask.o \ +- wait.o getopts.o shopt.o printf.o getopt.o bashgetopt.o complete.o ++ wait.o getopts.o shopt.o printf.o getopt.o bashgetopt.o complete.o \ ++ mpi.o \ ++@CIRCLE@ circle.o + + CREATED_FILES = builtext.h builtins.c psize.aux pipesize.h tmpbuiltins.c \ + tmpbuiltins.h +@@ -317,6 +321,8 @@ + getopts.o: getopts.def + reserved.o: reserved.def + complete.o: complete.def ++@CIRCLE@ circle.o: circle.def ++mpi.o: mpi.def + + # C files + bashgetopt.o: ../config.h $(topdir)/bashansi.h $(BASHINCDIR)/ansi_stdlib.h +@@ -644,6 +650,19 @@ + mapfile.o: $(topdir)/subst.h $(topdir)/externs.h $(BASHINCDIR)/maxpath.h + mapfile.o: $(topdir)/shell.h $(topdir)/syntax.h $(topdir)/variables.h $(topdir)/conftypes.h + mapfile.o: $(topdir)/arrayfunc.h ../pathnames.h ++@CIRCLE@ circle.o: $(topdir)/command.h ../config.h $(BASHINCDIR)/memalloc.h $(topdir)/error.h ++@CIRCLE@ circle.o: $(topdir)/general.h $(topdir)/xmalloc.h $(topdir)/subst.h $(topdir)/externs.h ++@CIRCLE@ circle.o: $(topdir)/quit.h $(topdir)/dispose_cmd.h $(topdir)/make_cmd.h ++@CIRCLE@ circle.o: $(topdir)/shell.h $(topdir)/syntax.h $(topdir)/unwind_prot.h $(topdir)/variables.h $(topdir)/conftypes.h ++@CIRCLE@ circle.o: $(BASHINCDIR)/maxpath.h ../pathnames.h ++mpi.o: ../config.h ../config-top.h ../config-bot.h ../bashintl.h ++mpi.o: ../include/gettext.h ../shell.h ../config.h ../bashjmp.h ++mpi.o: ../include/posixjmp.h ../command.h ../syntax.h ../general.h ++mpi.o: ../bashtypes.h ../include/chartypes.h ../xmalloc.h ../bashansi.h ++mpi.o: ../error.h ../variables.h ../array.h ../assoc.h ../hashlib.h ++mpi.o: ../conftypes.h ../arrayfunc.h ../quit.h ../sig.h ../include/maxpath.h ++mpi.o: ../unwind_prot.h ../dispose_cmd.h ../make_cmd.h ../include/ocache.h ++mpi.o: ../subst.h ../pathnames.h ../externs.h common.h bashgetopt.h + + #bind.o: $(RL_LIBSRC)chardefs.h $(RL_LIBSRC)readline.h $(RL_LIBSRC)keymaps.h + +diff -Naur bash-4.3/builtins/mpi.def mpibash-4.3/builtins/mpi.def +--- bash-4.3/builtins/mpi.def 1969-12-31 17:00:00.000000000 -0700 ++++ mpibash-4.3/builtins/mpi.def 2014-05-13 11:27:37.314100671 -0600 +@@ -0,0 +1,744 @@ ++This 
file is mpi.def, from which is created mpi.c. ++It implements all of the "mpi_*" builtins in Bash. ++ ++$PRODUCES mpi.c ++ ++#include ++ ++#include ++#if defined (HAVE_UNISTD_H) ++# ifdef _MINIX ++# include ++# endif ++# include ++#endif ++ ++#include "../bashintl.h" ++#include "../shell.h" ++#include "common.h" ++#include "bashgetopt.h" ++#include ++ ++extern int running_trap, trap_saved_exit_value; ++ ++/* Keep track of who we are within MPI_COMM_WORLD. */ ++static int mpi_rank; ++static int mpi_num_ranks; ++ ++/* Try an MPI operation. Return with an error message on failure. */ ++#define MPI_TRY(STMT) \ ++ do \ ++ { \ ++ int mpierr; \ ++ mpierr = STMT; \ ++ if (mpierr != MPI_SUCCESS) \ ++ return report_mpi_error (mpierr); \ ++ } \ ++ while (0) ++ ++/* Return with a usage message if no arguments remain. */ ++#define YES_ARGS(LIST) \ ++ if ((LIST) == 0) \ ++ { \ ++ builtin_usage (); \ ++ return (EX_USAGE); \ ++ } ++ ++/* Return with an error message if a given variable is read-only or if ++ * we can't write to it for any other reason (e.g., it's defined as a ++ * function). */ ++#define REQUIRE_WRITABLE(NAME) \ ++ do \ ++ { \ ++ SHELL_VAR *bindvar = find_shell_variable (NAME); \ ++ if (bindvar) \ ++ { \ ++ if (readonly_p (bindvar)) \ ++ { \ ++ err_readonly (NAME); \ ++ return (EXECUTION_FAILURE); \ ++ } \ ++ if (unbind_variable (NAME) == -1) \ ++ { \ ++ builtin_error ("Failed to write to variable %s", NAME); \ ++ return (EXECUTION_FAILURE); \ ++ } \ ++ } \ ++ } \ ++ while (0) ++ ++/* Initialize MPI. */ ++void ++initialize_mpi (argc, argv) ++ int argc; ++ char **argv; ++{ ++ int init_done; ++ ++ MPI_Initialized (&init_done); ++ if (!init_done) ++ MPI_Init (&argc, &argv); ++ MPI_Errhandler_set (MPI_COMM_WORLD, MPI_ERRORS_RETURN); ++ MPI_Comm_rank (MPI_COMM_WORLD, &mpi_rank); ++ MPI_Comm_size (MPI_COMM_WORLD, &mpi_num_ranks); ++} ++ ++/* Finalize MPI. */ ++void ++finalize_mpi () ++{ ++ MPI_Finalize (); ++} ++ ++/* Parse an operation name into an MPI_Op. Return 1 on success, 0 on ++ * failure. */ ++static int ++parse_operation (char *name, MPI_Op *op) ++{ ++ /* Define a mapping from operator names to MPI_Op values. */ ++ typedef struct { ++ char *name; /* Operation name (e.g., "sum") */ ++ MPI_Op value; /* Operation value (e.g., MPI_SUM) */ ++ } opname2value_t; ++ static opname2value_t oplist[] = { ++ {"max", MPI_MAX}, ++ {"min", MPI_MIN}, ++ {"sum", MPI_SUM}, ++ {"prod", MPI_PROD}, ++ {"land", MPI_LAND}, ++ {"band", MPI_BAND}, ++ {"lor", MPI_LOR}, ++ {"bor", MPI_BOR}, ++ {"lxor", MPI_LXOR}, ++ {"bxor", MPI_BXOR}, ++ {"maxloc", MPI_MAXLOC}, ++ {"minloc", MPI_MINLOC} ++ }; ++ size_t i; ++ ++ for (i = 0; i < sizeof(oplist)/sizeof(opname2value_t); i++) ++ if (!strcmp(name, oplist[i].name)) ++ { ++ *op = oplist[i].value; ++ if (i > 0) ++ { ++ /* As a performance optimization, bubble up the value we ++ * just found. */ ++ opname2value_t prev = oplist[i - 1]; ++ oplist[i - 1] = oplist[i]; ++ oplist[i] = prev; ++ } ++ return 1; ++ } ++ return 0; ++} ++ ++/* Report an error to the user and return EXECUTION_FAILURE. */ ++static int ++report_mpi_error (mpierr) ++ int mpierr; ++{ ++ char errstr[MPI_MAX_ERROR_STRING]; ++ int errstrlen; ++ ++ MPI_Error_string (mpierr, errstr, &errstrlen); ++ builtin_error ("%s", errstr); ++ return EXECUTION_FAILURE; ++} ++ ++/* Perform the same operation as bind_variable, but with VALUE being a ++ * number, not a string. 
*/ ++static SHELL_VAR * ++bind_variable_number (name, value, flags) ++ const char *name; ++ long value; ++ int flags; ++{ ++ char numstr[25]; /* String version of VALUE */ ++ ++ sprintf (numstr, "%ld", value); ++ return bind_variable (name, numstr, flags); ++} ++ ++/* Perform the same operation as bind_array_variable, but with VALUE ++ * being a number, not a string. */ ++static SHELL_VAR * ++bind_array_variable_number (name, ind, value, flags) ++ char *name; ++ arrayind_t ind; ++ long value; ++ int flags; ++{ ++ char numstr[25]; /* String version of VALUE */ ++ ++ sprintf (numstr, "%ld", value); ++ return bind_array_variable (name, ind, numstr, flags); ++} ++ ++/* Define a reduction-type function (allreduce, scan, exscan, etc.). */ ++typedef int (*reduction_func_t)(void *, void *, int, MPI_Datatype, MPI_Op, MPI_Comm); ++ ++/* Perform any reduction-type operation (allreduce, scan, exscan, etc.). */ ++static int ++reduction_like (list, funcname, func) ++ WORD_LIST *list; ++ char *funcname; ++ reduction_func_t func; ++{ ++ char *word; /* One argument */ ++ struct { ++ long int value; /* Reduced value */ ++ int rank; /* Rank associated with the above */ ++ } number, result; ++ MPI_Op operation = MPI_SUM; /* Operation to perform */ ++ char *varname; /* Name of the variable to bind the results to */ ++ intmax_t n; ++ int i; ++ ++ /* Parse "-O OPERATION" (optional), where OPERATION is a reduction ++ * operation. */ ++ YES_ARGS (list); ++ word = list->word->word; ++ if (ISOPTION (word, 'O')) ++ { ++ list = list->next; ++ if (list == 0) ++ { ++ sh_needarg (funcname); ++ return (EX_USAGE); ++ } ++ word = list->word->word; ++ if (!parse_operation (word, &operation)) ++ { ++ sh_invalidopt ("-O"); ++ return (EX_USAGE); ++ } ++ list = list->next; ++ } ++ ++ /* Parse the argument, which must be a number. */ ++ YES_ARGS (list); ++ word = list->word->word; ++ if (!legal_number (word, &n)) ++ { ++ sh_neednumarg (funcname); ++ return (EX_USAGE); ++ } ++ number.value = (long int) n; ++ number.rank = mpi_rank; ++ list = list->next; ++ ++ /* Parse the target variable, which must not be read-only. */ ++ YES_ARGS (list); ++ varname = list->word->word; ++ if (mpi_rank != 0 || func != MPI_Exscan) ++ REQUIRE_WRITABLE (varname); ++ list = list->next; ++ no_args (list); ++ ++ /* Perform the reduction operation. Bind the given array variable ++ * to the result and, for minloc/maxloc, the associated rank. */ ++ if (mpi_rank != 0 || func != MPI_Exscan) { ++ bind_array_variable (varname, 0, "", 0); ++ bind_array_variable (varname, 1, "", 0); ++ } ++ if (operation == MPI_MINLOC || operation == MPI_MAXLOC) ++ { ++ MPI_TRY (func (&number, &result, 1, MPI_LONG_INT, operation, MPI_COMM_WORLD)); ++ if (mpi_rank != 0 || func != MPI_Exscan) ++ bind_array_variable_number (varname, 1, result.rank, 0); ++ } ++ else ++ MPI_TRY (func (&number.value, &result.value, 1, MPI_LONG, operation, MPI_COMM_WORLD)); ++ if (mpi_rank != 0 || func != MPI_Exscan) ++ bind_array_variable_number (varname, 0, result.value, 0); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN mpi_comm_rank ++$FUNCTION mpi_comm_rank_builtin ++$SHORT_DOC mpi_comm_rank name ++Return the process's rank in the MPI job. ++ ++Arguments: ++ NAME Scalar variable in which to receive the rank ++ ++Exit Status: ++Returns 0 unless an invalid option is given. ++$END ++/*'*/ ++ ++/* Here is the mpi_comm_rank builtin. 
*/ ++int ++mpi_comm_rank_builtin (list) ++ WORD_LIST *list; ++{ ++ char *varname; /* Name of the variable to bind the results to */ ++ ++ YES_ARGS (list); ++ varname = list->word->word; ++ REQUIRE_WRITABLE (varname); ++ list = list->next; ++ no_args (list); ++ bind_variable_number (varname, mpi_rank, 0); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN mpi_comm_size ++$FUNCTION mpi_comm_size_builtin ++$SHORT_DOC mpi_comm_size name ++Return the total number of ranks in the MPI job. ++ ++Arguments: ++ NAME Scalar variable in which to receive the number of ranks ++ ++Exit Status: ++Returns 0 unless an invalid option is given. ++$END ++ ++/* Here is the mpi_comm_size builtin. */ ++int ++mpi_comm_size_builtin (list) ++ WORD_LIST *list; ++{ ++ char *varname; /* Name of the variable to bind the results to */ ++ ++ YES_ARGS (list); ++ varname = list->word->word; ++ REQUIRE_WRITABLE (varname); ++ list = list->next; ++ no_args (list); ++ bind_variable_number (varname, mpi_num_ranks, 0); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN mpi_abort ++$FUNCTION mpi_abort_builtin ++$SHORT_DOC mpi_abort [n] ++Abort all processes in the MPI job and exit the shell. ++ ++Exits not only the caller's shell (with a status of N) but also all ++remote shells that are part of the same MPI job. If N is omitted, the ++exit status is that of the last command executed. ++ ++This command should be used only in extreme circumstances. It is ++better for each process to exit normally on its own. ++$END ++/*'*/ ++ ++/* Here is the mpi_abort builtin. */ ++int ++mpi_abort_builtin (list) ++ WORD_LIST *list; ++{ ++ int exit_value; ++ ++ exit_value = (running_trap == 1 && list == 0) ? trap_saved_exit_value : get_exitstat (list); /* Copied from exit.def */ ++ MPI_TRY (MPI_Abort (MPI_COMM_WORLD, exit_value)); ++ return EXECUTION_FAILURE; ++} ++ ++$BUILTIN mpi_send ++$FUNCTION mpi_send_builtin ++$SHORT_DOC mpi_send [-t tag] rank message ++Send a message to a remote process in the same MPI job. ++ ++Options: ++ -t TAG Send the message using tag TAG (default: 0). TAG must ++ be a nonnegative integer. ++ ++Arguments: ++ RANK Whom to send the message to. RANK must be an integer in ++ the range [0, $(mpi_comm_size)-1]. ++ ++ MESSAGE String to send to rank RANK. ++ ++Exit Status: ++Returns 0 unless an invalid option is given or an error occurs. ++$END ++ ++/* Here is the mpi_send builtin. */ ++int ++mpi_send_builtin (list) ++ WORD_LIST *list; ++{ ++ char *word; /* One argument */ ++ intmax_t target_rank; /* MPI target rank */ ++ char *message; /* Message to send to rank target_rank */ ++ intmax_t tag = 0; /* Message tag to use */ ++ ++ /* Parse "-t TAG" (optional), where TAG is a number or "any". */ ++ YES_ARGS (list); ++ word = list->word->word; ++ if (ISOPTION (word, 't')) ++ { ++ list = list->next; ++ if (list == 0) ++ { ++ sh_needarg ("mpi_recv"); ++ return (EX_USAGE); ++ } ++ word = list->word->word; ++ if (!legal_number (word, &tag)) ++ { ++ sh_neednumarg ("-t"); ++ return (EX_USAGE); ++ } ++ list = list->next; ++ } ++ else if (*word == '-') ++ { ++ sh_invalidopt (word); ++ builtin_usage (); ++ return (EX_USAGE); ++ } ++ ++ /* Parse the target rank, which must be a number. */ ++ YES_ARGS (list); ++ word = list->word->word; ++ if (!legal_number (word, &target_rank)) ++ { ++ builtin_error (_("mpi_send: numeric rank required")); ++ return (EX_USAGE); ++ } ++ list = list->next; ++ ++ /* Parse the message to send. */ ++ YES_ARGS (list); ++ message = list->word->word; ++ list = list->next; ++ no_args (list); ++ ++ /* Send the message. 
*/ ++ MPI_TRY (MPI_Send (message, strlen(message)+1, MPI_BYTE, (int)target_rank, (int)tag, MPI_COMM_WORLD)); ++ return EXECUTION_SUCCESS; ++} ++ ++ ++$BUILTIN mpi_recv ++$FUNCTION mpi_recv_builtin ++$SHORT_DOC mpi_recv [-t tag] rank name ++Receive a message from a remote process in the same MPI job. ++ ++Options: ++ -t TAG Receive only messages sent using tag TAG (default: 0). ++ TAG must be either a nonnegative integer or the string ++ "any" to receive messages sent using any tag. ++ ++Arguments: ++ RANK Receive only messages sent from sender RANK. RANK ++ must either be in the range [0, $(mpi_comm_size)-1] or ++ be the string "any" to receive messages from any sender. ++ ++ NAME Array variable in which to receive the message, sender ++ rank, and tag. ++ ++Exit Status: ++Returns 0 unless an invalid option is given or an error occurs. ++$END ++ ++/* Here is the mpi_recv builtin. */ ++int ++mpi_recv_builtin (list) ++ WORD_LIST *list; ++{ ++ char *word; /* One argument */ ++ intmax_t source_rank; /* MPI source rank */ ++ char *endptr; /* Used for parsing strings into numbers */ ++ MPI_Status status; /* Status of an MPI operation */ ++ int count; /* Message length in bytes */ ++ intmax_t tag = 0; /* Message tag to use */ ++ char *varname; /* Name of the variable to bind the results to */ ++ static char *message = NULL; /* Message received from MPI */ ++ static size_t alloced = 0; /* Number of bytes allocated for the above */ ++ int opt; /* Parsed option */ ++ ++ /* Parse any options provided. */ ++ reset_internal_getopt (); ++ while ((opt = internal_getopt (list, "t:")) != -1) ++ { ++ switch (opt) ++ { ++ case 't': ++ if (!strcmp (list_optarg, "any")) ++ tag = MPI_ANY_TAG; ++ else if (!legal_number (list_optarg, &tag)) ++ { ++ builtin_error (_("-t: numeric argument or \"any\" required")); ++ return (EX_USAGE); ++ } ++ break; ++ ++ default: ++ sh_invalidopt (word); ++ builtin_usage (); ++ return (EX_USAGE); ++ } ++ } ++ list = loptend; ++ ++ /* Parse the source rank, which must be a number or "any". */ ++ YES_ARGS (list); ++ word = list->word->word; ++ if (!legal_number (word, &source_rank)) ++ { ++ if (!strcmp (word, "any")) ++ source_rank = MPI_ANY_SOURCE; ++ else ++ { ++ builtin_error (_("mpi_recv: numeric rank or \"any\" required")); ++ return (EX_USAGE); ++ } ++ } ++ list = list->next; ++ ++ /* Parse the target variable, which must not be read-only. */ ++ YES_ARGS (list); ++ varname = list->word->word; ++ REQUIRE_WRITABLE (varname); ++ list = list->next; ++ no_args (list); ++ ++ /* Receive a message. Because we don't know long the message will ++ * be, we first probe to get the length. */ ++ MPI_TRY (MPI_Probe ((int)source_rank, (int)tag, MPI_COMM_WORLD, &status)); ++ MPI_TRY (MPI_Get_count (&status, MPI_BYTE, &count)); ++ if (alloced < count) ++ { ++ message = xrealloc (message, count); ++ alloced = count; ++ } ++ MPI_TRY (MPI_Recv (message, count, MPI_BYTE, status.MPI_SOURCE, status.MPI_TAG, MPI_COMM_WORLD, &status)); ++ bind_array_variable (varname, 0, message, 0); ++ bind_array_variable_number (varname, 1, status.MPI_SOURCE, 0); ++ bind_array_variable_number (varname, 2, status.MPI_TAG, 0); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN mpi_barrier ++$FUNCTION mpi_barrier_builtin ++$SHORT_DOC mpi_barrier ++Synchronizes all of the processes in the MPI job. ++ ++No process will return from mpi_barrier until all processes have ++called mpi_barrier. ++ ++Exit Status: ++Returns 0 unless an invalid option is given or an error occurs. ++$END ++ ++/* Here is the mpi_barrier builtin. 
*/ ++int ++mpi_barrier_builtin (list) ++ WORD_LIST *list; ++{ ++ no_args (list); ++ MPI_TRY (MPI_Barrier (MPI_COMM_WORLD)); ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN mpi_bcast ++$FUNCTION mpi_bcast_builtin ++$SHORT_DOC mpi_bcast [message] name ++Broadcast a message to all processes in the same MPI job. ++ ++Arguments: ++ MESSAGE String to broadcast from one process to all the others. ++ ++ NAME Scalar variable in which to receive the broadcast message. ++ ++Exactly one process in the MPI job must specify a message to ++broadcast. No process will return from mpi_bcast until all processes ++have called mpi_bcast. ++ ++Exit Status: ++Returns 0 unless an invalid option is given or an error occurs. ++$END ++ ++/* Here is the mpi_bcast builtin. */ ++int ++mpi_bcast_builtin (list) ++ WORD_LIST *list; ++{ ++ char *word; /* One argument */ ++ int root; /* MPI root rank */ ++ char *root_message; /* Message to broadcast */ ++ int msglen; /* Length in bytes of the above (including the NULL byte) */ ++ char *varname; /* Name of the variable to bind the results to */ ++ static int *all_lengths = NULL; /* List of every rank's msglen */ ++ static char *message = NULL; /* Message received from the root */ ++ static int alloced = 0; /* Bytes allocated for the above */ ++ int i; ++ ++ /* Parse the optional message and target variable, which must not be ++ * read-only. */ ++ YES_ARGS (list); ++ if (list->next == NULL) ++ { ++ /* Non-root */ ++ root_message = NULL; ++ msglen = -1; ++ } ++ else ++ { ++ /* Root */ ++ root_message = list->word->word; ++ msglen = (int) strlen(root_message) + 1; ++ list = list->next; ++ } ++ varname = list->word->word; ++ REQUIRE_WRITABLE (varname); ++ list = list->next; ++ no_args (list); ++ ++ /* Acquire global agreement on the root and the message size. */ ++ if (all_lengths == NULL) ++ all_lengths = xmalloc (mpi_num_ranks*sizeof(int)); ++ MPI_TRY (MPI_Allgather (&msglen, 1, MPI_INT, all_lengths, 1, MPI_INT, MPI_COMM_WORLD)); ++ root = -1; ++ for (i = 0; i < mpi_num_ranks; i++) ++ { ++ if (all_lengths[i] == -1) ++ continue; ++ if (root != -1) ++ { ++ builtin_error (_("mpi_bcast: more than one process specified a message")); ++ return (EXECUTION_FAILURE); ++ } ++ root = i; ++ msglen = all_lengths[i]; ++ } ++ if (root == -1) ++ { ++ builtin_error (_("mpi_bcast: no process specified a message")); ++ return (EXECUTION_FAILURE); ++ } ++ ++ /* Broadcast the message. */ ++ if (mpi_rank == root) ++ { ++ MPI_TRY (MPI_Bcast (root_message, msglen, MPI_BYTE, root, MPI_COMM_WORLD)); ++ bind_variable (varname, root_message, 0); ++ } ++ else ++ { ++ if (alloced < msglen) ++ { ++ message = xrealloc (message, msglen); ++ alloced = msglen; ++ } ++ MPI_TRY (MPI_Bcast (message, msglen, MPI_BYTE, root, MPI_COMM_WORLD)); ++ bind_variable (varname, message, 0); ++ } ++ return EXECUTION_SUCCESS; ++} ++ ++$BUILTIN mpi_scan ++$FUNCTION mpi_scan_builtin ++$SHORT_DOC mpi_scan number name ++Perform an inclusive scan across all processes in the same MPI job. ++ ++ -O OPERATION Operation to perform. Must be one of "max", "min", ++ "sum", "prod", "land", "band", "lor", "bor", "lxor", ++ "bxor", "maxloc", or "minloc" (default: "sum"). ++ ++Arguments: ++ NUMBER Integer to use in the scan operation. ++ ++ NAME Array variable in which to receive the result and, in ++ the case of maxloc and minloc, the associated rank. ++ ++In an inclusive-scan operation, each process i presents a number, ++a[i]. 
Once all processes in the MPI job have presented their number, ++the command returns a[0] to rank 0, a[0]+a[1] to rank 1, ++a[0]+a[1]+a[2] to rank 2, and so forth. The -O option enables "+" to ++be replaced with other operations. ++ ++Inclusive scans can be useful for assigning a unique index to each ++process in the MPI job. ++ ++Exit Status: ++Returns 0 unless an invalid option is given or an error occurs. ++$END ++ ++/* Here is the mpi_scan builtin. */ ++int ++mpi_scan_builtin (list) ++ WORD_LIST *list; ++{ ++ return reduction_like (list, "mpi_scan", MPI_Scan); ++} ++ ++$BUILTIN mpi_exscan ++$FUNCTION mpi_exscan_builtin ++$SHORT_DOC mpi_exscan number name ++Perform an exclusive scan across all processes in the same MPI job. ++ ++ -O OPERATION Operation to perform. Must be one of "max", "min", ++ "sum", "prod", "land", "band", "lor", "bor", "lxor", ++ "bxor", "maxloc", or "minloc" (default: "sum"). ++ ++Arguments: ++ NUMBER Integer to use in the scan operation. ++ ++ NAME Array variable in which to receive the result and, in ++ the case of maxloc and minloc, the associated rank. ++ ++In a exclusive-scan operation, each process i presents a number, a[i]. ++Once all processes in the MPI job have presented their number, the ++command assigns a[0] to NAME on rank 1, a[0]+a[1] to NAME on rank 2, ++a[0]+a[1]+a[2] to NAME on rank 3, and so forth. No assignment is ++performed on rank 0. The -O option enables "+" to be replaced with ++other operations. ++ ++Exclusive scans can be useful for assigning a unique index to each ++process in the MPI job. ++ ++Exit Status: ++Returns 0 unless an invalid option is given or an error occurs. ++$END ++ ++/* Here is the mpi_exscan builtin. */ ++int ++mpi_exscan_builtin (list) ++ WORD_LIST *list; ++{ ++ return reduction_like (list, "mpi_exscan", MPI_Exscan); ++} ++ ++$BUILTIN mpi_allreduce ++$FUNCTION mpi_allreduce_builtin ++$SHORT_DOC mpi_allreduce number name ++Reduce numbers from all processes in an MPI job to a single number. ++ ++Options: ++ ++ -O OPERATION Operation to perform. Must be one of "max", "min", ++ "sum", "prod", "land", "band", "lor", "bor", "lxor", ++ "bxor", "maxloc", or "minloc" (default: "sum"). ++ ++Arguments: ++ NUMBER Integer to use in the allreduce operation. ++ ++ NAME Array variable in which to receive the result and, in ++ the case of maxloc and minloc, the associated rank. ++ ++In an all-reduce operation, each process i presents a number, a[i]. ++Once all processes in the MPI job have presented their number, the ++command returns a[0]+a[1]+...+a[n-1] to all ranks. The -O option ++enables "+" to be replaced with other operations. ++ ++All-reduces can be useful for reaching global agreement (e.g., of a ++termination condition). ++ ++Exit Status: ++Returns 0 unless an invalid option is given or an error occurs. ++$END ++ ++/* Here is the mpi_allreduce builtin. */ ++int ++mpi_allreduce_builtin (list) ++ WORD_LIST *list; ++{ ++ return reduction_like (list, "mpi_allreduce", MPI_Allreduce); ++} +diff -Naur bash-4.3/config.h.in mpibash-4.3/config.h.in +--- bash-4.3/config.h.in 2013-06-29 15:35:33.000000000 -0600 ++++ mpibash-4.3/config.h.in 2014-05-13 11:27:37.314100671 -0600 +@@ -1147,6 +1147,12 @@ + /* Define if you have the `__argz_stringify' function. */ + #undef HAVE___ARGZ_STRINGIFY + ++/* Define if you have both the header file and the libcircle library. */ ++#undef HAVE_LIBCIRCLE ++ ++/* Define if you have the `CIRCLE_cb_reduce_op' function. 
*/ ++#undef HAVE_CIRCLE_CB_REDUCE_OP ++ + /* End additions for lib/intl */ + + #include "config-bot.h" +diff -Naur bash-4.3/configure.ac mpibash-4.3/configure.ac +--- bash-4.3/configure.ac 2014-02-11 08:37:53.000000000 -0700 ++++ mpibash-4.3/configure.ac 2014-05-13 11:27:37.302100179 -0600 +@@ -24,7 +24,7 @@ + AC_REVISION([for Bash 4.3, version 4.063])dnl + + define(bashvers, 4.3) +-define(relstatus, release) ++define(relstatus, MPI) + + AC_INIT([bash], bashvers-relstatus, [bug-bash@gnu.org]) + +@@ -813,6 +813,21 @@ + fi + ]) + ++dnl Ensure that we can find an MPI library. ++AC_CHECK_FUNCS([MPI_Init], [], [ ++ AC_MSG_ERROR([Cannot continue without MPI. Consider specifying CC=mpicc.])]) ++ ++dnl If we have Libcircle, use it, too. ++AC_SEARCH_LIBS([CIRCLE_cb_create], [circle], [AC_CHECK_HEADERS([libcircle.h])]) ++if test "x$ac_cv_header_libcircle_h" = xyes; then ++ libcircle_make_prefix="" ++ AC_DEFINE([HAVE_LIBCIRCLE], [1], [Define if you have the Libcircle header and library.]) ++ AC_CHECK_FUNCS([CIRCLE_cb_reduce_op]) ++else ++ libcircle_make_prefix="#" ++fi ++AC_SUBST([CIRCLE], [$libcircle_make_prefix]) ++ + BASH_CHECK_DECL(strtoimax) + BASH_CHECK_DECL(strtol) + BASH_CHECK_DECL(strtoll) +diff -Naur bash-4.3/Makefile.in mpibash-4.3/Makefile.in +--- bash-4.3/Makefile.in 2014-01-25 14:27:30.000000000 -0700 ++++ mpibash-4.3/Makefile.in 2014-05-13 11:27:37.314100671 -0600 +@@ -104,7 +104,7 @@ + VERSPROG = bashversion$(EXEEXT) + VERSOBJ = bashversion.$(OBJEXT) + +-Program = bash$(EXEEXT) ++Program = mpibash$(EXEEXT) + Version = @BASHVERS@ + PatchLevel = `$(BUILD_DIR)/$(VERSPROG) -p` + RELSTATUS = @RELSTATUS@ +diff -Naur bash-4.3/shell.c mpibash-4.3/shell.c +--- bash-4.3/shell.c 2014-01-14 06:04:32.000000000 -0700 ++++ mpibash-4.3/shell.c 2014-05-13 11:27:37.314100671 -0600 +@@ -107,6 +107,13 @@ + extern char *primary_prompt, *secondary_prompt; + extern char *this_command_name; + ++extern void initialize_mpi __P((int, char **)); ++extern void finalize_mpi __P((void)); ++#ifdef HAVE_LIBCIRCLE ++extern void initialize_libcircle __P((int, char **)); ++extern void finalize_libcircle __P((void)); ++#endif ++ + /* Non-zero means that this shell has already been run; i.e. you should + call shell_reinitialize () if you need to start afresh. */ + int shell_initialized = 0; +@@ -324,7 +331,7 @@ + static void init_interactive_script __P((void)); + + static void set_shell_name __P((char *)); +-static void shell_initialize __P((void)); ++static void shell_initialize __P((int, char **)); + static void shell_reinitialize __P((void)); + + static void show_shell_usage __P((FILE *, int)); +@@ -561,7 +568,7 @@ + + /* From here on in, the shell must be a normal functioning shell. + Variables from the environment are expected to be set, etc. */ +- shell_initialize (); ++ shell_initialize (argc, argv); + + set_default_lang (); + set_default_locale_vars (); +@@ -941,6 +948,12 @@ + end_job_control (); + #endif /* JOB_CONTROL */ + ++#ifdef HAVE_LIBCIRCLE ++ finalize_libcircle (); ++#else ++ finalize_mpi (); ++#endif ++ + /* Always return the exit status of the last command to our parent. */ + sh_exit (s); + } +@@ -1691,7 +1704,9 @@ + /* Do whatever is necessary to initialize the shell. + Put new initializations in here. 
*/ + static void +-shell_initialize () ++shell_initialize (argc, argv) ++ int argc; ++ char **argv; + { + char hostname[256]; + +@@ -1760,6 +1775,17 @@ + initialize_shell_options (privileged_mode||running_setuid); + initialize_bashopts (privileged_mode||running_setuid); + #endif ++ ++ /* Initialize Libcircle and MPI. */ ++#ifdef HAVE_LIBCIRCLE ++ initialize_libcircle (argc, argv); ++ initialize_mpi (argc, argv); ++ bind_variable ("libcircle", "yes", 0); ++#else ++ initialize_mpi (argc, argv); ++ bind_variable ("libcircle", "no", 0); ++#endif ++ bind_variable ("mpibash", "yes", 0); + } + + /* Function called by main () when it appears that the shell has already diff --git a/var/spack/repos/builtin/packages/mpibash/package.py b/var/spack/repos/builtin/packages/mpibash/package.py new file mode 100644 index 0000000000..d0f6dafed6 --- /dev/null +++ b/var/spack/repos/builtin/packages/mpibash/package.py @@ -0,0 +1,32 @@ +import os +from spack import * + +class Mpibash(Package): + """Parallel scripting right from the Bourne-Again Shell (Bash)""" + homepage = "http://www.ccs3.lanl.gov/~pakin/software/mpibash-4.3.html" + + version('4.3', '81348932d5da294953e15d4814c74dd1', + url="http://ftp.gnu.org/gnu/bash/bash-4.3.tar.gz") + + # patch -p1 < ../mpibash-4.3.patch + patch('mpibash-4.3.patch', level=1, when='@4.3') + + # above patch modifies configure.ac + depends_on('autoconf') + + # uses MPI_Exscan which is in MPI-1.2 and later + depends_on('mpi@1.2:') + + depends_on('libcircle') + + def install(self, spec, prefix): + # run autoconf to rebuild configure + autoconf = which('autoconf') + autoconf() + + configure("--prefix=" + prefix, + "CC=mpicc") + + make(parallel=False) + + make("install") diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py new file mode 100644 index 0000000000..d48bf878f6 --- /dev/null +++ b/var/spack/repos/builtin/packages/mpich/package.py @@ -0,0 +1,92 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * +import os + +class Mpich(Package): + """MPICH is a high performance and widely portable implementation of + the Message Passing Interface (MPI) standard.""" + homepage = "http://www.mpich.org" + url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz" + list_url = "http://www.mpich.org/static/downloads/" + list_depth = 2 + + version('3.1.4', '2ab544607986486562e076b83937bba2') + version('3.1.3', '93cb17f91ac758cbf9174ecb03563778') + version('3.1.2', '7fbf4b81dcb74b07ae85939d1ceee7f1') + version('3.1.1', '40dc408b1e03cc36d80209baaa2d32b7') + version('3.1', '5643dd176499bfb7d25079aaff25f2ec') + version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0') + + provides('mpi@:3.0', when='@3:') + provides('mpi@:1.3', when='@1:') + + def setup_dependent_environment(self, module, spec, dep_spec): + """For dependencies, make mpicc's use spack wrapper.""" + os.environ['MPICH_CC'] = 'cc' + os.environ['MPICH_CXX'] = 'c++' + os.environ['MPICH_F77'] = 'f77' + os.environ['MPICH_F90'] = 'f90' + + + def install(self, spec, prefix): + config_args = ["--prefix=" + prefix, + "--enable-shared"] + + # TODO: Spack should make it so that you can't actually find + # these compilers if they're "disabled" for the current + # compiler configuration. + if not self.compiler.f77: + config_args.append("--disable-f77") + + if not self.compiler.fc: + config_args.append("--disable-fc") + + configure(*config_args) + make() + make("install") + + self.filter_compilers() + + + def filter_compilers(self): + """Run after install to make the MPI compilers use the + compilers that Spack built the package with. + + If this isn't done, they'll have CC, CXX, F77, and FC set + to Spack's generic cc, c++, f77, and f90. We want them to + be bound to whatever compiler they were built with. + """ + bin = self.prefix.bin + mpicc = os.path.join(bin, 'mpicc') + mpicxx = os.path.join(bin, 'mpicxx') + mpif77 = os.path.join(bin, 'mpif77') + mpif90 = os.path.join(bin, 'mpif90') + + kwargs = { 'ignore_absent' : True, 'backup' : False, 'string' : True } + filter_file('CC="cc"', 'CC="%s"' % self.compiler.cc, mpicc, **kwargs) + filter_file('CXX="c++"', 'CXX="%s"' % self.compiler.cxx, mpicxx, **kwargs) + filter_file('F77="f77"', 'F77="%s"' % self.compiler.f77, mpif77, **kwargs) + filter_file('FC="f90"', 'FC="%s"' % self.compiler.fc, mpif90, **kwargs) diff --git a/var/spack/repos/builtin/packages/mpileaks/package.py b/var/spack/repos/builtin/packages/mpileaks/package.py new file mode 100644 index 0000000000..4ef866588c --- /dev/null +++ b/var/spack/repos/builtin/packages/mpileaks/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Mpileaks(Package): + """Tool to detect and report leaked MPI objects like MPI_Requests and MPI_Datatypes.""" + + homepage = "https://github.com/hpc/mpileaks" + url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz" + + version('1.0', '8838c574b39202a57d7c2d68692718aa') + + depends_on("mpi") + depends_on("adept-utils") + depends_on("callpath") + + def install(self, spec, prefix): + configure("--prefix=" + prefix, + "--with-adept-utils=" + spec['adept-utils'].prefix, + "--with-callpath=" + spec['callpath'].prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/mrnet/package.py b/var/spack/repos/builtin/packages/mrnet/package.py new file mode 100644 index 0000000000..6e9766f275 --- /dev/null +++ b/var/spack/repos/builtin/packages/mrnet/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Mrnet(Package): + """The MRNet Multi-Cast Reduction Network.""" + homepage = "http://paradyn.org/mrnet" + url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz" + + version('4.0.0', 'd00301c078cba57ef68613be32ceea2f') + version('4.1.0', '5a248298b395b329e2371bf25366115c') + + parallel = False + + depends_on("boost") + + def install(self, spec, prefix): + configure("--prefix=%s" %prefix, "--enable-shared") + + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/munge/package.py b/var/spack/repos/builtin/packages/munge/package.py new file mode 100644 index 0000000000..c737ca0354 --- /dev/null +++ b/var/spack/repos/builtin/packages/munge/package.py @@ -0,0 +1,20 @@ +from spack import * +import os + +class Munge(Package): + """ MUNGE Uid 'N' Gid Emporium """ + homepage = "https://code.google.com/p/munge/" + url = "https://github.com/dun/munge/releases/download/munge-0.5.11/munge-0.5.11.tar.bz2" + + version('0.5.11', 'bd8fca8d5f4c1fcbef1816482d49ee01', url='https://github.com/dun/munge/releases/download/munge-0.5.11/munge-0.5.11.tar.bz2') + + depends_on('openssl') + depends_on('libgcrypt') + + def install(self, spec, prefix): + os.makedirs(os.path.join(prefix, "lib/systemd/system")) + configure("--prefix=%s" % prefix) + + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/muster/package.py b/var/spack/repos/builtin/packages/muster/package.py new file mode 100644 index 0000000000..722daf3d7f --- /dev/null +++ b/var/spack/repos/builtin/packages/muster/package.py @@ -0,0 +1,22 @@ +from spack import * + +class Muster(Package): + """The Muster library provides implementations of sequential and + parallel K-Medoids clustering algorithms. It is intended as a + general framework for parallel cluster analysis, particularly + for performance data analysis on systems with very large + numbers of processes. 
+ """ + homepage = "https://github.com/scalability-llnl/muster" + url = "https://github.com/scalability-llnl/muster/archive/v1.0.tar.gz" + + version('1.0.1', 'd709787db7e080447afb6571ac17723c') + version('1.0', '2eec6979a4a36d3a65a792d12969be16') + + depends_on("boost") + depends_on("mpi") + + def install(self, spec, prefix): + cmake(".", *std_cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/mvapich2/ad_lustre_rwcontig_open_source.patch b/var/spack/repos/builtin/packages/mvapich2/ad_lustre_rwcontig_open_source.patch new file mode 100644 index 0000000000..ff85845cf8 --- /dev/null +++ b/var/spack/repos/builtin/packages/mvapich2/ad_lustre_rwcontig_open_source.patch @@ -0,0 +1,11 @@ +--- a/src/mpi/romio/adio/ad_lustre/ad_lustre_rwcontig.c 2013-12-10 12:05:44.806417000 -0800 ++++ b/src/mpi/romio/adio/ad_lustre/ad_lustre_rwcontig.c 2013-12-10 11:53:03.295622000 -0800 +@@ -8,7 +8,7 @@ + * Copyright (C) 2008 Sun Microsystems, Lustre group + */ + +-#define _XOPEN_SOURCE 600 ++//#define _XOPEN_SOURCE 600 + #include + #include + #include "ad_lustre.h" diff --git a/var/spack/repos/builtin/packages/mvapich2/package.py b/var/spack/repos/builtin/packages/mvapich2/package.py new file mode 100644 index 0000000000..ca0b1287c1 --- /dev/null +++ b/var/spack/repos/builtin/packages/mvapich2/package.py @@ -0,0 +1,104 @@ +import os +from spack import * + +class Mvapich2(Package): + """mvapich2 is an MPI implmenetation for infiniband networks.""" + homepage = "http://mvapich.cse.ohio-state.edu/" + + version('1.9', '5dc58ed08fd3142c260b70fe297e127c', + url="http://mvapich.cse.ohio-state.edu/download/mvapich2/mv2/mvapich2-1.9.tgz") + patch('ad_lustre_rwcontig_open_source.patch', when='@1.9') + + version('2.0', '9fbb68a4111a8b6338e476dc657388b4', + url='http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz') + + provides('mpi@:2.2', when='@1.9') # MVAPICH2-1.9 supports MPI 2.2 + provides('mpi@:3.0', when='@2.0') # MVAPICH2-2.0 supports MPI 3.0 + + + def install(self, spec, prefix): + # we'll set different configure flags depending on our environment + configure_args = [] + + # TODO: The MPICH*_FLAGS have a different name for 1.9 + + if '+debug' in spec: + # set configure flags for debug build + configure_args.append("--disable-fast") + configure_args.append("--enable-g=dbg") + configure_args.append("--enable-error-checking=runtime") + configure_args.append("--enable-error-messages=all") + configure_args.append("--enable-nmpi-as-mpi") + + if "%gnu" in spec: + # set variables for GNU compilers + os.environ['MPICHLIB_CFLAGS'] = "-g -O0" + os.environ['MPICHLIB_CXXFLAGS'] = "-g -O0" + os.environ['MPICHLIB_FFLAGS'] = "-g -O0 -fno-second-underscore" + os.environ['MPICHLIB_F90FLAGS'] = "-g -O0 -fno-second-underscore" + elif "%intel" in spec: + # set variables for Inel compilers + os.environ['MPICHLIB_CFLAGS'] = "-g -O0" + os.environ['MPICHLIB_CXXFLAGS'] = "-g -O0" + os.environ['MPICHLIB_FFLAGS'] = "-g -O0" + os.environ['MPICHLIB_F90FLAGS'] = "-g -O0" + elif "%pgi" in spec: + # set variables for PGI compilers + os.environ['MPICHLIB_CFLAGS'] = "-g -O0 -fPIC" + os.environ['MPICHLIB_CXXFLAGS'] = "-g -O0 -fPIC" + os.environ['MPICHLIB_FFLAGS'] = "-g -O0 -fPIC" + os.environ['MPICHLIB_F90FLAGS'] = "-g -O0 -fPIC" + + else: + # set configure flags for normal optimizations + configure_args.append("--enable-fast=all") + configure_args.append("--enable-g=dbg") + configure_args.append("--enable-nmpi-as-mpi") + + if "%gnu" in spec: + # set variables for what compilers + 
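+                # (Presumably "set variables for GNU compilers": this %gnu branch
+                # mirrors the +debug settings above, but with -O2 instead of -O0.)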
os.environ['MPICHLIB_CFLAGS'] = "-g -O2" + os.environ['MPICHLIB_CXXFLAGS'] = "-g -O2" + os.environ['MPICHLIB_FFLAGS'] = "-g -O2 -fno-second-underscore" + os.environ['MPICHLIB_F90FLAGS'] = "-g -O2 -fno-second-underscore" + elif "%intel" in spec: + # set variables for Inel compilers + os.environ['MPICHLIB_CFLAGS'] = "-g -O2" + os.environ['MPICHLIB_CXXFLAGS'] = "-g -O2" + os.environ['MPICHLIB_FFLAGS'] = "-g -O2" + os.environ['MPICHLIB_F90FLAGS'] = "-g -O2" + elif "%pgi" in spec: + # set variables for PGI compilers + os.environ['MPICHLIB_CFLAGS'] = "-g -O2 -fPIC" + os.environ['MPICHLIB_CXXFLAGS'] = "-g -O2 -fPIC" + os.environ['MPICHLIB_FFLAGS'] = "-g -O2 -fPIC" + os.environ['MPICHLIB_F90FLAGS'] = "-g -O2 -fPIC" + + # determine network type by variant + if "+psm" in spec: + # throw this flag on QLogic systems to use PSM + configure_args.append("--with-device=ch3:psm") + else: + # throw this flag on IB systems + configure_args.append("--with-device=ch3:mrail", "--with-rdma=gen2") + + # TODO: shared-memory build + + # TODO: CUDA + + # TODO: other file systems like panasis + + configure( + "--prefix=" + prefix, + "--enable-f77", "--enable-fc", "--enable-cxx", + "--enable-shared", "--enable-sharedlibs=gcc", + "--enable-debuginfo", + "--with-pm=no", "--with-pmi=slurm", + "--enable-romio", "--with-file-system=lustre+nfs+ufs", + "--disable-mpe", "--without-mpe", + "--disable-silent-rules", + *configure_args) + + make() + + make("install") diff --git a/var/spack/repos/builtin/packages/nasm/package.py b/var/spack/repos/builtin/packages/nasm/package.py new file mode 100644 index 0000000000..933b6a62c5 --- /dev/null +++ b/var/spack/repos/builtin/packages/nasm/package.py @@ -0,0 +1,14 @@ +from spack import * + +class Nasm(Package): + """NASM (Netwide Assembler) is an 80x86 assembler designed for + portability and modularity. It includes a disassembler as well.""" + homepage = "http://www.nasm.us" + url = "http://www.nasm.us/pub/nasm/releasebuilds/2.11.06/nasm-2.11.06.tar.xz" + + version('2.11.06', '2b958e9f5d200641e6fc9564977aecc5') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/ncdu/package.py b/var/spack/repos/builtin/packages/ncdu/package.py new file mode 100644 index 0000000000..234f9730d6 --- /dev/null +++ b/var/spack/repos/builtin/packages/ncdu/package.py @@ -0,0 +1,28 @@ +from spack import * + +class Ncdu(Package): + """ + Ncdu is a disk usage analyzer with an ncurses interface. It is designed + to find space hogs on a remote server where you don't have an entire + gaphical setup available, but it is a useful tool even on regular desktop + systems. Ncdu aims to be fast, simple and easy to use, and should be able + to run in any minimal POSIX-like environment with ncurses installed. 
+ """ + + homepage = "http://dev.yorhel.nl/ncdu" + url = "http://dev.yorhel.nl/download/ncdu-1.11.tar.gz" + + version('1.11', '9e44240a5356b029f05f0e70a63c4d12') + version('1.10', '7535decc8d54eca811493e82d4bfab2d') + version('1.9' , '93258079db897d28bb8890e2db89b1fb') + version('1.8' , '94d7a821f8a0d7ba8ef3dd926226f7d5') + version('1.7' , '172047c29d232724cc62e773e82e592a') + + depends_on("ncurses") + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix, + '--with-ncurses=%s' % spec['ncurses']) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/ncurses/package.py b/var/spack/repos/builtin/packages/ncurses/package.py new file mode 100644 index 0000000000..cc180bbae1 --- /dev/null +++ b/var/spack/repos/builtin/packages/ncurses/package.py @@ -0,0 +1,33 @@ +from spack import * + +class Ncurses(Package): + """The ncurses (new curses) library is a free software emulation of curses + in System V Release 4.0, and more. It uses terminfo format, supports pads and + color and multiple highlights and forms characters and function-key mapping, + and has all the other SYSV-curses enhancements over BSD curses. + """ + + homepage = "http://invisible-island.net/ncurses/ncurses.html" + + version('5.9', '8cb9c412e5f2d96bc6f459aa8c6282a1', + url='http://ftp.gnu.org/pub/gnu/ncurses/ncurses-5.9.tar.gz') + version('6.0', 'ee13d052e1ead260d7c28071f46eefb1', + url='http://ftp.gnu.org/pub/gnu/ncurses/ncurses-6.0.tar.gz') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--with-shared", + "--enable-widec", + "--disable-pc-files", + "--without-ada") + make() + make("install") + + configure("--prefix=%s" % prefix, + "--with-shared", + "--disable-widec", + "--disable-pc-files", + "--without-ada") + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/netcdf/netcdf-4.3.3-mpi.patch b/var/spack/repos/builtin/packages/netcdf/netcdf-4.3.3-mpi.patch new file mode 100644 index 0000000000..46dda5fc9d --- /dev/null +++ b/var/spack/repos/builtin/packages/netcdf/netcdf-4.3.3-mpi.patch @@ -0,0 +1,25 @@ +diff -Nur netcdf-4.3.3/CMakeLists.txt netcdf-4.3.3.mpi/CMakeLists.txt +--- netcdf-4.3.3/CMakeLists.txt 2015-02-12 16:44:35.000000000 -0500 ++++ netcdf-4.3.3.mpi/CMakeLists.txt 2015-10-14 16:44:41.176300658 -0400 +@@ -753,6 +753,7 @@ + SET(USE_PARALLEL OFF CACHE BOOL "") + MESSAGE(STATUS "Cannot find HDF5 library built with parallel support. 
Disabling parallel build.") + ELSE() ++ FIND_PACKAGE(MPI REQUIRED) + SET(USE_PARALLEL ON CACHE BOOL "") + SET(STATUS_PARALLEL "ON") + ENDIF() +diff -Nur netcdf-4.3.3/liblib/CMakeLists.txt netcdf-4.3.3.mpi/liblib/CMakeLists.txt +--- netcdf-4.3.3/liblib/CMakeLists.txt 2015-02-12 16:44:35.000000000 -0500 ++++ netcdf-4.3.3.mpi/liblib/CMakeLists.txt 2015-10-14 16:44:57.757793634 -0400 +@@ -71,6 +71,10 @@ + SET(TLL_LIBS ${TLL_LIBS} ${CURL_LIBRARY}) + ENDIF() + ++IF(USE_PARALLEL) ++ SET(TLL_LIBS ${TLL_LIBS} ${MPI_C_LIBRARIES}) ++ENDIF() ++ + IF(USE_HDF4) + SET(TLL_LIBS ${TLL_LIBS} ${HDF4_LIBRARIES}) + ENDIF() diff --git a/var/spack/repos/builtin/packages/netcdf/package.py b/var/spack/repos/builtin/packages/netcdf/package.py new file mode 100644 index 0000000000..e1e0d836c6 --- /dev/null +++ b/var/spack/repos/builtin/packages/netcdf/package.py @@ -0,0 +1,27 @@ +from spack import * + +class Netcdf(Package): + """NetCDF is a set of software libraries and self-describing, machine-independent + data formats that support the creation, access, and sharing of array-oriented + scientific data.""" + + homepage = "http://www.unidata.ucar.edu/software/netcdf/" + url = "ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4.3.3.tar.gz" + + version('4.3.3', '5fbd0e108a54bd82cb5702a73f56d2ae') + + patch('netcdf-4.3.3-mpi.patch') + + # Dependencies: + # >HDF5 + depends_on("hdf5") + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake('..', + "-DCMAKE_INSTALL_PREFIX:PATH=%s" % prefix, + "-DENABLE_DAP:BOOL=OFF", # Disable DAP. + "-DBUILD_SHARED_LIBS:BOOL=OFF") # Don't build shared libraries (use static libs). + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/netgauge/package.py b/var/spack/repos/builtin/packages/netgauge/package.py new file mode 100644 index 0000000000..c2378b0718 --- /dev/null +++ b/var/spack/repos/builtin/packages/netgauge/package.py @@ -0,0 +1,43 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Netgauge(Package): + """Netgauge is a high-precision network parameter measurement + tool. It supports benchmarking of many different network protocols + and communication patterns. The main focus lies on accuracy, + statistical analysis and easy extensibility. 
+ """ + homepage = "http://unixer.de/research/netgauge/" + url = "http://unixer.de/research/netgauge/netgauge-2.4.6.tar.gz" + + version('2.4.6', 'e0e040ec6452e93ca21ccc54deac1d7f') + + depends_on("mpi") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/netlib-blas/package.py b/var/spack/repos/builtin/packages/netlib-blas/package.py new file mode 100644 index 0000000000..85e97323d3 --- /dev/null +++ b/var/spack/repos/builtin/packages/netlib-blas/package.py @@ -0,0 +1,46 @@ +from spack import * +import os + + +class NetlibBlas(Package): + """Netlib reference BLAS""" + homepage = "http://www.netlib.org/lapack/" + url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz" + + version('3.5.0', 'b1d3e3e425b2e44a06760ff173104bdf') + + variant('fpic', default=False, description="Build with -fpic compiler option") + + # virtual dependency + provides('blas') + + # Doesn't always build correctly in parallel + parallel = False + + def patch(self): + os.symlink('make.inc.example', 'make.inc') + + mf = FileFilter('make.inc') + mf.filter('^FORTRAN.*', 'FORTRAN = f90') + mf.filter('^LOADER.*', 'LOADER = f90') + mf.filter('^CC =.*', 'CC = cc') + + if '+fpic' in self.spec: + mf.filter('^OPTS.*=.*', 'OPTS = -O2 -frecursive -fpic') + mf.filter('^CFLAGS =.*', 'CFLAGS = -O3 -fpic') + + + def install(self, spec, prefix): + make('blaslib') + + # Tests that blas builds correctly + make('blas_testing') + + # No install provided + mkdirp(prefix.lib) + install('librefblas.a', prefix.lib) + + # Blas virtual package should provide blas.a and libblas.a + with working_dir(prefix.lib): + symlink('librefblas.a', 'blas.a') + symlink('librefblas.a', 'libblas.a') diff --git a/var/spack/repos/builtin/packages/netlib-lapack/package.py b/var/spack/repos/builtin/packages/netlib-lapack/package.py new file mode 100644 index 0000000000..fb6b99e27c --- /dev/null +++ b/var/spack/repos/builtin/packages/netlib-lapack/package.py @@ -0,0 +1,59 @@ +from spack import * + +class NetlibLapack(Package): + """ + LAPACK version 3.X is a comprehensive FORTRAN library that does + linear algebra operations including matrix inversions, least + squared solutions to linear sets of equations, eigenvector + analysis, singular value decomposition, etc. It is a very + comprehensive and reputable package that has found extensive + use in the scientific community. + """ + homepage = "http://www.netlib.org/lapack/" + url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz" + + version('3.5.0', 'b1d3e3e425b2e44a06760ff173104bdf') + version('3.4.2', '61bf1a8a4469d4bdb7604f5897179478') + version('3.4.1', '44c3869c38c8335c2b9c2a8bb276eb55') + version('3.4.0', '02d5706ec03ba885fc246e5fa10d8c70') + version('3.3.1', 'd0d533ec9a5b74933c2a1e84eedc58b4') + + variant('shared', default=False, description="Build shared library version") + + # virtual dependency + provides('lapack') + + # blas is a virtual dependency. 
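+    # Note (illustrative, assuming both providers are available in the repo):
+    # the concrete BLAS is chosen by the concretized spec, not by this file,
+    # and the @when-decorated get_blas_libs() methods below dispatch on it:
+    #
+    #   spack install netlib-lapack ^netlib-blas   # links blas.a
+    #   spack install netlib-lapack ^atlas         # links libf77blas.a, libatlas.a
+    #
+    # Only the method whose @when constraint matches the spec gets called.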
+ depends_on('blas') + + depends_on('cmake') + + # Doesn't always build correctly in parallel + parallel = False + + @when('^netlib-blas') + def get_blas_libs(self): + blas = self.spec['netlib-blas'] + return [join_path(blas.prefix.lib, 'blas.a')] + + + @when('^atlas') + def get_blas_libs(self): + blas = self.spec['atlas'] + return [join_path(blas.prefix.lib, l) + for l in ('libf77blas.a', 'libatlas.a')] + + + def install(self, spec, prefix): + blas_libs = ";".join(self.get_blas_libs()) + cmake_args = [".", '-DBLAS_LIBRARIES=' + blas_libs] + + if '+shared' in spec: + cmake_args.append('-DBUILD_SHARED_LIBS=ON') + + cmake_args += std_cmake_args + + cmake(*cmake_args) + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/nettle/package.py b/var/spack/repos/builtin/packages/nettle/package.py new file mode 100644 index 0000000000..cd600b0b87 --- /dev/null +++ b/var/spack/repos/builtin/packages/nettle/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Nettle(Package): + """The Nettle package contains the low-level cryptographic library + that is designed to fit easily in many contexts.""" + + homepage = "http://www.example.com" + url = "http://ftp.gnu.org/gnu/nettle/nettle-2.7.1.tar.gz" + + version('2.7', '2caa1bd667c35db71becb93c5d89737f') + + depends_on('gmp') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/ompss/package.py b/var/spack/repos/builtin/packages/ompss/package.py new file mode 100644 index 0000000000..e09e0a624f --- /dev/null +++ b/var/spack/repos/builtin/packages/ompss/package.py @@ -0,0 +1,50 @@ +from spack import * +import os +import glob + +# working config lines for ompss 14.06 : +#./nanox-0.7/config.log: $ ./configure --prefix=/usr/gapps/exmatex/ompss --with-mcc=/usr/gapps/exmatex/ompss/ --with-hwloc=/usr +#./mcxx-1.99.2/config.log: $ ./configure --prefix=/usr/gapps/exmatex/ompss --with-nanox=/usr/gapps/exmatex/ompss --enable-ompss --with-mpi=/opt/mvapich2-intel-shmem-1.7 --enable-tl-openmp-profile --enable-tl-openmp-intel + +class Ompss(Package): + """OmpSs is an effort to integrate features from the StarSs + programming model developed by BSC into a single programming + model. In particular, our objective is to extend OpenMP with + new directives to support asynchronous parallelism and + heterogeneity (devices like GPUs). However, it can also be + understood as new directives extending other accelerator based + APIs like CUDA or OpenCL. 
Our OmpSs environment is built on top + of our Mercurium compiler and Nanos++ runtime system.""" + homepage = "http://pm.bsc.es/" + url = "http://pm.bsc.es/sites/default/files/ftp/ompss/releases/ompss-14.10.tar.gz" + list_url = 'http://pm.bsc.es/ompss-downloads' + + version('14.10', '404d161265748f2f96bb35fd8c7e79ee') + + # all dependencies are optional, really + depends_on("mpi") + #depends_on("openmp") + depends_on("hwloc") + depends_on("extrae") + + def install(self, spec, prefix): + if 'openmpi' in spec: + mpi = spec['openmpi'] + elif 'mpich' in spec: + mpi = spec['mpich'] + elif 'mvapich' in spec: + mpi = spec['mvapich'] + + openmp_options = ["--enable-tl-openmp-profile"] + if spec.satisfies('%intel'): + openmp_options.append( "--enable-tl-openmp-intel" ) + + os.chdir(glob.glob('./nanox-*').pop()) + configure("--prefix=%s" % prefix, "--with-mcc=%s" % prefix, "--with-extrae=%s" % spec['extrae'].prefix, "--with-hwloc=%s" % spec['hwloc'].prefix) + make() + make("install") + + os.chdir(glob.glob('../mcxx-*').pop()) + configure("--prefix=%s" % prefix, "--with-nanox=%s" % prefix, "--enable-ompss", "--with-mpi=%s" % mpi.prefix, *openmp_options) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/ompt-openmp/package.py b/var/spack/repos/builtin/packages/ompt-openmp/package.py new file mode 100644 index 0000000000..5d380ebd77 --- /dev/null +++ b/var/spack/repos/builtin/packages/ompt-openmp/package.py @@ -0,0 +1,23 @@ +from spack import * + +class OmptOpenmp(Package): + """LLVM/Clang OpenMP runtime with OMPT support. This is a fork of the OpenMPToolsInterface/LLVM-openmp fork of the official LLVM OpenMP mirror. This library provides a drop-in replacement of the OpenMP runtimes for GCC, Intel and LLVM/Clang.""" + homepage = "https://github.com/OpenMPToolsInterface/LLVM-openmp" + url = "http://github.com/khuck/LLVM-openmp/archive/v0.1-spack.tar.gz" + + version('spack', '35227b2726e377faa433fc841226e036') + + # depends_on("foo") + + def install(self, spec, prefix): + with working_dir("runtime/build", create=True): + + # FIXME: Modify the configure line to suit your build system here. + cmake('-DCMAKE_C_COMPILER=%s' % self.compiler.cc, + '-DCMAKE_CXX_COMPILER=%s' % self.compiler.cxx, + '-DCMAKE_INSTALL_PREFIX=%s' % prefix, + '..', *std_cmake_args) + + # FIXME: Add logic to build and install here + make() + make("install") diff --git a/var/spack/repos/builtin/packages/opari2/package.py b/var/spack/repos/builtin/packages/opari2/package.py new file mode 100644 index 0000000000..daaee61e3a --- /dev/null +++ b/var/spack/repos/builtin/packages/opari2/package.py @@ -0,0 +1,65 @@ +# FIXME: Add copyright statement here + +from spack import * +from contextlib import closing + +class Opari2(Package): + """OPARI2 is a source-to-source instrumentation tool for OpenMP and + hybrid codes. It surrounds OpenMP directives and runtime library + calls with calls to the POMP2 measurement interface. + OPARI2 will provide you with a new initialization method that allows + for multi-directory and parallel builds as well as the usage of + pre-instrumented libraries. Furthermore, an efficient way of + tracking parent-child relationships was added. Additionally, we + extended OPARI2 to support instrumentation of OpenMP 3.0 + tied tasks. 
""" + + homepage = "http://www.vi-hps.org/projects/score-p" + url = "http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.2.tar.gz" + + version('1.1.2', '9a262c7ca05ff0ab5f7775ae96f3539e') + + backend_user_provided = """\ +CC=cc +CXX=c++ +F77=f77 +FC=f90 +CFLAGS=-fPIC +CXXFLAGS=-fPIC +""" + frontend_user_provided = """\ +CC_FOR_BUILD=cc +CXX_FOR_BUILD=c++ +F77_FOR_BUILD=f70 +FC_FOR_BUILD=f90 +CFLAGS_FOR_BUILD=-fPIC +CXXFLAGS_FOR_BUILD=-fPIC +""" + mpi_user_provided = """\ +MPICC=mpicc +MPICXX=mpicxx +MPIF77=mpif77 +MPIFC=mpif90 +MPI_CFLAGS=-fPIC +MPI_CXXFLAGS=-fPIC +""" + + def install(self, spec, prefix): + # Use a custom compiler configuration, otherwise the score-p + # build system messes with spack's compiler settings. + # Create these three files in the build directory + with closing(open("platform-backend-user-provided", "w")) as backend_file: + backend_file.write(self.backend_user_provided) + with closing(open("platform-frontend-user-provided", "w")) as frontend_file: + frontend_file.write(self.frontend_user_provided) + with closing(open("platform-mpi-user-provided", "w")) as mpi_file: + mpi_file.write(self.mpi_user_provided) + + # FIXME: Modify the configure line to suit your build system here. + configure("--prefix=%s" % prefix, + "--with-custom-compilers", + "--enable-shared") + + # FIXME: Add logic to build and install here + make() + make("install") diff --git a/var/spack/repos/builtin/packages/openmpi/ad_lustre_rwcontig_open_source.patch b/var/spack/repos/builtin/packages/openmpi/ad_lustre_rwcontig_open_source.patch new file mode 100644 index 0000000000..daa825ccbe --- /dev/null +++ b/var/spack/repos/builtin/packages/openmpi/ad_lustre_rwcontig_open_source.patch @@ -0,0 +1,11 @@ +--- a/ompi/mca/io/romio/romio/adio/ad_lustre/ad_lustre_rwcontig.c 2013-12-10 12:05:44.806417000 -0800 ++++ b/ompi/mca/io/romio/romio/adio/ad_lustre/ad_lustre_rwcontig.c 2013-12-10 11:53:03.295622000 -0800 +@@ -8,7 +8,7 @@ + * Copyright (C) 2008 Sun Microsystems, Lustre group + */ + +-#define _XOPEN_SOURCE 600 ++//#define _XOPEN_SOURCE 600 + #include + #include + #include "ad_lustre.h" diff --git a/var/spack/repos/builtin/packages/openmpi/llnl-platforms.patch b/var/spack/repos/builtin/packages/openmpi/llnl-platforms.patch new file mode 100644 index 0000000000..f515743c4d --- /dev/null +++ b/var/spack/repos/builtin/packages/openmpi/llnl-platforms.patch @@ -0,0 +1,151 @@ +diff -Nuar openmpi-1.6.5.orig/contrib/platform/llnl/optimized openmpi-1.6.5.llnl/contrib/platform/llnl/optimized +--- openmpi-1.6.5.orig/contrib/platform/llnl/optimized 1969-12-31 16:00:00.000000000 -0800 ++++ openmpi-1.6.5.llnl/contrib/platform/llnl/optimized 2013-08-08 23:47:12.704029000 -0700 +@@ -0,0 +1,29 @@ ++enable_dlopen=no ++enable_mem_debug=no ++enable_mem_profile=no ++enable_debug_symbols=no ++enable_binaries=yes ++enable_heterogeneous=no ++enable_debug=no ++enable_shared=yes ++enable_static=yes ++enable_memchecker=no ++enable_ipv6=no ++enable_mpi_f77=yes ++enable_mpi_f90=yes ++enable_mpi_cxx=yes ++enable_mpi_cxx_seek=yes ++enable_cxx_exceptions=no ++enable_ft_thread=no ++enable_per_user_config_files=no ++enable_mca_no_build=carto,crs,filem,routed-linear,snapc,pml-dr,pml-crcp2,pml-crcpw,pml-v,pml-example,crcp,btl-tcp ++enable_contrib_no_build=libnbc,vt ++with_slurm=yes ++with_pmi=yes ++with_tm=no ++with_openib=yes ++with_psm=yes ++with_devel_headers=yes ++with_io_romio_flags=--with-file-system=ufs+nfs+lustre ++with_memory_manager=ptmalloc2 ++with_valgrind=no +diff -Nuar 
openmpi-1.6.5.orig/contrib/platform/llnl/optimized.conf openmpi-1.6.5.llnl/contrib/platform/llnl/optimized.conf +--- openmpi-1.6.5.orig/contrib/platform/llnl/optimized.conf 1969-12-31 16:00:00.000000000 -0800 ++++ openmpi-1.6.5.llnl/contrib/platform/llnl/optimized.conf 2013-08-08 23:43:52.907553000 -0700 +@@ -0,0 +1,114 @@ ++# ++# Copyright (c) 2004-2005 The Trustees of Indiana University and Indiana ++# University Research and Technology ++# Corporation. All rights reserved. ++# Copyright (c) 2004-2005 The University of Tennessee and The University ++# of Tennessee Research Foundation. All rights ++# reserved. ++# Copyright (c) 2004-2005 High Performance Computing Center Stuttgart, ++# University of Stuttgart. All rights reserved. ++# Copyright (c) 2004-2005 The Regents of the University of California. ++# All rights reserved. ++# Copyright (c) 2006 Cisco Systems, Inc. All rights reserved. ++# Copyright (c) 2011 Los Alamos National Security, LLC. ++# All rights reserved. ++# $COPYRIGHT$ ++# ++# Additional copyrights may follow ++# ++# $HEADER$ ++# ++ ++# This is the default system-wide MCA parameters defaults file. ++# Specifically, the MCA parameter "mca_param_files" defaults to a ++# value of ++# "$HOME/.openmpi/mca-params.conf:$sysconf/openmpi-mca-params.conf" ++# (this file is the latter of the two). So if the default value of ++# mca_param_files is not changed, this file is used to set system-wide ++# MCA parameters. This file can therefore be used to set system-wide ++# default MCA parameters for all users. Of course, users can override ++# these values if they want, but this file is an excellent location ++# for setting system-specific MCA parameters for those users who don't ++# know / care enough to investigate the proper values for them. ++ ++# Note that this file is only applicable where it is visible (in a ++# filesystem sense). Specifically, MPI processes each read this file ++# during their startup to determine what default values for MCA ++# parameters should be used. mpirun does not bundle up the values in ++# this file from the node where it was run and send them to all nodes; ++# the default value decisions are effectively distributed. Hence, ++# these values are only applicable on nodes that "see" this file. If ++# $sysconf is a directory on a local disk, it is likely that changes ++# to this file will need to be propagated to other nodes. If $sysconf ++# is a directory that is shared via a networked filesystem, changes to ++# this file will be visible to all nodes that share this $sysconf. ++ ++# The format is straightforward: one per line, mca_param_name = ++# rvalue. Quoting is ignored (so if you use quotes or escape ++# characters, they'll be included as part of the value). For example: ++ ++# Disable run-time MPI parameter checking ++# mpi_param_check = 0 ++ ++# Note that the value "~/" will be expanded to the current user's home ++# directory. For example: ++ ++# Change component loading path ++# component_path = /usr/local/lib/openmpi:~/my_openmpi_components ++ ++# See "ompi_info --param all all" for a full listing of Open MPI MCA ++# parameters available and their default values. 
++# ++ ++# Basic behavior to smooth startup ++mca_component_show_load_errors = 0 ++orte_abort_timeout = 10 ++opal_set_max_sys_limits = 1 ++orte_report_launch_progress = 1 ++ ++# Define timeout for daemons to report back during launch ++orte_startup_timeout = 10000 ++ ++## Protect the shared file systems ++orte_no_session_dirs = /p,/usr/local,/usr/global,/nfs/tmp1,/nfs/tmp2 ++orte_tmpdir_base = /tmp ++ ++## Require an allocation to run - protects the frontend ++## from inadvertent job executions ++orte_allocation_required = 1 ++ ++## MPI behavior ++## Do NOT specify mpi_leave_pinned so system ++## can figure out for itself whether or not ++## it is supported and usable ++orte_notifier = syslog ++ ++## Add the interface for out-of-band communication ++## and set it up ++oob_tcp_if_include=ib0 ++oob_tcp_peer_retries = 1000 ++oob_tcp_disable_family = IPv6 ++oob_tcp_listen_mode = listen_thread ++oob_tcp_sndbuf = 32768 ++oob_tcp_rcvbuf = 32768 ++ ++## Define the MPI interconnects ++btl = sm,openib,self ++ ++## We are using the PSM MTL by default ++## There can only be one! ++pml = cm ++ ++## Setup OpenIB - just in case ++btl_openib_want_fork_support = 0 ++btl_openib_cpc_include = oob ++btl_openib_receive_queues = S,4096,1024:S,12288,512:S,65536,512 ++ ++## Enable cpu affinity ++opal_paffinity_alone = 1 ++ ++## Setup MPI options ++mpi_show_handle_leaks = 0 ++mpi_warn_on_fork = 1 ++mpi_abort_print_stack = 0 ++ diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py new file mode 100644 index 0000000000..5e429dedf5 --- /dev/null +++ b/var/spack/repos/builtin/packages/openmpi/package.py @@ -0,0 +1,109 @@ +import os + +from spack import * + + +class Openmpi(Package): + """Open MPI is a project combining technologies and resources from + several other projects (FT-MPI, LA-MPI, LAM/MPI, and PACX-MPI) + in order to build the best MPI library available. A completely + new MPI-2 compliant implementation, Open MPI offers advantages + for system and software vendors, application developers and + computer science researchers. + """ + + homepage = "http://www.open-mpi.org" + + version('1.10.0', '280cf952de68369cebaca886c5ce0304', + url = "http://www.open-mpi.org/software/ompi/v1.10/downloads/openmpi-1.10.0.tar.bz2") + version('1.8.8', '0dab8e602372da1425e9242ae37faf8c', + url = 'http://www.open-mpi.org/software/ompi/v1.8/downloads/openmpi-1.8.8.tar.bz2') + version('1.6.5', '03aed2a4aa4d0b27196962a2a65fc475', + url = "http://www.open-mpi.org/software/ompi/v1.6/downloads/openmpi-1.6.5.tar.bz2") + + patch('ad_lustre_rwcontig_open_source.patch', when="@1.6.5") + patch('llnl-platforms.patch', when="@1.6.5") + + provides('mpi@:2.2', when='@1.6.5') # Open MPI 1.6.5 supports MPI-2.2 + provides('mpi@:3.0', when='@1.8.8') # Open MPI 1.8.8 supports MPI-3.0 + provides('mpi@:3.0', when='@1.10.0') # Open MPI 1.10.0 supports MPI-3.0 + + + def setup_dependent_environment(self, module, spec, dep_spec): + """For dependencies, make mpicc's use spack wrapper.""" + os.environ['OMPI_CC'] = 'cc' + os.environ['OMPI_CXX'] = 'c++' + os.environ['OMPI_FC'] = 'f90' + os.environ['OMPI_F77'] = 'f77' + + + def install(self, spec, prefix): + config_args = ["--prefix=%s" % prefix] + + # TODO: use variants for this, e.g. +lanl, +llnl, etc. 
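+        # A hedged sketch of that TODO (the 'lanl' and 'llnl' variants are
+        # hypothetical and not declared anywhere in this patch):
+        #
+        #   variant('lanl', default=False, description='Use the LANL TLCC2 platform file')
+        #   variant('llnl', default=False, description='Use the LLNL optimized platform file')
+        #
+        #   if self.version == ver('1.6.5'):
+        #       if '+lanl' in spec:
+        #           config_args.append('--with-platform=contrib/platform/lanl/tlcc2/optimized-nopanasas')
+        #       elif '+llnl' in spec:
+        #           config_args.append('--with-platform=contrib/platform/llnl/optimized')
+        #
+        # with the variant() calls living at class level next to the version()
+        # directives.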
+ # use this for LANL builds, but for LLNL builds, we need: + # "--with-platform=contrib/platform/llnl/optimized" + if self.version == ver("1.6.5") and '+lanl' in spec: + config_args.append("--with-platform=contrib/platform/lanl/tlcc2/optimized-nopanasas") + + # TODO: Spack should make it so that you can't actually find + # these compilers if they're "disabled" for the current + # compiler configuration. + if not self.compiler.f77 and not self.compiler.fc: + config_args.append("--enable-mpi-fortran=no") + + configure(*config_args) + make() + make("install") + + self.filter_compilers() + + + def filter_compilers(self): + """Run after install to make the MPI compilers use the + compilers that Spack built the package with. + + If this isn't done, they'll have CC, CXX and FC set + to Spack's generic cc, c++ and f90. We want them to + be bound to whatever compiler they were built with. + """ + kwargs = { 'ignore_absent' : True, 'backup' : False, 'string' : False } + dir = os.path.join(self.prefix, 'share/openmpi/') + + cc_wrappers = ['mpicc-vt-wrapper-data.txt', 'mpicc-wrapper-data.txt', + 'ortecc-wrapper-data.txt', 'shmemcc-wrapper-data.txt'] + + cxx_wrappers = ['mpic++-vt-wrapper-data.txt', 'mpic++-wrapper-data.txt', + 'ortec++-wrapper-data.txt'] + + fc_wrappers = ['mpifort-vt-wrapper-data.txt', + 'mpifort-wrapper-data.txt', 'shmemfort-wrapper-data.txt'] + + for wrapper in cc_wrappers: + filter_file('compiler=.*', 'compiler=%s' % self.compiler.cc, + os.path.join(dir, wrapper), **kwargs) + + for wrapper in cxx_wrappers: + filter_file('compiler=.*', 'compiler=%s' % self.compiler.cxx, + os.path.join(dir, wrapper), **kwargs) + + for wrapper in fc_wrappers: + filter_file('compiler=.*', 'compiler=%s' % self.compiler.fc, + os.path.join(dir, wrapper), **kwargs) + + # These are symlinks in newer versions, so check that here + f77_wrappers = ['mpif77-vt-wrapper-data.txt', 'mpif77-wrapper-data.txt'] + f90_wrappers = ['mpif90-vt-wrapper-data.txt', 'mpif90-wrapper-data.txt'] + + for wrapper in f77_wrappers: + path = os.path.join(dir, wrapper) + if not os.path.islink(path): + filter_file('compiler=.*', 'compiler=%s' % self.compiler.f77, + path, **kwargs) + for wrapper in f90_wrappers: + path = os.path.join(dir, wrapper) + if not os.path.islink(path): + filter_file('compiler=.*', 'compiler=%s' % self.compiler.fc, + path, **kwargs) + + diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py new file mode 100644 index 0000000000..c5a8aeb9dc --- /dev/null +++ b/var/spack/repos/builtin/packages/openssl/package.py @@ -0,0 +1,26 @@ +from spack import * + +class Openssl(Package): + """The OpenSSL Project is a collaborative effort to develop a + robust, commercial-grade, full-featured, and Open Source + toolkit implementing the Secure Sockets Layer (SSL v2/v3) and + Transport Layer Security (TLS v1) protocols as well as a + full-strength general purpose cryptography library.""" + homepage = "http://www.openssl.org" + url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz" + + version('1.0.1h', '8d6d684a9430d5cc98a62a5d8fbda8cf') + + depends_on("zlib") + parallel = False + + def install(self, spec, prefix): + config = Executable("./config") + config("--prefix=%s" % prefix, + "--openssldir=%s/etc/openssl" % prefix, + "zlib", + "no-krb5", + "shared") + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/otf/package.py b/var/spack/repos/builtin/packages/otf/package.py new file mode 100644 index 0000000000..52893dd265 --- /dev/null 
+++ b/var/spack/repos/builtin/packages/otf/package.py @@ -0,0 +1,21 @@ +from spack import * + +class Otf(Package): + """To improve scalability for very large and massively parallel + traces the Open Trace Format (OTF) is developed at ZIH as a + successor format to the Vampir Trace Format (VTF3).""" + + homepage = "http://tu-dresden.de/die_tu_dresden/zentrale_einrichtungen/zih/forschung/projekte/otf/index_html/document_view?set_language=en" + url = "http://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz" + + version('1.12.5salmon', 'bf260198633277031330e3356dcb4eec') + + depends_on('zlib') + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix, + '--without-vtf3', + '--with-zlib', + '--with-zlibsymbols') + make() + make("install") diff --git a/var/spack/repos/builtin/packages/otf2/package.py b/var/spack/repos/builtin/packages/otf2/package.py new file mode 100644 index 0000000000..fa0a5898b6 --- /dev/null +++ b/var/spack/repos/builtin/packages/otf2/package.py @@ -0,0 +1,74 @@ +# FIXME: Add copyright + +from spack import * +from contextlib import closing +import os + +class Otf2(Package): + """The Open Trace Format 2 is a highly scalable, memory efficient event + trace data format plus support library.""" + + homepage = "http://www.vi-hps.org/score-p" + url = "http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz" + + version('1.4', 'a23c42e936eb9209c4e08b61c3cf5092', + url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz") + version('1.3.1', 'd0ffc4e858455ace4f596f910e68c9f2', + url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.3.1.tar.gz") + version('1.2.1', '8fb3e11fb7489896596ae2c7c83d7fc8', + url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.2.1.tar.gz") + + backend_user_provided = """\ +CC=cc +CXX=c++ +F77=f77 +FC=f90 +CFLAGS=-fPIC +CXXFLAGS=-fPIC +""" + frontend_user_provided = """\ +CC_FOR_BUILD=cc +CXX_FOR_BUILD=c++ +F77_FOR_BUILD=f70 +FC_FOR_BUILD=f90 +CFLAGS_FOR_BUILD=-fPIC +CXXFLAGS_FOR_BUILD=-fPIC +""" + mpi_user_provided = """\ +MPICC=cc +MPICXX=c++ +MPIF77=f77 +MPIFC=f90 +MPI_CFLAGS=-fPIC +MPI_CXXFLAGS=-fPIC +""" + + @when('@:1.2.1') + def version_specific_args(self): + return ["--with-platform=disabled", "CC=cc", "CXX=c++", "F77=f77", "F90=f90", "CFLAGS=-fPIC", "CXXFLAGS=-fPIC"] + + @when('@1.3:') + def version_specific_args(self): + # TODO: figure out what scorep's build does as of otf2 1.3 + return ["--with-custom-compilers"] + + def install(self, spec, prefix): + # Use a custom compiler configuration, otherwise the score-p + # build system messes with spack's compiler settings. 
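+        # (Minor aside: contextlib.closing is not strictly required here, since
+        # the file object returned by open() is itself a context manager, i.e.
+        #   with open("platform-backend-user-provided", "w") as backend_file:
+        # would behave the same; the closing() form is kept as written.)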
+ # Create these three files in the build directory + with closing(open("platform-backend-user-provided", "w")) as backend_file: + backend_file.write(self.backend_user_provided) + with closing(open("platform-frontend-user-provided", "w")) as frontend_file: + frontend_file.write(self.frontend_user_provided) + with closing(open("platform-mpi-user-provided", "w")) as mpi_file: + mpi_file.write(self.mpi_user_provided) + + configure_args=["--prefix=%s" % prefix, + "--enable-shared"] + + configure_args.extend(self.version_specific_args()) + + configure(*configure_args) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/pango/package.py b/var/spack/repos/builtin/packages/pango/package.py new file mode 100644 index 0000000000..df43625bf5 --- /dev/null +++ b/var/spack/repos/builtin/packages/pango/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Pango(Package): + """Pango is a library for laying out and rendering of text, with + an emphasis on internationalization. It can be used anywhere + that text layout is needed, though most of the work on Pango so + far has been done in the context of the GTK+ widget toolkit.""" + homepage = "http://www.pango.org" + url = "http://ftp.gnome.org/pub/gnome/sources/pango/1.36/pango-1.36.8.tar.xz" + + version('1.36.8', '217a9a753006275215fa9fa127760ece') + + depends_on("harfbuzz") + depends_on("cairo") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/papi/package.py b/var/spack/repos/builtin/packages/papi/package.py new file mode 100644 index 0000000000..596f7114d6 --- /dev/null +++ b/var/spack/repos/builtin/packages/papi/package.py @@ -0,0 +1,35 @@ +from spack import * +import os + +class Papi(Package): + """PAPI provides the tool designer and application engineer with a + consistent interface and methodology for use of the performance + counter hardware found in most major microprocessors. PAPI + enables software engineers to see, in near real time, the + relation between software performance and processor events. In + addition Component PAPI provides access to a collection of + components that expose performance measurement opportunites + across the hardware and software stack.""" + homepage = "http://icl.cs.utk.edu/papi/index.html" + url = "http://icl.cs.utk.edu/projects/papi/downloads/papi-5.3.0.tar.gz" + + version('5.3.0', '367961dd0ab426e5ae367c2713924ffb') + + def install(self, spec, prefix): + os.chdir("src/") + + configure_args=["--prefix=%s" % prefix] + + # need to force consistency in the use of compilers + if spec.satisfies('%gcc'): + configure_args.append('CC=gcc') + configure_args.append('MPICH_CC=gcc') + if spec.satisfies('%intel'): + configure_args.append('CC=icc') + configure_args.append('MPICH_CC=icc') + + configure(*configure_args) + + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/paraver/package.py b/var/spack/repos/builtin/packages/paraver/package.py new file mode 100644 index 0000000000..5f8a153d4c --- /dev/null +++ b/var/spack/repos/builtin/packages/paraver/package.py @@ -0,0 +1,41 @@ +from spack import * +import os + +class Paraver(Package): + """"A very powerful performance visualization and analysis tool + based on traces that can be used to analyse any information that + is expressed on its input trace format. 
Traces for parallel MPI, + OpenMP and other programs can be genereated with Extrae.""" + homepage = "http://www.bsc.es/computer-sciences/performance-tools/paraver" + url = "http://www.bsc.es/ssl/apps/performanceTools/files/paraver-sources-4.5.3.tar.gz" + + version('4.5.3', '625de9ec0d639acd18d1aaa644b38f72') + + depends_on("boost") + #depends_on("extrae") + depends_on("wx") + depends_on("wxpropgrid") + + def install(self, spec, prefix): + os.chdir("ptools_common_files") + configure("--prefix=%s" % prefix) + make() + make("install") + + os.chdir("../paraver-kernel") + #"--with-extrae=%s" % spec['extrae'].prefix, + configure("--prefix=%s" % prefix, "--with-ptools-common-files=%s" % prefix, "--with-boost=%s" % spec['boost'].prefix, "--with-boost-serialization=boost_serialization") + make() + make("install") + + os.chdir("../paraver-toolset") + configure("--prefix=%s" % prefix) + make() + make("install") + + os.chdir("../wxparaver") + #"--with-extrae=%s" % spec['extrae'].prefix, + configure("--prefix=%s" % prefix, "--with-paraver=%s" % prefix, "--with-boost=%s" % spec['boost'].prefix, "--with-boost-serialization=boost_serialization", "--with-wxdir=%s" % spec['wx'].prefix.bin) + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py new file mode 100644 index 0000000000..a0ff812ca2 --- /dev/null +++ b/var/spack/repos/builtin/packages/paraview/package.py @@ -0,0 +1,72 @@ +from spack import * + +class Paraview(Package): + homepage = 'http://www.paraview.org' + url = 'http://www.paraview.org/files/v4.4/ParaView-v4.4.0-source.tar.gz' + + version('4.4.0', 'fa1569857dd680ebb4d7ff89c2227378', url='http://www.paraview.org/files/v4.4/ParaView-v4.4.0-source.tar.gz') + + variant('python', default=False, description='Enable Python support') + variant('matplotlib', default=False, description='Enable Matplotlib support') + variant('numpy', default=False, description='Enable NumPy support') + + variant('tcl', default=False, description='Enable TCL support') + + variant('mpi', default=False, description='Enable MPI support') + + variant('osmesa', default=False, description='Enable OSMesa support') + variant('qt', default=False, description='Enable Qt support') + + depends_on('python', when='+python') + depends_on('py-numpy', when='+python+numpy') + depends_on('py-matplotlib', when='+python+matplotlib') + depends_on('tcl', when='+tcl') + depends_on('mpi', when='+mpi') + depends_on('qt', when='+qt') + + depends_on('bzip2') + depends_on('freetype') + depends_on('hdf5') # drags in mpi + depends_on('jpeg') + depends_on('libpng') + depends_on('libtiff') + #depends_on('libxml2') # drags in python + depends_on('netcdf') + #depends_on('protobuf') # version mismatches? 
+ #depends_on('sqlite') # external version not supported + depends_on('zlib') + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + def feature_to_bool(feature, on='ON', off='OFF'): + if feature in spec: + return on + return off + + def nfeature_to_bool(feature): + return feature_to_bool(feature, on='OFF', off='ON') + + feature_args = std_cmake_args[:] + feature_args.append('-DPARAVIEW_BUILD_QT_GUI:BOOL=%s' % feature_to_bool('+qt')) + feature_args.append('-DPARAVIEW_ENABLE_PYTHON:BOOL=%s' % feature_to_bool('+python')) + feature_args.append('-DPARAVIEW_USE_MPI:BOOL=%s' % feature_to_bool('+mpi')) + feature_args.append('-DVTK_ENABLE_TCL_WRAPPING:BOOL=%s' % feature_to_bool('+tcl')) + feature_args.append('-DVTK_OPENGL_HAS_OSMESA:BOOL=%s' % feature_to_bool('+osmesa')) + feature_args.append('-DVTK_USE_X:BOOL=%s' % nfeature_to_bool('+osmesa')) + feature_args.append('-DVTK_RENDERING_BACKEND:STRING=%s' % feature_to_bool('+opengl2', 'OpenGL2', 'OpenGL')) + + feature_args.extend(std_cmake_args) + + cmake('..', + '-DCMAKE_INSTALL_PREFIX:PATH=%s' % prefix, + '-DBUILD_TESTING:BOOL=OFF', + '-DVTK_USER_SYSTEM_FREETYPE:BOOL=ON', + '-DVTK_USER_SYSTEM_HDF5:BOOL=ON', + '-DVTK_USER_SYSTEM_JPEG:BOOL=ON', + #'-DVTK_USER_SYSTEM_LIBXML2:BOOL=ON', + '-DVTK_USER_SYSTEM_NETCDF:BOOL=ON', + '-DVTK_USER_SYSTEM_TIFF:BOOL=ON', + '-DVTK_USER_SYSTEM_ZLIB:BOOL=ON', + *feature_args) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/parmetis/package.py b/var/spack/repos/builtin/packages/parmetis/package.py new file mode 100644 index 0000000000..d8cd337304 --- /dev/null +++ b/var/spack/repos/builtin/packages/parmetis/package.py @@ -0,0 +1,26 @@ +from spack import * + +class Parmetis(Package): + """ParMETIS is an MPI-based parallel library that implements a + variety of algorithms for partitioning unstructured graphs, + meshes, and for computing fill-reducing orderings of sparse + matrices.""" + homepage = "http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview" + url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/parmetis-4.0.3.tar.gz" + + version('4.0.3', 'f69c479586bf6bb7aff6a9bc0c739628') + + depends_on('mpi') + + def install(self, spec, prefix): + cmake(".", + '-DGKLIB_PATH=%s/metis/GKlib' % pwd(), + '-DMETIS_PATH=%s/metis' % pwd(), + '-DSHARED=1', + '-DCMAKE_C_COMPILER=mpicc', + '-DCMAKE_CXX_COMPILER=mpicxx', + '-DSHARED=1', + *std_cmake_args) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/parpack/package.py b/var/spack/repos/builtin/packages/parpack/package.py new file mode 100644 index 0000000000..622aceca04 --- /dev/null +++ b/var/spack/repos/builtin/packages/parpack/package.py @@ -0,0 +1,43 @@ +from spack import * +import os +import shutil + +class Parpack(Package): + """ARPACK is a collection of Fortran77 subroutines designed to solve large + scale eigenvalue problems.""" + + homepage = "http://www.caam.rice.edu/software/ARPACK/download.html" + url = "http://www.caam.rice.edu/software/ARPACK/SRC/parpack96.tar.Z" + + version('96', 'a175f70ff71837a33ff7e4b0b6054f43') + + depends_on('mpi') + depends_on('blas') + depends_on('lapack') + + def patch(self): + # Filter the CJ makefile to make a spack one. + shutil.move('ARMAKES/ARmake.CJ', 'ARmake.inc') + mf = FileFilter('ARmake.inc') + + # Be sure to use Spack F77 wrapper + mf.filter('^FC.*', 'FC = f77') + mf.filter('^FFLAGS.*', 'FFLAGS = -O2 -g') + + # Set up some variables. 
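+        # Each FileFilter.filter(regex, replacement) call below rewrites the
+        # matching ARmake.inc line in place: PLAT is cleared, while home,
+        # BLASdir and LAPACKdir are pointed at the unpacked source tree
+        # (os.getcwd() at patch time) and the blas/lapack install prefixes.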
+ mf.filter('^PLAT.*', 'PLAT = ') + mf.filter('^home.*', 'home = %s' % os.getcwd()) + mf.filter('^BLASdir.*', 'BLASdir = %s' % self.spec['blas'].prefix) + mf.filter('^LAPACKdir.*', 'LAPACKdir = %s' % self.spec['lapack'].prefix) + mf.filter('^MAKE.*', 'MAKE = make') + + # build the library in our own prefix. + mf.filter('^ARPACKLIB.*', 'PARPACKLIB = %s/libparpack.a' % os.getcwd()) + + + def install(self, spec, prefix): + with working_dir('PARPACK/SRC/MPI'): + make('all') + + mkdirp(prefix.lib) + install('libparpack.a', prefix.lib) diff --git a/var/spack/repos/builtin/packages/pcre/package.py b/var/spack/repos/builtin/packages/pcre/package.py new file mode 100644 index 0000000000..3424048a6c --- /dev/null +++ b/var/spack/repos/builtin/packages/pcre/package.py @@ -0,0 +1,15 @@ +from spack import * + +class Pcre(Package): + """The PCRE package contains Perl Compatible Regular Expression + libraries. These are useful for implementing regular expression + pattern matching using the same syntax and semantics as Perl 5.""" + homepage = "http://www.pcre.org""" + url = "ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/pcre-8.36.tar.bz2" + + version('8.36', 'b767bc9af0c20bc9c1fe403b0d41ad97') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py new file mode 100644 index 0000000000..4864e39bf1 --- /dev/null +++ b/var/spack/repos/builtin/packages/petsc/package.py @@ -0,0 +1,40 @@ +from spack import * + +class Petsc(Package): + """PETSc is a suite of data structures and routines for the + scalable (parallel) solution of scientific applications modeled by + partial differential equations.""" + + homepage = "http://www.mcs.anl.gov/petsc/index.html" + url = "http://ftp.mcs.anl.gov/pub/petsc/release-snapshots/petsc-3.5.3.tar.gz" + + version('3.5.3', 'd4fd2734661e89f18ac6014b5dd1ef2f') + version('3.5.2', 'ad170802b3b058b5deb9cd1f968e7e13') + version('3.5.1', 'a557e029711ebf425544e117ffa44d8f') + + depends_on("boost") + depends_on("blas") + depends_on("lapack") + depends_on("hypre") + depends_on("parmetis") + depends_on("metis") + depends_on("hdf5") + depends_on("mpi") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "CC=cc", + "CXX=c++", + "FC=f90", + "--with-blas-lib=%s/libblas.a" % spec['blas'].prefix.lib, + "--with-lapack-lib=%s/liblapack.a" % spec['lapack'].prefix.lib, + "--with-boost-dir=%s" % spec['boost'].prefix, + "--with-hypre-dir=%s" % spec['hypre'].prefix, + "--with-parmetis-dir=%s" % spec['parmetis'].prefix, + "--with-metis-dir=%s" % spec['metis'].prefix, + "--with-hdf5-dir=%s" % spec['hdf5'].prefix, + "--with-shared-libraries=0") + + # PETSc has its own way of doing parallel make. + make('MAKE_NP=%s' % make_jobs, parallel=False) + make("install") diff --git a/var/spack/repos/builtin/packages/pidx/package.py b/var/spack/repos/builtin/packages/pidx/package.py new file mode 100644 index 0000000000..81aed62fb1 --- /dev/null +++ b/var/spack/repos/builtin/packages/pidx/package.py @@ -0,0 +1,21 @@ +from spack import * + +class Pidx(Package): + """PIDX Parallel I/O Library. + + PIDX is an efficient parallel I/O library that reads and writes + multiresolution IDX data files. 
+ """ + + homepage = "http://www.cedmav.com/pidx" + + version('1.0', git='https://github.com/sci-visus/PIDX.git', + commit='6afa1cf71d1c41263296dc049c8fabaf73c296da') + + depends_on("mpi") + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake('..', *std_cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/pixman/package.py b/var/spack/repos/builtin/packages/pixman/package.py new file mode 100644 index 0000000000..895cbdbca5 --- /dev/null +++ b/var/spack/repos/builtin/packages/pixman/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Pixman(Package): + """The Pixman package contains a library that provides low-level + pixel manipulation features such as image compositing and + trapezoid rasterization.""" + homepage = "http://www.pixman.org" + url = "http://cairographics.org/releases/pixman-0.32.6.tar.gz" + + version('0.32.6', '3a30859719a41bd0f5cccffbfefdd4c2') + + depends_on("libpng") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--disable-gtk") + make() + make("install") diff --git a/var/spack/repos/builtin/packages/pkg-config/package.py b/var/spack/repos/builtin/packages/pkg-config/package.py new file mode 100644 index 0000000000..9964c6ce34 --- /dev/null +++ b/var/spack/repos/builtin/packages/pkg-config/package.py @@ -0,0 +1,17 @@ +from spack import * + +class PkgConfig(Package): + """pkg-config is a helper tool used when compiling applications and libraries""" + homepage = "http://www.freedesktop.org/wiki/Software/pkg-config/" + url = "http://pkgconfig.freedesktop.org/releases/pkg-config-0.28.tar.gz" + + version('0.28', 'aa3c86e67551adc3ac865160e34a2a0d') + + parallel = False + + def install(self, spec, prefix): + configure("--prefix=%s" %prefix, "--enable-shared") + + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/pmgr_collective/package.py b/var/spack/repos/builtin/packages/pmgr_collective/package.py new file mode 100644 index 0000000000..5d9b02acc3 --- /dev/null +++ b/var/spack/repos/builtin/packages/pmgr_collective/package.py @@ -0,0 +1,37 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class PmgrCollective(Package): + """PMGR_COLLECTIVE provides a scalable network for bootstrapping + MPI jobs.""" + homepage = "http://www.sourceforge.net/projects/pmgrcollective" + url = "http://downloads.sourceforge.net/project/pmgrcollective/pmgrcollective/PMGR_COLLECTIVE-1.0/pmgr_collective-1.0.tgz" + + version('1.0', '0384d008774274cc3fc7b4d810dfd07e') + + def install(self, spec, prefix): + make('PREFIX="' + prefix + '"') + make('PREFIX="' + prefix + '"', "install") diff --git a/var/spack/repos/builtin/packages/postgresql/package.py b/var/spack/repos/builtin/packages/postgresql/package.py new file mode 100644 index 0000000000..46922b7b71 --- /dev/null +++ b/var/spack/repos/builtin/packages/postgresql/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Postgresql(Package): + """PostgreSQL is a powerful, open source object-relational + database system. It has more than 15 years of active + development and a proven architecture that has earned it a + strong reputation for reliability, data integrity, and + correctness.""" + homepage = "http://www.postgresql.org/" + url = "http://ftp.postgresql.org/pub/source/v9.3.4/postgresql-9.3.4.tar.bz2" + + version('9.3.4', 'd0a41f54c377b2d2fab4a003b0dac762') + + depends_on("openssl") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--with-openssl") + make() + make("install") diff --git a/var/spack/repos/builtin/packages/ppl/package.py b/var/spack/repos/builtin/packages/ppl/package.py new file mode 100644 index 0000000000..018d5c523d --- /dev/null +++ b/var/spack/repos/builtin/packages/ppl/package.py @@ -0,0 +1,28 @@ +from spack import * + +class Ppl(Package): + """The Parma Polyhedra Library (PPL) provides numerical + abstractions especially targeted at applications in the field of + analysis and verification of complex systems. These abstractions + include convex polyhedra, some special classes of polyhedra shapes + that offer interesting complexity/precision tradeoffs, and grids + which represent regularly spaced points that satisfy a set of + linear congruence relations. 
The library also supports finite + powersets and products of polyhedra and grids, a mixed integer + linear programming problem solver using an exact-arithmetic + version of the simplex algorithm, a parametric integer programming + solver, and primitives for termination analysis via the automatic + synthesis of linear ranking functions.""" + + homepage = "http://bugseng.com/products/ppl/" + url = "http://bugseng.com/products/ppl/download/ftp/releases/1.1/ppl-1.1.tar.gz" + + version('1.1', '4f2422c0ef3f409707af32108deb30a7') + + depends_on("gmp") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--with-gmp=%s" % spec['gmp'].prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/protobuf/package.py b/var/spack/repos/builtin/packages/protobuf/package.py new file mode 100644 index 0000000000..34085c7ce9 --- /dev/null +++ b/var/spack/repos/builtin/packages/protobuf/package.py @@ -0,0 +1,16 @@ +import os +from spack import * + +class Protobuf(Package): + """Google's data interchange format.""" + + homepage = "https://developers.google.com/protocol-buffers" + url = "https://github.com/google/protobuf/releases/download/v2.5.0/protobuf-2.5.0.tar.bz2" + + version('2.5.0', 'a72001a9067a4c2c4e0e836d0f92ece4') + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("check") + make("install") diff --git a/var/spack/repos/builtin/packages/py-basemap/package.py b/var/spack/repos/builtin/packages/py-basemap/package.py new file mode 100644 index 0000000000..45f1085ba1 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-basemap/package.py @@ -0,0 +1,20 @@ +from spack import * +import os + +class PyBasemap(Package): + """The matplotlib basemap toolkit is a library for plotting 2D data on maps in Python.""" + homepage = "http://matplotlib.org/basemap/" + url = "https://downloads.sourceforge.net/project/matplotlib/matplotlib-toolkits/basemap-1.0.7/basemap-1.0.7.tar.gz" + + version('1.0.7', '48c0557ced9e2c6e440b28b3caff2de8') + + extends('python') + depends_on('py-setuptools') + depends_on('py-numpy') + depends_on('py-matplotlib') + depends_on('py-pil') + depends_on("geos") + + def install(self, spec, prefix): + env['GEOS_DIR'] = spec['geos'].prefix + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-biopython/package.py b/var/spack/repos/builtin/packages/py-biopython/package.py new file mode 100644 index 0000000000..8ecaf48626 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-biopython/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyBiopython(Package): + """It is a distributed collaborative effort to develop Python libraries and applications which address the needs of current and future work in bioinformatics.""" + homepage = "http://biopython.org/wiki/Main_Page" + url = "http://biopython.org/DIST/biopython-1.65.tar.gz" + + version('1.65', '143e7861ade85c0a8b5e2bbdd1da1f67') + + extends('python') + depends_on('py-mx') + depends_on('py-numpy') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-cffi/package.py b/var/spack/repos/builtin/packages/py-cffi/package.py new file mode 100644 index 0000000000..a4d37483fe --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cffi/package.py @@ -0,0 +1,17 @@ +from spack import * + +class PyCffi(Package): + """Foreign Function Interface for Python calling C code""" + homepage = "http://cffi.readthedocs.org/en/latest/" + # base 
https://pypi.python.org/pypi/cffi + url = "https://pypi.python.org/packages/source/c/cffi/cffi-1.1.2.tar.gz#md5=" + + version('1.1.2', 'ca6e6c45b45caa87aee9adc7c796eaea') + + extends('python') + depends_on('py-setuptools') + depends_on('py-pycparser') + depends_on('libffi') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-cython/package.py b/var/spack/repos/builtin/packages/py-cython/package.py new file mode 100644 index 0000000000..68eb735ad9 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-cython/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PyCython(Package): + """The Cython compiler for writing C extensions for the Python language.""" + homepage = "https://pypi.python.org/pypi/cython" + url = "https://pypi.python.org/packages/source/C/Cython/cython-0.22.tar.gz" + + version('0.21.2', 'd21adb870c75680dc857cd05d41046a4') + version('0.22', '1ae25add4ef7b63ee9b4af697300d6b6') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-dateutil/package.py b/var/spack/repos/builtin/packages/py-dateutil/package.py new file mode 100644 index 0000000000..0a17f2f2d2 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-dateutil/package.py @@ -0,0 +1,16 @@ +from spack import * + +class PyDateutil(Package): + """Extensions to the standard Python datetime module.""" + homepage = "https://pypi.python.org/pypi/dateutil" + url = "https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.4.0.tar.gz" + + version('2.4.0', '75714163bb96bedd07685cdb2071b8bc') + version('2.4.2', '4ef68e1c485b09e9f034e10473e5add2') + + extends('python') + depends_on('py-setuptools') + depends_on('py-six') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-epydoc/package.py b/var/spack/repos/builtin/packages/py-epydoc/package.py new file mode 100644 index 0000000000..af05510504 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-epydoc/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyEpydoc(Package): + """Epydoc is a tool for generating API documentation documentation for Python modules, based on their docstrings.""" + homepage = "https://pypi.python.org/pypi/epydoc" + url = "https://pypi.python.org/packages/source/e/epydoc/epydoc-3.0.1.tar.gz" + + version('3.0.1', '36407974bd5da2af00bf90ca27feeb44') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-genders/package.py b/var/spack/repos/builtin/packages/py-genders/package.py new file mode 100644 index 0000000000..c49c8fd5b2 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-genders/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyGenders(Package): + """Genders is a static cluster configuration database used for cluster configuration management. 
It is used by a variety of tools and scripts for management of large clusters.""" + homepage = "https://github.com/chaos/genders" + url = "https://github.com/chaos/genders/releases/download/genders-1-22-1/genders-1.22.tar.gz" + + version('1.22', '9ea59a024dcbddb85b0ed25ddca9bc8e', url='https://github.com/chaos/genders/releases/download/genders-1-22-1/genders-1.22.tar.gz') + extends('python') + + def install(self, spec, prefix): + configure("--prefix=%s" %prefix) + make(parallel=False) + make("install") + diff --git a/var/spack/repos/builtin/packages/py-gnuplot/package.py b/var/spack/repos/builtin/packages/py-gnuplot/package.py new file mode 100644 index 0000000000..ede4472c03 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-gnuplot/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PyGnuplot(Package): + """Gnuplot.py is a Python package that allows you to create graphs from within Python using the gnuplot plotting program.""" + homepage = "http://gnuplot-py.sourceforge.net/" + url = "http://downloads.sourceforge.net/project/gnuplot-py/Gnuplot-py/1.8/gnuplot-py-1.8.tar.gz" + + version('1.8', 'abd6f571e7aec68ae7db90a5217cd5b1') + + extends('python') + depends_on('py-numpy') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-h5py/package.py b/var/spack/repos/builtin/packages/py-h5py/package.py new file mode 100644 index 0000000000..6293da5407 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-h5py/package.py @@ -0,0 +1,19 @@ +from spack import * +import re + +class PyH5py(Package): + """The h5py package provides both a high- and low-level interface to the HDF5 library from Python.""" + homepage = "https://pypi.python.org/pypi/h5py" + url = "https://pypi.python.org/packages/source/h/h5py/h5py-2.4.0.tar.gz" + + version('2.4.0', '80c9a94ae31f84885cc2ebe1323d6758') + version('2.5.0', '6e4301b5ad5da0d51b0a1e5ac19e3b74') + + extends('python', ignore=lambda f: re.match(r'bin/cy*', f)) + depends_on('hdf5') + depends_on('py-numpy') + depends_on('py-cython') + + def install(self, spec, prefix): + python('setup.py', 'configure', '--hdf5=%s' % spec['hdf5'].prefix) + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-ipython/package.py b/var/spack/repos/builtin/packages/py-ipython/package.py new file mode 100644 index 0000000000..8d0e64a07f --- /dev/null +++ b/var/spack/repos/builtin/packages/py-ipython/package.py @@ -0,0 +1,16 @@ +from spack import * + +class PyIpython(Package): + """IPython provides a rich toolkit to help you make the most out of using Python interactively.""" + homepage = "https://pypi.python.org/pypi/ipython" + url = "https://pypi.python.org/packages/source/i/ipython/ipython-2.3.1.tar.gz" + + version('2.3.1', '2b7085525dac11190bfb45bb8ec8dcbf') + version('3.1.0', 'a749d90c16068687b0ec45a27e72ef8f') + + extends('python') + depends_on('py-pygments') + depends_on('py-setuptools') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-libxml2/package.py b/var/spack/repos/builtin/packages/py-libxml2/package.py new file mode 100644 index 0000000000..59005428e4 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-libxml2/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyLibxml2(Package): + """A Python wrapper around libxml2.""" + homepage = "https://xmlsoft.org/python.html" + url = 
"ftp://xmlsoft.org/libxml2/python/libxml2-python-2.6.21.tar.gz" + + version('2.6.21', '229dd2b3d110a77defeeaa73af83f7f3') + + extends('python') + depends_on('libxml2') + depends_on('libxslt') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-lockfile/package.py b/var/spack/repos/builtin/packages/py-lockfile/package.py new file mode 100644 index 0000000000..8722914d94 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-lockfile/package.py @@ -0,0 +1,23 @@ +from spack import * + +class PyLockfile(Package): + """The lockfile package exports a LockFile class which provides a + simple API for locking files. Unlike the Windows msvcrt.locking + function, the fcntl.lockf and flock functions, and the + deprecated posixfile module, the API is identical across both + Unix (including Linux and Mac) and Windows platforms. The lock + mechanism relies on the atomic nature of the link (on Unix) and + mkdir (on Windows) system calls. An implementation based on + SQLite is also provided, more as a demonstration of the + possibilities it provides than as production-quality code. + """ + homepage = "https://pypi.python.org/pypi/lockfile" + url = "https://pypi.python.org/packages/source/l/lockfile/lockfile-0.10.2.tar.gz" + + version('0.10.2', '1aa6175a6d57f082cd12e7ac6102ab15') + + extends("python") + depends_on("py-setuptools") + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-mako/package.py b/var/spack/repos/builtin/packages/py-mako/package.py new file mode 100644 index 0000000000..3e91ffd8e5 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-mako/package.py @@ -0,0 +1,16 @@ +from spack import * + +class PyMako(Package): + """A super-fast templating language that borrows the best + ideas from the existing templating languages.""" + + homepage = "https://pypi.python.org/pypi/mako" + url = "https://pypi.python.org/packages/source/M/Mako/Mako-1.0.1.tar.gz" + + version('1.0.1', '9f0aafd177b039ef67b90ea350497a54') + + depends_on('py-setuptools') + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-matplotlib/package.py b/var/spack/repos/builtin/packages/py-matplotlib/package.py new file mode 100644 index 0000000000..e7ce3dfd24 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-matplotlib/package.py @@ -0,0 +1,47 @@ +from spack import * +import os + +class PyMatplotlib(Package): + """Python plotting package.""" + homepage = "https://pypi.python.org/pypi/matplotlib" + url = "https://pypi.python.org/packages/source/m/matplotlib/matplotlib-1.4.2.tar.gz" + + version('1.4.2', '7d22efb6cce475025733c50487bd8898') + version('1.4.3', '86af2e3e3c61849ac7576a6f5ca44267') + + extends('python', ignore=r'bin/nosetests.*$') + + depends_on('py-pyside') + depends_on('py-ipython') + depends_on('py-pyparsing') + depends_on('py-six') + depends_on('py-dateutil') + depends_on('py-pytz') + depends_on('py-nose') + depends_on('py-numpy') + + depends_on('qt') + depends_on('bzip2') + depends_on('tcl') + depends_on('tk') + depends_on('qhull') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) + + if str(self.version) in ['1.4.2', '1.4.3']: + # hack to fix configuration file + config_file = None + for p,d,f in os.walk(prefix.lib): + for file in f: + if file.find('matplotlibrc') != -1: + config_file 
= join_path(p, 'matplotlibrc') + print config_file + if config_file == None: + raise InstallError('could not find config file') + filter_file(r'backend : pyside', + 'backend : Qt4Agg', + config_file) + filter_file(r'#backend.qt4 : PyQt4', + 'backend.qt4 : PySide', + config_file) diff --git a/var/spack/repos/builtin/packages/py-mock/package.py b/var/spack/repos/builtin/packages/py-mock/package.py new file mode 100644 index 0000000000..3b08428ba0 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-mock/package.py @@ -0,0 +1,17 @@ +from spack import * + +class PyMock(Package): + """mock is a library for testing in Python. It allows you to replace parts + of your system under test with mock objects and make assertions about how + they have been used.""" + + homepage = "https://github.com/testing-cabal/mock" + url = "https://pypi.python.org/packages/source/m/mock/mock-1.3.0.tar.gz" + + version('1.3.0', '73ee8a4afb3ff4da1b4afa287f39fdeb') + + extends('python') + depends_on('py-setuptools@17.1:') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-mpi4py/package.py b/var/spack/repos/builtin/packages/py-mpi4py/package.py new file mode 100644 index 0000000000..8001689a18 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-mpi4py/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PyMpi4py(Package): + """This package provides Python bindings for the Message Passing Interface (MPI) standard. It is implemented on top of the MPI-1/MPI-2 specification and exposes an API which grounds on the standard MPI-2 C++ bindings.""" + homepage = "https://pypi.python.org/pypi/mpi4py" + url = "https://pypi.python.org/packages/source/m/mpi4py/mpi4py-1.3.1.tar.gz" + + version('1.3.1', 'dbe9d22bdc8ed965c23a7ceb6f32fc3c') + extends('python') + depends_on('py-setuptools') + depends_on('mpi') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-mx/package.py b/var/spack/repos/builtin/packages/py-mx/package.py new file mode 100644 index 0000000000..717ee0562b --- /dev/null +++ b/var/spack/repos/builtin/packages/py-mx/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyMx(Package): + """The eGenix.com mx Base Distribution for Python is a collection of professional quality software tools which enhance Python's usability in many important areas such as fast text searching, date/time processing and high speed data types.""" + homepage = "http://www.egenix.com/products/python/mxBase/" + url = "https://downloads.egenix.com/python/egenix-mx-base-3.2.8.tar.gz" + + version('3.2.8', '9d9d3a25f9dc051a15e97f452413423b') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-nose/package.py b/var/spack/repos/builtin/packages/py-nose/package.py new file mode 100644 index 0000000000..e7c6cf0264 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-nose/package.py @@ -0,0 +1,17 @@ +from spack import * + +class PyNose(Package): + """nose extends the test loading and running features of unittest, + making it easier to write, find and run tests.""" + + homepage = "https://pypi.python.org/pypi/nose" + url = "https://pypi.python.org/packages/source/n/nose/nose-1.3.4.tar.gz" + + version('1.3.4', '6ed7169887580ddc9a8e16048d38274d') + version('1.3.6', '0ca546d81ca8309080fc80cb389e7a16') + + extends('python', ignore=r'bin/nosetests.*$') + 
depends_on('py-setuptools') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py new file mode 100644 index 0000000000..efa109a3e9 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-numpy/package.py @@ -0,0 +1,28 @@ +from spack import * + +class PyNumpy(Package): + """array processing for numbers, strings, records, and objects.""" + homepage = "https://pypi.python.org/pypi/numpy" + url = "https://pypi.python.org/packages/source/n/numpy/numpy-1.9.1.tar.gz" + + version('1.9.1', '78842b73560ec378142665e712ae4ad9') + version('1.9.2', 'a1ed53432dbcd256398898d35bc8e645') + + extends('python') + depends_on('py-nose') + depends_on('netlib-blas+fpic') + depends_on('netlib-lapack+shared') + + def patch(self): + filter_file( + "possible_executables = \['(gfortran|g77|ifort|efl)", + "possible_executables = ['fc", + "numpy/distutils/fcompiler/gnu.py", + "numpy/distutils/fcompiler/intel.py") + + def install(self, spec, prefix): + with open('site.cfg', 'w') as f: + f.write('[DEFAULT]\n') + f.write('libraries=lapack,blas\n') + f.write('library_dirs=%s/lib:%s/lib\n' % (spec['blas'].prefix, spec['lapack'].prefix)) + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py new file mode 100644 index 0000000000..5b9997faa9 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pandas/package.py @@ -0,0 +1,25 @@ +from spack import * +import os + +class PyPandas(Package): + """pandas is a Python package providing fast, flexible, and expressive data structures designed to make working with relational or labeled data both easy and intuitive. It aims to be the fundamental high-level building block for doing practical, real world data analysis in Python. 
Additionally, it has the broader goal of becoming the most powerful and flexible open source data analysis / manipulation tool available in any language.""" + homepage = "http://pandas.pydata.org/" + url = "https://pypi.python.org/packages/source/p/pandas/pandas-0.16.0.tar.gz#md5=bfe311f05dc0c351f8955fbd1e296e73" + + version('0.16.0', 'bfe311f05dc0c351f8955fbd1e296e73') + version('0.16.1', 'fac4f25748f9610a3e00e765474bdea8') + + extends('python') + depends_on('py-dateutil') + depends_on('py-numpy') + depends_on('py-matplotlib') + depends_on('py-scipy') + depends_on('py-setuptools') + depends_on('py-pytz') + depends_on('libdrm') + depends_on('libpciaccess') + depends_on('llvm') + depends_on('mesa') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pexpect/package.py b/var/spack/repos/builtin/packages/py-pexpect/package.py new file mode 100644 index 0000000000..ff5fac84e0 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pexpect/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyPexpect(Package): + """Pexpect allows easy control of interactive console applications.""" + homepage = "https://pypi.python.org/pypi/pexpect" + url = "https://pypi.python.org/packages/source/p/pexpect/pexpect-3.3.tar.gz" + + version('3.3', '0de72541d3f1374b795472fed841dce8') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pil/package.py b/var/spack/repos/builtin/packages/py-pil/package.py new file mode 100644 index 0000000000..743b761981 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pil/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PyPil(Package): + """The Python Imaging Library (PIL) adds image processing capabilities to your Python interpreter. 
This library supports many file formats, and provides powerful image processing and graphics capabilities.""" + + homepage = "http://www.pythonware.com/products/pil/" + url = "http://effbot.org/media/downloads/Imaging-1.1.7.tar.gz" + + version('1.1.7', 'fc14a54e1ce02a0225be8854bfba478e') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pmw/package.py b/var/spack/repos/builtin/packages/py-pmw/package.py new file mode 100644 index 0000000000..56131811e9 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pmw/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyPmw(Package): + """Pmw is a toolkit for building high-level compound widgets, or megawidgets, constructed using other widgets as component parts.""" + homepage = "https://pypi.python.org/pypi/Pmw" + url = "https://pypi.python.org/packages/source/P/Pmw/Pmw-2.0.0.tar.gz" + + version('2.0.0', 'c7c3f26c4f5abaa99807edefee578fc0') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pychecker/package.py b/var/spack/repos/builtin/packages/py-pychecker/package.py new file mode 100644 index 0000000000..bda5a746aa --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pychecker/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyPychecker(Package): + """""" + homepage = "http://pychecker.sourceforge.net/" + url = "http://sourceforge.net/projects/pychecker/files/pychecker/0.8.19/pychecker-0.8.19.tar.gz" + + version('0.8.19', 'c37182863dfb09209d6ba4f38fce9d2b') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pycparser/package.py b/var/spack/repos/builtin/packages/py-pycparser/package.py new file mode 100644 index 0000000000..f2bb679d25 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pycparser/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyPycparser(Package): + """pycparser is a complete parser of the C language, written in pure python""" + homepage = "https://github.com/eliben/pycparser" + url = "https://pypi.python.org/packages/source/p/pycparser/pycparser-2.13.tar.gz" + + version('2.13', 'e4fe1a2d341b22e25da0d22f034ef32f') + + + extends('python') + depends_on('py-setuptools') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pyelftools/package.py b/var/spack/repos/builtin/packages/py-pyelftools/package.py new file mode 100644 index 0000000000..d5ad32e624 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyelftools/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PyPyelftools(Package): + """A pure-Python library for parsing and analyzing ELF files and DWARF debugging information""" + homepage = "https://pypi.python.org/pypi/pyelftools" + url = "https://pypi.python.org/packages/source/p/pyelftools/pyelftools-0.23.tar.gz" + + version('0.23', 'aa7cefa8bd2f63d7b017440c9084f310') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pygments/package.py b/var/spack/repos/builtin/packages/py-pygments/package.py new file mode 100644 index 0000000000..7e07bf6869 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pygments/package.py @@ -0,0 +1,15 @@ +from spack 
import *
+
+class PyPygments(Package):
+    """Pygments is a syntax highlighting package written in Python."""
+    homepage = "https://pypi.python.org/pypi/pygments"
+    url = "https://pypi.python.org/packages/source/P/Pygments/Pygments-2.0.1.tar.gz"
+
+    version('2.0.1', 'e0daf4c14a4fe5b630da765904de4d6c')
+    version('2.0.2', '238587a1370d62405edabd0794b3ec4a')
+
+    extends('python')
+    depends_on('py-setuptools')
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-pylint/package.py b/var/spack/repos/builtin/packages/py-pylint/package.py
new file mode 100644
index 0000000000..9579708c29
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pylint/package.py
@@ -0,0 +1,17 @@
+from spack import *
+import re
+
+class PyPylint(Package):
+    """Pylint is a static code analysis and style-checking tool for the Python language."""
+    homepage = "https://pypi.python.org/pypi/pylint"
+    url = "https://pypi.python.org/packages/source/p/pylint/pylint-1.4.1.tar.gz"
+
+    version('1.4.1', 'df7c679bdcce5019389038847e4de622')
+    version('1.4.3', '5924c1c7ca5ca23647812f5971d0ea44')
+
+    extends('python')
+    depends_on('py-nose')
+    depends_on('py-setuptools')
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-pypar/package.py b/var/spack/repos/builtin/packages/py-pypar/package.py
new file mode 100644
index 0000000000..af9c76ccd8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pypar/package.py
@@ -0,0 +1,14 @@
+from spack import *
+
+class PyPypar(Package):
+    """Pypar is an efficient but easy-to-use module that allows programs written in Python to run in parallel on multiple processors and communicate using MPI."""
+    homepage = "http://code.google.com/p/pypar/"
+    url = "https://pypar.googlecode.com/files/pypar-2.1.5_108.tgz"
+
+    version('2.1.5_108', '7a1f28327d2a3b679f9455c843d850b8', url='https://pypar.googlecode.com/files/pypar-2.1.5_108.tgz')
+    extends('python')
+    depends_on('mpi')
+
+    def install(self, spec, prefix):
+        with working_dir('source'):
+            python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-pyparsing/package.py b/var/spack/repos/builtin/packages/py-pyparsing/package.py
new file mode 100644
index 0000000000..a6e50ad139
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pyparsing/package.py
@@ -0,0 +1,13 @@
+from spack import *
+
+class PyPyparsing(Package):
+    """A Python Parsing Module."""
+    homepage = "https://pypi.python.org/pypi/pyparsing"
+    url = "https://pypi.python.org/packages/source/p/pyparsing/pyparsing-2.0.3.tar.gz"
+
+    version('2.0.3', '0fe479be09fc2cf005f753d3acc35939')
+
+    extends('python')
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-pyqt/package.py b/var/spack/repos/builtin/packages/py-pyqt/package.py
new file mode 100644
index 0000000000..8edca105bb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pyqt/package.py
@@ -0,0 +1,24 @@
+from spack import *
+
+class PyPyqt(Package):
+    """PyQt is a set of Python v2 and v3 bindings for Digia's Qt
+       application framework and runs on all platforms supported by Qt
+       including Windows, MacOS/X and Linux."""
+    homepage = "http://www.riverbankcomputing.com/software/pyqt/intro"
+    url = "http://sourceforge.net/projects/pyqt/files/PyQt4/PyQt-4.11.3/PyQt-x11-gpl-4.11.3.tar.gz"
+
+    version('4.11.3', '997c3e443165a89a559e0d96b061bf70')
+
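+    # PyQt is generated with the sip bindings tool and is installed
+    # straight into the extendee's site-packages directory (see the
+    # --destdir argument passed to configure.py in install() below).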
+    extends('python')
+    depends_on('py-sip')
+
+    # TODO: allow qt5 when conditional deps are supported.
+    # TODO: Fix version matching so that @4 works like @:4
+    depends_on('qt@:4')
+
+    def install(self, spec, prefix):
+        python('configure.py',
+               '--confirm-license',
+               '--destdir=%s' % site_packages_dir)
+        make()
+        make('install')
diff --git a/var/spack/repos/builtin/packages/py-pyside/package.py b/var/spack/repos/builtin/packages/py-pyside/package.py
new file mode 100644
index 0000000000..bb5da44d02
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pyside/package.py
@@ -0,0 +1,45 @@
+from spack import *
+import os
+
+class PyPyside(Package):
+    """Python bindings for the Qt cross-platform application and UI framework."""
+    homepage = "https://pypi.python.org/pypi/pyside"
+    url = "https://pypi.python.org/packages/source/P/PySide/PySide-1.2.2.tar.gz"
+
+    version('1.2.2', 'c45bc400c8a86d6b35f34c29e379e44d')
+
+    # TODO: make build dependency
+    # depends_on("cmake")
+
+    extends('python')
+    depends_on('py-setuptools')
+    depends_on('qt@:4')
+
+    def patch(self):
+        """Undo PySide RPATH handling and add Spack RPATH."""
+        # Figure out the special RPATH
+        pypkg = self.spec['python'].package
+        rpath = self.rpath
+        rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'PySide'))
+
+        # Add Spack's standard CMake args to the sub-builds.
+        # They're called BY setup.py so we have to patch it.
+        filter_file(
+            r'OPTION_CMAKE,',
+            r'OPTION_CMAKE, ' + (
+                '"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", '
+                '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(rpath)),
+            'setup.py')
+
+        # PySide tries to patch ELF files to remove RPATHs
+        # Disable this and go with the one we set.
+        filter_file(
+            r'^\s*rpath_cmd\(pyside_path, srcpath\)',
+            r'#rpath_cmd(pyside_path, srcpath)',
+            'pyside_postinstall.py')
+
+
+    def install(self, spec, prefix):
+        python('setup.py', 'install',
+               '--prefix=%s' % prefix,
+               '--jobs=%s' % make_jobs)
diff --git a/var/spack/repos/builtin/packages/py-python-daemon/package.py b/var/spack/repos/builtin/packages/py-python-daemon/package.py
new file mode 100644
index 0000000000..12cbe9101c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-python-daemon/package.py
@@ -0,0 +1,26 @@
+from spack import *
+
+class PyPythonDaemon(Package):
+    """Library to implement a well-behaved Unix daemon process.
+
+    This library implements the well-behaved daemon specification of
+    PEP 3143, "Standard daemon process library".
+
+    A well-behaved Unix daemon process is tricky to get right, but the
+    required steps are much the same for every daemon program. A
+    DaemonContext instance holds the behaviour and configured process
+    environment for the program; use the instance as a context manager
+    to enter a daemon state.
+ """ + homepage = "https://pypi.python.org/pypi/python-daemon/" + url = "https://pypi.python.org/packages/source/p/python-daemon/python-daemon-2.0.5.tar.gz" + + version('2.0.5', '73e7f49f525c51fa4a995aea4d80de41') + + extends("python") + depends_on("py-setuptools") + depends_on("py-lockfile") + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) + diff --git a/var/spack/repos/builtin/packages/py-pytz/package.py b/var/spack/repos/builtin/packages/py-pytz/package.py new file mode 100644 index 0000000000..da6311a784 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pytz/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PyPytz(Package): + """World timezone definitions, modern and historical.""" + homepage = "https://pypi.python.org/pypi/pytz" + url = "https://pypi.python.org/packages/source/p/pytz/pytz-2014.10.tar.gz" + + version('2014.10', 'eb1cb941a20c5b751352c52486aa1dd7') + version('2015.4', '417a47b1c432d90333e42084a605d3d8') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-rpy2/package.py b/var/spack/repos/builtin/packages/py-rpy2/package.py new file mode 100644 index 0000000000..a0b03d03e3 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-rpy2/package.py @@ -0,0 +1,17 @@ +from spack import * + +class PyRpy2(Package): + """rpy2 is a redesign and rewrite of rpy. It is providing a low-level interface to R from Python, a proposed high-level interface, including wrappers to graphical libraries, as well as R-like structures and functions.""" + homepage = "https://pypi.python.org/pypi/rpy2" + url = "https://pypi.python.org/packages/source/r/rpy2/rpy2-2.5.4.tar.gz" + + version('2.5.4', '115a20ac30883f096da2bdfcab55196d') + version('2.5.6', 'a36e758b633ce6aec6a5f450bfee980f') + + extends('python') + depends_on('py-setuptools') + + depends_on('R') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-scientificpython/package.py b/var/spack/repos/builtin/packages/py-scientificpython/package.py new file mode 100644 index 0000000000..df2c86caac --- /dev/null +++ b/var/spack/repos/builtin/packages/py-scientificpython/package.py @@ -0,0 +1,16 @@ +from spack import * + +class PyScientificpython(Package): + """ScientificPython is a collection of Python modules for + scientific computing. 
It contains support for geometry, + mathematical functions, statistics, physical units, IO, + visualization, and parallelization.""" + + homepage = "https://sourcesup.renater.fr/projects/scientific-py/" + url = "https://sourcesup.renater.fr/frs/download.php/file/4411/ScientificPython-2.8.1.tar.gz" + version('2.8.1', '73ee0df19c7b58cdf2954261f0763c77') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-scikit-learn/package.py b/var/spack/repos/builtin/packages/py-scikit-learn/package.py new file mode 100644 index 0000000000..5b078ce901 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-scikit-learn/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PyScikitLearn(Package): + """""" + homepage = "https://pypi.python.org/pypi/scikit-learn" + url = "https://pypi.python.org/packages/source/s/scikit-learn/scikit-learn-0.15.2.tar.gz" + + version('0.15.2', 'd9822ad0238e17b382a3c756ea94fe0d') + version('0.16.1', '363ddda501e3b6b61726aa40b8dbdb7e') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py new file mode 100644 index 0000000000..3a1124cc15 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-scipy/package.py @@ -0,0 +1,18 @@ +from spack import * + +class PyScipy(Package): + """Scientific Library for Python.""" + homepage = "https://pypi.python.org/pypi/scipy" + url = "https://pypi.python.org/packages/source/s/scipy/scipy-0.15.0.tar.gz" + + version('0.15.0', '639112f077f0aeb6d80718dc5019dc7a') + version('0.15.1', 'be56cd8e60591d6332aac792a5880110') + + extends('python') + depends_on('py-nose') + depends_on('py-numpy') + depends_on('blas') + depends_on('lapack') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-setuptools/package.py b/var/spack/repos/builtin/packages/py-setuptools/package.py new file mode 100644 index 0000000000..760ad4d6db --- /dev/null +++ b/var/spack/repos/builtin/packages/py-setuptools/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PySetuptools(Package): + """Easily download, build, install, upgrade, and uninstall Python packages.""" + homepage = "https://pypi.python.org/pypi/setuptools" + url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz" + + version('11.3.1', '01f69212e019a2420c1693fb43593930') + version('16.0', '0ace0b96233516fc5f7c857d086aa3ad') + version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-shiboken/package.py b/var/spack/repos/builtin/packages/py-shiboken/package.py new file mode 100644 index 0000000000..e4bf4ce07e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-shiboken/package.py @@ -0,0 +1,45 @@ +from spack import * +import os + +class PyShiboken(Package): + """Shiboken generates bindings for C++ libraries using CPython source code.""" + homepage = "https://shiboken.readthedocs.org/" + url = "https://pypi.python.org/packages/source/S/Shiboken/Shiboken-1.2.2.tar.gz" + + version('1.2.2', '345cfebda221f525842e079a6141e555') + + # TODO: make build dependency + # depends_on("cmake") + + extends('python') + depends_on("py-setuptools") + 
depends_on("libxml2") + depends_on("qt@:4.8") + + def patch(self): + """Undo Shiboken RPATH handling and add Spack RPATH.""" + # Add Spack's standard CMake args to the sub-builds. + # They're called BY setup.py so we have to patch it. + pypkg = self.spec['python'].package + rpath = self.rpath + rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'Shiboken')) + + filter_file( + r'OPTION_CMAKE,', + r'OPTION_CMAKE, ' + ( + '"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", ' + '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(rpath)), + 'setup.py') + + # Shiboken tries to patch ELF files to remove RPATHs + # Disable this and go with the one we set. + filter_file( + r'^\s*rpath_cmd\(shiboken_path, srcpath\)', + r'#rpath_cmd(shiboken_path, srcpath)', + 'shiboken_postinstall.py') + + + def install(self, spec, prefix): + python('setup.py', 'install', + '--prefix=%s' % prefix, + '--jobs=%s' % make_jobs) diff --git a/var/spack/repos/builtin/packages/py-sip/package.py b/var/spack/repos/builtin/packages/py-sip/package.py new file mode 100644 index 0000000000..e4a6fb6961 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-sip/package.py @@ -0,0 +1,21 @@ +from spack import * +import os + +class PySip(Package): + """SIP is a tool that makes it very easy to create Python bindings for C and C++ libraries.""" + homepage = "http://www.riverbankcomputing.com/software/sip/intro" + url = "http://sourceforge.net/projects/pyqt/files/sip/sip-4.16.5/sip-4.16.5.tar.gz" + + version('4.16.5', '6d01ea966a53e4c7ae5c5e48c40e49e5') + version('4.16.7', '32abc003980599d33ffd789734de4c36') + + extends('python') + + def install(self, spec, prefix): + python('configure.py', + '--destdir=%s' % site_packages_dir, + '--bindir=%s' % spec.prefix.bin, + '--incdir=%s' % python_include_dir, + '--sipdir=%s' % os.path.join(spec.prefix.share, 'sip')) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/py-six/package.py b/var/spack/repos/builtin/packages/py-six/package.py new file mode 100644 index 0000000000..05c5bd00a9 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-six/package.py @@ -0,0 +1,14 @@ +from spack import * + +class PySix(Package): + """Python 2 and 3 compatibility utilities.""" + homepage = "https://pypi.python.org/pypi/six" + url = "https://pypi.python.org/packages/source/s/six/six-1.9.0.tar.gz" + + version('1.9.0', '476881ef4012262dfc8adc645ee786c4') + + extends('python') + depends_on('py-setuptools') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-sphinx/package.py b/var/spack/repos/builtin/packages/py-sphinx/package.py new file mode 100644 index 0000000000..ec2e89a098 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-sphinx/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PySphinx(Package): + """Sphinx Documentation Generator.""" + homepage = "http://sphinx-doc.org" + url = "https://pypi.python.org/packages/source/S/Sphinx/Sphinx-1.3.1.tar.gz" + + version('1.3.1', '8786a194acf9673464c5455b11fd4332') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-sympy/package.py b/var/spack/repos/builtin/packages/py-sympy/package.py new file mode 100644 index 0000000000..c17e35b95f --- /dev/null +++ b/var/spack/repos/builtin/packages/py-sympy/package.py @@ -0,0 +1,13 @@ +from spack import * + +class PySympy(Package): + """SymPy is a Python library for symbolic mathematics.""" + 
homepage = "https://pypi.python.org/pypi/sympy" + url = "https://pypi.python.org/packages/source/s/sympy/sympy-0.7.6.tar.gz" + + version('0.7.6', '3d04753974306d8a13830008e17babca') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-virtualenv/package.py b/var/spack/repos/builtin/packages/py-virtualenv/package.py new file mode 100644 index 0000000000..037a6fc59f --- /dev/null +++ b/var/spack/repos/builtin/packages/py-virtualenv/package.py @@ -0,0 +1,16 @@ +from spack import * +import shutil + +class PyVirtualenv(Package): + """virtualenv is a tool to create isolated Python environments.""" + homepage = "http://virtualenv.readthedocs.org/projects/virtualenv/" + url = "https://pypi.python.org/packages/source/v/virtualenv/virtualenv-1.11.6.tar.gz" + + version('1.11.6', 'f61cdd983d2c4e6aeabb70b1060d6f49') + version('13.0.1', '1ffc011bde6667f0e37ecd976f4934db') + + extends('python') + depends_on('py-setuptools') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-yapf/package.py b/var/spack/repos/builtin/packages/py-yapf/package.py new file mode 100644 index 0000000000..12ef191515 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-yapf/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyYapf(Package): + """ Yet Another Python Formatter """ + homepage = "https://github.com/google/yapf" + # base https://pypi.python.org/pypi/cffi + url = "https://github.com/google/yapf/archive/v0.2.1.tar.gz" + + version('0.2.1', '348ccf86cf2057872e4451b204fb914c') + + extends('python') + depends_on('py-setuptools') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py new file mode 100644 index 0000000000..000881a846 --- /dev/null +++ b/var/spack/repos/builtin/packages/python/package.py @@ -0,0 +1,160 @@ +import os +import re +from contextlib import closing +from llnl.util.lang import match_predicate + +from spack import * +import spack + + +class Python(Package): + """The Python programming language.""" + homepage = "http://www.python.org" + url = "http://www.python.org/ftp/python/2.7.8/Python-2.7.8.tar.xz" + + extendable = True + + version('2.7.8', 'd235bdfa75b8396942e360a70487ee00') + version('2.7.10', 'c685ef0b8e9f27b5e3db5db12b268ac6') + + depends_on("openssl") + depends_on("bzip2") + depends_on("readline") + depends_on("ncurses") + depends_on("sqlite") + + def install(self, spec, prefix): + # Need this to allow python build to find the Python installation. + env['PYTHONHOME'] = prefix + + # Rest of install is pretty standard. + configure("--prefix=%s" % prefix, + "--with-threads", + "--enable-shared") + make() + make("install") + + + # ======================================================================== + # Set up environment to make install easy for python extensions. 
+ # ======================================================================== + + @property + def python_lib_dir(self): + return os.path.join('lib', 'python%d.%d' % self.version[:2]) + + + @property + def python_include_dir(self): + return os.path.join('include', 'python%d.%d' % self.version[:2]) + + + @property + def site_packages_dir(self): + return os.path.join(self.python_lib_dir, 'site-packages') + + + def setup_dependent_environment(self, module, spec, ext_spec): + """Called before python modules' install() methods. + + In most cases, extensions will only need to have one line:: + + python('setup.py', 'install', '--prefix=%s' % prefix) + """ + # Python extension builds can have a global python executable function + module.python = Executable(join_path(spec.prefix.bin, 'python')) + + # Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs. + module.python_lib_dir = os.path.join(ext_spec.prefix, self.python_lib_dir) + module.python_include_dir = os.path.join(ext_spec.prefix, self.python_include_dir) + module.site_packages_dir = os.path.join(ext_spec.prefix, self.site_packages_dir) + + # Make the site packages directory if it does not exist already. + mkdirp(module.site_packages_dir) + + # Set PYTHONPATH to include site-packages dir for the + # extension and any other python extensions it depends on. + python_paths = [] + for d in ext_spec.traverse(): + if d.package.extends(self.spec): + python_paths.append(os.path.join(d.prefix, self.site_packages_dir)) + os.environ['PYTHONPATH'] = ':'.join(python_paths) + + + # ======================================================================== + # Handle specifics of activating and deactivating python modules. + # ======================================================================== + + def python_ignore(self, ext_pkg, args): + """Add some ignore files to activate/deactivate args.""" + ignore_arg = args.get('ignore', lambda f: False) + + # Always ignore easy-install.pth, as it needs to be merged. + patterns = [r'easy-install\.pth$'] + + # Ignore pieces of setuptools installed by other packages. 
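+        # (Extensions are activated by symlinking their files into this
+        #  python installation, so every extension except py-setuptools
+        #  has to leave out its own copies of site.py, setuptools.pth,
+        #  the easy_install scripts, and setuptools egg directories.)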
+        if ext_pkg.name != 'py-setuptools':
+            patterns.append(r'/site\.pyc?$')
+            patterns.append(r'setuptools\.pth')
+            patterns.append(r'bin/easy_install[^/]*$')
+            patterns.append(r'setuptools.*egg$')
+
+        return match_predicate(ignore_arg, patterns)
+
+
+    def write_easy_install_pth(self, exts):
+        paths = []
+        for ext in sorted(exts.values()):
+            ext_site_packages = os.path.join(ext.prefix, self.site_packages_dir)
+            easy_pth = "%s/easy-install.pth" % ext_site_packages
+
+            if not os.path.isfile(easy_pth):
+                continue
+
+            with closing(open(easy_pth)) as f:
+                for line in f:
+                    line = line.rstrip()
+
+                    # Skip lines matching these criteria
+                    if not line: continue
+                    if re.search(r'^(import|#)', line): continue
+                    if (ext.name != 'py-setuptools' and
+                        re.search(r'setuptools.*egg$', line)): continue
+
+                    paths.append(line)
+
+        site_packages = os.path.join(self.prefix, self.site_packages_dir)
+        main_pth = "%s/easy-install.pth" % site_packages
+
+        if not paths:
+            if os.path.isfile(main_pth):
+                os.remove(main_pth)
+
+        else:
+            with closing(open(main_pth, 'w')) as f:
+                f.write("import sys; sys.__plen = len(sys.path)\n")
+                for path in paths:
+                    f.write("%s\n" % path)
+                f.write("import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; "
+                        "p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new)\n")
+
+
+    def activate(self, ext_pkg, **args):
+        ignore = self.python_ignore(ext_pkg, args)
+        args.update(ignore=ignore)
+
+        super(Python, self).activate(ext_pkg, **args)
+
+        exts = spack.install_layout.extension_map(self.spec)
+        exts[ext_pkg.name] = ext_pkg.spec
+        self.write_easy_install_pth(exts)
+
+
+    def deactivate(self, ext_pkg, **args):
+        args.update(ignore=self.python_ignore(ext_pkg, args))
+        super(Python, self).deactivate(ext_pkg, **args)
+
+        exts = spack.install_layout.extension_map(self.spec)
+        if ext_pkg.name in exts:  # Make deactivate idempotent.
+            del exts[ext_pkg.name]
+        self.write_easy_install_pth(exts)
diff --git a/var/spack/repos/builtin/packages/qhull/package.py b/var/spack/repos/builtin/packages/qhull/package.py
new file mode 100644
index 0000000000..9da4078a70
--- /dev/null
+++ b/var/spack/repos/builtin/packages/qhull/package.py
@@ -0,0 +1,27 @@
+from spack import *
+
+class Qhull(Package):
+    """Qhull computes the convex hull, Delaunay triangulation, Voronoi
+       diagram, halfspace intersection about a point, furthest-site
+       Delaunay triangulation, and furthest-site Voronoi diagram. The
+       source code runs in 2-d, 3-d, 4-d, and higher dimensions. Qhull
+       implements the Quickhull algorithm for computing the convex
+       hull. It handles roundoff errors from floating point
+       arithmetic. It computes volumes, surface areas, and
+       approximations to the convex hull.
+ + Qhull does not support triangulation of non-convex surfaces, + mesh generation of non-convex objects, medium-sized inputs in + 9-D and higher, alpha shapes, weighted Voronoi diagrams, + Voronoi volumes, or constrained Delaunay triangulations.""" + + homepage = "http://www.qhull.org" + + version('1.0', 'd0f978c0d8dfb2e919caefa56ea2953c', + url="http://www.qhull.org/download/qhull-2012.1-src.tgz") + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake('..', *std_cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py new file mode 100644 index 0000000000..0e4abe3b1d --- /dev/null +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -0,0 +1,109 @@ +import os +from spack import * +import os + +class Qt(Package): + """Qt is a comprehensive cross-platform C++ application framework.""" + homepage = "http://qt.io" + list_url = 'http://download.qt-project.org/official_releases/qt/' + list_depth = 2 + + version('5.4.0', 'e8654e4b37dd98039ba20da7a53877e6', + url='http://download.qt-project.org/official_releases/qt/5.4/5.4.0/single/qt-everywhere-opensource-src-5.4.0.tar.gz') + version('5.3.2', 'febb001129927a70174467ecb508a682', + url='http://download.qt.io/archive/qt/5.3/5.3.2/single/qt-everywhere-opensource-src-5.3.2.tar.gz') + + version('5.2.1', 'a78408c887c04c34ce615da690e0b4c8', + url='http://download.qt.io/archive/qt/5.2/5.2.1/single/qt-everywhere-opensource-src-5.2.1.tar.gz') + version('4.8.6', '2edbe4d6c2eff33ef91732602f3518eb', + url="http://download.qt-project.org/official_releases/qt/4.8/4.8.6/qt-everywhere-opensource-src-4.8.6.tar.gz") + + # Use system openssl for security. + #depends_on("openssl") + + depends_on("glib") + depends_on("gtkplus") + depends_on("libxml2") + depends_on("zlib") + depends_on("dbus") + depends_on("libtiff") + depends_on("libpng") + depends_on("libmng") + depends_on("jpeg") + + # Webkit + # depends_on("gperf") + # depends_on("flex") + # depends_on("bison") + # depends_on("ruby") + # depends_on("icu4c") + + # OpenGL hardware acceleration + depends_on("mesa") + depends_on("libxcb") + + + def setup_dependent_environment(self, module, spec, dep_spec): + """Dependencies of Qt find it using the QTDIR environment variable.""" + os.environ['QTDIR'] = self.prefix + + + def patch(self): + if self.spec.satisfies('@4'): + qmake_conf = 'mkspecs/common/g++-base.conf' + qmake_unix_conf = 'mkspecs/common/g++-unix.conf' + elif self.spec.satisfies('@5'): + qmake_conf = 'qtbase/mkspecs/common/g++-base.conf' + qmake_unix_conf = 'qtbase/mkspecs/common/g++-unix.conf' + else: + return + + # Fix qmake compilers in the default mkspec + filter_file(r'^QMAKE_COMPILER *=.*$', 'QMAKE_COMPILER = cc', qmake_conf) + filter_file(r'^QMAKE_CC *=.*$', 'QMAKE_CC = cc', qmake_conf) + filter_file(r'^QMAKE_CXX *=.*$', 'QMAKE_CXX = c++', qmake_conf) + filter_file(r'^QMAKE_LFLAGS_NOUNDEF *\+?=.*$', 'QMAKE_LFLAGS_NOUNDEF =', qmake_unix_conf) + + + @property + def common_config_args(self): + return [ + '-prefix', self.prefix, + '-v', + '-opensource', + '-opengl', + "-release", + '-shared', + '-confirm-license', + '-openssl-linked', + '-dbus-linked', + '-optimized-qmake', + '-no-openvg', + '-no-pch', + # NIS is deprecated in more recent glibc + "-no-nis"] + # Don't disable all the database drivers, but should + # really get them into spack at some point. 
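+
+    # configure() below is defined twice; Spack's @when decorator turns the
+    # two definitions into version-conditional multimethods, so install()
+    # ends up calling the qt4 or qt5 variant that matches the spec.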
+
+
+    @when('@4')
+    def configure(self):
+        configure('-fast',
+                  '-no-webkit',
+                  *self.common_config_args)
+
+
+    @when('@5')
+    def configure(self):
+        configure('-no-eglfs',
+                  '-no-directfb',
+                  '-qt-xcb',
+                  # If someone wants to get a webkit build working, be my guest!
+                  '-skip', 'qtwebkit',
+                  *self.common_config_args)
+
+
+    def install(self, spec, prefix):
+        self.configure()
+        make()
+        make("install")
diff --git a/var/spack/repos/builtin/packages/qthreads/package.py b/var/spack/repos/builtin/packages/qthreads/package.py
new file mode 100644
index 0000000000..dacdb71524
--- /dev/null
+++ b/var/spack/repos/builtin/packages/qthreads/package.py
@@ -0,0 +1,22 @@
+from spack import *
+
+class Qthreads(Package):
+    """The qthreads API is designed to make using large numbers of
+       threads convenient and easy, and to allow portable access to
+       threading constructs used in massively parallel shared memory
+       environments. The API maps well to both MTA-style threading and
+       PIM-style threading, and we provide an implementation of this
+       interface in both a standard SMP context as well as the SST
+       context. The qthreads API provides access to full/empty-bit
+       (FEB) semantics, where every word of memory can be marked
+       either full or empty, and a thread can wait for any word to
+       attain either state."""
+    homepage = "http://www.cs.sandia.gov/qthreads/"
+    url = "https://qthreads.googlecode.com/files/qthread-1.10.tar.bz2"
+
+    version('1.10', '5af8c8bbe88c2a6d45361643780d1671')
+
+    def install(self, spec, prefix):
+        configure("--prefix=%s" % prefix)
+        make()
+        make("install")
diff --git a/var/spack/repos/builtin/packages/ravel/package.py b/var/spack/repos/builtin/packages/ravel/package.py
new file mode 100644
index 0000000000..01fa941cfe
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ravel/package.py
@@ -0,0 +1,23 @@
+from spack import *
+
+class Ravel(Package):
+    """Ravel is a parallel communication trace visualization tool that
+       orders events according to logical time."""
+
+    homepage = "https://github.com/scalability-llnl/ravel"
+    url = 'https://github.com/scalability-llnl/ravel/archive/v1.0.0.tar.gz'
+
+    version('1.0.0', 'b25fece58331c2adfcce76c5036485c2')
+
+    # TODO: make this a build dependency
+    depends_on('cmake@2.8.9:')
+
+    depends_on('muster@1.0.1:')
+    depends_on('otf')
+    depends_on('otf2')
+    depends_on('qt@5:')
+
+    def install(self, spec, prefix):
+        cmake('-Wno-dev', *std_cmake_args)
+        make()
+        make("install")
diff --git a/var/spack/repos/builtin/packages/readline/package.py b/var/spack/repos/builtin/packages/readline/package.py
new file mode 100644
index 0000000000..1b870e0e7f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/readline/package.py
@@ -0,0 +1,21 @@
+from spack import *
+
+class Readline(Package):
+    """The GNU Readline library provides a set of functions for use by
+       applications that allow users to edit command lines as they
+       are typed in. Both Emacs and vi editing modes are
+       available. The Readline library includes additional functions
+       to maintain a list of previously-entered command lines, to
+       recall and perhaps reedit those lines, and perform csh-like
+       history expansion on previous commands.
""" + homepage = "http://cnswww.cns.cwru.edu/php/chet/readline/rltop.html" + url = "ftp://ftp.cwru.edu/pub/bash/readline-6.3.tar.gz" + + version('6.3', '33c8fb279e981274f485fd91da77e94a') + + depends_on("ncurses") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make("SHLIB_LIBS=-lncurses") + make("install") diff --git a/var/spack/repos/builtin/packages/rose/add_spack_compiler_recognition.patch b/var/spack/repos/builtin/packages/rose/add_spack_compiler_recognition.patch new file mode 100644 index 0000000000..ce61ae4e4c --- /dev/null +++ b/var/spack/repos/builtin/packages/rose/add_spack_compiler_recognition.patch @@ -0,0 +1,13 @@ +diff --git a/config/compiler-defs.m4 b/config/compiler-defs.m4 +index d7d85d2..780c8de 100644 +--- a/config/compiler-defs.m4 ++++ b/config/compiler-defs.m4 +@@ -28,7 +28,7 @@ dnl predefined by a specific compiler + # g++|gcc|mpicc|mpic++|mpicxx|mpiCC) + # TOO (2/16/2011): added support for tensilica compilers, assuming they are + # like GCC (they use a GCC front-end) +- g++*|gcc*|mpicc|mpic++|mpicxx|mpiCC|xt-xc++|xt-xcc) ++ cc*|c++*|g++*|gcc*|mpicc|mpic++|mpicxx|mpiCC|xt-xc++|xt-xcc) + BACKEND_GCC_MAJOR=`echo|$BACKEND_CXX_COMPILER -dumpversion | cut -d\. -f1` + BACKEND_GCC_MINOR=`echo|$BACKEND_CXX_COMPILER -dumpversion | cut -d\. -f2` + BACKEND_GCC_PATCHLEVEL=`echo|$BACKEND_CXX_COMPILER -dumpversion | cut -d\. -f3` diff --git a/var/spack/repos/builtin/packages/rose/package.py b/var/spack/repos/builtin/packages/rose/package.py new file mode 100644 index 0000000000..1d7294acab --- /dev/null +++ b/var/spack/repos/builtin/packages/rose/package.py @@ -0,0 +1,39 @@ +#------------------------------------------------------------------------------ +# Author: Justin Too +#------------------------------------------------------------------------------ + +from spack import * + +class Rose(Package): + """A compiler infrastructure to build source-to-source program + transformation and analysis tools. 
+ (Developed at Lawrence Livermore National Lab)""" + + homepage = "http://rosecompiler.org/" + url = "https://github.com/rose-compiler/edg4x-rose" + + version('master', branch='master', git='https://github.com/rose-compiler/edg4x-rose.git') + + patch('add_spack_compiler_recognition.patch') + + depends_on("autoconf@2.69") + depends_on("automake@1.14") + depends_on("libtool@2.4") + depends_on("boost@1.54.0") + depends_on("jdk@8u25-linux-x64") + + def install(self, spec, prefix): + # Bootstrap with autotools + bash = which('bash') + bash('build') + + # Configure, compile & install + with working_dir('rose-build', create=True): + boost = spec['boost'] + + configure = Executable('../configure') + configure("--prefix=" + prefix, + "--with-boost=" + boost.prefix, + "--disable-boost-version-check") + make("install-core") + diff --git a/var/spack/repos/builtin/packages/ruby/package.py b/var/spack/repos/builtin/packages/ruby/package.py new file mode 100644 index 0000000000..6b6242362c --- /dev/null +++ b/var/spack/repos/builtin/packages/ruby/package.py @@ -0,0 +1,41 @@ +from spack import * +import spack +import os + +class Ruby(Package): + """A dynamic, open source programming language with a focus on + simplicity and productivity.""" + + homepage = "https://www.ruby-lang.org/" + url = "http://cache.ruby-lang.org/pub/ruby/2.2/ruby-2.2.0.tar.gz" + + extendable = True + + version('2.2.0', 'cd03b28fd0b555970f5c4fd481700852') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") + + def setup_dependent_environment(self, module, spec, ext_spec): + """Called before ruby modules' install() methods. Sets GEM_HOME + and GEM_PATH to values appropriate for the package being built. + + In most cases, extensions will only need to have one line:: + + gem('install', '.gem') + """ + # Ruby extension builds have global ruby and gem functions + module.ruby = Executable(join_path(spec.prefix.bin, 'ruby')) + module.gem = Executable(join_path(spec.prefix.bin, 'gem')) + + # Set GEM_PATH to include dependent gem directories + ruby_paths = [] + for d in ext_spec.traverse(): + if d.package.extends(self.spec): + ruby_paths.append(d.prefix) + os.environ['GEM_PATH'] = ':'.join(ruby_paths) + # The actual installation path for this gem + os.environ['GEM_HOME'] = ext_spec.prefix diff --git a/var/spack/repos/builtin/packages/samtools/package.py b/var/spack/repos/builtin/packages/samtools/package.py new file mode 100644 index 0000000000..72900398d8 --- /dev/null +++ b/var/spack/repos/builtin/packages/samtools/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Samtools(Package): + """SAM Tools provide various utilities for manipulating alignments in the SAM format, + including sorting, merging, indexing and generating + alignments in a per-position format""" + + homepage = "www.htslib.org" + version('1.2','988ec4c3058a6ceda36503eebecd4122',url = "https://github.com/samtools/samtools/releases/download/1.2/samtools-1.2.tar.bz2") + + depends_on("zlib") + depends_on("mpc") + parallel=False + patch("samtools1.2.patch",level=0) + + def install(self, spec, prefix): + make("prefix=%s" % prefix, "install") + diff --git a/var/spack/repos/builtin/packages/samtools/samtools1.2.patch b/var/spack/repos/builtin/packages/samtools/samtools1.2.patch new file mode 100644 index 0000000000..ead3ab4e2c --- /dev/null +++ b/var/spack/repos/builtin/packages/samtools/samtools1.2.patch @@ -0,0 +1,20 @@ +--- Makefile 2015-02-03 08:27:34.000000000 -0800 ++++ Makefile.new 2015-07-21 10:38:27.881406892 -0700 
+@@ -26,7 +26,7 @@ + CFLAGS = -g -Wall -O2 + LDFLAGS = + LDLIBS = +-DFLAGS= -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_CURSES_LIB=1 ++DFLAGS= -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_CURSES_LIB=0 + LOBJS= bam_aux.o bam.o bam_import.o sam.o \ + sam_header.o bam_plbuf.o + AOBJS= bam_index.o bam_plcmd.o sam_view.o \ +@@ -37,7 +37,7 @@ + faidx.o stats.o stats_isize.o bam_flags.o bam_split.o \ + bam_tview.o bam_tview_curses.o bam_tview_html.o bam_lpileup.o + INCLUDES= -I. -I$(HTSDIR) +-LIBCURSES= -lcurses # -lXCurses ++#LIBCURSES= -lcurses # -lXCurses + + prefix = /usr/local + exec_prefix = $(prefix) diff --git a/var/spack/repos/builtin/packages/scalasca/package.py b/var/spack/repos/builtin/packages/scalasca/package.py new file mode 100644 index 0000000000..cf7a40c1f5 --- /dev/null +++ b/var/spack/repos/builtin/packages/scalasca/package.py @@ -0,0 +1,65 @@ +# FIXME: Add copyright + +from spack import * + +class Scalasca(Package): + """Scalasca is a software tool that supports the performance optimization + of parallel programs by measuring and analyzing their runtime behavior. + The analysis identifies potential performance bottlenecks - in + particular those concerning communication and synchronization - and + offers guidance in exploring their causes.""" + + # FIXME: add a proper url for your package's homepage here. + homepage = "http://www.scalasca.org" + url = "http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz" + + version('2.1', 'bab9c2b021e51e2ba187feec442b96e6', + url = 'http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz' ) + + depends_on("mpi") + depends_on("otf2@1.4") + depends_on("cube@4.2.3") + + backend_user_provided = """\ +CC=cc +CXX=c++ +F77=f77 +FC=f90 +CFLAGS=-fPIC +CXXFLAGS=-fPIC +""" + frontend_user_provided = """\ +CC_FOR_BUILD=cc +CXX_FOR_BUILD=c++ +F77_FOR_BUILD=f70 +FC_FOR_BUILD=f90 +CFLAGS_FOR_BUILD=-fPIC +CXXFLAGS_FOR_BUILD=-fPIC +""" + mpi_user_provided = """\ +MPICC=mpicc +MPICXX=mpicxx +MPIF77=mpif77 +MPIFC=mpif90 +MPI_CFLAGS=-fPIC +MPI_CXXFLAGS=-fPIC +""" + + def install(self, spec, prefix): + configure_args = ["--prefix=%s" % prefix, + "--with-custom-compilers", + "--with-otf2=%s" % spec['otf2'].prefix.bin, + "--with-cube=%s" % spec['cube'].prefix.bin, + "--enable-shared"] + + configure(*configure_args) + + make() + make("install") + + # FIXME: Modify the configure line to suit your build system here. + configure("--prefix=%s" % prefix) + + # FIXME: Add logic to build and install here + make() + make("install") diff --git a/var/spack/repos/builtin/packages/scorep/package.py b/var/spack/repos/builtin/packages/scorep/package.py new file mode 100644 index 0000000000..f013bd1cbb --- /dev/null +++ b/var/spack/repos/builtin/packages/scorep/package.py @@ -0,0 +1,74 @@ +# FIXME: Add copyright statement + +from spack import * + +class Scorep(Package): + """The Score-P measurement infrastructure is a highly scalable and + easy-to-use tool suite for profiling, event tracing, and online + analysis of HPC applications.""" + + # FIXME: add a proper url for your package's homepage here. 
+ homepage = "http://www.vi-hps.org/projects/score-p" + url = "http://www.vi-hps.org/upload/packages/scorep/scorep-1.2.3.tar.gz" + + version('1.3', '9db6f957b7f51fa01377a9537867a55c', + url = 'http://www.vi-hps.org/upload/packages/scorep/scorep-1.3.tar.gz') + + version('1.2.3', '4978084e7cbd05b94517aa8beaea0817') + + depends_on("mpi") + depends_on("papi") + # depends_on("otf2@1.2:1.2.1") # only Score-P 1.2.x + depends_on("otf2") + depends_on("opari2") + depends_on("cube@4.2:4.2.3") + + backend_user_provided = """\ +CC=cc +CXX=c++ +F77=f77 +FC=f90 +CFLAGS=-fPIC +CXXFLAGS=-fPIC +""" + frontend_user_provided = """\ +CC_FOR_BUILD=cc +CXX_FOR_BUILD=c++ +F77_FOR_BUILD=f70 +FC_FOR_BUILD=f90 +CFLAGS_FOR_BUILD=-fPIC +CXXFLAGS_FOR_BUILD=-fPIC +""" + mpi_user_provided = """\ +MPICC=mpicc +MPICXX=mpicxx +MPIF77=mpif77 +MPIFC=mpif90 +MPI_CFLAGS=-fPIC +MPI_CXXFLAGS=-fPIC +""" + + def install(self, spec, prefix): + # Use a custom compiler configuration, otherwise the score-p + # build system messes with spack's compiler settings. + # Create these three files in the build directory + with open("platform-backend-user-provided", "w") as backend_file: + backend_file.write(self.backend_user_provided) + with open("platform-frontend-user-provided", "w") as frontend_file: + frontend_file.write(self.frontend_user_provided) + with open("platform-mpi-user-provided", "w") as mpi_file: + mpi_file.write(self.mpi_user_provided) + + configure_args = ["--prefix=%s" % prefix, + "--with-custom-compilers", + "--with-otf2=%s" % spec['otf2'].prefix.bin, + "--with-opari2=%s" % spec['opari2'].prefix.bin, + "--with-cube=%s" % spec['cube'].prefix.bin, + "--with-papi-header=%s" % spec['papi'].prefix.include, + "--with-papi-lib=%s" % spec['papi'].prefix.lib, + "--enable-shared"] + + configure(*configure_args) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/scotch/package.py b/var/spack/repos/builtin/packages/scotch/package.py new file mode 100644 index 0000000000..79289ff2ad --- /dev/null +++ b/var/spack/repos/builtin/packages/scotch/package.py @@ -0,0 +1,40 @@ +from spack import * +import glob +import os + +class Scotch(Package): + """Scotch is a software package for graph and mesh/hypergraph + partitioning, graph clustering, and sparse matrix ordering.""" + homepage = "http://www.labri.fr/perso/pelegrin/scotch/" + url = "http://gforge.inria.fr/frs/download.php/file/34099/scotch_6.0.3.tar.gz" + list_url = "http://gforge.inria.fr/frs/?group_id=248" + + version('6.0.3', '10b0cc0f184de2de99859eafaca83cfc') + + depends_on('mpi') + + + def patch(self): + with working_dir('src/Make.inc'): + makefiles = glob.glob('Makefile.inc.x86-64_pc_linux2*') + filter_file(r'^CCS\s*=.*$', 'CCS = cc', *makefiles) + filter_file(r'^CCD\s*=.*$', 'CCD = cc', *makefiles) + + + def install(self, spec, prefix): + # Currently support gcc and icc on x86_64 (maybe others with + # vanilla makefile) + makefile = 'Make.inc/Makefile.inc.x86-64_pc_linux2' + if spec.satisfies('%icc'): + makefile += '.icc' + + with working_dir('src'): + force_symlink(makefile, 'Makefile.inc') + for app in ('scotch', 'ptscotch'): + make(app) + + install_tree('bin', prefix.bin) + install_tree('lib', prefix.lib) + install_tree('include', prefix.include) + install_tree('man/man1', prefix.share_man1) + diff --git a/var/spack/repos/builtin/packages/scr/package.py b/var/spack/repos/builtin/packages/scr/package.py new file mode 100644 index 0000000000..9fb758f072 --- /dev/null +++ b/var/spack/repos/builtin/packages/scr/package.py @@ -0,0 +1,44 @@ 
+############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Scr(Package): + """SCR caches checkpoint data in storage on the compute nodes of a + Linux cluster to provide a fast, scalable checkpoint/restart + capability for MPI codes""" + + homepage = "https://computation.llnl.gov/project/scr/" + + depends_on("mpi") +# depends_on("dtcmp") + + version('1.1-7', 'a5930e9ab27d1b7049447c2fd7734ebd', url='http://downloads.sourceforge.net/project/scalablecr/releases/scr-1.1-7.tar.gz') + version('1.1.8', '6a0f11ad18e27fcfc00a271ff587b06e', url='https://github.com/hpc/scr/releases/download/v1.1.8/scr-1.1.8.tar.gz') + + def install(self, spec, prefix): + configure("--prefix=" + prefix, + "--with-scr-config-file=" + prefix + "/etc/scr.conf") + make() + make("install") diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py new file mode 100644 index 0000000000..9eda11df15 --- /dev/null +++ b/var/spack/repos/builtin/packages/silo/package.py @@ -0,0 +1,19 @@ +from spack import * + +class Silo(Package): + """Silo is a library for reading and writing a wide variety of scientific data to binary, disk files.""" + + homepage = "http://wci.llnl.gov/simulation/computer-codes/silo" + url = "https://wci.llnl.gov/content/assets/docs/simulation/computer-codes/silo/silo-4.8/silo-4.8.tar.gz" + + #version('4.9', 'a83eda4f06761a86726e918fc55e782a') + version('4.8', 'b1cbc0e7ec435eb656dc4b53a23663c9') + + depends_on("hdf5@:1.8.12") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--with-hdf5=%s" %spec['hdf5'].prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/snappy/package.py b/var/spack/repos/builtin/packages/snappy/package.py new file mode 100644 index 0000000000..c8f9ceef7d --- /dev/null +++ b/var/spack/repos/builtin/packages/snappy/package.py @@ -0,0 +1,15 @@ +import os +from spack import * + +class Snappy(Package): + """A fast compressor/decompressor: https://code.google.com/p/snappy""" + + homepage = "https://code.google.com/p/snappy" + url = "https://github.com/google/snappy/releases/download/1.1.3/snappy-1.1.3.tar.gz" + + version('1.1.3', '7358c82f133dc77798e4c2062a749b73') + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") diff --git 
a/var/spack/repos/builtin/packages/spindle/package.py b/var/spack/repos/builtin/packages/spindle/package.py new file mode 100644 index 0000000000..06a1e14284 --- /dev/null +++ b/var/spack/repos/builtin/packages/spindle/package.py @@ -0,0 +1,44 @@ +############################################################################## +# Copyright (c) 2014, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Matthew LeGendre, legendre1@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Spindle(Package): + """Spindle improves the library-loading performance of dynamically + linked HPC applications. Without Spindle large MPI jobs can + overload on a shared file system when loading dynamically + linked libraries, causing site-wide performance problems. + """ + homepage = "https://computation.llnl.gov/project/spindle/" + url = "https://github.com/hpc/Spindle/archive/v0.8.1.tar.gz" + list_url = "https://github.com/hpc/Spindle/releases" + + version('0.8.1', 'f11793a6b9d8df2cd231fccb2857d912') + + depends_on("launchmon") + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/sqlite/package.py b/var/spack/repos/builtin/packages/sqlite/package.py new file mode 100644 index 0000000000..734b0b6cb6 --- /dev/null +++ b/var/spack/repos/builtin/packages/sqlite/package.py @@ -0,0 +1,40 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Sqlite(Package): + """SQLite3 is an SQL database engine in a C library. Programs that + link the SQLite3 library can have SQL database access without + running a separate RDBMS process. + """ + homepage = "www.sqlite.org" + + version('3.8.5', '0544ef6d7afd8ca797935ccc2685a9ed', + url='http://www.sqlite.org/2014/sqlite-autoconf-3080500.tar.gz') + + def install(self, spec, prefix): + configure("--prefix=" + prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/stat/configure_mpicxx.patch b/var/spack/repos/builtin/packages/stat/configure_mpicxx.patch new file mode 100644 index 0000000000..e09056d95c --- /dev/null +++ b/var/spack/repos/builtin/packages/stat/configure_mpicxx.patch @@ -0,0 +1,19 @@ +commit 07ab6e565f939c54fff6580fc8463ea61662871a +Author: Gregory L. Lee +Date: Tue May 20 14:53:35 2014 -0700 + + re-boostrap to update configure + +diff --git a/configure b/configure +index 6c4af7d..30901ea 100755 +--- a/configure ++++ b/configure +@@ -15529,7 +15529,7 @@ fi + done + test -n "$MPICC" || MPICC="$CC" + +- for ac_prog in mpig++ mpiicpc mpxlC mpixlC ++ for ac_prog in mpig++ mpiCC mpicxx mpiicpc mpxlC mpixlC + do + # Extract the first word of "$ac_prog", so it can be a program name with args. + set dummy $ac_prog; ac_word=$2 diff --git a/var/spack/repos/builtin/packages/stat/package.py b/var/spack/repos/builtin/packages/stat/package.py new file mode 100644 index 0000000000..5d81e62731 --- /dev/null +++ b/var/spack/repos/builtin/packages/stat/package.py @@ -0,0 +1,40 @@ +from spack import * + +class Stat(Package): + """Library to create, manipulate, and export graphs Graphlib.""" + homepage = "http://paradyn.org/STAT/STAT.html" + url = "https://github.com/lee218llnl/stat/archive/v2.0.0.tar.gz" + + version('2.2.0', '26bd69dd57a15afdd5d0ebdb0b7fb6fc') + version('2.1.0', 'ece26beaf057aa9134d62adcdda1ba91') + version('2.0.0', 'c7494210b0ba26b577171b92838e1a9b') + + variant('dysect', default=False, description="enable DySectAPI") + + depends_on('libelf') + depends_on('libdwarf') + depends_on('dyninst') + depends_on('graphlib') + depends_on('graphviz') + depends_on('launchmon') + depends_on('mrnet') + + patch('configure_mpicxx.patch', when='@2.1.0') + + def install(self, spec, prefix): + configure_args = [ + "--enable-gui", + "--prefix=%s" % prefix, + "--disable-examples", # Examples require MPI: avoid this dependency. + "--with-launchmon=%s" % spec['launchmon'].prefix, + "--with-mrnet=%s" % spec['mrnet'].prefix, + "--with-graphlib=%s" % spec['graphlib'].prefix, + "--with-stackwalker=%s" % spec['dyninst'].prefix, + "--with-libdwarf=%s" % spec['libdwarf'].prefix + ] + if '+dysect' in spec: + configure_args.append('--enable-dysectapi') + configure(*configure_args) + + make(parallel=False) + make("install") diff --git a/var/spack/repos/builtin/packages/sundials/package.py b/var/spack/repos/builtin/packages/sundials/package.py new file mode 100644 index 0000000000..8b784c8c3c --- /dev/null +++ b/var/spack/repos/builtin/packages/sundials/package.py @@ -0,0 +1,39 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. 
+# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Sundials(Package): + """SUNDIALS (SUite of Nonlinear and DIfferential/ALgebraic equation Solvers)""" + homepage = "http://computation.llnl.gov/casc/sundials/" + url = "http://computation.llnl.gov/casc/sundials/download/code/sundials-2.5.0.tar.gz" + + version('2.5.0', 'aba8b56eec600de3109cfb967aa3ba0f') + + depends_on("mpi") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/swig/package.py b/var/spack/repos/builtin/packages/swig/package.py new file mode 100644 index 0000000000..ee536d7063 --- /dev/null +++ b/var/spack/repos/builtin/packages/swig/package.py @@ -0,0 +1,46 @@ +############################################################################## +# Copyright (c) 2014, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Matthew LeGendre, legendre1@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + +class Swig(Package): + """SWIG is an interface compiler that connects programs written in + C and C++ with scripting languages such as Perl, Python, Ruby, + and Tcl. It works by taking the declarations found in C/C++ + header files and using them to generate the wrapper code that + scripting languages need to access the underlying C/C++ + code. 
In addition, SWIG provides a variety of customization + features that let you tailor the wrapping process to suit your + application.""" + homepage = "http://www.swig.org" + url = "http://prdownloads.sourceforge.net/swig/swig-3.0.2.tar.gz" + + version('3.0.2', '62f9b0d010cef36a13a010dc530d0d41') + + depends_on('pcre') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/task/package.py b/var/spack/repos/builtin/packages/task/package.py new file mode 100644 index 0000000000..07f44cc45b --- /dev/null +++ b/var/spack/repos/builtin/packages/task/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Task(Package): + """Feature-rich console based todo list manager""" + homepage = "http://www.taskwarrior.org" + url = "http://taskwarrior.org/download/task-2.4.4.tar.gz" + + version('2.4.4', '517450c4a23a5842df3e9905b38801b3') + + depends_on("gnutls") + depends_on("libuuid") + # depends_on("gcc@4.8:") + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake('-DCMAKE_BUILD_TYPE=release', + '..', + *std_cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/taskd/package.py b/var/spack/repos/builtin/packages/taskd/package.py new file mode 100644 index 0000000000..66bc0cb484 --- /dev/null +++ b/var/spack/repos/builtin/packages/taskd/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Taskd(Package): + """TaskWarrior task synchronization daemon""" + # FIXME: add a proper url for your package's homepage here. + homepage = "http://www.taskwarrior.org" + url = "http://taskwarrior.org/download/taskd-1.1.0.tar.gz" + + version('1.1.0', 'ac855828c16f199bdbc45fbc227388d0') + + depends_on("libuuid") + depends_on("gnutls") + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake('-DCMAKE_BUILD_TYPE=release', + '..', + *std_cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py new file mode 100644 index 0000000000..048fac80aa --- /dev/null +++ b/var/spack/repos/builtin/packages/tau/package.py @@ -0,0 +1,36 @@ +from spack import * + +import os +from llnl.util.filesystem import join_path + +class Tau(Package): + """A portable profiling and tracing toolkit for performance + analysis of parallel programs written in Fortran, C, C++, UPC, + Java, Python.""" + homepage = "http://www.cs.uoregon.edu/research/tau" + url = "http://www.cs.uoregon.edu/research/paracomp/tau/tauprofile/dist/tau-2.23.1.tar.gz" + + version('2.23.1', '6593b47ae1e7a838e632652f0426fe72') + + def install(self, spec, prefix): + # TAU isn't happy with directories that have '@' in the path. Sigh. + change_sed_delimiter('@', ';', 'configure') + change_sed_delimiter('@', ';', 'utils/FixMakefile') + change_sed_delimiter('@', ';', 'utils/FixMakefile.sed.default') + + # After that, it's relatively standard. + configure("-prefix=%s" % prefix) + make("install") + + # Link arch-specific directories into prefix since there is + # only one arch per prefix the way spack installs. 
+ self.link_tau_arch_dirs() + + + def link_tau_arch_dirs(self): + for subdir in os.listdir(self.prefix): + for d in ('bin', 'lib'): + src = join_path(self.prefix, subdir, d) + dest = join_path(self.prefix, d) + if os.path.isdir(src) and not os.path.exists(dest): + os.symlink(join_path(subdir, d), dest) diff --git a/var/spack/repos/builtin/packages/tcl/package.py b/var/spack/repos/builtin/packages/tcl/package.py new file mode 100644 index 0000000000..529adf7788 --- /dev/null +++ b/var/spack/repos/builtin/packages/tcl/package.py @@ -0,0 +1,22 @@ +from spack import * + +class Tcl(Package): + """Tcl (Tool Command Language) is a very powerful but easy to + learn dynamic programming language, suitable for a very wide + range of uses, including web and desktop applications, + networking, administration, testing and many more. Open source + and business-friendly, Tcl is a mature yet evolving language + that is truly cross platform, easily deployed and highly + extensible.""" + homepage = "http://www.tcl.tk" + + version('8.6.3', 'db382feca91754b7f93da16dc4cdad1f', + url="http://prdownloads.sourceforge.net/tcl/tcl8.6.3-src.tar.gz") + + depends_on('zlib') + + def install(self, spec, prefix): + with working_dir('unix'): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/the_silver_searcher/package.py b/var/spack/repos/builtin/packages/the_silver_searcher/package.py new file mode 100644 index 0000000000..e4020b6766 --- /dev/null +++ b/var/spack/repos/builtin/packages/the_silver_searcher/package.py @@ -0,0 +1,17 @@ +from spack import * + +class TheSilverSearcher(Package): + """Fast recursive grep alternative""" + homepage = "http://geoff.greer.fm/ag/" + url = "http://geoff.greer.fm/ag/releases/the_silver_searcher-0.30.0.tar.gz" + + version('0.30.0', '95e2e7859fab1156c835aff7413481db') + + depends_on('pcre') + depends_on('xz') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/thrift/package.py b/var/spack/repos/builtin/packages/thrift/package.py new file mode 100644 index 0000000000..0e15052f64 --- /dev/null +++ b/var/spack/repos/builtin/packages/thrift/package.py @@ -0,0 +1,44 @@ +from spack import * + +class Thrift(Package): + """The Apache Thrift software framework, for scalable cross-language services + development, combines a software stack with a code generation engine to build + services that work efficiently and seamlessly between C++, Java, Python, PHP, + Ruby, Erlang, Perl, Haskell, C#, Cocoa, JavaScript, Node.js, Smalltalk, OCaml + and Delphi and other languages.""" + + homepage = "http://thrift.apache.org" + url = "http://apache.mirrors.ionfish.org/thrift/0.9.2/thrift-0.9.2.tar.gz" + + version('0.9.2', '89f63cc4d0100912f4a1f8a9dee63678') + + extends("python") + + depends_on("autoconf") + depends_on("automake") + depends_on("bison") + depends_on("boost") + depends_on("flex") + depends_on("jdk") + depends_on("libtool") + depends_on("openssl") + depends_on("python") + + # Compilation fails for most languages, fortunately cpp installs fine + # All other languages (yes, including C) are omitted until someone needs them + def install(self, spec, prefix): + env["PY_PREFIX"] = prefix + env["JAVA_PREFIX"] = prefix + + configure("--prefix=%s" % prefix, + "--with-boost=%s" % spec['boost'].prefix, + "--with-c=no", + "--with-go=no", + "--with-python=yes", + "--with-lua=no", + "--with-php=no", + "--with-qt4=no", + "--enable-tests=no") + + make() + 
make("install") diff --git a/var/spack/repos/builtin/packages/tk/package.py b/var/spack/repos/builtin/packages/tk/package.py new file mode 100644 index 0000000000..96736f6f95 --- /dev/null +++ b/var/spack/repos/builtin/packages/tk/package.py @@ -0,0 +1,22 @@ +from spack import * + +class Tk(Package): + """Tk is a graphical user interface toolkit that takes developing + desktop applications to a higher level than conventional + approaches. Tk is the standard GUI not only for Tcl, but for + many other dynamic languages, and can produce rich, native + applications that run unchanged across Windows, Mac OS X, Linux + and more.""" + homepage = "http://www.tcl.tk" + url = "http://prdownloads.sourceforge.net/tcl/tk8.6.3-src.tar.gz" + + version('src', '85ca4dbf4dcc19777fd456f6ee5d0221') + + depends_on("tcl") + + def install(self, spec, prefix): + with working_dir('unix'): + configure("--prefix=%s" % prefix, + "--with-tcl=%s" % spec['tcl'].prefix.lib) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/tmux/package.py b/var/spack/repos/builtin/packages/tmux/package.py new file mode 100644 index 0000000000..23d36db427 --- /dev/null +++ b/var/spack/repos/builtin/packages/tmux/package.py @@ -0,0 +1,24 @@ +from spack import * + +class Tmux(Package): + """tmux is a terminal multiplexer. What is a terminal multiplexer? It lets + you switch easily between several programs in one terminal, detach them (they + keep running in the background) and reattach them to a different terminal. And + do a lot more. + """ + + homepage = "http://tmux.sourceforge.net" + url = "http://downloads.sourceforge.net/project/tmux/tmux/tmux-1.9/tmux-1.9a.tar.gz" + + version('1.9a', 'b07601711f96f1d260b390513b509a2d') + + depends_on('libevent') + depends_on('ncurses') + + def install(self, spec, prefix): + configure( + "--prefix=%s" % prefix, + "PKG_CONFIG_PATH=%s:%s" % (spec['libevent'].prefix, spec['ncurses'].prefix)) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/tmuxinator/package.py b/var/spack/repos/builtin/packages/tmuxinator/package.py new file mode 100644 index 0000000000..26c061cbd6 --- /dev/null +++ b/var/spack/repos/builtin/packages/tmuxinator/package.py @@ -0,0 +1,17 @@ +from spack import * + +class Tmuxinator(Package): + """A session configuration creator and manager for tmux""" + homepage = "https://github.com/tmuxinator/tmuxinator" + url = "https://github.com/tmuxinator/tmuxinator" + + version('0.6.11', + git='https://github.com/tmuxinator/tmuxinator', + tag='v0.6.11') + + extends('ruby') + + def install(self, spec, prefix): + gem('build', 'tmuxinator.gemspec') + gem('install', 'tmuxinator-{}.gem'.format(self.version)) + diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py new file mode 100644 index 0000000000..7c43f796a4 --- /dev/null +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -0,0 +1,50 @@ +from spack import * + + +class Trilinos(Package): + """ + The Trilinos Project is an effort to develop algorithms and enabling technologies within an object-oriented + software framework for the solution of large-scale, complex multi-physics engineering and scientific problems. + A unique design feature of Trilinos is its focus on packages. 
+ """ + homepage = "https://trilinos.org/" + url = "http://trilinos.csbsju.edu/download/files/trilinos-12.2.1-Source.tar.gz" + + version('12.2.1', '6161926ea247863c690e927687f83be9') + version('12.0.1', 'bd99741d047471e127b8296b2ec08017') + version('11.14.3', '2f4f83f8333e4233c57d0f01c4b57426') + version('11.14.2', 'a43590cf896c677890d75bfe75bc6254') + version('11.14.1', '40febc57f76668be8b6a77b7607bb67f') + + variant('mpi', default=True, description='Add a dependency on MPI and enables MPI dependent packages') + + # Everything should be compiled with -fpic + depends_on('blas') + depends_on('lapack') + depends_on('boost') + depends_on('netcdf') + depends_on('matio') + depends_on('glm') + depends_on('swig') + depends_on('mpi', when='+mpi') + + def install(self, spec, prefix): + + options = [ + '-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON', + '-DTrilinos_ENABLE_TESTS:BOOL=OFF', + '-DTrilinos_ENABLE_EXAMPLES:BOOL=OFF', + '-DBUILD_SHARED_LIBS:BOOL=ON', + '-DBLAS_LIBRARY_DIRS:PATH=%s' % spec['blas'].prefix, + '-DLAPACK_LIBRARY_DIRS:PATH=%s' % spec['lapack'].prefix + ] + if '+mpi' in spec: + mpi_options = ['-DTPL_ENABLE_MPI:BOOL=ON'] + options.extend(mpi_options) + + # -DCMAKE_INSTALL_PREFIX and all the likes... + options.extend(std_cmake_args) + with working_dir('spack-build', create=True): + cmake('..', *options) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/uncrustify/package.py b/var/spack/repos/builtin/packages/uncrustify/package.py new file mode 100644 index 0000000000..d3f2d1b473 --- /dev/null +++ b/var/spack/repos/builtin/packages/uncrustify/package.py @@ -0,0 +1,14 @@ +from spack import * + +class Uncrustify(Package): + """Source Code Beautifier for C, C++, C#, ObjectiveC, D, Java, Pawn and VALA""" + + homepage = "http://uncrustify.sourceforge.net/" + url = "http://downloads.sourceforge.net/project/uncrustify/uncrustify/uncrustify-0.61/uncrustify-0.61.tar.gz" + + version('0.61', 'b6140106e74c64e831d0b1c4b6cf7727') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/util-linux/package.py b/var/spack/repos/builtin/packages/util-linux/package.py new file mode 100644 index 0000000000..cb7ceabf57 --- /dev/null +++ b/var/spack/repos/builtin/packages/util-linux/package.py @@ -0,0 +1,20 @@ +from spack import * +import os + +class UtilLinux(Package): + """Util-linux is a suite of essential utilities for any Linux system.""" + + homepage = "http://freecode.com/projects/util-linux" + url = "https://www.kernel.org/pub/linux/utils/util-linux/v2.25/util-linux-2.25.tar.gz" + + version('2.25', 'f6d7fc6952ec69c4dc62c8d7c59c1d57') + + depends_on("python@2.7:") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "PKG_CONFIG_PATH=%s/pkgconfig" % spec['python'].prefix.lib, + "--disable-use-tty-group") + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/vim/package.py b/var/spack/repos/builtin/packages/vim/package.py new file mode 100644 index 0000000000..4099b3257f --- /dev/null +++ b/var/spack/repos/builtin/packages/vim/package.py @@ -0,0 +1,83 @@ +from spack import * + +class Vim(Package): + """Vim is a highly configurable text editor built to enable efficient text + editing. It is an improved version of the vi editor distributed with most + UNIX systems. Vim is often called a "programmer's editor," and so useful + for programming that many consider it an entire IDE. It's not just for + programmers, though. 
Vim is perfect for all kinds of text editing, from + composing email to editing configuration files. + """ + + homepage = "http://www.vim.org" + url = "ftp://ftp.vim.org/pub/vim/unix/vim-7.4.tar.bz2" + list_url = "http://ftp.vim.org/pub/vim/unix/" + + version('7.4', '607e135c559be642f210094ad023dc65') + version('7.3', '5b9510a17074e2b37d8bb38ae09edbf2') + version('7.2', 'f0901284b338e448bfd79ccca0041254') + version('7.1', '44c6b4914f38d6f9aa959640b89da329') + version('7.0', '4ca69757678272f718b1041c810d82d8') + version('6.4', '774c14d93ce58674b3b2c880edd12d77') + version('6.3', '821fda8f14d674346b87e3ef9cb96389') + version('6.2', 'c49d360bbd069d00e2a57804f2a123d9') + version('6.1.405', 'd220ff58f2c72ed606e6d0297c2f2a7c') + version('6.1', '7fd0f915adc7c0dab89772884268b030') + version('6.0', '9d9ca84d489af6b3f54639dd97af3774') + + feature_sets = ('huge', 'big', 'normal', 'small', 'tiny') + for fs in feature_sets: + variant(fs, default=False, description="Use '%s' feature set" % fs) + + variant('python', default=False, description="build with Python") + depends_on('python', when='+python') + + variant('ruby', default=False, description="build with Ruby") + depends_on('ruby', when='+ruby') + + variant('cscope', default=False, description="build with cscope support") + depends_on('cscope', when='+cscope') + + variant('gui', default=False, description="build with gui (gvim)") + # virtual dependency? + + def install(self, spec, prefix): + feature_set = None + for fs in self.feature_sets: + if "+" + fs in spec: + if feature_set is not None: + tty.error("Only one feature set allowed, both %s and %s specified" + % (feature_set, fs)) + feature_set = fs + if '+gui' in spec: + if feature_set is not None: + if feature_set is not 'huge': + tty.error("+gui variant requires 'huge' feature set, %s was specified" + % feature_set) + feature_set = 'huge' + if feature_set is None: + feature_set = 'normal' + + configure_args = [] + configure_args.append("--with-features=" + feature_set) + + if '+python' in spec: + configure_args.append("--enable-pythoninterp=yes") + else: + configure_args.append("--enable-pythoninterp=dynamic") + + if '+ruby' in spec: + configure_args.append("--enable-rubyinterp=yes") + else: + configure_args.append("--enable-rubyinterp=dynamic") + + if '+gui' in spec: + configure_args.append("--enable-gui=auto") + + if '+cscope' in spec: + configure_args.append("--enable-cscope") + + configure("--prefix=%s" % prefix, *configure_args) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/vtk/package.py b/var/spack/repos/builtin/packages/vtk/package.py new file mode 100644 index 0000000000..4a27a8fedb --- /dev/null +++ b/var/spack/repos/builtin/packages/vtk/package.py @@ -0,0 +1,40 @@ +from spack import * + +class Vtk(Package): + """The Visualization Toolkit (VTK) is an open-source, freely + available software system for 3D computer graphics, image + processing and visualization. """ + homepage = "http://www.vtk.org" + url = "http://www.vtk.org/files/release/6.1/VTK-6.1.0.tar.gz" + + version('6.1.0', '25e4dfb3bad778722dcaec80cd5dab7d') + + depends_on("qt") + + def install(self, spec, prefix): + with working_dir('spack-build', create=True): + cmake_args = [ + "..", + "-DBUILD_SHARED_LIBS=ON", + # Disable wrappers for other languages. + "-DVTK_WRAP_PYTHON=OFF", + "-DVTK_WRAP_JAVA=OFF", + "-DVTK_WRAP_TCL=OFF"] + cmake_args.extend(std_cmake_args) + + # Enable Qt support here. 
+ cmake_args.extend([ + "-DQT_QMAKE_EXECUTABLE:PATH=%s/qmake" % spec['qt'].prefix.bin, + "-DVTK_Group_Qt:BOOL=ON", + # Ignore webkit because it's hard to build w/Qt + "-DVTK_Group_Qt=OFF", + "-DModule_vtkGUISupportQt:BOOL=ON", + "-DModule_vtkGUISupportQtOpenGL:BOOL=ON" + ]) + + if spec['qt'].satisfies('@5'): + cmake_args.append("-DVTK_QT_VERSION:STRING=5") + + cmake(*cmake_args) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/wget/package.py b/var/spack/repos/builtin/packages/wget/package.py new file mode 100644 index 0000000000..c8fd025122 --- /dev/null +++ b/var/spack/repos/builtin/packages/wget/package.py @@ -0,0 +1,21 @@ +from spack import * + +class Wget(Package): + """GNU Wget is a free software package for retrieving files using + HTTP, HTTPS and FTP, the most widely-used Internet protocols. It + is a non-interactive commandline tool, so it may easily be called + from scripts, cron jobs, terminals without X-Windows support, + etc.""" + + homepage = "http://www.gnu.org/software/wget/" + url = "http://ftp.gnu.org/gnu/wget/wget-1.16.tar.xz" + + version('1.16', 'fe102975ab3a6c049777883f1bb9ad07') + + depends_on("openssl") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, + "--with-ssl=openssl") + make() + make("install") diff --git a/var/spack/repos/builtin/packages/wx/package.py b/var/spack/repos/builtin/packages/wx/package.py new file mode 100644 index 0000000000..1813a8c8a5 --- /dev/null +++ b/var/spack/repos/builtin/packages/wx/package.py @@ -0,0 +1,24 @@ +from spack import * + +class Wx(Package): + """wxWidgets is a C++ library that lets developers create + applications for Windows, Mac OS X, Linux and other platforms + with a single code base. It has popular language bindings for + Python, Perl, Ruby and many other languages, and unlike other + cross-platform toolkits, wxWidgets gives applications a truly + native look and feel because it uses the platform's native API + rather than emulating the GUI. It's also extensive, free, + open-source and mature.""" + homepage = "http://www.wxwidgets.org/" + + version('2.8.12', '2fa39da14bc06ea86fe902579fedc5b1', + url="https://sourceforge.net/projects/wxwindows/files/2.8.12/wxWidgets-2.8.12.tar.gz") + version('3.0.1', 'dad1f1cd9d4c370cbc22700dc492da31', + url="https://sourceforge.net/projects/wxwindows/files/3.0.1/wxWidgets-3.0.1.tar.bz2") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, "--enable-unicode", "--disable-precomp-headers") + + make(parallel=False) + make("install") + diff --git a/var/spack/repos/builtin/packages/wxpropgrid/package.py b/var/spack/repos/builtin/packages/wxpropgrid/package.py new file mode 100644 index 0000000000..790cead517 --- /dev/null +++ b/var/spack/repos/builtin/packages/wxpropgrid/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Wxpropgrid(Package): + """wxPropertyGrid is a property sheet control for wxWidgets. 
In + other words, it is a specialized two-column grid for editing + properties such as strings, numbers, flagsets, string arrays, + and colours.""" + homepage = "http://wxpropgrid.sourceforge.net/" + url = "http://prdownloads.sourceforge.net/wxpropgrid/wxpropgrid-1.4.15-src.tar.gz" + + version('1.4.15', 'f44b5cd6fd60718bacfabbf7994f1e93') + + depends_on("wx") + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix, "--with-wxdir=%s" % spec['wx'].prefix.bin, "--enable-unicode") + + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/xcb-proto/package.py b/var/spack/repos/builtin/packages/xcb-proto/package.py new file mode 100644 index 0000000000..17a94bd892 --- /dev/null +++ b/var/spack/repos/builtin/packages/xcb-proto/package.py @@ -0,0 +1,15 @@ +from spack import * + +class XcbProto(Package): + """Protocol for libxcb""" + + homepage = "http://xcb.freedesktop.org/" + url = "http://xcb.freedesktop.org/dist/xcb-proto-1.11.tar.gz" + + version('1.11', 'c8c6cb72c84f58270f4db1f39607f66a') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/xz/package.py b/var/spack/repos/builtin/packages/xz/package.py new file mode 100644 index 0000000000..ba6c9733a7 --- /dev/null +++ b/var/spack/repos/builtin/packages/xz/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Xz(Package): + """XZ Utils is free general-purpose data compression software with + high compression ratio. XZ Utils were written for POSIX-like + systems, but also work on some not-so-POSIX systems. XZ Utils are + the successor to LZMA Utils.""" + homepage = "http://tukaani.org/xz/" + url = "http://tukaani.org/xz/xz-5.2.0.tar.bz2" + + version('5.2.0', '867cc8611760240ebf3440bd6e170bb9', + url = 'http://tukaani.org/xz/xz-5.2.0.tar.bz2') + version('5.2.2', 'f90c9a0c8b259aee2234c4e0d7fd70af', + url = 'http://tukaani.org/xz/xz-5.2.2.tar.bz2') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") + diff --git a/var/spack/repos/builtin/packages/yasm/package.py b/var/spack/repos/builtin/packages/yasm/package.py new file mode 100644 index 0000000000..d3a695b16d --- /dev/null +++ b/var/spack/repos/builtin/packages/yasm/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Yasm(Package): + """Yasm is a complete rewrite of the NASM-2.11.06 assembler. 
It + supports the x86 and AMD64 instruction sets, accepts NASM and + GAS assembler syntaxes and outputs binary, ELF32 and ELF64 + object formats.""" + homepage = "http://yasm.tortall.net" + url = "http://www.tortall.net/projects/yasm/releases/yasm-1.3.0.tar.gz" + + version('1.3.0', 'fc9e586751ff789b34b1f21d572d96af') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/repos/builtin/packages/zeromq/package.py b/var/spack/repos/builtin/packages/zeromq/package.py new file mode 100644 index 0000000000..b5a1e3d4cd --- /dev/null +++ b/var/spack/repos/builtin/packages/zeromq/package.py @@ -0,0 +1,20 @@ +from spack import * + +class Zeromq(Package): + """ The ZMQ networking/concurrency library and core API """ + homepage = "http://zguide.zeromq.org/" + url = "http://download.zeromq.org/zeromq-4.1.2.tar.gz" + + version('4.1.2', '159c0c56a895472f02668e692d122685') + version('4.1.1', '0a4b44aa085644f25c177f79dc13f253') + version('4.0.7', '9b46f7e7b0704b83638ef0d461fd59ab') + version('4.0.6', 'd47dd09ed7ae6e7fd6f9a816d7f5fdf6') + version('4.0.5', '73c39f5eb01b9d7eaf74a5d899f1d03d') + + depends_on("libsodium") + + def install(self, spec, prefix): + configure("--with-libsodium","--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/zlib/package.py b/var/spack/repos/builtin/packages/zlib/package.py new file mode 100644 index 0000000000..2770f781ac --- /dev/null +++ b/var/spack/repos/builtin/packages/zlib/package.py @@ -0,0 +1,18 @@ +from spack import * + +class Zlib(Package): + """zlib is designed to be a free, general-purpose, legally unencumbered -- + that is, not covered by any patents -- lossless data-compression library for + use on virtually any computer hardware and operating system. + """ + + homepage = "http://zlib.net" + url = "http://zlib.net/zlib-1.2.8.tar.gz" + + version('1.2.8', '44d667c142d7cda120332623eab69f40') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/packages/zsh/package.py b/var/spack/repos/builtin/packages/zsh/package.py new file mode 100644 index 0000000000..99ef9de2e5 --- /dev/null +++ b/var/spack/repos/builtin/packages/zsh/package.py @@ -0,0 +1,16 @@ +from spack import * + +class Zsh(Package): + """ The ZSH shell """ + homepage = "http://www.zsh.org" + url = "http://www.zsh.org/pub/zsh-5.0.8.tar.bz2" + + version('5.0.8', 'e6759e8dd7b714d624feffd0a73ba0fe') + + depends_on("pcre") + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + + make() + make("install") diff --git a/var/spack/repos/builtin/repo.yaml b/var/spack/repos/builtin/repo.yaml new file mode 100644 index 0000000000..54b282db6b --- /dev/null +++ b/var/spack/repos/builtin/repo.yaml @@ -0,0 +1,2 @@ +repo: + namespace: builtin -- cgit v1.2.3-70-g09d2 From 763d850a0e1766a0b915686504bc07f8f2a92058 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 26 Nov 2015 14:23:12 -0800 Subject: Add tests for importing packages directly. Added test for, e.g.: import spack.pkg.builtin.mock.mpich import spack.pkg.builtin.mock.mpich as mpich from spack.pkg.builtin.mock.mpich import Mpich Among others. These ensure that direct package imports work so that packages can be extended. 
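For illustration only: a package in a separate, site-specific repository could build on these imports to extend an existing package. The sketch below is hypothetical (the repository layout, the class name, and the install body are not part of this commit); it simply reuses the mock mpich class that the new tests import:

    # Hypothetical packages/mpich/package.py in a site-specific repo.
    from spack import *
    from spack.pkg.builtin.mock.mpich import Mpich as BuiltinMpich

    class Mpich(BuiltinMpich):
        """Site-local mpich that inherits from the builtin package."""

        def install(self, spec, prefix):
            # Versions, dependencies, etc. are inherited unchanged from the
            # builtin class; only the build steps are overridden here.
            configure("--prefix=%s" % prefix)
            make()
            make("install")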
--- lib/spack/spack/test/packages.py | 42 ++++++++++++++++++++++++++++++++++++++-- 1 file changed, 40 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py index e39def2ff2..f023516eba 100644 --- a/lib/spack/spack/test/packages.py +++ b/lib/spack/spack/test/packages.py @@ -46,7 +46,8 @@ class PackagesTest(MockPackagesTest): def test_package_filename(self): repo = Repo(spack.mock_packages_path) filename = repo.filename_for_package_name('mpich') - self.assertEqual(filename, join_path(spack.mock_packages_path, 'mpich', 'package.py')) + self.assertEqual(filename, + join_path(spack.mock_packages_path, 'packages', 'mpich', 'package.py')) def test_package_name(self): @@ -57,7 +58,9 @@ class PackagesTest(MockPackagesTest): def test_nonexisting_package_filename(self): repo = Repo(spack.mock_packages_path) filename = repo.filename_for_package_name('some-nonexisting-package') - self.assertEqual(filename, join_path(spack.mock_packages_path, 'some-nonexisting-package', 'package.py')) + self.assertEqual( + filename, + join_path(spack.mock_packages_path, 'packages', 'some-nonexisting-package', 'package.py')) def test_package_class_names(self): @@ -66,3 +69,38 @@ class PackagesTest(MockPackagesTest): self.assertEqual('PmgrCollective', mod_to_class('pmgr-collective')) self.assertEqual('Pmgrcollective', mod_to_class('PmgrCollective')) self.assertEqual('_3db', mod_to_class('3db')) + + + # + # Below tests target direct imports of spack packages from the + # spack.pkg namespace + # + + def test_import_package(self): + import spack.pkg.builtin.mock.mpich + + + def test_import_package_as(self): + import spack.pkg.builtin.mock.mpich as mp + + + def test_import_class_from_package(self): + from spack.pkg.builtin.mock.mpich import Mpich + + + def test_import_module_from_package(self): + from spack.pkg.builtin.mock import mpich + + + def test_import_namespace_container_modules(self): + import spack.pkg + import spack.pkg as p + from spack import pkg + + import spack.pkg.builtin + import spack.pkg.builtin as b + from spack.pkg import builtin + + import spack.pkg.builtin.mock + import spack.pkg.builtin.mock as m + from spack.pkg.builtin import mock -- cgit v1.2.3-70-g09d2 From a338e0efd5464a40d2e206ed00af99e5dcc53a79 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 28 Nov 2015 16:21:31 -0800 Subject: Fix spack.db -> spack.repo in bin/spack --- bin/spack | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bin/spack b/bin/spack index efa1ccc281..18fd1f0f34 100755 --- a/bin/spack +++ b/bin/spack @@ -113,8 +113,8 @@ def main(): spack.spack_working_dir = working_dir if args.mock: - from spack.packages import PackageFinder - spack.db.swap(PackageFinder(spack.mock_packages_path)) + from spack.repository import RepoPath + spack.repo.swap(RepoPath(spack.mock_packages_path)) # If the user asked for it, don't check ssl certs. if args.insecure: -- cgit v1.2.3-70-g09d2 From 73ef06018e2a6542d90f0b65a42e4a0287525b7a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 28 Nov 2015 16:26:23 -0800 Subject: Integrate namespace attribute into spec, spec DAG, spec YAML. 
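A rough sketch of the intended behavior (illustrative only; the namespace handling lives in the spec changes below, and the mock packages are only available in test configurations):

    from spack.spec import Spec

    s = Spec('builtin.mock.mpich')   # namespace-qualified spec string
    # s.name      -> 'mpich'
    # s.namespace -> 'builtin.mock'
    # s.fullname  -> 'builtin.mock.mpich'

    # An unqualified constraint is still satisfied by a namespaced spec of
    # the same package, but mismatched namespaces are not:
    Spec('builtin.mock.mpich').satisfies('mpich')          # True
    Spec('builtin.mock.mpich').satisfies('builtin.mpich')  # False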
--- lib/spack/spack/config.py | 22 ++++++++---- lib/spack/spack/database.py | 2 ++ lib/spack/spack/directory_layout.py | 9 +++-- lib/spack/spack/repository.py | 29 ++++++++++----- lib/spack/spack/spec.py | 65 ++++++++++++++++++++++++++-------- lib/spack/spack/test/spec_semantics.py | 33 +++++++++++++++-- 6 files changed, 125 insertions(+), 35 deletions(-) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 66da91f629..36bf8a7fc3 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -115,7 +115,7 @@ class _ConfigCategory: self.result_dict = {} _config_sections[name] = self -_ConfigCategory('config', 'config.yaml', True, False) +_ConfigCategory('repos', 'repos.yaml', True, True) _ConfigCategory('compilers', 'compilers.yaml', True, True) _ConfigCategory('mirrors', 'mirrors.yaml', True, True) _ConfigCategory('view', 'views.yaml', True, True) @@ -212,7 +212,7 @@ def substitute_spack_prefix(path): return path.replace('$spack', spack.prefix) -def get_config(category='config'): +def get_config(category): """Get the confguration tree for a category. Strips off the top-level category entry from the dict @@ -233,6 +233,10 @@ def get_config(category='config'): continue result = result[category.name] + # ignore empty sections for easy commenting of single-line configs. + if result is None: + continue + category.files_read_from.insert(0, path) if category.merge: category.result_dict = _merge_yaml(category.result_dict, result) @@ -266,12 +270,18 @@ def get_compilers_config(arch=None): def get_repos_config(): - config = get_config() - if 'repos' not in config: + repo_list = get_config('repos') + if repo_list is None: return [] - repo_list = config['repos'] - return [substitute_spack_prefix(repo) for repo in repo_list] + if not isinstance(repo_list, list): + tty.die("Bad repository configuration. 'repos' element does not contain a list.") + + def expand_repo_path(path): + path = substitute_spack_prefix(path) + path = os.path.expanduser(path) + return path + return [expand_repo_path(repo) for repo in repo_list] def get_mirror_config(): diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 8e380083f3..5b3bd7502f 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -211,6 +211,8 @@ class Database(object): child = self._read_spec_from_yaml(dep_hash, installs, hash_key) spec._add_dependency(child) + spec._normal = True + spec._concrete = True return spec diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index da8f4187cc..30e2c93950 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -211,9 +211,12 @@ class YamlDirectoryLayout(DirectoryLayout): with open(path) as f: spec = Spec.from_yaml(f) - # Specs read from actual installations are always concrete - spec._normal = True - spec._concrete = True + # Specs read from actual installs are always concrete, so mark + # all parts of the spec. + for s in spec.traverse(): + s._normal = True + s._concrete = True + return spec diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index a2c0bbe147..1b09d63cdd 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -238,7 +238,16 @@ class RepoPath(object): Raises UnknownPackageError if not found. """ - return self.repo_for_pkg(spec.name).get(spec) + # if the spec has a fully qualified namespace, we grab it + # directly and ignore overlay precedence. 
+ if spec.namespace: + fullspace = '%s.%s' % (self.super_namespace, spec.namespace) + if not fullspace in self.by_namespace: + raise UnknownPackageError( + "No configured repository contains package %s." % spec.fullname) + return self.by_namespace[fullspace].get(spec) + else: + return self.repo_for_pkg(spec.name).get(spec) def dirname_for_package_name(self, pkg_name): @@ -454,20 +463,24 @@ class Repo(object): if spec.virtual: raise UnknownPackageError(spec.name) - if new and spec in self._instances: - del self._instances[spec] + if spec.namespace and spec.namespace != self.namespace: + raise UnknownPackageError("Repository %s does not contain package %s." + % (self.namespace, spec.fullname)) - if not spec in self._instances: + if new or spec not in self._instances: PackageClass = self._get_pkg_class(spec.name) try: - copy = spec.copy() - self._instances[copy] = PackageClass(copy) + package = PackageClass(spec.copy()) + self._instances[spec] = package + return package + except Exception, e: if spack.debug: sys.excepthook(*sys.exc_info()) - raise FailedConstructorError(spec.name, *sys.exc_info()) + raise FailedConstructorError(spec.fullname, *sys.exc_info()) - return self._instances[spec] + else: + return self._instances[spec] def purge(self): diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index bb0f194c13..303df6df38 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -465,6 +465,13 @@ class Spec(object): self.dependencies[spec.name] = spec spec.dependents[self.name] = self + # + # Public interface + # + @property + def fullname(self): + return '%s.%s' % (self.namespace, self.name) if self.namespace else self.name + @property def root(self): @@ -518,6 +525,7 @@ class Spec(object): return True self._concrete = bool(not self.virtual + and self.namespace is not None and self.versions.concrete and self.variants.concrete and self.architecture @@ -658,6 +666,12 @@ class Spec(object): 'dependencies' : dict((d, self.dependencies[d].dag_hash()) for d in sorted(self.dependencies)) } + + # Older concrete specs do not have a namespace. Omit for + # consistent hashing. + if not self.concrete or self.namespace: + d['namespace'] = self.namespace + if self.compiler: d.update(self.compiler.to_dict()) else: @@ -682,6 +696,7 @@ class Spec(object): node = node[name] spec = Spec(name) + spec.namespace = node.get('namespace', None) spec.versions = VersionList.from_dict(node) spec.architecture = node['arch'] @@ -834,7 +849,20 @@ class Spec(object): changed = any(changes) force=True - self._concrete = True + for s in self.traverse(): + # After concretizing, assign namespaces to anything left. + # Note that this doesn't count as a "change". The repository + # configuration is constant throughout a spack run, and + # normalize and concretize evaluate Packages using Repo.get(), + # which respects precedence. So, a namespace assignment isn't + # changing how a package name would have been interpreted and + # we can do it as late as possible to allow as much + # compatibility across repositories as possible. + if s.namespace is None: + s.namespace = spack.repo.repo_for_pkg(s.name).namespace + + # Mark everything in the spec as concrete, as well. + s._concrete = True def concretized(self): @@ -909,7 +937,7 @@ class Spec(object): the dependency. If no conditions are True (and we don't depend on it), return None. """ - pkg = spack.repo.get(self.name) + pkg = spack.repo.get(self.fullname) conditions = pkg.dependencies[name] # evaluate when specs to figure out constraints on the dependency. 
@@ -1037,7 +1065,7 @@ class Spec(object): any_change = False changed = True - pkg = spack.repo.get(self.name) + pkg = spack.repo.get(self.fullname) while changed: changed = False for dep_name in pkg.dependencies: @@ -1058,18 +1086,17 @@ class Spec(object): the root, and ONLY the ones that were explicitly provided are there. Normalization turns a partial flat spec into a DAG, where: - 1. ALL dependencies of the root package are in the DAG. - 2. Each node's dependencies dict only contains its direct deps. + 1. Known dependencies of the root package are in the DAG. + 2. Each node's dependencies dict only contains its known direct deps. 3. There is only ONE unique spec for each package in the DAG. * This includes virtual packages. If there a non-virtual package that provides a virtual package that is in the spec, then we replace the virtual package with the non-virtual one. - 4. The spec DAG matches package DAG, including default variant values. - TODO: normalize should probably implement some form of cycle detection, to ensure that the spec is actually a DAG. + """ if self._normal and not force: return False @@ -1115,7 +1142,7 @@ class Spec(object): for spec in self.traverse(): # Don't get a package for a virtual name. if not spec.virtual: - spack.repo.get(spec.name) + spack.repo.get(spec.fullname) # validate compiler in addition to the package name. if spec.compiler: @@ -1138,6 +1165,10 @@ class Spec(object): if not self.name == other.name: raise UnsatisfiableSpecNameError(self.name, other.name) + if other.namespace is not None: + if self.namespace is not None and other.namespace != self.namespace: + raise UnsatisfiableSpecNameError(self.fullname, other.fullname) + if not self.versions.overlaps(other.versions): raise UnsatisfiableVersionSpecError(self.versions, other.versions) @@ -1181,7 +1212,7 @@ class Spec(object): # TODO: might want more detail than this, e.g. specific deps # in violation. if this becomes a priority get rid of this - # check and be more specici about what's wrong. + # check and be more specific about what's wrong. if not other.satisfies_dependencies(self): raise UnsatisfiableDependencySpecError(other, self) @@ -1247,7 +1278,7 @@ class Spec(object): # A concrete provider can satisfy a virtual dependency. if not self.virtual and other.virtual: - pkg = spack.repo.get(self.name) + pkg = spack.repo.get(self.fullname) if pkg.provides(other.name): for provided, when_spec in pkg.provided.items(): if self.satisfies(when_spec, deps=False, strict=strict): @@ -1259,6 +1290,11 @@ class Spec(object): if self.name != other.name: return False + # namespaces either match, or other doesn't require one. + if other.namespace is not None: + if self.namespace is not None and self.namespace != other.namespace: + return False + if self.versions and other.versions: if not self.versions.satisfies(other.versions, strict=strict): return False @@ -1476,8 +1512,8 @@ class Spec(object): def _cmp_node(self): """Comparison key for just *this node* and not its deps.""" - return (self.name, self.versions, self.variants, - self.architecture, self.compiler) + return (self.name, self.namespace, self.versions, + self.variants, self.architecture, self.compiler) def eq_node(self, other): @@ -1507,7 +1543,7 @@ class Spec(object): in the format string. The format strings you can provide are:: $_ Package name - $. Long package name + $. 
Full package name (with namespace) $@ Version $% Compiler $%@ Compiler & compiler version @@ -1556,8 +1592,7 @@ class Spec(object): if c == '_': out.write(fmt % self.name) elif c == '.': - longname = '%s.%s.%s' % (self.namespace, self.name) if self.namespace else self.name - out.write(fmt % longname) + out.write(fmt % self.fullname) elif c == '@': if self.versions and self.versions != _any_version: write(fmt % (c + str(self.versions)), c) diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index 6666dbbb52..87b7b628ed 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -35,7 +35,10 @@ class SpecSematicsTest(MockPackagesTest): # ================================================================================ def check_satisfies(self, spec, anon_spec, concrete=False): left = Spec(spec, concrete=concrete) - right = parse_anonymous_spec(anon_spec, left.name) + try: + right = Spec(anon_spec) # if it's not anonymous, allow it. + except: + right = parse_anonymous_spec(anon_spec, left.name) # Satisfies is one-directional. self.assertTrue(left.satisfies(right)) @@ -48,7 +51,10 @@ class SpecSematicsTest(MockPackagesTest): def check_unsatisfiable(self, spec, anon_spec, concrete=False): left = Spec(spec, concrete=concrete) - right = parse_anonymous_spec(anon_spec, left.name) + try: + right = Spec(anon_spec) # if it's not anonymous, allow it. + except: + right = parse_anonymous_spec(anon_spec, left.name) self.assertFalse(left.satisfies(right)) self.assertFalse(left.satisfies(anon_spec)) @@ -88,6 +94,28 @@ class SpecSematicsTest(MockPackagesTest): self.check_satisfies('libdwarf^libelf@0.8.13', '^libelf@0:1') + def test_satisfies_namespace(self): + self.check_satisfies('builtin.mpich', 'mpich') + self.check_satisfies('builtin.mock.mpich', 'mpich') + + # TODO: only works for deps now, but shouldn't we allow this for root spec? + # self.check_satisfies('builtin.mock.mpich', 'mpi') + + self.check_satisfies('builtin.mock.mpich', 'builtin.mock.mpich') + + self.check_unsatisfiable('builtin.mock.mpich', 'builtin.mpich') + + + def test_satisfies_namespaced_dep(self): + """Ensure spec from same or unspecified namespace satisfies namespace constraint.""" + self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpich') + + self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpi') + self.check_satisfies('mpileaks ^builtin.mock.mpich', '^builtin.mock.mpich') + + self.check_unsatisfiable('mpileaks ^builtin.mock.mpich', '^builtin.mpich') + + def test_satisfies_compiler(self): self.check_satisfies('foo%gcc', '%gcc') self.check_satisfies('foo%intel', '%intel') @@ -327,4 +355,3 @@ class SpecSematicsTest(MockPackagesTest): self.check_constrain_not_changed('libelf^foo+debug', 'libelf^foo+debug') self.check_constrain_not_changed('libelf^foo~debug', 'libelf^foo~debug') self.check_constrain_not_changed('libelf^foo=bgqos_0', 'libelf^foo=bgqos_0') - -- cgit v1.2.3-70-g09d2 From dfcf567de0cc944f7cdd0bcdee3b841bd0fbb900 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 28 Nov 2015 19:00:24 -0800 Subject: Add a cleaned up repo command. 
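The cleanup below drops the old add/remove subcommands (left as FIXMEs pending the new config architecture) and makes create namespace-aware. As a rough sketch of what repo_create leaves on disk -- the file and directory names here are assumptions, since repo_config_name and packages_dir_name come from spack.repository and are not shown in this patch:

    import os

    def sketch_repo_create(root, namespace):
        # Assumed layout: <root>/packages/ plus a small <root>/repo.yaml.
        os.makedirs(os.path.join(root, 'packages'))
        with open(os.path.join(root, 'repo.yaml'), 'w') as config:
            config.write("repo:\n")
            config.write("  namespace: '%s'\n" % namespace)

    # e.g. sketch_repo_create('myrepo', 'myrepo'); the real command then
    # suggests listing the new path under 'repos:' in ~/.spack/repos.yaml.
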
--- lib/spack/spack/cmd/repo.py | 139 ++++++++++++++++++++++++-------------------- 1 file changed, 75 insertions(+), 64 deletions(-) diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py index 395aa90bed..8a3ea5989e 100644 --- a/lib/spack/spack/cmd/repo.py +++ b/lib/spack/spack/cmd/repo.py @@ -22,103 +22,114 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +import os +import re +import shutil + from external import argparse import llnl.util.tty as tty -from llnl.util.tty.color import colorize -from llnl.util.tty.colify import colify -from llnl.util.lang import index_by from llnl.util.filesystem import join_path, mkdirp import spack.spec import spack.config from spack.util.environment import get_path -from spack.repository import repo_config_name - -import os -import exceptions -from contextlib import closing +from spack.repository import packages_dir_name, repo_config_name, Repo -description = "Manage package sources" +description = "Manage package source repositories." def setup_parser(subparser): - sp = subparser.add_subparsers( - metavar='SUBCOMMAND', dest='repo_command') - - add_parser = sp.add_parser('add', help=repo_add.__doc__) - add_parser.add_argument('directory', help="Directory containing the packages.") + sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command') + # Create create_parser = sp.add_parser('create', help=repo_create.__doc__) - create_parser.add_argument('directory', help="Directory containing the packages.") - create_parser.add_argument('name', help="Name of new package repository.") - - remove_parser = sp.add_parser('remove', help=repo_remove.__doc__) - remove_parser.add_argument('name') + create_parser.add_argument( + 'namespace', help="Namespace to identify packages in the repository.") + create_parser.add_argument( + 'directory', help="Directory to create the repo in. Defaults to same as namespace.", nargs='?') + # List list_parser = sp.add_parser('list', help=repo_list.__doc__) -def add_to_config(dir): - config = spack.config.get_config() - user_config = spack.config.get_config('user') - orig = None - if config.has_value('repo', '', 'directories'): - orig = config.get_value('repo', '', 'directories') - if orig and dir in orig.split(':'): - return False +def repo_create(args): + """Create a new package repo for a particular namespace.""" + namespace = args.namespace + if not re.match(r'\w[\.\w-]*', namespace): + tty.die("Invalid namespace: '%s'" % namespace) + + root = args.directory + if not root: + root = namespace + + existed = False + if os.path.exists(root): + if os.path.isfile(root): + tty.die('File %s already exists and is not a directory' % root) + elif os.path.isdir(root): + if not os.access(root, os.R_OK | os.W_OK): + tty.die('Cannot create new repo in %s: cannot access directory.' % root) + if os.listdir(root): + tty.die('Cannot create new repo in %s: directory is not empty.' % root) + existed = True + + full_path = os.path.realpath(root) + parent = os.path.dirname(full_path) + if not os.access(parent, os.R_OK | os.W_OK): + tty.die("Cannot create repository in %s: can't access parent!" 
% root) - newsetting = orig + ':' + dir if orig else dir - user_config.set_value('repo', '', 'directories', newsetting) - user_config.write() - return True + try: + config_path = os.path.join(root, repo_config_name) + packages_path = os.path.join(root, packages_dir_name) + mkdirp(packages_path) + with open(config_path, 'w') as config: + config.write("repo:\n") + config.write(" namespace: '%s'\n" % namespace) -def repo_add(args): - """Add package sources to the Spack configuration.""" - if not add_to_config(args.directory): - tty.die('Repo directory %s already exists in the repo list' % dir) + except (IOError, OSError) as e: + tty.die('Failed to create new repository in %s.' % root, + "Caused by %s: %s" % (type(e), e)) + # try to clean up. + if existed: + shutil.rmtree(config_path, ignore_errors=True) + shutil.rmtree(packages_path, ignore_errors=True) + else: + shutil.rmtree(root, ignore_errors=True) -def repo_create(args): - """Create a new package repo at a directory and name""" - dir = args.directory - name = args.name - - if os.path.exists(dir) and not os.path.isdir(dir): - tty.die('File %s already exists and is not a directory' % dir) - if not os.path.exists(dir): - try: - mkdirp(dir) - except exceptions.OSError, e: - tty.die('Failed to create new directory %s' % dir) - path = os.path.join(dir, repo_config_filename) - try: - with closing(open(path, 'w')) as repofile: - repofile.write(name + '\n') - except exceptions.IOError, e: - tty.die('Could not create new file %s' % path) + tty.msg("Created repo with namespace '%s'." % namespace) + tty.msg("To register it with Spack, add a line like this to ~/.spack/repos.yaml:", + 'repos:', + ' - ' + full_path) - if not add_to_config(args.directory): - tty.warn('Repo directory %s already exists in the repo list' % dir) + +def repo_add(args): + """Remove a package source from the Spack configuration""" + # FIXME: how to deal with this with the current config architecture? + # FIXME: Repos do not have mnemonics, which I assumed would be simpler... should they have them after all? def repo_remove(args): """Remove a package source from the Spack configuration""" - pass + # FIXME: see above. def repo_list(args): """List package sources and their mnemoics""" - root_names = spack.repo.repos - max_len = max(len(s[0]) for s in root_names) - fmt = "%%-%ds%%s" % (max_len + 4) - for root in root_names: - print fmt % (root[0], root[1]) + roots = spack.config.get_repos_config() + repos = [Repo(r) for r in roots] + + msg = "%d package repositor" % len(repos) + msg += "y." if len(repos) == 1 else "ies." + tty.msg(msg) + max_ns_len = max(len(r.namespace) for r in repos) + for repo in repos: + fmt = "%%-%ds%%s" % (max_ns_len + 4) + print fmt % (repo.namespace, repo.root) def repo(parser, args): - action = { 'add' : repo_add, - 'create' : repo_create, - 'remove' : repo_remove, + action = { 'create' : repo_create, 'list' : repo_list } action[args.repo_command](args) -- cgit v1.2.3-70-g09d2 From cc349e9a32edcada79ada4b6c3cc71034cff7974 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 28 Nov 2015 19:11:51 -0800 Subject: Add default repos.yaml --- etc/spack/repos.yaml | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 etc/spack/repos.yaml diff --git a/etc/spack/repos.yaml b/etc/spack/repos.yaml new file mode 100644 index 0000000000..2d4ff54ce6 --- /dev/null +++ b/etc/spack/repos.yaml @@ -0,0 +1,8 @@ +# ------------------------------------------------------------------------- +# This is the default spack repository configuration. 
+# +# Changes to this file will affect all users of this spack install, +# although users can override these settings in their ~/.spack/repos.yaml. +# ------------------------------------------------------------------------- +repos: + - $spack/var/spack/repos/builtin -- cgit v1.2.3-70-g09d2 From 34401cf0c3ee019fd6b8fb739e2a511c6de0870c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 25 Dec 2015 14:00:33 -0800 Subject: Rework Spack config: keep user & site config in memory. - User and site config are now kept separately in memory. - Merging is done on demand when client code requests the configuration. - Allows user/site config to be updated independently of each other by commands. - simplifies config logic (no more tracking merged files) --- .gitignore | 1 + lib/spack/spack/cmd/compiler.py | 7 +- lib/spack/spack/cmd/config.py | 12 +- lib/spack/spack/cmd/mirror.py | 4 +- lib/spack/spack/compilers/__init__.py | 76 +++--- lib/spack/spack/config.py | 417 +++++++++++++++-------------- lib/spack/spack/spec.py | 1 + lib/spack/spack/stage.py | 5 +- lib/spack/spack/test/config.py | 21 +- lib/spack/spack/test/database.py | 3 +- lib/spack/spack/test/mock_packages_test.py | 6 +- 11 files changed, 285 insertions(+), 268 deletions(-) diff --git a/.gitignore b/.gitignore index 1c6ca4c99e..4b97de5d50 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,4 @@ /etc/spackconfig /share/spack/dotkit /share/spack/modules +/TAGS diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index 2a64dc914e..6efc9a3347 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -65,10 +65,11 @@ def compiler_add(args): if c.spec not in spack.compilers.all_compilers()] if compilers: - spack.compilers.add_compilers_to_config('user', *compilers) + spack.compilers.add_compilers_to_config('user', compilers) n = len(compilers) - tty.msg("Added %d new compiler%s to %s" % ( - n, 's' if n > 1 else '', spack.config.get_config_scope_filename('user', 'compilers'))) + s = 's' if n > 1 else '' + filename = spack.config.get_config_filename('user', 'compilers') + tty.msg("Added %d new compiler%s to %s" % (n, s, filename)) colify(reversed(sorted(c.spec for c in compilers)), indent=4) else: tty.msg("Found no new compilers") diff --git a/lib/spack/spack/cmd/config.py b/lib/spack/spack/cmd/config.py index 8c18f88b64..603023d891 100644 --- a/lib/spack/spack/cmd/config.py +++ b/lib/spack/spack/cmd/config.py @@ -44,22 +44,22 @@ def setup_parser(subparser): sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command') get_parser = sp.add_parser('get', help='Print configuration values.') - get_parser.add_argument('category', help="Configuration category to print.") + get_parser.add_argument('section', help="Configuration section to print.") edit_parser = sp.add_parser('edit', help='Edit configuration file.') - edit_parser.add_argument('category', help="Configuration category to edit") + edit_parser.add_argument('section', help="Configuration section to edit") def config_get(args): - spack.config.print_category(args.category) + spack.config.print_section(args.section) def config_edit(args): if not args.scope: args.scope = 'user' - if not args.category: - args.category = None - config_file = spack.config.get_config_scope_filename(args.scope, args.category) + if not args.section: + args.section = None + config_file = spack.config.get_config_filename(args.scope, args.section) spack.editor(config_file) diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index 
9a507e69db..2b25793927 100644 --- a/lib/spack/spack/cmd/mirror.py +++ b/lib/spack/spack/cmd/mirror.py @@ -76,7 +76,7 @@ def mirror_add(args): url = 'file://' + url mirror_dict = { args.name : url } - spack.config.add_to_mirror_config({ args.name : url }) + spack.config.update_config('mirrors', { args.name : url }, 'user') def mirror_remove(args): @@ -90,7 +90,7 @@ def mirror_remove(args): def mirror_list(args): """Print out available mirrors to the console.""" - sec_names = spack.config.get_mirror_config() + sec_names = spack.config.get_config('mirrors') if not sec_names: tty.msg("No mirrors configured.") return diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index b7b021a1ac..a1980f1cdf 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -35,6 +35,7 @@ import spack import spack.error import spack.spec import spack.config +import spack.architecture from spack.util.multiproc import parmap from spack.compiler import Compiler @@ -55,23 +56,48 @@ def _auto_compiler_spec(function): return converter -def _get_config(): - """Get a Spack config, but make sure it has compiler configuration - first.""" +def _to_dict(compiler): + """Return a dict version of compiler suitable to insert in YAML.""" + return { + str(compiler.spec) : dict( + (attr, getattr(compiler, attr, None)) + for attr in _required_instance_vars) + } + + +def get_compiler_config(arch=None): + """Return the compiler configuration for the specified architecture. + + If the compiler configuration designates some compilers for + 'all' architectures, those are merged into the result, as well. + + """ # If any configuration file has compilers, just stick with the # ones already configured. - config = spack.config.get_compilers_config() - existing = [spack.spec.CompilerSpec(s) - for s in config] - if existing: - return config + config = spack.config.get_config('compilers') + + if arch is None: + arch = spack.architecture.sys_type() - compilers = find_compilers(*get_path('PATH')) - add_compilers_to_config('user', *compilers) + if arch not in config: + config[arch] = {} + compilers = find_compilers(*get_path('PATH')) + for compiler in compilers: + config[arch].update(_to_dict(compiler)) + spack.config.update_config('compilers', config, 'user') - # After writing compilers to the user config, return a full config - # from all files. - return spack.config.get_compilers_config() + # Merge 'all' compilers with arch-specific ones. + merged_config = config.get('all', {}) + merged_config = spack.config._merge_yaml(merged_config, config[arch]) + + return merged_config + + +def all_compilers(arch=None): + """Return a set of specs for all the compiler versions currently + available to build with. These are instances of CompilerSpec. + """ + return [spack.spec.CompilerSpec(s) for s in get_compiler_config(arch)] _cached_default_compiler = None @@ -123,20 +149,6 @@ def find_compilers(*path): return clist -def add_compilers_to_config(scope, *compilers): - compiler_config_tree = {} - for compiler in compilers: - compiler_entry = {} - for c in _required_instance_vars: - val = getattr(compiler, c) - if not val: - val = "None" - compiler_entry[c] = val - compiler_config_tree[str(compiler.spec)] = compiler_entry - spack.config.add_to_compiler_config(compiler_config_tree, scope) - - - def supported_compilers(): """Return a set of names of compilers supported by Spack. 
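To make the merge performed by get_compiler_config() above concrete, here is a rough illustration using plain dicts (the real code calls spack.config._merge_yaml, which merges recursively rather than doing a shallow update; the compiler entries are just sample values):

    all_scope  = {'clang@3.3': {'cc': '/path/to/clang', 'cxx': '/path/to/clang++'}}
    arch_scope = {'gcc@4.5.0': {'cc': '/path/to/gcc', 'cxx': '/path/to/g++'}}

    merged = dict(all_scope)   # start from compilers listed under 'all'
    merged.update(arch_scope)  # arch-specific entries win on any conflict
    # all_compilers() then builds a CompilerSpec from each key of merged.
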
@@ -152,14 +164,6 @@ def supported(compiler_spec): return compiler_spec.name in supported_compilers() -def all_compilers(): - """Return a set of specs for all the compiler versions currently - available to build with. These are instances of CompilerSpec. - """ - configuration = _get_config() - return [spack.spec.CompilerSpec(s) for s in configuration] - - @_auto_compiler_spec def find(compiler_spec): """Return specs of available compilers that match the supplied @@ -172,7 +176,7 @@ def compilers_for_spec(compiler_spec): """This gets all compilers that satisfy the supplied CompilerSpec. Returns an empty list if none are found. """ - config = _get_config() + config = get_compiler_config() def get_compiler(cspec): items = config[str(cspec)] diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 36bf8a7fc3..7d7a87c7dc 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -67,25 +67,54 @@ In this example, entries like ''compilers'' and ''xlc@12.1'' are used to categorize entries beneath them in the tree. At the root of the tree, entries like ''cc'' and ''cxx'' are specified as name/value pairs. -Spack returns these trees as nested dicts. The dict for the above example -would looks like: - - { 'compilers' : - { 'chaos_5_x86_64_ib' : - { 'gcc@4.4.7' : - { 'cc' : '/usr/bin/gcc', - 'cxx' : '/usr/bin/g++' - 'f77' : '/usr/bin/gfortran' - 'fc' : '/usr/bin/gfortran' } - } - { 'bgqos_0' : - { 'cc' : '/usr/local/bin/mpixlc' } - } - } - -Some convenience functions, like get_mirrors_config and -``get_compilers_config`` may strip off the top-levels of the tree and -return subtrees. +``config.get_config()`` returns these trees as nested dicts, but it +strips the first level off. So, ``config.get_config('compilers')`` +would return something like this for the above example: + + { 'chaos_5_x86_64_ib' : + { 'gcc@4.4.7' : + { 'cc' : '/usr/bin/gcc', + 'cxx' : '/usr/bin/g++' + 'f77' : '/usr/bin/gfortran' + 'fc' : '/usr/bin/gfortran' } + } + { 'bgqos_0' : + { 'cc' : '/usr/local/bin/mpixlc' } } + +Likewise, the ``mirrors.yaml`` file's first line must be ``mirrors:``, +but ``get_config()`` strips that off too. + +Precedence +=============================== + +``config.py`` routines attempt to recursively merge configuration +across scopes. So if there are ``compilers.py`` files in both the +site scope and the user scope, ``get_config('compilers')`` will return +merged dictionaries of *all* the compilers available. If a user +compiler conflicts with a site compiler, Spack will overwrite the site +configuration wtih the user configuration. If both the user and site +``mirrors.yaml`` files contain lists of mirrors, then ``get_config()`` +will return a concatenated list of mirrors, with the user config items +first. + +Sometimes, it is useful to *completely* override a site setting with a +user one. To accomplish this, you can use *two* colons at the end of +a key in a configuration file. For example, this: + + compilers:: + chaos_5_x86_64_ib: + gcc@4.4.7: + cc: /usr/bin/gcc + cxx: /usr/bin/g++ + f77: /usr/bin/gfortran + fc: /usr/bin/gfortran + bgqos_0: + xlc@12.1: + cc: /usr/local/bin/mpixlc + ... + +Will make Spack take compilers *only* from the user configuration, and +the site configuration will be ignored. 
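To make this concrete, a short usage sketch (the architecture key comes from the example above; the compiler entry and paths are made-up values, and get_config/update_config are the entry points defined later in this file):

    import spack.config

    # Merged view across all configured scopes (site first, then user):
    compilers = spack.config.get_config('compilers')

    # Write an entry into the user scope only; other scopes are untouched:
    spack.config.update_config(
        'compilers',
        {'chaos_5_x86_64_ib': {'gcc@4.9.2': {'cc': '/opt/gcc/bin/gcc'}}},
        'user')
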
""" import os @@ -96,80 +125,132 @@ from external.yaml.error import MarkedYAMLError import llnl.util.tty as tty from llnl.util.filesystem import mkdirp -from llnl.util.lang import memoized import spack +from spack.error import SpackError +"""List of valid config sections.""" +valid_sections = ('compilers', 'mirrors', 'repos') -_config_sections = {} -class _ConfigCategory: - name = None - filename = None - merge = True - def __init__(self, name, filename, merge, strip): - self.name = name - self.filename = filename - self.merge = merge - self.strip = strip - self.files_read_from = [] - self.result_dict = {} - _config_sections[name] = self - -_ConfigCategory('repos', 'repos.yaml', True, True) -_ConfigCategory('compilers', 'compilers.yaml', True, True) -_ConfigCategory('mirrors', 'mirrors.yaml', True, True) -_ConfigCategory('view', 'views.yaml', True, True) -_ConfigCategory('order', 'orders.yaml', True, True) - -"""Names of scopes and their corresponding configuration files.""" -config_scopes = [('site', os.path.join(spack.etc_path, 'spack')), - ('user', os.path.expanduser('~/.spack'))] - -_compiler_by_arch = {} - -@memoized -def _read_config_file(filename): - """Read a YAML configuration file""" +def check_section(section): + """Raise a ValueError if the section is not a valid section.""" + if section not in valid_sections: + raise ValueError("Invalid config section: '%s'. Options are %s." + % (section, valid_sections)) + + +class ConfigScope(object): + """This class represents a configuration scope. + + A scope is one directory containing named configuration files. + Each file is a config "section" (e.g., mirrors, compilers, etc). + """ + def __init__(self, name, path): + self.name = name # scope name. + self.path = path # path to directory containing configs. + self.sections = {} # sections read from config files. + + + def get_section_filename(self, section): + check_section(section) + return os.path.join(self.path, "%s.yaml" % section) + + + def get_section(self, section): + if not section in self.sections: + path = self.get_section_filename(section) + data = _read_config_file(path) + self.sections[section] = {} if data is None else data + return self.sections[section] + + + def write_section(self, section): + filename = self.get_section_filename(section) + data = self.get_section(section) + try: + mkdirp(self.path) + with open(filename, 'w') as f: + yaml.dump(data, stream=f, default_flow_style=False) + except (yaml.YAMLError, IOError) as e: + raise ConfigFileError("Error writing to config file: '%s'" % str(e)) + + + def clear(self): + """Empty cached config information.""" + self.sections = {} + + +"""List of config scopes by name. + Later scopes in the list will override earlier scopes. +""" +config_scopes = [ + ConfigScope('site', os.path.join(spack.etc_path, 'spack')), + ConfigScope('user', os.path.expanduser('~/.spack'))] + +"""List of valid scopes, for convenience.""" +valid_scopes = (s.name for s in config_scopes) + + +def check_scope(scope): + if scope is None: + return 'user' + elif scope not in valid_scopes: + raise ValueError("Invalid config scope: '%s'. Must be one of %s." + % (scope, valid_scopes)) + return scope + + +def get_scope(scope): + scope = check_scope(scope) + return next(s for s in config_scopes if s.name == scope) + + +def _read_config_file(filename): + """Read a YAML configuration file.""" # Ignore nonexisting files. if not os.path.exists(filename): return None elif not os.path.isfile(filename): - tty.die("Invlaid configuration. %s exists but is not a file." 
% filename) + raise ConfigFileError( + "Invlaid configuration. %s exists but is not a file." % filename) elif not os.access(filename, os.R_OK): - tty.die("Configuration file %s is not readable." % filename) + raise ConfigFileError("Config file is not readable: %s." % filename) try: with open(filename) as f: return yaml.load(f) except MarkedYAMLError, e: - tty.die("Error parsing yaml%s: %s" % (str(e.context_mark), e.problem)) + raise ConfigFileError( + "Error parsing yaml%s: %s" % (str(e.context_mark), e.problem)) except IOError, e: - tty.die("Error reading configuration file %s: %s" % (filename, str(e))) + raise ConfigFileError( + "Error reading configuration file %s: %s" % (filename, str(e))) def clear_config_caches(): """Clears the caches for configuration files, which will cause them to be re-read upon the next request""" - for key,s in _config_sections.iteritems(): - s.files_read_from = [] - s.result_dict = {} - - _read_config_file.clear() - spack.config._compiler_by_arch = {} - spack.compilers._cached_default_compiler = None + for scope in config_scopes: + scope.clear() def _merge_yaml(dest, source): """Merges source into dest; entries in source take precedence over dest. + This routine may modify dest and should be assigned to dest, in + case dest was None to begin with, e.g.: + + dest = _merge_yaml(dest, source) + Config file authors can optionally end any attribute in a dict with `::` instead of `:`, and the key will override that of the parent instead of merging. + """ def they_are(t): return isinstance(dest, t) and isinstance(source, t) @@ -212,61 +293,31 @@ def substitute_spack_prefix(path): return path.replace('$spack', spack.prefix) -def get_config(category): - """Get the confguration tree for a category. +def get_config(section): + """Get configuration settings for a section. - Strips off the top-level category entry from the dict + Strips off the top-level section name from the YAML dict. """ - category = _config_sections[category] - if category.result_dict: - return category.result_dict - - category.result_dict = {} - for scope, scope_path in config_scopes: - path = os.path.join(scope_path, category.filename) - result = _read_config_file(path) - if not result: + check_section(section) + merged_section = {} + + for scope in config_scopes: + # read potentially cached data from the scope. + data = scope.get_section(section) + if not data or not section in data: continue - if category.strip: - if not category.name in result: - continue - result = result[category.name] - - # ignore empty sections for easy commenting of single-line configs. - if result is None: - continue - - category.files_read_from.insert(0, path) - if category.merge: - category.result_dict = _merge_yaml(category.result_dict, result) - else: - category.result_dict = result - - return category.result_dict - - -def get_compilers_config(arch=None): - """Get the compiler configuration from config files for the given - architecture. 
Strips off the architecture component of the - configuration""" - global _compiler_by_arch - if not arch: - arch = spack.architecture.sys_type() - if arch in _compiler_by_arch: - return _compiler_by_arch[arch] - - cc_config = get_config('compilers') - if arch in cc_config and 'all' in cc_config: - arch_compiler = dict(cc_config[arch]) - _compiler_by_arch[arch] = _merge_yaml(arch_compiler, cc_config['all']) - elif arch in cc_config: - _compiler_by_arch[arch] = cc_config[arch] - elif 'all' in cc_config: - _compiler_by_arch[arch] = cc_config['all'] - else: - _compiler_by_arch[arch] = {} - return _compiler_by_arch[arch] + # extract data under the section name header + data = data[section] + + # ignore empty sections for easy commenting of single-line configs. + if not data: + continue + + # merge config data from scopes. + merged_section = _merge_yaml(merged_section, data) + + return merged_section def get_repos_config(): @@ -284,119 +335,71 @@ def get_repos_config(): return [expand_repo_path(repo) for repo in repo_list] -def get_mirror_config(): - """Get the mirror configuration from config files""" - return get_config('mirrors') - +def get_config_filename(scope, section): + """For some scope and section, get the name of the configuration file""" + scope = get_scope(scope) + return scope.get_section_filename(section) -def get_config_scope_dirname(scope): - """For a scope return the config directory""" - for s,p in config_scopes: - if s == scope: - return p - tty.die("Unknown scope %s. Valid options are %s" % - (scope, ", ".join([s for s,p in config_scopes]))) +def update_config(section, update_data, scope=None): + """Update the configuration file for a particular scope. -def get_config_scope_filename(scope, category_name): - """For some scope and category, get the name of the configuration file""" - if not category_name in _config_sections: - tty.die("Unknown config category %s. Valid options are: %s" % - (category_name, ", ".join([s for s in _config_sections]))) - return os.path.join(get_config_scope_dirname(scope), _config_sections[category_name].filename) + Merges contents of update_data into the scope's data for the + specified section, then writes out the config file. + update_data shoudl contain only the section's data, with the + top-level name stripped off. This can be a list, dict, or any + other yaml-ish structure. -def add_to_config(category_name, addition_dict, scope=None): - """Merge a new dict into a configuration tree and write the new - configuration to disk""" - get_config(category_name) - category = _config_sections[category_name] - - # If scope is specified, use it. Otherwise use the last config scope that - # we successfully parsed data from. - file = None - path = None - if not scope and not category.files_read_from: - scope = 'user' - - if scope: - try: - dir = get_config_scope_dirname(scope) - if not os.path.exists(dir): - mkdirp(dir) - path = os.path.join(dir, category.filename) - file = open(path, 'w') - except IOError, e: - pass - else: - for p in category.files_read_from: - try: - file = open(p, 'w') - except IOError, e: - pass - if file: - path = p - break; + """ + # read in the config to ensure we've got current data + get_config(section) - if not file: - tty.die('Unable to write to config file %s' % path) + check_section(section) # validate section name + scope = get_scope(scope) # get ConfigScope object from string. 
- # Merge the new information into the existing file info, then write to disk - new_dict = _read_config_file(path) + # read only the requested section's data. + data = scope.get_section(section) + data = _merge_yaml(data, { section : update_data }) + scope.write_section(section) - if new_dict and category_name in new_dict: - new_dict = new_dict[category_name] - new_dict = _merge_yaml(new_dict, addition_dict) - new_dict = { category_name : new_dict } +def remove_from_config(section, key_to_rm, scope=None): + """Remove a configuration key and write updated configuration to disk. - # Install new dict as memoized value, and dump to disk - _read_config_file.cache[path] = new_dict - yaml.dump(new_dict, stream=file, default_flow_style=False) - file.close() + Return True if something was removed, False otherwise. - # Merge the new information into the cached results - category.result_dict = _merge_yaml(category.result_dict, addition_dict) + """ + # ensure configs are current by reading in. + get_config(section) + # check args and get the objects we need. + scope = get_scope(scope) + data = scope.get_section(section) + filename = scope.get_section_filename(section) -def add_to_mirror_config(addition_dict, scope=None): - """Add mirrors to the configuration files""" - add_to_config('mirrors', addition_dict, scope) + # do some checks + if not data: + return False + if not section in data: + raise ConfigFileError("Invalid configuration file: '%s'" % filename) -def add_to_compiler_config(addition_dict, scope=None, arch=None): - """Add compilerss to the configuration files""" - if not arch: - arch = spack.architecture.sys_type() - add_to_config('compilers', { arch : addition_dict }, scope) - clear_config_caches() + if key_to_rm not in section[section]: + return False + # remove the key from the section's configuration + del data[section][key_to_rm] + scope.write_section(section) -def remove_from_config(category_name, key_to_rm, scope=None): - """Remove a configuration key and write a new configuration to disk""" - get_config(category_name) - scopes_to_rm_from = [scope] if scope else [s for s,p in config_scopes] - category = _config_sections[category_name] - rmd_something = False - for s in scopes_to_rm_from: - path = get_config_scope_filename(scope, category_name) - result = _read_config_file(path) - if not result: - continue - if not key_to_rm in result[category_name]: - continue - with open(path, 'w') as f: - result[category_name].pop(key_to_rm, None) - yaml.dump(result, stream=f, default_flow_style=False) - category.result_dict.pop(key_to_rm, None) - rmd_something = True - return rmd_something +"""Print a configuration to stdout""" +def print_section(section): + try: + yaml.dump(get_config(section), stream=sys.stdout, default_flow_style=False) + except (yaml.YAMLError, IOError) as e: + raise ConfigError("Error reading configuration: %s" % section) -"""Print a configuration to stdout""" -def print_category(category_name): - if not category_name in _config_sections: - tty.die("Unknown config category %s. 
Valid options are: %s" % - (category_name, ", ".join([s for s in _config_sections]))) - yaml.dump(get_config(category_name), stream=sys.stdout, default_flow_style=False) +class ConfigError(SpackError): pass +class ConfigFileError(ConfigError): pass diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 303df6df38..2f1b6e29ea 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -531,6 +531,7 @@ class Spec(object): and self.architecture and self.compiler and self.compiler.concrete and self.dependencies.concrete) + return self._concrete diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 78930ecb5b..da85bd6f21 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -242,7 +242,8 @@ class Stage(object): # TODO: move mirror logic out of here and clean it up! if self.mirror_path: - urls = ["%s/%s" % (m, self.mirror_path) for m in _get_mirrors()] + urls = ["%s/%s" % (u, self.mirror_path) + for name, u in spack.config.get_config('mirrors')] digest = None if isinstance(self.fetcher, fs.URLFetchStrategy): @@ -345,7 +346,7 @@ class DIYStage(object): def _get_mirrors(): """Get mirrors from spack configuration.""" - config = spack.config.get_mirror_config() + config = spack.config.get_config('mirrors') return [val for name, val in config.iteritems()] diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index b1195dfe4e..fe6cec82fe 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -27,6 +27,7 @@ import shutil import os from tempfile import mkdtemp import spack +import spack.config from spack.test.mock_packages_test import * # Some sample compiler config data @@ -72,9 +73,9 @@ class ConfigTest(MockPackagesTest): super(ConfigTest, self).setUp() self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-') spack.config.config_scopes = [ - ('test_low_priority', os.path.join(self.tmp_dir, 'low')), - ('test_high_priority', os.path.join(self.tmp_dir, 'high'))] - + spack.config.ConfigScope('test_low_priority', os.path.join(self.tmp_dir, 'low')), + spack.config.ConfigScope('test_high_priority', os.path.join(self.tmp_dir, 'high'))] + spack.config.valid_scopes = ('test_low_priority', 'test_high_priority') def tearDown(self): super(ConfigTest, self).tearDown() @@ -83,17 +84,19 @@ class ConfigTest(MockPackagesTest): def check_config(self, comps, *compiler_names): """Check that named compilers in comps match Spack's config.""" - config = spack.config.get_compilers_config() + config = spack.config.get_config('compilers') compiler_list = ['cc', 'cxx', 'f77', 'f90'] for key in compiler_names: for c in compiler_list: - self.assertEqual(comps[key][c], config[key][c]) + expected = comps[key][c] + actual = config[key][c] + self.assertEqual(expected, actual) def test_write_key_in_memory(self): # Write b_comps "on top of" a_comps. - spack.config.add_to_compiler_config(a_comps, 'test_low_priority') - spack.config.add_to_compiler_config(b_comps, 'test_high_priority') + spack.config.update_config('compilers', a_comps, 'test_low_priority') + spack.config.update_config('compilers', b_comps, 'test_high_priority') # Make sure the config looks how we expect. self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0') @@ -102,8 +105,8 @@ class ConfigTest(MockPackagesTest): def test_write_key_to_disk(self): # Write b_comps "on top of" a_comps. 
- spack.config.add_to_compiler_config(a_comps, 'test_low_priority') - spack.config.add_to_compiler_config(b_comps, 'test_high_priority') + spack.config.update_config('compilers', a_comps, 'test_low_priority') + spack.config.update_config('compilers', b_comps, 'test_high_priority') # Clear caches so we're forced to read from disk. spack.config.clear_config_caches() diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index 5ce010ae8f..e1f7961bed 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -79,7 +79,8 @@ class DatabaseTest(MockPackagesTest): def _mock_install(self, spec): s = Spec(spec) - pkg = spack.repo.get(s.concretized()) + s.concretize() + pkg = spack.repo.get(s) pkg.do_install(fake=True) diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index 2150b40876..6d92aacab9 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -41,9 +41,10 @@ class MockPackagesTest(unittest.TestCase): spack.config.clear_config_caches() self.real_scopes = spack.config.config_scopes + self.real_valid_scopes = spack.config.valid_scopes spack.config.config_scopes = [ - ('site', spack.mock_site_config), - ('user', spack.mock_user_config)] + spack.config.ConfigScope('site', spack.mock_site_config), + spack.config.ConfigScope('user', spack.mock_user_config)] # Store changes to the package's dependencies so we can # restore later. @@ -71,6 +72,7 @@ class MockPackagesTest(unittest.TestCase): """Restore the real packages path after any test.""" spack.repo.swap(self.db) spack.config.config_scopes = self.real_scopes + spack.config.valid_scopes = self.real_valid_scopes spack.config.clear_config_caches() # Restore dependency changes that happened during the test -- cgit v1.2.3-70-g09d2 From bef52570aee9d9e287eb46b7461fc2fbcda8a033 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 25 Dec 2015 18:42:06 -0800 Subject: Default to scope with highest precedence instead of user scope, - Generalizes config scopes a bit more: nothing assumes there is a 'user' scope (this would break testing sometimes). --- lib/spack/spack/compilers/__init__.py | 2 +- lib/spack/spack/config.py | 10 +++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 321452fddb..a1b6d978df 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -84,7 +84,7 @@ def get_compiler_config(arch=None): compilers = find_compilers(*get_path('PATH')) for compiler in compilers: config[arch].update(_to_dict(compiler)) - spack.config.update_config('compilers', config, 'user') + spack.config.update_config('compilers', config) # Merge 'all' compilers with arch-specific ones. merged_config = config.get('all', {}) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index b401f59d7f..aa6afd183e 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -193,8 +193,16 @@ valid_scopes = (s.name for s in config_scopes) def check_scope(scope): + """Ensure that scope is valid, and return a valid scope if it is None. + + This should be used by routines in ``config.py`` to validate + scope name arguments, and to determine a default scope where no + scope is specified. + + """ if scope is None: - return 'user' + # default to the scope with highest precedence. 
+ return config_scopes[-1] elif scope not in valid_scopes: raise ValueError("Invalid config scope: '%s'. Must be one of %s." % (scope, valid_scopes)) -- cgit v1.2.3-70-g09d2 From 4e8e298eb3242cb25abe0cca315a74a100514dbf Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 27 Dec 2015 17:40:31 -0800 Subject: Uncomment disabled tests. --- lib/spack/spack/test/package_sanity.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py index 0b075d135d..ee09040d0d 100644 --- a/lib/spack/spack/test/package_sanity.py +++ b/lib/spack/spack/test/package_sanity.py @@ -44,7 +44,7 @@ class PackageSanityTest(unittest.TestCase): self.check_db() - def ztest_get_all_mock_packages(self): + def test_get_all_mock_packages(self): """Get the mock packages once each too.""" db = RepoPath(spack.mock_packages_path) spack.repo.swap(db) @@ -52,7 +52,7 @@ class PackageSanityTest(unittest.TestCase): spack.repo.swap(db) - def ztest_url_versions(self): + def test_url_versions(self): """Check URLs for regular packages, if they are explicitly defined.""" for pkg in spack.repo.all_packages(): for v, vattrs in pkg.versions.items(): -- cgit v1.2.3-70-g09d2 From 39e360f93a374314c28716ded3b1533d66cd62be Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 27 Dec 2015 17:51:11 -0800 Subject: Add custom YAML loader & dumper to track lines & maintain dict order. - Configs are now parsed with `spack.util.spack_yaml.load/dump` - Parser annotates returned data with `_start_mark` and `_end_mark` properties, so that we can recover what lines/files they came from. - Parser uses `OrderedDict` instead of `dict`. This will help maintain some sanity when round-tripping config files. --- lib/spack/spack/config.py | 77 ++++++----- lib/spack/spack/test/__init__.py | 3 +- lib/spack/spack/test/config.py | 8 +- lib/spack/spack/test/mock_packages_test.py | 12 +- lib/spack/spack/test/yaml.py | 93 +++++++++++++ lib/spack/spack/util/spack_yaml.py | 201 +++++++++++++++++++++++++++++ 6 files changed, 351 insertions(+), 43 deletions(-) create mode 100644 lib/spack/spack/test/yaml.py create mode 100644 lib/spack/spack/util/spack_yaml.py diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index aa6afd183e..9e3b44085f 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -1,5 +1,5 @@ ############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. @@ -120,8 +120,10 @@ the site configuration will be ignored. import os import sys import copy + import yaml from yaml.error import MarkedYAMLError +from ordereddict_backport import OrderedDict import llnl.util.tty as tty from llnl.util.filesystem import mkdirp @@ -129,11 +131,20 @@ from llnl.util.filesystem import mkdirp import spack from spack.error import SpackError -"""List of valid config sections.""" -valid_sections = ('compilers', 'mirrors', 'repos') +# Hacked yaml for configuration files preserves line numbers. +import spack.util.spack_yaml as syaml + + +"""Dict from section names -> function to check section YAML format.""" +valid_sections = ['compilers', 'mirrors', 'repos'] + +"""OrderedDict of config scopes keyed by name. + Later scopes will override earlier scopes. 
+""" +config_scopes = OrderedDict() -def check_section(section): +def validate_section(section): """Raise a ValueError if the section is not a valid section.""" if section not in valid_sections: raise ValueError("Invalid config section: '%s'. Options are %s." @@ -146,14 +157,19 @@ class ConfigScope(object): A scope is one directory containing named configuration files. Each file is a config "section" (e.g., mirrors, compilers, etc). """ + def __init__(self, name, path): self.name = name # scope name. self.path = path # path to directory containing configs. self.sections = {} # sections read from config files. + # Register in a dict of all ConfigScopes + # TODO: make this cleaner. Mocking up for testing is brittle. + global config_scopes + config_scopes[name] = self def get_section_filename(self, section): - check_section(section) + validate_section(section) return os.path.join(self.path, "%s.yaml" % section) @@ -161,7 +177,10 @@ class ConfigScope(object): if not section in self.sections: path = self.get_section_filename(section) data = _read_config_file(path) - self.sections[section] = {} if data is None else data + if data is None: + self.sections[section] = {} + else: + self.sections[section] = data return self.sections[section] @@ -171,7 +190,7 @@ class ConfigScope(object): try: mkdirp(self.path) with open(filename, 'w') as f: - yaml.dump(data, stream=f, default_flow_style=False) + syaml.dump(data, stream=f, default_flow_style=False) except (yaml.YAMLError, IOError) as e: raise ConfigFileError("Error writing to config file: '%s'" % str(e)) @@ -181,18 +200,11 @@ class ConfigScope(object): self.sections = {} -"""List of config scopes by name. - Later scopes in the list will override earlier scopes. -""" -config_scopes = [ - ConfigScope('site', os.path.join(spack.etc_path, 'spack')), - ConfigScope('user', os.path.expanduser('~/.spack'))] - -"""List of valid scopes, for convenience.""" -valid_scopes = (s.name for s in config_scopes) +ConfigScope('site', os.path.join(spack.etc_path, 'spack')), +ConfigScope('user', os.path.expanduser('~/.spack')) -def check_scope(scope): +def validate_scope(scope): """Ensure that scope is valid, and return a valid scope if it is None. This should be used by routines in ``config.py`` to validate @@ -202,16 +214,14 @@ def check_scope(scope): """ if scope is None: # default to the scope with highest precedence. - return config_scopes[-1] - elif scope not in valid_scopes: - raise ValueError("Invalid config scope: '%s'. Must be one of %s." - % (scope, valid_scopes)) - return scope + return config_scopes.values()[-1] + elif scope in config_scopes: + return config_scopes[scope] -def get_scope(scope): - scope = check_scope(scope) - return next(s for s in config_scopes if s.name == scope) + else: + raise ValueError("Invalid config scope: '%s'. Must be one of %s." + % (scope, config_scopes.keys())) def _read_config_file(filename): @@ -229,7 +239,7 @@ def _read_config_file(filename): try: with open(filename) as f: - return yaml.load(f) + return syaml.load(f) except MarkedYAMLError, e: raise ConfigFileError( @@ -243,7 +253,7 @@ def _read_config_file(filename): def clear_config_caches(): """Clears the caches for configuration files, which will cause them to be re-read upon the next request""" - for scope in config_scopes: + for scope in config_scopes.values(): scope.clear() @@ -306,10 +316,10 @@ def get_config(section): Strips off the top-level section name from the YAML dict. 
""" - check_section(section) + validate_section(section) merged_section = {} - for scope in config_scopes: + for scope in config_scopes.values(): # read potentially cached data from the scope. data = scope.get_section(section) if not data or not section in data: @@ -345,7 +355,7 @@ def get_repos_config(): def get_config_filename(scope, section): """For some scope and section, get the name of the configuration file""" - scope = get_scope(scope) + scope = validate_scope(scope) return scope.get_section_filename(section) @@ -363,8 +373,8 @@ def update_config(section, update_data, scope=None): # read in the config to ensure we've got current data get_config(section) - check_section(section) # validate section name - scope = get_scope(scope) # get ConfigScope object from string. + validate_section(section) # validate section name + scope = validate_scope(scope) # get ConfigScope object from string. # read only the requested section's data. data = scope.get_section(section) @@ -382,7 +392,7 @@ def remove_from_config(section, key_to_rm, scope=None): get_config(section) # check args and get the objects we need. - scope = get_scope(scope) + scope = validate_scope(scope) data = scope.get_section(section) filename = scope.get_section_filename(section) @@ -411,3 +421,4 @@ def print_section(section): class ConfigError(SpackError): pass class ConfigFileError(ConfigError): pass +class ConfigFormatError(ConfigError): pass diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 081e6c7b06..9609fd2f36 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -60,7 +60,8 @@ test_names = ['versions', 'unit_install', 'lock', 'database', - 'namespace_trie'] + 'namespace_trie', + 'yaml'] def list_tests(): diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index f56287aa98..5f99dcb903 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -26,6 +26,7 @@ import unittest import shutil import os from tempfile import mkdtemp +from ordereddict_backport import OrderedDict import spack import spack.config from spack.test.mock_packages_test import * @@ -72,10 +73,9 @@ class ConfigTest(MockPackagesTest): def setUp(self): super(ConfigTest, self).setUp() self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-') - spack.config.config_scopes = [ - spack.config.ConfigScope('test_low_priority', os.path.join(self.tmp_dir, 'low')), - spack.config.ConfigScope('test_high_priority', os.path.join(self.tmp_dir, 'high'))] - spack.config.valid_scopes = ('test_low_priority', 'test_high_priority') + spack.config.config_scopes = OrderedDict() + spack.config.ConfigScope('test_low_priority', os.path.join(self.tmp_dir, 'low')) + spack.config.ConfigScope('test_high_priority', os.path.join(self.tmp_dir, 'high')) def tearDown(self): super(ConfigTest, self).tearDown() diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index d000271960..320c2921b0 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -24,6 +24,7 @@ ############################################################################## import sys import unittest +from ordereddict_backport import OrderedDict import spack import spack.config @@ -41,10 +42,12 @@ class MockPackagesTest(unittest.TestCase): spack.config.clear_config_caches() self.real_scopes = spack.config.config_scopes - self.real_valid_scopes = spack.config.valid_scopes - spack.config.config_scopes = [ - 
spack.config.ConfigScope('site', spack.mock_site_config), - spack.config.ConfigScope('user', spack.mock_user_config)] + + # TODO: Mocking this up is kind of brittle b/c ConfigScope + # TODO: constructor modifies config_scopes. Make it cleaner. + spack.config.config_scopes = OrderedDict() + spack.config.ConfigScope('site', spack.mock_site_config) + spack.config.ConfigScope('user', spack.mock_user_config) # Store changes to the package's dependencies so we can # restore later. @@ -72,7 +75,6 @@ class MockPackagesTest(unittest.TestCase): """Restore the real packages path after any test.""" spack.repo.swap(self.db) spack.config.config_scopes = self.real_scopes - spack.config.valid_scopes = self.real_valid_scopes spack.config.clear_config_caches() # Restore dependency changes that happened during the test diff --git a/lib/spack/spack/test/yaml.py b/lib/spack/spack/test/yaml.py new file mode 100644 index 0000000000..5a357b8e69 --- /dev/null +++ b/lib/spack/spack/test/yaml.py @@ -0,0 +1,93 @@ +############################################################################## +# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +""" +Test Spack's custom YAML format. 
+""" +import unittest +import spack.util.spack_yaml as syaml + +test_file = """\ +config_file: + x86_64: + foo: /path/to/foo + bar: /path/to/bar + baz: /path/to/baz + some_list: + - item 1 + - item 2 + - item 3 + another_list: + [ 1, 2, 3 ] + some_key: some_string +""" + +test_data = { + 'config_file' : syaml.syaml_dict([ + ('x86_64', syaml.syaml_dict([ + ('foo', '/path/to/foo'), + ('bar', '/path/to/bar'), + ('baz', '/path/to/baz' )])), + ('some_list', [ 'item 1', 'item 2', 'item 3' ]), + ('another_list', [ 1, 2, 3 ]), + ('some_key', 'some_string') + ])} + +class YamlTest(unittest.TestCase): + + def setUp(self): + self.data = syaml.load(test_file) + + + def test_parse(self): + self.assertEqual(test_data, self.data) + + + def test_dict_order(self): + self.assertEqual( + ['x86_64', 'some_list', 'another_list', 'some_key'], + self.data['config_file'].keys()) + + self.assertEqual( + ['foo', 'bar', 'baz'], + self.data['config_file']['x86_64'].keys()) + + + def test_line_numbers(self): + def check(obj, start_line, end_line): + self.assertEqual(obj._start_mark.line, start_line) + self.assertEqual(obj._end_mark.line, end_line) + + check(self.data, 0, 12) + check(self.data['config_file'], 1, 12) + check(self.data['config_file']['x86_64'], 2, 5) + check(self.data['config_file']['x86_64']['foo'], 2, 2) + check(self.data['config_file']['x86_64']['bar'], 3, 3) + check(self.data['config_file']['x86_64']['baz'], 4, 4) + check(self.data['config_file']['some_list'], 6, 9) + check(self.data['config_file']['some_list'][0], 6, 6) + check(self.data['config_file']['some_list'][1], 7, 7) + check(self.data['config_file']['some_list'][2], 8, 8) + check(self.data['config_file']['another_list'], 10, 10) + check(self.data['config_file']['some_key'], 11, 11) diff --git a/lib/spack/spack/util/spack_yaml.py b/lib/spack/spack/util/spack_yaml.py new file mode 100644 index 0000000000..728e86b8ee --- /dev/null +++ b/lib/spack/spack/util/spack_yaml.py @@ -0,0 +1,201 @@ +############################################################################## +# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +"""Enhanced YAML parsing for Spack. + +- ``load()`` preserves YAML Marks on returned objects -- this allows + us to access file and line information later. + +- ``Our load methods use ``OrderedDict`` class instead of YAML's + default unorderd dict. 
+ +""" +import yaml +from yaml.nodes import * +from yaml.constructor import ConstructorError +from yaml.representer import SafeRepresenter +from ordereddict_backport import OrderedDict + +# Only export load and dump +__all__ = ['load', 'dump'] + +# Make new classes so we can add custom attributes. +# Also, use OrderedDict instead of just dict. +class syaml_dict(OrderedDict): + def __repr__(self): + mappings = ('%r: %r' % (k,v) for k,v in self.items()) + return '{%s}' % ', '.join(mappings) +class syaml_list(list): + __repr__ = list.__repr__ +class syaml_str(str): + __repr__ = str.__repr__ + +def mark(obj, node): + """Add start and end markers to an object.""" + obj._start_mark = node.start_mark + obj._end_mark = node.end_mark + + +class OrderedLineLoader(yaml.Loader): + """YAML loader that preserves order and line numbers. + + Mappings read in by this loader behave like an ordered dict. + Sequences, mappings, and strings also have new attributes, + ``_start_mark`` and ``_end_mark``, that preserve YAML line + information in the output data. + + """ + # + # Override construct_yaml_* so that they build our derived types, + # which allows us to add new attributes to them. + # + # The standard YAML constructors return empty instances and fill + # in with mappings later. We preserve this behavior. + # + def construct_yaml_str(self, node): + value = self.construct_scalar(node) + try: + value = value.encode('ascii') + except UnicodeEncodeError: + pass + value = syaml_str(value) + mark(value, node) + return value + + + def construct_yaml_seq(self, node): + data = syaml_list() + mark(data, node) + yield data + data.extend(self.construct_sequence(node)) + + + def construct_yaml_map(self, node): + data = syaml_dict() + mark(data, node) + yield data + value = self.construct_mapping(node) + data.update(value) + + # + # Override the ``construct_*`` routines. These fill in empty + # objects after yielded by the above ``construct_yaml_*`` methods. 
+ # + def construct_sequence(self, node, deep=False): + if not isinstance(node, SequenceNode): + raise ConstructorError(None, None, + "expected a sequence node, but found %s" % node.id, + node.start_mark) + value = syaml_list(self.construct_object(child, deep=deep) + for child in node.value) + mark(value, node) + return value + + + def construct_mapping(self, node, deep=False): + """Store mappings as OrderedDicts instead of as regular python + dictionaries to preserve file ordering.""" + if not isinstance(node, MappingNode): + raise ConstructorError(None, None, + "expected a mapping node, but found %s" % node.id, + node.start_mark) + + mapping = syaml_dict() + for key_node, value_node in node.value: + key = self.construct_object(key_node, deep=deep) + try: + hash(key) + except TypeError, exc: + raise ConstructorError("while constructing a mapping", node.start_mark, + "found unacceptable key (%s)" % exc, key_node.start_mark) + value = self.construct_object(value_node, deep=deep) + if key in mapping: + raise ConstructorError("while constructing a mapping", node.start_mark, + "found already in-use key (%s)" % key, key_node.start_mark) + mapping[key] = value + + mark(mapping, node) + return mapping + +# register above new constructors +OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:map', OrderedLineLoader.construct_yaml_map) +OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:seq', OrderedLineLoader.construct_yaml_seq) +OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str) + + + +class OrderedLineDumper(yaml.Dumper): + """Dumper that preserves ordering and formats ``syaml_*`` objects. + + This dumper preserves insertion ordering ``syaml_dict`` objects + when they're written out. It also has some custom formatters + for ``syaml_*`` objects so that they are formatted like their + regular Python equivalents, instead of ugly YAML pyobjects. + + """ + def represent_mapping(self, tag, mapping, flow_style=None): + value = [] + node = MappingNode(tag, value, flow_style=flow_style) + if self.alias_key is not None: + self.represented_objects[self.alias_key] = node + best_style = True + if hasattr(mapping, 'items'): + # if it's a syaml_dict, preserve OrderedDict order. + # Otherwise do the default thing. + sort = not isinstance(mapping, syaml_dict) + mapping = mapping.items() + if sort: + mapping.sort() + + for item_key, item_value in mapping: + node_key = self.represent_data(item_key) + node_value = self.represent_data(item_value) + if not (isinstance(node_key, ScalarNode) and not node_key.style): + best_style = False + if not (isinstance(node_value, ScalarNode) and not node_value.style): + best_style = False + value.append((node_key, node_value)) + if flow_style is None: + if self.default_flow_style is not None: + node.flow_style = self.default_flow_style + else: + node.flow_style = best_style + return node + +# Make our special objects look like normal YAML ones. 
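Together with the load()/dump() wrappers defined below, these representers let
an syaml_dict round-trip in insertion order; a small sketch (the mirror names
and URLs are made up):

    import spack.util.spack_yaml as syaml

    mirrors = syaml.syaml_dict([('remote', 'https://example.com/spack-mirror'),
                                ('local',  'file:///tmp/spack-mirror')])
    text = syaml.dump({'mirrors': mirrors}, default_flow_style=False)
    # 'remote' is emitted before 'local'; a plain dict in the same position
    # would be sorted alphabetically by represent_mapping().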
+OrderedLineDumper.add_representer(syaml_dict, OrderedLineDumper.represent_dict) +OrderedLineDumper.add_representer(syaml_list, OrderedLineDumper.represent_list) +OrderedLineDumper.add_representer(syaml_str, OrderedLineDumper.represent_str) + + +def load(*args, **kwargs): + """Load but modify the loader instance so that it will add __line__ + atrributes to the returned object.""" + kwargs['Loader'] = OrderedLineLoader + return yaml.load(*args, **kwargs) + + +def dump(*args, **kwargs): + kwargs['Dumper'] = OrderedLineDumper + return yaml.dump(*args, **kwargs) -- cgit v1.2.3-70-g09d2 From ff0d871612c05d803fdabb8a5b870b7af961cdc2 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 27 Dec 2015 21:13:18 -0800 Subject: Remove mock_configs; tests no longer modify spack home directory. --- lib/spack/spack/__init__.py | 4 --- lib/spack/spack/test/mock_packages_test.py | 32 ++++++++++++++++++++-- .../mock_configs/site_spackconfig/compilers.yaml | 12 -------- 3 files changed, 30 insertions(+), 18 deletions(-) delete mode 100644 var/spack/mock_configs/site_spackconfig/compilers.yaml diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index de1a98d092..973ba64b96 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -78,10 +78,6 @@ installed_db = Database(install_path) packages_path = join_path(repos_path, "builtin") mock_packages_path = join_path(repos_path, "builtin.mock") -mock_config_path = join_path(var_path, "mock_configs") -mock_site_config = join_path(mock_config_path, "site_spackconfig") -mock_user_config = join_path(mock_config_path, "user_spackconfig") - # # This controls how spack lays out install prefixes and # stage directories. diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index 320c2921b0..7642edcf4b 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -23,14 +23,32 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import sys +import os import unittest +import tempfile from ordereddict_backport import OrderedDict +from llnl.util.filesystem import mkdirp + import spack import spack.config from spack.repository import RepoPath from spack.spec import Spec +mock_compiler_config = """\ +compilers: + all: + clang@3.3: + cc: /path/to/clang + cxx: /path/to/clang++ + f77: None + fc: None + gcc@4.5.0: + cc: /path/to/gcc + cxx: /path/to/g++ + f77: /path/to/gfortran + fc: /path/to/gfortran +""" class MockPackagesTest(unittest.TestCase): def initmock(self): @@ -43,11 +61,21 @@ class MockPackagesTest(unittest.TestCase): spack.config.clear_config_caches() self.real_scopes = spack.config.config_scopes + # Mock up temporary configuration directories + self.temp_config = tempfile.mkdtemp() + self.mock_site_config = os.path.join(self.temp_config, 'site') + self.mock_user_config = os.path.join(self.temp_config, 'user') + mkdirp(self.mock_site_config) + mkdirp(self.mock_user_config) + comp_yaml = os.path.join(self.mock_site_config, 'compilers.yaml') + with open(comp_yaml, 'w') as f: + f.write(mock_compiler_config) + # TODO: Mocking this up is kind of brittle b/c ConfigScope # TODO: constructor modifies config_scopes. Make it cleaner. 
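        # The three lines below rebuild Spack's scope list from scratch:
        # ConfigScope.__init__ registers each new scope in
        # spack.config.config_scopes, so after this the 'site' and 'user'
        # scopes point at the temporary directories created above instead
        # of the real etc/spack and ~/.spack configuration directories.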
spack.config.config_scopes = OrderedDict() - spack.config.ConfigScope('site', spack.mock_site_config) - spack.config.ConfigScope('user', spack.mock_user_config) + spack.config.ConfigScope('site', self.mock_site_config) + spack.config.ConfigScope('user', self.mock_user_config) # Store changes to the package's dependencies so we can # restore later. diff --git a/var/spack/mock_configs/site_spackconfig/compilers.yaml b/var/spack/mock_configs/site_spackconfig/compilers.yaml deleted file mode 100644 index 0a2dc893e2..0000000000 --- a/var/spack/mock_configs/site_spackconfig/compilers.yaml +++ /dev/null @@ -1,12 +0,0 @@ -compilers: - all: - clang@3.3: - cc: /path/to/clang - cxx: /path/to/clang++ - f77: None - fc: None - gcc@4.5.0: - cc: /path/to/gcc - cxx: /path/to/g++ - f77: /path/to/gfortran - fc: /path/to/gfortran -- cgit v1.2.3-70-g09d2 From 1f8ba53ca7767ca452f553f71d49eaf90fa19db8 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 28 Dec 2015 00:33:17 -0800 Subject: Rework compiler configuration and simplify config.py logic. - `spack compiler` subcommands now take an optional --scope argument. - no more `remove_from_config` in `config.py` -- `update` just overwrites b/c it's easier to just call `get_config`, modify YAML structures directly, and then call `update`. - Implemented `spack compiler remove`. --- lib/spack/spack/cmd/compiler.py | 49 ++++++++++++++----- lib/spack/spack/cmd/compilers.py | 5 ++ lib/spack/spack/compilers/__init__.py | 92 ++++++++++++++++++++++++++--------- lib/spack/spack/config.py | 51 +++++-------------- 4 files changed, 124 insertions(+), 73 deletions(-) diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index af1a22c9dd..a3860abf76 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -22,6 +22,7 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +import sys import argparse import llnl.util.tty as tty @@ -41,17 +42,26 @@ def setup_parser(subparser): sp = subparser.add_subparsers( metavar='SUBCOMMAND', dest='compiler_command') - update_parser = sp.add_parser( - 'add', help='Add compilers to the Spack configuration.') - update_parser.add_argument('add_paths', nargs=argparse.REMAINDER) + add_parser = sp.add_parser('add', help='Add compilers to the Spack configuration.') + add_parser.add_argument('add_paths', nargs=argparse.REMAINDER) + add_parser.add_argument('--scope', choices=spack.config.config_scopes, + help="Configuration scope to modify.") - remove_parser = sp.add_parser('remove', help='remove compiler') - remove_parser.add_argument('path') + remove_parser = sp.add_parser('remove', help='Remove compiler by spec.') + remove_parser.add_argument( + '-a', '--all', action='store_true', help='Remove ALL compilers that match spec.') + remove_parser.add_argument('compiler_spec') + remove_parser.add_argument('--scope', choices=spack.config.config_scopes, + help="Configuration scope to modify.") - list_parser = sp.add_parser('list', help='list available compilers') + list_parser = sp.add_parser('list', help='list available compilers') + list_parser.add_argument('--scope', choices=spack.config.config_scopes, + help="Configuration scope to read from.") - info_parser = sp.add_parser('info', help='Show compiler paths.') + info_parser = sp.add_parser('info', help='Show compiler paths.') info_parser.add_argument('compiler_spec') + 
info_parser.add_argument('--scope', choices=spack.config.config_scopes, + help="Configuration scope to read from.") def compiler_add(args): @@ -62,13 +72,13 @@ def compiler_add(args): paths = get_path('PATH') compilers = [c for c in spack.compilers.find_compilers(*args.add_paths) - if c.spec not in spack.compilers.all_compilers()] + if c.spec not in spack.compilers.all_compilers(scope=args.scope)] if compilers: - spack.compilers.add_compilers_to_config('user', compilers) + spack.compilers.add_compilers_to_config(compilers, scope=args.scope) n = len(compilers) s = 's' if n > 1 else '' - filename = spack.config.get_config_filename('user', 'compilers') + filename = spack.config.get_config_filename(args.scope, 'compilers') tty.msg("Added %d new compiler%s to %s" % (n, s, filename)) colify(reversed(sorted(c.spec for c in compilers)), indent=4) else: @@ -76,13 +86,26 @@ def compiler_add(args): def compiler_remove(args): - pass + cspec = CompilerSpec(args.compiler_spec) + compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope) + + if not compilers: + tty.die("No compilers match spec %s." % cspec) + elif not args.all and len(compilers) > 1: + tty.error("Multiple compilers match spec %s. Choose one:" % cspec) + colify(reversed(sorted([c.spec for c in compilers])), indent=4) + tty.msg("Or, you can use `spack compiler remove -a` to remove all of them.") + sys.exit(1) + + for compiler in compilers: + spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope) + tty.msg("Removed compiler %s." % compiler.spec) def compiler_info(args): """Print info about all compilers matching a spec.""" cspec = CompilerSpec(args.compiler_spec) - compilers = spack.compilers.compilers_for_spec(cspec) + compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope) if not compilers: tty.error("No compilers match spec %s." % cspec) @@ -97,7 +120,7 @@ def compiler_info(args): def compiler_list(args): tty.msg("Available compilers") - index = index_by(spack.compilers.all_compilers(), 'name') + index = index_by(spack.compilers.all_compilers(scope=args.scope), 'name') for i, (name, compilers) in enumerate(index.items()): if i >= 1: print diff --git a/lib/spack/spack/cmd/compilers.py b/lib/spack/spack/cmd/compilers.py index c485a910eb..7e09016f2d 100644 --- a/lib/spack/spack/cmd/compilers.py +++ b/lib/spack/spack/cmd/compilers.py @@ -26,9 +26,14 @@ import llnl.util.tty as tty from llnl.util.tty.colify import colify from llnl.util.lang import index_by +import spack from spack.cmd.compiler import compiler_list description = "List available compilers. Same as 'spack compiler list'." 
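The subcommands above are thin wrappers around the scope-aware helpers in
spack.compilers; roughly the same operations can be done programmatically
(the compiler spec below is only an example):

    import spack.compilers

    user_compilers = spack.compilers.all_compilers(scope='user')

    for c in spack.compilers.compilers_for_spec('gcc@4.5.0', scope='user'):
        spack.compilers.remove_compiler_from_config(c.spec, scope='user')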
+def setup_parser(subparser): + subparser.add_argument('--scope', choices=spack.config.config_scopes, + help="Configuration scope to read/modify.") + def compilers(parser, args): compiler_list(args) diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index a1b6d978df..67dfaa3ac9 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -49,10 +49,10 @@ _required_instance_vars = ['cc', 'cxx', 'f77', 'fc'] _default_order = ['gcc', 'intel', 'pgi', 'clang', 'xlc'] def _auto_compiler_spec(function): - def converter(cspec_like): + def converter(cspec_like, *args, **kwargs): if not isinstance(cspec_like, spack.spec.CompilerSpec): cspec_like = spack.spec.CompilerSpec(cspec_like) - return function(cspec_like) + return function(cspec_like, *args, **kwargs) return converter @@ -65,39 +65,87 @@ def _to_dict(compiler): } -def get_compiler_config(arch=None): +def get_compiler_config(arch=None, scope=None): """Return the compiler configuration for the specified architecture. - - If the compiler configuration designates some compilers for - 'all' architectures, those are merged into the result, as well. - """ # If any configuration file has compilers, just stick with the # ones already configured. - config = spack.config.get_config('compilers') + config = spack.config.get_config('compilers', scope=scope) + my_arch = spack.architecture.sys_type() if arch is None: - arch = spack.architecture.sys_type() + arch = my_arch - if arch not in config: + if arch in config: + return config[arch] + + # Only for the current arch in *highest* scope: automatically try to + # find compilers if none are configured yet. + if arch == my_arch and scope == 'user': config[arch] = {} compilers = find_compilers(*get_path('PATH')) for compiler in compilers: config[arch].update(_to_dict(compiler)) - spack.config.update_config('compilers', config) + spack.config.update_config('compilers', config, scope=scope) + return config[arch] - # Merge 'all' compilers with arch-specific ones. - merged_config = config.get('all', {}) - merged_config = spack.config._merge_yaml(merged_config, config[arch]) + return {} + + +def add_compilers_to_config(compilers, arch=None, scope=None): + """Add compilers to the config for the specified architecture. + + Arguments: + - compilers: a list of Compiler objects. + - arch: arch to add compilers for. + - scope: configuration scope to modify. + """ + if arch is None: + arch = spack.architecture.sys_type() + + compiler_config = get_compiler_config(arch, scope) + for compiler in compilers: + compiler_config[str(compiler.spec)] = dict( + (c, getattr(compiler, c, "None")) + for c in _required_instance_vars) - return merged_config + update = { arch : compiler_config } + spack.config.update_config('compilers', update, scope) -def all_compilers(arch=None): +@_auto_compiler_spec +def remove_compiler_from_config(compiler_spec, arch=None, scope=None): + """Remove compilers from the config, by spec. + + Arguments: + - compiler_specs: a list of CompilerSpec objects. + - arch: arch to add compilers for. + - scope: configuration scope to modify. + """ + if arch is None: + arch = spack.architecture.sys_type() + + compiler_config = get_compiler_config(arch, scope) + del compiler_config[str(compiler_spec)] + update = { arch : compiler_config } + + spack.config.update_config('compilers', update, scope) + + +def all_compilers(arch=None, scope=None): """Return a set of specs for all the compiler versions currently available to build with. 
These are instances of CompilerSpec. """ - return [spack.spec.CompilerSpec(s) for s in get_compiler_config(arch)] + # Get compilers for this architecture. + arch_config = get_compiler_config(arch, scope) + + # Merge 'all' compilers with arch-specific ones. + # Arch-specific compilers have higher precedence. + merged_config = get_compiler_config('all', scope=scope) + merged_config = spack.config._merge_yaml(merged_config, arch_config) + + # Return compiler specs for the result. + return [spack.spec.CompilerSpec(s) for s in merged_config] _cached_default_compiler = None @@ -165,18 +213,18 @@ def supported(compiler_spec): @_auto_compiler_spec -def find(compiler_spec): +def find(compiler_spec, arch=None, scope=None): """Return specs of available compilers that match the supplied compiler spec. Return an list if nothing found.""" - return [c for c in all_compilers() if c.satisfies(compiler_spec)] + return [c for c in all_compilers(arch, scope) if c.satisfies(compiler_spec)] @_auto_compiler_spec -def compilers_for_spec(compiler_spec): +def compilers_for_spec(compiler_spec, arch=None, scope=None): """This gets all compilers that satisfy the supplied CompilerSpec. Returns an empty list if none are found. """ - config = get_compiler_config() + config = get_compiler_config(arch, scope) def get_compiler(cspec): items = config[str(cspec)] @@ -195,7 +243,7 @@ def compilers_for_spec(compiler_spec): return cls(cspec, *compiler_paths) - matches = find(compiler_spec) + matches = find(compiler_spec, arch, scope) return [get_compiler(cspec) for cspec in matches] diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 9e3b44085f..d266d2e23f 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -311,7 +311,7 @@ def substitute_spack_prefix(path): return path.replace('$spack', spack.prefix) -def get_config(section): +def get_config(section, scope=None): """Get configuration settings for a section. Strips off the top-level section name from the YAML dict. @@ -319,7 +319,12 @@ def get_config(section): validate_section(section) merged_section = {} - for scope in config_scopes.values(): + if scope is None: + scopes = config_scopes.values() + else: + scopes = [validate_scope(scope)] + + for scope in scopes: # read potentially cached data from the scope. data = scope.get_section(section) if not data or not section in data: @@ -362,52 +367,22 @@ def get_config_filename(scope, section): def update_config(section, update_data, scope=None): """Update the configuration file for a particular scope. - Merges contents of update_data into the scope's data for the - specified section, then writes out the config file. + Overwrites contents of a section in a scope with update_data, + then writes out the config file. - update_data shoudl contain only the section's data, with the - top-level name stripped off. This can be a list, dict, or any + update_data should have the top-level section name stripped off + (it will be re-added). Data itself can be a list, dict, or any other yaml-ish structure. """ # read in the config to ensure we've got current data get_config(section) - validate_section(section) # validate section name + validate_section(section) # validate section name scope = validate_scope(scope) # get ConfigScope object from string. # read only the requested section's data. 
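Per the commit message, the intended workflow is now: read a section (merged,
or from a single scope), edit the returned YAML structure in place, and hand
it back to update_config, which simply overwrites that scope's section.
A sketch:

    import spack.config

    site = spack.config.get_config('compilers', scope='site')    # one scope only
    # ...modify the returned structure directly, then write it back out:
    spack.config.update_config('compilers', site, scope='site')

    merged = spack.config.get_config('compilers')                # all scopes, merged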
- data = scope.get_section(section) - data = _merge_yaml(data, { section : update_data }) - scope.write_section(section) - - -def remove_from_config(section, key_to_rm, scope=None): - """Remove a configuration key and write updated configuration to disk. - - Return True if something was removed, False otherwise. - - """ - # ensure configs are current by reading in. - get_config(section) - - # check args and get the objects we need. - scope = validate_scope(scope) - data = scope.get_section(section) - filename = scope.get_section_filename(section) - - # do some checks - if not data: - return False - - if not section in data: - raise ConfigFileError("Invalid configuration file: '%s'" % filename) - - if key_to_rm not in section[section]: - return False - - # remove the key from the section's configuration - del data[section][key_to_rm] + scope.sections[section] = { section : update_data } scope.write_section(section) -- cgit v1.2.3-70-g09d2 From e8e6368cc8ab03be4b635731ed43b0fcae47712a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Mon, 28 Dec 2015 01:14:41 -0800 Subject: Rework mirror configuration. - All of these work: - `spack mirror add` - `spack mirror remove` - `spack mirror list` - `spack mirror` subcommands (except create) now have their own --scope argument. - Mirror config is now stored sanely as an ordered list. --- lib/spack/spack/cmd/mirror.py | 50 ++++++++++++++++++++++++++++++++++--------- lib/spack/spack/stage.py | 6 ++++-- 2 files changed, 44 insertions(+), 12 deletions(-) diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index a2d2e80f5e..946b50350b 100644 --- a/lib/spack/spack/cmd/mirror.py +++ b/lib/spack/spack/cmd/mirror.py @@ -60,15 +60,22 @@ def setup_parser(subparser): '-o', '--one-version-per-spec', action='store_const', const=1, default=0, help="Only fetch one 'preferred' version per spec, not all known versions.") + add_parser = sp.add_parser('add', help=mirror_add.__doc__) add_parser.add_argument('name', help="Mnemonic name for mirror.") add_parser.add_argument( 'url', help="URL of mirror directory created by 'spack mirror create'.") + add_parser.add_argument('--scope', choices=spack.config.config_scopes, + help="Configuration scope to modify.") remove_parser = sp.add_parser('remove', help=mirror_remove.__doc__) remove_parser.add_argument('name') + remove_parser.add_argument('--scope', choices=spack.config.config_scopes, + help="Configuration scope to modify.") list_parser = sp.add_parser('list', help=mirror_list.__doc__) + list_parser.add_argument('--scope', choices=spack.config.config_scopes, + help="Configuration scope to read from.") def mirror_add(args): @@ -77,31 +84,54 @@ def mirror_add(args): if url.startswith('/'): url = 'file://' + url - mirror_dict = { args.name : url } - spack.config.update_config('mirrors', { args.name : url }, 'user') + mirrors = spack.config.get_config('mirrors', scope=args.scope) + if not mirrors: + mirrors = [] + + for m in mirrors: + for name, u in m.items(): + if name == args.name: + tty.die("Mirror with name %s already exists." % name) + if u == url: + tty.die("Mirror with url %s already exists." % url) + # should only be one item per mirror dict. 
+ + mirrors.insert(0, { args.name : url }) + spack.config.update_config('mirrors', mirrors, scope=args.scope) def mirror_remove(args): """Remove a mirror by name.""" name = args.name - rmd_something = spack.config.remove_from_config('mirrors', name) - if not rmd_something: - tty.die("No such mirror: %s" % name) + mirrors = spack.config.get_config('mirrors', scope=args.scope) + if not mirrors: + mirrors = [] + + names = [n for m in mirrors for n,u in m.items()] + if not name in names: + tty.die("No mirror with name %s" % name) + + old_mirror = mirrors.pop(names.index(name)) + spack.config.update_config('mirrors', mirrors, scope=args.scope) + tty.msg("Removed mirror %s with url %s." % old_mirror.popitem()) def mirror_list(args): """Print out available mirrors to the console.""" - sec_names = spack.config.get_config('mirrors') - if not sec_names: + mirrors = spack.config.get_config('mirrors', scope=args.scope) + if not mirrors: tty.msg("No mirrors configured.") return - max_len = max(len(s) for s in sec_names) + names = [n for m in mirrors for n,u in m.items()] + max_len = max(len(n) for n in names) fmt = "%%-%ds%%s" % (max_len + 4) - for name, val in sec_names.iteritems(): - print fmt % (name, val) + for m in mirrors: + for name, url in m.items(): + print fmt % (name, url) + # should only be one item per mirror dict. def _read_specs_from_file(filename): diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 543a3a6223..a9631d4b62 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -26,6 +26,7 @@ import os import re import shutil import tempfile +from urlparse import urljoin import llnl.util.tty as tty from llnl.util.filesystem import * @@ -242,8 +243,9 @@ class Stage(object): # TODO: move mirror logic out of here and clean it up! if self.mirror_path: - urls = ["%s/%s" % (u, self.mirror_path) - for name, u in spack.config.get_config('mirrors')] + mirrors = spack.config.get_config('mirrors') + mirrors = [(n,u) for m in mirrors for n,u in m.items()] + urls = [urljoin(u, self.mirror_path) for name, u in mirrors] digest = None if isinstance(self.fetcher, fs.URLFetchStrategy): -- cgit v1.2.3-70-g09d2 From 20b7f8a8e0ea186cde1e641c154566f279e67cd6 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 2 Jan 2016 14:25:10 -0800 Subject: Fix bug in tests. --- lib/spack/spack/compilers/__init__.py | 29 +++++++++++++++-------------- lib/spack/spack/test/mock_packages_test.py | 2 ++ 2 files changed, 17 insertions(+), 14 deletions(-) diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 67dfaa3ac9..2a3d67c731 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -46,6 +46,7 @@ from spack.util.environment import get_path _imported_compilers_module = 'spack.compilers' _required_instance_vars = ['cc', 'cxx', 'f77', 'fc'] +# TODO: customize order in config file _default_order = ['gcc', 'intel', 'pgi', 'clang', 'xlc'] def _auto_compiler_spec(function): @@ -132,7 +133,7 @@ def remove_compiler_from_config(compiler_spec, arch=None, scope=None): spack.config.update_config('compilers', update, scope) -def all_compilers(arch=None, scope=None): +def all_compilers_config(arch=None, scope=None): """Return a set of specs for all the compiler versions currently available to build with. These are instances of CompilerSpec. 
""" @@ -144,25 +145,25 @@ def all_compilers(arch=None, scope=None): merged_config = get_compiler_config('all', scope=scope) merged_config = spack.config._merge_yaml(merged_config, arch_config) - # Return compiler specs for the result. - return [spack.spec.CompilerSpec(s) for s in merged_config] + return merged_config + + +def all_compilers(arch=None, scope=None): + # Return compiler specs from the merged config. + return [spack.spec.CompilerSpec(s) + for s in all_compilers_config(arch, scope)] -_cached_default_compiler = None def default_compiler(): - global _cached_default_compiler - if _cached_default_compiler: - return _cached_default_compiler versions = [] - for name in _default_order: # TODO: customize order. + for name in _default_order: versions = find(name) - if versions: break - - if not versions: + if versions: + break + else: raise NoCompilersError() - _cached_default_compiler = sorted(versions)[-1] - return _cached_default_compiler + return sorted(versions)[-1] def find_compilers(*path): @@ -224,7 +225,7 @@ def compilers_for_spec(compiler_spec, arch=None, scope=None): """This gets all compilers that satisfy the supplied CompilerSpec. Returns an empty list if none are found. """ - config = get_compiler_config(arch, scope) + config = all_compilers_config(arch, scope) def get_compiler(cspec): items = config[str(cspec)] diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index 7642edcf4b..e9f1f95df5 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -24,6 +24,7 @@ ############################################################################## import sys import os +import shutil import unittest import tempfile from ordereddict_backport import OrderedDict @@ -103,6 +104,7 @@ class MockPackagesTest(unittest.TestCase): """Restore the real packages path after any test.""" spack.repo.swap(self.db) spack.config.config_scopes = self.real_scopes + shutil.rmtree(self.temp_config, ignore_errors=True) spack.config.clear_config_caches() # Restore dependency changes that happened during the test -- cgit v1.2.3-70-g09d2 From 56937416a92787ba712bbc73c0089cc7eb0c5f78 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sat, 2 Jan 2016 18:18:00 -0800 Subject: All tests that call concretize() need to be MockPackagesTests. 
--- lib/spack/spack/test/directory_layout.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py index 107a7a6412..925cb648ed 100644 --- a/lib/spack/spack/test/directory_layout.py +++ b/lib/spack/spack/test/directory_layout.py @@ -36,21 +36,25 @@ import spack from spack.spec import Spec from spack.repository import RepoPath from spack.directory_layout import YamlDirectoryLayout +from spack.test.mock_packages_test import * + # number of packages to test (to reduce test time) max_packages = 10 -class DirectoryLayoutTest(unittest.TestCase): +class DirectoryLayoutTest(MockPackagesTest): """Tests that a directory layout works correctly and produces a consistent install path.""" def setUp(self): + super(DirectoryLayoutTest, self).setUp() self.tmpdir = tempfile.mkdtemp() self.layout = YamlDirectoryLayout(self.tmpdir) def tearDown(self): + super(DirectoryLayoutTest, self).tearDown() shutil.rmtree(self.tmpdir, ignore_errors=True) self.layout = None -- cgit v1.2.3-70-g09d2 From 21fae634a54b3aa0f0d8f22fcf1e9d1429636c5f Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 3 Jan 2016 01:19:03 -0800 Subject: Add Python 3 aliases to our argparse backport. --- lib/spack/external/argparse.py | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/lib/spack/external/argparse.py b/lib/spack/external/argparse.py index 394e5da152..ec9a9ee738 100644 --- a/lib/spack/external/argparse.py +++ b/lib/spack/external/argparse.py @@ -1067,9 +1067,13 @@ class _SubParsersAction(Action): class _ChoicesPseudoAction(Action): - def __init__(self, name, help): + def __init__(self, name, aliases, help): + metavar = dest = name + if aliases: + metavar += ' (%s)' % ', '.join(aliases) sup = super(_SubParsersAction._ChoicesPseudoAction, self) - sup.__init__(option_strings=[], dest=name, help=help) + sup.__init__(option_strings=[], dest=dest, help=help, + metavar=metavar) def __init__(self, option_strings, @@ -1097,15 +1101,22 @@ class _SubParsersAction(Action): if kwargs.get('prog') is None: kwargs['prog'] = '%s %s' % (self._prog_prefix, name) + aliases = kwargs.pop('aliases', ()) + # create a pseudo-action to hold the choice help if 'help' in kwargs: help = kwargs.pop('help') - choice_action = self._ChoicesPseudoAction(name, help) + choice_action = self._ChoicesPseudoAction(name, aliases, help) self._choices_actions.append(choice_action) # create the parser and add it to the map parser = self._parser_class(**kwargs) self._name_parser_map[name] = parser + + # make parser available under aliases also + for alias in aliases: + self._name_parser_map[alias] = parser + return parser def _get_subactions(self): @@ -1123,8 +1134,9 @@ class _SubParsersAction(Action): try: parser = self._name_parser_map[parser_name] except KeyError: - tup = parser_name, ', '.join(self._name_parser_map) - msg = _('unknown parser %r (choices: %s)' % tup) + args = {'parser_name': parser_name, + 'choices': ', '.join(self._name_parser_map)} + msg = _('unknown parser %(parser_name)r (choices: %(choices)s)') % args raise ArgumentError(self, msg) # parse all the remaining options into the namespace -- cgit v1.2.3-70-g09d2 From b02faf56411c734b91a7f51b60f8921a31e12c16 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 3 Jan 2016 02:27:50 -0800 Subject: add/remove/list working for new config format. - mirrors.yaml now uses dict order for precedence, instead of lists of dicts. 
- spack.cmd now specifies default scope for add/remove and for list with `default_modify_scope` and `default_list_scope`. - commands that only read or list default to all scopes (merged) - commands that modify configs modify user scope (highest precedence) by default - These vars are used in setup_paraser for mirror/repo/compiler. - Spack's argparse supports aliases now. - added 'rm' alias for `spack [repo|compiler|mirror] remove` --- lib/spack/spack/__init__.py | 11 ++--- lib/spack/spack/cmd/__init__.py | 9 ++++ lib/spack/spack/cmd/compiler.py | 17 +++++-- lib/spack/spack/cmd/mirror.py | 65 ++++++++++++++----------- lib/spack/spack/cmd/repo.py | 105 ++++++++++++++++++++++++++++++++++++---- lib/spack/spack/config.py | 57 ++++++++-------------- lib/spack/spack/repository.py | 63 ++++++++++++++++-------- lib/spack/spack/stage.py | 3 +- 8 files changed, 221 insertions(+), 109 deletions(-) diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 973ba64b96..6c4a15aaab 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -56,15 +56,12 @@ etc_path = join_path(prefix, "etc") # Set up the default packages database. # import spack.repository -_repo_paths = spack.config.get_repos_config() -if not _repo_paths: - tty.die("Spack configuration contains no package repositories.") - try: - repo = spack.repository.RepoPath(*_repo_paths) + repo = spack.repository.RepoPath() sys.meta_path.append(repo) -except spack.repository.BadRepoError, e: - tty.die('Bad repository. %s' % e.message) +except spack.repository.RepoError, e: + tty.error('while initializing Spack RepoPath:') + tty.die(e.message) # # Set up the installed packages database diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index 926e7ac14a..6c635a1e6c 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -31,6 +31,15 @@ from llnl.util.lang import attr_setdefault import spack import spack.spec +import spack.config + +# +# Settings for commands that modify configuration +# +# Commands that modify confguration By default modify the *highest* priority scope. +default_modify_scope = spack.config.highest_precedence_scope().name +# Commands that list confguration list *all* scopes by default. 
+default_list_scope = None # cmd has a submodule called "list" so preserve the python list module python_list = list diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index a3860abf76..75b51f6b49 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -42,25 +42,31 @@ def setup_parser(subparser): sp = subparser.add_subparsers( metavar='SUBCOMMAND', dest='compiler_command') + scopes = spack.config.config_scopes + + # Add add_parser = sp.add_parser('add', help='Add compilers to the Spack configuration.') add_parser.add_argument('add_paths', nargs=argparse.REMAINDER) - add_parser.add_argument('--scope', choices=spack.config.config_scopes, + add_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope, help="Configuration scope to modify.") - remove_parser = sp.add_parser('remove', help='Remove compiler by spec.') + # Remove + remove_parser = sp.add_parser('remove', aliases=['rm'], help='Remove compiler by spec.') remove_parser.add_argument( '-a', '--all', action='store_true', help='Remove ALL compilers that match spec.') remove_parser.add_argument('compiler_spec') - remove_parser.add_argument('--scope', choices=spack.config.config_scopes, + remove_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope, help="Configuration scope to modify.") + # List list_parser = sp.add_parser('list', help='list available compilers') - list_parser.add_argument('--scope', choices=spack.config.config_scopes, + list_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope, help="Configuration scope to read from.") + # Info info_parser = sp.add_parser('info', help='Show compiler paths.') info_parser.add_argument('compiler_spec') - info_parser.add_argument('--scope', choices=spack.config.config_scopes, + info_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope, help="Configuration scope to read from.") @@ -132,6 +138,7 @@ def compiler_list(args): def compiler(parser, args): action = { 'add' : compiler_add, 'remove' : compiler_remove, + 'rm' : compiler_remove, 'info' : compiler_info, 'list' : compiler_list } action[args.compiler_command](args) diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py index 946b50350b..885483a840 100644 --- a/lib/spack/spack/cmd/mirror.py +++ b/lib/spack/spack/cmd/mirror.py @@ -36,6 +36,7 @@ import spack.config import spack.mirror from spack.spec import Spec from spack.error import SpackError +from spack.util.spack_yaml import syaml_dict description = "Manage mirrors." 
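The extra 'rm' keys in the dispatch tables above exist because argparse
reports the alias the user actually typed; a sketch of the mechanism (this
assumes the patched backport from the earlier commit, or the aliases support
in Python 3.3+'s stdlib argparse):

    import argparse

    parser = argparse.ArgumentParser(prog='spack')
    sp = parser.add_subparsers(metavar='SUBCOMMAND', dest='command')
    sp.add_parser('remove', aliases=['rm'], help='Remove an entry.')

    args = parser.parse_args(['rm'])
    # args.command == 'rm', not 'remove', so the command table needs an
    # entry for each spelling that maps to the same handler.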
@@ -47,6 +48,7 @@ def setup_parser(subparser): sp = subparser.add_subparsers( metavar='SUBCOMMAND', dest='mirror_command') + # Create create_parser = sp.add_parser('create', help=mirror_create.__doc__) create_parser.add_argument('-d', '--directory', default=None, help="Directory in which to create mirror.") @@ -60,22 +62,29 @@ def setup_parser(subparser): '-o', '--one-version-per-spec', action='store_const', const=1, default=0, help="Only fetch one 'preferred' version per spec, not all known versions.") + scopes = spack.config.config_scopes + # Add add_parser = sp.add_parser('add', help=mirror_add.__doc__) add_parser.add_argument('name', help="Mnemonic name for mirror.") add_parser.add_argument( 'url', help="URL of mirror directory created by 'spack mirror create'.") - add_parser.add_argument('--scope', choices=spack.config.config_scopes, - help="Configuration scope to modify.") + add_parser.add_argument( + '--scope', choices=scopes, default=spack.cmd.default_modify_scope, + help="Configuration scope to modify.") - remove_parser = sp.add_parser('remove', help=mirror_remove.__doc__) + # Remove + remove_parser = sp.add_parser('remove', aliases=['rm'], help=mirror_remove.__doc__) remove_parser.add_argument('name') - remove_parser.add_argument('--scope', choices=spack.config.config_scopes, - help="Configuration scope to modify.") + remove_parser.add_argument( + '--scope', choices=scopes, default=spack.cmd.default_modify_scope, + help="Configuration scope to modify.") + # List list_parser = sp.add_parser('list', help=mirror_list.__doc__) - list_parser.add_argument('--scope', choices=spack.config.config_scopes, - help="Configuration scope to read from.") + list_parser.add_argument( + '--scope', choices=scopes, default=spack.cmd.default_list_scope, + help="Configuration scope to read from.") def mirror_add(args): @@ -86,17 +95,18 @@ def mirror_add(args): mirrors = spack.config.get_config('mirrors', scope=args.scope) if not mirrors: - mirrors = [] - - for m in mirrors: - for name, u in m.items(): - if name == args.name: - tty.die("Mirror with name %s already exists." % name) - if u == url: - tty.die("Mirror with url %s already exists." % url) - # should only be one item per mirror dict. - - mirrors.insert(0, { args.name : url }) + mirrors = syaml_dict() + + for name, u in mirrors.items(): + if name == args.name: + tty.die("Mirror with name %s already exists." % name) + if u == url: + tty.die("Mirror with url %s already exists." % url) + # should only be one item per mirror dict. + + items = [(n,u) for n,u in mirrors.items()] + items.insert(0, (args.name, url)) + mirrors = syaml_dict(items) spack.config.update_config('mirrors', mirrors, scope=args.scope) @@ -106,15 +116,14 @@ def mirror_remove(args): mirrors = spack.config.get_config('mirrors', scope=args.scope) if not mirrors: - mirrors = [] + mirrors = syaml_dict() - names = [n for m in mirrors for n,u in m.items()] - if not name in names: + if not name in mirrors: tty.die("No mirror with name %s" % name) - old_mirror = mirrors.pop(names.index(name)) + old_value = mirrors.pop(name) spack.config.update_config('mirrors', mirrors, scope=args.scope) - tty.msg("Removed mirror %s with url %s." % old_mirror.popitem()) + tty.msg("Removed mirror %s with url %s." 
% (name, old_value)) def mirror_list(args): @@ -124,14 +133,11 @@ def mirror_list(args): tty.msg("No mirrors configured.") return - names = [n for m in mirrors for n,u in m.items()] - max_len = max(len(n) for n in names) + max_len = max(len(n) for n in mirrors.keys()) fmt = "%%-%ds%%s" % (max_len + 4) - for m in mirrors: - for name, url in m.items(): - print fmt % (name, url) - # should only be one item per mirror dict. + for name in mirrors: + print fmt % (name, mirrors[name]) def _read_specs_from_file(filename): @@ -205,6 +211,7 @@ def mirror(parser, args): action = { 'create' : mirror_create, 'add' : mirror_add, 'remove' : mirror_remove, + 'rm' : mirror_remove, 'list' : mirror_list } action[args.mirror_command](args) diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py index 991d306c04..ebe42d0138 100644 --- a/lib/spack/spack/cmd/repo.py +++ b/lib/spack/spack/cmd/repo.py @@ -1,5 +1,5 @@ ############################################################################## -# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. @@ -33,12 +33,13 @@ from llnl.util.filesystem import join_path, mkdirp import spack.spec import spack.config from spack.util.environment import get_path -from spack.repository import packages_dir_name, repo_config_name, Repo +from spack.repository import * description = "Manage package source repositories." def setup_parser(subparser): sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command') + scopes = spack.config.config_scopes # Create create_parser = sp.add_parser('create', help=repo_create.__doc__) @@ -49,6 +50,25 @@ def setup_parser(subparser): # List list_parser = sp.add_parser('list', help=repo_list.__doc__) + list_parser.add_argument( + '--scope', choices=scopes, default=spack.cmd.default_list_scope, + help="Configuration scope to read from.") + + # Add + add_parser = sp.add_parser('add', help=repo_add.__doc__) + add_parser.add_argument('path', help="Path to a Spack package repository directory.") + add_parser.add_argument( + '--scope', choices=scopes, default=spack.cmd.default_modify_scope, + help="Configuration scope to modify.") + + # Remove + remove_parser = sp.add_parser('remove', help=repo_remove.__doc__, aliases=['rm']) + remove_parser.add_argument( + 'path_or_namespace', + help="Path or namespace of a Spack package repository.") + remove_parser.add_argument( + '--scope', choices=scopes, default=spack.cmd.default_modify_scope, + help="Configuration scope to modify.") def repo_create(args): @@ -104,25 +124,87 @@ def repo_create(args): def repo_add(args): - """Remove a package source from the Spack configuration""" - # FIXME: how to deal with this with the current config architecture? - # FIXME: Repos do not have mnemonics, which I assumed would be simpler... should they have them after all? + """Add a package source to the Spack configuration""" + path = args.path + + # check if the path is relative to the spack directory. + real_path = path + if path.startswith('$spack'): + real_path = spack.repository.substitute_spack_prefix(path) + elif not os.path.isabs(real_path): + real_path = os.path.abspath(real_path) + path = real_path + + # check if the path exists + if not os.path.exists(real_path): + tty.die("No such file or directory: '%s'." % path) + + # Make sure the path is a directory. 
+ if not os.path.isdir(real_path): + tty.die("Not a Spack repository: '%s'." % path) + + # Make sure it's actually a spack repository by constructing it. + repo = Repo(real_path) + + # If that succeeds, finally add it to the configuration. + repos = spack.config.get_config('repos', args.scope) + if not repos: repos = [] + + if repo.root in repos or path in repos: + tty.die("Repository is already registered with Spack: '%s'" % path) + + repos.insert(0, path) + spack.config.update_config('repos', repos, args.scope) + tty.msg("Created repo with namespace '%s'." % repo.namespace) def repo_remove(args): - """Remove a package source from the Spack configuration""" - # FIXME: see above. + """Remove a repository from the Spack configuration.""" + repos = spack.config.get_config('repos', args.scope) + path_or_namespace = args.path_or_namespace + + # If the argument is a path, remove that repository from config. + path = os.path.abspath(path_or_namespace) + if path in repos: + repos.remove(path) + spack.config.update_config('repos', repos, args.scope) + tty.msg("Removed repository '%s'." % path) + return + + # If it is a namespace, remove corresponding repo + for path in repos: + try: + repo = Repo(path) + if repo.namespace == path_or_namespace: + repos.remove(repo.root) + spack.config.update_config('repos', repos, args.scope) + tty.msg("Removed repository '%s' with namespace %s." + % (repo.root, repo.namespace)) + return + except RepoError as e: + continue + + tty.die("No repository with path or namespace: '%s'" + % path_or_namespace) def repo_list(args): """List package sources and their mnemoics""" - roots = spack.config.get_repos_config() - repos = [Repo(r) for r in roots] + roots = spack.config.get_config('repos', args.scope) + repos = [] + for r in roots: + try: + repos.append(Repo(r)) + except RepoError as e: + continue msg = "%d package repositor" % len(repos) msg += "y." if len(repos) == 1 else "ies." tty.msg(msg) + if not repos: + return + max_ns_len = max(len(r.namespace) for r in repos) for repo in repos: fmt = "%%-%ds%%s" % (max_ns_len + 4) @@ -131,5 +213,8 @@ def repo_list(args): def repo(parser, args): action = { 'create' : repo_create, - 'list' : repo_list } + 'list' : repo_list, + 'add' : repo_add, + 'remove' : repo_remove, + 'rm' : repo_remove} action[args.repo_command](args) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index d266d2e23f..c53dcbc405 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -204,6 +204,11 @@ ConfigScope('site', os.path.join(spack.etc_path, 'spack')), ConfigScope('user', os.path.expanduser('~/.spack')) +def highest_precedence_scope(): + """Get the scope with highest precedence (prefs will override others).""" + return config_scopes.values()[-1] + + def validate_scope(scope): """Ensure that scope is valid, and return a valid scope if it is None. @@ -214,7 +219,7 @@ def validate_scope(scope): """ if scope is None: # default to the scope with highest precedence. - return config_scopes.values()[-1] + return highest_precedence_scope() elif scope in config_scopes: return config_scopes[scope] @@ -287,15 +292,10 @@ def _merge_yaml(dest, source): dest[:] = source + [x for x in dest if x not in seen] return dest - # Source dict is merged into dest. Extra ':' means overwrite. + # Source dict is merged into dest. 
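    # For example (hypothetical scopes), merging
    #     dest   (lower precedence):  {'mirrors': {'a': 'url1', 'b': 'url2'}}
    #     source (higher precedence): {'mirrors': {'b': 'url3', 'c': 'url4'}}
    # yields {'mirrors': {'a': 'url1', 'b': 'url3', 'c': 'url4'}}: keys only
    # in dest survive, keys present in both take source's value, and keys
    # only in source are added.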
elif they_are(dict): for sk, sv in source.iteritems(): - # allow total override with, e.g., repos:: - override = sk.endswith(':') - if override: - sk = sk.rstrip(':') - - if override or not sk in dest: + if not sk in dest: dest[sk] = copy.copy(sv) else: dest[sk] = _merge_yaml(dest[sk], source[sk]) @@ -306,18 +306,13 @@ def _merge_yaml(dest, source): return copy.copy(source) -def substitute_spack_prefix(path): - """Replaces instances of $spack with Spack's prefix.""" - return path.replace('$spack', spack.prefix) - - def get_config(section, scope=None): """Get configuration settings for a section. Strips off the top-level section name from the YAML dict. """ validate_section(section) - merged_section = {} + merged_section = syaml.syaml_dict() if scope is None: scopes = config_scopes.values() @@ -327,37 +322,25 @@ def get_config(section, scope=None): for scope in scopes: # read potentially cached data from the scope. data = scope.get_section(section) - if not data or not section in data: - continue - # extract data under the section name header - data = data[section] + # Skip empty configs + if not data or not isinstance(data, dict): + continue - # ignore empty sections for easy commenting of single-line configs. - if not data: + # Allow complete override of site config with '
::' + override_key = section + ':' + if not (section in data or override_key in data): + tty.warn("Skipping bad configuration file: '%s'" % scope.path) continue - # merge config data from scopes. - merged_section = _merge_yaml(merged_section, data) + if override_key in data: + merged_section = data[override_key] + else: + merged_section = _merge_yaml(merged_section, data[section]) return merged_section -def get_repos_config(): - repo_list = get_config('repos') - if repo_list is None: - return [] - - if not isinstance(repo_list, list): - tty.die("Bad repository configuration. 'repos' element does not contain a list.") - - def expand_repo_path(path): - path = substitute_spack_prefix(path) - path = os.path.expanduser(path) - return path - return [expand_repo_path(repo) for repo in repo_list] - - def get_config_filename(scope, section): """For some scope and section, get the name of the configuration file""" scope = validate_scope(scope) diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index 4e91855db0..3367572ef5 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -36,6 +36,7 @@ import llnl.util.tty as tty from llnl.util.filesystem import join_path import spack.error +import spack.config import spack.spec from spack.virtual import ProviderIndex from spack.util.naming import * @@ -53,6 +54,7 @@ repo_config_name = 'repo.yaml' # Top-level filename for repo config. packages_dir_name = 'packages' # Top-level repo directory containing pkgs. package_file_name = 'package.py' # Filename for packages in a repository. + def _autospec(function): """Decorator that automatically converts the argument of a single-arg function to a Spec.""" @@ -71,6 +73,11 @@ def _make_namespace_module(ns): return module +def substitute_spack_prefix(path): + """Replaces instances of $spack with Spack's prefix.""" + return path.replace('$spack', spack.prefix) + + class RepoPath(object): """A RepoPath is a list of repos that function as one. @@ -89,10 +96,20 @@ class RepoPath(object): self._all_package_names = [] self._provider_index = None + # If repo_dirs is empty, just use the configuration + if not repo_dirs: + repo_dirs = spack.config.get_config('repos') + if not repo_dirs: + raise NoRepoConfiguredError( + "Spack configuration contains no package repositories.") + # Add each repo to this path. for root in repo_dirs: - repo = Repo(root, self.super_namespace) - self.put_last(repo) + try: + repo = Repo(root, self.super_namespace) + self.put_last(repo) + except RepoError as e: + tty.warn("Failed to initialize repository at '%s'." % root, e.message) def swap(self, other): @@ -121,12 +138,12 @@ class RepoPath(object): """ if repo.root in self.by_path: - raise DuplicateRepoError("Package repos are the same", - repo, self.by_path[repo.root]) + raise DuplicateRepoError("Duplicate repository: '%s'" % repo.root) if repo.namespace in self.by_namespace: - raise DuplicateRepoError("Package repos cannot provide the same namespace", - repo, self.by_namespace[repo.namespace]) + raise DuplicateRepoError( + "Package repos '%s' and '%s' both provide namespace %s." 
+ % (repo.root, self.by_namespace[repo.namespace].root, repo.namespace)) # Add repo to the pkg indexes self.by_namespace[repo.full_namespace] = repo @@ -292,7 +309,8 @@ class Repo(object): """ # Root directory, containing _repo.yaml and package dirs - self.root = root + # Allow roots to by spack-relative by starting with '$spack' + self.root = substitute_spack_prefix(root) # super-namespace for all packages in the Repo self.super_namespace = namespace @@ -629,13 +647,27 @@ class Repo(object): return self.exists(pkg_name) -class BadRepoError(spack.error.SpackError): +class RepoError(spack.error.SpackError): + """Superclass for repository-related errors.""" + + +class NoRepoConfiguredError(RepoError): + """Raised when there are no repositories configured.""" + + +class BadRepoError(RepoError): """Raised when repo layout is invalid.""" - def __init__(self, msg): - super(BadRepoError, self).__init__(msg) -class UnknownPackageError(spack.error.SpackError): +class DuplicateRepoError(RepoError): + """Raised when duplicate repos are added to a RepoPath.""" + + +class PackageLoadError(spack.error.SpackError): + """Superclass for errors related to loading packages.""" + + +class UnknownPackageError(PackageLoadError): """Raised when we encounter a package spack doesn't have.""" def __init__(self, name, repo=None): msg = None @@ -647,14 +679,7 @@ class UnknownPackageError(spack.error.SpackError): self.name = name -class DuplicateRepoError(spack.error.SpackError): - """Raised when duplicate repos are added to a RepoPath.""" - def __init__(self, msg, repo1, repo2): - super(UnknownPackageError, self).__init__( - "%s: %s, %s" % (msg, repo1, repo2)) - - -class FailedConstructorError(spack.error.SpackError): +class FailedConstructorError(PackageLoadError): """Raised when a package's class constructor fails.""" def __init__(self, name, exc_type, exc_obj, exc_tb): super(FailedConstructorError, self).__init__( diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index a9631d4b62..61f9846835 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -244,8 +244,7 @@ class Stage(object): # TODO: move mirror logic out of here and clean it up! if self.mirror_path: mirrors = spack.config.get_config('mirrors') - mirrors = [(n,u) for m in mirrors for n,u in m.items()] - urls = [urljoin(u, self.mirror_path) for name, u in mirrors] + urls = [urljoin(u, self.mirror_path) for name, u in mirrors.items()] digest = None if isinstance(self.fetcher, fs.URLFetchStrategy): -- cgit v1.2.3-70-g09d2 From 05b30bf83e0bc8723472b6d609c692c3b83b486c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 14 Jan 2016 10:26:31 -0800 Subject: Make text wrapping off by default in tty, add a kwarg for it. 
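With wrapping now opt-in, call sites that still want wrapped, indented output
pass wrap=True explicitly (as package.py does below); a minimal sketch:

    import llnl.util.tty as tty

    tty.warn("Keeping install prefix in place despite error.",
             "Spack will think this package is installed. "
             "Manually remove this directory to fix:",
             "/example/install/prefix",     # stand-in path for illustration
             wrap=True)                     # re-enable textwrap'd output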
--- lib/spack/llnl/util/tty/__init__.py | 39 +++++++++++++++++++++++------------- lib/spack/spack/config.py | 40 +++++++++++++++++++++++++++++++------ lib/spack/spack/package.py | 2 +- 3 files changed, 60 insertions(+), 21 deletions(-) diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py index 203f429a48..3ecd3a4ac2 100644 --- a/lib/spack/llnl/util/tty/__init__.py +++ b/lib/spack/llnl/util/tty/__init__.py @@ -63,35 +63,46 @@ def msg(message, *args): def info(message, *args, **kwargs): format = kwargs.get('format', '*b') stream = kwargs.get('stream', sys.stdout) + wrap = kwargs.get('wrap', False) cprint("@%s{==>} %s" % (format, cescape(str(message))), stream=stream) for arg in args: - lines = textwrap.wrap( - str(arg), initial_indent=indent, subsequent_indent=indent) - for line in lines: - stream.write(line + '\n') + if wrap: + lines = textwrap.wrap( + str(arg), initial_indent=indent, subsequent_indent=indent) + for line in lines: + stream.write(line + '\n') + else: + stream.write(indent + str(arg) + '\n') -def verbose(message, *args): +def verbose(message, *args, **kwargs): if _verbose: - info(message, *args, format='c') + kwargs.setdefault('format', 'c') + info(message, *args, **kwargs) -def debug(message, *args): +def debug(message, *args, **kwargs): if _debug: - info(message, *args, format='g', stream=sys.stderr) + kwargs.setdefault('format', 'g') + kwargs.setdefault('stream', sys.stderr) + info(message, *args, **kwargs) -def error(message, *args): - info("Error: " + str(message), *args, format='*r', stream=sys.stderr) +def error(message, *args, **kwargs): + kwargs.setdefault('format', '*r') + kwargs.setdefault('stream', sys.stderr) + info("Error: " + str(message), *args, **kwargs) -def warn(message, *args): - info("Warning: " + str(message), *args, format='*Y', stream=sys.stderr) +def warn(message, *args, **kwargs): + kwargs.setdefault('format', '*Y') + kwargs.setdefault('stream', sys.stderr) + info("Warning: " + str(message), *args, **kwargs) -def die(message, *args): - error(message, *args) +def die(message, *args, **kwargs): + error(message, *args, **kwargs) sys.exit(1) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index c53dcbc405..3ff83ae529 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -135,8 +135,34 @@ from spack.error import SpackError import spack.util.spack_yaml as syaml -"""Dict from section names -> function to check section YAML format.""" -valid_sections = ['compilers', 'mirrors', 'repos'] +"""Dict from section names -> schema for that section.""" +section_schemas = { + 'compilers' : { + '$schema': 'http://json-schema.org/schema#', + 'title' : 'Spack compiler configuration file schema', + 'type' : 'object', + 'properties' : { + 'compilers' : { + 'type' : 'map', + }, + }, + }, + + 'mirrors' : { + '$schema': 'http://json-schema.org/schema#', + 'title' : 'Spack mirror configuration file schema', + 'type' : 'map', + 'properties' : { + 'mirrors' : { + + } + }, + }, + + 'repos' : { + '$schema': 'http://json-schema.org/schema#', + 'title' : 'Spack repository configuration file schema', + }} """OrderedDict of config scopes keyed by name. Later scopes will override earlier scopes. @@ -146,9 +172,9 @@ config_scopes = OrderedDict() def validate_section(section): """Raise a ValueError if the section is not a valid section.""" - if section not in valid_sections: + if section not in section_schemas: raise ValueError("Invalid config section: '%s'. Options are %s." 
- % (section, valid_sections)) + % (section, section_schemas)) class ConfigScope(object): @@ -369,10 +395,12 @@ def update_config(section, update_data, scope=None): scope.write_section(section) -"""Print a configuration to stdout""" def print_section(section): + """Print a configuration to stdout.""" try: - yaml.dump(get_config(section), stream=sys.stdout, default_flow_style=False) + data = syaml.syaml_dict() + data[section] = get_config(section) + syaml.dump(data, stream=sys.stdout, default_flow_style=False) except (yaml.YAMLError, IOError) as e: raise ConfigError("Error reading configuration: %s" % section) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 5db83064b5..f926a14206 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -858,7 +858,7 @@ class Package(object): tty.warn("Keeping install prefix in place despite error.", "Spack will think this package is installed." + "Manually remove this directory to fix:", - self.prefix) + self.prefix, wrap=True) def real_work(): -- cgit v1.2.3-70-g09d2 From b567cb57e1c74dcb876b0ed935295a848ec45438 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 14 Jan 2016 10:40:34 -0800 Subject: Add jsonschema validation for config files. --- lib/spack/external/jsonschema/COPYING | 19 + lib/spack/external/jsonschema/README.rst | 104 +++ lib/spack/external/jsonschema/__init__.py | 26 + lib/spack/external/jsonschema/__main__.py | 2 + lib/spack/external/jsonschema/_format.py | 240 +++++++ lib/spack/external/jsonschema/_reflect.py | 155 ++++ lib/spack/external/jsonschema/_utils.py | 213 ++++++ lib/spack/external/jsonschema/_validators.py | 358 ++++++++++ lib/spack/external/jsonschema/cli.py | 72 ++ lib/spack/external/jsonschema/compat.py | 53 ++ lib/spack/external/jsonschema/exceptions.py | 264 +++++++ lib/spack/external/jsonschema/schemas/draft3.json | 201 ++++++ lib/spack/external/jsonschema/schemas/draft4.json | 221 ++++++ lib/spack/external/jsonschema/tests/__init__.py | 0 lib/spack/external/jsonschema/tests/compat.py | 15 + lib/spack/external/jsonschema/tests/test_cli.py | 110 +++ .../external/jsonschema/tests/test_exceptions.py | 382 ++++++++++ lib/spack/external/jsonschema/tests/test_format.py | 63 ++ .../jsonschema/tests/test_jsonschema_test_suite.py | 290 ++++++++ .../external/jsonschema/tests/test_validators.py | 786 +++++++++++++++++++++ lib/spack/external/jsonschema/validators.py | 428 +++++++++++ lib/spack/spack/__init__.py | 5 +- lib/spack/spack/cmd/repo.py | 5 +- lib/spack/spack/config.py | 199 +++++- 24 files changed, 4168 insertions(+), 43 deletions(-) create mode 100644 lib/spack/external/jsonschema/COPYING create mode 100644 lib/spack/external/jsonschema/README.rst create mode 100644 lib/spack/external/jsonschema/__init__.py create mode 100644 lib/spack/external/jsonschema/__main__.py create mode 100644 lib/spack/external/jsonschema/_format.py create mode 100644 lib/spack/external/jsonschema/_reflect.py create mode 100644 lib/spack/external/jsonschema/_utils.py create mode 100644 lib/spack/external/jsonschema/_validators.py create mode 100644 lib/spack/external/jsonschema/cli.py create mode 100644 lib/spack/external/jsonschema/compat.py create mode 100644 lib/spack/external/jsonschema/exceptions.py create mode 100644 lib/spack/external/jsonschema/schemas/draft3.json create mode 100644 lib/spack/external/jsonschema/schemas/draft4.json create mode 100644 lib/spack/external/jsonschema/tests/__init__.py create mode 100644 lib/spack/external/jsonschema/tests/compat.py create mode 100644 
lib/spack/external/jsonschema/tests/test_cli.py create mode 100644 lib/spack/external/jsonschema/tests/test_exceptions.py create mode 100644 lib/spack/external/jsonschema/tests/test_format.py create mode 100644 lib/spack/external/jsonschema/tests/test_jsonschema_test_suite.py create mode 100644 lib/spack/external/jsonschema/tests/test_validators.py create mode 100644 lib/spack/external/jsonschema/validators.py diff --git a/lib/spack/external/jsonschema/COPYING b/lib/spack/external/jsonschema/COPYING new file mode 100644 index 0000000000..af9cfbdb13 --- /dev/null +++ b/lib/spack/external/jsonschema/COPYING @@ -0,0 +1,19 @@ +Copyright (c) 2013 Julian Berman + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/lib/spack/external/jsonschema/README.rst b/lib/spack/external/jsonschema/README.rst new file mode 100644 index 0000000000..20c2fe6266 --- /dev/null +++ b/lib/spack/external/jsonschema/README.rst @@ -0,0 +1,104 @@ +========== +jsonschema +========== + +``jsonschema`` is an implementation of `JSON Schema `_ +for Python (supporting 2.6+ including Python 3). + +.. code-block:: python + + >>> from jsonschema import validate + + >>> # A sample schema, like what we'd get from json.load() + >>> schema = { + ... "type" : "object", + ... "properties" : { + ... "price" : {"type" : "number"}, + ... "name" : {"type" : "string"}, + ... }, + ... } + + >>> # If no exception is raised by validate(), the instance is valid. + >>> validate({"name" : "Eggs", "price" : 34.99}, schema) + + >>> validate( + ... {"name" : "Eggs", "price" : "Invalid"}, schema + ... ) # doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + ... + ValidationError: 'Invalid' is not of type 'number' + + +Features +-------- + +* Full support for + `Draft 3 `_ + **and** `Draft 4 `_ + of the schema. + +* `Lazy validation `_ + that can iteratively report *all* validation errors. + +* Small and extensible + +* `Programmatic querying `_ + of which properties or items failed validation. + + +Release Notes +------------- + +* A simple CLI was added for validation +* Validation errors now keep full absolute paths and absolute schema paths in + their ``absolute_path`` and ``absolute_schema_path`` attributes. The ``path`` + and ``schema_path`` attributes are deprecated in favor of ``relative_path`` + and ``relative_schema_path``\ . + +*Note:* Support for Python 3.2 was dropped in this release, and installation +now uses setuptools. 
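The lazy validation and programmatic error querying called out above look
like this in practice (an illustrative sketch; the schema and instance here
are invented for the example):

    from jsonschema import Draft4Validator
    from jsonschema.exceptions import best_match

    schema = {
        "type": "object",
        "properties": {
            "name": {"type": "string"},
            "price": {"type": "number"},
        },
    }
    instance = {"name": 12, "price": "cheap"}

    validator = Draft4Validator(schema)

    # Lazy validation: iterate over *all* errors instead of raising on
    # the first one.
    for error in validator.iter_errors(instance):
        print("%s: %s" % (list(error.path), error.message))

    # Programmatic querying: pick the single most relevant error.
    print(best_match(validator.iter_errors(instance)).message)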
+ + +Running the Test Suite +---------------------- + +``jsonschema`` uses the wonderful `Tox `_ for its +test suite. (It really is wonderful, if for some reason you haven't heard of +it, you really should use it for your projects). + +Assuming you have ``tox`` installed (perhaps via ``pip install tox`` or your +package manager), just run ``tox`` in the directory of your source checkout to +run ``jsonschema``'s test suite on all of the versions of Python ``jsonschema`` +supports. Note that you'll need to have all of those versions installed in +order to run the tests on each of them, otherwise ``tox`` will skip (and fail) +the tests on that version. + +Of course you're also free to just run the tests on a single version with your +favorite test runner. The tests live in the ``jsonschema.tests`` package. + + +Community +--------- + +There's a `mailing list `_ +for this implementation on Google Groups. + +Please join, and feel free to send questions there. + + +Contributing +------------ + +I'm Julian Berman. + +``jsonschema`` is on `GitHub `_. + +Get in touch, via GitHub or otherwise, if you've got something to contribute, +it'd be most welcome! + +You can also generally find me on Freenode (nick: ``tos9``) in various +channels, including ``#python``. + +If you feel overwhelmingly grateful, you can woo me with beer money on +`Gittip `_ or via Google Wallet with the email +in my GitHub profile. diff --git a/lib/spack/external/jsonschema/__init__.py b/lib/spack/external/jsonschema/__init__.py new file mode 100644 index 0000000000..6c099f1d8b --- /dev/null +++ b/lib/spack/external/jsonschema/__init__.py @@ -0,0 +1,26 @@ +""" +An implementation of JSON Schema for Python + +The main functionality is provided by the validator classes for each of the +supported JSON Schema versions. + +Most commonly, :func:`validate` is the quickest way to simply validate a given +instance under a schema, and will create a validator for you. + +""" + +from jsonschema.exceptions import ( + ErrorTree, FormatError, RefResolutionError, SchemaError, ValidationError +) +from jsonschema._format import ( + FormatChecker, draft3_format_checker, draft4_format_checker, +) +from jsonschema.validators import ( + Draft3Validator, Draft4Validator, RefResolver, validate +) + + +__version__ = "2.4.0" + + +# flake8: noqa diff --git a/lib/spack/external/jsonschema/__main__.py b/lib/spack/external/jsonschema/__main__.py new file mode 100644 index 0000000000..82c29fd39e --- /dev/null +++ b/lib/spack/external/jsonschema/__main__.py @@ -0,0 +1,2 @@ +from jsonschema.cli import main +main() diff --git a/lib/spack/external/jsonschema/_format.py b/lib/spack/external/jsonschema/_format.py new file mode 100644 index 0000000000..bb52d183ad --- /dev/null +++ b/lib/spack/external/jsonschema/_format.py @@ -0,0 +1,240 @@ +import datetime +import re +import socket + +from jsonschema.compat import str_types +from jsonschema.exceptions import FormatError + + +class FormatChecker(object): + """ + A ``format`` property checker. + + JSON Schema does not mandate that the ``format`` property actually do any + validation. If validation is desired however, instances of this class can + be hooked into validators to enable format validation. + + :class:`FormatChecker` objects always return ``True`` when asked about + formats that they do not know how to validate. + + To check a custom format using a function that takes an instance and + returns a ``bool``, use the :meth:`FormatChecker.checks` or + :meth:`FormatChecker.cls_checks` decorators. 
+ + :argument iterable formats: the known formats to validate. This argument + can be used to limit which formats will be used + during validation. + + """ + + checkers = {} + + def __init__(self, formats=None): + if formats is None: + self.checkers = self.checkers.copy() + else: + self.checkers = dict((k, self.checkers[k]) for k in formats) + + def checks(self, format, raises=()): + """ + Register a decorated function as validating a new format. + + :argument str format: the format that the decorated function will check + :argument Exception raises: the exception(s) raised by the decorated + function when an invalid instance is found. The exception object + will be accessible as the :attr:`ValidationError.cause` attribute + of the resulting validation error. + + """ + + def _checks(func): + self.checkers[format] = (func, raises) + return func + return _checks + + cls_checks = classmethod(checks) + + def check(self, instance, format): + """ + Check whether the instance conforms to the given format. + + :argument instance: the instance to check + :type: any primitive type (str, number, bool) + :argument str format: the format that instance should conform to + :raises: :exc:`FormatError` if instance does not conform to format + + """ + + if format not in self.checkers: + return + + func, raises = self.checkers[format] + result, cause = None, None + try: + result = func(instance) + except raises as e: + cause = e + if not result: + raise FormatError( + "%r is not a %r" % (instance, format), cause=cause, + ) + + def conforms(self, instance, format): + """ + Check whether the instance conforms to the given format. + + :argument instance: the instance to check + :type: any primitive type (str, number, bool) + :argument str format: the format that instance should conform to + :rtype: bool + + """ + + try: + self.check(instance, format) + except FormatError: + return False + else: + return True + + +_draft_checkers = {"draft3": [], "draft4": []} + + +def _checks_drafts(both=None, draft3=None, draft4=None, raises=()): + draft3 = draft3 or both + draft4 = draft4 or both + + def wrap(func): + if draft3: + _draft_checkers["draft3"].append(draft3) + func = FormatChecker.cls_checks(draft3, raises)(func) + if draft4: + _draft_checkers["draft4"].append(draft4) + func = FormatChecker.cls_checks(draft4, raises)(func) + return func + return wrap + + +@_checks_drafts("email") +def is_email(instance): + if not isinstance(instance, str_types): + return True + return "@" in instance + + +_ipv4_re = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$") + +@_checks_drafts(draft3="ip-address", draft4="ipv4") +def is_ipv4(instance): + if not isinstance(instance, str_types): + return True + if not _ipv4_re.match(instance): + return False + return all(0 <= int(component) <= 255 for component in instance.split(".")) + + +if hasattr(socket, "inet_pton"): + @_checks_drafts("ipv6", raises=socket.error) + def is_ipv6(instance): + if not isinstance(instance, str_types): + return True + return socket.inet_pton(socket.AF_INET6, instance) + + +_host_name_re = re.compile(r"^[A-Za-z0-9][A-Za-z0-9\.\-]{1,255}$") + +@_checks_drafts(draft3="host-name", draft4="hostname") +def is_host_name(instance): + if not isinstance(instance, str_types): + return True + if not _host_name_re.match(instance): + return False + components = instance.split(".") + for component in components: + if len(component) > 63: + return False + return True + + +try: + import rfc3987 +except ImportError: + pass +else: + @_checks_drafts("uri", raises=ValueError) + 
def is_uri(instance): + if not isinstance(instance, str_types): + return True + return rfc3987.parse(instance, rule="URI") + + +try: + import strict_rfc3339 +except ImportError: + try: + import isodate + except ImportError: + pass + else: + @_checks_drafts("date-time", raises=(ValueError, isodate.ISO8601Error)) + def is_date(instance): + if not isinstance(instance, str_types): + return True + return isodate.parse_datetime(instance) +else: + @_checks_drafts("date-time") + def is_date(instance): + if not isinstance(instance, str_types): + return True + return strict_rfc3339.validate_rfc3339(instance) + + +@_checks_drafts("regex", raises=re.error) +def is_regex(instance): + if not isinstance(instance, str_types): + return True + return re.compile(instance) + + +@_checks_drafts(draft3="date", raises=ValueError) +def is_date(instance): + if not isinstance(instance, str_types): + return True + return datetime.datetime.strptime(instance, "%Y-%m-%d") + + +@_checks_drafts(draft3="time", raises=ValueError) +def is_time(instance): + if not isinstance(instance, str_types): + return True + return datetime.datetime.strptime(instance, "%H:%M:%S") + + +try: + import webcolors +except ImportError: + pass +else: + def is_css_color_code(instance): + return webcolors.normalize_hex(instance) + + + @_checks_drafts(draft3="color", raises=(ValueError, TypeError)) + def is_css21_color(instance): + if ( + not isinstance(instance, str_types) or + instance.lower() in webcolors.css21_names_to_hex + ): + return True + return is_css_color_code(instance) + + + def is_css3_color(instance): + if instance.lower() in webcolors.css3_names_to_hex: + return True + return is_css_color_code(instance) + + +draft3_format_checker = FormatChecker(_draft_checkers["draft3"]) +draft4_format_checker = FormatChecker(_draft_checkers["draft4"]) diff --git a/lib/spack/external/jsonschema/_reflect.py b/lib/spack/external/jsonschema/_reflect.py new file mode 100644 index 0000000000..d09e38fbdc --- /dev/null +++ b/lib/spack/external/jsonschema/_reflect.py @@ -0,0 +1,155 @@ +# -*- test-case-name: twisted.test.test_reflect -*- +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. + +""" +Standardized versions of various cool and/or strange things that you can do +with Python's reflection capabilities. +""" + +import sys + +from jsonschema.compat import PY3 + + +class _NoModuleFound(Exception): + """ + No module was found because none exists. + """ + + + +class InvalidName(ValueError): + """ + The given name is not a dot-separated list of Python objects. + """ + + + +class ModuleNotFound(InvalidName): + """ + The module associated with the given name doesn't exist and it can't be + imported. + """ + + + +class ObjectNotFound(InvalidName): + """ + The object associated with the given name doesn't exist and it can't be + imported. + """ + + + +if PY3: + def reraise(exception, traceback): + raise exception.with_traceback(traceback) +else: + exec("""def reraise(exception, traceback): + raise exception.__class__, exception, traceback""") + +reraise.__doc__ = """ +Re-raise an exception, with an optional traceback, in a way that is compatible +with both Python 2 and Python 3. + +Note that on Python 3, re-raised exceptions will be mutated, with their +C{__traceback__} attribute being set. + +@param exception: The exception instance. +@param traceback: The traceback to use, or C{None} indicating a new traceback. 
+""" + + +def _importAndCheckStack(importName): + """ + Import the given name as a module, then walk the stack to determine whether + the failure was the module not existing, or some code in the module (for + example a dependent import) failing. This can be helpful to determine + whether any actual application code was run. For example, to distiguish + administrative error (entering the wrong module name), from programmer + error (writing buggy code in a module that fails to import). + + @param importName: The name of the module to import. + @type importName: C{str} + @raise Exception: if something bad happens. This can be any type of + exception, since nobody knows what loading some arbitrary code might + do. + @raise _NoModuleFound: if no module was found. + """ + try: + return __import__(importName) + except ImportError: + excType, excValue, excTraceback = sys.exc_info() + while excTraceback: + execName = excTraceback.tb_frame.f_globals["__name__"] + # in Python 2 execName is None when an ImportError is encountered, + # where in Python 3 execName is equal to the importName. + if execName is None or execName == importName: + reraise(excValue, excTraceback) + excTraceback = excTraceback.tb_next + raise _NoModuleFound() + + + +def namedAny(name): + """ + Retrieve a Python object by its fully qualified name from the global Python + module namespace. The first part of the name, that describes a module, + will be discovered and imported. Each subsequent part of the name is + treated as the name of an attribute of the object specified by all of the + name which came before it. For example, the fully-qualified name of this + object is 'twisted.python.reflect.namedAny'. + + @type name: L{str} + @param name: The name of the object to return. + + @raise InvalidName: If the name is an empty string, starts or ends with + a '.', or is otherwise syntactically incorrect. + + @raise ModuleNotFound: If the name is syntactically correct but the + module it specifies cannot be imported because it does not appear to + exist. + + @raise ObjectNotFound: If the name is syntactically correct, includes at + least one '.', but the module it specifies cannot be imported because + it does not appear to exist. + + @raise AttributeError: If an attribute of an object along the way cannot be + accessed, or a module along the way is not found. + + @return: the Python object identified by 'name'. + """ + if not name: + raise InvalidName('Empty module name') + + names = name.split('.') + + # if the name starts or ends with a '.' or contains '..', the __import__ + # will raise an 'Empty module name' error. This will provide a better error + # message. 
+ if '' in names: + raise InvalidName( + "name must be a string giving a '.'-separated list of Python " + "identifiers, not %r" % (name,)) + + topLevelPackage = None + moduleNames = names[:] + while not topLevelPackage: + if moduleNames: + trialname = '.'.join(moduleNames) + try: + topLevelPackage = _importAndCheckStack(trialname) + except _NoModuleFound: + moduleNames.pop() + else: + if len(names) == 1: + raise ModuleNotFound("No module named %r" % (name,)) + else: + raise ObjectNotFound('%r does not name an object' % (name,)) + + obj = topLevelPackage + for n in names[1:]: + obj = getattr(obj, n) + + return obj diff --git a/lib/spack/external/jsonschema/_utils.py b/lib/spack/external/jsonschema/_utils.py new file mode 100644 index 0000000000..2262f3305d --- /dev/null +++ b/lib/spack/external/jsonschema/_utils.py @@ -0,0 +1,213 @@ +import itertools +import json +import pkgutil +import re + +from jsonschema.compat import str_types, MutableMapping, urlsplit + + +class URIDict(MutableMapping): + """ + Dictionary which uses normalized URIs as keys. + + """ + + def normalize(self, uri): + return urlsplit(uri).geturl() + + def __init__(self, *args, **kwargs): + self.store = dict() + self.store.update(*args, **kwargs) + + def __getitem__(self, uri): + return self.store[self.normalize(uri)] + + def __setitem__(self, uri, value): + self.store[self.normalize(uri)] = value + + def __delitem__(self, uri): + del self.store[self.normalize(uri)] + + def __iter__(self): + return iter(self.store) + + def __len__(self): + return len(self.store) + + def __repr__(self): + return repr(self.store) + + +class Unset(object): + """ + An as-of-yet unset attribute or unprovided default parameter. + + """ + + def __repr__(self): + return "" + + +def load_schema(name): + """ + Load a schema from ./schemas/``name``.json and return it. + + """ + + data = pkgutil.get_data(__package__, "schemas/{0}.json".format(name)) + return json.loads(data.decode("utf-8")) + + +def indent(string, times=1): + """ + A dumb version of :func:`textwrap.indent` from Python 3.3. + + """ + + return "\n".join(" " * (4 * times) + line for line in string.splitlines()) + + +def format_as_index(indices): + """ + Construct a single string containing indexing operations for the indices. + + For example, [1, 2, "foo"] -> [1][2]["foo"] + + :type indices: sequence + + """ + + if not indices: + return "" + return "[%s]" % "][".join(repr(index) for index in indices) + + +def find_additional_properties(instance, schema): + """ + Return the set of additional properties for the given ``instance``. + + Weeds out properties that should have been validated by ``properties`` and + / or ``patternProperties``. + + Assumes ``instance`` is dict-like already. + + """ + + properties = schema.get("properties", {}) + patterns = "|".join(schema.get("patternProperties", {})) + for property in instance: + if property not in properties: + if patterns and re.search(patterns, property): + continue + yield property + + +def extras_msg(extras): + """ + Create an error message for extra items or properties. + + """ + + if len(extras) == 1: + verb = "was" + else: + verb = "were" + return ", ".join(repr(extra) for extra in extras), verb + + +def types_msg(instance, types): + """ + Create an error message for a failure to match the given types. + + If the ``instance`` is an object and contains a ``name`` property, it will + be considered to be a description of that object and used as its type. + + Otherwise the message is simply the reprs of the given ``types``. 
+ + """ + + reprs = [] + for type in types: + try: + reprs.append(repr(type["name"])) + except Exception: + reprs.append(repr(type)) + return "%r is not of type %s" % (instance, ", ".join(reprs)) + + +def flatten(suitable_for_isinstance): + """ + isinstance() can accept a bunch of really annoying different types: + * a single type + * a tuple of types + * an arbitrary nested tree of tuples + + Return a flattened tuple of the given argument. + + """ + + types = set() + + if not isinstance(suitable_for_isinstance, tuple): + suitable_for_isinstance = (suitable_for_isinstance,) + for thing in suitable_for_isinstance: + if isinstance(thing, tuple): + types.update(flatten(thing)) + else: + types.add(thing) + return tuple(types) + + +def ensure_list(thing): + """ + Wrap ``thing`` in a list if it's a single str. + + Otherwise, return it unchanged. + + """ + + if isinstance(thing, str_types): + return [thing] + return thing + + +def unbool(element, true=object(), false=object()): + """ + A hack to make True and 1 and False and 0 unique for ``uniq``. + + """ + + if element is True: + return true + elif element is False: + return false + return element + + +def uniq(container): + """ + Check if all of a container's elements are unique. + + Successively tries first to rely that the elements are hashable, then + falls back on them being sortable, and finally falls back on brute + force. + + """ + + try: + return len(set(unbool(i) for i in container)) == len(container) + except TypeError: + try: + sort = sorted(unbool(i) for i in container) + sliced = itertools.islice(sort, 1, None) + for i, j in zip(sort, sliced): + if i == j: + return False + except (NotImplementedError, TypeError): + seen = [] + for e in container: + e = unbool(e) + if e in seen: + return False + seen.append(e) + return True diff --git a/lib/spack/external/jsonschema/_validators.py b/lib/spack/external/jsonschema/_validators.py new file mode 100644 index 0000000000..c6e801ccb2 --- /dev/null +++ b/lib/spack/external/jsonschema/_validators.py @@ -0,0 +1,358 @@ +import re + +from jsonschema import _utils +from jsonschema.exceptions import FormatError, ValidationError +from jsonschema.compat import iteritems + + +FLOAT_TOLERANCE = 10 ** -15 + + +def patternProperties(validator, patternProperties, instance, schema): + if not validator.is_type(instance, "object"): + return + + for pattern, subschema in iteritems(patternProperties): + for k, v in iteritems(instance): + if re.search(pattern, k): + for error in validator.descend( + v, subschema, path=k, schema_path=pattern, + ): + yield error + + +def additionalProperties(validator, aP, instance, schema): + if not validator.is_type(instance, "object"): + return + + extras = set(_utils.find_additional_properties(instance, schema)) + + if validator.is_type(aP, "object"): + for extra in extras: + for error in validator.descend(instance[extra], aP, path=extra): + yield error + elif not aP and extras: + error = "Additional properties are not allowed (%s %s unexpected)" + yield ValidationError(error % _utils.extras_msg(extras)) + + +def items(validator, items, instance, schema): + if not validator.is_type(instance, "array"): + return + + if validator.is_type(items, "object"): + for index, item in enumerate(instance): + for error in validator.descend(item, items, path=index): + yield error + else: + for (index, item), subschema in zip(enumerate(instance), items): + for error in validator.descend( + item, subschema, path=index, schema_path=index, + ): + yield error + + +def additionalItems(validator, 
aI, instance, schema): + if ( + not validator.is_type(instance, "array") or + validator.is_type(schema.get("items", {}), "object") + ): + return + + len_items = len(schema.get("items", [])) + if validator.is_type(aI, "object"): + for index, item in enumerate(instance[len_items:], start=len_items): + for error in validator.descend(item, aI, path=index): + yield error + elif not aI and len(instance) > len(schema.get("items", [])): + error = "Additional items are not allowed (%s %s unexpected)" + yield ValidationError( + error % + _utils.extras_msg(instance[len(schema.get("items", [])):]) + ) + + +def minimum(validator, minimum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if schema.get("exclusiveMinimum", False): + failed = float(instance) <= minimum + cmp = "less than or equal to" + else: + failed = float(instance) < minimum + cmp = "less than" + + if failed: + yield ValidationError( + "%r is %s the minimum of %r" % (instance, cmp, minimum) + ) + + +def maximum(validator, maximum, instance, schema): + if not validator.is_type(instance, "number"): + return + + if schema.get("exclusiveMaximum", False): + failed = instance >= maximum + cmp = "greater than or equal to" + else: + failed = instance > maximum + cmp = "greater than" + + if failed: + yield ValidationError( + "%r is %s the maximum of %r" % (instance, cmp, maximum) + ) + + +def multipleOf(validator, dB, instance, schema): + if not validator.is_type(instance, "number"): + return + + if isinstance(dB, float): + mod = instance % dB + failed = (mod > FLOAT_TOLERANCE) and (dB - mod) > FLOAT_TOLERANCE + else: + failed = instance % dB + + if failed: + yield ValidationError("%r is not a multiple of %r" % (instance, dB)) + + +def minItems(validator, mI, instance, schema): + if validator.is_type(instance, "array") and len(instance) < mI: + yield ValidationError("%r is too short" % (instance,)) + + +def maxItems(validator, mI, instance, schema): + if validator.is_type(instance, "array") and len(instance) > mI: + yield ValidationError("%r is too long" % (instance,)) + + +def uniqueItems(validator, uI, instance, schema): + if ( + uI and + validator.is_type(instance, "array") and + not _utils.uniq(instance) + ): + yield ValidationError("%r has non-unique elements" % instance) + + +def pattern(validator, patrn, instance, schema): + if ( + validator.is_type(instance, "string") and + not re.search(patrn, instance) + ): + yield ValidationError("%r does not match %r" % (instance, patrn)) + + +def format(validator, format, instance, schema): + if validator.format_checker is not None: + try: + validator.format_checker.check(instance, format) + except FormatError as error: + yield ValidationError(error.message, cause=error.cause) + + +def minLength(validator, mL, instance, schema): + if validator.is_type(instance, "string") and len(instance) < mL: + yield ValidationError("%r is too short" % (instance,)) + + +def maxLength(validator, mL, instance, schema): + if validator.is_type(instance, "string") and len(instance) > mL: + yield ValidationError("%r is too long" % (instance,)) + + +def dependencies(validator, dependencies, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, dependency in iteritems(dependencies): + if property not in instance: + continue + + if validator.is_type(dependency, "object"): + for error in validator.descend( + instance, dependency, schema_path=property, + ): + yield error + else: + dependencies = _utils.ensure_list(dependency) + for dependency in 
dependencies: + if dependency not in instance: + yield ValidationError( + "%r is a dependency of %r" % (dependency, property) + ) + + +def enum(validator, enums, instance, schema): + if instance not in enums: + yield ValidationError("%r is not one of %r" % (instance, enums)) + + +def ref(validator, ref, instance, schema): + with validator.resolver.resolving(ref) as resolved: + for error in validator.descend(instance, resolved): + yield error + + +def type_draft3(validator, types, instance, schema): + types = _utils.ensure_list(types) + + all_errors = [] + for index, type in enumerate(types): + if type == "any": + return + if validator.is_type(type, "object"): + errors = list(validator.descend(instance, type, schema_path=index)) + if not errors: + return + all_errors.extend(errors) + else: + if validator.is_type(instance, type): + return + else: + yield ValidationError( + _utils.types_msg(instance, types), context=all_errors, + ) + + +def properties_draft3(validator, properties, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, subschema in iteritems(properties): + if property in instance: + for error in validator.descend( + instance[property], + subschema, + path=property, + schema_path=property, + ): + yield error + elif subschema.get("required", False): + error = ValidationError("%r is a required property" % property) + error._set( + validator="required", + validator_value=subschema["required"], + instance=instance, + schema=schema, + ) + error.path.appendleft(property) + error.schema_path.extend([property, "required"]) + yield error + + +def disallow_draft3(validator, disallow, instance, schema): + for disallowed in _utils.ensure_list(disallow): + if validator.is_valid(instance, {"type" : [disallowed]}): + yield ValidationError( + "%r is disallowed for %r" % (disallowed, instance) + ) + + +def extends_draft3(validator, extends, instance, schema): + if validator.is_type(extends, "object"): + for error in validator.descend(instance, extends): + yield error + return + for index, subschema in enumerate(extends): + for error in validator.descend(instance, subschema, schema_path=index): + yield error + + +def type_draft4(validator, types, instance, schema): + types = _utils.ensure_list(types) + + if not any(validator.is_type(instance, type) for type in types): + yield ValidationError(_utils.types_msg(instance, types)) + + +def properties_draft4(validator, properties, instance, schema): + if not validator.is_type(instance, "object"): + return + + for property, subschema in iteritems(properties): + if property in instance: + for error in validator.descend( + instance[property], + subschema, + path=property, + schema_path=property, + ): + yield error + + +def required_draft4(validator, required, instance, schema): + if not validator.is_type(instance, "object"): + return + for property in required: + if property not in instance: + yield ValidationError("%r is a required property" % property) + + +def minProperties_draft4(validator, mP, instance, schema): + if validator.is_type(instance, "object") and len(instance) < mP: + yield ValidationError( + "%r does not have enough properties" % (instance,) + ) + + +def maxProperties_draft4(validator, mP, instance, schema): + if not validator.is_type(instance, "object"): + return + if validator.is_type(instance, "object") and len(instance) > mP: + yield ValidationError("%r has too many properties" % (instance,)) + + +def allOf_draft4(validator, allOf, instance, schema): + for index, subschema in enumerate(allOf): 
+ for error in validator.descend(instance, subschema, schema_path=index): + yield error + + +def oneOf_draft4(validator, oneOf, instance, schema): + subschemas = enumerate(oneOf) + all_errors = [] + for index, subschema in subschemas: + errs = list(validator.descend(instance, subschema, schema_path=index)) + if not errs: + first_valid = subschema + break + all_errors.extend(errs) + else: + yield ValidationError( + "%r is not valid under any of the given schemas" % (instance,), + context=all_errors, + ) + + more_valid = [s for i, s in subschemas if validator.is_valid(instance, s)] + if more_valid: + more_valid.append(first_valid) + reprs = ", ".join(repr(schema) for schema in more_valid) + yield ValidationError( + "%r is valid under each of %s" % (instance, reprs) + ) + + +def anyOf_draft4(validator, anyOf, instance, schema): + all_errors = [] + for index, subschema in enumerate(anyOf): + errs = list(validator.descend(instance, subschema, schema_path=index)) + if not errs: + break + all_errors.extend(errs) + else: + yield ValidationError( + "%r is not valid under any of the given schemas" % (instance,), + context=all_errors, + ) + + +def not_draft4(validator, not_schema, instance, schema): + if validator.is_valid(instance, not_schema): + yield ValidationError( + "%r is not allowed for %r" % (not_schema, instance) + ) diff --git a/lib/spack/external/jsonschema/cli.py b/lib/spack/external/jsonschema/cli.py new file mode 100644 index 0000000000..0126564f46 --- /dev/null +++ b/lib/spack/external/jsonschema/cli.py @@ -0,0 +1,72 @@ +from __future__ import absolute_import +import argparse +import json +import sys + +from jsonschema._reflect import namedAny +from jsonschema.validators import validator_for + + +def _namedAnyWithDefault(name): + if "." not in name: + name = "jsonschema." 
+ name + return namedAny(name) + + +def _json_file(path): + with open(path) as file: + return json.load(file) + + +parser = argparse.ArgumentParser( + description="JSON Schema Validation CLI", +) +parser.add_argument( + "-i", "--instance", + action="append", + dest="instances", + type=_json_file, + help="a path to a JSON instance to validate " + "(may be specified multiple times)", +) +parser.add_argument( + "-F", "--error-format", + default="{error.instance}: {error.message}\n", + help="the format to use for each error output message, specified in " + "a form suitable for passing to str.format, which will be called " + "with 'error' for each error", +) +parser.add_argument( + "-V", "--validator", + type=_namedAnyWithDefault, + help="the fully qualified object name of a validator to use, or, for " + "validators that are registered with jsonschema, simply the name " + "of the class.", +) +parser.add_argument( + "schema", + help="the JSON Schema to validate with", + type=_json_file, +) + + +def parse_args(args): + arguments = vars(parser.parse_args(args=args or ["--help"])) + if arguments["validator"] is None: + arguments["validator"] = validator_for(arguments["schema"]) + return arguments + + +def main(args=sys.argv[1:]): + sys.exit(run(arguments=parse_args(args=args))) + + +def run(arguments, stdout=sys.stdout, stderr=sys.stderr): + error_format = arguments["error_format"] + validator = arguments["validator"](schema=arguments["schema"]) + errored = False + for instance in arguments["instances"] or (): + for error in validator.iter_errors(instance): + stderr.write(error_format.format(error=error)) + errored = True + return errored diff --git a/lib/spack/external/jsonschema/compat.py b/lib/spack/external/jsonschema/compat.py new file mode 100644 index 0000000000..6ca49ab6be --- /dev/null +++ b/lib/spack/external/jsonschema/compat.py @@ -0,0 +1,53 @@ +from __future__ import unicode_literals +import sys +import operator + +try: + from collections import MutableMapping, Sequence # noqa +except ImportError: + from collections.abc import MutableMapping, Sequence # noqa + +PY3 = sys.version_info[0] >= 3 + +if PY3: + zip = zip + from io import StringIO + from urllib.parse import ( + unquote, urljoin, urlunsplit, SplitResult, urlsplit as _urlsplit + ) + from urllib.request import urlopen + str_types = str, + int_types = int, + iteritems = operator.methodcaller("items") +else: + from itertools import izip as zip # noqa + from StringIO import StringIO + from urlparse import ( + urljoin, urlunsplit, SplitResult, urlsplit as _urlsplit # noqa + ) + from urllib import unquote # noqa + from urllib2 import urlopen # noqa + str_types = basestring + int_types = int, long + iteritems = operator.methodcaller("iteritems") + + +# On python < 3.3 fragments are not handled properly with unknown schemes +def urlsplit(url): + scheme, netloc, path, query, fragment = _urlsplit(url) + if "#" in path: + path, fragment = path.split("#", 1) + return SplitResult(scheme, netloc, path, query, fragment) + + +def urldefrag(url): + if "#" in url: + s, n, p, q, frag = urlsplit(url) + defrag = urlunsplit((s, n, p, q, '')) + else: + defrag = url + frag = '' + return defrag, frag + + +# flake8: noqa diff --git a/lib/spack/external/jsonschema/exceptions.py b/lib/spack/external/jsonschema/exceptions.py new file mode 100644 index 0000000000..478e59c531 --- /dev/null +++ b/lib/spack/external/jsonschema/exceptions.py @@ -0,0 +1,264 @@ +from collections import defaultdict, deque +import itertools +import pprint +import textwrap + +from 
jsonschema import _utils +from jsonschema.compat import PY3, iteritems + + +WEAK_MATCHES = frozenset(["anyOf", "oneOf"]) +STRONG_MATCHES = frozenset() + +_unset = _utils.Unset() + + +class _Error(Exception): + def __init__( + self, + message, + validator=_unset, + path=(), + cause=None, + context=(), + validator_value=_unset, + instance=_unset, + schema=_unset, + schema_path=(), + parent=None, + ): + self.message = message + self.path = self.relative_path = deque(path) + self.schema_path = self.relative_schema_path = deque(schema_path) + self.context = list(context) + self.cause = self.__cause__ = cause + self.validator = validator + self.validator_value = validator_value + self.instance = instance + self.schema = schema + self.parent = parent + + for error in context: + error.parent = self + + def __repr__(self): + return "<%s: %r>" % (self.__class__.__name__, self.message) + + def __str__(self): + return unicode(self).encode("utf-8") + + def __unicode__(self): + essential_for_verbose = ( + self.validator, self.validator_value, self.instance, self.schema, + ) + if any(m is _unset for m in essential_for_verbose): + return self.message + + pschema = pprint.pformat(self.schema, width=72) + pinstance = pprint.pformat(self.instance, width=72) + return self.message + textwrap.dedent(""" + + Failed validating %r in schema%s: + %s + + On instance%s: + %s + """.rstrip() + ) % ( + self.validator, + _utils.format_as_index(list(self.relative_schema_path)[:-1]), + _utils.indent(pschema), + _utils.format_as_index(self.relative_path), + _utils.indent(pinstance), + ) + + if PY3: + __str__ = __unicode__ + + @classmethod + def create_from(cls, other): + return cls(**other._contents()) + + @property + def absolute_path(self): + parent = self.parent + if parent is None: + return self.relative_path + + path = deque(self.relative_path) + path.extendleft(parent.absolute_path) + return path + + @property + def absolute_schema_path(self): + parent = self.parent + if parent is None: + return self.relative_schema_path + + path = deque(self.relative_schema_path) + path.extendleft(parent.absolute_schema_path) + return path + + def _set(self, **kwargs): + for k, v in iteritems(kwargs): + if getattr(self, k) is _unset: + setattr(self, k, v) + + def _contents(self): + attrs = ( + "message", "cause", "context", "validator", "validator_value", + "path", "schema_path", "instance", "schema", "parent", + ) + return dict((attr, getattr(self, attr)) for attr in attrs) + + +class ValidationError(_Error): + pass + + +class SchemaError(_Error): + pass + + +class RefResolutionError(Exception): + pass + + +class UnknownType(Exception): + def __init__(self, type, instance, schema): + self.type = type + self.instance = instance + self.schema = schema + + def __str__(self): + return unicode(self).encode("utf-8") + + def __unicode__(self): + pschema = pprint.pformat(self.schema, width=72) + pinstance = pprint.pformat(self.instance, width=72) + return textwrap.dedent(""" + Unknown type %r for validator with schema: + %s + + While checking instance: + %s + """.rstrip() + ) % (self.type, _utils.indent(pschema), _utils.indent(pinstance)) + + if PY3: + __str__ = __unicode__ + + + +class FormatError(Exception): + def __init__(self, message, cause=None): + super(FormatError, self).__init__(message, cause) + self.message = message + self.cause = self.__cause__ = cause + + def __str__(self): + return self.message.encode("utf-8") + + def __unicode__(self): + return self.message + + if PY3: + __str__ = __unicode__ + + +class ErrorTree(object): + 
""" + ErrorTrees make it easier to check which validations failed. + + """ + + _instance = _unset + + def __init__(self, errors=()): + self.errors = {} + self._contents = defaultdict(self.__class__) + + for error in errors: + container = self + for element in error.path: + container = container[element] + container.errors[error.validator] = error + + self._instance = error.instance + + def __contains__(self, index): + """ + Check whether ``instance[index]`` has any errors. + + """ + + return index in self._contents + + def __getitem__(self, index): + """ + Retrieve the child tree one level down at the given ``index``. + + If the index is not in the instance that this tree corresponds to and + is not known by this tree, whatever error would be raised by + ``instance.__getitem__`` will be propagated (usually this is some + subclass of :class:`LookupError`. + + """ + + if self._instance is not _unset and index not in self: + self._instance[index] + return self._contents[index] + + def __setitem__(self, index, value): + self._contents[index] = value + + def __iter__(self): + """ + Iterate (non-recursively) over the indices in the instance with errors. + + """ + + return iter(self._contents) + + def __len__(self): + """ + Same as :attr:`total_errors`. + + """ + + return self.total_errors + + def __repr__(self): + return "<%s (%s total errors)>" % (self.__class__.__name__, len(self)) + + @property + def total_errors(self): + """ + The total number of errors in the entire tree, including children. + + """ + + child_errors = sum(len(tree) for _, tree in iteritems(self._contents)) + return len(self.errors) + child_errors + + +def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES): + def relevance(error): + validator = error.validator + return -len(error.path), validator not in weak, validator in strong + return relevance + + +relevance = by_relevance() + + +def best_match(errors, key=relevance): + errors = iter(errors) + best = next(errors, None) + if best is None: + return + best = max(itertools.chain([best], errors), key=key) + + while best.context: + best = min(best.context, key=key) + return best diff --git a/lib/spack/external/jsonschema/schemas/draft3.json b/lib/spack/external/jsonschema/schemas/draft3.json new file mode 100644 index 0000000000..5bcefe30d5 --- /dev/null +++ b/lib/spack/external/jsonschema/schemas/draft3.json @@ -0,0 +1,201 @@ +{ + "$schema": "http://json-schema.org/draft-03/schema#", + "dependencies": { + "exclusiveMaximum": "maximum", + "exclusiveMinimum": "minimum" + }, + "id": "http://json-schema.org/draft-03/schema#", + "properties": { + "$ref": { + "format": "uri", + "type": "string" + }, + "$schema": { + "format": "uri", + "type": "string" + }, + "additionalItems": { + "default": {}, + "type": [ + { + "$ref": "#" + }, + "boolean" + ] + }, + "additionalProperties": { + "default": {}, + "type": [ + { + "$ref": "#" + }, + "boolean" + ] + }, + "default": { + "type": "any" + }, + "dependencies": { + "additionalProperties": { + "items": { + "type": "string" + }, + "type": [ + "string", + "array", + { + "$ref": "#" + } + ] + }, + "default": {}, + "type": [ + "string", + "array", + "object" + ] + }, + "description": { + "type": "string" + }, + "disallow": { + "items": { + "type": [ + "string", + { + "$ref": "#" + } + ] + }, + "type": [ + "string", + "array" + ], + "uniqueItems": true + }, + "divisibleBy": { + "default": 1, + "exclusiveMinimum": true, + "minimum": 0, + "type": "number" + }, + "enum": { + "minItems": 1, + "type": "array", + "uniqueItems": true + }, + 
"exclusiveMaximum": { + "default": false, + "type": "boolean" + }, + "exclusiveMinimum": { + "default": false, + "type": "boolean" + }, + "extends": { + "default": {}, + "items": { + "$ref": "#" + }, + "type": [ + { + "$ref": "#" + }, + "array" + ] + }, + "format": { + "type": "string" + }, + "id": { + "format": "uri", + "type": "string" + }, + "items": { + "default": {}, + "items": { + "$ref": "#" + }, + "type": [ + { + "$ref": "#" + }, + "array" + ] + }, + "maxDecimal": { + "minimum": 0, + "type": "number" + }, + "maxItems": { + "minimum": 0, + "type": "integer" + }, + "maxLength": { + "type": "integer" + }, + "maximum": { + "type": "number" + }, + "minItems": { + "default": 0, + "minimum": 0, + "type": "integer" + }, + "minLength": { + "default": 0, + "minimum": 0, + "type": "integer" + }, + "minimum": { + "type": "number" + }, + "pattern": { + "format": "regex", + "type": "string" + }, + "patternProperties": { + "additionalProperties": { + "$ref": "#" + }, + "default": {}, + "type": "object" + }, + "properties": { + "additionalProperties": { + "$ref": "#", + "type": "object" + }, + "default": {}, + "type": "object" + }, + "required": { + "default": false, + "type": "boolean" + }, + "title": { + "type": "string" + }, + "type": { + "default": "any", + "items": { + "type": [ + "string", + { + "$ref": "#" + } + ] + }, + "type": [ + "string", + "array" + ], + "uniqueItems": true + }, + "uniqueItems": { + "default": false, + "type": "boolean" + } + }, + "type": "object" +} diff --git a/lib/spack/external/jsonschema/schemas/draft4.json b/lib/spack/external/jsonschema/schemas/draft4.json new file mode 100644 index 0000000000..fead5cefab --- /dev/null +++ b/lib/spack/external/jsonschema/schemas/draft4.json @@ -0,0 +1,221 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "default": {}, + "definitions": { + "positiveInteger": { + "minimum": 0, + "type": "integer" + }, + "positiveIntegerDefault0": { + "allOf": [ + { + "$ref": "#/definitions/positiveInteger" + }, + { + "default": 0 + } + ] + }, + "schemaArray": { + "items": { + "$ref": "#" + }, + "minItems": 1, + "type": "array" + }, + "simpleTypes": { + "enum": [ + "array", + "boolean", + "integer", + "null", + "number", + "object", + "string" + ] + }, + "stringArray": { + "items": { + "type": "string" + }, + "minItems": 1, + "type": "array", + "uniqueItems": true + } + }, + "dependencies": { + "exclusiveMaximum": [ + "maximum" + ], + "exclusiveMinimum": [ + "minimum" + ] + }, + "description": "Core schema meta-schema", + "id": "http://json-schema.org/draft-04/schema#", + "properties": { + "$schema": { + "format": "uri", + "type": "string" + }, + "additionalItems": { + "anyOf": [ + { + "type": "boolean" + }, + { + "$ref": "#" + } + ], + "default": {} + }, + "additionalProperties": { + "anyOf": [ + { + "type": "boolean" + }, + { + "$ref": "#" + } + ], + "default": {} + }, + "allOf": { + "$ref": "#/definitions/schemaArray" + }, + "anyOf": { + "$ref": "#/definitions/schemaArray" + }, + "default": {}, + "definitions": { + "additionalProperties": { + "$ref": "#" + }, + "default": {}, + "type": "object" + }, + "dependencies": { + "additionalProperties": { + "anyOf": [ + { + "$ref": "#" + }, + { + "$ref": "#/definitions/stringArray" + } + ] + }, + "type": "object" + }, + "description": { + "type": "string" + }, + "enum": { + "minItems": 1, + "type": "array", + "uniqueItems": true + }, + "exclusiveMaximum": { + "default": false, + "type": "boolean" + }, + "exclusiveMinimum": { + "default": false, + "type": "boolean" + }, + "id": { + 
"format": "uri", + "type": "string" + }, + "items": { + "anyOf": [ + { + "$ref": "#" + }, + { + "$ref": "#/definitions/schemaArray" + } + ], + "default": {} + }, + "maxItems": { + "$ref": "#/definitions/positiveInteger" + }, + "maxLength": { + "$ref": "#/definitions/positiveInteger" + }, + "maxProperties": { + "$ref": "#/definitions/positiveInteger" + }, + "maximum": { + "type": "number" + }, + "minItems": { + "$ref": "#/definitions/positiveIntegerDefault0" + }, + "minLength": { + "$ref": "#/definitions/positiveIntegerDefault0" + }, + "minProperties": { + "$ref": "#/definitions/positiveIntegerDefault0" + }, + "minimum": { + "type": "number" + }, + "multipleOf": { + "exclusiveMinimum": true, + "minimum": 0, + "type": "number" + }, + "not": { + "$ref": "#" + }, + "oneOf": { + "$ref": "#/definitions/schemaArray" + }, + "pattern": { + "format": "regex", + "type": "string" + }, + "patternProperties": { + "additionalProperties": { + "$ref": "#" + }, + "default": {}, + "type": "object" + }, + "properties": { + "additionalProperties": { + "$ref": "#" + }, + "default": {}, + "type": "object" + }, + "required": { + "$ref": "#/definitions/stringArray" + }, + "title": { + "type": "string" + }, + "type": { + "anyOf": [ + { + "$ref": "#/definitions/simpleTypes" + }, + { + "items": { + "$ref": "#/definitions/simpleTypes" + }, + "minItems": 1, + "type": "array", + "uniqueItems": true + } + ] + }, + "uniqueItems": { + "default": false, + "type": "boolean" + } + }, + "type": "object" +} diff --git a/lib/spack/external/jsonschema/tests/__init__.py b/lib/spack/external/jsonschema/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/lib/spack/external/jsonschema/tests/compat.py b/lib/spack/external/jsonschema/tests/compat.py new file mode 100644 index 0000000000..b37483f5dd --- /dev/null +++ b/lib/spack/external/jsonschema/tests/compat.py @@ -0,0 +1,15 @@ +import sys + + +if sys.version_info[:2] < (2, 7): # pragma: no cover + import unittest2 as unittest +else: + import unittest + +try: + from unittest import mock +except ImportError: + import mock + + +# flake8: noqa diff --git a/lib/spack/external/jsonschema/tests/test_cli.py b/lib/spack/external/jsonschema/tests/test_cli.py new file mode 100644 index 0000000000..f625ca989d --- /dev/null +++ b/lib/spack/external/jsonschema/tests/test_cli.py @@ -0,0 +1,110 @@ +from jsonschema import Draft4Validator, ValidationError, cli +from jsonschema.compat import StringIO +from jsonschema.tests.compat import mock, unittest + + +def fake_validator(*errors): + errors = list(reversed(errors)) + + class FakeValidator(object): + def __init__(self, *args, **kwargs): + pass + + def iter_errors(self, instance): + if errors: + return errors.pop() + return [] + return FakeValidator + + +class TestParser(unittest.TestCase): + FakeValidator = fake_validator() + + def setUp(self): + mock_open = mock.mock_open() + patch_open = mock.patch.object(cli, "open", mock_open, create=True) + patch_open.start() + self.addCleanup(patch_open.stop) + + mock_json_load = mock.Mock() + mock_json_load.return_value = {} + patch_json_load = mock.patch("json.load") + patch_json_load.start() + self.addCleanup(patch_json_load.stop) + + def test_find_validator_by_fully_qualified_object_name(self): + arguments = cli.parse_args( + [ + "--validator", + "jsonschema.tests.test_cli.TestParser.FakeValidator", + "--instance", "foo.json", + "schema.json", + ] + ) + self.assertIs(arguments["validator"], self.FakeValidator) + + def test_find_validator_in_jsonschema(self): + arguments = 
cli.parse_args( + [ + "--validator", "Draft4Validator", + "--instance", "foo.json", + "schema.json", + ] + ) + self.assertIs(arguments["validator"], Draft4Validator) + + +class TestCLI(unittest.TestCase): + def test_successful_validation(self): + stdout, stderr = StringIO(), StringIO() + exit_code = cli.run( + { + "validator": fake_validator(), + "schema": {}, + "instances": [1], + "error_format": "{error.message}", + }, + stdout=stdout, + stderr=stderr, + ) + self.assertFalse(stdout.getvalue()) + self.assertFalse(stderr.getvalue()) + self.assertEqual(exit_code, 0) + + def test_unsuccessful_validation(self): + error = ValidationError("I am an error!", instance=1) + stdout, stderr = StringIO(), StringIO() + exit_code = cli.run( + { + "validator": fake_validator([error]), + "schema": {}, + "instances": [1], + "error_format": "{error.instance} - {error.message}", + }, + stdout=stdout, + stderr=stderr, + ) + self.assertFalse(stdout.getvalue()) + self.assertEqual(stderr.getvalue(), "1 - I am an error!") + self.assertEqual(exit_code, 1) + + def test_unsuccessful_validation_multiple_instances(self): + first_errors = [ + ValidationError("9", instance=1), + ValidationError("8", instance=1), + ] + second_errors = [ValidationError("7", instance=2)] + stdout, stderr = StringIO(), StringIO() + exit_code = cli.run( + { + "validator": fake_validator(first_errors, second_errors), + "schema": {}, + "instances": [1, 2], + "error_format": "{error.instance} - {error.message}\t", + }, + stdout=stdout, + stderr=stderr, + ) + self.assertFalse(stdout.getvalue()) + self.assertEqual(stderr.getvalue(), "1 - 9\t1 - 8\t2 - 7\t") + self.assertEqual(exit_code, 1) diff --git a/lib/spack/external/jsonschema/tests/test_exceptions.py b/lib/spack/external/jsonschema/tests/test_exceptions.py new file mode 100644 index 0000000000..9e5793c628 --- /dev/null +++ b/lib/spack/external/jsonschema/tests/test_exceptions.py @@ -0,0 +1,382 @@ +import textwrap + +from jsonschema import Draft4Validator, exceptions +from jsonschema.compat import PY3 +from jsonschema.tests.compat import mock, unittest + + +class TestBestMatch(unittest.TestCase): + def best_match(self, errors): + errors = list(errors) + best = exceptions.best_match(errors) + reversed_best = exceptions.best_match(reversed(errors)) + self.assertEqual( + best, + reversed_best, + msg="Didn't return a consistent best match!\n" + "Got: {0}\n\nThen: {1}".format(best, reversed_best), + ) + return best + + def test_shallower_errors_are_better_matches(self): + validator = Draft4Validator( + { + "properties" : { + "foo" : { + "minProperties" : 2, + "properties" : {"bar" : {"type" : "object"}}, + } + } + } + ) + best = self.best_match(validator.iter_errors({"foo" : {"bar" : []}})) + self.assertEqual(best.validator, "minProperties") + + def test_oneOf_and_anyOf_are_weak_matches(self): + """ + A property you *must* match is probably better than one you have to + match a part of. + + """ + + validator = Draft4Validator( + { + "minProperties" : 2, + "anyOf" : [{"type" : "string"}, {"type" : "number"}], + "oneOf" : [{"type" : "string"}, {"type" : "number"}], + } + ) + best = self.best_match(validator.iter_errors({})) + self.assertEqual(best.validator, "minProperties") + + def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self): + """ + If the most relevant error is an anyOf, then we traverse its context + and select the otherwise *least* relevant error, since in this case + that means the most specific, deep, error inside the instance. + + I.e. 
since only one of the schemas must match, we look for the most + relevant one. + + """ + + validator = Draft4Validator( + { + "properties" : { + "foo" : { + "anyOf" : [ + {"type" : "string"}, + {"properties" : {"bar" : {"type" : "array"}}}, + ], + }, + }, + }, + ) + best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}})) + self.assertEqual(best.validator_value, "array") + + def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self): + """ + If the most relevant error is an oneOf, then we traverse its context + and select the otherwise *least* relevant error, since in this case + that means the most specific, deep, error inside the instance. + + I.e. since only one of the schemas must match, we look for the most + relevant one. + + """ + + validator = Draft4Validator( + { + "properties" : { + "foo" : { + "oneOf" : [ + {"type" : "string"}, + {"properties" : {"bar" : {"type" : "array"}}}, + ], + }, + }, + }, + ) + best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}})) + self.assertEqual(best.validator_value, "array") + + def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self): + """ + Now, if the error is allOf, we traverse but select the *most* relevant + error from the context, because all schemas here must match anyways. + + """ + + validator = Draft4Validator( + { + "properties" : { + "foo" : { + "allOf" : [ + {"type" : "string"}, + {"properties" : {"bar" : {"type" : "array"}}}, + ], + }, + }, + }, + ) + best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}})) + self.assertEqual(best.validator_value, "string") + + def test_nested_context_for_oneOf(self): + validator = Draft4Validator( + { + "properties" : { + "foo" : { + "oneOf" : [ + {"type" : "string"}, + { + "oneOf" : [ + {"type" : "string"}, + { + "properties" : { + "bar" : {"type" : "array"} + }, + }, + ], + }, + ], + }, + }, + }, + ) + best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}})) + self.assertEqual(best.validator_value, "array") + + def test_one_error(self): + validator = Draft4Validator({"minProperties" : 2}) + error, = validator.iter_errors({}) + self.assertEqual( + exceptions.best_match(validator.iter_errors({})).validator, + "minProperties", + ) + + def test_no_errors(self): + validator = Draft4Validator({}) + self.assertIsNone(exceptions.best_match(validator.iter_errors({}))) + + +class TestByRelevance(unittest.TestCase): + def test_short_paths_are_better_matches(self): + shallow = exceptions.ValidationError("Oh no!", path=["baz"]) + deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"]) + match = max([shallow, deep], key=exceptions.relevance) + self.assertIs(match, shallow) + + match = max([deep, shallow], key=exceptions.relevance) + self.assertIs(match, shallow) + + def test_global_errors_are_even_better_matches(self): + shallow = exceptions.ValidationError("Oh no!", path=[]) + deep = exceptions.ValidationError("Oh yes!", path=["foo"]) + + errors = sorted([shallow, deep], key=exceptions.relevance) + self.assertEqual( + [list(error.path) for error in errors], + [["foo"], []], + ) + + errors = sorted([deep, shallow], key=exceptions.relevance) + self.assertEqual( + [list(error.path) for error in errors], + [["foo"], []], + ) + + def test_weak_validators_are_lower_priority(self): + weak = exceptions.ValidationError("Oh no!", path=[], validator="a") + normal = exceptions.ValidationError("Oh yes!", path=[], validator="b") + + best_match = exceptions.by_relevance(weak="a") + + match = max([weak, normal], key=best_match) + 
self.assertIs(match, normal) + + match = max([normal, weak], key=best_match) + self.assertIs(match, normal) + + def test_strong_validators_are_higher_priority(self): + weak = exceptions.ValidationError("Oh no!", path=[], validator="a") + normal = exceptions.ValidationError("Oh yes!", path=[], validator="b") + strong = exceptions.ValidationError("Oh fine!", path=[], validator="c") + + best_match = exceptions.by_relevance(weak="a", strong="c") + + match = max([weak, normal, strong], key=best_match) + self.assertIs(match, strong) + + match = max([strong, normal, weak], key=best_match) + self.assertIs(match, strong) + + +class TestErrorTree(unittest.TestCase): + def test_it_knows_how_many_total_errors_it_contains(self): + errors = [mock.MagicMock() for _ in range(8)] + tree = exceptions.ErrorTree(errors) + self.assertEqual(tree.total_errors, 8) + + def test_it_contains_an_item_if_the_item_had_an_error(self): + errors = [exceptions.ValidationError("a message", path=["bar"])] + tree = exceptions.ErrorTree(errors) + self.assertIn("bar", tree) + + def test_it_does_not_contain_an_item_if_the_item_had_no_error(self): + errors = [exceptions.ValidationError("a message", path=["bar"])] + tree = exceptions.ErrorTree(errors) + self.assertNotIn("foo", tree) + + def test_validators_that_failed_appear_in_errors_dict(self): + error = exceptions.ValidationError("a message", validator="foo") + tree = exceptions.ErrorTree([error]) + self.assertEqual(tree.errors, {"foo" : error}) + + def test_it_creates_a_child_tree_for_each_nested_path(self): + errors = [ + exceptions.ValidationError("a bar message", path=["bar"]), + exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]), + ] + tree = exceptions.ErrorTree(errors) + self.assertIn(0, tree["bar"]) + self.assertNotIn(1, tree["bar"]) + + def test_children_have_their_errors_dicts_built(self): + e1, e2 = ( + exceptions.ValidationError("1", validator="foo", path=["bar", 0]), + exceptions.ValidationError("2", validator="quux", path=["bar", 0]), + ) + tree = exceptions.ErrorTree([e1, e2]) + self.assertEqual(tree["bar"][0].errors, {"foo" : e1, "quux" : e2}) + + def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self): + error = exceptions.ValidationError("123", validator="foo", instance=[]) + tree = exceptions.ErrorTree([error]) + + with self.assertRaises(IndexError): + tree[0] + + def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self): + """ + If a validator is dumb (like :validator:`required` in draft 3) and + refers to a path that isn't in the instance, the tree still properly + returns a subtree for that path. 
+ + """ + + error = exceptions.ValidationError( + "a message", validator="foo", instance={}, path=["foo"], + ) + tree = exceptions.ErrorTree([error]) + self.assertIsInstance(tree["foo"], exceptions.ErrorTree) + + +class TestErrorReprStr(unittest.TestCase): + def make_error(self, **kwargs): + defaults = dict( + message=u"hello", + validator=u"type", + validator_value=u"string", + instance=5, + schema={u"type": u"string"}, + ) + defaults.update(kwargs) + return exceptions.ValidationError(**defaults) + + def assertShows(self, expected, **kwargs): + if PY3: + expected = expected.replace("u'", "'") + expected = textwrap.dedent(expected).rstrip("\n") + + error = self.make_error(**kwargs) + message_line, _, rest = str(error).partition("\n") + self.assertEqual(message_line, error.message) + self.assertEqual(rest, expected) + + def test_repr(self): + self.assertEqual( + repr(exceptions.ValidationError(message="Hello!")), + "" % "Hello!", + ) + + def test_unset_error(self): + error = exceptions.ValidationError("message") + self.assertEqual(str(error), "message") + + kwargs = { + "validator": "type", + "validator_value": "string", + "instance": 5, + "schema": {"type": "string"} + } + # Just the message should show if any of the attributes are unset + for attr in kwargs: + k = dict(kwargs) + del k[attr] + error = exceptions.ValidationError("message", **k) + self.assertEqual(str(error), "message") + + def test_empty_paths(self): + self.assertShows( + """ + Failed validating u'type' in schema: + {u'type': u'string'} + + On instance: + 5 + """, + path=[], + schema_path=[], + ) + + def test_one_item_paths(self): + self.assertShows( + """ + Failed validating u'type' in schema: + {u'type': u'string'} + + On instance[0]: + 5 + """, + path=[0], + schema_path=["items"], + ) + + def test_multiple_item_paths(self): + self.assertShows( + """ + Failed validating u'type' in schema[u'items'][0]: + {u'type': u'string'} + + On instance[0][u'a']: + 5 + """, + path=[0, u"a"], + schema_path=[u"items", 0, 1], + ) + + def test_uses_pprint(self): + with mock.patch("pprint.pformat") as pformat: + str(self.make_error()) + self.assertEqual(pformat.call_count, 2) # schema + instance + + def test_str_works_with_instances_having_overriden_eq_operator(self): + """ + Check for https://github.com/Julian/jsonschema/issues/164 which + rendered exceptions unusable when a `ValidationError` involved + instances with an `__eq__` method that returned truthy values. + + """ + + instance = mock.MagicMock() + error = exceptions.ValidationError( + "a message", + validator="foo", + instance=instance, + validator_value="some", + schema="schema", + ) + str(error) + self.assertFalse(instance.__eq__.called) diff --git a/lib/spack/external/jsonschema/tests/test_format.py b/lib/spack/external/jsonschema/tests/test_format.py new file mode 100644 index 0000000000..8392ca1de3 --- /dev/null +++ b/lib/spack/external/jsonschema/tests/test_format.py @@ -0,0 +1,63 @@ +""" +Tests for the parts of jsonschema related to the :validator:`format` property. 
+ +""" + +from jsonschema.tests.compat import mock, unittest + +from jsonschema import FormatError, ValidationError, FormatChecker +from jsonschema.validators import Draft4Validator + + +class TestFormatChecker(unittest.TestCase): + def setUp(self): + self.fn = mock.Mock() + + def test_it_can_validate_no_formats(self): + checker = FormatChecker(formats=()) + self.assertFalse(checker.checkers) + + def test_it_raises_a_key_error_for_unknown_formats(self): + with self.assertRaises(KeyError): + FormatChecker(formats=["o noes"]) + + def test_it_can_register_cls_checkers(self): + with mock.patch.dict(FormatChecker.checkers, clear=True): + FormatChecker.cls_checks("new")(self.fn) + self.assertEqual(FormatChecker.checkers, {"new" : (self.fn, ())}) + + def test_it_can_register_checkers(self): + checker = FormatChecker() + checker.checks("new")(self.fn) + self.assertEqual( + checker.checkers, + dict(FormatChecker.checkers, new=(self.fn, ())) + ) + + def test_it_catches_registered_errors(self): + checker = FormatChecker() + cause = self.fn.side_effect = ValueError() + + checker.checks("foo", raises=ValueError)(self.fn) + + with self.assertRaises(FormatError) as cm: + checker.check("bar", "foo") + + self.assertIs(cm.exception.cause, cause) + self.assertIs(cm.exception.__cause__, cause) + + # Unregistered errors should not be caught + self.fn.side_effect = AttributeError + with self.assertRaises(AttributeError): + checker.check("bar", "foo") + + def test_format_error_causes_become_validation_error_causes(self): + checker = FormatChecker() + checker.checks("foo", raises=ValueError)(self.fn) + cause = self.fn.side_effect = ValueError() + validator = Draft4Validator({"format" : "foo"}, format_checker=checker) + + with self.assertRaises(ValidationError) as cm: + validator.validate("bar") + + self.assertIs(cm.exception.__cause__, cause) diff --git a/lib/spack/external/jsonschema/tests/test_jsonschema_test_suite.py b/lib/spack/external/jsonschema/tests/test_jsonschema_test_suite.py new file mode 100644 index 0000000000..75c6857bc0 --- /dev/null +++ b/lib/spack/external/jsonschema/tests/test_jsonschema_test_suite.py @@ -0,0 +1,290 @@ +""" +Test runner for the JSON Schema official test suite + +Tests comprehensive correctness of each draft's validator. + +See https://github.com/json-schema/JSON-Schema-Test-Suite for details. + +""" + +from contextlib import closing +from decimal import Decimal +import glob +import json +import io +import itertools +import os +import re +import subprocess +import sys + +try: + from sys import pypy_version_info +except ImportError: + pypy_version_info = None + +from jsonschema import ( + FormatError, SchemaError, ValidationError, Draft3Validator, + Draft4Validator, FormatChecker, draft3_format_checker, + draft4_format_checker, validate, +) +from jsonschema.compat import PY3 +from jsonschema.tests.compat import mock, unittest +import jsonschema + + +REPO_ROOT = os.path.join(os.path.dirname(jsonschema.__file__), os.path.pardir) +SUITE = os.getenv("JSON_SCHEMA_TEST_SUITE", os.path.join(REPO_ROOT, "json")) + +if not os.path.isdir(SUITE): + raise ValueError( + "Can't find the JSON-Schema-Test-Suite directory. Set the " + "'JSON_SCHEMA_TEST_SUITE' environment variable or run the tests from " + "alongside a checkout of the suite." 
+ ) + +TESTS_DIR = os.path.join(SUITE, "tests") +JSONSCHEMA_SUITE = os.path.join(SUITE, "bin", "jsonschema_suite") + +remotes_stdout = subprocess.Popen( + ["python", JSONSCHEMA_SUITE, "remotes"], stdout=subprocess.PIPE, +).stdout + +with closing(remotes_stdout): + if PY3: + remotes_stdout = io.TextIOWrapper(remotes_stdout) + REMOTES = json.load(remotes_stdout) + + +def make_case(schema, data, valid, name): + if valid: + def test_case(self): + kwargs = getattr(self, "validator_kwargs", {}) + validate(data, schema, cls=self.validator_class, **kwargs) + else: + def test_case(self): + kwargs = getattr(self, "validator_kwargs", {}) + with self.assertRaises(ValidationError): + validate(data, schema, cls=self.validator_class, **kwargs) + + if not PY3: + name = name.encode("utf-8") + test_case.__name__ = name + + return test_case + + +def maybe_skip(skip, test_case, case, test): + if skip is not None: + reason = skip(case, test) + if reason is not None: + test_case = unittest.skip(reason)(test_case) + return test_case + + +def load_json_cases(tests_glob, ignore_glob="", basedir=TESTS_DIR, skip=None): + if ignore_glob: + ignore_glob = os.path.join(basedir, ignore_glob) + + def add_test_methods(test_class): + ignored = set(glob.iglob(ignore_glob)) + + for filename in glob.iglob(os.path.join(basedir, tests_glob)): + if filename in ignored: + continue + + validating, _ = os.path.splitext(os.path.basename(filename)) + id = itertools.count(1) + + with open(filename) as test_file: + for case in json.load(test_file): + for test in case["tests"]: + name = "test_%s_%s_%s" % ( + validating, + next(id), + re.sub(r"[\W ]+", "_", test["description"]), + ) + assert not hasattr(test_class, name), name + + test_case = make_case( + data=test["data"], + schema=case["schema"], + valid=test["valid"], + name=name, + ) + test_case = maybe_skip(skip, test_case, case, test) + setattr(test_class, name, test_case) + + return test_class + return add_test_methods + + +class TypesMixin(object): + @unittest.skipIf(PY3, "In Python 3 json.load always produces unicode") + def test_string_a_bytestring_is_a_string(self): + self.validator_class({"type" : "string"}).validate(b"foo") + + +class DecimalMixin(object): + def test_it_can_validate_with_decimals(self): + schema = {"type" : "number"} + validator = self.validator_class( + schema, types={"number" : (int, float, Decimal)} + ) + + for valid in [1, 1.1, Decimal(1) / Decimal(8)]: + validator.validate(valid) + + for invalid in ["foo", {}, [], True, None]: + with self.assertRaises(ValidationError): + validator.validate(invalid) + + +def missing_format(checker): + def missing_format(case, test): + format = case["schema"].get("format") + if format not in checker.checkers: + return "Format checker {0!r} not found.".format(format) + elif ( + format == "date-time" and + pypy_version_info is not None and + pypy_version_info[:2] <= (1, 9) + ): + # datetime.datetime is overzealous about typechecking in <=1.9 + return "datetime.datetime is broken on this version of PyPy." 
+ return missing_format + + +class FormatMixin(object): + def test_it_returns_true_for_formats_it_does_not_know_about(self): + validator = self.validator_class( + {"format" : "carrot"}, format_checker=FormatChecker(), + ) + validator.validate("bugs") + + def test_it_does_not_validate_formats_by_default(self): + validator = self.validator_class({}) + self.assertIsNone(validator.format_checker) + + def test_it_validates_formats_if_a_checker_is_provided(self): + checker = mock.Mock(spec=FormatChecker) + validator = self.validator_class( + {"format" : "foo"}, format_checker=checker, + ) + + validator.validate("bar") + + checker.check.assert_called_once_with("bar", "foo") + + cause = ValueError() + checker.check.side_effect = FormatError('aoeu', cause=cause) + + with self.assertRaises(ValidationError) as cm: + validator.validate("bar") + # Make sure original cause is attached + self.assertIs(cm.exception.cause, cause) + + def test_it_validates_formats_of_any_type(self): + checker = mock.Mock(spec=FormatChecker) + validator = self.validator_class( + {"format" : "foo"}, format_checker=checker, + ) + + validator.validate([1, 2, 3]) + + checker.check.assert_called_once_with([1, 2, 3], "foo") + + cause = ValueError() + checker.check.side_effect = FormatError('aoeu', cause=cause) + + with self.assertRaises(ValidationError) as cm: + validator.validate([1, 2, 3]) + # Make sure original cause is attached + self.assertIs(cm.exception.cause, cause) + + +if sys.maxunicode == 2 ** 16 - 1: # This is a narrow build. + def narrow_unicode_build(case, test): + if "supplementary Unicode" in test["description"]: + return "Not running surrogate Unicode case, this Python is narrow." +else: + def narrow_unicode_build(case, test): # This isn't, skip nothing. + return + + +@load_json_cases( + "draft3/*.json", + skip=narrow_unicode_build, + ignore_glob="draft3/refRemote.json", +) +@load_json_cases( + "draft3/optional/format.json", skip=missing_format(draft3_format_checker) +) +@load_json_cases("draft3/optional/bignum.json") +@load_json_cases("draft3/optional/zeroTerminatedFloats.json") +class TestDraft3(unittest.TestCase, TypesMixin, DecimalMixin, FormatMixin): + validator_class = Draft3Validator + validator_kwargs = {"format_checker" : draft3_format_checker} + + def test_any_type_is_valid_for_type_any(self): + validator = self.validator_class({"type" : "any"}) + validator.validate(mock.Mock()) + + # TODO: we're in need of more meta schema tests + def test_invalid_properties(self): + with self.assertRaises(SchemaError): + validate({}, {"properties": {"test": True}}, + cls=self.validator_class) + + def test_minItems_invalid_string(self): + with self.assertRaises(SchemaError): + # needs to be an integer + validate([1], {"minItems" : "1"}, cls=self.validator_class) + + +@load_json_cases( + "draft4/*.json", + skip=narrow_unicode_build, + ignore_glob="draft4/refRemote.json", +) +@load_json_cases( + "draft4/optional/format.json", skip=missing_format(draft4_format_checker) +) +@load_json_cases("draft4/optional/bignum.json") +@load_json_cases("draft4/optional/zeroTerminatedFloats.json") +class TestDraft4(unittest.TestCase, TypesMixin, DecimalMixin, FormatMixin): + validator_class = Draft4Validator + validator_kwargs = {"format_checker" : draft4_format_checker} + + # TODO: we're in need of more meta schema tests + def test_invalid_properties(self): + with self.assertRaises(SchemaError): + validate({}, {"properties": {"test": True}}, + cls=self.validator_class) + + def test_minItems_invalid_string(self): + with 
self.assertRaises(SchemaError): + # needs to be an integer + validate([1], {"minItems" : "1"}, cls=self.validator_class) + + +class RemoteRefResolutionMixin(object): + def setUp(self): + patch = mock.patch("jsonschema.validators.requests") + requests = patch.start() + requests.get.side_effect = self.resolve + self.addCleanup(patch.stop) + + def resolve(self, reference): + _, _, reference = reference.partition("http://localhost:1234/") + return mock.Mock(**{"json.return_value" : REMOTES.get(reference)}) + + +@load_json_cases("draft3/refRemote.json") +class Draft3RemoteResolution(RemoteRefResolutionMixin, unittest.TestCase): + validator_class = Draft3Validator + + +@load_json_cases("draft4/refRemote.json") +class Draft4RemoteResolution(RemoteRefResolutionMixin, unittest.TestCase): + validator_class = Draft4Validator diff --git a/lib/spack/external/jsonschema/tests/test_validators.py b/lib/spack/external/jsonschema/tests/test_validators.py new file mode 100644 index 0000000000..f8692388ea --- /dev/null +++ b/lib/spack/external/jsonschema/tests/test_validators.py @@ -0,0 +1,786 @@ +from collections import deque +from contextlib import contextmanager +import json + +from jsonschema import FormatChecker, ValidationError +from jsonschema.tests.compat import mock, unittest +from jsonschema.validators import ( + RefResolutionError, UnknownType, Draft3Validator, + Draft4Validator, RefResolver, create, extend, validator_for, validate, +) + + +class TestCreateAndExtend(unittest.TestCase): + def setUp(self): + self.meta_schema = {u"properties" : {u"smelly" : {}}} + self.smelly = mock.MagicMock() + self.validators = {u"smelly" : self.smelly} + self.types = {u"dict" : dict} + self.Validator = create( + meta_schema=self.meta_schema, + validators=self.validators, + default_types=self.types, + ) + + self.validator_value = 12 + self.schema = {u"smelly" : self.validator_value} + self.validator = self.Validator(self.schema) + + def test_attrs(self): + self.assertEqual(self.Validator.VALIDATORS, self.validators) + self.assertEqual(self.Validator.META_SCHEMA, self.meta_schema) + self.assertEqual(self.Validator.DEFAULT_TYPES, self.types) + + def test_init(self): + self.assertEqual(self.validator.schema, self.schema) + + def test_iter_errors(self): + instance = "hello" + + self.smelly.return_value = [] + self.assertEqual(list(self.validator.iter_errors(instance)), []) + + error = mock.Mock() + self.smelly.return_value = [error] + self.assertEqual(list(self.validator.iter_errors(instance)), [error]) + + self.smelly.assert_called_with( + self.validator, self.validator_value, instance, self.schema, + ) + + def test_if_a_version_is_provided_it_is_registered(self): + with mock.patch("jsonschema.validators.validates") as validates: + validates.side_effect = lambda version : lambda cls : cls + Validator = create(meta_schema={u"id" : ""}, version="my version") + validates.assert_called_once_with("my version") + self.assertEqual(Validator.__name__, "MyVersionValidator") + + def test_if_a_version_is_not_provided_it_is_not_registered(self): + with mock.patch("jsonschema.validators.validates") as validates: + create(meta_schema={u"id" : "id"}) + self.assertFalse(validates.called) + + def test_extend(self): + validators = dict(self.Validator.VALIDATORS) + new = mock.Mock() + + Extended = extend(self.Validator, validators={u"a new one" : new}) + + validators.update([(u"a new one", new)]) + self.assertEqual(Extended.VALIDATORS, validators) + self.assertNotIn(u"a new one", self.Validator.VALIDATORS) + + 
self.assertEqual(Extended.META_SCHEMA, self.Validator.META_SCHEMA) + self.assertEqual(Extended.DEFAULT_TYPES, self.Validator.DEFAULT_TYPES) + + +class TestIterErrors(unittest.TestCase): + def setUp(self): + self.validator = Draft3Validator({}) + + def test_iter_errors(self): + instance = [1, 2] + schema = { + u"disallow" : u"array", + u"enum" : [["a", "b", "c"], ["d", "e", "f"]], + u"minItems" : 3 + } + + got = (e.message for e in self.validator.iter_errors(instance, schema)) + expected = [ + "%r is disallowed for [1, 2]" % (schema["disallow"],), + "[1, 2] is too short", + "[1, 2] is not one of %r" % (schema["enum"],), + ] + self.assertEqual(sorted(got), sorted(expected)) + + def test_iter_errors_multiple_failures_one_validator(self): + instance = {"foo" : 2, "bar" : [1], "baz" : 15, "quux" : "spam"} + schema = { + u"properties" : { + "foo" : {u"type" : "string"}, + "bar" : {u"minItems" : 2}, + "baz" : {u"maximum" : 10, u"enum" : [2, 4, 6, 8]}, + } + } + + errors = list(self.validator.iter_errors(instance, schema)) + self.assertEqual(len(errors), 4) + + +class TestValidationErrorMessages(unittest.TestCase): + def message_for(self, instance, schema, *args, **kwargs): + kwargs.setdefault("cls", Draft3Validator) + with self.assertRaises(ValidationError) as e: + validate(instance, schema, *args, **kwargs) + return e.exception.message + + def test_single_type_failure(self): + message = self.message_for(instance=1, schema={u"type" : u"string"}) + self.assertEqual(message, "1 is not of type %r" % u"string") + + def test_single_type_list_failure(self): + message = self.message_for(instance=1, schema={u"type" : [u"string"]}) + self.assertEqual(message, "1 is not of type %r" % u"string") + + def test_multiple_type_failure(self): + types = u"string", u"object" + message = self.message_for(instance=1, schema={u"type" : list(types)}) + self.assertEqual(message, "1 is not of type %r, %r" % types) + + def test_object_without_title_type_failure(self): + type = {u"type" : [{u"minimum" : 3}]} + message = self.message_for(instance=1, schema={u"type" : [type]}) + self.assertEqual(message, "1 is not of type %r" % (type,)) + + def test_object_with_name_type_failure(self): + name = "Foo" + schema = {u"type" : [{u"name" : name, u"minimum" : 3}]} + message = self.message_for(instance=1, schema=schema) + self.assertEqual(message, "1 is not of type %r" % (name,)) + + def test_minimum(self): + message = self.message_for(instance=1, schema={"minimum" : 2}) + self.assertEqual(message, "1 is less than the minimum of 2") + + def test_maximum(self): + message = self.message_for(instance=1, schema={"maximum" : 0}) + self.assertEqual(message, "1 is greater than the maximum of 0") + + def test_dependencies_failure_has_single_element_not_list(self): + depend, on = "bar", "foo" + schema = {u"dependencies" : {depend : on}} + message = self.message_for({"bar" : 2}, schema) + self.assertEqual(message, "%r is a dependency of %r" % (on, depend)) + + def test_additionalItems_single_failure(self): + message = self.message_for( + [2], {u"items" : [], u"additionalItems" : False}, + ) + self.assertIn("(2 was unexpected)", message) + + def test_additionalItems_multiple_failures(self): + message = self.message_for( + [1, 2, 3], {u"items" : [], u"additionalItems" : False} + ) + self.assertIn("(1, 2, 3 were unexpected)", message) + + def test_additionalProperties_single_failure(self): + additional = "foo" + schema = {u"additionalProperties" : False} + message = self.message_for({additional : 2}, schema) + self.assertIn("(%r was unexpected)" 
% (additional,), message) + + def test_additionalProperties_multiple_failures(self): + schema = {u"additionalProperties" : False} + message = self.message_for(dict.fromkeys(["foo", "bar"]), schema) + + self.assertIn(repr("foo"), message) + self.assertIn(repr("bar"), message) + self.assertIn("were unexpected)", message) + + def test_invalid_format_default_message(self): + checker = FormatChecker(formats=()) + check_fn = mock.Mock(return_value=False) + checker.checks(u"thing")(check_fn) + + schema = {u"format" : u"thing"} + message = self.message_for("bla", schema, format_checker=checker) + + self.assertIn(repr("bla"), message) + self.assertIn(repr("thing"), message) + self.assertIn("is not a", message) + + +class TestValidationErrorDetails(unittest.TestCase): + # TODO: These really need unit tests for each individual validator, rather + # than just these higher level tests. + def test_anyOf(self): + instance = 5 + schema = { + "anyOf": [ + {"minimum": 20}, + {"type": "string"} + ] + } + + validator = Draft4Validator(schema) + errors = list(validator.iter_errors(instance)) + self.assertEqual(len(errors), 1) + e = errors[0] + + self.assertEqual(e.validator, "anyOf") + self.assertEqual(e.validator_value, schema["anyOf"]) + self.assertEqual(e.instance, instance) + self.assertEqual(e.schema, schema) + self.assertIsNone(e.parent) + + self.assertEqual(e.path, deque([])) + self.assertEqual(e.relative_path, deque([])) + self.assertEqual(e.absolute_path, deque([])) + + self.assertEqual(e.schema_path, deque(["anyOf"])) + self.assertEqual(e.relative_schema_path, deque(["anyOf"])) + self.assertEqual(e.absolute_schema_path, deque(["anyOf"])) + + self.assertEqual(len(e.context), 2) + + e1, e2 = sorted_errors(e.context) + + self.assertEqual(e1.validator, "minimum") + self.assertEqual(e1.validator_value, schema["anyOf"][0]["minimum"]) + self.assertEqual(e1.instance, instance) + self.assertEqual(e1.schema, schema["anyOf"][0]) + self.assertIs(e1.parent, e) + + self.assertEqual(e1.path, deque([])) + self.assertEqual(e1.absolute_path, deque([])) + self.assertEqual(e1.relative_path, deque([])) + + self.assertEqual(e1.schema_path, deque([0, "minimum"])) + self.assertEqual(e1.relative_schema_path, deque([0, "minimum"])) + self.assertEqual( + e1.absolute_schema_path, deque(["anyOf", 0, "minimum"]), + ) + + self.assertFalse(e1.context) + + self.assertEqual(e2.validator, "type") + self.assertEqual(e2.validator_value, schema["anyOf"][1]["type"]) + self.assertEqual(e2.instance, instance) + self.assertEqual(e2.schema, schema["anyOf"][1]) + self.assertIs(e2.parent, e) + + self.assertEqual(e2.path, deque([])) + self.assertEqual(e2.relative_path, deque([])) + self.assertEqual(e2.absolute_path, deque([])) + + self.assertEqual(e2.schema_path, deque([1, "type"])) + self.assertEqual(e2.relative_schema_path, deque([1, "type"])) + self.assertEqual(e2.absolute_schema_path, deque(["anyOf", 1, "type"])) + + self.assertEqual(len(e2.context), 0) + + def test_type(self): + instance = {"foo": 1} + schema = { + "type": [ + {"type": "integer"}, + { + "type": "object", + "properties": { + "foo": {"enum": [2]} + } + } + ] + } + + validator = Draft3Validator(schema) + errors = list(validator.iter_errors(instance)) + self.assertEqual(len(errors), 1) + e = errors[0] + + self.assertEqual(e.validator, "type") + self.assertEqual(e.validator_value, schema["type"]) + self.assertEqual(e.instance, instance) + self.assertEqual(e.schema, schema) + self.assertIsNone(e.parent) + + self.assertEqual(e.path, deque([])) + self.assertEqual(e.relative_path, 
deque([])) + self.assertEqual(e.absolute_path, deque([])) + + self.assertEqual(e.schema_path, deque(["type"])) + self.assertEqual(e.relative_schema_path, deque(["type"])) + self.assertEqual(e.absolute_schema_path, deque(["type"])) + + self.assertEqual(len(e.context), 2) + + e1, e2 = sorted_errors(e.context) + + self.assertEqual(e1.validator, "type") + self.assertEqual(e1.validator_value, schema["type"][0]["type"]) + self.assertEqual(e1.instance, instance) + self.assertEqual(e1.schema, schema["type"][0]) + self.assertIs(e1.parent, e) + + self.assertEqual(e1.path, deque([])) + self.assertEqual(e1.relative_path, deque([])) + self.assertEqual(e1.absolute_path, deque([])) + + self.assertEqual(e1.schema_path, deque([0, "type"])) + self.assertEqual(e1.relative_schema_path, deque([0, "type"])) + self.assertEqual(e1.absolute_schema_path, deque(["type", 0, "type"])) + + self.assertFalse(e1.context) + + self.assertEqual(e2.validator, "enum") + self.assertEqual(e2.validator_value, [2]) + self.assertEqual(e2.instance, 1) + self.assertEqual(e2.schema, {u"enum" : [2]}) + self.assertIs(e2.parent, e) + + self.assertEqual(e2.path, deque(["foo"])) + self.assertEqual(e2.relative_path, deque(["foo"])) + self.assertEqual(e2.absolute_path, deque(["foo"])) + + self.assertEqual( + e2.schema_path, deque([1, "properties", "foo", "enum"]), + ) + self.assertEqual( + e2.relative_schema_path, deque([1, "properties", "foo", "enum"]), + ) + self.assertEqual( + e2.absolute_schema_path, + deque(["type", 1, "properties", "foo", "enum"]), + ) + + self.assertFalse(e2.context) + + def test_single_nesting(self): + instance = {"foo" : 2, "bar" : [1], "baz" : 15, "quux" : "spam"} + schema = { + "properties" : { + "foo" : {"type" : "string"}, + "bar" : {"minItems" : 2}, + "baz" : {"maximum" : 10, "enum" : [2, 4, 6, 8]}, + } + } + + validator = Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2, e3, e4 = sorted_errors(errors) + + self.assertEqual(e1.path, deque(["bar"])) + self.assertEqual(e2.path, deque(["baz"])) + self.assertEqual(e3.path, deque(["baz"])) + self.assertEqual(e4.path, deque(["foo"])) + + self.assertEqual(e1.relative_path, deque(["bar"])) + self.assertEqual(e2.relative_path, deque(["baz"])) + self.assertEqual(e3.relative_path, deque(["baz"])) + self.assertEqual(e4.relative_path, deque(["foo"])) + + self.assertEqual(e1.absolute_path, deque(["bar"])) + self.assertEqual(e2.absolute_path, deque(["baz"])) + self.assertEqual(e3.absolute_path, deque(["baz"])) + self.assertEqual(e4.absolute_path, deque(["foo"])) + + self.assertEqual(e1.validator, "minItems") + self.assertEqual(e2.validator, "enum") + self.assertEqual(e3.validator, "maximum") + self.assertEqual(e4.validator, "type") + + def test_multiple_nesting(self): + instance = [1, {"foo" : 2, "bar" : {"baz" : [1]}}, "quux"] + schema = { + "type" : "string", + "items" : { + "type" : ["string", "object"], + "properties" : { + "foo" : {"enum" : [1, 3]}, + "bar" : { + "type" : "array", + "properties" : { + "bar" : {"required" : True}, + "baz" : {"minItems" : 2}, + } + } + } + } + } + + validator = Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2, e3, e4, e5, e6 = sorted_errors(errors) + + self.assertEqual(e1.path, deque([])) + self.assertEqual(e2.path, deque([0])) + self.assertEqual(e3.path, deque([1, "bar"])) + self.assertEqual(e4.path, deque([1, "bar", "bar"])) + self.assertEqual(e5.path, deque([1, "bar", "baz"])) + self.assertEqual(e6.path, deque([1, "foo"])) + + self.assertEqual(e1.schema_path, deque(["type"])) + 
self.assertEqual(e2.schema_path, deque(["items", "type"])) + self.assertEqual( + list(e3.schema_path), ["items", "properties", "bar", "type"], + ) + self.assertEqual( + list(e4.schema_path), + ["items", "properties", "bar", "properties", "bar", "required"], + ) + self.assertEqual( + list(e5.schema_path), + ["items", "properties", "bar", "properties", "baz", "minItems"] + ) + self.assertEqual( + list(e6.schema_path), ["items", "properties", "foo", "enum"], + ) + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "type") + self.assertEqual(e3.validator, "type") + self.assertEqual(e4.validator, "required") + self.assertEqual(e5.validator, "minItems") + self.assertEqual(e6.validator, "enum") + + def test_additionalProperties(self): + instance = {"bar": "bar", "foo": 2} + schema = { + "additionalProperties" : {"type": "integer", "minimum": 5} + } + + validator = Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque(["bar"])) + self.assertEqual(e2.path, deque(["foo"])) + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "minimum") + + def test_patternProperties(self): + instance = {"bar": 1, "foo": 2} + schema = { + "patternProperties" : { + "bar": {"type": "string"}, + "foo": {"minimum": 5} + } + } + + validator = Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque(["bar"])) + self.assertEqual(e2.path, deque(["foo"])) + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "minimum") + + def test_additionalItems(self): + instance = ["foo", 1] + schema = { + "items": [], + "additionalItems" : {"type": "integer", "minimum": 5} + } + + validator = Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque([0])) + self.assertEqual(e2.path, deque([1])) + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "minimum") + + def test_additionalItems_with_items(self): + instance = ["foo", "bar", 1] + schema = { + "items": [{}], + "additionalItems" : {"type": "integer", "minimum": 5} + } + + validator = Draft3Validator(schema) + errors = validator.iter_errors(instance) + e1, e2 = sorted_errors(errors) + + self.assertEqual(e1.path, deque([1])) + self.assertEqual(e2.path, deque([2])) + + self.assertEqual(e1.validator, "type") + self.assertEqual(e2.validator, "minimum") + + +class ValidatorTestMixin(object): + def setUp(self): + self.instance = mock.Mock() + self.schema = {} + self.resolver = mock.Mock() + self.validator = self.validator_class(self.schema) + + def test_valid_instances_are_valid(self): + errors = iter([]) + + with mock.patch.object( + self.validator, "iter_errors", return_value=errors, + ): + self.assertTrue( + self.validator.is_valid(self.instance, self.schema) + ) + + def test_invalid_instances_are_not_valid(self): + errors = iter([mock.Mock()]) + + with mock.patch.object( + self.validator, "iter_errors", return_value=errors, + ): + self.assertFalse( + self.validator.is_valid(self.instance, self.schema) + ) + + def test_non_existent_properties_are_ignored(self): + instance, my_property, my_value = mock.Mock(), mock.Mock(), mock.Mock() + validate(instance=instance, schema={my_property : my_value}) + + def test_it_creates_a_ref_resolver_if_not_provided(self): + self.assertIsInstance(self.validator.resolver, RefResolver) + + def 
test_it_delegates_to_a_ref_resolver(self): + resolver = RefResolver("", {}) + schema = {"$ref" : mock.Mock()} + + @contextmanager + def resolving(): + yield {"type": "integer"} + + with mock.patch.object(resolver, "resolving") as resolve: + resolve.return_value = resolving() + with self.assertRaises(ValidationError): + self.validator_class(schema, resolver=resolver).validate(None) + + resolve.assert_called_once_with(schema["$ref"]) + + def test_is_type_is_true_for_valid_type(self): + self.assertTrue(self.validator.is_type("foo", "string")) + + def test_is_type_is_false_for_invalid_type(self): + self.assertFalse(self.validator.is_type("foo", "array")) + + def test_is_type_evades_bool_inheriting_from_int(self): + self.assertFalse(self.validator.is_type(True, "integer")) + self.assertFalse(self.validator.is_type(True, "number")) + + def test_is_type_raises_exception_for_unknown_type(self): + with self.assertRaises(UnknownType): + self.validator.is_type("foo", object()) + + +class TestDraft3Validator(ValidatorTestMixin, unittest.TestCase): + validator_class = Draft3Validator + + def test_is_type_is_true_for_any_type(self): + self.assertTrue(self.validator.is_valid(mock.Mock(), {"type": "any"})) + + def test_is_type_does_not_evade_bool_if_it_is_being_tested(self): + self.assertTrue(self.validator.is_type(True, "boolean")) + self.assertTrue(self.validator.is_valid(True, {"type": "any"})) + + def test_non_string_custom_types(self): + schema = {'type': [None]} + cls = self.validator_class(schema, types={None: type(None)}) + cls.validate(None, schema) + + +class TestDraft4Validator(ValidatorTestMixin, unittest.TestCase): + validator_class = Draft4Validator + + +class TestBuiltinFormats(unittest.TestCase): + """ + The built-in (specification-defined) formats do not raise type errors. + + If an instance or value is not a string, it should be ignored. + + """ + + +for format in FormatChecker.checkers: + def test(self, format=format): + v = Draft4Validator({"format": format}, format_checker=FormatChecker()) + v.validate(123) + + name = "test_{0}_ignores_non_strings".format(format) + test.__name__ = name + setattr(TestBuiltinFormats, name, test) + del test # Ugh py.test. Stop discovering top level tests. 
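The format-checker loop above and the `TestValidatorFor` cases that follow pin down two entry points of the vendored jsonschema API: `format` checking is opt-in via a `FormatChecker`, and `validator_for` maps a schema's `$schema` URI to a validator class, falling back to `Draft4Validator`. A minimal sketch of that behaviour, assuming the vendored copy is importable as plain `jsonschema` (imports follow the test modules above; this is an illustration, not part of the patch):

from jsonschema import FormatChecker
from jsonschema.validators import Draft3Validator, Draft4Validator, validator_for

# $schema selects the validator class; an empty schema falls back to draft 4.
assert validator_for({"$schema": "http://json-schema.org/draft-03/schema#"}) is Draft3Validator
assert validator_for({}) is Draft4Validator

# "format" is ignored unless a FormatChecker is supplied, and even then an
# unknown format such as "carrot" is accepted rather than raising.
Draft4Validator({"format": "carrot"}, format_checker=FormatChecker()).validate("bugs")
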
+ + +class TestValidatorFor(unittest.TestCase): + def test_draft_3(self): + schema = {"$schema" : "http://json-schema.org/draft-03/schema"} + self.assertIs(validator_for(schema), Draft3Validator) + + schema = {"$schema" : "http://json-schema.org/draft-03/schema#"} + self.assertIs(validator_for(schema), Draft3Validator) + + def test_draft_4(self): + schema = {"$schema" : "http://json-schema.org/draft-04/schema"} + self.assertIs(validator_for(schema), Draft4Validator) + + schema = {"$schema" : "http://json-schema.org/draft-04/schema#"} + self.assertIs(validator_for(schema), Draft4Validator) + + def test_custom_validator(self): + Validator = create(meta_schema={"id" : "meta schema id"}, version="12") + schema = {"$schema" : "meta schema id"} + self.assertIs(validator_for(schema), Validator) + + def test_validator_for_jsonschema_default(self): + self.assertIs(validator_for({}), Draft4Validator) + + def test_validator_for_custom_default(self): + self.assertIs(validator_for({}, default=None), None) + + +class TestValidate(unittest.TestCase): + def test_draft3_validator_is_chosen(self): + schema = {"$schema" : "http://json-schema.org/draft-03/schema#"} + with mock.patch.object(Draft3Validator, "check_schema") as chk_schema: + validate({}, schema) + chk_schema.assert_called_once_with(schema) + # Make sure it works without the empty fragment + schema = {"$schema" : "http://json-schema.org/draft-03/schema"} + with mock.patch.object(Draft3Validator, "check_schema") as chk_schema: + validate({}, schema) + chk_schema.assert_called_once_with(schema) + + def test_draft4_validator_is_chosen(self): + schema = {"$schema" : "http://json-schema.org/draft-04/schema#"} + with mock.patch.object(Draft4Validator, "check_schema") as chk_schema: + validate({}, schema) + chk_schema.assert_called_once_with(schema) + + def test_draft4_validator_is_the_default(self): + with mock.patch.object(Draft4Validator, "check_schema") as chk_schema: + validate({}, {}) + chk_schema.assert_called_once_with({}) + + +class TestRefResolver(unittest.TestCase): + + base_uri = "" + stored_uri = "foo://stored" + stored_schema = {"stored" : "schema"} + + def setUp(self): + self.referrer = {} + self.store = {self.stored_uri : self.stored_schema} + self.resolver = RefResolver(self.base_uri, self.referrer, self.store) + + def test_it_does_not_retrieve_schema_urls_from_the_network(self): + ref = Draft3Validator.META_SCHEMA["id"] + with mock.patch.object(self.resolver, "resolve_remote") as remote: + with self.resolver.resolving(ref) as resolved: + self.assertEqual(resolved, Draft3Validator.META_SCHEMA) + self.assertFalse(remote.called) + + def test_it_resolves_local_refs(self): + ref = "#/properties/foo" + self.referrer["properties"] = {"foo" : object()} + with self.resolver.resolving(ref) as resolved: + self.assertEqual(resolved, self.referrer["properties"]["foo"]) + + def test_it_resolves_local_refs_with_id(self): + schema = {"id": "foo://bar/schema#", "a": {"foo": "bar"}} + resolver = RefResolver.from_schema(schema) + with resolver.resolving("#/a") as resolved: + self.assertEqual(resolved, schema["a"]) + with resolver.resolving("foo://bar/schema#/a") as resolved: + self.assertEqual(resolved, schema["a"]) + + def test_it_retrieves_stored_refs(self): + with self.resolver.resolving(self.stored_uri) as resolved: + self.assertIs(resolved, self.stored_schema) + + self.resolver.store["cached_ref"] = {"foo" : 12} + with self.resolver.resolving("cached_ref#/foo") as resolved: + self.assertEqual(resolved, 12) + + def 
test_it_retrieves_unstored_refs_via_requests(self): + ref = "http://bar#baz" + schema = {"baz" : 12} + + with mock.patch("jsonschema.validators.requests") as requests: + requests.get.return_value.json.return_value = schema + with self.resolver.resolving(ref) as resolved: + self.assertEqual(resolved, 12) + requests.get.assert_called_once_with("http://bar") + + def test_it_retrieves_unstored_refs_via_urlopen(self): + ref = "http://bar#baz" + schema = {"baz" : 12} + + with mock.patch("jsonschema.validators.requests", None): + with mock.patch("jsonschema.validators.urlopen") as urlopen: + urlopen.return_value.read.return_value = ( + json.dumps(schema).encode("utf8")) + with self.resolver.resolving(ref) as resolved: + self.assertEqual(resolved, 12) + urlopen.assert_called_once_with("http://bar") + + def test_it_can_construct_a_base_uri_from_a_schema(self): + schema = {"id" : "foo"} + resolver = RefResolver.from_schema(schema) + self.assertEqual(resolver.base_uri, "foo") + with resolver.resolving("") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("#") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("foo") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("foo#") as resolved: + self.assertEqual(resolved, schema) + + def test_it_can_construct_a_base_uri_from_a_schema_without_id(self): + schema = {} + resolver = RefResolver.from_schema(schema) + self.assertEqual(resolver.base_uri, "") + with resolver.resolving("") as resolved: + self.assertEqual(resolved, schema) + with resolver.resolving("#") as resolved: + self.assertEqual(resolved, schema) + + def test_custom_uri_scheme_handlers(self): + schema = {"foo": "bar"} + ref = "foo://bar" + foo_handler = mock.Mock(return_value=schema) + resolver = RefResolver("", {}, handlers={"foo": foo_handler}) + with resolver.resolving(ref) as resolved: + self.assertEqual(resolved, schema) + foo_handler.assert_called_once_with(ref) + + def test_cache_remote_on(self): + ref = "foo://bar" + foo_handler = mock.Mock() + resolver = RefResolver( + "", {}, cache_remote=True, handlers={"foo" : foo_handler}, + ) + with resolver.resolving(ref): + pass + with resolver.resolving(ref): + pass + foo_handler.assert_called_once_with(ref) + + def test_cache_remote_off(self): + ref = "foo://bar" + foo_handler = mock.Mock() + resolver = RefResolver( + "", {}, cache_remote=False, handlers={"foo" : foo_handler}, + ) + with resolver.resolving(ref): + pass + with resolver.resolving(ref): + pass + self.assertEqual(foo_handler.call_count, 2) + + def test_if_you_give_it_junk_you_get_a_resolution_error(self): + ref = "foo://bar" + foo_handler = mock.Mock(side_effect=ValueError("Oh no! What's this?")) + resolver = RefResolver("", {}, handlers={"foo" : foo_handler}) + with self.assertRaises(RefResolutionError) as err: + with resolver.resolving(ref): + pass + self.assertEqual(str(err.exception), "Oh no! 
What's this?") + + +def sorted_errors(errors): + def key(error): + return ( + [str(e) for e in error.path], + [str(e) for e in error.schema_path] + ) + return sorted(errors, key=key) diff --git a/lib/spack/external/jsonschema/validators.py b/lib/spack/external/jsonschema/validators.py new file mode 100644 index 0000000000..3e326844f4 --- /dev/null +++ b/lib/spack/external/jsonschema/validators.py @@ -0,0 +1,428 @@ +from __future__ import division + +import contextlib +import json +import numbers + +try: + import requests +except ImportError: + requests = None + +from jsonschema import _utils, _validators +from jsonschema.compat import ( + Sequence, urljoin, urlsplit, urldefrag, unquote, urlopen, + str_types, int_types, iteritems, +) +from jsonschema.exceptions import ErrorTree # Backwards compatibility # noqa +from jsonschema.exceptions import RefResolutionError, SchemaError, UnknownType + + +_unset = _utils.Unset() + +validators = {} +meta_schemas = _utils.URIDict() + + +def validates(version): + """ + Register the decorated validator for a ``version`` of the specification. + + Registered validators and their meta schemas will be considered when + parsing ``$schema`` properties' URIs. + + :argument str version: an identifier to use as the version's name + :returns: a class decorator to decorate the validator with the version + + """ + + def _validates(cls): + validators[version] = cls + if u"id" in cls.META_SCHEMA: + meta_schemas[cls.META_SCHEMA[u"id"]] = cls + return cls + return _validates + + +def create(meta_schema, validators=(), version=None, default_types=None): # noqa + if default_types is None: + default_types = { + u"array" : list, u"boolean" : bool, u"integer" : int_types, + u"null" : type(None), u"number" : numbers.Number, u"object" : dict, + u"string" : str_types, + } + + class Validator(object): + VALIDATORS = dict(validators) + META_SCHEMA = dict(meta_schema) + DEFAULT_TYPES = dict(default_types) + + def __init__( + self, schema, types=(), resolver=None, format_checker=None, + ): + self._types = dict(self.DEFAULT_TYPES) + self._types.update(types) + + if resolver is None: + resolver = RefResolver.from_schema(schema) + + self.resolver = resolver + self.format_checker = format_checker + self.schema = schema + + @classmethod + def check_schema(cls, schema): + for error in cls(cls.META_SCHEMA).iter_errors(schema): + raise SchemaError.create_from(error) + + def iter_errors(self, instance, _schema=None): + if _schema is None: + _schema = self.schema + + with self.resolver.in_scope(_schema.get(u"id", u"")): + ref = _schema.get(u"$ref") + if ref is not None: + validators = [(u"$ref", ref)] + else: + validators = iteritems(_schema) + + for k, v in validators: + validator = self.VALIDATORS.get(k) + if validator is None: + continue + + errors = validator(self, v, instance, _schema) or () + for error in errors: + # set details if not already set by the called fn + error._set( + validator=k, + validator_value=v, + instance=instance, + schema=_schema, + ) + if k != u"$ref": + error.schema_path.appendleft(k) + yield error + + def descend(self, instance, schema, path=None, schema_path=None): + for error in self.iter_errors(instance, schema): + if path is not None: + error.path.appendleft(path) + if schema_path is not None: + error.schema_path.appendleft(schema_path) + yield error + + def validate(self, *args, **kwargs): + for error in self.iter_errors(*args, **kwargs): + raise error + + def is_type(self, instance, type): + if type not in self._types: + raise UnknownType(type, instance, 
self.schema) + pytypes = self._types[type] + + # bool inherits from int, so ensure bools aren't reported as ints + if isinstance(instance, bool): + pytypes = _utils.flatten(pytypes) + is_number = any( + issubclass(pytype, numbers.Number) for pytype in pytypes + ) + if is_number and bool not in pytypes: + return False + return isinstance(instance, pytypes) + + def is_valid(self, instance, _schema=None): + error = next(self.iter_errors(instance, _schema), None) + return error is None + + if version is not None: + Validator = validates(version)(Validator) + Validator.__name__ = version.title().replace(" ", "") + "Validator" + + return Validator + + +def extend(validator, validators, version=None): + all_validators = dict(validator.VALIDATORS) + all_validators.update(validators) + return create( + meta_schema=validator.META_SCHEMA, + validators=all_validators, + version=version, + default_types=validator.DEFAULT_TYPES, + ) + + +Draft3Validator = create( + meta_schema=_utils.load_schema("draft3"), + validators={ + u"$ref" : _validators.ref, + u"additionalItems" : _validators.additionalItems, + u"additionalProperties" : _validators.additionalProperties, + u"dependencies" : _validators.dependencies, + u"disallow" : _validators.disallow_draft3, + u"divisibleBy" : _validators.multipleOf, + u"enum" : _validators.enum, + u"extends" : _validators.extends_draft3, + u"format" : _validators.format, + u"items" : _validators.items, + u"maxItems" : _validators.maxItems, + u"maxLength" : _validators.maxLength, + u"maximum" : _validators.maximum, + u"minItems" : _validators.minItems, + u"minLength" : _validators.minLength, + u"minimum" : _validators.minimum, + u"multipleOf" : _validators.multipleOf, + u"pattern" : _validators.pattern, + u"patternProperties" : _validators.patternProperties, + u"properties" : _validators.properties_draft3, + u"type" : _validators.type_draft3, + u"uniqueItems" : _validators.uniqueItems, + }, + version="draft3", +) + +Draft4Validator = create( + meta_schema=_utils.load_schema("draft4"), + validators={ + u"$ref" : _validators.ref, + u"additionalItems" : _validators.additionalItems, + u"additionalProperties" : _validators.additionalProperties, + u"allOf" : _validators.allOf_draft4, + u"anyOf" : _validators.anyOf_draft4, + u"dependencies" : _validators.dependencies, + u"enum" : _validators.enum, + u"format" : _validators.format, + u"items" : _validators.items, + u"maxItems" : _validators.maxItems, + u"maxLength" : _validators.maxLength, + u"maxProperties" : _validators.maxProperties_draft4, + u"maximum" : _validators.maximum, + u"minItems" : _validators.minItems, + u"minLength" : _validators.minLength, + u"minProperties" : _validators.minProperties_draft4, + u"minimum" : _validators.minimum, + u"multipleOf" : _validators.multipleOf, + u"not" : _validators.not_draft4, + u"oneOf" : _validators.oneOf_draft4, + u"pattern" : _validators.pattern, + u"patternProperties" : _validators.patternProperties, + u"properties" : _validators.properties_draft4, + u"required" : _validators.required_draft4, + u"type" : _validators.type_draft4, + u"uniqueItems" : _validators.uniqueItems, + }, + version="draft4", +) + + +class RefResolver(object): + """ + Resolve JSON References. 
+ + :argument str base_uri: URI of the referring document + :argument referrer: the actual referring document + :argument dict store: a mapping from URIs to documents to cache + :argument bool cache_remote: whether remote refs should be cached after + first resolution + :argument dict handlers: a mapping from URI schemes to functions that + should be used to retrieve them + + """ + + def __init__( + self, base_uri, referrer, store=(), cache_remote=True, handlers=(), + ): + self.base_uri = base_uri + self.resolution_scope = base_uri + # This attribute is not used, it is for backwards compatibility + self.referrer = referrer + self.cache_remote = cache_remote + self.handlers = dict(handlers) + + self.store = _utils.URIDict( + (id, validator.META_SCHEMA) + for id, validator in iteritems(meta_schemas) + ) + self.store.update(store) + self.store[base_uri] = referrer + + @classmethod + def from_schema(cls, schema, *args, **kwargs): + """ + Construct a resolver from a JSON schema object. + + :argument schema schema: the referring schema + :rtype: :class:`RefResolver` + + """ + + return cls(schema.get(u"id", u""), schema, *args, **kwargs) + + @contextlib.contextmanager + def in_scope(self, scope): + old_scope = self.resolution_scope + self.resolution_scope = urljoin(old_scope, scope) + try: + yield + finally: + self.resolution_scope = old_scope + + @contextlib.contextmanager + def resolving(self, ref): + """ + Context manager which resolves a JSON ``ref`` and enters the + resolution scope of this ref. + + :argument str ref: reference to resolve + + """ + + full_uri = urljoin(self.resolution_scope, ref) + uri, fragment = urldefrag(full_uri) + if not uri: + uri = self.base_uri + + if uri in self.store: + document = self.store[uri] + else: + try: + document = self.resolve_remote(uri) + except Exception as exc: + raise RefResolutionError(exc) + + old_base_uri, self.base_uri = self.base_uri, uri + try: + with self.in_scope(uri): + yield self.resolve_fragment(document, fragment) + finally: + self.base_uri = old_base_uri + + def resolve_fragment(self, document, fragment): + """ + Resolve a ``fragment`` within the referenced ``document``. + + :argument document: the referrant document + :argument str fragment: a URI fragment to resolve within it + + """ + + fragment = fragment.lstrip(u"/") + parts = unquote(fragment).split(u"/") if fragment else [] + + for part in parts: + part = part.replace(u"~1", u"/").replace(u"~0", u"~") + + if isinstance(document, Sequence): + # Array indexes should be turned into integers + try: + part = int(part) + except ValueError: + pass + try: + document = document[part] + except (TypeError, LookupError): + raise RefResolutionError( + "Unresolvable JSON pointer: %r" % fragment + ) + + return document + + def resolve_remote(self, uri): + """ + Resolve a remote ``uri``. + + Does not check the store first, but stores the retrieved document in + the store if :attr:`RefResolver.cache_remote` is True. + + .. note:: + + If the requests_ library is present, ``jsonschema`` will use it to + request the remote ``uri``, so that the correct encoding is + detected and used. + + If it isn't, or if the scheme of the ``uri`` is not ``http`` or + ``https``, UTF-8 is assumed. + + :argument str uri: the URI to resolve + :returns: the retrieved document + + .. 
_requests: http://pypi.python.org/pypi/requests/ + + """ + + scheme = urlsplit(uri).scheme + + if scheme in self.handlers: + result = self.handlers[scheme](uri) + elif ( + scheme in [u"http", u"https"] and + requests and + getattr(requests.Response, "json", None) is not None + ): + # Requests has support for detecting the correct encoding of + # json over http + if callable(requests.Response.json): + result = requests.get(uri).json() + else: + result = requests.get(uri).json + else: + # Otherwise, pass off to urllib and assume utf-8 + result = json.loads(urlopen(uri).read().decode("utf-8")) + + if self.cache_remote: + self.store[uri] = result + return result + + +def validator_for(schema, default=_unset): + if default is _unset: + default = Draft4Validator + return meta_schemas.get(schema.get(u"$schema", u""), default) + + +def validate(instance, schema, cls=None, *args, **kwargs): + """ + Validate an instance under the given schema. + + >>> validate([2, 3, 4], {"maxItems" : 2}) + Traceback (most recent call last): + ... + ValidationError: [2, 3, 4] is too long + + :func:`validate` will first verify that the provided schema is itself + valid, since not doing so can lead to less obvious error messages and fail + in less obvious or consistent ways. If you know you have a valid schema + already or don't care, you might prefer using the + :meth:`~IValidator.validate` method directly on a specific validator + (e.g. :meth:`Draft4Validator.validate`). + + + :argument instance: the instance to validate + :argument schema: the schema to validate with + :argument cls: an :class:`IValidator` class that will be used to validate + the instance. + + If the ``cls`` argument is not provided, two things will happen in + accordance with the specification. First, if the schema has a + :validator:`$schema` property containing a known meta-schema [#]_ then the + proper validator will be used. The specification recommends that all + schemas contain :validator:`$schema` properties for this reason. If no + :validator:`$schema` property is found, the default validator class is + :class:`Draft4Validator`. + + Any other provided positional and keyword arguments will be passed on when + instantiating the ``cls``. + + :raises: + :exc:`ValidationError` if the instance is invalid + + :exc:`SchemaError` if the schema itself is invalid + + .. rubric:: Footnotes + .. [#] known by a validator registered with :func:`validates` + """ + if cls is None: + cls = validator_for(schema) + cls.check_schema(schema) + cls(schema, *args, **kwargs).validate(instance) diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 6c4a15aaab..ab78ecef30 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -59,9 +59,8 @@ import spack.repository try: repo = spack.repository.RepoPath() sys.meta_path.append(repo) -except spack.repository.RepoError, e: - tty.error('while initializing Spack RepoPath:') - tty.die(e.message) +except spack.error.SpackError, e: + tty.die('while initializing Spack RepoPath:', e.message) # # Set up the installed packages database diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py index ebe42d0138..a792f04cfd 100644 --- a/lib/spack/spack/cmd/repo.py +++ b/lib/spack/spack/cmd/repo.py @@ -118,9 +118,8 @@ def repo_create(args): shutil.rmtree(root, ignore_errors=True) tty.msg("Created repo with namespace '%s'." 
% namespace) - tty.msg("To register it with Spack, add a line like this to ~/.spack/repos.yaml:", - 'repos:', - ' - ' + full_path) + tty.msg("To register it with spack, run this command:", + 'spack repo add %s' % full_path) def repo_add(args): diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 3ff83ae529..e57aa75f8e 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -118,9 +118,11 @@ the site configuration will be ignored. """ import os +import re import sys import copy - +import jsonschema +from jsonschema import Draft4Validator, validators import yaml from yaml.error import MarkedYAMLError from ordereddict_backport import OrderedDict @@ -137,32 +139,62 @@ import spack.util.spack_yaml as syaml """Dict from section names -> schema for that section.""" section_schemas = { - 'compilers' : { + 'compilers': { '$schema': 'http://json-schema.org/schema#', - 'title' : 'Spack compiler configuration file schema', - 'type' : 'object', - 'properties' : { - 'compilers' : { - 'type' : 'map', - }, - }, - }, - - 'mirrors' : { + 'title': 'Spack compiler configuration file schema', + 'type': 'object', + 'additionalProperties': False, + 'patternProperties': { + 'compilers:?': { # optional colon for overriding site config. + 'type': 'object', + 'default': {}, + 'additionalProperties': False, + 'patternProperties': { + r'\w[\w-]*': { # architecture + 'type': 'object', + 'additionalProperties': False, + 'patternProperties': { + r'\w[\w-]*@\w[\w-]*': { # compiler spec + 'type': 'object', + 'additionalProperties': False, + 'required': ['cc', 'cxx', 'f77', 'fc'], + 'properties': { + 'cc': { 'anyOf': [ {'type' : 'string' }, + {'type' : 'null' }]}, + 'cxx': { 'anyOf': [ {'type' : 'string' }, + {'type' : 'null' }]}, + 'f77': { 'anyOf': [ {'type' : 'string' }, + {'type' : 'null' }]}, + 'fc': { 'anyOf': [ {'type' : 'string' }, + {'type' : 'null' }]}, + },},},},},},},}, + + 'mirrors': { '$schema': 'http://json-schema.org/schema#', - 'title' : 'Spack mirror configuration file schema', - 'type' : 'map', - 'properties' : { - 'mirrors' : { - - } - }, - }, - - 'repos' : { + 'title': 'Spack mirror configuration file schema', + 'type': 'object', + 'additionalProperties': False, + 'patternProperties': { + r'mirrors:?': { + 'type': 'object', + 'default': {}, + 'additionalProperties': False, + 'patternProperties': { + r'\w[\w-]*': { + 'type': 'string'},},},},}, + + 'repos': { '$schema': 'http://json-schema.org/schema#', - 'title' : 'Spack repository configuration file schema', - }} + 'title': 'Spack repository configuration file schema', + 'type': 'object', + 'additionalProperties': False, + 'patternProperties': { + r'repos:?': { + 'type': 'array', + 'default': [], + 'items': { + 'type': 'string'},},},}, +} """OrderedDict of config scopes keyed by name. Later scopes will override earlier scopes. @@ -170,13 +202,64 @@ section_schemas = { config_scopes = OrderedDict() -def validate_section(section): +def validate_section_name(section): """Raise a ValueError if the section is not a valid section.""" if section not in section_schemas: raise ValueError("Invalid config section: '%s'. Options are %s." % (section, section_schemas)) +def extend_with_default(validator_class): + """Add support for the 'default' attribute for properties and patternProperties. + + jsonschema does not handle this out of the box -- it only + validates. This allows us to set default values for configs + where certain fields are `None` b/c they're deleted or + commented out. 
+ + """ + validate_properties = validator_class.VALIDATORS["properties"] + validate_pattern_properties = validator_class.VALIDATORS["patternProperties"] + + def set_defaults(validator, properties, instance, schema): + for property, subschema in properties.iteritems(): + if "default" in subschema: + instance.setdefault(property, subschema["default"]) + for err in validate_properties(validator, properties, instance, schema): + yield err + + def set_pp_defaults(validator, properties, instance, schema): + for property, subschema in properties.iteritems(): + if "default" in subschema: + if isinstance(instance, dict): + for key, val in instance.iteritems(): + if re.match(property, key) and val is None: + instance[key] = subschema["default"] + + for err in validate_pattern_properties(validator, properties, instance, schema): + yield err + + return validators.extend(validator_class, { + "properties" : set_defaults, + "patternProperties" : set_pp_defaults + }) + + +DefaultSettingValidator = extend_with_default(Draft4Validator) + +def validate_section(data, schema): + """Validate data read in from a Spack YAML file. + + This leverages the line information (start_mark, end_mark) stored + on Spack YAML structures. + + """ + try: + DefaultSettingValidator(schema).validate(data) + except jsonschema.ValidationError as e: + raise ConfigFormatError(e, data) + + class ConfigScope(object): """This class represents a configuration scope. @@ -195,18 +278,16 @@ class ConfigScope(object): config_scopes[name] = self def get_section_filename(self, section): - validate_section(section) + validate_section_name(section) return os.path.join(self.path, "%s.yaml" % section) def get_section(self, section): if not section in self.sections: - path = self.get_section_filename(section) - data = _read_config_file(path) - if data is None: - self.sections[section] = {} - else: - self.sections[section] = data + path = self.get_section_filename(section) + schema = section_schemas[section] + data = _read_config_file(path, schema) + self.sections[section] = data return self.sections[section] @@ -255,7 +336,7 @@ def validate_scope(scope): % (scope, config_scopes.keys())) -def _read_config_file(filename): +def _read_config_file(filename, schema): """Read a YAML configuration file.""" # Ignore nonexisting files. if not os.path.exists(filename): @@ -269,8 +350,13 @@ def _read_config_file(filename): raise ConfigFileError("Config file is not readable: %s." % filename) try: + tty.debug("Reading config file %s" % filename) with open(filename) as f: - return syaml.load(f) + data = syaml.load(f) + + validate_section(data, schema) + + return data except MarkedYAMLError, e: raise ConfigFileError( @@ -337,7 +423,7 @@ def get_config(section, scope=None): Strips off the top-level section name from the YAML dict. """ - validate_section(section) + validate_section_name(section) merged_section = syaml.syaml_dict() if scope is None: @@ -387,7 +473,7 @@ def update_config(section, update_data, scope=None): # read in the config to ensure we've got current data get_config(section) - validate_section(section) # validate section name + validate_section_name(section) # validate section name scope = validate_scope(scope) # get ConfigScope object from string. # read only the requested section's data. 
@@ -407,4 +493,43 @@ def print_section(section): class ConfigError(SpackError): pass class ConfigFileError(ConfigError): pass -class ConfigFormatError(ConfigError): pass + +def get_path(path, data): + if path: + return get_path(path[1:], data[path[0]]) + else: + return data + +class ConfigFormatError(ConfigError): + """Raised when a configuration format does not match its schema.""" + def __init__(self, validation_error, data): + # Try to get line number from erroneous instance and its parent + instance_mark = getattr(validation_error.instance, '_start_mark', None) + parent_mark = getattr(validation_error.parent, '_start_mark', None) + path = getattr(validation_error, 'path', None) + + # Try really hard to get the parent (which sometimes is not + # set) This digs it out of the validated structure if it's not + # on the validation_error. + if not parent_mark: + parent_path = list(path)[:-1] + parent = get_path(parent_path, data) + if path[-1] in parent: + if isinstance(parent, dict): + keylist = parent.keys() + elif isinstance(parent, list): + keylist = parent + idx = keylist.index(path[-1]) + parent_mark = getattr(keylist[idx], '_start_mark', None) + + if instance_mark: + location = '%s:%d' % (instance_mark.name, instance_mark.line + 1) + elif parent_mark: + location = '%s:%d' % (parent_mark.name, parent_mark.line + 1) + elif path: + location = 'At ' + ':'.join(path) + else: + location = '' + + message = '%s: %s' % (location, validation_error.message) + super(ConfigError, self).__init__(message) -- cgit v1.2.3-70-g09d2 From d869d8e86fcabfd04121806c8c1554a00cc5339c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 14 Jan 2016 12:00:33 -0800 Subject: add sanity check on config write as well as validation on read. --- lib/spack/spack/config.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index e57aa75f8e..193b311434 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -297,7 +297,10 @@ class ConfigScope(object): try: mkdirp(self.path) with open(filename, 'w') as f: + validate_section(data, section_schemas[section]) syaml.dump(data, stream=f, default_flow_style=False) + except jsonschema.ValidationError as e: + raise ConfigSanityError(e, data) except (yaml.YAMLError, IOError) as e: raise ConfigFileError("Error writing to config file: '%s'" % str(e)) @@ -533,3 +536,6 @@ class ConfigFormatError(ConfigError): message = '%s: %s' % (location, validation_error.message) super(ConfigError, self).__init__(message) + +class ConfigSanityError(ConfigFormatError): + """Same as ConfigFormatError, raised when config is written by Spack.""" -- cgit v1.2.3-70-g09d2 From aa00999fcacf6ba19987388c3998cdd4d1c6e65c Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 14 Jan 2016 12:00:56 -0800 Subject: update compiler config test. 
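
The test update below nests the sample compiler entries under an architecture key ("all") and renames "f90" to "fc", matching the stricter 'compilers' schema and its required key set introduced in the earlier config.py change. For background, here is a minimal, self-contained sketch (not part of the patch) of how the default-applying validator built by extend_with_default() behaves; the trimmed schema and data below are hypothetical stand-ins for the real section_schemas entries:

    from jsonschema import Draft4Validator, validators

    def extend_with_default(validator_class):
        # Wrap the stock "properties" validator so schema-declared
        # defaults are filled in before normal validation runs.
        validate_properties = validator_class.VALIDATORS["properties"]

        def set_defaults(validator, properties, instance, schema):
            for prop, subschema in properties.items():
                if "default" in subschema and isinstance(instance, dict):
                    instance.setdefault(prop, subschema["default"])
            for err in validate_properties(validator, properties,
                                           instance, schema):
                yield err

        return validators.extend(validator_class,
                                 {"properties": set_defaults})

    DefaultSettingValidator = extend_with_default(Draft4Validator)

    schema = {
        "type": "object",
        "properties": {
            "compilers": {"type": "object", "default": {}},
        },
    }

    data = {}                                   # 'compilers' key missing
    DefaultSettingValidator(schema).validate(data)
    assert data == {"compilers": {}}            # default was filled in
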
--- lib/spack/spack/test/config.py | 70 ++++++++++++++++++++++-------------------- 1 file changed, 37 insertions(+), 33 deletions(-) diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py index 5f99dcb903..d8be5a855b 100644 --- a/lib/spack/spack/test/config.py +++ b/lib/spack/spack/test/config.py @@ -33,39 +33,43 @@ from spack.test.mock_packages_test import * # Some sample compiler config data a_comps = { - "gcc@4.7.3" : { - "cc" : "/gcc473", - "cxx" : "/g++473", - "f77" : None, - "f90" : None }, - "gcc@4.5.0" : { - "cc" : "/gcc450", - "cxx" : "/g++450", - "f77" : "/gfortran", - "f90" : "/gfortran" }, - "clang@3.3" : { - "cc" : "", - "cxx" : "", - "f77" : "", - "f90" : "" } + "all": { + "gcc@4.7.3" : { + "cc" : "/gcc473", + "cxx": "/g++473", + "f77": None, + "fc" : None }, + "gcc@4.5.0" : { + "cc" : "/gcc450", + "cxx": "/g++450", + "f77": "/gfortran", + "fc" : "/gfortran" }, + "clang@3.3" : { + "cc" : "", + "cxx": "", + "f77": "", + "fc" : "" } + } } b_comps = { - "icc@10.0" : { - "cc" : "/icc100", - "cxx" : "/icc100", - "f77" : None, - "f90" : None }, - "icc@11.1" : { - "cc" : "/icc111", - "cxx" : "/icp111", - "f77" : "/ifort", - "f90" : "/ifort" }, - "clang@3.3" : { - "cc" : "/clang", - "cxx" : "/clang++", - "f77" : None, - "f90" : None} + "all": { + "icc@10.0" : { + "cc" : "/icc100", + "cxx": "/icc100", + "f77": None, + "fc" : None }, + "icc@11.1" : { + "cc" : "/icc111", + "cxx": "/icp111", + "f77": "/ifort", + "fc" : "/ifort" }, + "clang@3.3" : { + "cc" : "/clang", + "cxx": "/clang++", + "f77": None, + "fc" : None} + } } class ConfigTest(MockPackagesTest): @@ -85,11 +89,11 @@ class ConfigTest(MockPackagesTest): def check_config(self, comps, *compiler_names): """Check that named compilers in comps match Spack's config.""" config = spack.config.get_config('compilers') - compiler_list = ['cc', 'cxx', 'f77', 'f90'] + compiler_list = ['cc', 'cxx', 'f77', 'fc'] for key in compiler_names: for c in compiler_list: - expected = comps[key][c] - actual = config[key][c] + expected = comps['all'][key][c] + actual = config['all'][key][c] self.assertEqual(expected, actual) -- cgit v1.2.3-70-g09d2 From 358b2ab4ba9d08bc47b59bf39811b6472acbc53a Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 17 Jan 2016 18:12:08 -0800 Subject: Temporary fix: Clang is the default compiler on Mac OS X. --- lib/spack/spack/compilers/__init__.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 2a3d67c731..facc9c338b 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -27,6 +27,7 @@ system and configuring Spack to use multiple compilers. """ import imp import os +import platform from llnl.util.lang import memoized, list_modules from llnl.util.filesystem import join_path @@ -47,7 +48,11 @@ _imported_compilers_module = 'spack.compilers' _required_instance_vars = ['cc', 'cxx', 'f77', 'fc'] # TODO: customize order in config file -_default_order = ['gcc', 'intel', 'pgi', 'clang', 'xlc'] +if platform.system() == 'Darwin': + _default_order = ['clang', 'gcc', 'intel'] +else: + _default_order = ['gcc', 'intel', 'pgi', 'clang', 'xlc'] + def _auto_compiler_spec(function): def converter(cspec_like, *args, **kwargs): -- cgit v1.2.3-70-g09d2 From 5984bc2ad3b3d161ac8e8c37ec1dc3dfa9250241 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 17 Jan 2016 18:12:53 -0800 Subject: Add namespace option to find command. 
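
This change adds a -N/--namespace flag to `spack find` so installed specs can be printed with fully qualified, namespace-prefixed names. Internally the flag only switches the Spec.format() token from '$_' (plain package name) to '$.' (namespace-qualified name), as the diff below shows. A tiny illustrative sketch of that selection, not a definitive implementation:

    def find_format(namespace=False):
        # '$_' -> package name only; '$.' -> namespace.package
        nfmt = '.' if namespace else '_'
        return '$%s$@$+' % nfmt                 # name, version, variants

    assert find_format() == '$_$@$+'
    assert find_format(namespace=True) == '$.$@$+'
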
--- lib/spack/spack/cmd/find.py | 27 ++++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index d951c37fe0..714f1d514b 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -40,6 +40,9 @@ description ="Find installed spack packages" def setup_parser(subparser): format_group = subparser.add_mutually_exclusive_group() + format_group.add_argument( + '-s', '--short', action='store_const', dest='mode', const='short', + help='Show only specs (default)') format_group.add_argument( '-p', '--paths', action='store_const', dest='mode', const='paths', help='Show paths to package install directories') @@ -48,21 +51,24 @@ def setup_parser(subparser): help='Show full dependency DAG of installed packages') subparser.add_argument( - '-l', '--long', action='store_true', dest='long', + '-l', '--long', action='store_true', help='Show dependency hashes as well as versions.') subparser.add_argument( - '-L', '--very-long', action='store_true', dest='very_long', + '-L', '--very-long', action='store_true', help='Show dependency hashes as well as versions.') subparser.add_argument( - '-u', '--unknown', action='store_true', dest='unknown', + '-u', '--unknown', action='store_true', help='Show only specs Spack does not have a package for.') subparser.add_argument( - '-m', '--missing', action='store_true', dest='missing', + '-m', '--missing', action='store_true', help='Show missing dependencies as well as installed specs.') subparser.add_argument( - '-M', '--only-missing', action='store_true', dest='only_missing', + '-M', '--only-missing', action='store_true', help='Show only missing dependencies.') + subparser.add_argument( + '-N', '--namespace', action='store_true', + help='Show fully qualified package names.') subparser.add_argument( 'query_specs', nargs=argparse.REMAINDER, @@ -76,6 +82,7 @@ def gray_hash(spec, length): def display_specs(specs, **kwargs): mode = kwargs.get('mode', 'short') hashes = kwargs.get('long', False) + namespace = kwargs.get('namespace', False) hlen = 7 if kwargs.get('very_long', False): @@ -97,7 +104,8 @@ def display_specs(specs, **kwargs): specs = index[(architecture,compiler)] specs.sort() - abbreviated = [s.format('$_$@$+', color=True) for s in specs] + nfmt = '.' if namespace else '_' + abbreviated = [s.format('$%s$@$+' % nfmt, color=True) for s in specs] if mode == 'paths': # Print one spec per line along with prefix path width = max(len(s) for s in abbreviated) @@ -112,7 +120,7 @@ def display_specs(specs, **kwargs): elif mode == 'deps': for spec in specs: print spec.tree( - format='$_$@$+', + format='$%s$@$+' % nfmt, color=True, indent=4, prefix=(lambda s: gray_hash(s, hlen)) if hashes else None) @@ -122,7 +130,7 @@ def display_specs(specs, **kwargs): string = "" if hashes: string += gray_hash(s, hlen) + ' ' - string += s.format('$-_$@$+', color=True) + string += s.format('$-%s$@$+' % nfmt, color=True) return string colify(fmt(s) for s in specs) @@ -171,4 +179,5 @@ def find(parser, args): tty.msg("%d installed packages." % len(specs)) display_specs(specs, mode=args.mode, long=args.long, - very_long=args.very_long) + very_long=args.very_long, + namespace=args.namespace) -- cgit v1.2.3-70-g09d2 From 97b492756acce93dbd5f1c305504f07df7582ba0 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Sun, 17 Jan 2016 18:14:35 -0800 Subject: Fix create, diy, edit, and repo commands to use multiple repos. 
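
This patch replaces the old -p/--package-repo option on `spack create` with -r/--repo (a path) and -N/--namespace, gives `spack edit` the same options, and makes `spack repo add/remove` canonicalize paths (expanding a leading '$spack' and '~'). Roughly, the new find_repository() in create.py resolves the target repository as: explicit --repo path first, then the spec's or --namespace's registered repo, then the highest-precedence repo. A simplified, self-contained sketch of that precedence follows; Repo here is a hypothetical stand-in for spack.repository.Repo and the sample paths are made up:

    class Repo(object):
        def __init__(self, root, namespace):
            self.root = root
            self.namespace = namespace

    def find_repository(namespace, repo_path, repos):
        """Pick a repo: explicit path > namespace match > first in list."""
        if repo_path is not None:
            # The real code constructs Repo(repo_path) from its repo.yaml;
            # here we just look the path up among known repos.
            candidates = [r for r in repos if r.root == repo_path]
        elif namespace:
            candidates = [r for r in repos if r.namespace == namespace]
        else:
            candidates = repos[:1]              # first_repo(): highest precedence
        if not candidates:
            raise ValueError("no matching repository")
        repo = candidates[0]
        if namespace and repo.namespace != namespace:
            raise ValueError("namespace '%s' does not match repo '%s'"
                             % (namespace, repo.namespace))
        return repo

    repos = [Repo('/home/me/myrepo', 'myrepo'),
             Repo('/opt/spack/var/spack/repos/base', 'base')]
    assert find_repository(None, None, repos).namespace == 'myrepo'
    assert find_repository('base', None, repos).root.endswith('/base')
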
--- lib/spack/spack/cmd/create.py | 117 +++++++++++++++++++++++++++++++---------- lib/spack/spack/cmd/diy.py | 2 +- lib/spack/spack/cmd/edit.py | 35 ++++++++---- lib/spack/spack/cmd/repo.py | 57 ++++++++++---------- lib/spack/spack/repository.py | 103 ++++++++++++++++++++++++++++-------- lib/spack/spack/util/naming.py | 25 ++++++++- 6 files changed, 247 insertions(+), 92 deletions(-) diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 1a60875de8..7cea39cb55 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -36,7 +36,9 @@ import spack.cmd import spack.cmd.checksum import spack.url import spack.util.web +from spack.spec import Spec from spack.util.naming import * +from spack.repository import Repo, RepoError import spack.util.crypto as crypto from spack.util.executable import which @@ -85,21 +87,34 @@ ${versions} """) +def make_version_calls(ver_hash_tuples): + """Adds a version() call to the package for each version found.""" + max_len = max(len(str(v)) for v, h in ver_hash_tuples) + format = " version(%%-%ds, '%%s')" % (max_len + 2) + return '\n'.join(format % ("'%s'" % v, h) for v, h in ver_hash_tuples) + + def setup_parser(subparser): subparser.add_argument('url', nargs='?', help="url of package archive") subparser.add_argument( - '--keep-stage', action='store_true', dest='keep_stage', + '--keep-stage', action='store_true', help="Don't clean up staging area when command completes.") subparser.add_argument( - '-n', '--name', dest='alternate_name', default=None, + '-n', '--name', dest='alternate_name', default=None, metavar='NAME', help="Override the autodetected name for the created package.") subparser.add_argument( - '-p', '--package-repo', dest='package_repo', default=None, - help="Create the package in the specified packagerepo.") + '-r', '--repo', default=None, + help="Path to a repository where the package should be created.") + subparser.add_argument( + '-N', '--namespace', + help="Specify a namespace for the package. Must be the namespace of " + "a repository registered with Spack.") subparser.add_argument( '-f', '--force', action='store_true', dest='force', help="Overwrite any existing package file with the same name.") + setup_parser.subparser = subparser + class ConfigureGuesser(object): def __call__(self, stage): @@ -137,16 +152,7 @@ class ConfigureGuesser(object): self.build_system = build_system -def make_version_calls(ver_hash_tuples): - """Adds a version() call to the package for each version found.""" - max_len = max(len(str(v)) for v, h in ver_hash_tuples) - format = " version(%%-%ds, '%%s')" % (max_len + 2) - return '\n'.join(format % ("'%s'" % v, h) for v, h in ver_hash_tuples) - - -def create(parser, args): - url = args.url - +def guess_name_and_version(url, args): # Try to deduce name and version of the new package from the URL version = spack.url.parse_version(url) if not version: @@ -163,21 +169,52 @@ def create(parser, args): tty.die("Couldn't guess a name for this package. Try running:", "", "spack create --name ") - package_repo = args.package_repo - - if not valid_module_name(name): + if not valid_fully_qualified_module_name(name): tty.die("Package name can only contain A-Z, a-z, 0-9, '_' and '-'") - tty.msg("This looks like a URL for %s version %s." % (name, version)) - tty.msg("Creating template for package %s" % name) + return name, version - # Create a directory for the new package. 
- pkg_path = spack.repo.filename_for_package_name(name, package_repo) - if os.path.exists(pkg_path) and not args.force: - tty.die("%s already exists." % pkg_path) + +def find_repository(spec, args): + # figure out namespace for spec + if spec.namespace and args.namespace and spec.namespace != args.namespace: + tty.die("Namespaces '%s' and '%s' do not match." % (spec.namespace, args.namespace)) + + if not spec.namespace and args.namespace: + spec.namespace = args.namespace + + # Figure out where the new package should live. + repo_path = args.repo + if repo_path is not None: + try: + repo = Repo(repo_path) + if spec.namespace and spec.namespace != repo.namespace: + tty.die("Can't create package with namespace %s in repo with namespace %s." + % (spec.namespace, repo.namespace)) + except RepoError as e: + tty.die(str(e)) else: - mkdirp(os.path.dirname(pkg_path)) + if spec.namespace: + repo = spack.repo.get_repo(spec.namespace, None) + if not repo: + tty.die("Unknown namespace: %s" % spec.namespace) + else: + repo = spack.repo.first_repo() + + # Set the namespace on the spec if it's not there already + if not spec.namespace: + spec.namespace = repo.namespace + + return repo + + +def fetch_tarballs(url, name, args): + """Try to find versions of the supplied archive by scraping the web. + + Prompts the user to select how many to download if many are found. + + """ versions = spack.util.web.find_versions_of_archive(url) rkeys = sorted(versions.keys(), reverse=True) versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys))) @@ -196,13 +233,35 @@ def create(parser, args): default=5, abort='q') if not archives_to_fetch: - tty.msg("Aborted.") - return + tty.die("Aborted.") + + sorted_versions = sorted(versions.keys(), reverse=True) + sorted_urls = [versions[v] for v in sorted_versions] + return sorted_versions[:archives_to_fetch], sorted_urls[:archives_to_fetch] + + +def create(parser, args): + url = args.url + if not url: + setup_parser.subparser.print_help() + return + + # Figure out a name and repo for the package. + name, version = guess_name_and_version(url, args) + spec = Spec(name) + name = spec.name # factors out namespace, if any + repo = find_repository(spec, args) + + tty.msg("This looks like a URL for %s version %s." % (name, version)) + tty.msg("Creating template for package %s" % name) + + # Fetch tarballs (prompting user if necessary) + versions, urls = fetch_tarballs(url, name, args) + # Try to guess what configure system is used. guesser = ConfigureGuesser() ver_hash_tuples = spack.cmd.checksum.get_checksums( - versions.keys()[:archives_to_fetch], - [versions[v] for v in versions.keys()[:archives_to_fetch]], + versions, urls, first_stage_function=guesser, keep_stage=args.keep_stage) @@ -214,7 +273,7 @@ def create(parser, args): name = 'py-%s' % name # Create a directory for the new package. - pkg_path = spack.repo.filename_for_package_name(name) + pkg_path = repo.filename_for_package_name(name) if os.path.exists(pkg_path) and not args.force: tty.die("%s already exists." 
% pkg_path) else: diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py index 1acbebbc15..9df53312f8 100644 --- a/lib/spack/spack/cmd/diy.py +++ b/lib/spack/spack/cmd/diy.py @@ -69,7 +69,7 @@ def diy(self, args): sys.exit(1) else: tty.msg("Running 'spack edit -f %s'" % spec.name) - edit_package(spec.name, True) + edit_package(spec.name, spack.repo.first_repo(), None, True) return if not spec.version.concrete: diff --git a/lib/spack/spack/cmd/edit.py b/lib/spack/spack/cmd/edit.py index e0688dc96b..a20e40df9b 100644 --- a/lib/spack/spack/cmd/edit.py +++ b/lib/spack/spack/cmd/edit.py @@ -30,6 +30,8 @@ from llnl.util.filesystem import mkdirp, join_path import spack import spack.cmd +from spack.spec import Spec +from spack.repository import Repo from spack.util.naming import mod_to_class description = "Open package files in $EDITOR" @@ -53,9 +55,16 @@ class ${class_name}(Package): """) -def edit_package(name, force=False): - path = spack.repo.filename_for_package_name(name) +def edit_package(name, repo_path, namespace, force=False): + if repo_path: + repo = Repo(repo_path) + elif namespace: + repo = spack.repo.get_repo(namespace) + else: + repo = spack.repo + path = repo.filename_for_package_name(name) + spec = Spec(name) if os.path.exists(path): if not os.path.isfile(path): tty.die("Something's wrong. '%s' is not a file!" % path) @@ -63,13 +72,13 @@ def edit_package(name, force=False): tty.die("Insufficient permissions on '%s'!" % path) elif not force: tty.die("No package '%s'. Use spack create, or supply -f/--force " - "to edit a new file." % name) + "to edit a new file." % spec.name) else: mkdirp(os.path.dirname(path)) with open(path, "w") as pkg_file: pkg_file.write( package_template.substitute( - name=name, class_name=mod_to_class(name))) + name=spec.name, class_name=mod_to_class(spec.name))) spack.editor(path) @@ -79,17 +88,25 @@ def setup_parser(subparser): '-f', '--force', dest='force', action='store_true', help="Open a new file in $EDITOR even if package doesn't exist.") - filetypes = subparser.add_mutually_exclusive_group() - filetypes.add_argument( + excl_args = subparser.add_mutually_exclusive_group() + + # Various filetypes you can edit directly from the cmd line. + excl_args.add_argument( '-c', '--command', dest='path', action='store_const', const=spack.cmd.command_path, help="Edit the command with the supplied name.") - filetypes.add_argument( + excl_args.add_argument( '-t', '--test', dest='path', action='store_const', const=spack.test_path, help="Edit the test with the supplied name.") - filetypes.add_argument( + excl_args.add_argument( '-m', '--module', dest='path', action='store_const', const=spack.module_path, help="Edit the main spack module with the supplied name.") + # Options for editing packages + excl_args.add_argument( + '-r', '--repo', default=None, help="Path to repo to edit package in.") + excl_args.add_argument( + '-N', '--namespace', default=None, help="Namespace of package to edit.") + subparser.add_argument( 'name', nargs='?', default=None, help="name of package to edit") @@ -107,7 +124,7 @@ def edit(parser, args): spack.editor(path) elif name: - edit_package(name, args.force) + edit_package(name, args.repo, args.namespace, args.force) else: # By default open the directory where packages or commands live. 
spack.editor(path) diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py index a792f04cfd..34c755fb67 100644 --- a/lib/spack/spack/cmd/repo.py +++ b/lib/spack/spack/cmd/repo.py @@ -44,9 +44,10 @@ def setup_parser(subparser): # Create create_parser = sp.add_parser('create', help=repo_create.__doc__) create_parser.add_argument( - 'namespace', help="Namespace to identify packages in the repository.") + 'directory', help="Directory to create the repo in.") create_parser.add_argument( - 'directory', help="Directory to create the repo in. Defaults to same as namespace.", nargs='?') + 'namespace', help="Namespace to identify packages in the repository. " + "Defaults to the directory name.", nargs='?') # List list_parser = sp.add_parser('list', help=repo_list.__doc__) @@ -72,14 +73,15 @@ def setup_parser(subparser): def repo_create(args): - """Create a new package repo for a particular namespace.""" + """Create a new package repository.""" + root = canonicalize_path(args.directory) namespace = args.namespace - if not re.match(r'\w[\.\w-]*', namespace): - tty.die("Invalid namespace: '%s'" % namespace) - root = args.directory - if not root: - root = namespace + if not args.namespace: + namespace = os.path.basename(root) + + if not re.match(r'\w[\.\w-]*', namespace): + tty.die("'%s' is not a valid namespace." % namespace) existed = False if os.path.exists(root): @@ -123,27 +125,22 @@ def repo_create(args): def repo_add(args): - """Add a package source to the Spack configuration""" + """Add a package source to Spack's configuration.""" path = args.path - # check if the path is relative to the spack directory. - real_path = path - if path.startswith('$spack'): - real_path = spack.repository.substitute_spack_prefix(path) - elif not os.path.isabs(real_path): - real_path = os.path.abspath(real_path) - path = real_path + # real_path is absolute and handles substitution. + canon_path = canonicalize_path(path) # check if the path exists - if not os.path.exists(real_path): + if not os.path.exists(canon_path): tty.die("No such file or directory: '%s'." % path) # Make sure the path is a directory. - if not os.path.isdir(real_path): + if not os.path.isdir(canon_path): tty.die("Not a Spack repository: '%s'." % path) # Make sure it's actually a spack repository by constructing it. - repo = Repo(real_path) + repo = Repo(canon_path) # If that succeeds, finally add it to the configuration. repos = spack.config.get_config('repos', args.scope) @@ -152,30 +149,32 @@ def repo_add(args): if repo.root in repos or path in repos: tty.die("Repository is already registered with Spack: '%s'" % path) - repos.insert(0, path) + repos.insert(0, canon_path) spack.config.update_config('repos', repos, args.scope) tty.msg("Created repo with namespace '%s'." % repo.namespace) def repo_remove(args): - """Remove a repository from the Spack configuration.""" + """Remove a repository from Spack's configuration.""" repos = spack.config.get_config('repos', args.scope) path_or_namespace = args.path_or_namespace # If the argument is a path, remove that repository from config. - path = os.path.abspath(path_or_namespace) - if path in repos: - repos.remove(path) - spack.config.update_config('repos', repos, args.scope) - tty.msg("Removed repository '%s'." 
% path) - return + canon_path = canonicalize_path(path_or_namespace) + for repo_path in repos: + repo_canon_path = canonicalize_path(repo_path) + if canon_path == repo_canon_path: + repos.remove(repo_path) + spack.config.update_config('repos', repos, args.scope) + tty.msg("Removed repository '%s'." % repo_path) + return # If it is a namespace, remove corresponding repo for path in repos: try: repo = Repo(path) if repo.namespace == path_or_namespace: - repos.remove(repo.root) + repos.remove(path) spack.config.update_config('repos', repos, args.scope) tty.msg("Removed repository '%s' with namespace %s." % (repo.root, repo.namespace)) @@ -188,7 +187,7 @@ def repo_remove(args): def repo_list(args): - """List package sources and their mnemoics""" + """Show registered repositories and their namespaces.""" roots = spack.config.get_config('repos', args.scope) repos = [] for r in roots: diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index 3367572ef5..31596cee7a 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -54,6 +54,9 @@ repo_config_name = 'repo.yaml' # Top-level filename for repo config. packages_dir_name = 'packages' # Top-level repo directory containing pkgs. package_file_name = 'package.py' # Filename for packages in a repository. +# Guaranteed unused default value for some functions. +NOT_PROVIDED = object() + def _autospec(function): """Decorator that automatically converts the argument of a single-arg @@ -75,7 +78,15 @@ def _make_namespace_module(ns): def substitute_spack_prefix(path): """Replaces instances of $spack with Spack's prefix.""" - return path.replace('$spack', spack.prefix) + return re.sub(r'^\$spack', spack.prefix, path) + + +def canonicalize_path(path): + """Substitute $spack, expand user home, take abspath.""" + path = substitute_spack_prefix(path) + path = os.path.expanduser(path) + path = os.path.abspath(path) + return path class RepoPath(object): @@ -109,7 +120,10 @@ class RepoPath(object): repo = Repo(root, self.super_namespace) self.put_last(repo) except RepoError as e: - tty.warn("Failed to initialize repository at '%s'." % root, e.message) + tty.warn("Failed to initialize repository at '%s'." % root, + e.message, + "To remove the bad repository, run this command:", + " spack repo rm %s" % root) def swap(self, other): @@ -173,6 +187,31 @@ class RepoPath(object): self.repos.remove(repo) + def get_repo(self, namespace, default=NOT_PROVIDED): + """Get a repository by namespace. + Arguments + namespace + Look up this namespace in the RepoPath, and return + it if found. + + Optional Arguments + default + If default is provided, return it when the namespace + isn't found. If not, raise an UnknownNamespaceError. + """ + fullspace = '%s.%s' % (self.super_namespace, namespace) + if fullspace not in self.by_namespace: + if default == NOT_PROVIDED: + raise UnknownNamespaceError(namespace) + return default + return self.by_namespace[fullspace] + + + def first_repo(self): + """Get the first repo in precedence order.""" + return self.repos[0] if self.repos else None + + def all_package_names(self): """Return all unique package names in all repositories.""" return self._all_package_names @@ -229,7 +268,6 @@ class RepoPath(object): if fullname in sys.modules: return sys.modules[fullname] - # partition fullname into prefix and module name. 
namespace, dot, module_name = fullname.rpartition('.') @@ -242,11 +280,23 @@ class RepoPath(object): return module - def repo_for_pkg(self, pkg_name): + @_autospec + def repo_for_pkg(self, spec): + """Given a spec, get the repository for its package.""" + # If the spec already has a namespace, then return the + # corresponding repo if we know about it. + if spec.namespace: + fullspace = '%s.%s' % (self.super_namespace, spec.namespace) + if fullspace not in self.by_namespace: + raise UnknownNamespaceError(spec.namespace) + return self.by_namespace[fullspace] + + # If there's no namespace, search in the RepoPath. for repo in self.repos: - if pkg_name in repo: + if spec.name in repo: return repo - raise UnknownPackageError(pkg_name) + else: + raise UnknownPackageError(spec.name) @_autospec @@ -255,16 +305,7 @@ class RepoPath(object): Raises UnknownPackageError if not found. """ - # if the spec has a fully qualified namespace, we grab it - # directly and ignore overlay precedence. - if spec.namespace: - fullspace = '%s.%s' % (self.super_namespace, spec.namespace) - if not fullspace in self.by_namespace: - raise UnknownPackageError( - "No configured repository contains package %s." % spec.fullname) - return self.by_namespace[fullspace].get(spec) - else: - return self.repo_for_pkg(spec.name).get(spec) + return self.repo_for_pkg(spec).get(spec) def dirname_for_package_name(self, pkg_name): @@ -310,7 +351,7 @@ class Repo(object): """ # Root directory, containing _repo.yaml and package dirs # Allow roots to by spack-relative by starting with '$spack' - self.root = substitute_spack_prefix(root) + self.root = canonicalize_path(root) # super-namespace for all packages in the Repo self.super_namespace = namespace @@ -330,7 +371,7 @@ class Repo(object): # Read configuration and validate namespace config = self._read_config() check('namespace' in config, '%s must define a namespace.' - % join_path(self.root, repo_config_name)) + % join_path(root, repo_config_name)) self.namespace = config['namespace'] check(re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace), @@ -524,13 +565,22 @@ class Repo(object): return [p for p in self.all_packages() if p.extends(extendee_spec)] - def dirname_for_package_name(self, pkg_name): + def _check_namespace(self, spec): + """Check that the spec's namespace is the same as this repository's.""" + if spec.namespace and spec.namespace != self.namespace: + raise UnknownNamespaceError(spec.namespace) + + + @_autospec + def dirname_for_package_name(self, spec): """Get the directory name for a particular package. This is the directory that contains its package.py file.""" - return join_path(self.packages_path, pkg_name) + self._check_namespace(spec) + return join_path(self.packages_path, spec.name) - def filename_for_package_name(self, pkg_name): + @_autospec + def filename_for_package_name(self, spec): """Get the filename for the module we should load for a particular package. Packages for a Repo live in ``$root//package.py`` @@ -539,8 +589,8 @@ class Repo(object): package doesn't exist yet, so callers will need to ensure the package exists before importing. 
""" - validate_module_name(pkg_name) - pkg_dir = self.dirname_for_package_name(pkg_name) + self._check_namespace(spec) + pkg_dir = self.dirname_for_package_name(spec.name) return join_path(pkg_dir, package_file_name) @@ -679,6 +729,13 @@ class UnknownPackageError(PackageLoadError): self.name = name +class UnknownNamespaceError(PackageLoadError): + """Raised when we encounter an unknown namespace""" + def __init__(self, namespace): + super(UnknownNamespaceError, self).__init__( + "Unknown namespace: %s" % namespace) + + class FailedConstructorError(PackageLoadError): """Raised when a package's class constructor fails.""" def __init__(self, name, exc_type, exc_obj, exc_tb): diff --git a/lib/spack/spack/util/naming.py b/lib/spack/spack/util/naming.py index 26ca86c77f..5025f15027 100644 --- a/lib/spack/spack/util/naming.py +++ b/lib/spack/spack/util/naming.py @@ -8,11 +8,15 @@ from StringIO import StringIO import spack __all__ = ['mod_to_class', 'spack_module_to_python_module', 'valid_module_name', + 'valid_fully_qualified_module_name', 'validate_fully_qualified_module_name', 'validate_module_name', 'possible_spack_module_names', 'NamespaceTrie'] # Valid module names can contain '-' but can't start with it. _valid_module_re = r'^\w[\w-]*$' +# Valid module names can contain '-' but can't start with it. +_valid_fully_qualified_module_re = r'^(\w[\w-]*)(\.\w[\w-]*)*$' + def mod_to_class(mod_name): """Convert a name from module style to class name style. Spack mostly @@ -75,16 +79,27 @@ def possible_spack_module_names(python_mod_name): def valid_module_name(mod_name): - """Return whether the mod_name is valid for use in Spack.""" + """Return whether mod_name is valid for use in Spack.""" return bool(re.match(_valid_module_re, mod_name)) +def valid_fully_qualified_module_name(mod_name): + """Return whether mod_name is a valid namespaced module name.""" + return bool(re.match(_valid_fully_qualified_module_re, mod_name)) + + def validate_module_name(mod_name): """Raise an exception if mod_name is not valid.""" if not valid_module_name(mod_name): raise InvalidModuleNameError(mod_name) +def validate_fully_qualified_module_name(mod_name): + """Raise an exception if mod_name is not a valid namespaced module name.""" + if not valid_fully_qualified_module_name(mod_name): + raise InvalidFullyQualifiedModuleNameError(mod_name) + + class InvalidModuleNameError(spack.error.SpackError): """Raised when we encounter a bad module name.""" def __init__(self, name): @@ -93,6 +108,14 @@ class InvalidModuleNameError(spack.error.SpackError): self.name = name +class InvalidFullyQualifiedModuleNameError(spack.error.SpackError): + """Raised when we encounter a bad full package name.""" + def __init__(self, name): + super(InvalidFullyQualifiedModuleNameError, self).__init__( + "Invalid fully qualified package name: " + name) + self.name = name + + class NamespaceTrie(object): class Element(object): def __init__(self, value): -- cgit v1.2.3-70-g09d2