31 files changed, 773 insertions, 178 deletions
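Before the diff itself, a quick illustration of the two headline changes: a new `resource` directive for fetching and staging auxiliary archives inside a package's stage area, and a `preferred=True` flag on `version()` that the concretizer's new sort key honors. This is a minimal sketch with a hypothetical package name, URLs, and checksums; see the llvm, clang, and python packages below for the real uses.

from spack import *

class Example(Package):
    """Hypothetical package showing the new directives."""
    homepage = "http://www.example.com/"
    url      = "http://www.example.com/example-1.0.tar.gz"

    # The concretizer now picks 1.0 even though 2.0rc1 sorts as newer.
    version('2.0rc1', '0123456789abcdef0123456789abcdef')
    version('1.0',    'fedcba9876543210fedcba9876543210', preferred=True)

    # Fetched and staged on its own, then symlinked into <stage>/tools
    # when this package is staged; only applies when the spec is @1.0.
    resource(name='extra-tools',
             url='http://www.example.com/extra-tools-1.0.tar.gz',
             md5='00112233445566778899aabbccddeeff',
             destination='tools', when='@1.0')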
@@ -150,7 +150,7 @@ def main(): sys.stderr.write('\n') tty.die("Keyboard interrupt.") - # Allow commands to return values if they want to exit with some ohter code. + # Allow commands to return values if they want to exit with some other code. if return_val is None: sys.exit(0) elif isinstance(return_val, int): diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py index cc87b7eac2..c84828951d 100644 --- a/lib/spack/llnl/util/lang.py +++ b/lib/spack/llnl/util/lang.py @@ -112,7 +112,7 @@ def partition_list(elements, predicate): def caller_locals(): """This will return the locals of the *parent* of the caller. - This allows a fucntion to insert variables into its caller's + This allows a function to insert variables into its caller's scope. Yes, this is some black magic, and yes it's useful for implementing things like depends_on and provides. """ diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 02eeed01fa..92cb417a85 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -24,6 +24,7 @@ ############################################################################## import os import tempfile +import getpass from llnl.util.filesystem import * # This lives in $prefix/lib/spack/spack/__file__ @@ -111,9 +112,17 @@ use_tmp_stage = True # that it can create. tmp_dirs = [] _default_tmp = tempfile.gettempdir() -if _default_tmp != os.getcwd(): - tmp_dirs.append(os.path.join(_default_tmp, 'spack-stage')) -tmp_dirs.append('/nfs/tmp2/%u/spack-stage') +_tmp_user = getpass.getuser() + +_tmp_candidates = (_default_tmp, '/nfs/tmp2', '/tmp', '/var/tmp') +for path in _tmp_candidates: + # don't add a second username if it's already unique by user. + if not _tmp_user in path: + tmp_dirs.append(join_path(path, '%u', 'spack-stage')) + +for path in _tmp_candidates: + if not path in tmp_dirs: + tmp_dirs.append(join_path(path, 'spack-stage')) # Whether spack should allow installation of unsafe versions of # software. "Unsafe" versions are ones it doesn't have a checksum diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 20154a55b3..84c2f6015b 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -145,7 +145,7 @@ def set_build_environment_variables(pkg): # Install root prefix os.environ[SPACK_INSTALL] = spack.install_path - # Remove these vars from the environment during build becaus they + # Remove these vars from the environment during build because they # can affect how some packages find libraries. We want to make # sure that builds never pull in unintended external dependencies. pop_keys(os.environ, "LD_LIBRARY_PATH", "LD_RUN_PATH", "DYLD_LIBRARY_PATH") diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 5e42860f3e..9ecb709110 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -34,8 +34,8 @@ from llnl.util.filesystem import mkdirp import spack import spack.cmd import spack.cmd.checksum -import spack.package import spack.url +import spack.util.web from spack.util.naming import * import spack.util.crypto as crypto @@ -166,7 +166,7 @@ def create(parser, args): tty.msg("This looks like a URL for %s version %s." 
% (name, version)) tty.msg("Creating template for package %s" % name) - versions = spack.package.find_versions_of_archive(url) + versions = spack.util.web.find_versions_of_archive(url) rkeys = sorted(versions.keys(), reverse=True) versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys))) diff --git a/lib/spack/spack/cmd/url-parse.py b/lib/spack/spack/cmd/url-parse.py new file mode 100644 index 0000000000..077c793d2e --- /dev/null +++ b/lib/spack/spack/cmd/url-parse.py @@ -0,0 +1,75 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import sys + +import llnl.util.tty as tty + +import spack +import spack.url +from spack.util.web import find_versions_of_archive + +description = "Show parsing of a URL, optionally spider web for other versions." + +def setup_parser(subparser): + subparser.add_argument('url', help="url of a package archive") + subparser.add_argument( + '-s', '--spider', action='store_true', help="Spider the source page for versions.") + + +def print_name_and_version(url): + name, ns, nl, ntup, ver, vs, vl, vtup = spack.url.substitution_offsets(url) + underlines = [" "] * max(ns+nl, vs+vl) + for i in range(ns, ns+nl): + underlines[i] = '-' + for i in range(vs, vs+vl): + underlines[i] = '~' + + print " %s" % url + print " %s" % ''.join(underlines) + + +def url_parse(parser, args): + url = args.url + + ver, vs, vl = spack.url.parse_version_offset(url) + name, ns, nl = spack.url.parse_name_offset(url, ver) + + tty.msg("Parsing URL:") + try: + print_name_and_version(url) + except spack.url.UrlParseError as e: + tty.error(str(e)) + + print + tty.msg("Substituting version 9.9.9b:") + newurl = spack.url.substitute_version(url, '9.9.9b') + print_name_and_version(newurl) + + if args.spider: + print + tty.msg("Spidering for versions:") + versions = find_versions_of_archive(url) + for v in sorted(versions): + print "%-20s%s" % (v, versions[v]) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index d9419da784..85cdb202d5 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -40,7 +40,6 @@ import spack.error from spack.version import * - class DefaultConcretizer(object): """This class doesn't have any state, it just provides some methods for concretization. 
You can subclass it to override just some of the @@ -68,9 +67,17 @@ class DefaultConcretizer(object): # If there are known available versions, return the most recent # version that satisfies the spec pkg = spec.package + + # Key function to sort versions first by whether they were + # marked `preferred=True`, then by most recent. + def preferred_key(v): + prefer = pkg.versions[v].get('preferred', False) + return (prefer, v) + valid_versions = sorted( [v for v in pkg.versions - if any(v.satisfies(sv) for sv in spec.versions)]) + if any(v.satisfies(sv) for sv in spec.versions)], + key=preferred_key) if valid_versions: spec.versions = ver([valid_versions[-1]]) diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index 2a818e8d0c..aa9fbd8d33 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -42,15 +42,19 @@ The available directives are: * ``extends`` * ``patch`` * ``variant`` + * ``resource`` """ -__all__ = [ 'depends_on', 'extends', 'provides', 'patch', 'version', - 'variant' ] +__all__ = ['depends_on', 'extends', 'provides', 'patch', 'version', + 'variant', 'resource'] import re import inspect +import os.path +import functools from llnl.util.lang import * +from llnl.util.filesystem import join_path import spack import spack.spec @@ -60,7 +64,8 @@ from spack.version import Version from spack.patch import Patch from spack.variant import Variant from spack.spec import Spec, parse_anonymous_spec - +from spack.resource import Resource +from spack.fetch_strategy import from_kwargs # # This is a list of all directives, built up as they are defined in @@ -79,8 +84,8 @@ class directive(object): """Decorator for Spack directives. Spack directives allow you to modify a package while it is being - defined, e.g. to add version or depenency information. Directives - are one of the key pieces of Spack's package "langauge", which is + defined, e.g. to add version or dependency information. Directives + are one of the key pieces of Spack's package "language", which is embedded in python. Here's an example directive: @@ -141,6 +146,7 @@ class directive(object): def __call__(self, directive_function): directives[directive_function.__name__] = self + @functools.wraps(directive_function) def wrapped(*args, **kwargs): pkg = DictWrapper(caller_locals()) self.ensure_dicts(pkg) @@ -259,6 +265,43 @@ def variant(pkg, name, default=False, description=""): pkg.variants[name] = Variant(default, description) +@directive('resources') +def resource(pkg, **kwargs): + """ + Define an external resource to be fetched and staged when building the package. Based on the keywords present in the + dictionary, the appropriate FetchStrategy will be used for the resource. Resources are fetched and staged in their + own folder inside the spack stage area, and then linked into the stage area of the package that needs them. + + List of recognized keywords: + + * 'when' : (optional) the condition under which the resource is needed + * 'destination' : (optional) path at which to link the resource. This path must be relative to the main package stage + area. + * 'placement' : (optional) allows fine-tuning how the resource is linked into the main package stage + area. 
+ """ + when = kwargs.get('when', pkg.name) + destination = kwargs.get('destination', "") + placement = kwargs.get('placement', None) + # Check if the path is relative + if os.path.isabs(destination): + message = "The destination keyword of a resource directive can't be an absolute path.\n" + message += "\tdestination : '{dest}\n'".format(dest=destination) + raise RuntimeError(message) + # Check if the path falls within the main package stage area + test_path = 'stage_folder_root/' + normalized_destination = os.path.normpath(join_path(test_path, destination)) # Normalized absolute path + if test_path not in normalized_destination: + message = "The destination folder of a resource must fall within the main package stage directory.\n" + message += "\tdestination : '{dest}'\n".format(dest=destination) + raise RuntimeError(message) + when_spec = parse_anonymous_spec(when, pkg.name) + resources = pkg.resources.setdefault(when_spec, []) + fetcher = from_kwargs(**kwargs) + name = kwargs.get('name') + resources.append(Resource(name, fetcher, destination, placement)) + + class DirectiveError(spack.error.SpackError): """This is raised when something is wrong with a package directive.""" def __init__(self, directive, message): diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index 8c4bd4ed8a..0657146bf6 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -634,6 +634,22 @@ def from_url(url): return URLFetchStrategy(url) +def from_kwargs(**kwargs): + """ + Construct the appropriate FetchStrategy from the given keyword arguments. + + :param kwargs: dictionary of keyword arguments + :return: fetcher or raise a FetchError exception + """ + for fetcher in all_strategies: + if fetcher.matches(kwargs): + return fetcher(**kwargs) + # Raise an error in case we can't instantiate any known strategy + message = "Cannot instantiate any FetchStrategy" + long_message = message + " from the given arguments : {arguments}".format(srguments=kwargs) + raise FetchError(message, long_message) + + def args_are_for(args, fetcher): fetcher.matches(args) @@ -671,7 +687,7 @@ def for_package_version(pkg, version): class FetchError(spack.error.SpackError): - def __init__(self, msg, long_msg): + def __init__(self, msg, long_msg=None): super(FetchError, self).__init__(msg, long_msg) @@ -689,7 +705,7 @@ class NoArchiveFileError(FetchError): class NoDigestError(FetchError): - def __init__(self, msg, long_msg): + def __init__(self, msg, long_msg=None): super(NoDigestError, self).__init__(msg, long_msg) diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 6fbf82de14..1d9b0e7ef2 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -26,7 +26,7 @@ This file contains code for creating spack mirror directories. A mirror is an organized hierarchy containing specially named archive files. This enabled spack to know where to find files in a mirror if -the main server for a particualr package is down. Or, if the computer +the main server for a particular package is down. Or, if the computer where spack is run is not connected to the internet, it allows spack to download packages directly from a mirror (e.g., on an intranet). 
""" @@ -42,7 +42,7 @@ import spack.fetch_strategy as fs from spack.spec import Spec from spack.stage import Stage from spack.version import * -from spack.util.compression import extension +from spack.util.compression import extension, allowed_archive def mirror_archive_filename(spec): @@ -87,11 +87,26 @@ def get_matching_versions(specs, **kwargs): if v.satisfies(spec.versions): s = Spec(pkg.name) s.versions = VersionList([v]) + s.variants = spec.variants.copy() matching.append(s) return matching +def suggest_archive_basename(resource): + """ + Return a tentative basename for an archive. Raise an exception if the name is among the allowed archive types. + + :param fetcher: + :return: + """ + basename = os.path.basename(resource.fetcher.url) + if not allowed_archive(basename): + raise RuntimeError("%s is not an allowed archive tye" % basename) + return basename + + + def create(path, specs, **kwargs): """Create a directory to be used as a spack mirror, and fill it with package archives. @@ -108,7 +123,7 @@ def create(path, specs, **kwargs): Return Value: Returns a tuple of lists: (present, mirrored, error) - * present: Package specs that were already prsent. + * present: Package specs that were already present. * mirrored: Package specs that were successfully mirrored. * error: Package specs that failed to mirror due to some error. @@ -140,6 +155,7 @@ def create(path, specs, **kwargs): error = [] # Iterate through packages and download all the safe tarballs for each of them + everything_already_exists = True for spec in version_specs: pkg = spec.package @@ -152,26 +168,47 @@ def create(path, specs, **kwargs): if os.path.exists(archive_path): tty.msg("Already added %s" % spec.format("$_$@")) + else: + everything_already_exists = False + # Set up a stage and a fetcher for the download + unique_fetch_name = spec.format("$_$@") + fetcher = fs.for_package_version(pkg, pkg.version) + stage = Stage(fetcher, name=unique_fetch_name) + fetcher.set_stage(stage) + + # Do the fetch and checksum if necessary + fetcher.fetch() + if not kwargs.get('no_checksum', False): + fetcher.check() + tty.msg("Checksum passed for %s@%s" % (pkg.name, pkg.version)) + + # Fetchers have to know how to archive their files. Use + # that to move/copy/create an archive in the mirror. + fetcher.archive(archive_path) + tty.msg("Added %s." % spec.format("$_$@")) + + # Fetch resources if they are associated with the spec + resources = pkg._get_resources() + for resource in resources: + resource_archive_path = join_path(subdir, suggest_archive_basename(resource)) + if os.path.exists(resource_archive_path): + tty.msg("Already added resource %s (%s@%s)." % (resource.name, pkg.name, pkg.version)) + continue + everything_already_exists = False + resource_stage_folder = pkg._resource_stage(resource) + resource_stage = Stage(resource.fetcher, name=resource_stage_folder) + resource.fetcher.set_stage(resource_stage) + resource.fetcher.fetch() + if not kwargs.get('no_checksum', False): + resource.fetcher.check() + tty.msg("Checksum passed for the resource %s (%s@%s)" % (resource.name, pkg.name, pkg.version)) + resource.fetcher.archive(resource_archive_path) + tty.msg("Added resource %s (%s@%s)." 
% (resource.name, pkg.name, pkg.version)) + + if everything_already_exists: present.append(spec) - continue - - # Set up a stage and a fetcher for the download - unique_fetch_name = spec.format("$_$@") - fetcher = fs.for_package_version(pkg, pkg.version) - stage = Stage(fetcher, name=unique_fetch_name) - fetcher.set_stage(stage) - - # Do the fetch and checksum if necessary - fetcher.fetch() - if not kwargs.get('no_checksum', False): - fetcher.check() - tty.msg("Checksum passed for %s@%s" % (pkg.name, pkg.version)) - - # Fetchers have to know how to archive their files. Use - # that to move/copy/create an archive in the mirror. - fetcher.archive(archive_path) - tty.msg("Added %s." % spec.format("$_$@")) - mirrored.append(spec) + else: + mirrored.append(spec) except Exception, e: if spack.debug: diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 4d75726e06..84bcb15f7f 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -655,26 +655,62 @@ class Package(object): "Will not fetch %s." % self.spec.format('$_$@'), checksum_msg) self.stage.fetch() + + ########## + # Fetch resources + resources = self._get_resources() + for resource in resources: + resource_stage_folder = self._resource_stage(resource) + # FIXME : works only for URLFetchStrategy + resource_mirror = join_path(self.name, os.path.basename(resource.fetcher.url)) + resource_stage = Stage(resource.fetcher, name=resource_stage_folder, mirror_path=resource_mirror) + resource.fetcher.set_stage(resource_stage) + # Delegate to stage object to trigger mirror logic + resource_stage.fetch() + resource_stage.check() + ########## + self._fetch_time = time.time() - start_time if spack.do_checksum and self.version in self.versions: self.stage.check() - def do_stage(self): """Unpacks the fetched tarball, then changes into the expanded tarball directory.""" if not self.spec.concrete: raise ValueError("Can only stage concrete packages.") - self.do_fetch() + def _expand_archive(stage, name=self.name): + archive_dir = stage.source_path + if not archive_dir: + stage.expand_archive() + tty.msg("Created stage in %s." % stage.path) + else: + tty.msg("Already staged %s in %s." % (name, stage.path)) - archive_dir = self.stage.source_path - if not archive_dir: - self.stage.expand_archive() - tty.msg("Created stage in %s." % self.stage.path) - else: - tty.msg("Already staged %s in %s." % (self.name, self.stage.path)) + + self.do_fetch() + _expand_archive(self.stage) + + ########## + # Stage resources in appropriate path + resources = self._get_resources() + for resource in resources: + stage = resource.fetcher.stage + _expand_archive(stage, resource.name) + # Turn placement into a dict with relative paths + placement = os.path.basename(stage.source_path) if resource.placement is None else resource.placement + if not isinstance(placement, dict): + placement = {'': placement} + # Make the paths in the dictionary absolute and link + for key, value in placement.iteritems(): + link_path = join_path(self.stage.source_path, resource.destination, value) + source_path = join_path(stage.source_path, key) + if not os.path.exists(link_path): + # Create a symlink + os.symlink(source_path, link_path) + ########## self.stage.chdir_to_source() @@ -697,9 +733,10 @@ class Package(object): # Construct paths to special files in the archive dir used to # keep track of whether patches were successfully applied. 
- archive_dir = self.stage.source_path - good_file = join_path(archive_dir, '.spack_patched') - bad_file = join_path(archive_dir, '.spack_patch_failed') + archive_dir = self.stage.source_path + good_file = join_path(archive_dir, '.spack_patched') + no_patches_file = join_path(archive_dir, '.spack_no_patches') + bad_file = join_path(archive_dir, '.spack_patch_failed') # If we encounter an archive that failed to patch, restage it # so that we can apply all the patches again. @@ -713,29 +750,46 @@ class Package(object): if os.path.isfile(good_file): tty.msg("Already patched %s" % self.name) return + elif os.path.isfile(no_patches_file): + tty.msg("No patches needed for %s." % self.name) + return # Apply all the patches for specs that match this one + patched = False for spec, patch_list in self.patches.items(): if self.spec.satisfies(spec): for patch in patch_list: - tty.msg('Applying patch %s' % patch.path_or_url) try: patch.apply(self.stage) + tty.msg('Applied patch %s' % patch.path_or_url) + patched = True except: # Touch bad file if anything goes wrong. + tty.msg('Patch %s failed.' % patch.path_or_url) touch(bad_file) raise - # patch succeeded. Get rid of failed file & touch good file so we - # don't try to patch again again next time. + if has_patch_fun: + try: + self.patch() + tty.msg("Ran patch() for %s." % self.name) + patched = True + except: + tty.msg("patch() function failed for %s." % self.name) + touch(bad_file) + raise + + # Get rid of any old failed file -- patches have either succeeded + # or are not needed. This is mostly defensive -- it's needed + # if the restage() method doesn't clean *everything* (e.g., for a repo) if os.path.isfile(bad_file): os.remove(bad_file) - touch(good_file) - - if has_patch_fun: - self.patch() - tty.msg("Patched %s" % self.name) + # touch good or no patches file so that we skip next time. + if patched: + touch(good_file) + else: + touch(no_patches_file) def do_fake_install(self): @@ -746,6 +800,19 @@ class Package(object): mkdirp(self.prefix.man1) + def _get_resources(self): + resources = [] + # Select the resources that are needed for this build + for when_spec, resource_list in self.resources.items(): + if when_spec in self.spec: + resources.extend(resource_list) + return resources + + def _resource_stage(self, resource): + pieces = ['resource', resource.name, self.spec.dag_hash()] + resource_stage_folder = '-'.join(pieces) + return resource_stage_folder + def _build_logger(self, log_path): """Create a context manager to log build output.""" @@ -1115,7 +1182,7 @@ class Package(object): raise VersionFetchError(self.__class__) try: - return find_versions_of_archive( + return spack.util.web.find_versions_of_archive( *self.all_urls, list_url=self.list_url, list_depth=self.list_depth) except spack.error.NoNetworkConnectionError, e: tty.die("Package.fetch_versions couldn't connect to:", @@ -1139,49 +1206,6 @@ class Package(object): return " ".join("-Wl,-rpath=%s" % p for p in self.rpath) -def find_versions_of_archive(*archive_urls, **kwargs): - list_url = kwargs.get('list_url', None) - list_depth = kwargs.get('list_depth', 1) - - # Generate a list of list_urls based on archive urls and any - # explicitly listed list_url in the package - list_urls = set() - if list_url: - list_urls.add(list_url) - for aurl in archive_urls: - list_urls.add(spack.url.find_list_url(aurl)) - - # Grab some web pages to scrape. 
- page_map = {} - for lurl in list_urls: - pages = spack.util.web.get_pages(lurl, depth=list_depth) - page_map.update(pages) - - # Scrape them for archive URLs - regexes = [] - for aurl in archive_urls: - # This creates a regex from the URL with a capture group for - # the version part of the URL. The capture group is converted - # to a generic wildcard, so we can use this to extract things - # on a page that look like archive URLs. - url_regex = spack.url.wildcard_version(aurl) - - # We'll be a bit more liberal and just look for the archive - # part, not the full path. - regexes.append(os.path.basename(url_regex)) - - # Build a version list from all the matches we find - versions = {} - for page_url, content in page_map.iteritems(): - # extract versions from matches. - for regex in regexes: - versions.update( - (Version(m.group(1)), urljoin(page_url, m.group(0))) - for m in re.finditer(regex, content)) - - return versions - - def validate_package_url(url_string): """Determine whether spack can handle a particular URL or not.""" url = urlparse(url_string) diff --git a/lib/spack/spack/resource.py b/lib/spack/spack/resource.py new file mode 100644 index 0000000000..8d081b45c9 --- /dev/null +++ b/lib/spack/spack/resource.py @@ -0,0 +1,39 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +""" +Describes an optional resource needed for a build. Typically a bunch of sources that can be built in-tree within another +package to enable optional features. +""" + + +class Resource(object): + """ + Represents an optional resource. Aggregates a name, a fetcher, a destination and a placement + """ + def __init__(self, name, fetcher, destination, placement): + self.name = name + self.fetcher = fetcher + self.destination = destination + self.placement = placement diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 754344fc01..76ca7273cb 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -82,14 +82,18 @@ class Stage(object): stage object later). If name is not provided, then this stage will be given a unique name automatically. """ + # TODO: fetch/stage coupling needs to be reworked -- the logic + # TODO: here is convoluted and not modular enough. 
if isinstance(url_or_fetch_strategy, basestring): self.fetcher = fs.from_url(url_or_fetch_strategy) elif isinstance(url_or_fetch_strategy, fs.FetchStrategy): self.fetcher = url_or_fetch_strategy else: raise ValueError("Can't construct Stage without url or fetch strategy") - self.fetcher.set_stage(self) + self.default_fetcher = self.fetcher # self.fetcher can change with mirrors. + self.skip_checksum_for_mirror = True # used for mirrored archives of repositories. + self.name = kwargs.get('name') self.mirror_path = kwargs.get('mirror_path') @@ -198,17 +202,18 @@ class Stage(object): @property def archive_file(self): """Path to the source archive within this stage directory.""" - if not isinstance(self.fetcher, fs.URLFetchStrategy): - return None + paths = [] + if isinstance(self.fetcher, fs.URLFetchStrategy): + paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url))) - paths = [os.path.join(self.path, os.path.basename(self.fetcher.url))] if self.mirror_path: paths.append(os.path.join(self.path, os.path.basename(self.mirror_path))) for path in paths: if os.path.exists(path): return path - return None + else: + return None @property @@ -238,23 +243,34 @@ class Stage(object): """Downloads an archive or checks out code from a repository.""" self.chdir() - fetchers = [self.fetcher] + fetchers = [self.default_fetcher] # TODO: move mirror logic out of here and clean it up! + # TODO: Or @alalazo may have some ideas about how to use a + # TODO: CompositeFetchStrategy here. + self.skip_checksum_for_mirror = True if self.mirror_path: urls = ["%s/%s" % (m, self.mirror_path) for m in _get_mirrors()] + # If this archive is normally fetched from a tarball URL, + # then use the same digest. `spack mirror` ensures that + # the checksum will be the same. digest = None - if isinstance(self.fetcher, fs.URLFetchStrategy): - digest = self.fetcher.digest - fetchers = [fs.URLFetchStrategy(url, digest) - for url in urls] + fetchers - for f in fetchers: - f.set_stage(self) + if isinstance(self.default_fetcher, fs.URLFetchStrategy): + digest = self.default_fetcher.digest + + # Have to skip the checksum for things archived from + # repositories. How can this be made safer? + self.skip_checksum_for_mirror = not bool(digest) + + for url in urls: + fetchers.insert(0, fs.URLFetchStrategy(url, digest)) for fetcher in fetchers: try: - fetcher.fetch() + fetcher.set_stage(self) + self.fetcher = fetcher + self.fetcher.fetch() break except spack.error.SpackError, e: tty.msg("Fetching from %s failed." % fetcher) @@ -262,13 +278,22 @@ class Stage(object): continue else: errMessage = "All fetchers failed for %s" % self.name + self.fetcher = self.default_fetcher raise fs.FetchError(errMessage, None) def check(self): """Check the downloaded archive against a checksum digest. No-op if this stage checks code out of a repository.""" - self.fetcher.check() + if self.fetcher is not self.default_fetcher and self.skip_checksum_for_mirror: + tty.warn("Fetching from mirror without a checksum!", + "This package is normally checked out from a version " + "control system, but it has been archived on a spack " + "mirror. This means we cannot know a checksum for the " + "tarball in advance. 
Be sure that your connection to " + "this mirror is secure!") + else: + self.fetcher.check() def expand_archive(self): diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index 94384e9c86..e26daef296 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -23,6 +23,7 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import re +import os import sys import subprocess import urllib2, cookielib @@ -70,7 +71,9 @@ def _spider(args): """ url, visited, root, opener, depth, max_depth, raise_on_error = args - pages = {} + pages = {} # dict from page URL -> text content. + links = set() # set of all links seen on visited pages. + try: # Make a HEAD request first to check the content type. This lets # us ignore tarballs and gigantic files. @@ -99,42 +102,45 @@ def _spider(args): page = response.read() pages[response_url] = page - # If we're not at max depth, parse out the links in the page - if depth < max_depth: - link_parser = LinkParser() - subcalls = [] - link_parser.feed(page) - - while link_parser.links: - raw_link = link_parser.links.pop() + # Parse out the links in the page + link_parser = LinkParser() + subcalls = [] + link_parser.feed(page) - # Skip stuff that looks like an archive - if any(raw_link.endswith(suf) for suf in ALLOWED_ARCHIVE_TYPES): - continue + while link_parser.links: + raw_link = link_parser.links.pop() + abs_link = urlparse.urljoin(response_url, raw_link) - # Evaluate the link relative to the page it came from. - abs_link = urlparse.urljoin(response_url, raw_link) + links.add(abs_link) - # Skip things outside the root directory - if not abs_link.startswith(root): - continue + # Skip stuff that looks like an archive - if any(raw_link.endswith(suf) for suf in ALLOWED_ARCHIVE_TYPES): - continue wait + # Skip stuff that looks like an archive + if any(raw_link.endswith(suf) for suf in ALLOWED_ARCHIVE_TYPES): + continue - # Skip things outside the root directory - if not abs_link.startswith(root): - continue - # Skip already-visited links - if abs_link in visited: - continue + # Skip things outside the root directory + if not abs_link.startswith(root): + continue - subcalls.append((abs_link, visited, root, None, depth+1, max_depth, raise_on_error)) - visited.add(abs_link) + # Skip already-visited links + if abs_link in visited: + continue - if subcalls: - try: - pool = Pool(processes=len(subcalls)) - dicts = pool.map(_spider, subcalls) - for d in dicts: - pages.update(d) - finally: - pool.terminate() - pool.join() + # If we're not at max depth, follow links. + if depth < max_depth: + subcalls.append((abs_link, visited, root, None, + depth+1, max_depth, raise_on_error)) + visited.add(abs_link) + + if subcalls: + try: + pool = Pool(processes=len(subcalls)) + results = pool.map(_spider, subcalls) + for sub_pages, sub_links in results: + pages.update(sub_pages) + links.update(sub_links) + finally: + pool.terminate() + pool.join() except urllib2.URLError, e: tty.debug(e) @@ -155,10 +161,10 @@ def _spider(args): # Other types of errors are completely ignored, except in debug mode. tty.debug("Error in _spider: %s" % e) - return pages + return pages, links -def get_pages(root_url, **kwargs): +def spider(root_url, **kwargs): """Gets web pages from a root URL. If depth is specified (e.g., depth=2), then this will also fetches pages linked from the root and its children up to depth. @@ -167,5 +173,69 @@ def get_pages(root_url, **kwargs): performance over a sequential fetch. 
""" max_depth = kwargs.setdefault('depth', 1) - pages = _spider((root_url, set(), root_url, None, 1, max_depth, False)) - return pages + pages, links = _spider((root_url, set(), root_url, None, 1, max_depth, False)) + return pages, links + + +def find_versions_of_archive(*archive_urls, **kwargs): + """Scrape web pages for new versions of a tarball. + + Arguments: + archive_urls: + URLs for different versions of a package. Typically these + are just the tarballs from the package file itself. By + default, this searches the parent directories of archives. + + Keyword Arguments: + list_url: + + URL for a listing of archives. Spack wills scrape these + pages for download links that look like the archive URL. + + list_depth: + Max depth to follow links on list_url pages. + + """ + list_url = kwargs.get('list_url', None) + list_depth = kwargs.get('list_depth', 1) + + # Generate a list of list_urls based on archive urls and any + # explicitly listed list_url in the package + list_urls = set() + if list_url: + list_urls.add(list_url) + for aurl in archive_urls: + list_urls.add(spack.url.find_list_url(aurl)) + + # Grab some web pages to scrape. + pages = {} + links = set() + for lurl in list_urls: + p, l = spider(lurl, depth=list_depth) + pages.update(p) + links.update(l) + + # Scrape them for archive URLs + regexes = [] + for aurl in archive_urls: + # This creates a regex from the URL with a capture group for + # the version part of the URL. The capture group is converted + # to a generic wildcard, so we can use this to extract things + # on a page that look like archive URLs. + url_regex = spack.url.wildcard_version(aurl) + + # We'll be a bit more liberal and just look for the archive + # part, not the full path. + regexes.append(os.path.basename(url_regex)) + + # Build a dict version -> URL from any links that match the wildcards. + versions = {} + for url in links: + if any(re.search(r, url) for r in regexes): + try: + ver = spack.url.parse_version(url) + versions[ver] = url + except spack.url.UndetectableVersionError as e: + continue + + return versions diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh index 47202f6087..6503728d8f 100755 --- a/share/spack/setup-env.sh +++ b/share/spack/setup-env.sh @@ -55,13 +55,11 @@ # avoids the need to come up with a user-friendly naming scheme for # spack dotfiles. ######################################################################## + function spack { # save raw arguments into an array before butchering them - args=() - for a in "$@"; do - # yup, this is awful, blame bash2 compat - args=("${args[@]}" "$a") - done + declare -a args=( "$@" ) + # accumulate initial flags for main spack command _sp_flags="" while [[ "$1" =~ ^- ]]; do diff --git a/var/spack/packages/clang/package.py b/var/spack/packages/clang/package.py index 20a5ac2c94..e46e08d5f1 100644 --- a/var/spack/packages/clang/package.py +++ b/var/spack/packages/clang/package.py @@ -22,8 +22,13 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## + + from spack import * +import os +import os.path + class Clang(Package): """The goal of the Clang project is to create a new C, C++, Objective C and Objective C++ front-end for the LLVM compiler. 
@@ -39,13 +44,52 @@ class Clang(Package): version('3.6.2', 'ff862793682f714bb7862325b9c06e20', url='http://llvm.org/releases/3.6.2/cfe-3.6.2.src.tar.xz') version('3.5.1', '93f9532f8f7e6f1d8e5c1116907051cb', url='http://llvm.org/releases/3.5.1/cfe-3.5.1.src.tar.xz') + ########## + # @3.7.0 + resource(name='clang-tools-extra', + url='http://llvm.org/releases/3.7.0/clang-tools-extra-3.7.0.src.tar.xz', + md5='d5a87dacb65d981a427a536f6964642e', destination='tools', when='@3.7.0') + ########## + def install(self, spec, prefix): env['CXXFLAGS'] = self.compiler.cxx11_flag with working_dir('spack-build', create=True): + + options = [] + if '@3.7.0:' in spec: + options.append('-DCLANG_DEFAULT_OPENMP_RUNTIME:STRING=libomp') + options.extend(std_cmake_args) + cmake('..', - '-DCLANG_PATH_TO_LLVM_BUILD=%s' % spec['llvm'].prefix, - '-DLLVM_MAIN_SRC_DIR=%s' % spec['llvm'].prefix, - *std_cmake_args) + '-DCLANG_PATH_TO_LLVM_BUILD:PATH=%s' % spec['llvm'].prefix, + '-DLLVM_MAIN_SRC_DIR:PATH=%s' % spec['llvm'].prefix, + *options) make() make("install") + # CLang doesn't look in llvm folders for system headers... + self.link_llvm_directories(spec) + + def link_llvm_directories(self, spec): + + def clang_include_dir_at(root): + return join_path(root, 'include') + + def clang_lib_dir_at(root): + return join_path(root, 'lib/clang/', str(self.version), 'include') + + def do_link(source_dir, destination_dir): + if os.path.exists(source_dir): + for name in os.listdir(source_dir): + source = join_path(source_dir, name) + link = join_path(destination_dir, name) + os.symlink(source, link) + + # Link folder and files in include + llvm_dir = clang_include_dir_at(spec['llvm'].prefix) + clang_dir = clang_include_dir_at(self.prefix) + do_link(llvm_dir, clang_dir) + # Link folder and files in lib + llvm_dir = clang_lib_dir_at(spec['llvm'].prefix) + clang_dir = clang_lib_dir_at(self.prefix) + do_link(llvm_dir, clang_dir)
\ No newline at end of file diff --git a/var/spack/packages/hdf5/package.py b/var/spack/packages/hdf5/package.py index 44d4ede278..adac79d9bb 100644 --- a/var/spack/packages/hdf5/package.py +++ b/var/spack/packages/hdf5/package.py @@ -26,16 +26,14 @@ class Hdf5(Package): if '+mpi' in spec: extra_args.extend([ "--enable-parallel", - "CC=%s" % spec['mpich'].prefix.bin + "/mpicc", - "CXX=%s" % spec['mpich'].prefix.bin + "/mpic++", + "CC=%s" % spec['mpi'].prefix.bin + "/mpicc", + "CXX=%s" % spec['mpi'].prefix.bin + "/mpic++", ]) configure( "--prefix=%s" % prefix, "--with-zlib=%s" % spec['zlib'].prefix, "--enable-shared", - "CC=%s" % spec['mpi'].prefix.bin + "/mpicc", - "CXX=%s" % spec['mpi'].prefix.bin + "/mpic++", *extra_args) make() diff --git a/var/spack/packages/hwloc/package.py b/var/spack/packages/hwloc/package.py index 452a7d7ce3..7ebede76a3 100644 --- a/var/spack/packages/hwloc/package.py +++ b/var/spack/packages/hwloc/package.py @@ -15,6 +15,8 @@ class Hwloc(Package): homepage = "http://www.open-mpi.org/projects/hwloc/" url = "http://www.open-mpi.org/software/hwloc/v1.9/downloads/hwloc-1.9.tar.gz" + version('1.11.2', '486169cbe111cdea57be12638828ebbf', + url='http://www.open-mpi.org/software/hwloc/v1.11/downloads/hwloc-1.11.2.tar.bz2') version('1.11.1', '002742efd3a8431f98d6315365a2b543', url='http://www.open-mpi.org/software/hwloc/v1.11/downloads/hwloc-1.11.1.tar.bz2') version('1.9', '1f9f9155682fe8946a97c08896109508') @@ -26,4 +28,3 @@ class Hwloc(Package): make() make("install") - diff --git a/var/spack/packages/libedit/package.py b/var/spack/packages/libedit/package.py new file mode 100644 index 0000000000..bcd5212b9e --- /dev/null +++ b/var/spack/packages/libedit/package.py @@ -0,0 +1,14 @@ +from spack import * + +class Libedit(Package): + """An autotools compatible port of the NetBSD editline library""" + homepage = "http://thrysoee.dk/editline/" + url = "http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz" + + version('3.1', '43cdb5df3061d78b5e9d59109871b4f6', url="http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz") + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + + make() + make("install") diff --git a/var/spack/packages/libpciaccess/package.py b/var/spack/packages/libpciaccess/package.py index 6022fc34a3..403bafbbd2 100644 --- a/var/spack/packages/libpciaccess/package.py +++ b/var/spack/packages/libpciaccess/package.py @@ -1,4 +1,5 @@ from spack import * +import os.path class Libpciaccess(Package): """Generic PCI access library.""" @@ -13,6 +14,12 @@ class Libpciaccess(Package): depends_on('libtool') def install(self, spec, prefix): + # libpciaccess does not support OS X + if spec.satisfies('=darwin-x86_64'): + # create a dummy directory + mkdir(prefix.lib) + return + from subprocess import call call(["./autogen.sh"]) configure("--prefix=%s" % prefix) diff --git a/var/spack/packages/llvm/package.py b/var/spack/packages/llvm/package.py index b3ca488809..a3307584e0 100644 --- a/var/spack/packages/llvm/package.py +++ b/var/spack/packages/llvm/package.py @@ -42,13 +42,31 @@ class Llvm(Package): depends_on('python@2.7:') + variant('libcxx', default=False, description="Builds the LLVM Standard C++ library targeting C++11") + + ########## + # @3.7.0 + resource(name='compiler-rt', + url='http://llvm.org/releases/3.7.0/compiler-rt-3.7.0.src.tar.xz', md5='383c10affd513026f08936b5525523f5', + destination='projects', when='@3.7.0') + resource(name='openmp', + url='http://llvm.org/releases/3.7.0/openmp-3.7.0.src.tar.xz', 
md5='f482c86fdead50ba246a1a2b0bbf206f', + destination='projects', when='@3.7.0') + resource(name='libcxx', + url='http://llvm.org/releases/3.7.0/libcxx-3.7.0.src.tar.xz', md5='46aa5175cbe1ad42d6e9c995968e56dd', + destination='projects', placement='libcxx', when='+libcxx@3.7.0') + resource(name='libcxxabi', + url='http://llvm.org/releases/3.7.0/libcxxabi-3.7.0.src.tar.xz', md5='5aa769e2fca79fa5335cfae8f6258772', + destination='projects', placement='libcxxabi', when='+libcxx@3.7.0') + ########## + def install(self, spec, prefix): env['CXXFLAGS'] = self.compiler.cxx11_flag with working_dir('spack-build', create=True): cmake('..', - '-DLLVM_REQUIRES_RTTI=1', - '-DPYTHON_EXECUTABLE=%s/bin/python' % spec['python'].prefix, + '-DLLVM_REQUIRES_RTTI:BOOL=ON', + '-DPYTHON_EXECUTABLE:PATH=%s/bin/python' % spec['python'].prefix, *std_cmake_args) make() make("install") diff --git a/var/spack/packages/ninja/package.py b/var/spack/packages/ninja/package.py new file mode 100644 index 0000000000..9e6bf4e358 --- /dev/null +++ b/var/spack/packages/ninja/package.py @@ -0,0 +1,22 @@ +from spack import * +import os + +class Ninja(Package): + """ A small, fast Make alternative """ + homepage = "https://martine.github.io/ninja/" + url = "https://github.com/martine/ninja/archive/v1.6.0.tar.gz" + + version('1.6.0', '254133059f2da79d8727f654d7198f43') + + extends('python') + + def install(self, spec, prefix): + sh = which('sh') + python('configure.py', '--bootstrap') + + cp = which('cp') + + bindir = os.path.join(prefix, 'bin') + mkdir(bindir) + cp('-a', '-t', bindir, 'ninja') + cp('-ra', 'misc', prefix) diff --git a/var/spack/packages/py-blessings/package.py b/var/spack/packages/py-blessings/package.py new file mode 100644 index 0000000000..f2475a0efd --- /dev/null +++ b/var/spack/packages/py-blessings/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyBlessings(Package): + """A nicer, kinder way to write to the terminal """ + homepage = "https://github.com/erikrose/blessings" + url = "https://pypi.python.org/packages/source/b/blessings/blessings-1.6.tar.gz" + + version('1.6', '4f552a8ebcd4982693c92571beb99394') + + depends_on('py-setuptools') + + extends("python") + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-coverage/package.py b/var/spack/packages/py-coverage/package.py new file mode 100644 index 0000000000..39b2ac3b01 --- /dev/null +++ b/var/spack/packages/py-coverage/package.py @@ -0,0 +1,16 @@ +from spack import * + +class PyCoverage(Package): + """ Testing coverage checker for python """ + # FIXME: add a proper url for your package's homepage here. 
+ homepage = "http://nedbatchelder.com/code/coverage/" + url = "https://pypi.python.org/packages/source/c/coverage/coverage-4.0a6.tar.gz" + + version('4.0a6', '1bb4058062646148965bef0796b61efc') + + depends_on('py-setuptools') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-mysqldb1/package.py b/var/spack/packages/py-mysqldb1/package.py new file mode 100644 index 0000000000..fda02b4982 --- /dev/null +++ b/var/spack/packages/py-mysqldb1/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyMysqldb1(Package): + """Legacy mysql bindings for python""" + homepage = "https://github.com/farcepest/MySQLdb1" + url = "https://github.com/farcepest/MySQLdb1/archive/MySQLdb-1.2.5.tar.gz" + + version('1.2.5', '332c8f4955b6bc0c79ea15170bf7321b') + + extends('python') + depends_on('py-setuptools') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) + diff --git a/var/spack/packages/py-numpy/package.py b/var/spack/packages/py-numpy/package.py index efa109a3e9..4c085fba6e 100644 --- a/var/spack/packages/py-numpy/package.py +++ b/var/spack/packages/py-numpy/package.py @@ -13,13 +13,6 @@ class PyNumpy(Package): depends_on('netlib-blas+fpic') depends_on('netlib-lapack+shared') - def patch(self): - filter_file( - "possible_executables = \['(gfortran|g77|ifort|efl)", - "possible_executables = ['fc", - "numpy/distutils/fcompiler/gnu.py", - "numpy/distutils/fcompiler/intel.py") - def install(self, spec, prefix): with open('site.cfg', 'w') as f: f.write('[DEFAULT]\n') diff --git a/var/spack/packages/py-tappy/package.py b/var/spack/packages/py-tappy/package.py new file mode 100644 index 0000000000..df61a909da --- /dev/null +++ b/var/spack/packages/py-tappy/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyTappy(Package): + """Python TAP interface module for unit tests""" + homepage = "https://github.com/mblayman/tappy" + # base https://pypi.python.org/pypi/cffi + url = "https://pypi.python.org/packages/source/t/tap.py/tap.py-1.6.tar.gz" + + version('1.6', 'c8bdb93ad66e05f939905172a301bedf') + + extends('python') + depends_on('py-setuptools') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-urwid/package.py b/var/spack/packages/py-urwid/package.py new file mode 100644 index 0000000000..aaa11c681d --- /dev/null +++ b/var/spack/packages/py-urwid/package.py @@ -0,0 +1,16 @@ +from spack import * + +class PyUrwid(Package): + """A full-featured console UI library""" + homepage = "http://urwid.org/" + url = "https://pypi.python.org/packages/source/u/urwid/urwid-1.3.0.tar.gz" + + version('1.3.0', 'a989acd54f4ff1a554add464803a9175') + + depends_on('py-setuptools') + + extends("python") + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) + diff --git a/var/spack/packages/python/package.py b/var/spack/packages/python/package.py index 31fb8b3d5a..eae2566b57 100644 --- a/var/spack/packages/python/package.py +++ b/var/spack/packages/python/package.py @@ -2,6 +2,7 @@ import os import re from contextlib import closing from llnl.util.lang import match_predicate +from spack.util.environment import * from spack import * import spack @@ -16,22 +17,40 @@ class Python(Package): version('2.7.8', 'd235bdfa75b8396942e360a70487ee00') version('2.7.10', 'c685ef0b8e9f27b5e3db5db12b268ac6') + version('2.7.11', '1dbcc848b4cd8399a8199d000f9f823c', preferred=True) + 
version('3.5.0', 'd149d2812f10cbe04c042232e7964171') + version('3.5.1', 'e9ea6f2623fffcdd871b7b19113fde80') depends_on("openssl") depends_on("bzip2") depends_on("readline") depends_on("ncurses") depends_on("sqlite") + depends_on("zlib") def install(self, spec, prefix): # Need this to allow python build to find the Python installation. env['PYTHONHOME'] = prefix env['MACOSX_DEPLOYMENT_TARGET'] = '10.6' - # Rest of install is pretty standard. - configure("--prefix=%s" % prefix, + # Rest of install is pretty standard except setup.py needs to be able to read the CPPFLAGS + # and LDFLAGS as it scans for the library and headers to build + configure_args= [ + "--prefix=%s" % prefix, "--with-threads", - "--enable-shared") + "--enable-shared", + "CPPFLAGS=-I%s/include -I%s/include -I%s/include -I%s/include -I%s/include -I%s/include" % ( + spec['openssl'].prefix, spec['bzip2'].prefix, + spec['readline'].prefix, spec['ncurses'].prefix, + spec['sqlite'].prefix, spec['zlib'].prefix), + "LDFLAGS=-L%s/lib -L%s/lib -L%s/lib -L%s/lib -L%s/lib -L%s/lib" % ( + spec['openssl'].prefix, spec['bzip2'].prefix, + spec['readline'].prefix, spec['ncurses'].prefix, + spec['sqlite'].prefix, spec['zlib'].prefix) + ] + if spec.satisfies('@3:'): + configure_args.append('--without-ensurepip') + configure(*configure_args) make() make("install") @@ -63,7 +82,10 @@ class Python(Package): python('setup.py', 'install', '--prefix=%s' % prefix) """ # Python extension builds can have a global python executable function - module.python = Executable(join_path(spec.prefix.bin, 'python')) + if self.version >= Version("3.0.0") and self.version < Version("4.0.0"): + module.python = Executable(join_path(spec.prefix.bin, 'python3')) + else: + module.python = Executable(join_path(spec.prefix.bin, 'python')) # Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs. module.python_lib_dir = os.path.join(ext_spec.prefix, self.python_lib_dir) @@ -95,7 +117,7 @@ class Python(Package): # Ignore pieces of setuptools installed by other packages. if ext_pkg.name != 'py-setuptools': - patterns.append(r'/site\.pyc?$') + patterns.append(r'/site[^/]*\.pyc?$') patterns.append(r'setuptools\.pth') patterns.append(r'bin/easy_install[^/]*$') patterns.append(r'setuptools.*egg$') diff --git a/var/spack/packages/rsync/package.py b/var/spack/packages/rsync/package.py index 8ae21b1cb9..76aec3096d 100644 --- a/var/spack/packages/rsync/package.py +++ b/var/spack/packages/rsync/package.py @@ -5,6 +5,7 @@ class Rsync(Package): homepage = "https://rsync.samba.org" url = "https://download.samba.org/pub/rsync/rsync-3.1.1.tar.gz" + version('3.1.2', '0f758d7e000c0f7f7d3792610fad70cb') version('3.1.1', '43bd6676f0b404326eee2d63be3cdcfe') def install(self, spec, prefix): diff --git a/var/spack/packages/valgrind/package.py b/var/spack/packages/valgrind/package.py new file mode 100644 index 0000000000..0b030d73e9 --- /dev/null +++ b/var/spack/packages/valgrind/package.py @@ -0,0 +1,55 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## + +from spack import * + + +class Valgrind(Package): + """ + Valgrind is an instrumentation framework for building dynamic analysis tools. There are Valgrind tools that can + automatically detect many memory management and threading bugs, and profile your programs in detail. You can also + use Valgrind to build new tools. + + Valgrind is Open Source / Free Software, and is freely available under the GNU General Public License, version 2. + """ + homepage = "http://valgrind.org/" + url = "http://valgrind.org/downloads/valgrind-3.11.0.tar.bz2" + + version('3.11.0', '4ea62074da73ae82e0162d6550d3f129') + version('3.10.1', '60ddae962bc79e7c95cfc4667245707f') + version('3.10.0', '7c311a72a20388aceced1aa5573ce970') + + variant('mpi', default=True, description='Activates MPI support for valgrind') + variant('boost', default=True, description='Activates boost support for valgrind') + + depends_on('mpi', when='+mpi') + depends_on('boost', when='+boost') + + def install(self, spec, prefix): + options = ['--prefix=%s' % prefix, + '--enable-ubsan'] + configure(*options) + make() + make("install")
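For reference, a small sketch of the reworked web-scraping API in lib/spack/spack/util/web.py (the URLs here are illustrative, not from the commit): spider() replaces get_pages() and returns the fetched pages together with every link it saw, and find_versions_of_archive(), moved here from package.py, matches version-like archive URLs against that link set.

import spack.util.web

# spider() returns (pages, links): page text keyed by URL, plus all links seen.
pages, links = spack.util.web.spider('http://www.example.com/downloads/', depth=1)

# find_versions_of_archive() builds a dict of Version -> archive URL by
# scraping listing pages for links that match a wildcarded version regex.
versions = spack.util.web.find_versions_of_archive(
    'http://www.example.com/downloads/example-1.0.tar.gz', list_depth=2)
for v in sorted(versions):
    print "%-20s%s" % (v, versions[v])   # Python 2 print, matching the codebase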