26 files changed, 666 insertions(+), 386 deletions(-)
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 5e42860f3e..9ecb709110 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -34,8 +34,8 @@ from llnl.util.filesystem import mkdirp import spack import spack.cmd import spack.cmd.checksum -import spack.package import spack.url +import spack.util.web from spack.util.naming import * import spack.util.crypto as crypto @@ -166,7 +166,7 @@ def create(parser, args): tty.msg("This looks like a URL for %s version %s." % (name, version)) tty.msg("Creating template for package %s" % name) - versions = spack.package.find_versions_of_archive(url) + versions = spack.util.web.find_versions_of_archive(url) rkeys = sorted(versions.keys(), reverse=True) versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys))) diff --git a/lib/spack/spack/cmd/url-parse.py b/lib/spack/spack/cmd/url-parse.py new file mode 100644 index 0000000000..077c793d2e --- /dev/null +++ b/lib/spack/spack/cmd/url-parse.py @@ -0,0 +1,75 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import sys + +import llnl.util.tty as tty + +import spack +import spack.url +from spack.util.web import find_versions_of_archive + +description = "Show parsing of a URL, optionally spider web for other versions." 
+ +def setup_parser(subparser): + subparser.add_argument('url', help="url of a package archive") + subparser.add_argument( + '-s', '--spider', action='store_true', help="Spider the source page for versions.") + + +def print_name_and_version(url): + name, ns, nl, ntup, ver, vs, vl, vtup = spack.url.substitution_offsets(url) + underlines = [" "] * max(ns+nl, vs+vl) + for i in range(ns, ns+nl): + underlines[i] = '-' + for i in range(vs, vs+vl): + underlines[i] = '~' + + print " %s" % url + print " %s" % ''.join(underlines) + + +def url_parse(parser, args): + url = args.url + + ver, vs, vl = spack.url.parse_version_offset(url) + name, ns, nl = spack.url.parse_name_offset(url, ver) + + tty.msg("Parsing URL:") + try: + print_name_and_version(url) + except spack.url.UrlParseError as e: + tty.error(str(e)) + + print + tty.msg("Substituting version 9.9.9b:") + newurl = spack.url.substitute_version(url, '9.9.9b') + print_name_and_version(newurl) + + if args.spider: + print + tty.msg("Spidering for versions:") + versions = find_versions_of_archive(url) + for v in sorted(versions): + print "%-20s%s" % (v, versions[v]) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index a9374fb34b..0657146bf6 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -687,7 +687,7 @@ def for_package_version(pkg, version): class FetchError(spack.error.SpackError): - def __init__(self, msg, long_msg): + def __init__(self, msg, long_msg=None): super(FetchError, self).__init__(msg, long_msg) @@ -705,7 +705,7 @@ class NoArchiveFileError(FetchError): class NoDigestError(FetchError): - def __init__(self, msg, long_msg): + def __init__(self, msg, long_msg=None): super(NoDigestError, self).__init__(msg, long_msg) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index b95afb073d..84bcb15f7f 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -733,9 +733,10 @@ class Package(object): # Construct paths to special files in the archive dir used to # keep track of whether patches were successfully applied. - archive_dir = self.stage.source_path - good_file = join_path(archive_dir, '.spack_patched') - bad_file = join_path(archive_dir, '.spack_patch_failed') + archive_dir = self.stage.source_path + good_file = join_path(archive_dir, '.spack_patched') + no_patches_file = join_path(archive_dir, '.spack_no_patches') + bad_file = join_path(archive_dir, '.spack_patch_failed') # If we encounter an archive that failed to patch, restage it # so that we can apply all the patches again. @@ -749,29 +750,46 @@ class Package(object): if os.path.isfile(good_file): tty.msg("Already patched %s" % self.name) return + elif os.path.isfile(no_patches_file): + tty.msg("No patches needed for %s." % self.name) + return # Apply all the patches for specs that match this one + patched = False for spec, patch_list in self.patches.items(): if self.spec.satisfies(spec): for patch in patch_list: - tty.msg('Applying patch %s' % patch.path_or_url) try: patch.apply(self.stage) + tty.msg('Applied patch %s' % patch.path_or_url) + patched = True except: # Touch bad file if anything goes wrong. + tty.msg('Patch %s failed.' % patch.path_or_url) touch(bad_file) raise - # patch succeeded. Get rid of failed file & touch good file so we - # don't try to patch again again next time. + if has_patch_fun: + try: + self.patch() + tty.msg("Ran patch() for %s." % self.name) + patched = True + except: + tty.msg("patch() function failed for %s." 
% self.name) + touch(bad_file) + raise + + # Get rid of any old failed file -- patches have either succeeded + # or are not needed. This is mostly defensive -- it's needed + # if the restage() method doesn't clean *everything* (e.g., for a repo) if os.path.isfile(bad_file): os.remove(bad_file) - touch(good_file) - - if has_patch_fun: - self.patch() - tty.msg("Patched %s" % self.name) + # touch good or no patches file so that we skip next time. + if patched: + touch(good_file) + else: + touch(no_patches_file) def do_fake_install(self): @@ -1164,7 +1182,7 @@ class Package(object): raise VersionFetchError(self.__class__) try: - return find_versions_of_archive( + return spack.util.web.find_versions_of_archive( *self.all_urls, list_url=self.list_url, list_depth=self.list_depth) except spack.error.NoNetworkConnectionError, e: tty.die("Package.fetch_versions couldn't connect to:", @@ -1188,49 +1206,6 @@ class Package(object): return " ".join("-Wl,-rpath=%s" % p for p in self.rpath) -def find_versions_of_archive(*archive_urls, **kwargs): - list_url = kwargs.get('list_url', None) - list_depth = kwargs.get('list_depth', 1) - - # Generate a list of list_urls based on archive urls and any - # explicitly listed list_url in the package - list_urls = set() - if list_url: - list_urls.add(list_url) - for aurl in archive_urls: - list_urls.add(spack.url.find_list_url(aurl)) - - # Grab some web pages to scrape. - page_map = {} - for lurl in list_urls: - pages = spack.util.web.get_pages(lurl, depth=list_depth) - page_map.update(pages) - - # Scrape them for archive URLs - regexes = [] - for aurl in archive_urls: - # This creates a regex from the URL with a capture group for - # the version part of the URL. The capture group is converted - # to a generic wildcard, so we can use this to extract things - # on a page that look like archive URLs. - url_regex = spack.url.wildcard_version(aurl) - - # We'll be a bit more liberal and just look for the archive - # part, not the full path. - regexes.append(os.path.basename(url_regex)) - - # Build a version list from all the matches we find - versions = {} - for page_url, content in page_map.iteritems(): - # extract versions from matches. - for regex in regexes: - versions.update( - (Version(m.group(1)), urljoin(page_url, m.group(0))) - for m in re.finditer(regex, content)) - - return versions - - def validate_package_url(url_string): """Determine whether spack can handle a particular URL or not.""" url = urlparse(url_string) diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index 754344fc01..76ca7273cb 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -82,14 +82,18 @@ class Stage(object): stage object later). If name is not provided, then this stage will be given a unique name automatically. """ + # TODO: fetch/stage coupling needs to be reworked -- the logic + # TODO: here is convoluted and not modular enough. if isinstance(url_or_fetch_strategy, basestring): self.fetcher = fs.from_url(url_or_fetch_strategy) elif isinstance(url_or_fetch_strategy, fs.FetchStrategy): self.fetcher = url_or_fetch_strategy else: raise ValueError("Can't construct Stage without url or fetch strategy") - self.fetcher.set_stage(self) + self.default_fetcher = self.fetcher # self.fetcher can change with mirrors. + self.skip_checksum_for_mirror = True # used for mirrored archives of repositories. 
+ self.name = kwargs.get('name') self.mirror_path = kwargs.get('mirror_path') @@ -198,17 +202,18 @@ class Stage(object): @property def archive_file(self): """Path to the source archive within this stage directory.""" - if not isinstance(self.fetcher, fs.URLFetchStrategy): - return None + paths = [] + if isinstance(self.fetcher, fs.URLFetchStrategy): + paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url))) - paths = [os.path.join(self.path, os.path.basename(self.fetcher.url))] if self.mirror_path: paths.append(os.path.join(self.path, os.path.basename(self.mirror_path))) for path in paths: if os.path.exists(path): return path - return None + else: + return None @property @@ -238,23 +243,34 @@ class Stage(object): """Downloads an archive or checks out code from a repository.""" self.chdir() - fetchers = [self.fetcher] + fetchers = [self.default_fetcher] # TODO: move mirror logic out of here and clean it up! + # TODO: Or @alalazo may have some ideas about how to use a + # TODO: CompositeFetchStrategy here. + self.skip_checksum_for_mirror = True if self.mirror_path: urls = ["%s/%s" % (m, self.mirror_path) for m in _get_mirrors()] + # If this archive is normally fetched from a tarball URL, + # then use the same digest. `spack mirror` ensures that + # the checksum will be the same. digest = None - if isinstance(self.fetcher, fs.URLFetchStrategy): - digest = self.fetcher.digest - fetchers = [fs.URLFetchStrategy(url, digest) - for url in urls] + fetchers - for f in fetchers: - f.set_stage(self) + if isinstance(self.default_fetcher, fs.URLFetchStrategy): + digest = self.default_fetcher.digest + + # Have to skip the checkesum for things archived from + # repositories. How can this be made safer? + self.skip_checksum_for_mirror = not bool(digest) + + for url in urls: + fetchers.insert(0, fs.URLFetchStrategy(url, digest)) for fetcher in fetchers: try: - fetcher.fetch() + fetcher.set_stage(self) + self.fetcher = fetcher + self.fetcher.fetch() break except spack.error.SpackError, e: tty.msg("Fetching from %s failed." % fetcher) @@ -262,13 +278,22 @@ class Stage(object): continue else: errMessage = "All fetchers failed for %s" % self.name + self.fetcher = self.default_fetcher raise fs.FetchError(errMessage, None) def check(self): """Check the downloaded archive against a checksum digest. No-op if this stage checks code out of a repository.""" - self.fetcher.check() + if self.fetcher is not self.default_fetcher and self.skip_checksum_for_mirror: + tty.warn("Fetching from mirror without a checksum!", + "This package is normally checked out from a version " + "control system, but it has been archived on a spack " + "mirror. This means we cannot know a checksum for the " + "tarball in advance. Be sure that your connection to " + "this mirror is secure!.") + else: + self.fetcher.check() def expand_archive(self): diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py index 94384e9c86..e26daef296 100644 --- a/lib/spack/spack/util/web.py +++ b/lib/spack/spack/util/web.py @@ -23,6 +23,7 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import re +import os import sys import subprocess import urllib2, cookielib @@ -70,7 +71,9 @@ def _spider(args): """ url, visited, root, opener, depth, max_depth, raise_on_error = args - pages = {} + pages = {} # dict from page URL -> text content. + links = set() # set of all links seen on visited pages. 
+ try: # Make a HEAD request first to check the content type. This lets # us ignore tarballs and gigantic files. @@ -99,42 +102,45 @@ def _spider(args): page = response.read() pages[response_url] = page - # If we're not at max depth, parse out the links in the page - if depth < max_depth: - link_parser = LinkParser() - subcalls = [] - link_parser.feed(page) - - while link_parser.links: - raw_link = link_parser.links.pop() + # Parse out the links in the page + link_parser = LinkParser() + subcalls = [] + link_parser.feed(page) - # Skip stuff that looks like an archive - if any(raw_link.endswith(suf) for suf in ALLOWED_ARCHIVE_TYPES): - continue + while link_parser.links: + raw_link = link_parser.links.pop() + abs_link = urlparse.urljoin(response_url, raw_link) - # Evaluate the link relative to the page it came from. - abs_link = urlparse.urljoin(response_url, raw_link) + links.add(abs_link) - # Skip things outside the root directory - if not abs_link.startswith(root): - continue + # Skip stuff that looks like an archive + if any(raw_link.endswith(suf) for suf in ALLOWED_ARCHIVE_TYPES): + continue - # Skip already-visited links - if abs_link in visited: - continue + # Skip things outside the root directory + if not abs_link.startswith(root): + continue - subcalls.append((abs_link, visited, root, None, depth+1, max_depth, raise_on_error)) - visited.add(abs_link) + # Skip already-visited links + if abs_link in visited: + continue - if subcalls: - try: - pool = Pool(processes=len(subcalls)) - dicts = pool.map(_spider, subcalls) - for d in dicts: - pages.update(d) - finally: - pool.terminate() - pool.join() + # If we're not at max depth, follow links. + if depth < max_depth: + subcalls.append((abs_link, visited, root, None, + depth+1, max_depth, raise_on_error)) + visited.add(abs_link) + + if subcalls: + try: + pool = Pool(processes=len(subcalls)) + results = pool.map(_spider, subcalls) + for sub_pages, sub_links in results: + pages.update(sub_pages) + links.update(sub_links) + finally: + pool.terminate() + pool.join() except urllib2.URLError, e: tty.debug(e) @@ -155,10 +161,10 @@ def _spider(args): # Other types of errors are completely ignored, except in debug mode. tty.debug("Error in _spider: %s" % e) - return pages + return pages, links -def get_pages(root_url, **kwargs): +def spider(root_url, **kwargs): """Gets web pages from a root URL. If depth is specified (e.g., depth=2), then this will also fetches pages linked from the root and its children up to depth. @@ -167,5 +173,69 @@ def get_pages(root_url, **kwargs): performance over a sequential fetch. """ max_depth = kwargs.setdefault('depth', 1) - pages = _spider((root_url, set(), root_url, None, 1, max_depth, False)) - return pages + pages, links = _spider((root_url, set(), root_url, None, 1, max_depth, False)) + return pages, links + + +def find_versions_of_archive(*archive_urls, **kwargs): + """Scrape web pages for new versions of a tarball. + + Arguments: + archive_urls: + URLs for different versions of a package. Typically these + are just the tarballs from the package file itself. By + default, this searches the parent directories of archives. + + Keyword Arguments: + list_url: + + URL for a listing of archives. Spack wills scrape these + pages for download links that look like the archive URL. + + list_depth: + Max depth to follow links on list_url pages. 
+ + """ + list_url = kwargs.get('list_url', None) + list_depth = kwargs.get('list_depth', 1) + + # Generate a list of list_urls based on archive urls and any + # explicitly listed list_url in the package + list_urls = set() + if list_url: + list_urls.add(list_url) + for aurl in archive_urls: + list_urls.add(spack.url.find_list_url(aurl)) + + # Grab some web pages to scrape. + pages = {} + links = set() + for lurl in list_urls: + p, l = spider(lurl, depth=list_depth) + pages.update(p) + links.update(l) + + # Scrape them for archive URLs + regexes = [] + for aurl in archive_urls: + # This creates a regex from the URL with a capture group for + # the version part of the URL. The capture group is converted + # to a generic wildcard, so we can use this to extract things + # on a page that look like archive URLs. + url_regex = spack.url.wildcard_version(aurl) + + # We'll be a bit more liberal and just look for the archive + # part, not the full path. + regexes.append(os.path.basename(url_regex)) + + # Build a dict version -> URL from any links that match the wildcards. + versions = {} + for url in links: + if any(re.search(r, url) for r in regexes): + try: + ver = spack.url.parse_version(url) + versions[ver] = url + except spack.url.UndetectableVersionError as e: + continue + + return versions diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh index 47202f6087..586a5b836b 100755 --- a/share/spack/setup-env.sh +++ b/share/spack/setup-env.sh @@ -55,13 +55,11 @@ # avoids the need to come up with a user-friendly naming scheme for # spack dotfiles. ######################################################################## + function spack { # save raw arguments into an array before butchering them - args=() - for a in "$@"; do - # yup, this is awful, blame bash2 compat - args=("${args[@]}" "$a") - done + args=( "$@" ) + # accumulate initial flags for main spack command _sp_flags="" while [[ "$1" =~ ^- ]]; do diff --git a/var/spack/packages/boost/package.py b/var/spack/packages/boost/package.py index 81dadbbf61..9b7c22c53d 100644 --- a/var/spack/packages/boost/package.py +++ b/var/spack/packages/boost/package.py @@ -14,6 +14,7 @@ class Boost(Package): list_url = "http://sourceforge.net/projects/boost/files/boost/" list_depth = 2 + version('1.60.0', '65a840e1a0b13a558ff19eeb2c4f0cbe') version('1.59.0', '6aa9a5c6a4ca1016edd0ed1178e3cb87') version('1.58.0', 'b8839650e61e9c1c0a89f371dd475546') version('1.57.0', '1be49befbdd9a5ce9def2983ba3e7b76') @@ -48,11 +49,11 @@ class Boost(Package): variant('mpi', default=False, description='Activate the component Boost.MPI') variant('compression', default=True, description='Activate the compression Boost.iostreams') - depends_on('mpi', when='+mpi') depends_on('python', when='+python') - depends_on('zlib', when='+compression') + depends_on('mpi', when='+mpi') depends_on('bzip2', when='+compression') - + depends_on('zlib', when='+compression') + def url_for_version(self, version): """Handle Boost's weird URLs, which write the version two different ways.""" parts = [str(p) for p in Version(version)] @@ -61,20 +62,23 @@ class Boost(Package): return "http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2" % ( dots, underscores) - def determine_toolset(self): - toolsets = {'gcc': 'gcc', + def determine_toolset(self, spec): + if spec.satisfies("=darwin-x86_64"): + return 'darwin' + + toolsets = {'g++': 'gcc', 'icpc': 'intel', 'clang++': 'clang'} for cc, toolset in toolsets.iteritems(): - if(cc in self.compiler.cxx_names): + if cc in 
self.compiler.cxx_names: return toolset # fallback to gcc if no toolset found return 'gcc' def determine_bootstrap_options(self, spec, options): - options.append('--with-toolset=%s' % self.determine_toolset()) + options.append('--with-toolset=%s' % self.determine_toolset(spec)) without_libs = [] if '~mpi' in spec: @@ -82,17 +86,20 @@ class Boost(Package): if '~python' in spec: without_libs.append('python') else: - options.append('--with-python=%s' % (spec['python'].prefix.bin + '/python')) + options.append('--with-python=%s' % + join_path(spec['python'].prefix.bin, 'python')) if without_libs: options.append('--without-libraries=%s' % ','.join(without_libs)) with open('user-config.jam', 'w') as f: if '+mpi' in spec: - f.write('using mpi : %s ;\n' % (spec['mpi'].prefix.bin + '/mpicxx')) + f.write('using mpi : %s ;\n' % + joinpath(spec['mpi'].prefix.bin, 'mpicxx')) if '+python' in spec: - f.write('using python : %s : %s ;\n' % (spec['python'].version, - (spec['python'].prefix.bin + '/python'))) + f.write('using python : %s : %s ;\n' % + (spec['python'].version, + joinpath(spec['python'].prefix.bin, 'python'))) def determine_b2_options(self, spec, options): if '+debug' in spec: @@ -101,22 +108,26 @@ class Boost(Package): options.append('variant=release') if '~compression' in spec: - options.extend(['-s NO_BZIP2=1', - '-s NO_ZLIB=1', - ]) + options.extend([ + '-s', 'NO_BZIP2=1', + '-s', 'NO_ZLIB=1']) if '+compression' in spec: - options.extend(['-s BZIP2_INCLUDE=%s' % spec['bzip2'].prefix.include, - '-s BZIP2_LIBPATH=%s' % spec['bzip2'].prefix.lib, - '-s ZLIB_INCLUDE=%s' % spec['zlib'].prefix.include, - '-s ZLIB_LIBPATH=%s' % spec['zlib'].prefix.lib]) - - options.extend(['toolset=%s' % self.determine_toolset(), - 'link=static,shared', - '--layout=tagged']) + options.extend([ + '-s', 'BZIP2_INCLUDE=%s' % spec['bzip2'].prefix.include, + '-s', 'BZIP2_LIBPATH=%s' % spec['bzip2'].prefix.lib, + '-s', 'ZLIB_INCLUDE=%s' % spec['zlib'].prefix.include, + '-s', 'ZLIB_LIBPATH=%s' % spec['zlib'].prefix.lib, + ]) + + options.extend([ + 'toolset=%s' % self.determine_toolset(spec), + 'link=static,shared', + 'threading=single,multi', + '--layout=tagged']) def install(self, spec, prefix): - # to make him find the user-config.jam + # to make Boost find the user-config.jam env['BOOST_BUILD_PATH'] = './' bootstrap = Executable('./bootstrap.sh') @@ -130,9 +141,8 @@ class Boost(Package): b2name = './b2' if spec.satisfies('@1.47:') else './bjam' b2 = Executable(b2name) - b2_options = ['-j %s' % make_jobs] + b2_options = ['-j', '%s' % make_jobs] self.determine_b2_options(spec, b2_options) - b2('install', 'threading=single', *b2_options) - b2('install', 'threading=multi', *b2_options) + b2('install', *b2_options) diff --git a/var/spack/packages/cube/package.py b/var/spack/packages/cube/package.py index cc1c684594..17d388c33d 100644 --- a/var/spack/packages/cube/package.py +++ b/var/spack/packages/cube/package.py @@ -1,58 +1,55 @@ -# FIXME: Add copyright statement +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. # +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## + from spack import * -from contextlib import closing + class Cube(Package): - """Cube the profile viewer for Score-P and Scalasca profiles. It - displays a multi-dimensional performance space consisting - of the dimensions (i) performance metric, (ii) call path, - and (iii) system resource.""" + """ + Cube the profile viewer for Score-P and Scalasca profiles. It displays a multi-dimensional performance space + consisting of the dimensions: + - performance metric + - call path + - system resource + """ homepage = "http://www.scalasca.org/software/cube-4.x/download.html" - url = "http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz" + url = "http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz" version('4.3.3', '07e109248ed8ffc7bdcce614264a2909', url='http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3.3.tar.gz') - version('4.2.3', '8f95b9531f5a8f8134f279c2767c9b20') - - version('4.3TP1', 'a2090fbc7b2ba394bd5c09ba971e237f', - url = 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz') - - # Using CC as C++ compiler provides quirky workaround for a Score-P build system attempt - # to guess a matching C compiler when configuring scorep-score - backend_user_provided = """\ -CC=cc -CXX=CC -F77=f77 -FC=f90 -#CFLAGS=-fPIC -#CXXFLAGS=-fPIC -""" - frontend_user_provided = """\ -CC_FOR_BUILD=cc -CXX_FOR_BUILD=CC -F77_FOR_BUILD=f70 -FC_FOR_BUILD=f90 -""" + version('4.2.3', '8f95b9531f5a8f8134f279c2767c9b20', + url="http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz") - def install(self, spec, prefix): - # Use a custom compiler configuration, otherwise the score-p - # build system messes with spack's compiler settings. 
- # Create these three files in the build directory - - with closing(open("vendor/common/build-config/platforms/platform-backend-user-provided", "w")) as backend_file: - backend_file.write(self.backend_user_provided) - with closing(open("vendor/common/build-config/platforms/platform-frontend-user-provided", "w")) as frontend_file: - frontend_file.write(self.frontend_user_provided) + # TODO : add variant that builds GUI on top of Qt + def install(self, spec, prefix): configure_args = ["--prefix=%s" % prefix, - "--with-custom-compilers", - "--without-paraver", + "--without-paraver", "--without-gui"] - configure(*configure_args) - make(parallel=False) make("install", parallel=False) diff --git a/var/spack/packages/fftw/package.py b/var/spack/packages/fftw/package.py index 5f71762c4f..4d2b964242 100644 --- a/var/spack/packages/fftw/package.py +++ b/var/spack/packages/fftw/package.py @@ -39,54 +39,21 @@ class Fftw(Package): version('3.3.4', '2edab8c06b24feeb3b82bbb3ebf3e7b3') - ########## - # Floating point precision - FLOAT = 'float' - LONG_DOUBLE = 'long_double' - QUAD_PRECISION = 'quad' - PRECISION_OPTIONS = { - FLOAT: '--enable-float', - LONG_DOUBLE: '--enable--long-double', - QUAD_PRECISION: '--enable-quad-precision' - } - variant(FLOAT, default=False, description='Produces a single precision version of the library') - variant(LONG_DOUBLE, default=False, description='Produces a long double precision version of the library') - variant(QUAD_PRECISION, default=False, description='Produces a quad precision version of the library (works only with GCC and libquadmath)') - ########## + variant('float', default=True, description='Produces a single precision version of the library') + variant('long_double', default=True, description='Produces a long double precision version of the library') + variant('quad', default=False, description='Produces a quad precision version of the library (works only with GCC and libquadmath)') variant('mpi', default=False, description='Activate MPI support') depends_on('mpi', when='+mpi') - @staticmethod - def enabled(x): - """ - Given a variant name returns the string that means the variant is enabled - - :param x: variant name - """ - # FIXME : duplicated from MVAPICH2 - return '+' + x - - def check_fortran_availability(self, options): - if not self.compiler.f77 or not self.compiler.fc: - options.append("--disable-fortran") - - def set_floating_point_precision(self, spec, options): - l = [option for variant, option in Fftw.PRECISION_OPTIONS.iteritems() if self.enabled(variant) in spec] - if len(l) > 1: - raise RuntimeError('At most one floating point precision variant may activated per build.') - options.extend(l) - def install(self, spec, prefix): - options = ['--prefix=%s' % prefix, '--enable-shared', '--enable-threads', '--enable-openmp'] - self.check_fortran_availability(options) - self.set_floating_point_precision(spec, options) - + if not self.compiler.f77 or not self.compiler.fc: + options.append("--disable-fortran") if '+mpi' in spec: options.append('--enable-mpi') @@ -94,3 +61,15 @@ class Fftw(Package): make() make("install") + if '+float' in spec: + configure('--enable-float', *options) + make() + make("install") + if '+long_double' in spec: + configure('--enable-long-double', *options) + make() + make("install") + if '+quad' in spec: + configure('--enable-quad-precision', *options) + make() + make("install") diff --git a/var/spack/packages/hwloc/package.py b/var/spack/packages/hwloc/package.py index 452a7d7ce3..7ebede76a3 100644 --- 
a/var/spack/packages/hwloc/package.py +++ b/var/spack/packages/hwloc/package.py @@ -15,6 +15,8 @@ class Hwloc(Package): homepage = "http://www.open-mpi.org/projects/hwloc/" url = "http://www.open-mpi.org/software/hwloc/v1.9/downloads/hwloc-1.9.tar.gz" + version('1.11.2', '486169cbe111cdea57be12638828ebbf', + url='http://www.open-mpi.org/software/hwloc/v1.11/downloads/hwloc-1.11.2.tar.bz2') version('1.11.1', '002742efd3a8431f98d6315365a2b543', url='http://www.open-mpi.org/software/hwloc/v1.11/downloads/hwloc-1.11.1.tar.bz2') version('1.9', '1f9f9155682fe8946a97c08896109508') @@ -26,4 +28,3 @@ class Hwloc(Package): make() make("install") - diff --git a/var/spack/packages/julia/package.py b/var/spack/packages/julia/package.py new file mode 100644 index 0000000000..9fd946c905 --- /dev/null +++ b/var/spack/packages/julia/package.py @@ -0,0 +1,66 @@ +from spack import * +import os + +class Julia(Package): + """The Julia Language: A fresh approach to technical computing""" + homepage = "http://julialang.org" + url = "http://github.com/JuliaLang/julia/releases/download/v0.4.2/julia-0.4.2.tar.gz" + + version('0.4.2', 'ccfeb4f4090c8b31083f5e1ccb03eb06') + + # Build-time dependencies + # depends_on("cmake") + # depends_on("awk") + # depends_on("m4") + # depends_on("pkg-config") + + # I think that Julia requires the dependencies above, but it builds find (on + # my system) without these. We should enable them as necessary. + + # Run-time dependencies + # depends_on("arpack") + # depends_on("fftw +float") + # depends_on("gmp") + # depends_on("mpfr") + # depends_on("pcre2") + + # ARPACK: Requires BLAS and LAPACK; needs to use the same version as Julia. + + # BLAS and LAPACK: Julia prefers 64-bit versions on 64-bit systems. OpenBLAS + # has an option for this; make it available as variant. + + # FFTW: Something doesn't work when using a pre-installed FFTW library; need + # to investigate. + + # GMP, MPFR: Something doesn't work when using a pre-installed FFTW library; + # need to investigate. + + # LLVM: Julia works only with specific versions, and might require patches. + # Thus we let Julia install its own LLVM. + + # Other possible dependencies: + # USE_SYSTEM_OPENLIBM=0 + # USE_SYSTEM_OPENSPECFUN=0 + # USE_SYSTEM_DSFMT=0 + # USE_SYSTEM_SUITESPARSE=0 + # USE_SYSTEM_UTF8PROC=0 + # USE_SYSTEM_LIBGIT2=0 + + def install(self, spec, prefix): + # Explicitly setting CC, CXX, or FC breaks building libuv, one of + # Julia's dependencies. This might be a Darwin-specific problem. Given + # how Spack sets up compilers, Julia should still use Spack's compilers, + # even if we don't specify them explicitly. 
+ options = [#"CC=cc", + #"CXX=c++", + #"FC=fc", + #"USE_SYSTEM_ARPACK=1", + #"USE_SYSTEM_FFTW=1", + #"USE_SYSTEM_GMP=1", + #"USE_SYSTEM_MPFR=1", + #TODO "USE_SYSTEM_PCRE=1", + "prefix=%s" % prefix] + with open('Make.user', 'w') as f: + f.write('\n'.join(options) + '\n') + make() + make("install") diff --git a/var/spack/packages/libedit/package.py b/var/spack/packages/libedit/package.py new file mode 100644 index 0000000000..bcd5212b9e --- /dev/null +++ b/var/spack/packages/libedit/package.py @@ -0,0 +1,14 @@ +from spack import * + +class Libedit(Package): + """An autotools compatible port of the NetBSD editline library""" + homepage = "http://thrysoee.dk/editline/" + url = "http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz" + + version('3.1', '43cdb5df3061d78b5e9d59109871b4f6', url="http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz") + + def install(self, spec, prefix): + configure('--prefix=%s' % prefix) + + make() + make("install") diff --git a/var/spack/packages/libpciaccess/package.py b/var/spack/packages/libpciaccess/package.py index 6022fc34a3..403bafbbd2 100644 --- a/var/spack/packages/libpciaccess/package.py +++ b/var/spack/packages/libpciaccess/package.py @@ -1,4 +1,5 @@ from spack import * +import os.path class Libpciaccess(Package): """Generic PCI access library.""" @@ -13,6 +14,12 @@ class Libpciaccess(Package): depends_on('libtool') def install(self, spec, prefix): + # libpciaccess does not support OS X + if spec.satisfies('=darwin-x86_64'): + # create a dummy directory + mkdir(prefix.lib) + return + from subprocess import call call(["./autogen.sh"]) configure("--prefix=%s" % prefix) diff --git a/var/spack/packages/ninja/package.py b/var/spack/packages/ninja/package.py new file mode 100644 index 0000000000..9e6bf4e358 --- /dev/null +++ b/var/spack/packages/ninja/package.py @@ -0,0 +1,22 @@ +from spack import * +import os + +class Ninja(Package): + """ A small, fast Make alternative """ + homepage = "https://martine.github.io/ninja/" + url = "https://github.com/martine/ninja/archive/v1.6.0.tar.gz" + + version('1.6.0', '254133059f2da79d8727f654d7198f43') + + extends('python') + + def install(self, spec, prefix): + sh = which('sh') + python('configure.py', '--bootstrap') + + cp = which('cp') + + bindir = os.path.join(prefix, 'bin') + mkdir(bindir) + cp('-a', '-t', bindir, 'ninja') + cp('-ra', 'misc', prefix) diff --git a/var/spack/packages/opari2/package.py b/var/spack/packages/opari2/package.py index 3f8c65377d..c68978f5c0 100644 --- a/var/spack/packages/opari2/package.py +++ b/var/spack/packages/opari2/package.py @@ -1,18 +1,38 @@ -# FIXME: Add copyright statement here +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## from spack import * -from contextlib import closing class Opari2(Package): - """OPARI2 is a source-to-source instrumentation tool for OpenMP and - hybrid codes. It surrounds OpenMP directives and runtime library - calls with calls to the POMP2 measurement interface. - OPARI2 will provide you with a new initialization method that allows - for multi-directory and parallel builds as well as the usage of - pre-instrumented libraries. Furthermore, an efficient way of - tracking parent-child relationships was added. Additionally, we - extended OPARI2 to support instrumentation of OpenMP 3.0 - tied tasks. """ + """ + OPARI2 is a source-to-source instrumentation tool for OpenMP and hybrid codes. It surrounds OpenMP directives and + runtime library calls with calls to the POMP2 measurement interface. OPARI2 will provide you with a new + initialization method that allows for multi-directory and parallel builds as well as the usage of pre-instrumented + libraries. Furthermore, an efficient way of tracking parent-child relationships was added. Additionally, we extended + OPARI2 to support instrumentation of OpenMP 3.0 tied tasks. + """ homepage = "http://www.vi-hps.org/projects/score-p" url = "http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.2.tar.gz" @@ -21,47 +41,8 @@ class Opari2(Package): url='http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.4.tar.gz') version('1.1.2', '9a262c7ca05ff0ab5f7775ae96f3539e') - backend_user_provided = """\ -CC=cc -CXX=c++ -F77=f77 -FC=f90 -CFLAGS=-fPIC -CXXFLAGS=-fPIC -""" - frontend_user_provided = """\ -CC_FOR_BUILD=cc -CXX_FOR_BUILD=c++ -F77_FOR_BUILD=f70 -FC_FOR_BUILD=f90 -CFLAGS_FOR_BUILD=-fPIC -CXXFLAGS_FOR_BUILD=-fPIC -""" - mpi_user_provided = """\ -MPICC=mpicc -MPICXX=mpicxx -MPIF77=mpif77 -MPIFC=mpif90 -MPI_CFLAGS=-fPIC -MPI_CXXFLAGS=-fPIC -""" - def install(self, spec, prefix): - # Use a custom compiler configuration, otherwise the score-p - # build system messes with spack's compiler settings. - # Create these three files in the build directory - with closing(open("platform-backend-user-provided", "w")) as backend_file: - backend_file.write(self.backend_user_provided) - with closing(open("platform-frontend-user-provided", "w")) as frontend_file: - frontend_file.write(self.frontend_user_provided) - with closing(open("platform-mpi-user-provided", "w")) as mpi_file: - mpi_file.write(self.mpi_user_provided) - - # FIXME: Modify the configure line to suit your build system here. configure("--prefix=%s" % prefix, - "--with-custom-compilers", "--enable-shared") - - # FIXME: Add logic to build and install here make() make("install") diff --git a/var/spack/packages/otf2/package.py b/var/spack/packages/otf2/package.py index b3d3a5b663..c3d61bc228 100644 --- a/var/spack/packages/otf2/package.py +++ b/var/spack/packages/otf2/package.py @@ -1,12 +1,35 @@ -# FIXME: Add copyright +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. 
+# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## from spack import * -from contextlib import closing -import os + class Otf2(Package): - """The Open Trace Format 2 is a highly scalable, memory efficient event - trace data format plus support library.""" + """ + The Open Trace Format 2 is a highly scalable, memory efficient event trace data format plus support library. + """ homepage = "http://www.vi-hps.org/score-p" url = "http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz" @@ -22,57 +45,11 @@ class Otf2(Package): version('1.2.1', '8fb3e11fb7489896596ae2c7c83d7fc8', url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.2.1.tar.gz") - backend_user_provided = """\ -CC=cc -CXX=c++ -F77=f77 -FC=f90 -CFLAGS=-fPIC -CXXFLAGS=-fPIC -""" - frontend_user_provided = """\ -CC_FOR_BUILD=cc -CXX_FOR_BUILD=c++ -F77_FOR_BUILD=f70 -FC_FOR_BUILD=f90 -CFLAGS_FOR_BUILD=-fPIC -CXXFLAGS_FOR_BUILD=-fPIC -""" - mpi_user_provided = """\ -MPICC=cc -MPICXX=c++ -MPIF77=f77 -MPIFC=f90 -MPI_CFLAGS=-fPIC -MPI_CXXFLAGS=-fPIC -""" - - @when('@:1.2.1') - def version_specific_args(self): - return ["--with-platform=disabled", "CC=cc", "CXX=c++", "F77=f77", "F90=f90", "CFLAGS=-fPIC", "CXXFLAGS=-fPIC"] - - @when('@1.3:') - def version_specific_args(self): - # TODO: figure out what scorep's build does as of otf2 1.3 - return ["--with-custom-compilers"] - def install(self, spec, prefix): - # Use a custom compiler configuration, otherwise the score-p - # build system messes with spack's compiler settings. - # Create these three files in the build directory - with closing(open("platform-backend-user-provided", "w")) as backend_file: - backend_file.write(self.backend_user_provided) - with closing(open("platform-frontend-user-provided", "w")) as frontend_file: - frontend_file.write(self.frontend_user_provided) - with closing(open("platform-mpi-user-provided", "w")) as mpi_file: - mpi_file.write(self.mpi_user_provided) - configure_args=["--prefix=%s" % prefix, - "--enable-shared"] - - configure_args.extend(self.version_specific_args()) - + "--enable-shared", + "CFLAGS=-fPIC", + "CXXFLAGS=-fPIC"] configure(*configure_args) - make() make("install") diff --git a/var/spack/packages/pcre2/package.py b/var/spack/packages/pcre2/package.py new file mode 100644 index 0000000000..6a0244a15e --- /dev/null +++ b/var/spack/packages/pcre2/package.py @@ -0,0 +1,15 @@ +from spack import * + +class Pcre2(Package): + """The PCRE2 package contains Perl Compatible Regular Expression + libraries. 
These are useful for implementing regular expression + pattern matching using the same syntax and semantics as Perl 5.""" + homepage = "http://www.pcre.org""" + url = "ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/pcre2-10.20.tar.bz2" + + version('10.20', 'dcd027c57ecfdc8a6c3af9d0acf5e3f7') + + def install(self, spec, prefix): + configure("--prefix=%s" % prefix) + make() + make("install") diff --git a/var/spack/packages/py-blessings/package.py b/var/spack/packages/py-blessings/package.py new file mode 100644 index 0000000000..f2475a0efd --- /dev/null +++ b/var/spack/packages/py-blessings/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyBlessings(Package): + """A nicer, kinder way to write to the terminal """ + homepage = "https://github.com/erikrose/blessings" + url = "https://pypi.python.org/packages/source/b/blessings/blessings-1.6.tar.gz" + + version('1.6', '4f552a8ebcd4982693c92571beb99394') + + depends_on('py-setuptools') + + extends("python") + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-coverage/package.py b/var/spack/packages/py-coverage/package.py new file mode 100644 index 0000000000..39b2ac3b01 --- /dev/null +++ b/var/spack/packages/py-coverage/package.py @@ -0,0 +1,16 @@ +from spack import * + +class PyCoverage(Package): + """ Testing coverage checker for python """ + # FIXME: add a proper url for your package's homepage here. + homepage = "http://nedbatchelder.com/code/coverage/" + url = "https://pypi.python.org/packages/source/c/coverage/coverage-4.0a6.tar.gz" + + version('4.0a6', '1bb4058062646148965bef0796b61efc') + + depends_on('py-setuptools') + + extends('python') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-mysqldb1/package.py b/var/spack/packages/py-mysqldb1/package.py new file mode 100644 index 0000000000..fda02b4982 --- /dev/null +++ b/var/spack/packages/py-mysqldb1/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyMysqldb1(Package): + """Legacy mysql bindings for python""" + homepage = "https://github.com/farcepest/MySQLdb1" + url = "https://github.com/farcepest/MySQLdb1/archive/MySQLdb-1.2.5.tar.gz" + + version('1.2.5', '332c8f4955b6bc0c79ea15170bf7321b') + + extends('python') + depends_on('py-setuptools') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) + diff --git a/var/spack/packages/py-tappy/package.py b/var/spack/packages/py-tappy/package.py new file mode 100644 index 0000000000..df61a909da --- /dev/null +++ b/var/spack/packages/py-tappy/package.py @@ -0,0 +1,15 @@ +from spack import * + +class PyTappy(Package): + """Python TAP interface module for unit tests""" + homepage = "https://github.com/mblayman/tappy" + # base https://pypi.python.org/pypi/cffi + url = "https://pypi.python.org/packages/source/t/tap.py/tap.py-1.6.tar.gz" + + version('1.6', 'c8bdb93ad66e05f939905172a301bedf') + + extends('python') + depends_on('py-setuptools') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/packages/py-urwid/package.py b/var/spack/packages/py-urwid/package.py new file mode 100644 index 0000000000..aaa11c681d --- /dev/null +++ b/var/spack/packages/py-urwid/package.py @@ -0,0 +1,16 @@ +from spack import * + +class PyUrwid(Package): + """A full-featured console UI library""" + homepage = "http://urwid.org/" + url = 
"https://pypi.python.org/packages/source/u/urwid/urwid-1.3.0.tar.gz" + + version('1.3.0', 'a989acd54f4ff1a554add464803a9175') + + depends_on('py-setuptools') + + extends("python") + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix) + diff --git a/var/spack/packages/rsync/package.py b/var/spack/packages/rsync/package.py index 8ae21b1cb9..76aec3096d 100644 --- a/var/spack/packages/rsync/package.py +++ b/var/spack/packages/rsync/package.py @@ -5,6 +5,7 @@ class Rsync(Package): homepage = "https://rsync.samba.org" url = "https://download.samba.org/pub/rsync/rsync-3.1.1.tar.gz" + version('3.1.2', '0f758d7e000c0f7f7d3792610fad70cb') version('3.1.1', '43bd6676f0b404326eee2d63be3cdcfe') def install(self, spec, prefix): diff --git a/var/spack/packages/scalasca/package.py b/var/spack/packages/scalasca/package.py index cf7a40c1f5..6de14564b2 100644 --- a/var/spack/packages/scalasca/package.py +++ b/var/spack/packages/scalasca/package.py @@ -1,65 +1,63 @@ -# FIXME: Add copyright +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## from spack import * + class Scalasca(Package): - """Scalasca is a software tool that supports the performance optimization - of parallel programs by measuring and analyzing their runtime behavior. - The analysis identifies potential performance bottlenecks - in - particular those concerning communication and synchronization - and - offers guidance in exploring their causes.""" + """ + Scalasca is a software tool that supports the performance optimization of parallel programs by measuring and + analyzing their runtime behavior. The analysis identifies potential performance bottlenecks - in particular those + concerning communication and synchronization - and offers guidance in exploring their causes. + """ - # FIXME: add a proper url for your package's homepage here. 
homepage = "http://www.scalasca.org" - url = "http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz" + url = "http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz" - version('2.1', 'bab9c2b021e51e2ba187feec442b96e6', - url = 'http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz' ) + version('2.2.2', '2bafce988b0522d18072f7771e491ab9', + url='http://apps.fz-juelich.de/scalasca/releases/scalasca/2.2/dist/scalasca-2.2.2.tar.gz') - depends_on("mpi") - depends_on("otf2@1.4") - depends_on("cube@4.2.3") + version('2.1', 'bab9c2b021e51e2ba187feec442b96e6', + url='http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz') - backend_user_provided = """\ -CC=cc -CXX=c++ -F77=f77 -FC=f90 -CFLAGS=-fPIC -CXXFLAGS=-fPIC -""" - frontend_user_provided = """\ -CC_FOR_BUILD=cc -CXX_FOR_BUILD=c++ -F77_FOR_BUILD=f70 -FC_FOR_BUILD=f90 -CFLAGS_FOR_BUILD=-fPIC -CXXFLAGS_FOR_BUILD=-fPIC -""" - mpi_user_provided = """\ -MPICC=mpicc -MPICXX=mpicxx -MPIF77=mpif77 -MPIFC=mpif90 -MPI_CFLAGS=-fPIC -MPI_CXXFLAGS=-fPIC -""" + depends_on("mpi") + ########## + # Hard-code dependencies for Scalasca according to what stated in the release page + # The OTF2 library path should be detected automatically from SCOREP + # SCALASCA 2.2.2 + depends_on("scorep@1.4:", when='@2.2.2') + depends_on("cube@4.3:", when='@2.2.2') + # SCALASCA 2.1 + depends_on("scorep@1.3", when='@2.1') + depends_on("cube@4.2:", when='@2.1') + ########## def install(self, spec, prefix): configure_args = ["--prefix=%s" % prefix, - "--with-custom-compilers", - "--with-otf2=%s" % spec['otf2'].prefix.bin, "--with-cube=%s" % spec['cube'].prefix.bin, "--enable-shared"] - configure(*configure_args) - - make() - make("install") - - # FIXME: Modify the configure line to suit your build system here. - configure("--prefix=%s" % prefix) - - # FIXME: Add logic to build and install here make() - make("install") + make("install")
\ No newline at end of file diff --git a/var/spack/packages/scorep/package.py b/var/spack/packages/scorep/package.py index 0820f2d8ac..5127e814b6 100644 --- a/var/spack/packages/scorep/package.py +++ b/var/spack/packages/scorep/package.py @@ -28,9 +28,9 @@ from spack import * class Scorep(Package): """ - The Score-P measurement infrastructure is a highly scalable and - easy-to-use tool suite for profiling, event tracing, and online - analysis of HPC applications.""" + The Score-P measurement infrastructure is a highly scalable and easy-to-use tool suite for profiling, event + tracing, and online analysis of HPC applications. + """ homepage = "http://www.vi-hps.org/projects/score-p" url = "http://www.vi-hps.org/upload/packages/scorep/scorep-1.2.3.tar.gz" @@ -55,15 +55,6 @@ class Scorep(Package): depends_on("mpi") depends_on("papi") - def get_compiler_config_line(self): - backend_user_provided = ['CC=%s' % self.compiler.cc_names[0], - 'CXX=%s' % self.compiler.cxx_names[0], - 'F77=%s' % self.compiler.f77_names[0] if len(self.compiler.f77_names) else "", - 'FC=%s' % self.compiler.fc_names[0] if len(self.compiler.fc_names) else "", - 'CFLAGS=-fPIC %s' % self.rpath_args, - 'CXXFLAGS=-fPIC %s'% self.rpath_args] - return backend_user_provided - def install(self, spec, prefix): configure = Executable( join_path(self.stage.source_path, 'configure') ) with working_dir('spack-build', create=True): @@ -73,8 +64,9 @@ class Scorep(Package): "--with-cube=%s" % spec['cube'].prefix.bin, "--with-papi-header=%s" % spec['papi'].prefix.include, "--with-papi-lib=%s" % spec['papi'].prefix.lib, - "--enable-shared"] - configure_args.extend(self.get_compiler_config_line()) + "--enable-shared", + "CFLAGS=-fPIC", + "CXXFLAGS=-fPIC"] configure(*configure_args) make() make("install") |
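Three of the changes above are worth a closer look outside the patch format. The largest user-visible addition is the new `spack url-parse` command (lib/spack/spack/cmd/url-parse.py). The following is an editorial sketch, not part of the commit, of the spack.url calls the command makes; it assumes Spack's lib directory is importable, and the cube tarball URL is simply one that appears elsewhere in this diff:

    # Editorial sketch: the parsing calls behind `spack url-parse`.
    # Python 2 style, matching the codebase of this era.
    import spack.url

    url = "http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3.3.tar.gz"

    # The offsets and lengths are what the command uses to underline the
    # detected name ('-') and version ('~') inside the printed URL.
    ver, vs, vl = spack.url.parse_version_offset(url)
    name, ns, nl = spack.url.parse_name_offset(url, ver)
    print "name: %s   version: %s" % (name, ver)

    # The command then substitutes a stand-in version to show the rewrite...
    print spack.url.substitute_version(url, '9.9.9b')
    # ...and, when -s/--spider is given, calls find_versions_of_archive() on it.

On the command line this corresponds to `spack url-parse --spider <archive-url>`.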
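Behind that command sits the other structural change: find_versions_of_archive() moves from spack.package to spack.util.web, and get_pages() becomes spider(), which now returns both the fetched pages and every link it saw. Below is a hedged sketch of the new call pattern, again assuming the changed modules are importable and using the hwloc archive URL from this diff as the example:

    # Editorial sketch (not Spack source): exercising the relocated helpers.
    import spack.url
    import spack.util.web

    archive_url = "http://www.open-mpi.org/software/hwloc/v1.9/downloads/hwloc-1.9.tar.gz"

    # spider() replaces get_pages() and returns (pages, links): pages maps
    # page URL -> text, links is the set of absolute links encountered.
    list_url = spack.url.find_list_url(archive_url)
    pages, links = spack.util.web.spider(list_url, depth=1)

    # find_versions_of_archive() matches those links against a wildcarded
    # version of the archive URL and returns a {Version: url} dict.
    versions = spack.util.web.find_versions_of_archive(archive_url, list_depth=1)
    for v in sorted(versions):
        print "%-12s %s" % (v, versions[v])

The same relocated function is what `spack create` and Package.fetch_remote_versions now call, so the spidering logic lives in one place.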
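Finally, Package.do_patch() now records its outcome with three marker files in the stage directory: .spack_patched (at least one patch or patch() ran), .spack_no_patches (nothing needed), and .spack_patch_failed (forces a restage and retry on the next run). Here is a standalone, simplified sketch of that protocol; the marker names come from the commit, everything else is illustrative, and the restage-on-failure step is omitted:

    # Editorial sketch of the marker-file protocol used by do_patch();
    # `patches` stands in for Spack's Patch objects / the patch() function.
    import os

    GOOD, NONE_NEEDED, FAILED = ('.spack_patched', '.spack_no_patches',
                                 '.spack_patch_failed')

    def apply_patches(stage_dir, patches):
        good = os.path.join(stage_dir, GOOD)
        none_needed = os.path.join(stage_dir, NONE_NEEDED)
        failed = os.path.join(stage_dir, FAILED)

        # A marker from a previous run means there is nothing left to do.
        if os.path.isfile(good) or os.path.isfile(none_needed):
            return

        patched = False
        try:
            for patch in patches:
                patch()                    # apply one patch
                patched = True
        except Exception:
            open(failed, 'w').close()      # next run restages and retries
            raise

        # Success: clear any stale failure marker and record the outcome so
        # the next invocation can skip patching entirely.
        if os.path.isfile(failed):
            os.remove(failed)
        open(good if patched else none_needed, 'w').close()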