-rwxr-xr-x  lib/spack/env/cc | 53
-rw-r--r--  lib/spack/spack/__init__.py | 2
-rw-r--r--  lib/spack/spack/build_environment.py | 31
-rw-r--r--  lib/spack/spack/cmd/find.py | 174
-rw-r--r--  lib/spack/spack/cmd/install.py | 3
-rw-r--r--  lib/spack/spack/cmd/uninstall.py | 4
-rw-r--r--  lib/spack/spack/compiler.py | 12
-rw-r--r--  lib/spack/spack/compilers/__init__.py | 6
-rw-r--r--  lib/spack/spack/concretize.py | 55
-rw-r--r--  lib/spack/spack/database.py | 36
-rw-r--r--  lib/spack/spack/directives.py | 51
-rw-r--r--  lib/spack/spack/environment.py | 38
-rw-r--r--  lib/spack/spack/fetch_strategy.py | 240
-rw-r--r--  lib/spack/spack/modules.py | 6
-rw-r--r--  lib/spack/spack/multimethod.py | 4
-rw-r--r--  lib/spack/spack/package.py | 10
-rw-r--r--  lib/spack/spack/spec.py | 390
-rw-r--r--  lib/spack/spack/test/cc.py | 72
-rw-r--r--  lib/spack/spack/test/concretize.py | 18
-rw-r--r--  lib/spack/spack/test/modules.py | 16
-rw-r--r--  lib/spack/spack/test/multimethod.py | 14
-rw-r--r--  lib/spack/spack/test/optional_deps.py | 7
-rw-r--r--  lib/spack/spack/test/spec_dag.py | 6
-rw-r--r--  lib/spack/spack/test/spec_semantics.py | 81
-rw-r--r--  lib/spack/spack/test/spec_syntax.py | 7
-rw-r--r--  lib/spack/spack/util/executable.py | 92
-rw-r--r--  lib/spack/spack/variant.py | 2
-rw-r--r--  lib/spack/spack/virtual.py | 5
-rwxr-xr-x  share/spack/qa/run-flake8 | 12
-rw-r--r--  var/spack/repos/builtin.mock/packages/multimethod/package.py | 8
-rw-r--r--  var/spack/repos/builtin/packages/boost/package.py | 2
-rw-r--r--  var/spack/repos/builtin/packages/doxygen/package.py | 23
-rw-r--r--  var/spack/repos/builtin/packages/flex/package.py | 5
-rw-r--r--  var/spack/repos/builtin/packages/gcc/package.py | 91
-rw-r--r--  var/spack/repos/builtin/packages/ghostscript/package.py | 8
-rw-r--r--  var/spack/repos/builtin/packages/go-bootstrap/package.py | 51
-rw-r--r--  var/spack/repos/builtin/packages/go/package.py | 80
-rw-r--r--  var/spack/repos/builtin/packages/graphviz/package.py | 14
-rw-r--r--  var/spack/repos/builtin/packages/hub/package.py | 24
-rw-r--r--  var/spack/repos/builtin/packages/libpciaccess/package.py | 2
-rw-r--r--  var/spack/repos/builtin/packages/lua-luaposix/package.py | 16
-rw-r--r--  var/spack/repos/builtin/packages/lua/package.py | 107
-rw-r--r--  var/spack/repos/builtin/packages/netcdf-cxx4/package.py | 9
-rw-r--r--  var/spack/repos/builtin/packages/openblas/package.py | 12
-rw-r--r--  var/spack/repos/builtin/packages/openssl/package.py | 4
-rw-r--r--  var/spack/repos/builtin/packages/scotch/package.py | 64
-rw-r--r--  var/spack/repos/builtin/packages/sed/package.py | 39
-rw-r--r--  var/spack/repos/builtin/packages/the_platinum_searcher/package.py | 21
-rw-r--r--  var/spack/repos/builtin/packages/turbomole/package.py | 76
49 files changed, 1590 insertions(+), 513 deletions(-)
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index b9b79f83a3..9758b74f37 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -55,7 +55,10 @@ parameters=(
# The compiler input variables are checked for sanity later:
# SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC
-# Debug flag is optional; set to "TRUE" for debug logging:
+# The default compiler flags are passed from these variables:
+# SPACK_CFLAGS, SPACK_CXXFLAGS, SPACK_FCFLAGS, SPACK_FFLAGS,
+# SPACK_LDFLAGS, SPACK_LDLIBS
+# Debug env var is optional; set to true for debug logging:
# SPACK_DEBUG
# Test command is used to unit test the compiler script.
# SPACK_TEST_COMMAND
@@ -99,21 +102,25 @@ case "$command" in
command="$SPACK_CC"
language="C"
comp="CC"
+ lang_flags=C
;;
c++|CC|g++|clang++|icpc|pgc++|xlc++)
command="$SPACK_CXX"
language="C++"
comp="CXX"
+ lang_flags=CXX
;;
f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
command="$SPACK_FC"
language="Fortran 90"
comp="FC"
+ lang_flags=F
;;
f77|gfortran|ifort|pgfortran|xlf|nagfor)
command="$SPACK_F77"
language="Fortran 77"
comp="F77"
+ lang_flags=F
;;
ld)
mode=ld
@@ -131,7 +138,7 @@ if [[ -z $mode ]]; then
if [[ $arg == -v || $arg == -V || $arg == --version || $arg == -dumpversion ]]; then
mode=vcheck
break
- fi
+ fi
done
fi
@@ -188,6 +195,42 @@ fi
input_command="$@"
args=("$@")
+# Prepend cppflags, cflags, cxxflags, fcflags, fflags, and ldflags
+
+# Add ldflags
+case "$mode" in
+ ld|ccld)
+ args=(${SPACK_LDFLAGS[@]} "${args[@]}") ;;
+esac
+
+# Add compiler flags.
+case "$mode" in
+ cc|ccld)
+ # Add c, cxx, fc, and f flags
+ case $lang_flags in
+ C)
+ args=(${SPACK_CFLAGS[@]} "${args[@]}") ;;
+ CXX)
+ args=(${SPACK_CXXFLAGS[@]} "${args[@]}") ;;
+ esac
+ ;;
+esac
+
+# Add cppflags
+case "$mode" in
+ cpp|as|cc|ccld)
+ args=(${SPACK_CPPFLAGS[@]} "${args[@]}") ;;
+esac
+
+case "$mode" in cc|ccld)
+ # Add fortran flags
+ case $lang_flags in
+ F)
+ args=(${SPACK_FFLAGS[@]} "${args[@]}") ;;
+ esac
+ ;;
+esac
+
# Read spack dependencies from the path environment variable
IFS=':' read -ra deps <<< "$SPACK_DEPENDENCIES"
for dep in "${deps[@]}"; do
@@ -230,6 +273,12 @@ elif [[ $mode == ld ]]; then
$add_rpaths && args=("-rpath" "$SPACK_PREFIX/lib" "${args[@]}")
fi
+# Add SPACK_LDLIBS to args
+case "$mode" in
+ ld|ccld)
+ args=("${args[@]}" ${SPACK_LDLIBS[@]}) ;;
+esac
+
#
# Unset pesky environment variables that could affect build sanity.
#
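
The hunks above make the compiler wrapper inject flags from the new SPACK_* variables in a mode- and language-dependent order: ldflags are prepended for link steps, the language-specific cflags/cxxflags/fflags for compile steps, cppflags whenever the preprocessor runs, and ldlibs are appended at the end of the link line. A minimal standalone Python sketch of that ordering (illustrative only; the real logic is the bash script above, and injected_args is a hypothetical helper):

    import os

    def injected_args(mode, lang_flags, user_args):
        """Model the wrapper's flag-injection order.

        mode:       'cpp', 'as', 'cc', 'ld', or 'ccld'
        lang_flags: 'C', 'CXX', or 'F', chosen from the command name
        user_args:  arguments the build system passed to the wrapper
        """
        def env_flags(name):
            return os.environ.get(name, '').split()

        args = list(user_args)
        if mode in ('ld', 'ccld'):                       # linker flags first
            args = env_flags('SPACK_LDFLAGS') + args
        if mode in ('cc', 'ccld') and lang_flags == 'C':
            args = env_flags('SPACK_CFLAGS') + args
        if mode in ('cc', 'ccld') and lang_flags == 'CXX':
            args = env_flags('SPACK_CXXFLAGS') + args
        if mode in ('cpp', 'as', 'cc', 'ccld'):          # preprocessor flags
            args = env_flags('SPACK_CPPFLAGS') + args
        if mode in ('cc', 'ccld') and lang_flags == 'F':
            args = env_flags('SPACK_FFLAGS') + args
        if mode in ('ld', 'ccld'):                       # libraries go last
            args = args + env_flags('SPACK_LDLIBS')
        return args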
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 164340bf0f..c7a155597f 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -105,7 +105,7 @@ concretizer = DefaultConcretizer()
# Version information
from spack.version import Version
-spack_version = Version("0.8.15")
+spack_version = Version("0.9")
#
# Executables used by Spack
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 5ce4cb1ce1..d87aaa6285 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -51,15 +51,16 @@ There are two parts to the build environment:
Skimming this module is a nice way to get acquainted with the types of
calls you can make from within the install() function.
"""
-import multiprocessing
import os
-import platform
-import shutil
import sys
+import shutil
+import multiprocessing
+import platform
-import spack
import llnl.util.tty as tty
from llnl.util.filesystem import *
+
+import spack
from spack.environment import EnvironmentModifications, validate
from spack.util.environment import *
from spack.util.executable import Executable, which
@@ -115,22 +116,24 @@ class MakeExecutable(Executable):
def set_compiler_environment_variables(pkg, env):
assert pkg.spec.concrete
+ compiler = pkg.compiler
+ flags = pkg.spec.compiler_flags
+
# Set compiler variables used by CMake and autotools
- assert all(key in pkg.compiler.link_paths for key in ('cc', 'cxx', 'f77', 'fc'))
+ assert all(key in compiler.link_paths for key in ('cc', 'cxx', 'f77', 'fc'))
# Populate an object with the list of environment modifications
# and return it
# TODO : add additional kwargs for better diagnostics, like requestor, ttyout, ttyerr, etc.
link_dir = spack.build_env_path
- env.set('CC', join_path(link_dir, pkg.compiler.link_paths['cc']))
- env.set('CXX', join_path(link_dir, pkg.compiler.link_paths['cxx']))
- env.set('F77', join_path(link_dir, pkg.compiler.link_paths['f77']))
- env.set('FC', join_path(link_dir, pkg.compiler.link_paths['fc']))
+ env.set('CC', join_path(link_dir, compiler.link_paths['cc']))
+ env.set('CXX', join_path(link_dir, compiler.link_paths['cxx']))
+ env.set('F77', join_path(link_dir, compiler.link_paths['f77']))
+ env.set('FC', join_path(link_dir, compiler.link_paths['fc']))
# Set SPACK compiler variables so that our wrapper knows what to call
- compiler = pkg.compiler
if compiler.cc:
- env.set('SPACK_CC', compiler.cc)
+ env.set('SPACK_CC', compiler.cc)
if compiler.cxx:
env.set('SPACK_CXX', compiler.cxx)
if compiler.f77:
@@ -144,6 +147,12 @@ def set_compiler_environment_variables(pkg, env):
env.set('SPACK_F77_RPATH_ARG', compiler.f77_rpath_arg)
env.set('SPACK_FC_RPATH_ARG', compiler.fc_rpath_arg)
+ # Add every valid compiler flag to the environment, prefixed with "SPACK_"
+ for flag in spack.spec.FlagMap.valid_compiler_flags():
+ # Concreteness guarantees key safety here
+ if flags[flag] != []:
+ env.set('SPACK_' + flag.upper(), ' '.join(f for f in flags[flag]))
+
env.set('SPACK_COMPILER_SPEC', str(pkg.spec.compiler))
return env
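
The loop added above exports each non-empty flag list on the concrete spec as a space-joined SPACK_<FLAG> environment variable, which is exactly what the wrapper script consumes. A self-contained sketch of that translation, using a plain dict in place of spack.spec.FlagMap and the EnvironmentModifications object (names here are illustrative):

    valid_compiler_flags = ['cflags', 'cxxflags', 'fflags',
                            'ldflags', 'ldlibs', 'cppflags']

    def flag_environment(compiler_flags):
        """Return the SPACK_* variables a spec's flag map would produce."""
        env = {}
        for flag in valid_compiler_flags:
            values = compiler_flags.get(flag, [])
            if values:  # only non-empty lists are exported
                env['SPACK_' + flag.upper()] = ' '.join(values)
        return env

    # {'cflags': ['-O3', '-g'], 'ldlibs': []} -> {'SPACK_CFLAGS': '-O3 -g'}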
diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py
index a99012a275..7d6aef44a3 100644
--- a/lib/spack/spack/cmd/find.py
+++ b/lib/spack/spack/cmd/find.py
@@ -22,57 +22,92 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import sys
-import collections
-import itertools
import argparse
-from StringIO import StringIO
+import sys
import llnl.util.tty as tty
+import spack
+import spack.spec
+from llnl.util.lang import *
from llnl.util.tty.colify import *
from llnl.util.tty.color import *
-from llnl.util.lang import *
-import spack
-import spack.spec
+description = "Find installed spack packages"
-description ="Find installed spack packages"
def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group()
+ format_group.add_argument('-s',
+ '--short',
+ action='store_const',
+ dest='mode',
+ const='short',
+ help='Show only specs (default)')
+ format_group.add_argument('-p',
+ '--paths',
+ action='store_const',
+ dest='mode',
+ const='paths',
+ help='Show paths to package install directories')
format_group.add_argument(
- '-s', '--short', action='store_const', dest='mode', const='short',
- help='Show only specs (default)')
- format_group.add_argument(
- '-p', '--paths', action='store_const', dest='mode', const='paths',
- help='Show paths to package install directories')
- format_group.add_argument(
- '-d', '--deps', action='store_const', dest='mode', const='deps',
+ '-d',
+ '--deps',
+ action='store_const',
+ dest='mode',
+ const='deps',
help='Show full dependency DAG of installed packages')
+ subparser.add_argument('-l',
+ '--long',
+ action='store_true',
+ dest='long',
+ help='Show dependency hashes as well as versions.')
+ subparser.add_argument('-L',
+ '--very-long',
+ action='store_true',
+ dest='very_long',
+ help='Show dependency hashes as well as versions.')
+ subparser.add_argument('-f',
+ '--show-flags',
+ action='store_true',
+ dest='show_flags',
+ help='Show spec compiler flags.')
+
subparser.add_argument(
- '-l', '--long', action='store_true',
- help='Show dependency hashes as well as versions.')
+ '-e',
+ '--explicit',
+ action='store_true',
+ help='Show only specs that were installed explicitly')
subparser.add_argument(
- '-L', '--very-long', action='store_true',
- help='Show dependency hashes as well as versions.')
-
+ '-E',
+ '--implicit',
+ action='store_true',
+ help='Show only specs that were installed as dependencies')
subparser.add_argument(
- '-u', '--unknown', action='store_true',
+ '-u',
+ '--unknown',
+ action='store_true',
+ dest='unknown',
help='Show only specs Spack does not have a package for.')
subparser.add_argument(
- '-m', '--missing', action='store_true',
+ '-m',
+ '--missing',
+ action='store_true',
+ dest='missing',
help='Show missing dependencies as well as installed specs.')
- subparser.add_argument(
- '-M', '--only-missing', action='store_true',
- help='Show only missing dependencies.')
- subparser.add_argument(
- '-N', '--namespace', action='store_true',
- help='Show fully qualified package names.')
+ subparser.add_argument('-M',
+ '--only-missing',
+ action='store_true',
+ dest='only_missing',
+ help='Show only missing dependencies.')
+ subparser.add_argument('-N',
+ '--namespace',
+ action='store_true',
+ help='Show fully qualified package names.')
- subparser.add_argument(
- 'query_specs', nargs=argparse.REMAINDER,
- help='optional specs to filter results')
+ subparser.add_argument('query_specs',
+ nargs=argparse.REMAINDER,
+ help='optional specs to filter results')
def gray_hash(spec, length):
@@ -89,23 +124,29 @@ def display_specs(specs, **kwargs):
hashes = True
hlen = None
+ nfmt = '.' if namespace else '_'
+ format_string = '$%s$@$+' % nfmt
+ flags = kwargs.get('show_flags', False)
+ if flags:
+ format_string = '$%s$@$%%+$+' % nfmt
+
# Make a dict with specs keyed by architecture and compiler.
index = index_by(specs, ('architecture', 'compiler'))
# Traverse the index and print out each package
for i, (architecture, compiler) in enumerate(sorted(index)):
- if i > 0: print
+ if i > 0:
+ print
- header = "%s{%s} / %s{%s}" % (
- spack.spec.architecture_color, architecture,
- spack.spec.compiler_color, compiler)
+ header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
+ architecture, spack.spec.compiler_color,
+ compiler)
tty.hline(colorize(header), char='-')
- specs = index[(architecture,compiler)]
+ specs = index[(architecture, compiler)]
specs.sort()
- nfmt = '.' if namespace else '_'
- abbreviated = [s.format('$%s$@$+' % nfmt, color=True) for s in specs]
+ abbreviated = [s.format(format_string, color=True) for s in specs]
if mode == 'paths':
# Print one spec per line along with prefix path
width = max(len(s) for s in abbreviated)
@@ -114,38 +155,46 @@ def display_specs(specs, **kwargs):
for abbrv, spec in zip(abbreviated, specs):
if hashes:
- print gray_hash(spec, hlen),
- print format % (abbrv, spec.prefix)
+ print(gray_hash(spec, hlen), )
+ print(format % (abbrv, spec.prefix))
elif mode == 'deps':
for spec in specs:
- print spec.tree(
- format='$%s$@$+' % nfmt,
+ print(spec.tree(
+ format=format_string,
color=True,
indent=4,
- prefix=(lambda s: gray_hash(s, hlen)) if hashes else None)
+ prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))
elif mode == 'short':
- def fmt(s):
- string = ""
- if hashes:
- string += gray_hash(s, hlen) + ' '
- string += s.format('$-%s$@$+' % nfmt, color=True)
+ # Print columns of output if not printing flags
+ if not flags:
- return string
- colify(fmt(s) for s in specs)
+ def fmt(s):
+ string = ""
+ if hashes:
+ string += gray_hash(s, hlen) + ' '
+ string += s.format('$-%s$@$+' % nfmt, color=True)
- else:
- raise ValueError(
- "Invalid mode for display_specs: %s. Must be one of (paths, deps, short)." % mode)
+ return string
+
+ colify(fmt(s) for s in specs)
+ # Print one entry per line if including flags
+ else:
+ for spec in specs:
+ # Print the hash if necessary
+ hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
+ print(hsh + spec.format(format_string, color=True) + '\n')
+ else:
+ raise ValueError("Invalid mode for display_specs: %s. Must be one of (paths, deps, short)." % mode) # NOQA: ignore=E501
def find(parser, args):
# Filter out specs that don't exist.
query_specs = spack.cmd.parse_specs(args.query_specs)
query_specs, nonexisting = partition_list(
- query_specs, lambda s: spack.repo.exists(s.name))
+ query_specs, lambda s: spack.repo.exists(s.name) or not s.name)
if nonexisting:
msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '')
@@ -163,13 +212,21 @@ def find(parser, args):
installed = any
if args.unknown:
known = False
- q_args = { 'installed' : installed, 'known' : known }
+
+ explicit = any
+ if args.explicit:
+ explicit = False
+ if args.implicit:
+ explicit = True
+
+ q_args = {'installed': installed, 'known': known, "explicit": explicit}
# Get all the specs the user asked for
if not query_specs:
specs = set(spack.installed_db.query(**q_args))
else:
- results = [set(spack.installed_db.query(qs, **q_args)) for qs in query_specs]
+ results = [set(spack.installed_db.query(qs, **q_args))
+ for qs in query_specs]
specs = set.union(*results)
if not args.mode:
@@ -177,7 +234,8 @@ def find(parser, args):
if sys.stdout.isatty():
tty.msg("%d installed packages." % len(specs))
- display_specs(specs, mode=args.mode,
+ display_specs(specs,
+ mode=args.mode,
long=args.long,
very_long=args.very_long,
- namespace=args.namespace)
+ show_flags=args.show_flags)
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index fef21f15ba..9d3175786b 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -78,4 +78,5 @@ def install(parser, args):
ignore_deps=args.ignore_deps,
make_jobs=args.jobs,
verbose=args.verbose,
- fake=args.fake)
+ fake=args.fake,
+ explicit=True)
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index 3bffc2633b..9fdf3045b2 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -92,7 +92,7 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
if not allow_multiple_matches and len(matching) > 1:
tty.error("%s matches multiple packages:" % spec)
print()
- display_specs(matching, long=True)
+ display_specs(matching, long=True, show_flags=True)
print()
has_errors = True
@@ -186,7 +186,7 @@ def uninstall(parser, args):
if not args.yes_to_all:
tty.msg("The following packages will be uninstalled : ")
print('')
- display_specs(uninstall_list, long=True)
+ display_specs(uninstall_list, long=True, show_flags=True)
print('')
ask_for_confirmation('Do you want to proceed ? ')
diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py
index e2da272212..2ae305f201 100644
--- a/lib/spack/spack/compiler.py
+++ b/lib/spack/spack/compiler.py
@@ -109,7 +109,7 @@ class Compiler(object):
return '-Wl,-rpath,'
- def __init__(self, cspec, cc, cxx, f77, fc):
+ def __init__(self, cspec, cc, cxx, f77, fc, **kwargs):
def check(exe):
if exe is None:
return None
@@ -121,6 +121,15 @@ class Compiler(object):
self.f77 = check(f77)
self.fc = check(fc)
+ # Unfortunately have to make sure these params are accepted
+ # in the same order they are returned by sorted(flags)
+ # in compilers/__init__.py
+ self.flags = {}
+ for flag in spack.spec.FlagMap.valid_compiler_flags():
+ value = kwargs.get(flag, None)
+ if value is not None:
+ self.flags[flag] = value.split()
+
self.spec = cspec
@@ -188,7 +197,6 @@ class Compiler(object):
def fc_version(cls, fc):
return cls.default_version(fc)
-
@classmethod
def _find_matches_in_path(cls, compiler_names, detect_version, *path):
"""Finds compilers in the paths supplied.
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index 692e5518aa..7c951ae8bc 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -255,7 +255,11 @@ def compilers_for_spec(compiler_spec, arch=None, scope=None):
else:
compiler_paths.append(None)
- return cls(cspec, *compiler_paths)
+ flags = {}
+ for f in spack.spec.FlagMap.valid_compiler_flags():
+ if f in items:
+ flags[f] = items[f]
+ return cls(cspec, *compiler_paths, **flags)
matches = find(compiler_spec, arch, scope)
return [get_compiler(cspec) for cspec in matches]
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index f5e1c10b48..4f78bfc347 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -44,6 +44,7 @@ from spec import DependencyMap
from itertools import chain
from spack.config import *
+
class DefaultConcretizer(object):
"""This class doesn't have any state, it just provides some methods for
concretization. You can subclass it to override just some of the
@@ -269,6 +270,59 @@ class DefaultConcretizer(object):
return True # things changed.
+ def concretize_compiler_flags(self, spec):
+ """
+ The compiler flags are updated to match those of the spec whose
+ compiler is used, defaulting to no compiler flags in the spec.
+ Default specs set at the compiler level will still be added later.
+ """
+ ret = False
+ for flag in spack.spec.FlagMap.valid_compiler_flags():
+ try:
+ nearest = next(p for p in spec.traverse(direction='parents')
+ if ((p.compiler == spec.compiler and p is not spec)
+ and flag in p.compiler_flags))
+ if ((not flag in spec.compiler_flags) or
+ sorted(spec.compiler_flags[flag]) != sorted(nearest.compiler_flags[flag])):
+ if flag in spec.compiler_flags:
+ spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
+ set(nearest.compiler_flags[flag]))
+ else:
+ spec.compiler_flags[flag] = nearest.compiler_flags[flag]
+ ret = True
+
+ except StopIteration:
+ if (flag in spec.root.compiler_flags and ((not flag in spec.compiler_flags) or
+ sorted(spec.compiler_flags[flag]) != sorted(spec.root.compiler_flags[flag]))):
+ if flag in spec.compiler_flags:
+ spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
+ set(spec.root.compiler_flags[flag]))
+ else:
+ spec.compiler_flags[flag] = spec.root.compiler_flags[flag]
+ ret = True
+ else:
+ if not flag in spec.compiler_flags:
+ spec.compiler_flags[flag] = []
+
+ # Include the compiler flag defaults from the config files
+ # This ensures that spack will detect conflicts that stem from a change
+ # in default compiler flags.
+ compiler = spack.compilers.compiler_for_spec(spec.compiler)
+ for flag in compiler.flags:
+ if flag not in spec.compiler_flags:
+ spec.compiler_flags[flag] = compiler.flags[flag]
+ if compiler.flags[flag] != []:
+ ret = True
+ else:
+ if ((sorted(spec.compiler_flags[flag]) != sorted(compiler.flags[flag])) and
+ (not set(spec.compiler_flags[flag]) >= set(compiler.flags[flag]))):
+ ret = True
+ spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
+ set(compiler.flags[flag]))
+
+ return ret
+
+
def find_spec(spec, condition):
"""Searches the dag from spec in an intelligent order and looks
for a spec that matches a condition"""
@@ -330,7 +384,6 @@ def cmp_specs(lhs, rhs):
return 0
-
class UnavailableCompilerVersionError(spack.error.SpackError):
"""Raised when there is no available compiler that satisfies a
compiler spec."""
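
In words, concretize_compiler_flags walks toward the root looking for the nearest ancestor built with the same compiler (falling back to the root itself), unions that spec's flag values into the current spec, and then merges in any default flags attached to the compiler entry in the configuration, returning True whenever something changed so concretization iterates again. A simplified, standalone sketch of the per-flag merge rule (it ignores the DAG traversal and treats flag maps as plain dicts; not Spack's API):

    def merge_flag(spec_flags, inherited, flag):
        """Union 'inherited' values for one flag into spec_flags.

        Returns True if spec_flags changed.  This condenses the
        set-union and sorted-comparison logic used above.
        """
        current = spec_flags.setdefault(flag, [])
        if set(inherited) <= set(current):
            return False                      # nothing new to inherit
        spec_flags[flag] = list(set(current) | set(inherited))
        return True

    flags = {'cflags': ['-g']}
    merge_flag(flags, ['-O3', '-g'], 'cflags')  # flags['cflags'] now includes -O3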
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index ee4473e079..f3967e6b72 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -60,7 +60,7 @@ from spack.repository import UnknownPackageError
_db_dirname = '.spack-db'
# DB version. This is stuck in the DB file to track changes in format.
-_db_version = Version('0.9')
+_db_version = Version('0.9.1')
# Default timeout for spack database locks is 5 min.
_db_lock_timeout = 60
@@ -92,22 +92,24 @@ class InstallRecord(object):
dependents left.
"""
- def __init__(self, spec, path, installed, ref_count=0):
+ def __init__(self, spec, path, installed, ref_count=0, explicit=False):
self.spec = spec
self.path = str(path)
self.installed = bool(installed)
self.ref_count = ref_count
+ self.explicit = explicit
def to_dict(self):
return { 'spec' : self.spec.to_node_dict(),
'path' : self.path,
'installed' : self.installed,
- 'ref_count' : self.ref_count }
+ 'ref_count' : self.ref_count,
+ 'explicit' : self.explicit }
@classmethod
def from_dict(cls, spec, dictionary):
d = dictionary
- return InstallRecord(spec, d['path'], d['installed'], d['ref_count'])
+ return InstallRecord(spec, d['path'], d['installed'], d['ref_count'], d.get('explicit', False))
class Database(object):
@@ -203,6 +205,11 @@ class Database(object):
spec_dict = installs[hash_key]['spec']
+ # Install records don't include hash with spec, so we add it in here
+ # to ensure it is read properly.
+ for name in spec_dict:
+ spec_dict[name]['hash'] = hash_key
+
# Build spec from dict first.
spec = Spec.from_node_dict(spec_dict)
@@ -248,13 +255,18 @@ class Database(object):
check('installs' in db, "No 'installs' in YAML DB.")
check('version' in db, "No 'version' in YAML DB.")
+
+ installs = db['installs']
+
# TODO: better version checking semantics.
version = Version(db['version'])
- if version != _db_version:
+ if version > _db_version:
raise InvalidDatabaseVersionError(_db_version, version)
+ elif version < _db_version:
+ self.reindex(spack.install_layout)
+ installs = dict((k, v.to_dict()) for k, v in self._data.items())
# Iterate through database and check each record.
- installs = db['installs']
data = {}
for hash_key, rec in installs.items():
try:
@@ -370,7 +382,7 @@ class Database(object):
self.reindex(spack.install_layout)
- def _add(self, spec, path, directory_layout=None):
+ def _add(self, spec, path, directory_layout=None, explicit=False):
"""Add an install record for spec at path to the database.
This assumes that the spec is not already installed. It
@@ -392,7 +404,7 @@ class Database(object):
rec.path = path
else:
- self._data[key] = InstallRecord(spec, path, True)
+ self._data[key] = InstallRecord(spec, path, True, explicit=explicit)
for dep in spec.dependencies.values():
self._increment_ref_count(dep, directory_layout)
@@ -415,7 +427,7 @@ class Database(object):
self._data[key].ref_count += 1
@_autospec
- def add(self, spec, path):
+ def add(self, spec, path, explicit=False):
"""Add spec at path to database, locking and reading DB to sync.
``add()`` will lock and read from the DB on disk.
@@ -424,7 +436,7 @@ class Database(object):
# TODO: ensure that spec is concrete?
# Entire add is transactional.
with self.write_transaction():
- self._add(spec, path)
+ self._add(spec, path, explicit=explicit)
def _get_matching_spec_key(self, spec, **kwargs):
@@ -513,7 +525,7 @@ class Database(object):
# TODO: conditional way to do this instead of catching exceptions
- def query(self, query_spec=any, known=any, installed=True):
+ def query(self, query_spec=any, known=any, installed=True, explicit=any):
"""Run a query on the database.
``query_spec``
@@ -553,6 +565,8 @@ class Database(object):
for key, rec in self._data.items():
if installed is not any and rec.installed != installed:
continue
+ if explicit is not any and rec.explicit != explicit:
+ continue
if known is not any and spack.repo.exists(rec.spec.name) != known:
continue
if query_spec is any or rec.spec.satisfies(query_spec):
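
The new explicit column records whether a package was installed because the user asked for it or only as a dependency, and query() filters on it just like installed and known. A brief usage sketch (assumes a Spack checkout on the Python path; spack.installed_db is the Database instance used throughout this diff):

    import spack

    # What `spack find --explicit` reports: packages installed directly
    explicit_specs = spack.installed_db.query(explicit=True)

    # Packages present only because something depended on them
    implicit_specs = spack.installed_db.query(explicit=False)

    # The default, explicit=any, returns both kinds
    all_specs = spack.installed_db.query()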
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py
index 74ee7b0add..51b26773e2 100644
--- a/lib/spack/spack/directives.py
+++ b/lib/spack/spack/directives.py
@@ -45,11 +45,8 @@ The available directives are:
* ``resource``
"""
-__all__ = ['depends_on', 'extends', 'provides', 'patch', 'version',
- 'variant', 'resource']
import re
-import inspect
import os.path
import functools
@@ -67,6 +64,9 @@ from spack.spec import Spec, parse_anonymous_spec
from spack.resource import Resource
from spack.fetch_strategy import from_kwargs
+__all__ = ['depends_on', 'extends', 'provides', 'patch', 'version', 'variant',
+ 'resource']
+
#
# This is a list of all directives, built up as they are defined in
# this file.
@@ -122,15 +122,14 @@ class directive(object):
def __init__(self, dicts=None):
if isinstance(dicts, basestring):
- dicts = (dicts,)
+ dicts = (dicts, )
elif type(dicts) not in (list, tuple):
raise TypeError(
- "dicts arg must be list, tuple, or string. Found %s"
- % type(dicts))
+ "dicts arg must be list, tuple, or string. Found %s" %
+ type(dicts))
self.dicts = dicts
-
def ensure_dicts(self, pkg):
"""Ensure that a package has the dicts required by this directive."""
for d in self.dicts:
@@ -142,7 +141,6 @@ class directive(object):
raise spack.error.SpackError(
"Package %s has non-dict %s attribute!" % (pkg, d))
-
def __call__(self, directive_function):
directives[directive_function.__name__] = self
@@ -259,11 +257,12 @@ def variant(pkg, name, default=False, description=""):
"""Define a variant for the package. Packager can specify a default
value (on or off) as well as a text description."""
- default = bool(default)
+ default = bool(default)
description = str(description).strip()
if not re.match(spack.spec.identifier_re, name):
- raise DirectiveError("Invalid variant name in %s: '%s'" % (pkg.name, name))
+ raise DirectiveError("Invalid variant name in %s: '%s'" %
+ (pkg.name, name))
pkg.variants[name] = Variant(default, description)
@@ -271,31 +270,37 @@ def variant(pkg, name, default=False, description=""):
@directive('resources')
def resource(pkg, **kwargs):
"""
- Define an external resource to be fetched and staged when building the package. Based on the keywords present in the
- dictionary the appropriate FetchStrategy will be used for the resource. Resources are fetched and staged in their
- own folder inside spack stage area, and then linked into the stage area of the package that needs them.
+ Define an external resource to be fetched and staged when building the
+ package. Based on the keywords present in the dictionary the appropriate
+ FetchStrategy will be used for the resource. Resources are fetched and
+ staged in their own folder inside spack stage area, and then linked into
+ the stage area of the package that needs them.
List of recognized keywords:
- * 'when' : (optional) represents the condition upon which the resource is needed
- * 'destination' : (optional) path where to link the resource. This path must be relative to the main package stage
- area.
- * 'placement' : (optional) gives the possibility to fine tune how the resource is linked into the main package stage
- area.
+ * 'when' : (optional) represents the condition upon which the resource is
+ needed
+ * 'destination' : (optional) path where to link the resource. This path
+ must be relative to the main package stage area.
+ * 'placement' : (optional) gives the possibility to fine tune how the
+ resource is linked into the main package stage area.
"""
when = kwargs.get('when', pkg.name)
destination = kwargs.get('destination', "")
placement = kwargs.get('placement', None)
# Check if the path is relative
if os.path.isabs(destination):
- message = "The destination keyword of a resource directive can't be an absolute path.\n"
+ message = "The destination keyword of a resource directive can't be"
+ " an absolute path.\n"
message += "\tdestination : '{dest}\n'".format(dest=destination)
raise RuntimeError(message)
# Check if the path falls within the main package stage area
- test_path = 'stage_folder_root/'
- normalized_destination = os.path.normpath(join_path(test_path, destination)) # Normalized absolute path
+ test_path = 'stage_folder_root'
+ normalized_destination = os.path.normpath(join_path(test_path, destination)
+ ) # Normalized absolute path
if test_path not in normalized_destination:
- message = "The destination folder of a resource must fall within the main package stage directory.\n"
+ message = "The destination folder of a resource must fall within the"
+ " main package stage directory.\n"
message += "\tdestination : '{dest}'\n".format(dest=destination)
raise RuntimeError(message)
when_spec = parse_anonymous_spec(when, pkg.name)
@@ -307,6 +312,7 @@ def resource(pkg, **kwargs):
class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""
+
def __init__(self, directive, message):
super(DirectiveError, self).__init__(message)
self.directive = directive
@@ -314,6 +320,7 @@ class DirectiveError(spack.error.SpackError):
class CircularReferenceError(DirectiveError):
"""This is raised when something depends on itself."""
+
def __init__(self, directive, package):
super(CircularReferenceError, self).__init__(
directive,
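
For reference, a hedged example of how the keywords described in the reflowed resource() docstring are typically combined in a package file; the package name, URLs, and checksums below are placeholders rather than anything added by this commit:

    from spack import *

    class Example(Package):
        """Hypothetical package illustrating the resource() keywords."""
        homepage = "http://www.example.com"
        url = "http://www.example.com/example-1.0.tar.gz"

        version('1.0', '0123456789abcdef0123456789abcdef')
        variant('extras', default=False, description='Also stage the extras')

        # Fetch an extra tarball and link it into <stage>/third_party/extras,
        # but only when the +extras variant is requested.
        resource(name='extras',
                 url='http://www.example.com/extras-1.0.tar.gz',
                 md5='fedcba9876543210fedcba9876543210',
                 when='+extras',
                 destination='third_party',
                 placement='extras')

        def install(self, spec, prefix):
            pass  # build steps omitted in this sketch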
diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py
index 11998ad8d2..af642dcc9b 100644
--- a/lib/spack/spack/environment.py
+++ b/lib/spack/spack/environment.py
@@ -39,7 +39,8 @@ class NameValueModifier(object):
def __init__(self, name, value, **kwargs):
self.name = name
self.value = value
- self.args = {'name': name, 'value': value}
+ self.separator = kwargs.get('separator', ':')
+ self.args = {'name': name, 'value': value, 'delim': self.separator}
self.args.update(kwargs)
@@ -56,34 +57,36 @@ class UnsetEnv(NameModifier):
class SetPath(NameValueModifier):
def execute(self):
- string_path = concatenate_paths(self.value)
+ string_path = concatenate_paths(self.value, separator=self.separator)
os.environ[self.name] = string_path
class AppendPath(NameValueModifier):
def execute(self):
environment_value = os.environ.get(self.name, '')
- directories = environment_value.split(':') if environment_value else []
+ directories = environment_value.split(
+ self.separator) if environment_value else []
directories.append(os.path.normpath(self.value))
- os.environ[self.name] = ':'.join(directories)
+ os.environ[self.name] = self.separator.join(directories)
class PrependPath(NameValueModifier):
def execute(self):
environment_value = os.environ.get(self.name, '')
- directories = environment_value.split(':') if environment_value else []
+ directories = environment_value.split(
+ self.separator) if environment_value else []
directories = [os.path.normpath(self.value)] + directories
- os.environ[self.name] = ':'.join(directories)
+ os.environ[self.name] = self.separator.join(directories)
class RemovePath(NameValueModifier):
def execute(self):
environment_value = os.environ.get(self.name, '')
- directories = environment_value.split(':') if environment_value else []
- directories = [os.path.normpath(x)
- for x in directories
+ directories = environment_value.split(
+ self.separator) if environment_value else []
+ directories = [os.path.normpath(x) for x in directories
if x != os.path.normpath(self.value)]
- os.environ[self.name] = ':'.join(directories)
+ os.environ[self.name] = self.separator.join(directories)
class EnvironmentModifications(object):
@@ -238,17 +241,19 @@ class EnvironmentModifications(object):
x.execute()
-def concatenate_paths(paths):
+def concatenate_paths(paths, separator=':'):
"""
- Concatenates an iterable of paths into a string of column separated paths
+ Concatenates an iterable of paths into a string of paths separated by
+ separator, defaulting to colon
Args:
paths: iterable of paths
+ separator: the separator to use, default ':'
Returns:
string
"""
- return ':'.join(str(item) for item in paths)
+ return separator.join(str(item) for item in paths)
def set_or_unset_not_first(variable, changes, errstream):
@@ -256,16 +261,13 @@ def set_or_unset_not_first(variable, changes, errstream):
Check if we are going to set or unset something after other modifications
have already been requested
"""
- indexes = [ii
- for ii, item in enumerate(changes)
+ indexes = [ii for ii, item in enumerate(changes)
if ii != 0 and type(item) in [SetEnv, UnsetEnv]]
if indexes:
good = '\t \t{context} at {filename}:{lineno}'
nogood = '\t--->\t{context} at {filename}:{lineno}'
message = 'Suspicious requests to set or unset the variable \'{var}\' found' # NOQA: ignore=E501
- errstream(
- message.format(
- var=variable))
+ errstream(message.format(var=variable))
for ii, item in enumerate(changes):
print_format = nogood if ii in indexes else good
errstream(print_format.format(**item.args))
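
The separator keyword added above lets the same path modifiers manage variables that are not colon-delimited; it defaults to ':' so existing callers behave as before, and module generation picks it up through the new 'delim' entry in self.args. A short hedged usage sketch with the modifier classes shown in this hunk:

    from spack.environment import AppendPath, PrependPath

    # Default ':' separator, as before
    PrependPath('PATH', '/opt/tool/bin').execute()

    # A ';'-separated list handled by the same machinery
    AppendPath('EXAMPLE_SEMICOLON_LIST', '/opt/tool/share',
               separator=';').execute()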
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index e05cb13c1e..7c8cebe0c9 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -57,7 +57,6 @@ from spack.version import Version, ver
from spack.util.compression import decompressor_for, extension
import spack.util.pattern as pattern
-
"""List of all fetch strategies, created by FetchStrategy metaclass."""
all_strategies = []
@@ -82,13 +81,16 @@ class FetchStrategy(object):
class __metaclass__(type):
"""This metaclass registers all fetch strategies in a list."""
+
def __init__(cls, name, bases, dict):
type.__init__(cls, name, bases, dict)
- if cls.enabled: all_strategies.append(cls)
+ if cls.enabled:
+ all_strategies.append(cls)
def __init__(self):
- # The stage is initialized late, so that fetch strategies can be constructed
- # at package construction time. This is where things will be fetched.
+ # The stage is initialized late, so that fetch strategies can be
+ # constructed at package construction time. This is where things
+ # will be fetched.
self.stage = None
def set_stage(self, stage):
@@ -97,15 +99,20 @@ class FetchStrategy(object):
self.stage = stage
# Subclasses need to implement these methods
- def fetch(self): pass # Return True on success, False on fail.
+ def fetch(self):
+ pass # Return True on success, False on fail.
- def check(self): pass # Do checksum.
+ def check(self):
+ pass # Do checksum.
- def expand(self): pass # Expand archive.
+ def expand(self):
+ pass # Expand archive.
- def reset(self): pass # Revert to freshly downloaded state.
+ def reset(self):
+ pass # Revert to freshly downloaded state.
- def archive(self, destination): pass # Used to create tarball for mirror.
+ def archive(self, destination):
+ pass # Used to create tarball for mirror.
def __str__(self): # Should be human readable URL.
return "FetchStrategy.__str___"
@@ -139,10 +146,12 @@ class URLFetchStrategy(FetchStrategy):
# If URL or digest are provided in the kwargs, then prefer
# those values.
self.url = kwargs.get('url', None)
- if not self.url: self.url = url
+ if not self.url:
+ self.url = url
self.digest = kwargs.get('md5', None)
- if not self.digest: self.digest = digest
+ if not self.digest:
+ self.digest = digest
self.expand_archive = kwargs.get('expand', True)
@@ -167,16 +176,20 @@ class URLFetchStrategy(FetchStrategy):
tty.msg("Trying to fetch from %s" % self.url)
if partial_file:
- save_args = ['-C', '-', # continue partial downloads
- '-o', partial_file] # use a .part file
+ save_args = ['-C',
+ '-', # continue partial downloads
+ '-o',
+ partial_file] # use a .part file
else:
save_args = ['-O']
curl_args = save_args + [
- '-f', # fail on >400 errors
- '-D', '-', # print out HTML headers
- '-L', # resolve 3xx redirects
- self.url, ]
+ '-f', # fail on >400 errors
+ '-D',
+ '-', # print out HTML headers
+ '-L', # resolve 3xx redirects
+ self.url,
+ ]
if sys.stdout.isatty():
curl_args.append('-#') # status bar when using a tty
@@ -184,8 +197,7 @@ class URLFetchStrategy(FetchStrategy):
curl_args.append('-sS') # just errors when not.
# Run curl but grab the mime type from the http headers
- headers = spack.curl(
- *curl_args, output=str, fail_on_error=False)
+ headers = spack.curl(*curl_args, output=str, fail_on_error=False)
if spack.curl.returncode != 0:
# clean up archive on failure.
@@ -198,33 +210,36 @@ class URLFetchStrategy(FetchStrategy):
if spack.curl.returncode == 22:
# This is a 404. Curl will print the error.
raise FailedDownloadError(
- self.url, "URL %s was not found!" % self.url)
+ self.url, "URL %s was not found!" % self.url)
elif spack.curl.returncode == 60:
# This is a certificate error. Suggest spack -k
raise FailedDownloadError(
- self.url,
- "Curl was unable to fetch due to invalid certificate. "
- "This is either an attack, or your cluster's SSL configuration "
- "is bad. If you believe your SSL configuration is bad, you "
- "can try running spack -k, which will not check SSL certificates."
- "Use this at your own risk.")
+ self.url,
+ "Curl was unable to fetch due to invalid certificate. "
+ "This is either an attack, or your cluster's SSL "
+ "configuration is bad. If you believe your SSL "
+ "configuration is bad, you can try running spack -k, "
+ "which will not check SSL certificates."
+ "Use this at your own risk.")
else:
# This is some other curl error. Curl will print the
# error, but print a spack message too
raise FailedDownloadError(
- self.url, "Curl failed with error %d" % spack.curl.returncode)
+ self.url,
+ "Curl failed with error %d" % spack.curl.returncode)
# Check if we somehow got an HTML file rather than the archive we
# asked for. We only look at the last content type, to handle
# redirects properly.
content_types = re.findall(r'Content-Type:[^\r\n]+', headers)
if content_types and 'text/html' in content_types[-1]:
- tty.warn("The contents of " + self.archive_file + " look like HTML.",
- "The checksum will likely be bad. If it is, you can use",
- "'spack clean <package>' to remove the bad archive, then fix",
- "your internet gateway issue and install again.")
+ tty.warn(
+ "The contents of " + self.archive_file + " look like HTML.",
+ "The checksum will likely be bad. If it is, you can use",
+ "'spack clean <package>' to remove the bad archive, then fix",
+ "your internet gateway issue and install again.")
if save_file:
os.rename(partial_file, save_file)
@@ -247,14 +262,16 @@ class URLFetchStrategy(FetchStrategy):
self.stage.chdir()
if not self.archive_file:
- raise NoArchiveFileError("URLFetchStrategy couldn't find archive file",
- "Failed on expand() for URL %s" % self.url)
+ raise NoArchiveFileError(
+ "URLFetchStrategy couldn't find archive file",
+ "Failed on expand() for URL %s" % self.url)
decompress = decompressor_for(self.archive_file)
# Expand all tarballs in their own directory to contain
# exploding tarballs.
- tarball_container = os.path.join(self.stage.path, "spack-expanded-archive")
+ tarball_container = os.path.join(self.stage.path,
+ "spack-expanded-archive")
mkdirp(tarball_container)
os.chdir(tarball_container)
decompress(self.archive_file)
@@ -295,20 +312,25 @@ class URLFetchStrategy(FetchStrategy):
"""Check the downloaded archive against a checksum digest.
No-op if this stage checks code out of a repository."""
if not self.digest:
- raise NoDigestError("Attempt to check URLFetchStrategy with no digest.")
+ raise NoDigestError(
+ "Attempt to check URLFetchStrategy with no digest.")
checker = crypto.Checker(self.digest)
if not checker.check(self.archive_file):
raise ChecksumError(
- "%s checksum failed for %s" % (checker.hash_name, self.archive_file),
- "Expected %s but got %s" % (self.digest, checker.sum))
+ "%s checksum failed for %s" %
+ (checker.hash_name, self.archive_file),
+ "Expected %s but got %s" % (self.digest, checker.sum))
@_needs_stage
def reset(self):
- """Removes the source path if it exists, then re-expands the archive."""
+ """
+ Removes the source path if it exists, then re-expands the archive.
+ """
if not self.archive_file:
- raise NoArchiveFileError("Tried to reset URLFetchStrategy before fetching",
- "Failed on reset() for URL %s" % self.url)
+ raise NoArchiveFileError(
+ "Tried to reset URLFetchStrategy before fetching",
+ "Failed on reset() for URL %s" % self.url)
# Remove everything but the archive from the stage
for filename in os.listdir(self.stage.path):
@@ -337,14 +359,16 @@ class VCSFetchStrategy(FetchStrategy):
# Set a URL based on the type of fetch strategy.
self.url = kwargs.get(name, None)
- if not self.url: raise ValueError(
+ if not self.url:
+ raise ValueError(
"%s requires %s argument." % (self.__class__, name))
# Ensure that there's only one of the rev_types
if sum(k in kwargs for k in rev_types) > 1:
raise FetchStrategyError(
- "Supply only one of %s to fetch with %s" % (
- comma_or(rev_types), name))
+ "Supply only one of %s to fetch with %s" % (
+ comma_or(rev_types), name
+ ))
# Set attributes for each rev type.
for rt in rev_types:
@@ -382,32 +406,93 @@ class VCSFetchStrategy(FetchStrategy):
return "%s<%s>" % (self.__class__, self.url)
+class GoFetchStrategy(VCSFetchStrategy):
+ """
+ Fetch strategy that employs the `go get` infrastructure
+ Use like this in a package:
+
+ version('name',
+ go='github.com/monochromegane/the_platinum_searcher/...')
+
+ Go get does not natively support versions; they can be faked with git.
+ """
+ enabled = True
+ required_attributes = ('go', )
+
+ def __init__(self, **kwargs):
+ # Discards the keywords in kwargs that may conflict with the next
+ # call to __init__
+ forwarded_args = copy.copy(kwargs)
+ forwarded_args.pop('name', None)
+
+ super(GoFetchStrategy, self).__init__('go', **forwarded_args)
+ self._go = None
+
+ @property
+ def go_version(self):
+ vstring = self.go('version', output=str).split(' ')[2]
+ return Version(vstring)
+
+ @property
+ def go(self):
+ if not self._go:
+ self._go = which('go', required=True)
+ return self._go
+
+ @_needs_stage
+ def fetch(self):
+ self.stage.chdir()
+
+ tty.msg("Trying to get go resource:", self.url)
+
+ try:
+ os.mkdir('go')
+ except OSError:
+ pass
+ env = dict(os.environ)
+ env['GOPATH'] = os.path.join(os.getcwd(), 'go')
+ self.go('get', '-v', '-d', self.url, env=env)
+
+ def archive(self, destination):
+ super(GoFetchStrategy, self).archive(destination, exclude='.git')
+
+ @_needs_stage
+ def reset(self):
+ self.stage.chdir_to_source()
+ self.go('clean')
+
+ def __str__(self):
+ return "[go] %s" % self.url
+
+
class GitFetchStrategy(VCSFetchStrategy):
- """Fetch strategy that gets source code from a git repository.
- Use like this in a package:
+ """
+ Fetch strategy that gets source code from a git repository.
+ Use like this in a package:
- version('name', git='https://github.com/project/repo.git')
+ version('name', git='https://github.com/project/repo.git')
- Optionally, you can provide a branch, or commit to check out, e.g.:
+ Optionally, you can provide a branch, or commit to check out, e.g.:
- version('1.1', git='https://github.com/project/repo.git', tag='v1.1')
+ version('1.1', git='https://github.com/project/repo.git', tag='v1.1')
- You can use these three optional attributes in addition to ``git``:
+ You can use these three optional attributes in addition to ``git``:
- * ``branch``: Particular branch to build from (default is master)
- * ``tag``: Particular tag to check out
- * ``commit``: Particular commit hash in the repo
+ * ``branch``: Particular branch to build from (default is master)
+ * ``tag``: Particular tag to check out
+ * ``commit``: Particular commit hash in the repo
"""
enabled = True
- required_attributes = ('git',)
+ required_attributes = ('git', )
def __init__(self, **kwargs):
- # Discards the keywords in kwargs that may conflict with the next call to __init__
+ # Discards the keywords in kwargs that may conflict with the next call
+ # to __init__
forwarded_args = copy.copy(kwargs)
forwarded_args.pop('name', None)
super(GitFetchStrategy, self).__init__(
- 'git', 'tag', 'branch', 'commit', **forwarded_args)
+ 'git', 'tag', 'branch', 'commit', **forwarded_args)
self._git = None
@property
@@ -515,12 +600,13 @@ class SvnFetchStrategy(VCSFetchStrategy):
required_attributes = ['svn']
def __init__(self, **kwargs):
- # Discards the keywords in kwargs that may conflict with the next call to __init__
+ # Discards the keywords in kwargs that may conflict with the next call
+ # to __init__
forwarded_args = copy.copy(kwargs)
forwarded_args.pop('name', None)
super(SvnFetchStrategy, self).__init__(
- 'svn', 'revision', **forwarded_args)
+ 'svn', 'revision', **forwarded_args)
self._svn = None
if self.revision is not None:
self.revision = str(self.revision)
@@ -576,32 +662,35 @@ class SvnFetchStrategy(VCSFetchStrategy):
class HgFetchStrategy(VCSFetchStrategy):
- """Fetch strategy that gets source code from a Mercurial repository.
- Use like this in a package:
+ """
+ Fetch strategy that gets source code from a Mercurial repository.
+ Use like this in a package:
- version('name', hg='https://jay.grs.rwth-aachen.de/hg/lwm2')
+ version('name', hg='https://jay.grs.rwth-aachen.de/hg/lwm2')
- Optionally, you can provide a branch, or revision to check out, e.g.:
+ Optionally, you can provide a branch, or revision to check out, e.g.:
- version('torus', hg='https://jay.grs.rwth-aachen.de/hg/lwm2', branch='torus')
+ version('torus',
+ hg='https://jay.grs.rwth-aachen.de/hg/lwm2', branch='torus')
- You can use the optional 'revision' attribute to check out a
- branch, tag, or particular revision in hg. To prevent
- non-reproducible builds, using a moving target like a branch is
- discouraged.
+ You can use the optional 'revision' attribute to check out a
+ branch, tag, or particular revision in hg. To prevent
+ non-reproducible builds, using a moving target like a branch is
+ discouraged.
- * ``revision``: Particular revision, branch, or tag.
+ * ``revision``: Particular revision, branch, or tag.
"""
enabled = True
required_attributes = ['hg']
def __init__(self, **kwargs):
- # Discards the keywords in kwargs that may conflict with the next call to __init__
+ # Discards the keywords in kwargs that may conflict with the next call
+ # to __init__
forwarded_args = copy.copy(kwargs)
forwarded_args.pop('name', None)
super(HgFetchStrategy, self).__init__(
- 'hg', 'revision', **forwarded_args)
+ 'hg', 'revision', **forwarded_args)
self._hg = None
@property
@@ -675,7 +764,8 @@ def from_kwargs(**kwargs):
return fetcher(**kwargs)
# Raise an error in case we can't instantiate any known strategy
message = "Cannot instantiate any FetchStrategy"
- long_message = message + " from the given arguments : {arguments}".format(srguments=kwargs)
+ long_message = message + " from the given arguments : {arguments}".format(
+ arguments=kwargs)
raise FetchError(message, long_message)
@@ -687,7 +777,7 @@ def for_package_version(pkg, version):
"""Determine a fetch strategy based on the arguments supplied to
version() in the package description."""
# If it's not a known version, extrapolate one.
- if not version in pkg.versions:
+ if version not in pkg.versions:
url = pkg.url_for_version(version)
if not url:
raise InvalidArgsError(pkg, version)
@@ -725,7 +815,7 @@ class FailedDownloadError(FetchError):
def __init__(self, url, msg=""):
super(FailedDownloadError, self).__init__(
- "Failed to fetch file from URL: %s" % url, msg)
+ "Failed to fetch file from URL: %s" % url, msg)
self.url = url
@@ -741,7 +831,8 @@ class NoDigestError(FetchError):
class InvalidArgsError(FetchError):
def __init__(self, pkg, version):
- msg = "Could not construct a fetch strategy for package %s at version %s"
+ msg = ("Could not construct a fetch strategy for package %s at "
+ "version %s")
msg %= (pkg.name, version)
super(InvalidArgsError, self).__init__(msg)
@@ -758,4 +849,5 @@ class NoStageError(FetchError):
def __init__(self, method):
super(NoStageError, self).__init__(
- "Must call FetchStrategy.set_stage() before calling %s" % method.__name__)
+ "Must call FetchStrategy.set_stage() before calling %s" %
+ method.__name__)
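
A hedged sketch of a package that would use the new GoFetchStrategy; the class name and import path are placeholders (the real consumers added by this commit are the hub and the_platinum_searcher packages):

    from spack import *

    class ExampleGoTool(Package):
        """Hypothetical Go program fetched with `go get`."""
        homepage = "https://github.com/example/tool"

        # `go get` cannot pin a revision, so 'head' is a moving target;
        # reproducible builds would need a git= fetcher instead.
        version('head', go='github.com/example/tool/...')

        depends_on('go')

        def install(self, spec, prefix):
            # The fetcher stages sources under a go/ GOPATH inside the
            # stage directory; a real package would build there and copy
            # the resulting binary into the prefix.
            pass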
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
index a35e21c3db..d2b819e80a 100644
--- a/lib/spack/spack/modules.py
+++ b/lib/spack/spack/modules.py
@@ -485,9 +485,9 @@ class TclModule(EnvModule):
path = join_path(spack.share_path, "modules")
environment_modifications_formats = {
- PrependPath: 'prepend-path {name} \"{value}\"\n',
- AppendPath: 'append-path {name} \"{value}\"\n',
- RemovePath: 'remove-path {name} \"{value}\"\n',
+ PrependPath: 'prepend-path --delim "{delim}" {name} \"{value}\"\n',
+ AppendPath: 'append-path --delim "{delim}" {name} \"{value}\"\n',
+ RemovePath: 'remove-path --delim "{delim}" {name} \"{value}\"\n',
SetEnv: 'setenv {name} \"{value}\"\n',
UnsetEnv: 'unsetenv {name}\n'
}
diff --git a/lib/spack/spack/multimethod.py b/lib/spack/spack/multimethod.py
index 5fda9328d6..170ef3cea2 100644
--- a/lib/spack/spack/multimethod.py
+++ b/lib/spack/spack/multimethod.py
@@ -146,12 +146,12 @@ class when(object):
def install(self, prefix):
# Do default install
- @when('=chaos_5_x86_64_ib')
+ @when('arch=chaos_5_x86_64_ib')
def install(self, prefix):
# This will be executed instead of the default install if
# the package's sys_type() is chaos_5_x86_64_ib.
- @when('=bgqos_0")
+ @when('arch=bgqos_0')
def install(self, prefix):
# This will be executed if the package's sys_type is bgqos_0
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 8167341127..2e7d8a7709 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -857,7 +857,8 @@ class Package(object):
skip_patch=False,
verbose=False,
make_jobs=None,
- fake=False):
+ fake=False,
+ explicit=False):
"""Called by commands to install a package and its dependencies.
Package implementations should override install() to describe
@@ -887,6 +888,11 @@ class Package(object):
# Ensure package is not already installed
if spack.install_layout.check_installed(self.spec):
tty.msg("%s is already installed in %s" % (self.name, self.prefix))
+ rec = spack.installed_db.get_record(self.spec)
+ if (not rec.explicit) and explicit:
+ with spack.installed_db.write_transaction():
+ rec = spack.installed_db.get_record(self.spec)
+ rec.explicit = True
return
tty.msg("Installing %s" % self.name)
@@ -995,7 +1001,7 @@ class Package(object):
# note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
- spack.installed_db.add(self.spec, self.prefix)
+ spack.installed_db.add(self.spec, self.prefix, explicit=explicit)
def sanity_check_prefix(self):
"""This function checks whether install succeeded."""
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 89a023a750..58bdc0e430 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -72,7 +72,9 @@ Here is the EBNF grammar for a spec::
dep_list = { ^ spec }
spec = id [ options ]
options = { @version-list | +variant | -variant | ~variant |
- %compiler | =architecture }
+ %compiler | arch=architecture | [ flag ]=value}
+ flag = { cflags | cxxflags | fcflags | fflags | cppflags |
+ ldflags | ldlibs }
variant = id
architecture = id
compiler = id [ version-list ]
@@ -80,6 +82,9 @@ Here is the EBNF grammar for a spec::
version = id | id: | :id | id:id
id = [A-Za-z0-9_][A-Za-z0-9_.-]*
+Identifiers using the <name>=<value> syntax, such as architectures and
+compiler flags, require a space before the name.
+
There is one context-sensitive part: ids in versions may contain '.', while
other ids may not.
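
Concretely, the extended grammar accepts compiler flags and the architecture as space-separated name=value pairs, with quoted values keeping multi-token flag lists together; 'arch=' replaces the old bare '=architecture' form. A hedged sketch of parsing such a spec in Python (package and architecture names are placeholders):

    from spack.spec import Spec

    s = Spec('mpileaks @2.3 %gcc@4.9.2 cflags="-O3 -g" arch=chaos_5_x86_64_ib')

    # The flags land in the FlagMap introduced later in this file:
    #   s.compiler_flags['cflags'] == ['-O3', '-g']
    # and the architecture is set as before:
    #   s.architecture == 'chaos_5_x86_64_ib'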
@@ -108,6 +113,8 @@ import spack.parse
import spack.error
import spack.compilers as compilers
+# TODO: move display_specs to some other location.
+from spack.cmd.find import display_specs
from spack.version import *
from spack.util.string import *
from spack.util.prefix import Prefix
@@ -143,7 +150,6 @@ _separators = '[%s]' % ''.join(color_formats.keys())
every time we call str()"""
_any_version = VersionList([':'])
-
def index_specs(specs):
"""Take a list of specs and return a dict of lists. Dict is
keyed by spec name and lists include all specs with the
@@ -296,22 +302,25 @@ class VariantSpec(object):
on the particular package being built, and each named variant can
be enabled or disabled.
"""
- def __init__(self, name, enabled):
+ def __init__(self, name, value):
self.name = name
- self.enabled = enabled
+ self.value = value
def _cmp_key(self):
- return (self.name, self.enabled)
+ return (self.name, self.value)
def copy(self):
- return VariantSpec(self.name, self.enabled)
+ return VariantSpec(self.name, self.value)
def __str__(self):
- out = '+' if self.enabled else '~'
- return out + self.name
+ if self.value in [True,False]:
+ out = '+' if self.value else '~'
+ return out + self.name
+ else:
+ return ' ' + self.name + "=" + self.value
class VariantMap(HashableMap):
@@ -322,10 +331,10 @@ class VariantMap(HashableMap):
def satisfies(self, other, strict=False):
if strict or self.spec._concrete:
- return all(k in self and self[k].enabled == other[k].enabled
+ return all(k in self and self[k].value == other[k].value
for k in other)
else:
- return all(self[k].enabled == other[k].enabled
+ return all(self[k].value == other[k].value
for k in other if k in self)
@@ -343,7 +352,7 @@ class VariantMap(HashableMap):
changed = False
for k in other:
if k in self:
- if self[k].enabled != other[k].enabled:
+ if self[k].value != other[k].value:
raise UnsatisfiableVariantSpecError(self[k], other[k])
else:
self[k] = other[k].copy()
@@ -368,6 +377,70 @@ class VariantMap(HashableMap):
return ''.join(str(self[key]) for key in sorted_keys)
+_valid_compiler_flags = [
+ 'cflags', 'cxxflags', 'fflags', 'ldflags', 'ldlibs', 'cppflags']
+
+class FlagMap(HashableMap):
+ def __init__(self, spec):
+ super(FlagMap, self).__init__()
+ self.spec = spec
+
+
+ def satisfies(self, other, strict=False):
+ if strict or (self.spec and self.spec._concrete):
+ return all(f in self and set(self[f]) <= set(other[f])
+ for f in other)
+ else:
+ return all(set(self[f]) <= set(other[f])
+ for f in other if (other[f] != [] and f in self))
+
+
+ def constrain(self, other):
+ """Add all flags in other that aren't in self to self.
+
+ Return whether the spec changed.
+ """
+ if other.spec and other.spec._concrete:
+ for k in self:
+ if k not in other:
+ raise UnsatisfiableCompilerFlagSpecError(self[k], '<absent>')
+
+ changed = False
+ for k in other:
+ if k in self and not set(self[k]) <= set(other[k]):
+ raise UnsatisfiableCompilerFlagSpecError(
+ ' '.join(f for f in self[k]), ' '.join( f for f in other[k]))
+ elif k not in self:
+ self[k] = other[k]
+ changed = True
+ return changed
+
+ @staticmethod
+ def valid_compiler_flags():
+ return _valid_compiler_flags
+
+ @property
+ def concrete(self):
+ return all(flag in self for flag in _valid_compiler_flags)
+
+
+ def copy(self):
+ clone = FlagMap(None)
+ for name, value in self.items():
+ clone[name] = value
+ return clone
+
+
+ def _cmp_key(self):
+ return ''.join(str(key) + ' '.join(str(v) for v in value) for key, value in sorted(self.items()))
+
+
+ def __str__(self):
+ sorted_keys = filter(lambda flag: self[flag] != [], sorted(self.keys()))
+ cond_symbol = ' ' if len(sorted_keys)>0 else ''
+ return cond_symbol + ' '.join(str(key) + '=\"' + ' '.join(str(f) for f in self[key]) + '\"' for key in sorted_keys)
+
+
class DependencyMap(HashableMap):
"""Each spec has a DependencyMap containing specs for its dependencies.
The DependencyMap is keyed by name. """
@@ -409,10 +482,13 @@ class Spec(object):
self.versions = other.versions
self.architecture = other.architecture
self.compiler = other.compiler
+ self.compiler_flags = other.compiler_flags
+ self.compiler_flags.spec = self
self.dependencies = other.dependencies
self.variants = other.variants
self.variants.spec = self
self.namespace = other.namespace
+ self._hash = other._hash
# Specs are by default not assumed to be normal, but in some
# cases we've read them from a file want to assume normal.
@@ -440,13 +516,30 @@ class Spec(object):
self.versions.add(version)
- def _add_variant(self, name, enabled):
+ def _add_variant(self, name, value):
"""Called by the parser to add a variant."""
if name in self.variants: raise DuplicateVariantError(
"Cannot specify variant '%s' twice" % name)
- self.variants[name] = VariantSpec(name, enabled)
+ if isinstance(value, basestring) and value.upper() == 'TRUE':
+ value = True
+ elif isinstance(value, basestring) and value.upper() == 'FALSE':
+ value = False
+ self.variants[name] = VariantSpec(name, value)
+ def _add_flag(self, name, value):
+        """Called by the parser to add a 'name=value' pair.
+        Known names are 'arch' and the valid compiler flags;
+        anything else is treated as a variant."""
+ valid_flags = FlagMap.valid_compiler_flags()
+ if name == 'arch':
+ self._set_architecture(value)
+ elif name in valid_flags:
+ assert(self.compiler_flags is not None)
+ self.compiler_flags[name] = value.split()
+ else:
+ self._add_variant(name,value)
+
def _set_compiler(self, compiler):
"""Called by the parser to set the compiler."""
if self.compiler: raise DuplicateCompilerSpecError(
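
Every name=value pair the parser sees is routed through _add_flag, which dispatches on the name: 'arch' sets the architecture, names in _valid_compiler_flags become flag lists, and anything else falls back to a (possibly string-valued) variant. Roughly, assuming this patch is applied:

    from spack.spec import Spec

    s = Spec('mpich arch=x86-linux cppflags="-O3 -g" debug=2')
    print(s.architecture)                # x86-linux
    print(s.compiler_flags['cppflags'])  # ['-O3', '-g']
    print(s.variants['debug'].value)     # 2
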
@@ -473,7 +566,7 @@ class Spec(object):
#
@property
def fullname(self):
- return '%s.%s' % (self.namespace, self.name) if self.namespace else self.name
+ return '%s.%s' % (self.namespace, self.name) if self.namespace else (self.name if self.name else '')
@property
@@ -523,7 +616,7 @@ class Spec(object):
@staticmethod
def is_virtual(name):
"""Test if a name is virtual without requiring a Spec."""
- return not spack.repo.exists(name)
+        return (name is not None) and (not spack.repo.exists(name))
@property
@@ -541,8 +634,8 @@ class Spec(object):
and self.variants.concrete
and self.architecture
and self.compiler and self.compiler.concrete
+ and self.compiler_flags.concrete
and self.dependencies.concrete)
-
return self._concrete
@@ -664,19 +757,25 @@ class Spec(object):
"""
Return a hash of the entire spec DAG, including connectivity.
"""
- yaml_text = yaml.dump(
- self.to_node_dict(), default_flow_style=True, width=sys.maxint)
- sha = hashlib.sha1(yaml_text)
- return base64.b32encode(sha.digest()).lower()[:length]
-
+ if self._hash:
+ return self._hash[:length]
+ else:
+ yaml_text = yaml.dump(
+ self.to_node_dict(), default_flow_style=True, width=sys.maxint)
+ sha = hashlib.sha1(yaml_text)
+ b32_hash = base64.b32encode(sha.digest()).lower()[:length]
+ if self.concrete:
+ self._hash = b32_hash
+ return b32_hash
def to_node_dict(self):
+        params = dict((name, v.value) for name, v in self.variants.items())
+        params.update(dict((name, value) for name, value in self.compiler_flags.items()))
d = {
- 'variants' : dict(
- (name,v.enabled) for name, v in self.variants.items()),
+ 'parameters' : params,
'arch' : self.architecture,
'dependencies' : dict((d, self.dependencies[d].dag_hash())
- for d in sorted(self.dependencies))
+ for d in sorted(self.dependencies)),
}
# Older concrete specs do not have a namespace. Omit for
@@ -689,6 +788,7 @@ class Spec(object):
else:
d['compiler'] = None
d.update(self.versions.to_dict())
+
return { self.name : d }
@@ -712,13 +812,27 @@ class Spec(object):
spec.versions = VersionList.from_dict(node)
spec.architecture = node['arch']
+ if 'hash' in node:
+ spec._hash = node['hash']
+
if node['compiler'] is None:
spec.compiler = None
else:
spec.compiler = CompilerSpec.from_dict(node)
- for name, enabled in node['variants'].items():
- spec.variants[name] = VariantSpec(name, enabled)
+ if 'parameters' in node:
+ for name, value in node['parameters'].items():
+ if name in _valid_compiler_flags:
+ spec.compiler_flags[name] = value
+ else:
+ spec.variants[name] = VariantSpec(name, value)
+ elif 'variants' in node:
+ for name, value in node['variants'].items():
+ spec.variants[name] = VariantSpec(name, value)
+ for name in FlagMap.valid_compiler_flags():
+ spec.compiler_flags[name] = []
+ else:
+ raise SpackRecordError("Did not find a valid format for variants in YAML file")
return spec
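
to_node_dict() now folds variants and compiler flags into a single 'parameters' mapping, while from_node_dict() still accepts the older 'variants' key so previously written YAML keeps loading. A rough sketch of the new node layout, assuming a Spack checkout with this change applied:

    from spack.spec import Spec

    s = Spec('libelf@0.8.13 cppflags="-O3" +debug')
    node = s.to_node_dict()['libelf']
    print(node['parameters'])   # {'cppflags': ['-O3'], 'debug': True}  (key order may vary)
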
@@ -781,11 +895,13 @@ class Spec(object):
# Concretize virtual dependencies last. Because they're added
# to presets below, their constraints will all be merged, but we'll
# still need to select a concrete package later.
- changed |= any(
- (spack.concretizer.concretize_architecture(self),
- spack.concretizer.concretize_compiler(self),
- spack.concretizer.concretize_version(self),
- spack.concretizer.concretize_variants(self)))
+ if not self.virtual:
+ changed |= any(
+ (spack.concretizer.concretize_architecture(self),
+ spack.concretizer.concretize_compiler(self),
+                 spack.concretizer.concretize_compiler_flags(self),  # must run after concretize_compiler
+ spack.concretizer.concretize_version(self),
+ spack.concretizer.concretize_variants(self)))
presets[self.name] = self
visited.add(self.name)
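
Because concretize_compiler_flags() runs after the compiler is chosen, a fully concretized spec ends up with an entry for every valid compiler flag (possibly empty), which the updated concretize test below relies on. A sketch, assuming a configured Spack and a repo that can concretize mpich:

    from spack.spec import Spec

    s = Spec('mpich cppflags="-O3"')
    s.concretize()
    print(sorted(s.compiler_flags.keys()))  # cflags, cppflags, cxxflags, fflags, ldflags, ldlibs
    print(s.compiler_flags['cppflags'])     # ['-O3']
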
@@ -928,6 +1044,8 @@ class Spec(object):
        with requirements of its packages.  See flatten() and normalize() for
more details on this.
"""
+ if not self.name:
+ raise SpecError("Attempting to concretize anonymous spec")
if self._concrete:
return
@@ -1071,6 +1189,11 @@ class Spec(object):
# If there is a provider for the vpkg, then use that instead of
# the virtual package.
if providers:
+                # Remove duplicate providers that can concretize to the same result.
+                for provider in list(providers):
+                    for spec in list(providers):
+                        if spec is not provider and provider.satisfies(spec):
+                            providers.remove(spec)
# Can't have multiple providers for the same thing in one spec.
if len(providers) > 1:
raise MultipleProviderError(vdep, providers)
@@ -1123,13 +1246,11 @@ class Spec(object):
if required:
raise UnsatisfiableProviderSpecError(required[0], dep)
provider_index.update(dep)
-
# If the spec isn't already in the set of dependencies, clone
# it from the package description.
if dep.name not in spec_deps:
spec_deps[dep.name] = dep.copy()
changed = True
-
# Constrain package information with spec info
try:
changed |= spec_deps[dep.name].constrain(dep)
@@ -1172,7 +1293,6 @@ class Spec(object):
for dep_name in pkg.dependencies:
# Do we depend on dep_name? If so pkg_dep is not None.
pkg_dep = self._evaluate_dependency_conditions(dep_name)
-
# If pkg_dep is a dependency, merge it.
if pkg_dep:
changed |= self._merge_dependency(
@@ -1197,8 +1317,10 @@ class Spec(object):
TODO: normalize should probably implement some form of cycle detection,
to ensure that the spec is actually a DAG.
-
"""
+ if not self.name:
+ raise SpecError("Attempting to normalize anonymous spec")
+
if self._normal and not force:
return False
@@ -1243,7 +1365,7 @@ class Spec(object):
"""
for spec in self.traverse():
# Don't get a package for a virtual name.
- if not spec.virtual:
+ if (not spec.virtual) and spec.name:
spack.repo.get(spec.fullname)
# validate compiler in addition to the package name.
@@ -1264,7 +1386,7 @@ class Spec(object):
"""
other = self._autospec(other)
- if not self.name == other.name:
+ if not (self.name == other.name or (not self.name) or (not other.name) ):
raise UnsatisfiableSpecNameError(self.name, other.name)
if other.namespace is not None:
@@ -1276,7 +1398,7 @@ class Spec(object):
for v in other.variants:
if (v in self.variants and
- self.variants[v].enabled != other.variants[v].enabled):
+ self.variants[v].value != other.variants[v].value):
raise UnsatisfiableVariantSpecError(self.variants[v],
other.variants[v])
@@ -1295,6 +1417,8 @@ class Spec(object):
changed |= self.versions.intersect(other.versions)
changed |= self.variants.constrain(other.variants)
+ changed |= self.compiler_flags.constrain(other.compiler_flags)
+
old = self.architecture
self.architecture = self.architecture or other.architecture
changed |= (self.architecture != old)
@@ -1365,7 +1489,10 @@ class Spec(object):
return spec_like
try:
- return spack.spec.Spec(spec_like)
+ spec = spack.spec.Spec(spec_like)
+ if not spec.name:
+ raise SpecError("anonymous package -- this will always be handled")
+ return spec
except SpecError:
return parse_anonymous_spec(spec_like, self.name)
@@ -1385,7 +1512,7 @@ class Spec(object):
"""
other = self._autospec(other)
- # A concrete provider can satisfy a virtual dependency.
+ # A concrete provider can satisfy a virtual dependency.
if not self.virtual and other.virtual:
pkg = spack.repo.get(self.fullname)
if pkg.provides(other.name):
@@ -1396,7 +1523,7 @@ class Spec(object):
return False
# Otherwise, first thing we care about is whether the name matches
- if self.name != other.name:
+ if self.name != other.name and self.name and other.name:
return False
# namespaces either match, or other doesn't require one.
@@ -1417,7 +1544,10 @@ class Spec(object):
elif strict and (other.compiler and not self.compiler):
return False
- if not self.variants.satisfies(other.variants, strict=strict):
+ var_strict = strict
+ if (not self.name) or (not other.name):
+ var_strict = True
+ if not self.variants.satisfies(other.variants, strict=var_strict):
return False
# Architecture satisfaction is currently just string equality.
@@ -1428,9 +1558,15 @@ class Spec(object):
elif strict and (other.architecture and not self.architecture):
return False
+ if not self.compiler_flags.satisfies(other.compiler_flags, strict=strict):
+ return False
+
# If we need to descend into dependencies, do it, otherwise we're done.
if deps:
- return self.satisfies_dependencies(other, strict=strict)
+ deps_strict = strict
+ if not (self.name and other.name):
+ deps_strict=True
+ return self.satisfies_dependencies(other, strict=deps_strict)
else:
return True
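
satisfies() now lets either side be anonymous (the name check is skipped, but variant and dependency checks turn strict) and folds compiler flags into the decision alongside variants and architecture. A sketch mirroring assertions from the spec_semantics tests in this diff, assuming this patch is applied:

    from spack.spec import Spec

    print(Spec('mpich+foo').satisfies('mpich foo=True'))        # True: synonymous syntax
    print(Spec('mpich cppflags="-O3"').satisfies('mpich'))      # True: no flags requested
    print(Spec('foo arch=bgqos_0').satisfies(' arch=bgqos_0'))  # True: anonymous constraint
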
@@ -1510,10 +1646,12 @@ class Spec(object):
if kwargs.get('cleardeps', True):
self.dependents = DependencyMap()
self.dependencies = DependencyMap()
+ self.compiler_flags = other.compiler_flags.copy()
self.variants = other.variants.copy()
self.variants.spec = self
self.external = other.external
self.namespace = other.namespace
+ self._hash = other._hash
# If we copy dependencies, preserve DAG structure in the new spec
if kwargs.get('deps', True):
@@ -1638,7 +1776,9 @@ class Spec(object):
self.versions,
self.variants,
self.architecture,
- self.compiler)
+ self.compiler,
+ self.compiler_flags,
+ self.dag_hash())
def eq_node(self, other):
@@ -1667,7 +1807,7 @@ class Spec(object):
return colorize_spec(self)
- def format(self, format_string='$_$@$%@$+$=', **kwargs):
+ def format(self, format_string='$_$@$%@+$+$=', **kwargs):
"""Prints out particular pieces of a spec, depending on what is
in the format string. The format strings you can provide are::
@@ -1676,8 +1816,10 @@ class Spec(object):
$@ Version with '@' prefix
$% Compiler with '%' prefix
$%@ Compiler with '%' prefix & compiler version with '@' prefix
+ $%+ Compiler with '%' prefix & compiler flags prefixed by name
+ $%@+ Compiler, compiler version, and compiler flags with same prefixes as above
$+ Options
- $= Architecture with '=' prefix
+ $= Architecture prefixed by 'arch='
$# 7-char prefix of DAG hash with '-' prefix
$$ $
@@ -1688,6 +1830,7 @@ class Spec(object):
${COMPILER} Full compiler string
${COMPILERNAME} Compiler name
${COMPILERVER} Compiler version
+ ${COMPILERFLAGS} Compiler flags
${OPTIONS} Options
${ARCHITECTURE} Architecture
${SHA1} Dependencies 8-char sha1 prefix
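
The default format string gains a '+' after '$%@' so compiler flags print next to the compiler, and '$=' now emits 'arch=' instead of a bare '='. A hedged example of the new rendering (spacing follows FlagMap.__str__ above), assuming this patch is applied:

    from spack.spec import Spec

    s = Spec('libelf@0.8.13 %gcc@4.9.2 cppflags="-O3" arch=x86-linux')
    print(s.format('$_$@$%@+$+$='))
    # libelf@0.8.13%gcc@4.9.2 cppflags="-O3" arch=x86-linux
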
@@ -1734,7 +1877,8 @@ class Spec(object):
fmt += 's'
if c == '_':
- out.write(fmt % self.name)
+ name = self.name if self.name else ''
+ out.write(fmt % name)
elif c == '.':
out.write(fmt % self.fullname)
elif c == '@':
@@ -1749,7 +1893,7 @@ class Spec(object):
write(fmt % str(self.variants), c)
elif c == '=':
if self.architecture:
- write(fmt % (c + str(self.architecture)), c)
+ write(fmt % (' arch' + c + str(self.architecture)), c)
elif c == '#':
out.write('-' + fmt % (self.dag_hash(7)))
elif c == '$':
@@ -1766,11 +1910,16 @@ class Spec(object):
if (self.compiler and self.compiler.versions and
self.compiler.versions != _any_version):
write(c + str(self.compiler.versions), '%')
+ elif c == '+':
+ if self.compiler_flags:
+ write(fmt % str(self.compiler_flags), '%')
+ compiler = False
elif c == '$':
escape = True
+ compiler = False
else:
out.write(c)
- compiler = False
+ compiler = False
elif named:
if not c == '}':
@@ -1780,6 +1929,7 @@ class Spec(object):
named_str += c
continue;
if named_str == 'PACKAGE':
+ name = self.name if self.name else ''
write(fmt % self.name, '@')
if named_str == 'VERSION':
if self.versions and self.versions != _any_version:
@@ -1793,6 +1943,9 @@ class Spec(object):
elif named_str == 'COMPILERVER':
if self.compiler:
write(fmt % self.compiler.versions, '%')
+ elif named_str == 'COMPILERFLAGS':
+ if self.compiler:
+ write(fmt % str(self.compiler_flags), '%')
elif named_str == 'OPTIONS':
if self.variants:
write(fmt % str(self.variants), '+')
@@ -1837,7 +1990,7 @@ class Spec(object):
showid = kwargs.pop('ids', False)
cover = kwargs.pop('cover', 'nodes')
indent = kwargs.pop('indent', 0)
- fmt = kwargs.pop('format', '$_$@$%@$+$=')
+ fmt = kwargs.pop('format', '$_$@$%@+$+$=')
prefix = kwargs.pop('prefix', None)
check_kwargs(kwargs, self.tree)
@@ -1869,12 +2022,13 @@ class Spec(object):
#
# These are possible token types in the spec grammar.
#
-DEP, AT, COLON, COMMA, ON, OFF, PCT, EQ, ID = range(9)
+HASH, DEP, AT, COLON, COMMA, ON, OFF, PCT, EQ, QT, ID = range(11)
class SpecLexer(spack.parse.Lexer):
"""Parses tokens that make up spack specs."""
def __init__(self):
super(SpecLexer, self).__init__([
+ (r'/', lambda scanner, val: self.token(HASH, val)),
(r'\^', lambda scanner, val: self.token(DEP, val)),
(r'\@', lambda scanner, val: self.token(AT, val)),
(r'\:', lambda scanner, val: self.token(COLON, val)),
@@ -1886,6 +2040,7 @@ class SpecLexer(spack.parse.Lexer):
(r'\=', lambda scanner, val: self.token(EQ, val)),
# This is more liberal than identifier_re (see above).
# Checked by check_identifier() for better error messages.
+ (r'([\"\'])(?:(?=(\\?))\2.)*?\1',lambda scanner, val: self.token(QT, val)),
(r'\w[\w.-]*', lambda scanner, val: self.token(ID, val)),
(r'\s+', lambda scanner, val: None)])
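
The lexer grows two token kinds: HASH for '/', used to reference installed specs by DAG hash, and QT for single- or double-quoted strings, so flag values containing spaces survive tokenization. Spec strings that now parse, assuming this patch is applied:

    from spack.spec import Spec

    Spec('mvapich_foo ^_openmpi@1.2:1.4 cppflags="-O3 -g"')  # quoted, multi-flag value
    Spec('cppflags="-O3"')                                   # anonymous spec, no package name
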
@@ -1893,27 +2048,52 @@ class SpecLexer(spack.parse.Lexer):
class SpecParser(spack.parse.Parser):
def __init__(self):
super(SpecParser, self).__init__(SpecLexer())
-
+ self.previous = None
def do_parse(self):
specs = []
try:
while self.next:
+ # TODO: clean this parsing up a bit
+ if self.previous:
+ specs.append(self.spec(self.previous.value))
if self.accept(ID):
- specs.append(self.spec())
+ self.previous = self.token
+ if self.accept(EQ):
+ if not specs:
+ specs.append(self.spec(None))
+ if self.accept(QT):
+ self.token.value = self.token.value[1:-1]
+ else:
+ self.expect(ID)
+ specs[-1]._add_flag(self.previous.value, self.token.value)
+ else:
+ specs.append(self.spec(self.previous.value))
+ self.previous = None
+ elif self.accept(HASH):
+ specs.append(self.spec_by_hash())
elif self.accept(DEP):
if not specs:
- self.last_token_error("Dependency has no package")
- self.expect(ID)
- specs[-1]._add_dependency(self.spec())
+ self.previous = self.token
+ specs.append(self.spec(None))
+ self.previous = None
+ if self.accept(HASH):
+ specs[-1]._add_dependency(self.spec_by_hash())
+ else:
+ self.expect(ID)
+ specs[-1]._add_dependency(self.spec(self.token.value))
else:
- self.unexpected_token()
+ # Attempt to construct an anonymous spec, but check that the first token is valid
+ # TODO: Is this check even necessary, or will it all be Lex errors now?
+ specs.append(self.spec(None,True))
+
except spack.parse.ParseError, e:
raise SpecParseError(e)
+
return specs
@@ -1922,15 +2102,35 @@ class SpecParser(spack.parse.Parser):
return self.compiler()
- def spec(self):
+ def spec_by_hash(self):
+ self.expect(ID)
+
+ specs = spack.installed_db.query()
+ matches = [spec for spec in specs if
+ spec.dag_hash()[:len(self.token.value)] == self.token.value]
+
+ if not matches:
+            tty.die("%s does not match any installed packages." % self.token.value)
+
+ if len(matches) != 1:
+ raise AmbiguousHashError("Multiple packages specify hash %s." % self.token.value, *matches)
+
+ return matches[0]
+
+
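
spec_by_hash() resolves a '/'-prefixed hash prefix against the installed database and refuses to guess: zero matches is fatal and more than one raises AmbiguousHashError. The equivalent lookup done by hand, assuming spack.installed_db has entries (the prefix below is hypothetical):

    import spack

    prefix = 'abc123'   # hypothetical hash prefix
    matches = [s for s in spack.installed_db.query()
               if s.dag_hash().startswith(prefix)]
    # the parser dies on len(matches) == 0 and raises AmbiguousHashError on > 1
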
+ def spec(self, name, check_valid_token = False):
"""Parse a spec out of the input. If a spec is supplied, then initialize
and return it instead of creating a new one."""
-
- spec_namespace, dot, spec_name = self.token.value.rpartition('.')
- if not spec_namespace:
+ if name:
+ spec_namespace, dot, spec_name = name.rpartition('.')
+ if not spec_namespace:
+ spec_namespace = None
+ self.check_identifier(spec_name)
+ else:
spec_namespace = None
+ spec_name = None
+
- self.check_identifier(spec_name)
# This will init the spec without calling __init__.
spec = Spec.__new__(Spec)
@@ -1940,9 +2140,11 @@ class SpecParser(spack.parse.Parser):
spec.architecture = None
spec.compiler = None
spec.external = None
+ spec.compiler_flags = FlagMap(spec)
spec.dependents = DependencyMap()
spec.dependencies = DependencyMap()
spec.namespace = spec_namespace
+ spec._hash = None
spec._normal = False
spec._concrete = False
@@ -1951,26 +2153,50 @@ class SpecParser(spack.parse.Parser):
# unspecified or not.
added_version = False
+ if self.previous and self.previous.value == DEP:
+ if self.accept(HASH):
+ spec.add_dependency(self.spec_by_hash())
+ else:
+ self.expect(ID)
+ if self.accept(EQ):
+                    raise SpecParseError(spack.parse.ParseError("", "", "Expected dependency, received anonymous spec"))
+ spec.add_dependency(self.spec(self.token.value))
+
while self.next:
if self.accept(AT):
vlist = self.version_list()
for version in vlist:
spec._add_version(version)
added_version = True
+ check_valid_token = False
elif self.accept(ON):
spec._add_variant(self.variant(), True)
+ check_valid_token = False
elif self.accept(OFF):
- spec._add_variant(self.variant(), False)
+                spec._add_variant(self.variant(), False)
+ check_valid_token = False
elif self.accept(PCT):
spec._set_compiler(self.compiler())
+ check_valid_token = False
- elif self.accept(EQ):
- spec._set_architecture(self.architecture())
+ elif self.accept(ID):
+ self.previous = self.token
+ if self.accept(EQ):
+ if self.accept(QT):
+ self.token.value = self.token.value[1:-1]
+ else:
+ self.expect(ID)
+ spec._add_flag(self.previous.value, self.token.value)
+ self.previous = None
+ else:
+ return spec
else:
+ if check_valid_token:
+ self.unexpected_token()
break
        # If there was no version in the spec, consider it an open range
@@ -1980,13 +2206,17 @@ class SpecParser(spack.parse.Parser):
return spec
- def variant(self):
- self.expect(ID)
- self.check_identifier()
- return self.token.value
-
+    def variant(self, name=None):
+        # TODO: Make generalized variants possible
+ if name:
+ return name
+ else:
+ self.expect(ID)
+ self.check_identifier()
+ return self.token.value
def architecture(self):
+        # TODO: Make this work properly as a subcase of variant (includes adding names to grammar)
self.expect(ID)
return self.token.value
@@ -2068,8 +2298,10 @@ def parse_anonymous_spec(spec_like, pkg_name):
if isinstance(spec_like, str):
try:
anon_spec = Spec(spec_like)
+ if anon_spec.name != pkg_name:
+ raise SpecParseError(spack.parse.ParseError("","","Expected anonymous spec for package %s but found spec for package %s" % (pkg_name, anon_spec.name) ))
except SpecParseError:
- anon_spec = Spec(pkg_name + spec_like)
+ anon_spec = Spec(pkg_name + ' ' + spec_like)
if anon_spec.name != pkg_name: raise ValueError(
"Invalid spec for package %s: %s" % (pkg_name, spec_like))
else:
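
parse_anonymous_spec() now rejects a parsed spec that names a different package, and its fallback joins with a space so the fragment is treated as an anonymous constraint on pkg_name rather than being glued onto the name. An illustrative sketch of the fallback path, assuming this patch is applied:

    from spack.spec import parse_anonymous_spec

    spec = parse_anonymous_spec('+debug arch=bgqos_0', 'libelf')
    print(spec.name)                     # libelf
    print(spec.variants['debug'].value)  # True
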
@@ -2170,7 +2402,6 @@ class MultipleProviderError(SpecError):
self.vpkg = vpkg
self.providers = providers
-
class UnsatisfiableSpecError(SpecError):
"""Raised when a spec conflicts with package constraints.
Provide the requirement that was violated when raising."""
@@ -2209,6 +2440,11 @@ class UnsatisfiableVariantSpecError(UnsatisfiableSpecError):
super(UnsatisfiableVariantSpecError, self).__init__(
provided, required, "variant")
+class UnsatisfiableCompilerFlagSpecError(UnsatisfiableSpecError):
+    """Raised when a spec compiler flag conflicts with package constraints."""
+ def __init__(self, provided, required):
+ super(UnsatisfiableCompilerFlagSpecError, self).__init__(
+ provided, required, "compiler_flags")
class UnsatisfiableArchitectureSpecError(UnsatisfiableSpecError):
"""Raised when a spec architecture conflicts with package constraints."""
@@ -2235,3 +2471,13 @@ class UnsatisfiableDependencySpecError(UnsatisfiableSpecError):
class SpackYAMLError(spack.error.SpackError):
def __init__(self, msg, yaml_error):
super(SpackYAMLError, self).__init__(msg, str(yaml_error))
+
+class SpackRecordError(spack.error.SpackError):
+ def __init__(self, msg):
+ super(SpackRecordError, self).__init__(msg)
+
+class AmbiguousHashError(SpecError):
+ def __init__(self, msg, *specs):
+ super(AmbiguousHashError, self).__init__(msg)
+ for spec in specs:
+ print ' ', spec.format('$.$@$%@+$+$=$#')
diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py
index a630866143..ea2b164462 100644
--- a/lib/spack/spack/test/cc.py
+++ b/lib/spack/spack/test/cc.py
@@ -56,11 +56,16 @@ class CompilerTest(unittest.TestCase):
self.cc = Executable(join_path(spack.build_env_path, "cc"))
self.ld = Executable(join_path(spack.build_env_path, "ld"))
self.cpp = Executable(join_path(spack.build_env_path, "cpp"))
+ self.cxx = Executable(join_path(spack.build_env_path, "c++"))
+ self.fc = Executable(join_path(spack.build_env_path, "fc"))
self.realcc = "/bin/mycc"
self.prefix = "/spack-test-prefix"
os.environ['SPACK_CC'] = self.realcc
+ os.environ['SPACK_CXX'] = self.realcc
+ os.environ['SPACK_FC'] = self.realcc
+
os.environ['SPACK_PREFIX'] = self.prefix
os.environ['SPACK_ENV_PATH']="test"
os.environ['SPACK_DEBUG_LOG_DIR'] = "."
@@ -102,6 +107,15 @@ class CompilerTest(unittest.TestCase):
self.assertEqual(self.cc(*args, output=str).strip(), expected)
+ def check_cxx(self, command, args, expected):
+ os.environ['SPACK_TEST_COMMAND'] = command
+ self.assertEqual(self.cxx(*args, output=str).strip(), expected)
+
+ def check_fc(self, command, args, expected):
+ os.environ['SPACK_TEST_COMMAND'] = command
+ self.assertEqual(self.fc(*args, output=str).strip(), expected)
+
+
def check_ld(self, command, args, expected):
os.environ['SPACK_TEST_COMMAND'] = command
self.assertEqual(self.ld(*args, output=str).strip(), expected)
@@ -142,6 +156,64 @@ class CompilerTest(unittest.TestCase):
self.check_ld('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], "ld")
+ def test_flags(self):
+ os.environ['SPACK_LDFLAGS'] = '-L foo'
+ os.environ['SPACK_LDLIBS'] = '-lfoo'
+ os.environ['SPACK_CPPFLAGS'] = '-g -O1'
+ os.environ['SPACK_CFLAGS'] = '-Wall'
+ os.environ['SPACK_CXXFLAGS'] = '-Werror'
+ os.environ['SPACK_FFLAGS'] = '-w'
+
+ # Test ldflags added properly in ld mode
+ self.check_ld('dump-args', test_command,
+ "ld " +
+ '-rpath ' + self.prefix + '/lib ' +
+ '-rpath ' + self.prefix + '/lib64 ' +
+ '-L foo ' +
+ ' '.join(test_command) + ' ' +
+ '-lfoo')
+
+ # Test cppflags added properly in cpp mode
+ self.check_cpp('dump-args', test_command,
+ "cpp " +
+ '-g -O1 ' +
+ ' '.join(test_command))
+
+ # Test ldflags, cppflags, and language specific flags are added in proper order
+ self.check_cc('dump-args', test_command,
+ self.realcc + ' ' +
+ '-Wl,-rpath,' + self.prefix + '/lib ' +
+ '-Wl,-rpath,' + self.prefix + '/lib64 ' +
+ '-g -O1 ' +
+ '-Wall ' +
+ '-L foo ' +
+ ' '.join(test_command) + ' ' +
+ '-lfoo')
+
+ self.check_cxx('dump-args', test_command,
+ self.realcc + ' ' +
+ '-Wl,-rpath,' + self.prefix + '/lib ' +
+ '-Wl,-rpath,' + self.prefix + '/lib64 ' +
+ '-g -O1 ' +
+ '-Werror ' +
+ '-L foo ' +
+ ' '.join(test_command) + ' ' +
+ '-lfoo')
+
+ self.check_fc('dump-args', test_command,
+ self.realcc + ' ' +
+ '-Wl,-rpath,' + self.prefix + '/lib ' +
+ '-Wl,-rpath,' + self.prefix + '/lib64 ' +
+ '-w ' +
+ '-g -O1 ' +
+ '-L foo ' +
+ ' '.join(test_command) + ' ' +
+ '-lfoo')
+
+ os.environ['SPACK_LDFLAGS']=''
+ os.environ['SPACK_LDLIBS']=''
+
+
def test_dep_rpath(self):
"""Ensure RPATHs for root package are added."""
self.check_cc('dump-args', test_command,
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index 799fdae3a9..963481054e 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -38,11 +38,20 @@ class ConcretizeTest(MockPackagesTest):
for name in abstract.variants:
avariant = abstract.variants[name]
cvariant = concrete.variants[name]
- self.assertEqual(avariant.enabled, cvariant.enabled)
+ self.assertEqual(avariant.value, cvariant.value)
+
+ if abstract.compiler_flags:
+ for flag in abstract.compiler_flags:
+ aflag = abstract.compiler_flags[flag]
+ cflag = concrete.compiler_flags[flag]
+ self.assertTrue(set(aflag) <= set(cflag))
for name in abstract.package.variants:
self.assertTrue(name in concrete.variants)
+ for flag in concrete.compiler_flags.valid_compiler_flags():
+ self.assertTrue(flag in concrete.compiler_flags)
+
if abstract.compiler and abstract.compiler.concrete:
self.assertEqual(abstract.compiler, concrete.compiler)
@@ -75,9 +84,14 @@ class ConcretizeTest(MockPackagesTest):
def test_concretize_variant(self):
self.check_concretize('mpich+debug')
self.check_concretize('mpich~debug')
+ self.check_concretize('mpich debug=2')
self.check_concretize('mpich')
+    def test_concretize_compiler_flags(self):
+ self.check_concretize('mpich cppflags="-O3"')
+
+
def test_concretize_preferred_version(self):
spec = self.check_concretize('python')
self.assertEqual(spec.versions, ver('2.7.11'))
@@ -231,7 +245,7 @@ class ConcretizeTest(MockPackagesTest):
def test_external_package(self):
- spec = Spec('externaltool')
+ spec = Spec('externaltool%gcc')
spec.concretize()
self.assertEqual(spec['externaltool'].external, '/path/to/external_tool')
diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py
index 56e294de26..c73badf8f2 100644
--- a/lib/spack/spack/test/modules.py
+++ b/lib/spack/spack/test/modules.py
@@ -73,7 +73,7 @@ configuration_alter_environment = {
'all': {
'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']}
},
- '=x86-linux': {
+ 'arch=x86-linux': {
'environment': {'set': {'FOO': 'foo'},
'unset': ['BAR']}
}
@@ -123,26 +123,26 @@ class TclTests(MockPackagesTest):
def test_simple_case(self):
spack.modules.CONFIGURATION = configuration_autoload_direct
- spec = spack.spec.Spec('mpich@3.0.4=x86-linux')
+ spec = spack.spec.Spec('mpich@3.0.4 arch=x86-linux')
content = self.get_modulefile_content(spec)
self.assertTrue('module-whatis "mpich @3.0.4"' in content)
def test_autoload(self):
spack.modules.CONFIGURATION = configuration_autoload_direct
- spec = spack.spec.Spec('mpileaks=x86-linux')
+ spec = spack.spec.Spec('mpileaks arch=x86-linux')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2)
self.assertEqual(len([x for x in content if 'module load ' in x]), 2)
spack.modules.CONFIGURATION = configuration_autoload_all
- spec = spack.spec.Spec('mpileaks=x86-linux')
+ spec = spack.spec.Spec('mpileaks arch=x86-linux')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 5)
self.assertEqual(len([x for x in content if 'module load ' in x]), 5)
def test_alter_environment(self):
spack.modules.CONFIGURATION = configuration_alter_environment
- spec = spack.spec.Spec('mpileaks=x86-linux')
+ spec = spack.spec.Spec('mpileaks arch=x86-linux')
content = self.get_modulefile_content(spec)
self.assertEqual(
len([x
@@ -152,7 +152,7 @@ class TclTests(MockPackagesTest):
len([x for x in content if 'setenv FOO "foo"' in x]), 1)
self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 1)
- spec = spack.spec.Spec('libdwarf=x64-linux')
+ spec = spack.spec.Spec('libdwarf arch=x64-linux')
content = self.get_modulefile_content(spec)
self.assertEqual(
len([x
@@ -164,14 +164,14 @@ class TclTests(MockPackagesTest):
def test_blacklist(self):
spack.modules.CONFIGURATION = configuration_blacklist
- spec = spack.spec.Spec('mpileaks=x86-linux')
+ spec = spack.spec.Spec('mpileaks arch=x86-linux')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1)
self.assertEqual(len([x for x in content if 'module load ' in x]), 1)
def test_conflicts(self):
spack.modules.CONFIGURATION = configuration_conflicts
- spec = spack.spec.Spec('mpileaks=x86-linux')
+ spec = spack.spec.Spec('mpileaks arch=x86-linux')
content = self.get_modulefile_content(spec)
self.assertEqual(
len([x for x in content if x.startswith('conflict')]), 2)
diff --git a/lib/spack/spack/test/multimethod.py b/lib/spack/spack/test/multimethod.py
index f653ca3477..a33656adcc 100644
--- a/lib/spack/spack/test/multimethod.py
+++ b/lib/spack/spack/test/multimethod.py
@@ -25,9 +25,13 @@
"""
Test for multi_method dispatch.
"""
+import unittest
import spack
from spack.multimethod import *
+from spack.version import *
+from spack.spec import Spec
+from spack.multimethod import when
from spack.test.mock_packages_test import *
from spack.version import *
@@ -89,19 +93,19 @@ class MultiMethodTest(MockPackagesTest):
def test_architecture_match(self):
- pkg = spack.repo.get('multimethod=x86_64')
+ pkg = spack.repo.get('multimethod arch=x86_64')
self.assertEqual(pkg.different_by_architecture(), 'x86_64')
- pkg = spack.repo.get('multimethod=ppc64')
+ pkg = spack.repo.get('multimethod arch=ppc64')
self.assertEqual(pkg.different_by_architecture(), 'ppc64')
- pkg = spack.repo.get('multimethod=ppc32')
+ pkg = spack.repo.get('multimethod arch=ppc32')
self.assertEqual(pkg.different_by_architecture(), 'ppc32')
- pkg = spack.repo.get('multimethod=arm64')
+ pkg = spack.repo.get('multimethod arch=arm64')
self.assertEqual(pkg.different_by_architecture(), 'arm64')
- pkg = spack.repo.get('multimethod=macos')
+ pkg = spack.repo.get('multimethod arch=macos')
self.assertRaises(NoSuchMethodError, pkg.different_by_architecture)
diff --git a/lib/spack/spack/test/optional_deps.py b/lib/spack/spack/test/optional_deps.py
index 90382dfc4a..b5ba0ecf35 100644
--- a/lib/spack/spack/test/optional_deps.py
+++ b/lib/spack/spack/test/optional_deps.py
@@ -42,6 +42,13 @@ class ConcretizeTest(MockPackagesTest):
self.check_normalize('optional-dep-test+a',
Spec('optional-dep-test+a', Spec('a')))
+ self.check_normalize('optional-dep-test a=true',
+ Spec('optional-dep-test a=true', Spec('a')))
+
+
+ self.check_normalize('optional-dep-test a=true',
+ Spec('optional-dep-test+a', Spec('a')))
+
self.check_normalize('optional-dep-test@1.1',
Spec('optional-dep-test@1.1', Spec('b')))
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 4645f98565..52f4f7395e 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -31,6 +31,8 @@ You can find the dummy packages here::
import spack
import spack.package
+from llnl.util.lang import list_modules
+
from spack.spec import Spec
from spack.test.mock_packages_test import *
@@ -239,8 +241,8 @@ class SpecDagTest(MockPackagesTest):
def test_unsatisfiable_architecture(self):
- self.set_pkg_dep('mpileaks', 'mpich=bgqos_0')
- spec = Spec('mpileaks ^mpich=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf')
+ self.set_pkg_dep('mpileaks', 'mpich arch=bgqos_0')
+ spec = Spec('mpileaks ^mpich arch=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf')
self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, spec.normalize)
diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py
index 60eb86d652..0cb78b90ed 100644
--- a/lib/spack/spack/test/spec_semantics.py
+++ b/lib/spack/spack/test/spec_semantics.py
@@ -22,6 +22,7 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import unittest
from spack.spec import *
from spack.test.mock_packages_test import *
@@ -138,11 +139,11 @@ class SpecSematicsTest(MockPackagesTest):
def test_satisfies_architecture(self):
- self.check_satisfies('foo=chaos_5_x86_64_ib', '=chaos_5_x86_64_ib')
- self.check_satisfies('foo=bgqos_0', '=bgqos_0')
+ self.check_satisfies('foo arch=chaos_5_x86_64_ib', ' arch=chaos_5_x86_64_ib')
+ self.check_satisfies('foo arch=bgqos_0', ' arch=bgqos_0')
- self.check_unsatisfiable('foo=bgqos_0', '=chaos_5_x86_64_ib')
- self.check_unsatisfiable('foo=chaos_5_x86_64_ib', '=bgqos_0')
+ self.check_unsatisfiable('foo arch=bgqos_0', ' arch=chaos_5_x86_64_ib')
+ self.check_unsatisfiable('foo arch=chaos_5_x86_64_ib', ' arch=bgqos_0')
def test_satisfies_dependencies(self):
@@ -190,12 +191,20 @@ class SpecSematicsTest(MockPackagesTest):
def test_satisfies_matching_variant(self):
self.check_satisfies('mpich+foo', 'mpich+foo')
self.check_satisfies('mpich~foo', 'mpich~foo')
+ self.check_satisfies('mpich foo=1', 'mpich foo=1')
+
+ #confirm that synonymous syntax works correctly
+ self.check_satisfies('mpich+foo', 'mpich foo=True')
+ self.check_satisfies('mpich foo=true', 'mpich+foo')
+ self.check_satisfies('mpich~foo', 'mpich foo=FALSE')
+ self.check_satisfies('mpich foo=False', 'mpich~foo')
def test_satisfies_unconstrained_variant(self):
# only asked for mpich, no constraints. Either will do.
self.check_satisfies('mpich+foo', 'mpich')
self.check_satisfies('mpich~foo', 'mpich')
+ self.check_satisfies('mpich foo=1', 'mpich')
def test_unsatisfiable_variants(self):
@@ -204,16 +213,44 @@ class SpecSematicsTest(MockPackagesTest):
# 'mpich' is not concrete:
self.check_satisfies('mpich', 'mpich+foo', False)
self.check_satisfies('mpich', 'mpich~foo', False)
+ self.check_satisfies('mpich', 'mpich foo=1', False)
# 'mpich' is concrete:
self.check_unsatisfiable('mpich', 'mpich+foo', True)
self.check_unsatisfiable('mpich', 'mpich~foo', True)
+ self.check_unsatisfiable('mpich', 'mpich foo=1', True)
def test_unsatisfiable_variant_mismatch(self):
        # No match in specs
self.check_unsatisfiable('mpich~foo', 'mpich+foo')
self.check_unsatisfiable('mpich+foo', 'mpich~foo')
+ self.check_unsatisfiable('mpich foo=1', 'mpich foo=2')
+
+
+ def test_satisfies_matching_compiler_flag(self):
+ self.check_satisfies('mpich cppflags="-O3"', 'mpich cppflags="-O3"')
+ self.check_satisfies('mpich cppflags="-O3 -Wall"', 'mpich cppflags="-O3 -Wall"')
+
+
+ def test_satisfies_unconstrained_compiler_flag(self):
+ # only asked for mpich, no constraints. Any will do.
+ self.check_satisfies('mpich cppflags="-O3"', 'mpich')
+
+
+ def test_unsatisfiable_compiler_flag(self):
+ # This case is different depending on whether the specs are concrete.
+
+ # 'mpich' is not concrete:
+ self.check_satisfies('mpich', 'mpich cppflags="-O3"', False)
+
+ # 'mpich' is concrete:
+ self.check_unsatisfiable('mpich', 'mpich cppflags="-O3"', True)
+
+
+ def test_unsatisfiable_compiler_flag_mismatch(self):
+        # No match in specs
+ self.check_unsatisfiable('mpich cppflags="-O3"', 'mpich cppflags="-O2"')
def test_satisfies_virtual(self):
@@ -301,18 +338,26 @@ class SpecSematicsTest(MockPackagesTest):
self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+foo')
self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+debug+foo')
+ self.check_constrain('libelf debug=2 foo=1', 'libelf debug=2', 'libelf foo=1')
+ self.check_constrain('libelf debug=2 foo=1', 'libelf debug=2', 'libelf debug=2 foo=1')
+
self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf~foo')
self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf+debug~foo')
+ def test_constrain_compiler_flags(self):
+ self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', 'libelf cflags="-O3"', 'libelf cppflags="-Wall"')
+ self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', 'libelf cflags="-O3"', 'libelf cflags="-O3" cppflags="-Wall"')
+
+
def test_constrain_arch(self):
- self.check_constrain('libelf=bgqos_0', 'libelf=bgqos_0', 'libelf=bgqos_0')
- self.check_constrain('libelf=bgqos_0', 'libelf', 'libelf=bgqos_0')
+ self.check_constrain('libelf arch=bgqos_0', 'libelf arch=bgqos_0', 'libelf arch=bgqos_0')
+ self.check_constrain('libelf arch=bgqos_0', 'libelf', 'libelf arch=bgqos_0')
def test_constrain_compiler(self):
- self.check_constrain('libelf=bgqos_0', 'libelf=bgqos_0', 'libelf=bgqos_0')
- self.check_constrain('libelf=bgqos_0', 'libelf', 'libelf=bgqos_0')
+ self.check_constrain('libelf %gcc@4.4.7', 'libelf %gcc@4.4.7', 'libelf %gcc@4.4.7')
+ self.check_constrain('libelf %gcc@4.4.7', 'libelf', 'libelf %gcc@4.4.7')
def test_invalid_constraint(self):
@@ -321,8 +366,11 @@ class SpecSematicsTest(MockPackagesTest):
self.check_invalid_constraint('libelf+debug', 'libelf~debug')
self.check_invalid_constraint('libelf+debug~foo', 'libelf+debug+foo')
+ self.check_invalid_constraint('libelf debug=2', 'libelf debug=1')
- self.check_invalid_constraint('libelf=bgqos_0', 'libelf=x86_54')
+ self.check_invalid_constraint('libelf cppflags="-O3"', 'libelf cppflags="-O2"')
+
+ self.check_invalid_constraint('libelf arch=bgqos_0', 'libelf arch=x86_54')
def test_constrain_changed(self):
@@ -332,7 +380,9 @@ class SpecSematicsTest(MockPackagesTest):
self.check_constrain_changed('libelf%gcc', '%gcc@4.5')
self.check_constrain_changed('libelf', '+debug')
self.check_constrain_changed('libelf', '~debug')
- self.check_constrain_changed('libelf', '=bgqos_0')
+ self.check_constrain_changed('libelf', 'debug=2')
+ self.check_constrain_changed('libelf', 'cppflags="-O3"')
+ self.check_constrain_changed('libelf', ' arch=bgqos_0')
def test_constrain_not_changed(self):
@@ -343,7 +393,9 @@ class SpecSematicsTest(MockPackagesTest):
self.check_constrain_not_changed('libelf%gcc@4.5', '%gcc@4.5')
self.check_constrain_not_changed('libelf+debug', '+debug')
self.check_constrain_not_changed('libelf~debug', '~debug')
- self.check_constrain_not_changed('libelf=bgqos_0', '=bgqos_0')
+ self.check_constrain_not_changed('libelf debug=2', 'debug=2')
+ self.check_constrain_not_changed('libelf cppflags="-O3"', 'cppflags="-O3"')
+ self.check_constrain_not_changed('libelf arch=bgqos_0', ' arch=bgqos_0')
self.check_constrain_not_changed('libelf^foo', 'libelf^foo')
self.check_constrain_not_changed('libelf^foo^bar', 'libelf^foo^bar')
@@ -355,7 +407,8 @@ class SpecSematicsTest(MockPackagesTest):
self.check_constrain_changed('libelf^foo%gcc', 'libelf^foo%gcc@4.5')
self.check_constrain_changed('libelf^foo', 'libelf^foo+debug')
self.check_constrain_changed('libelf^foo', 'libelf^foo~debug')
- self.check_constrain_changed('libelf^foo', 'libelf^foo=bgqos_0')
+ self.check_constrain_changed('libelf^foo', 'libelf^foo cppflags="-O3"')
+ self.check_constrain_changed('libelf^foo', 'libelf^foo arch=bgqos_0')
def test_constrain_dependency_not_changed(self):
@@ -365,4 +418,6 @@ class SpecSematicsTest(MockPackagesTest):
self.check_constrain_not_changed('libelf^foo%gcc@4.5', 'libelf^foo%gcc@4.5')
self.check_constrain_not_changed('libelf^foo+debug', 'libelf^foo+debug')
self.check_constrain_not_changed('libelf^foo~debug', 'libelf^foo~debug')
- self.check_constrain_not_changed('libelf^foo=bgqos_0', 'libelf^foo=bgqos_0')
+ self.check_constrain_not_changed('libelf^foo cppflags="-O3"', 'libelf^foo cppflags="-O3"')
+ self.check_constrain_not_changed('libelf^foo arch=bgqos_0', 'libelf^foo arch=bgqos_0')
+
diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py
index 928d111ea9..c4e4c9cdfe 100644
--- a/lib/spack/spack/test/spec_syntax.py
+++ b/lib/spack/spack/test/spec_syntax.py
@@ -104,6 +104,8 @@ class SpecSyntaxTest(unittest.TestCase):
def test_full_specs(self):
self.check_parse("mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4^stackwalker@8.1_1e")
+ self.check_parse("mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1 debug=2~qt_4^stackwalker@8.1_1e")
+ self.check_parse('mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags="-O3"+debug~qt_4^stackwalker@8.1_1e')
def test_canonicalize(self):
self.check_parse(
@@ -128,7 +130,10 @@ class SpecSyntaxTest(unittest.TestCase):
def test_duplicate_variant(self):
self.assertRaises(DuplicateVariantError, self.check_parse, "x@1.2+debug+debug")
- self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2+debug+debug")
+ self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2+debug debug=true")
+ self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2 debug=false debug=true")
+ self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2 debug=false~debug")
+
    def test_duplicate_dependence(self):
self.assertRaises(DuplicateDependencyError, self.check_parse, "x ^y ^y")
diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py
index d2ccfde69b..38b778fa00 100644
--- a/lib/spack/spack/util/executable.py
+++ b/lib/spack/spack/util/executable.py
@@ -22,10 +22,8 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-__all__ = ['Executable', 'which', 'ProcessError']
import os
-import sys
import re
import subprocess
import inspect
@@ -34,9 +32,12 @@ import llnl.util.tty as tty
import spack
import spack.error
+__all__ = ['Executable', 'which', 'ProcessError']
+
class Executable(object):
"""Class representing a program that can be run on the command line."""
+
def __init__(self, name):
self.exe = name.split(' ')
self.returncode = None
@@ -44,16 +45,13 @@ class Executable(object):
if not self.exe:
raise ProcessError("Cannot construct executable for '%s'" % name)
-
def add_default_arg(self, arg):
self.exe.append(arg)
-
@property
def command(self):
return ' '.join(self.exe)
-
def __call__(self, *args, **kwargs):
"""Run this executable in a subprocess.
@@ -105,6 +103,8 @@ class Executable(object):
fail_on_error = kwargs.pop("fail_on_error", True)
ignore_errors = kwargs.pop("ignore_errors", ())
+ env = kwargs.get('env', None)
+
# TODO: This is deprecated. Remove in a future version.
return_output = kwargs.pop("return_output", False)
@@ -114,8 +114,8 @@ class Executable(object):
else:
output = kwargs.pop("output", None)
- error = kwargs.pop("error", None)
- input = kwargs.pop("input", None)
+ error = kwargs.pop("error", None)
+ input = kwargs.pop("input", None)
if input is str:
raise ValueError("Cannot use `str` as input stream.")
@@ -126,85 +126,90 @@ class Executable(object):
return subprocess.PIPE, False
else:
return arg, False
+
ostream, close_ostream = streamify(output, 'w')
- estream, close_estream = streamify(error, 'w')
- istream, close_istream = streamify(input, 'r')
+ estream, close_estream = streamify(error, 'w')
+ istream, close_istream = streamify(input, 'r')
# if they just want to ignore one error code, make it a tuple.
if isinstance(ignore_errors, int):
- ignore_errors = (ignore_errors,)
+ ignore_errors = (ignore_errors, )
quoted_args = [arg for arg in args if re.search(r'^"|^\'|"$|\'$', arg)]
if quoted_args:
- tty.warn("Quotes in command arguments can confuse scripts like configure.",
- "The following arguments may cause problems when executed:",
- str("\n".join([" "+arg for arg in quoted_args])),
- "Quotes aren't needed because spack doesn't use a shell.",
- "Consider removing them")
+ tty.warn(
+ "Quotes in command arguments can confuse scripts like"
+ " configure.",
+ "The following arguments may cause problems when executed:",
+ str("\n".join([" " + arg for arg in quoted_args])),
+ "Quotes aren't needed because spack doesn't use a shell.",
+ "Consider removing them")
cmd = self.exe + list(args)
- cmd_line = "'%s'" % "' '".join(map(lambda arg: arg.replace("'", "'\"'\"'"), cmd))
+ cmd_line = "'%s'" % "' '".join(
+ map(lambda arg: arg.replace("'", "'\"'\"'"), cmd))
tty.debug(cmd_line)
try:
proc = subprocess.Popen(
- cmd, stdin=istream, stderr=estream, stdout=ostream)
+ cmd,
+ stdin=istream,
+ stderr=estream,
+ stdout=ostream,
+ env=env)
out, err = proc.communicate()
rc = self.returncode = proc.returncode
if fail_on_error and rc != 0 and (rc not in ignore_errors):
- raise ProcessError("Command exited with status %d:"
- % proc.returncode, cmd_line)
+ raise ProcessError("Command exited with status %d:" %
+ proc.returncode, cmd_line)
if output is str or error is str:
result = ''
- if output is str: result += out
- if error is str: result += err
+ if output is str:
+ result += out
+ if error is str:
+ result += err
return result
except OSError, e:
raise ProcessError(
- "%s: %s" % (self.exe[0], e.strerror),
- "Command: " + cmd_line)
+ "%s: %s" % (self.exe[0], e.strerror), "Command: " + cmd_line)
except subprocess.CalledProcessError, e:
if fail_on_error:
raise ProcessError(
- str(e),
- "\nExit status %d when invoking command: %s"
- % (proc.returncode, cmd_line))
+ str(e), "\nExit status %d when invoking command: %s" %
+ (proc.returncode, cmd_line))
finally:
- if close_ostream: output.close()
- if close_estream: error.close()
- if close_istream: input.close()
-
+ if close_ostream:
+ output.close()
+ if close_estream:
+ error.close()
+ if close_istream:
+ input.close()
def __eq__(self, other):
return self.exe == other.exe
-
def __neq__(self, other):
return not (self == other)
-
def __hash__(self):
- return hash((type(self),) + tuple(self.exe))
-
+ return hash((type(self), ) + tuple(self.exe))
def __repr__(self):
return "<exe: %s>" % self.exe
-
def __str__(self):
return ' '.join(self.exe)
-
def which(name, **kwargs):
"""Finds an executable in the path like command-line which."""
- path = kwargs.get('path', os.environ.get('PATH', '').split(os.pathsep))
+ path = kwargs.get('path', os.environ.get('PATH', '').split(os.pathsep))
required = kwargs.get('required', False)
if not path:
@@ -233,14 +238,16 @@ class ProcessError(spack.error.SpackError):
@property
def long_message(self):
msg = self._long_message
- if msg: msg += "\n\n"
+ if msg:
+ msg += "\n\n"
if self.build_log:
msg += "See build log for details:\n"
msg += " %s" % self.build_log
if self.package_context:
- if msg: msg += "\n\n"
+ if msg:
+ msg += "\n\n"
msg += '\n'.join(self.package_context)
return msg
@@ -267,7 +274,7 @@ def _get_package_context():
frame = f[0]
# Find a frame with 'self' in the local variables.
- if not 'self' in frame.f_locals:
+ if 'self' not in frame.f_locals:
continue
# Look only at a frame in a subclass of spack.Package
@@ -280,9 +287,8 @@ def _get_package_context():
# Build a message showing where in install we failed.
lines.append("%s:%d, in %s:" % (
- inspect.getfile(frame.f_code),
- frame.f_lineno,
- frame.f_code.co_name))
+ inspect.getfile(frame.f_code), frame.f_lineno, frame.f_code.co_name
+ ))
sourcelines, start = inspect.getsourcelines(frame)
for i, line in enumerate(sourcelines):
diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py
index 20686d44b2..ad875f5ef5 100644
--- a/lib/spack/spack/variant.py
+++ b/lib/spack/spack/variant.py
@@ -32,5 +32,5 @@ currently variants are just flags.
class Variant(object):
"""Represents a variant on a build. Can be either on or off."""
def __init__(self, default, description):
- self.default = bool(default)
+ self.default = default
self.description = str(description)
diff --git a/lib/spack/spack/virtual.py b/lib/spack/spack/virtual.py
index 91ad77c8fd..bb8333f023 100644
--- a/lib/spack/spack/virtual.py
+++ b/lib/spack/spack/virtual.py
@@ -67,10 +67,15 @@ class ProviderIndex(object):
if type(spec) != spack.spec.Spec:
spec = spack.spec.Spec(spec)
+ if not spec.name:
+ # Empty specs do not have a package
+ return
+
assert(not spec.virtual)
pkg = spec.package
for provided_spec, provider_spec in pkg.provided.iteritems():
+            provider_spec.compiler_flags = spec.compiler_flags.copy()  # satisfaction should not hinge on compiler flags
if provider_spec.satisfies(spec, deps=False):
provided_name = provided_spec.name
diff --git a/share/spack/qa/run-flake8 b/share/spack/qa/run-flake8
index 722c7fcba6..44eb0167fb 100755
--- a/share/spack/qa/run-flake8
+++ b/share/spack/qa/run-flake8
@@ -20,10 +20,18 @@ fi
# Check if changed files are flake8 conformant [framework]
changed=$(git diff --name-only develop... | grep '.py$')
-# Exempt url lines in changed packages from overlong line errors.
+# Add approved style exemptions to the changed packages.
for file in $changed; do
if [[ $file = *package.py ]]; then
- perl -i~ -pe 's/^(\s*url\s*=.*)$/\1 # NOQA: ignore=E501/' $file;
+ cp "$file" "$file~"
+
+ # Exempt lines with urls and descriptions from overlong line errors.
+ perl -i -pe 's/^(\s*url\s*=.*)$/\1 # NOQA: ignore=E501/' $file
+ perl -i -pe 's/^(\s*version\(.*\).*)$/\1 # NOQA: ignore=E501/' $file
+ perl -i -pe 's/^(\s*variant\(.*\).*)$/\1 # NOQA: ignore=E501/' $file
+
+ # Exempt '@when' decorated functions from redefinition errors.
+ perl -i -pe 's/^(\s*\@when\(.*\).*)$/\1 # NOQA: ignore=F811/' $file
fi
done
diff --git a/var/spack/repos/builtin.mock/packages/multimethod/package.py b/var/spack/repos/builtin.mock/packages/multimethod/package.py
index 2d15722470..def73ad82e 100644
--- a/var/spack/repos/builtin.mock/packages/multimethod/package.py
+++ b/var/spack/repos/builtin.mock/packages/multimethod/package.py
@@ -103,19 +103,19 @@ class Multimethod(Package):
#
# Make sure we can switch methods on different architectures
#
- @when('=x86_64')
+ @when('arch=x86_64')
def different_by_architecture(self):
return 'x86_64'
- @when('=ppc64')
+ @when('arch=ppc64')
def different_by_architecture(self):
return 'ppc64'
- @when('=ppc32')
+ @when('arch=ppc32')
def different_by_architecture(self):
return 'ppc32'
- @when('=arm64')
+ @when('arch=arm64')
def different_by_architecture(self):
return 'arm64'
diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py
index b1b9c58b32..c3d09d29ac 100644
--- a/var/spack/repos/builtin/packages/boost/package.py
+++ b/var/spack/repos/builtin/packages/boost/package.py
@@ -127,7 +127,7 @@ class Boost(Package):
dots, underscores)
def determine_toolset(self, spec):
- if spec.satisfies("=darwin-x86_64"):
+ if spec.satisfies("arch=darwin-x86_64"):
return 'darwin'
toolsets = {'g++': 'gcc',
diff --git a/var/spack/repos/builtin/packages/doxygen/package.py b/var/spack/repos/builtin/packages/doxygen/package.py
index 994a38ca87..b2e9582b5a 100644
--- a/var/spack/repos/builtin/packages/doxygen/package.py
+++ b/var/spack/repos/builtin/packages/doxygen/package.py
@@ -22,30 +22,31 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-#------------------------------------------------------------------------------
-# Author: Justin Too <justin@doubleotoo.com>
-# Date: September 11, 2015
-#------------------------------------------------------------------------------
-
from spack import *
-import sys
+
class Doxygen(Package):
"""Doxygen is the de facto standard tool for generating documentation
from annotated C++ sources, but it also supports other popular programming
languages such as C, Objective-C, C#, PHP, Java, Python, IDL (Corba,
- Microsoft, and UNO/OpenOffice flavors), Fortran, VHDL, Tcl, and to some extent D..
- """
+ Microsoft, and UNO/OpenOffice flavors), Fortran, VHDL, Tcl, and to some
+       extent D."""
+
homepage = "http://www.stack.nl/~dimitri/doxygen/"
url = "http://ftp.stack.nl/pub/users/dimitri/doxygen-1.8.10.src.tar.gz"
+ version('1.8.11', 'f4697a444feaed739cfa2f0644abc19b')
version('1.8.10', '79767ccd986f12a0f949015efb5f058f')
+ # graphviz appears to be a run-time optional dependency
+ variant('graphviz', default=True, description='Build with dot command support from Graphviz.') # NOQA: ignore=E501
+
depends_on("cmake@2.8.12:")
- # flex does not build on OSX, but it's provided there anyway
- depends_on("flex", sys.platform != 'darwin')
- depends_on("bison", sys.platform != 'darwin')
+ depends_on("flex")
+ depends_on("bison")
+ # optional dependencies
+ depends_on("graphviz", when="+graphviz")
def install(self, spec, prefix):
cmake('.', *std_cmake_args)
diff --git a/var/spack/repos/builtin/packages/flex/package.py b/var/spack/repos/builtin/packages/flex/package.py
index 926651010f..b778538606 100644
--- a/var/spack/repos/builtin/packages/flex/package.py
+++ b/var/spack/repos/builtin/packages/flex/package.py
@@ -24,15 +24,18 @@
##############################################################################
from spack import *
+
class Flex(Package):
"""Flex is a tool for generating scanners."""
homepage = "http://flex.sourceforge.net/"
- url = "http://download.sourceforge.net/flex/flex-2.5.39.tar.gz"
+ url = "http://download.sourceforge.net/flex/flex-2.5.39.tar.gz"
version('2.6.0', '5724bcffed4ebe39e9b55a9be80859ec')
version('2.5.39', 'e133e9ead8ec0a58d81166b461244fde')
+ depends_on("bison")
+
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
diff --git a/var/spack/repos/builtin/packages/gcc/package.py b/var/spack/repos/builtin/packages/gcc/package.py
index 47dbeb2a99..224105ea0f 100644
--- a/var/spack/repos/builtin/packages/gcc/package.py
+++ b/var/spack/repos/builtin/packages/gcc/package.py
@@ -1,33 +1,9 @@
-##############################################################################
-# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License (as
-# published by the Free Software Foundation) version 2.1, February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
from spack import *
from contextlib import closing
from glob import glob
import sys
-import os
+
class Gcc(Package):
"""The GNU Compiler Collection includes front ends for C, C++,
@@ -50,10 +26,12 @@ class Gcc(Package):
version('4.6.4', 'b407a3d1480c11667f293bfb1f17d1a4')
version('4.5.4', '27e459c2566b8209ab064570e1b378f7')
- variant('binutils', default=sys.platform != 'darwin',
- description="Build via binutils")
- variant('gold', default=sys.platform != 'darwin',
- description="Build the gold linker plugin for ld-based LTO")
+ variant('binutils',
+ default=sys.platform != 'darwin',
+ description="Build via binutils")
+ variant('gold',
+ default=sys.platform != 'darwin',
+ description="Build the gold linker plugin for ld-based LTO")
depends_on("mpfr")
depends_on("gmp")
@@ -63,16 +41,18 @@ class Gcc(Package):
depends_on("binutils~libiberty+gold", when='+binutils +gold')
# TODO: integrate these libraries.
- #depends_on("ppl")
- #depends_on("cloog")
+ # depends_on("ppl")
+ # depends_on("cloog")
if sys.platform == 'darwin':
patch('darwin/gcc-4.9.patch1', when='@4.9.3')
patch('darwin/gcc-4.9.patch2', when='@4.9.3')
+ else:
+ provides('golang', when='@4.7.1:')
def install(self, spec, prefix):
# libjava/configure needs a minor fix to install into spack paths.
filter_file(r"'@.*@'", "'@[[:alnum:]]*@'", 'libjava/configure',
- string=True)
+ string=True)
enabled_languages = set(('c', 'c++', 'fortran', 'java', 'objc'))
@@ -80,62 +60,59 @@ class Gcc(Package):
enabled_languages.add('go')
# Generic options to compile GCC
- options = ["--prefix=%s" % prefix,
- "--libdir=%s/lib64" % prefix,
+ options = ["--prefix=%s" % prefix, "--libdir=%s/lib64" % prefix,
"--disable-multilib",
"--enable-languages=" + ','.join(enabled_languages),
- "--with-mpc=%s" % spec['mpc'].prefix,
- "--with-mpfr=%s" % spec['mpfr'].prefix,
- "--with-gmp=%s" % spec['gmp'].prefix,
- "--enable-lto",
- "--with-quad"]
+ "--with-mpc=%s" % spec['mpc'].prefix, "--with-mpfr=%s" %
+ spec['mpfr'].prefix, "--with-gmp=%s" % spec['gmp'].prefix,
+ "--enable-lto", "--with-quad"]
# Binutils
if spec.satisfies('+binutils'):
static_bootstrap_flags = "-static-libstdc++ -static-libgcc"
- binutils_options = ["--with-sysroot=/",
- "--with-stage1-ldflags=%s %s" %
- (self.rpath_args, static_bootstrap_flags),
- "--with-boot-ldflags=%s %s" %
- (self.rpath_args, static_bootstrap_flags),
- "--with-gnu-ld",
- "--with-ld=%s/bin/ld" % spec['binutils'].prefix,
- "--with-gnu-as",
- "--with-as=%s/bin/as" % spec['binutils'].prefix]
+ binutils_options = [
+ "--with-sysroot=/", "--with-stage1-ldflags=%s %s" %
+ (self.rpath_args, static_bootstrap_flags),
+ "--with-boot-ldflags=%s %s" %
+ (self.rpath_args, static_bootstrap_flags), "--with-gnu-ld",
+ "--with-ld=%s/bin/ld" % spec['binutils'].prefix,
+ "--with-gnu-as",
+ "--with-as=%s/bin/as" % spec['binutils'].prefix
+ ]
options.extend(binutils_options)
# Isl
if 'isl' in spec:
isl_options = ["--with-isl=%s" % spec['isl'].prefix]
options.extend(isl_options)
- if sys.platform == 'darwin' :
- darwin_options = [ "--with-build-config=bootstrap-debug" ]
+ if sys.platform == 'darwin':
+ darwin_options = ["--with-build-config=bootstrap-debug"]
options.extend(darwin_options)
build_dir = join_path(self.stage.path, 'spack-build')
- configure = Executable( join_path(self.stage.source_path, 'configure') )
+ configure = Executable(join_path(self.stage.source_path, 'configure'))
with working_dir(build_dir, create=True):
# Rest of install is straightforward.
configure(*options)
- if sys.platform == 'darwin' : make("bootstrap")
- else: make()
+ if sys.platform == 'darwin':
+ make("bootstrap")
+ else:
+ make()
make("install")
self.write_rpath_specs()
-
@property
def spec_dir(self):
# e.g. lib64/gcc/x86_64-unknown-linux-gnu/4.9.2
spec_dir = glob("%s/lib64/gcc/*/*" % self.prefix)
return spec_dir[0] if spec_dir else None
-
def write_rpath_specs(self):
"""Generate a spec file so the linker adds a rpath to the libs
the compiler used to build the executable."""
if not self.spec_dir:
tty.warn("Could not install specs for %s." %
- self.spec.format('$_$@'))
+ self.spec.format('$_$@'))
return
gcc = Executable(join_path(self.prefix.bin, 'gcc'))
@@ -146,5 +123,5 @@ class Gcc(Package):
out.write(line + "\n")
if line.startswith("*link:"):
out.write("-rpath %s/lib:%s/lib64 \\\n" %
- (self.prefix, self.prefix))
+ (self.prefix, self.prefix))
set_install_permissions(specs_file)
diff --git a/var/spack/repos/builtin/packages/ghostscript/package.py b/var/spack/repos/builtin/packages/ghostscript/package.py
index ba787f858f..c22b90088e 100644
--- a/var/spack/repos/builtin/packages/ghostscript/package.py
+++ b/var/spack/repos/builtin/packages/ghostscript/package.py
@@ -24,18 +24,18 @@
##############################################################################
from spack import *
+
class Ghostscript(Package):
"""an interpreter for the PostScript language and for PDF. """
homepage = "http://ghostscript.com/"
- url = "http://downloads.ghostscript.com/public/ghostscript-9.16.tar.gz"
+ url = "http://downloads.ghostscript.com/public/old-gs-releases/ghostscript-9.18.tar.gz"
- version('9.16', '829319325bbdb83f5c81379a8f86f38f')
+ version('9.18', '33a47567d7a591c00a253caddd12a88a')
parallel = False
def install(self, spec, prefix):
- configure("--prefix=%s" %prefix, "--enable-shared")
+ configure("--prefix=%s" % prefix, "--enable-shared")
make()
make("install")
-
diff --git a/var/spack/repos/builtin/packages/go-bootstrap/package.py b/var/spack/repos/builtin/packages/go-bootstrap/package.py
new file mode 100644
index 0000000000..b0e2109fd3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/go-bootstrap/package.py
@@ -0,0 +1,51 @@
+import os
+import shutil
+import glob
+from spack import *
+
+# THIS PACKAGE SHOULD NOT EXIST
+# it exists to make up for the inability to:
+# * use an external go compiler
+# * have go depend on itself
+# * have a sensible way to find gccgo without a dep on gcc
+
+
+class GoBootstrap(Package):
+ """Old C-bootstrapped go to bootstrap real go"""
+ homepage = "https://golang.org"
+ url = "https://go.googlesource.com/go"
+
+ extendable = True
+
+ # temporary fix until tags are pulled correctly
+ version('1.4.2', git='https://go.googlesource.com/go', tag='go1.4.2')
+
+ variant('test',
+ default=True,
+ description="Run tests as part of build, a good idea but quite"
+ " time consuming")
+
+ provides('golang@:1.4.2')
+
+ depends_on('git')
+
+ def install(self, spec, prefix):
+ bash = which('bash')
+ with working_dir('src'):
+ if '+test' in spec:
+ bash('all.bash')
+ else:
+ bash('make.bash')
+
+ try:
+ os.makedirs(prefix)
+ except OSError:
+ pass
+ for f in glob.glob('*'):
+ if os.path.isdir(f):
+ shutil.copytree(f, os.path.join(prefix, f))
+ else:
+ shutil.copy2(f, os.path.join(prefix, f))
+
+ def setup_environment(self, spack_env, run_env):
+ spack_env.set('GOROOT_FINAL', self.spec.prefix)
diff --git a/var/spack/repos/builtin/packages/go/package.py b/var/spack/repos/builtin/packages/go/package.py
new file mode 100644
index 0000000000..13b83517d1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/go/package.py
@@ -0,0 +1,80 @@
+import os
+import shutil
+import glob
+import llnl.util.tty as tty
+from spack import *
+
+
+class Go(Package):
+ """The golang compiler and build environment"""
+ homepage = "https://golang.org"
+ url = "https://go.googlesource.com/go"
+
+ extendable = True
+
+ version('1.5.4', git='https://go.googlesource.com/go', tag='go1.5.4')
+ version('1.6.2', git='https://go.googlesource.com/go', tag='go1.6.2')
+
+ variant('test',
+ default=True,
+ description="Run tests as part of build, a good idea but quite"
+ " time consuming")
+
+ provides('golang')
+
+ # TODO: make non-C self-hosting compilers feasible without backflips;
+ # this should be a dependency on an external Go compiler
+ depends_on('go-bootstrap')
+ depends_on('git')
+
+ def install(self, spec, prefix):
+ bash = which('bash')
+ with working_dir('src'):
+ if '+test' in spec:
+ bash('all.bash')
+ else:
+ bash('make.bash')
+
+ try:
+ os.makedirs(prefix)
+ except OSError:
+ pass
+ for f in glob.glob('*'):
+ if os.path.isdir(f):
+ shutil.copytree(f, os.path.join(prefix, f))
+ else:
+ shutil.copy2(f, os.path.join(prefix, f))
+
+ def setup_environment(self, spack_env, run_env):
+ spack_env.set('GOROOT_FINAL', self.spec.prefix)
+ spack_env.set('GOROOT_BOOTSTRAP', self.spec['go-bootstrap'].prefix)
+
+ def setup_dependent_package(self, module, ext_spec):
+ """Called before go modules' install() methods.
+
+ In most cases, extensions will only need to set GOPATH and use go::
+
+ env = os.environ
+ env['GOPATH'] = self.source_path + ':' + env['GOPATH']
+ go('get', '<package>', env=env)
+            shutil.copytree('bin', os.path.join(prefix, 'bin'))
+ """
+ # Add a go command/compiler for extensions
+ module.go = Executable(join_path(self.spec.prefix.bin, 'go'))
+
+ def setup_dependent_environment(self, spack_env, run_env, ext_spec):
+ if os.environ.get('GOROOT', False):
+ tty.warn('GOROOT is set, this is not recommended')
+
+ path_components = []
+ # Set GOPATH to include paths of dependencies
+ for d in ext_spec.traverse():
+ if d.package.extends(self.spec):
+ path_components.append(d.prefix)
+
+ # This *MUST* be first, this is where new code is installed
+ spack_env.set('GOPATH', ':'.join(path_components))
+
+ # Allow packages to find this when using module or dotkit
+ run_env.prepend_path('GOPATH', ':'.join(
+ [ext_spec.prefix] + path_components))
diff --git a/var/spack/repos/builtin/packages/graphviz/package.py b/var/spack/repos/builtin/packages/graphviz/package.py
index 203e7b7f3c..2f99015ba2 100644
--- a/var/spack/repos/builtin/packages/graphviz/package.py
+++ b/var/spack/repos/builtin/packages/graphviz/package.py
@@ -23,6 +23,8 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
+import sys
+
class Graphviz(Package):
"""Graph Visualization Software"""
@@ -35,19 +37,27 @@ class Graphviz(Package):
# related to missing Perl packages. If spack begins support for Perl in the
# future, this package can be updated to depend_on('perl') and the
# necessary devel packages.
- variant('perl', default=False, description='Enable if you need the optional Perl language bindings.')
+ variant('perl', default=False, description='Enable if you need the optional Perl language bindings.') # NOQA: ignore=E501
parallel = False
depends_on("swig")
depends_on("python")
depends_on("ghostscript")
+ depends_on("pkg-config")
def install(self, spec, prefix):
options = ['--prefix=%s' % prefix]
- if not '+perl' in spec:
+ if '+perl' not in spec:
options.append('--disable-perl')
+ # On OSX fix the compiler error:
+ # In file included from tkStubLib.c:15:
+ # /usr/include/tk.h:78:11: fatal error: 'X11/Xlib.h' file not found
+ # include <X11/Xlib.h>
+ if sys.platform == 'darwin':
+ options.append('CFLAGS=-I/opt/X11/include')
+
configure(*options)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/hub/package.py b/var/spack/repos/builtin/packages/hub/package.py
new file mode 100644
index 0000000000..ed8b742e42
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hub/package.py
@@ -0,0 +1,24 @@
+from spack import *
+import os
+
+
+class Hub(Package):
+ """The github git wrapper"""
+ homepage = "https://github.com/github/hub"
+ url = "https://github.com/github/hub/archive/v2.2.3.tar.gz"
+
+ version('head', git='https://github.com/github/hub')
+ version('2.2.3', '6675992ddd16d186eac7ba4484d57f5b')
+ version('2.2.2', '7edc8f5b5d3c7c392ee191dd999596fc')
+ version('2.2.1', '889a31ee9d10ae9cb333480d8dbe881f')
+ version('2.2.0', 'eddce830a079b8480f104aa7496f46fe')
+ version('1.12.4', '4f2ebb14834c9981b04e40b0d1754717')
+
+ extends("go")
+
+ def install(self, spec, prefix):
+ env = os.environ
+ env['GOPATH'] = self.stage.source_path + ':' + env['GOPATH']
+ bash = which('bash')
+ bash(os.path.join('script', 'build'), '-o', os.path.join(prefix, 'bin',
+ 'hub'))
diff --git a/var/spack/repos/builtin/packages/libpciaccess/package.py b/var/spack/repos/builtin/packages/libpciaccess/package.py
index 5d1e93eab7..42e8711a7d 100644
--- a/var/spack/repos/builtin/packages/libpciaccess/package.py
+++ b/var/spack/repos/builtin/packages/libpciaccess/package.py
@@ -37,7 +37,7 @@ class Libpciaccess(Package):
def install(self, spec, prefix):
# libpciaccess does not support OS X
- if spec.satisfies('=darwin-x86_64'):
+ if spec.satisfies('arch=darwin-x86_64'):
# create a dummy directory
mkdir(prefix.lib)
return
diff --git a/var/spack/repos/builtin/packages/lua-luaposix/package.py b/var/spack/repos/builtin/packages/lua-luaposix/package.py
new file mode 100644
index 0000000000..9e96548f08
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lua-luaposix/package.py
@@ -0,0 +1,16 @@
+from spack import *
+import glob
+
+
+class LuaLuaposix(Package):
+ """Lua posix bindings, including ncurses"""
+ homepage = "https://github.com/luaposix/luaposix/"
+ url = "https://github.com/luaposix/luaposix/archive/release-v33.4.0.tar.gz"
+
+ version('33.4.0', 'b36ff049095f28752caeb0b46144516c')
+
+ extends("lua")
+
+ def install(self, spec, prefix):
+ rockspec = glob.glob('luaposix-*.rockspec')
+ luarocks('--tree=' + prefix, 'install', rockspec[0])
diff --git a/var/spack/repos/builtin/packages/lua/package.py b/var/spack/repos/builtin/packages/lua/package.py
index 9a73a22645..170f90516a 100644
--- a/var/spack/repos/builtin/packages/lua/package.py
+++ b/var/spack/repos/builtin/packages/lua/package.py
@@ -25,10 +25,11 @@
from spack import *
import os
+
class Lua(Package):
""" The Lua programming language interpreter and library """
homepage = "http://www.lua.org"
- url = "http://www.lua.org/ftp/lua-5.1.5.tar.gz"
+ url = "http://www.lua.org/ftp/lua-5.1.5.tar.gz"
version('5.3.2', '33278c2ab5ee3c1a875be8d55c1ca2a1')
version('5.3.1', '797adacada8d85761c079390ff1d9961')
@@ -42,17 +43,115 @@ class Lua(Package):
version('5.1.4', 'd0870f2de55d59c1c8419f36e8fac150')
version('5.1.3', 'a70a8dfaa150e047866dc01a46272599')
+ extendable = True
+
depends_on('ncurses')
depends_on('readline')
+ resource(
+ name="luarocks",
+ url="https://keplerproject.github.io/luarocks/releases/"
+ "luarocks-2.3.0.tar.gz",
+ md5="a38126684cf42b7d0e7a3c7cf485defb",
+ destination="luarocks",
+ placement='luarocks')
+
def install(self, spec, prefix):
- if spec.satisfies("=darwin-i686") or spec.satisfies("=darwin-x86_64"):
+ if spec.satisfies("arch=darwin-i686") or spec.satisfies("arch=darwin-x86_64"):
target = 'macosx'
else:
target = 'linux'
make('INSTALL_TOP=%s' % prefix,
- 'MYLDFLAGS=-L%s -lncurses' % spec['ncurses'].prefix.lib,
+ 'MYLDFLAGS=-L%s -L%s ' % (
+ spec['readline'].prefix.lib,
+ spec['ncurses'].prefix.lib),
+ 'MYLIBS=-lncurses',
target)
make('INSTALL_TOP=%s' % prefix,
- 'MYLDFLAGS=-L%s -lncurses' % spec['ncurses'].prefix.lib,
+ 'MYLDFLAGS=-L%s -L%s ' % (
+ spec['readline'].prefix.lib,
+ spec['ncurses'].prefix.lib),
+ 'MYLIBS=-lncurses',
'install')
+
+ with working_dir(os.path.join('luarocks', 'luarocks')):
+ configure('--prefix=' + prefix, '--with-lua=' + prefix)
+ make('build')
+ make('install')
+
+ def append_paths(self, paths, cpaths, path):
+ paths.append(os.path.join(path, '?.lua'))
+ paths.append(os.path.join(path, '?', 'init.lua'))
+ cpaths.append(os.path.join(path, '?.so'))
+
+ def setup_dependent_environment(self, spack_env, run_env, extension_spec):
+ lua_paths = []
+ for d in extension_spec.traverse():
+ if d.package.extends(self.spec):
+ lua_paths.append(os.path.join(d.prefix, self.lua_lib_dir))
+ lua_paths.append(os.path.join(d.prefix, self.lua_share_dir))
+
+ lua_patterns = []
+ lua_cpatterns = []
+ for p in lua_paths:
+ if os.path.isdir(p):
+ self.append_paths(lua_patterns, lua_cpatterns, p)
+
+ # Always add this package's paths
+ for p in (os.path.join(self.spec.prefix, self.lua_lib_dir),
+ os.path.join(self.spec.prefix, self.lua_share_dir)):
+ self.append_paths(lua_patterns, lua_cpatterns, p)
+
+ spack_env.set('LUA_PATH', ';'.join(lua_patterns), separator=';')
+ spack_env.set('LUA_CPATH', ';'.join(lua_cpatterns), separator=';')
+
+ # For run time environment set only the path for extension_spec and
+ # prepend it to LUAPATH
+ if extension_spec.package.extends(self.spec):
+ run_env.prepend_path('LUA_PATH', ';'.join(lua_patterns),
+ separator=';')
+ run_env.prepend_path('LUA_CPATH', ';'.join(lua_cpatterns),
+ separator=';')
+
+ def setup_environment(self, spack_env, run_env):
+ run_env.prepend_path(
+ 'LUA_PATH',
+ os.path.join(self.spec.prefix, self.lua_share_dir, '?.lua'),
+ separator=';')
+ run_env.prepend_path(
+ 'LUA_PATH', os.path.join(self.spec.prefix, self.lua_share_dir, '?',
+ 'init.lua'),
+ separator=';')
+ run_env.prepend_path(
+ 'LUA_PATH',
+ os.path.join(self.spec.prefix, self.lua_lib_dir, '?.lua'),
+ separator=';')
+ run_env.prepend_path(
+ 'LUA_PATH',
+ os.path.join(self.spec.prefix, self.lua_lib_dir, '?', 'init.lua'),
+ separator=';')
+ run_env.prepend_path(
+ 'LUA_CPATH',
+ os.path.join(self.spec.prefix, self.lua_lib_dir, '?.so'),
+ separator=';')
+
+ @property
+ def lua_lib_dir(self):
+ return os.path.join('lib', 'lua', '%d.%d' % self.version[:2])
+
+ @property
+ def lua_share_dir(self):
+ return os.path.join('share', 'lua', '%d.%d' % self.version[:2])
+
+ def setup_dependent_package(self, module, ext_spec):
+ """
+ Called before lua modules' install() methods.
+
+ In most cases, extensions will only need a line like::
+
+ luarocks('--tree=' + prefix, 'install', rock_spec_path)
+ """
+ # Lua extension builds can have lua and luarocks executable functions
+ module.lua = Executable(join_path(self.spec.prefix.bin, 'lua'))
+ module.luarocks = Executable(join_path(self.spec.prefix.bin,
+ 'luarocks'))
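As an aside (not part of the patch above), the search patterns that append_paths() builds for a single directory can be sketched as follows; the prefix and Lua version here are made up, and the joined strings are what end up in LUA_PATH and LUA_CPATH::

    import os

    prefix = '/spack/opt/some-lua-extension'            # hypothetical prefix
    lib_dir = os.path.join(prefix, 'lib', 'lua', '5.3')  # lua_lib_dir equivalent

    paths, cpaths = [], []
    # the same three entries Lua.append_paths() adds per directory
    paths.append(os.path.join(lib_dir, '?.lua'))
    paths.append(os.path.join(lib_dir, '?', 'init.lua'))
    cpaths.append(os.path.join(lib_dir, '?.so'))

    print(';'.join(paths))
    # /spack/opt/some-lua-extension/lib/lua/5.3/?.lua;/spack/opt/some-lua-extension/lib/lua/5.3/?/init.lua
    print(';'.join(cpaths))
    # /spack/opt/some-lua-extension/lib/lua/5.3/?.so
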
diff --git a/var/spack/repos/builtin/packages/netcdf-cxx4/package.py b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py
index b67ea299a8..f8af76429b 100644
--- a/var/spack/repos/builtin/packages/netcdf-cxx4/package.py
+++ b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py
@@ -24,16 +24,21 @@
##############################################################################
from spack import *
+
class NetcdfCxx4(Package):
"""C++ interface for NetCDF4"""
homepage = "http://www.unidata.ucar.edu/software/netcdf"
- url = "http://www.unidata.ucar.edu/downloads/netcdf/ftp/netcdf-cxx4-4.2.tar.gz"
+ url = "https://www.github.com/unidata/netcdf-cxx4/tarball/v4.3.0"
- version('4.2', 'd019853802092cf686254aaba165fc81')
+ version('4.3.0', '0dde8b9763eecdafbd69d076e687337e')
+ version('4.2.1', 'd019853802092cf686254aaba165fc81')
depends_on('netcdf')
+ depends_on("autoconf")
def install(self, spec, prefix):
+ # Regenerate the build system to avoid inconsistencies in the git repo
+ which('autoreconf')('-ivf')
configure('--prefix=%s' % prefix)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py
index ebfec4bded..22e49daaa7 100644
--- a/var/spack/repos/builtin/packages/openblas/package.py
+++ b/var/spack/repos/builtin/packages/openblas/package.py
@@ -48,11 +48,13 @@ class Openblas(Package):
patch('make.patch')
def install(self, spec, prefix):
- # Openblas is picky about compilers. Configure fails with
- # FC=/abs/path/to/f77, whereas FC=f77 works fine.
- # To circumvent this, provide basename only:
- make_defs = ['CC=%s' % os.path.basename(spack_cc),
- 'FC=%s' % os.path.basename(spack_f77),
+ # Configure fails to pick up fortran from FC=/abs/path/to/f77, but
+ # works fine with FC=/abs/path/to/gfortran.
+ # When mixing compilers, make sure that
+ # $SPACK_ROOT/lib/spack/env/<compiler> has symlinks with reasonable
+ # names, and wire them up inside lib/spack/spack/compilers/<compiler>.py
+ make_defs = ['CC=%s' % spack_cc,
+ 'FC=%s' % spack_f77,
'MAKE_NO_J=1']
make_targets = ['libs', 'netlib']
diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py
index 119cdd83c2..34ab0703ad 100644
--- a/var/spack/repos/builtin/packages/openssl/package.py
+++ b/var/spack/repos/builtin/packages/openssl/package.py
@@ -35,7 +35,7 @@ class Openssl(Package):
Transport Layer Security (TLS v1) protocols as well as a
full-strength general purpose cryptography library."""
homepage = "http://www.openssl.org"
- url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"
+ url = "https://www.openssl.org/source/openssl-1.0.1h.tar.gz"
version('1.0.1h', '8d6d684a9430d5cc98a62a5d8fbda8cf')
version('1.0.1r', '1abd905e079542ccae948af37e393d28')
@@ -100,7 +100,7 @@ class Openssl(Package):
# in the environment, then this will override what is set in the
# Makefile, leading to build errors.
env.pop('APPS', None)
- if spec.satisfies("=darwin-x86_64") or spec.satisfies("=ppc64"):
+ if spec.satisfies("arch=darwin-x86_64") or spec.satisfies("arch=ppc64"):
# This needs to be done for all 64-bit architectures (except Linux,
# where it happens automatically?)
env['KERNEL_BITS'] = '64'
diff --git a/var/spack/repos/builtin/packages/scotch/package.py b/var/spack/repos/builtin/packages/scotch/package.py
index e82c3acd42..3abbf3c665 100644
--- a/var/spack/repos/builtin/packages/scotch/package.py
+++ b/var/spack/repos/builtin/packages/scotch/package.py
@@ -22,8 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import os
+import re
from spack import *
-import os, re
+
class Scotch(Package):
"""Scotch is a software package for graph and mesh/hypergraph
@@ -38,10 +40,10 @@ class Scotch(Package):
version('6.0.0', 'c50d6187462ba801f9a82133ee666e8e')
version('5.1.10b', 'f587201d6cf5cf63527182fbfba70753')
- variant('mpi', default=False, description='Activate the compilation of PT-Scotch')
+ variant('mpi', default=False, description='Activate the compilation of parallel libraries')
variant('compression', default=True, description='Activate the possibility to use compressed files')
- variant('esmumps', default=False, description='Activate the compilation of the lib esmumps needed by mumps')
- variant('shared', default=True, description='Build shared libraries')
+ variant('esmumps', default=False, description='Activate the compilation of esmumps needed by mumps')
+ variant('shared', default=True, description='Build a shared version of the library')
depends_on('flex')
depends_on('bison')
@@ -51,7 +53,7 @@ class Scotch(Package):
# NOTE: Versions of Scotch up to version 6.0.0 don't include support for
# building with 'esmumps' in their default packages. In order to enable
# support for this feature, we must grab the 'esmumps' enabled archives
- # from the Scotch hosting site. These alternative archives include a strict
+ # from the Scotch hosting site. These alternative archives include a
# superset of the behavior in their default counterparts, so we choose to
# always grab these versions for older Scotch versions for simplicity.
@when('@:6.0.0')
@@ -62,32 +64,34 @@ class Scotch(Package):
def url_for_version(self, version):
return super(Scotch, self).url_for_version(version)
- # NOTE: Several of the 'esmumps' enabled Scotch releases up to version 6.0.0
- # have broken build scripts that don't properly build 'esmumps' as a separate
- # target, so we need a patch procedure to remove 'esmumps' from existing targets
- # and to add it as a standalone target.
+ # NOTE: Several of the 'esmumps' enabled Scotch releases up to version
+ # 6.0.0 have broken build scripts that don't properly build 'esmumps' as a
+ # separate target, so we need a patch procedure to remove 'esmumps' from
+ # existing targets and to add it as a standalone target.
@when('@:6.0.0')
def patch(self):
makefile_path = os.path.join('src', 'Makefile')
with open(makefile_path, 'r') as makefile:
- esmumps_enabled = any(re.search(r'^esmumps(\s*):(.*)$', line) for line in makefile.readlines())
+ esmumps_enabled = any(re.search(r'^esmumps(\s*):(.*)$', line)
+ for line in makefile.readlines())
if not esmumps_enabled:
mff = FileFilter(makefile_path)
mff.filter(r'^.*((esmumps)|(ptesmumps)).*(install).*$', '')
- makefile_esmumps_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'Makefile.esmumps')
+ mfesmumps_dir = os.path.dirname(os.path.realpath(__file__))
+ mfesmumps_path = os.path.join(mfesmumps_dir, 'Makefile.esmumps')
with open(makefile_path, 'a') as makefile:
- makefile.write('\ninclude %s\n' % makefile_esmumps_path)
+ makefile.write('\ninclude %s\n' % mfesmumps_path)
@when('@6.0.1:')
def patch(self):
pass
- # NOTE: Configuration of Scotch is achieved by writing a 'Makefile.inc' file
- # that contains all of the configuration variables and their desired values
- # for the installation. This function writes this file based on the given
- # installation variants.
+ # NOTE: Configuration of Scotch is achieved by writing a 'Makefile.inc'
+ # file that contains all of the configuration variables and their desired
+ # values for the installation. This function writes this file based on
+ # the given installation variants.
def configure(self):
makefile_inc = []
cflags = [
@@ -96,9 +100,9 @@ class Scotch(Package):
'-DSCOTCH_DETERMINISTIC',
'-DSCOTCH_RENAME',
'-DIDXSIZE64'
- ]
+ ]
- ## Library Build Type ##
+ # Library Build Type #
if '+shared' in self.spec:
makefile_inc.extend([
@@ -107,7 +111,7 @@ class Scotch(Package):
'RANLIB = echo',
'AR = $(CC)',
'ARFLAGS = -shared $(LDFLAGS) -o'
- ])
+ ])
cflags.append('-fPIC')
else:
makefile_inc.extend([
@@ -116,21 +120,21 @@ class Scotch(Package):
'RANLIB = ranlib',
'AR = ar',
'ARFLAGS = -ruv '
- ])
+ ])
- ## Compiler-Specific Options ##
+ # Compiler-Specific Options #
if self.compiler.name == 'gcc':
cflags.append('-Drestrict=__restrict')
elif self.compiler.name == 'intel':
cflags.append('-restrict')
+ mpicc_path = self.spec['mpi'].mpicc if '+mpi' in self.spec else 'mpicc'
makefile_inc.append('CCS = $(CC)')
- makefile_inc.append('CCP = %s' %
- (self.spec['mpi'].mpicc if '+mpi' in self.spec else 'mpicc'))
+ makefile_inc.append('CCP = %s' % mpicc_path)
makefile_inc.append('CCD = $(CCS)')
- ## Extra Features ##
+ # Extra Features #
ldflags = []
@@ -143,8 +147,10 @@ class Scotch(Package):
makefile_inc.append('LDFLAGS = %s' % ' '.join(ldflags))
- ## General Features ##
+ # General Features #
+ flex_path = os.path.join(self.spec['flex'].prefix.bin, 'flex')
+ bison_path = os.path.join(self.spec['bison'].prefix.bin, 'bison')
makefile_inc.extend([
'EXE =',
'OBJ = .o',
@@ -155,10 +161,10 @@ class Scotch(Package):
'MV = mv',
'CP = cp',
'CFLAGS = %s' % ' '.join(cflags),
- 'LEX = %s -Pscotchyy -olex.yy.c' % os.path.join(self.spec['flex'].prefix.bin , 'flex'),
- 'YACC = %s -pscotchyy -y -b y' % os.path.join(self.spec['bison'].prefix.bin, 'bison'),
+ 'LEX = %s -Pscotchyy -olex.yy.c' % flex_path,
+ 'YACC = %s -pscotchyy -y -b y' % bison_path,
'prefix = %s' % self.prefix
- ])
+ ])
with working_dir('src'):
with open('Makefile.inc', 'w') as fh:
@@ -178,7 +184,7 @@ class Scotch(Package):
with working_dir('src'):
for target in targets:
- make(target, parallel=(target!='ptesmumps'))
+ make(target, parallel=(target != 'ptesmumps'))
install_tree('bin', prefix.bin)
install_tree('lib', prefix.lib)
diff --git a/var/spack/repos/builtin/packages/sed/package.py b/var/spack/repos/builtin/packages/sed/package.py
new file mode 100644
index 0000000000..f2a240e1b3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/sed/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Sed(Package):
+ """GNU implementation of the famous stream editor."""
+ homepage = "http://www.gnu.org/software/sed/"
+ url = "http://ftpmirror.gnu.org/sed/sed-4.2.2.tar.bz2"
+
+ version('4.2.2', '7ffe1c7cdc3233e1e0c4b502df253974')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/the_platinum_searcher/package.py b/var/spack/repos/builtin/packages/the_platinum_searcher/package.py
new file mode 100644
index 0000000000..9c9a66cdef
--- /dev/null
+++ b/var/spack/repos/builtin/packages/the_platinum_searcher/package.py
@@ -0,0 +1,21 @@
+from spack import *
+import os
+import shutil
+
+
+class ThePlatinumSearcher(Package):
+ """Fast parallel recursive grep alternative"""
+ homepage = "https://github.com/monochromegane/the_platinum_searcher"
+ url = "https://github.com/monochromegane/the_platinum_searcher"
+
+ package = 'github.com/monochromegane/the_platinum_searcher/...'
+
+ version('head', go=package)
+
+ extends("go")
+
+ def install(self, spec, prefix):
+ env = os.environ
+ env['GOPATH'] = self.stage.source_path + ':' + env['GOPATH']
+ go('install', self.package, env=env)
+ shutil.copytree('bin', os.path.join(prefix, 'bin'))
diff --git a/var/spack/repos/builtin/packages/turbomole/package.py b/var/spack/repos/builtin/packages/turbomole/package.py
index 3362113d13..6ccce23f97 100644
--- a/var/spack/repos/builtin/packages/turbomole/package.py
+++ b/var/spack/repos/builtin/packages/turbomole/package.py
@@ -26,21 +26,22 @@ from spack import *
import os
import subprocess
+
class Turbomole(Package):
"""TURBOMOLE: Program Package for ab initio Electronic Structure
Calculations. NB: Requires a license to download."""
-
- # NOTE: Turbomole requires purchase of a license to download. Go to the
- # NOTE: Turbomole home page, http://www.turbomole-gmbh.com, for details.
- # NOTE: Spack will search the current directory for this file. It is
- # NOTE: probably best to add this file to a Spack mirror so that it can be
- # NOTE: found from anywhere. For information on setting up a Spack mirror
- # NOTE: see http://software.llnl.gov/spack/mirrors.html
+
+ # NOTE: Turbomole requires purchase of a license to download. Go to the
+ # NOTE: Turbomole home page, http://www.turbomole-gmbh.com, for details.
+ # NOTE: Spack will search the current directory for this file. It is
+ # NOTE: probably best to add this file to a Spack mirror so that it can be
+ # NOTE: found from anywhere. For information on setting up a Spack mirror
+ # NOTE: see http://software.llnl.gov/spack/mirrors.html
homepage = "http://www.turbomole-gmbh.com/"
version('7.0.2', '92b97e1e52e8dcf02a4d9ac0147c09d6',
- url="file://%s/turbolinux702.tar.gz" % os.getcwd())
+ url="file://%s/turbolinux702.tar.gz" % os.getcwd())
variant('mpi', default=False, description='Set up MPI environment')
variant('smp', default=False, description='Set up SMP environment')
@@ -56,33 +57,31 @@ class Turbomole(Package):
def do_fetch(self, mirror_only=True):
if '+mpi' in self.spec and '+smp' in self.spec:
- raise InstallError('Can not have both SMP and MPI enabled in the same build.')
+ raise InstallError('Can not have both SMP and MPI enabled in the '
+ 'same build.')
super(Turbomole, self).do_fetch(mirror_only)
def get_tm_arch(self):
- # For python-2.7 we could use `tm_arch = subprocess.check_output()`
- # Use the following for compatibility with python 2.6
if 'TURBOMOLE' in os.getcwd():
- tm_arch = subprocess.Popen(['sh', 'scripts/sysname'],
- stdout=subprocess.PIPE).communicate()[0]
+ tm_sysname = Executable('./scripts/sysname')
+ tm_arch = tm_sysname(output=str)
return tm_arch.rstrip('\n')
else:
return
-
+
def install(self, spec, prefix):
if spec.satisfies('@:7.0.2'):
calculate_version = 'calculate_2.4_linux64'
molecontrol_version = 'MoleControl_2.5'
- tm_arch=self.get_tm_arch()
+ tm_arch = self.get_tm_arch()
tar = which('tar')
dst = join_path(prefix, 'TURBOMOLE')
tar('-x', '-z', '-f', 'thermocalc.tar.gz')
with working_dir('thermocalc'):
- cmd = 'sh install <<<y'
- subprocess.call(cmd, shell=True)
+ subprocess.call('./install<<<y', shell=True)
install_tree('basen', join_path(dst, 'basen'))
install_tree('cabasen', join_path(dst, 'cabasen'))
@@ -108,13 +107,19 @@ class Turbomole(Package):
install('TURBOMOLE_702_LinuxPC', dst)
if '+mpi' in spec:
- install_tree('bin/%s_mpi' % tm_arch, join_path(dst, 'bin', '%s_mpi' % tm_arch))
- install_tree('libso/%s_mpi' % tm_arch, join_path(dst, 'libso', '%s_mpi' % tm_arch))
- install_tree('mpirun_scripts/%s_mpi' % tm_arch, join_path(dst, 'mpirun_scripts', '%s_mpi' % tm_arch))
+ install_tree('bin/%s_mpi' % tm_arch,
+ join_path(dst, 'bin', '%s_mpi' % tm_arch))
+ install_tree('libso/%s_mpi' % tm_arch,
+ join_path(dst, 'libso', '%s_mpi' % tm_arch))
+ install_tree('mpirun_scripts/%s_mpi' % tm_arch,
+ join_path(dst, 'mpirun_scripts', '%s_mpi' % tm_arch))
elif '+smp' in spec:
- install_tree('bin/%s_smp' % tm_arch, join_path(dst, 'bin', '%s_smp' % tm_arch))
- install_tree('libso/%s_smp' % tm_arch, join_path(dst, 'libso', '%s_smp' % tm_arch))
- install_tree('mpirun_scripts/%s_smp' % tm_arch, join_path(dst, 'mpirun_scripts', '%s_smp' % tm_arch))
+ install_tree('bin/%s_smp' % tm_arch,
+ join_path(dst, 'bin', '%s_smp' % tm_arch))
+ install_tree('libso/%s_smp' % tm_arch,
+ join_path(dst, 'libso', '%s_smp' % tm_arch))
+ install_tree('mpirun_scripts/%s_smp' % tm_arch,
+ join_path(dst, 'mpirun_scripts', '%s_smp' % tm_arch))
else:
install_tree('bin/%s' % tm_arch, join_path(dst, 'bin', tm_arch))
if '+mpi' in spec or '+smp' in spec:
@@ -131,18 +136,29 @@ class Turbomole(Package):
if self.spec.satisfies('@:7.0.2'):
molecontrol_version = 'MoleControl_2.5'
- tm_arch=self.get_tm_arch()
+ tm_arch = self.get_tm_arch()
run_env.set('TURBODIR', join_path(self.prefix, 'TURBOMOLE'))
- run_env.set('MOLE_CONTROL', join_path(self.prefix, 'TURBOMOLE', molecontrol_version))
+ run_env.set('MOLE_CONTROL',
+ join_path(self.prefix, 'TURBOMOLE', molecontrol_version))
- run_env.prepend_path('PATH', join_path(self.prefix, 'TURBOMOLE', 'thermocalc'))
- run_env.prepend_path('PATH', join_path(self.prefix, 'TURBOMOLE', 'scripts'))
+ run_env.prepend_path('PATH',
+ join_path(self.prefix, 'TURBOMOLE', 'thermocalc'))
+ run_env.prepend_path('PATH',
+ join_path(self.prefix, 'TURBOMOLE', 'scripts'))
if '+mpi' in self.spec:
run_env.set('PARA_ARCH', 'MPI')
- run_env.prepend_path('PATH', join_path(self.prefix, 'TURBOMOLE', 'bin', '%s_mpi' % tm_arch))
+ run_env.prepend_path('PATH',
+ join_path(self.prefix,
+ 'TURBOMOLE', 'bin', '%s_mpi'
+ % tm_arch))
elif '+smp' in self.spec:
run_env.set('PARA_ARCH', 'SMP')
- run_env.prepend_path('PATH', join_path(self.prefix, 'TURBOMOLE', 'bin', '%s_smp' % tm_arch))
+ run_env.prepend_path('PATH',
+ join_path(self.prefix,
+ 'TURBOMOLE', 'bin', '%s_smp'
+ % tm_arch))
else:
- run_env.prepend_path('PATH', join_path(self.prefix, 'TURBOMOLE', 'bin', tm_arch))
+ run_env.prepend_path('PATH',
+ join_path(self.prefix,
+ 'TURBOMOLE', 'bin', tm_arch))