summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
Diffstat (limited to 'lib')
-rwxr-xr-xlib/spack/env/cc24
-rw-r--r--lib/spack/spack/architecture.py2
-rw-r--r--lib/spack/spack/build_environment.py160
-rw-r--r--lib/spack/spack/cmd/checksum.py8
-rw-r--r--lib/spack/spack/cmd/create.py4
-rw-r--r--lib/spack/spack/cmd/install.py10
-rw-r--r--lib/spack/spack/cmd/uninstall.py2
-rw-r--r--lib/spack/spack/directory_layout.py7
-rw-r--r--lib/spack/spack/error.py4
-rw-r--r--lib/spack/spack/globals.py18
-rw-r--r--lib/spack/spack/package.py250
-rw-r--r--lib/spack/spack/test/install.py2
-rw-r--r--lib/spack/spack/url.py2
-rw-r--r--lib/spack/spack/util/web.py34
-rw-r--r--lib/spack/spack/version.py19
15 files changed, 341 insertions, 205 deletions
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index 0711c873e3..e5dbf21beb 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -62,15 +62,17 @@ options, other_args = parser.parse_known_args()
rpaths, other_args = parse_rpaths(other_args)
# Add dependencies' include and lib paths to our compiler flags.
-def append_if_dir(path_list, *dirs):
- full_path = os.path.join(*dirs)
- if os.path.isdir(full_path):
- path_list.append(full_path)
+def add_if_dir(path_list, directory, index=None):
+ if os.path.isdir(directory):
+ if index is None:
+ path_list.append(directory)
+ else:
+ path_list.insert(index, directory)
for dep_dir in spack_deps:
- append_if_dir(options.include_path, dep_dir, "include")
- append_if_dir(options.lib_path, dep_dir, "lib")
- append_if_dir(options.lib_path, dep_dir, "lib64")
+ add_if_dir(options.include_path, os.path.join(dep_dir, "include"))
+ add_if_dir(options.lib_path, os.path.join(dep_dir, "lib"))
+ add_if_dir(options.lib_path, os.path.join(dep_dir, "lib64"))
# Add our modified arguments to it.
arguments = ['-I%s' % path for path in options.include_path]
@@ -95,11 +97,9 @@ for var in ["LD_LIBRARY_PATH", "LD_RUN_PATH", "DYLD_LIBRARY_PATH"]:
os.environ.pop(var)
# Ensure that the delegated command doesn't just call this script again.
-clean_path = get_path("PATH")
-for item in ['.'] + spack_env_path:
- if item in clean_path:
- clean_path.remove(item)
-os.environ["PATH"] = ":".join(clean_path)
+remove_paths = ['.'] + spack_env_path
+path = [p for p in get_path("PATH") if p not in remove_paths]
+os.environ["PATH"] = ":".join(path)
full_command = [command] + arguments
diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py
index 7fbf1dadcf..9b4c6e576d 100644
--- a/lib/spack/spack/architecture.py
+++ b/lib/spack/spack/architecture.py
@@ -84,7 +84,7 @@ def sys_type():
# Couldn't determine the sys_type for this machine.
if sys_type is None:
- raise NoSysTypeError()
+ return "unknown_arch"
if not isinstance(sys_type, basestring):
raise InvalidSysTypeError(sys_type)
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
new file mode 100644
index 0000000000..d6becb77db
--- /dev/null
+++ b/lib/spack/spack/build_environment.py
@@ -0,0 +1,160 @@
+"""
+This module contains all routines related to setting up the package
+build environment. All of this is set up by package.py just before
+install() is called.
+
+There are two parts to the build environment:
+
+1. Python build environment (i.e. install() method)
+
+ This is how things are set up when install() is called. Spack
+ takes advantage of each package being in its own module by adding a
+ bunch of command-like functions (like configure(), make(), etc.) in
+   the package's module scope. This allows package writers to call
+ them all directly in Package.install() without writing 'self.'
+ everywhere. No, this isn't Pythonic. Yes, it makes the code more
+   readable and more like the shell script from which someone is
+ likely porting.
+
+2. Build execution environment
+
+ This is the set of environment variables, like PATH, CC, CXX,
+ etc. that control the build. There are also a number of
+ environment variables used to pass information (like RPATHs and
+ other information about dependencies) to Spack's compiler wrappers.
+ All of these env vars are also set up here.
+
+Skimming this module is a nice way to get acquainted with the types of
+calls you can make from within the install() function.
+"""
+import os
+import shutil
+import multiprocessing
+import platform
+from llnl.util.filesystem import *
+
+import spack
+from spack.util.executable import Executable, which
+from spack.util.environment import *
+
+#
+# This can be set by the user to globally disable parallel builds.
+#
+SPACK_NO_PARALLEL_MAKE = 'SPACK_NO_PARALLEL_MAKE'
+
+#
+# These environment variables are set by
+# set_build_environment_variables and used to pass parameters to
+# Spack's compiler wrappers.
+#
+SPACK_LIB = 'SPACK_LIB'
+SPACK_ENV_PATH = 'SPACK_ENV_PATH'
+SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
+SPACK_PREFIX = 'SPACK_PREFIX'
+SPACK_BUILD_ROOT = 'SPACK_BUILD_ROOT'
+
+
+class MakeExecutable(Executable):
+ """Special callable executable object for make so the user can
+ specify parallel or not on a per-invocation basis. Using
+ 'parallel' as a kwarg will override whatever the package's
+ global setting is, so you can either default to true or false
+ and override particular calls.
+
+ Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides
+ everything.
+ """
+ def __init__(self, name, parallel):
+ super(MakeExecutable, self).__init__(name)
+ self.parallel = parallel
+
+ def __call__(self, *args, **kwargs):
+ parallel = kwargs.get('parallel', self.parallel)
+ disable_parallel = env_flag(SPACK_NO_PARALLEL_MAKE)
+
+ if parallel and not disable_parallel:
+ jobs = "-j%d" % multiprocessing.cpu_count()
+ args = (jobs,) + args
+
+ super(MakeExecutable, self).__call__(*args, **kwargs)
+
+
+def set_build_environment_variables(pkg):
+ """This ensures a clean install environment when we build packages.
+ """
+ # This tells the compiler script where to find the Spack installation.
+ os.environ[SPACK_LIB] = spack.lib_path
+
+ # Add spack build environment path with compiler wrappers first in
+ # the path. We handle case sensitivity conflicts like "CC" and
+ # "cc" by putting one in the <build_env_path>/case-insensitive
+ # directory. Add that to the path too.
+ env_paths = [spack.build_env_path,
+ join_path(spack.build_env_path, 'case-insensitive')]
+ path_put_first("PATH", env_paths)
+ path_set(SPACK_ENV_PATH, env_paths)
+
+ # Prefixes of all of the package's dependencies go in
+ # SPACK_DEPENDENCIES
+ dep_prefixes = [d.package.prefix for d in pkg.spec.dependencies.values()]
+ path_set(SPACK_DEPENDENCIES, dep_prefixes)
+
+ # Install prefix
+ os.environ[SPACK_PREFIX] = pkg.prefix
+
+ # Build root for logging.
+ os.environ[SPACK_BUILD_ROOT] = pkg.stage.expanded_archive_path
+
+    # Remove these vars from the environment during build because they
+ # can affect how some packages find libraries. We want to make
+ # sure that builds never pull in unintended external dependencies.
+ pop_keys(os.environ, "LD_LIBRARY_PATH", "LD_RUN_PATH", "DYLD_LIBRARY_PATH")
+
+ # Add bin directories from dependencies to the PATH for the build.
+ bin_dirs = ['%s/bin' % prefix for prefix in dep_prefixes]
+ path_put_first('PATH', [bin for bin in bin_dirs if os.path.isdir(bin)])
+
+
+def set_module_variables_for_package(pkg):
+ """Populate the module scope of install() with some useful functions.
+ This makes things easier for package writers.
+ """
+ m = pkg.module
+
+ m.make = MakeExecutable('make', pkg.parallel)
+ m.gmake = MakeExecutable('gmake', pkg.parallel)
+
+ # number of jobs spack prefers to build with.
+ m.make_jobs = multiprocessing.cpu_count()
+
+ # Find the configure script in the archive path
+ # Don't use which for this; we want to find it in the current dir.
+ m.configure = Executable('./configure')
+
+ # TODO: shouldn't really use "which" here. Consider adding notion
+ # TODO: of build dependencies, as opposed to link dependencies.
+ # TODO: Currently, everything is a link dependency, but tools like
+ # TODO: this shouldn't be.
+ m.cmake = which("cmake")
+
+ # standard CMake arguments
+ m.std_cmake_args = ['-DCMAKE_INSTALL_PREFIX=%s' % pkg.prefix,
+ '-DCMAKE_BUILD_TYPE=None']
+ if platform.mac_ver()[0]:
+ m.std_cmake_args.append('-DCMAKE_FIND_FRAMEWORK=LAST')
+
+ # Emulate some shell commands for convenience
+ m.cd = os.chdir
+ m.mkdir = os.mkdir
+ m.makedirs = os.makedirs
+ m.remove = os.remove
+ m.removedirs = os.removedirs
+
+ m.mkdirp = mkdirp
+ m.install = install
+ m.rmtree = shutil.rmtree
+ m.move = shutil.move
+
+ # Useful directories within the prefix are encapsulated in
+ # a Prefix object.
+ m.prefix = pkg.prefix
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index 97ce91386b..f5cf0d0143 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -44,7 +44,7 @@ def setup_parser(subparser):
subparser.add_argument(
'package', metavar='PACKAGE', help='Package to list versions for')
subparser.add_argument(
- '-d', '--dirty', action='store_true', dest='dirty',
+ '--keep-stage', action='store_true', dest='keep_stage',
help="Don't clean up staging area when command completes.")
subparser.add_argument(
'versions', nargs=argparse.REMAINDER, help='Versions to generate checksums for')
@@ -54,6 +54,8 @@ def get_checksums(versions, urls, **kwargs):
# Allow commands like create() to do some analysis on the first
# archive after it is downloaded.
first_stage_function = kwargs.get('first_stage_function', None)
+ keep_stage = kwargs.get('keep_stage', False)
+
tty.msg("Downloading...")
hashes = []
@@ -71,7 +73,7 @@ def get_checksums(versions, urls, **kwargs):
continue
finally:
- if not kwargs.get('dirty', False):
+ if not keep_stage:
stage.destroy()
return zip(versions, hashes)
@@ -110,7 +112,7 @@ def checksum(parser, args):
return
version_hashes = get_checksums(
- versions[:archives_to_fetch], urls[:archives_to_fetch], dirty=args.dirty)
+ versions[:archives_to_fetch], urls[:archives_to_fetch], keep_stage=args.keep_stage)
if not version_hashes:
tty.die("Could not fetch any available versions for %s." % pkg.name)
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index bc47b77258..8653fafa5f 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -85,7 +85,7 @@ class ${class_name}(Package):
def setup_parser(subparser):
subparser.add_argument('url', nargs='?', help="url of package archive")
subparser.add_argument(
- '-d', '--dirty', action='store_true', dest='dirty',
+ '--keep-stage', action='store_true', dest='keep_stage',
help="Don't clean up staging area when command completes.")
subparser.add_argument(
'-f', '--force', action='store_true', dest='force',
@@ -174,7 +174,7 @@ def create(parser, args):
guesser = ConfigureGuesser()
ver_hash_tuples = spack.cmd.checksum.get_checksums(
versions[:archives_to_fetch], urls[:archives_to_fetch],
- first_stage_function=guesser, dirty=args.dirty)
+ first_stage_function=guesser, keep_stage=args.keep_stage)
if not ver_hash_tuples:
tty.die("Could not fetch any tarballs for %s." % name)
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index 02194c1b3f..ea11cb89a9 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -32,10 +32,10 @@ description = "Build and install packages"
def setup_parser(subparser):
subparser.add_argument(
- '-i', '--ignore-dependencies', action='store_true', dest='ignore_dependencies',
+ '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
help="Do not try to install dependencies of requested packages.")
subparser.add_argument(
- '-d', '--dirty', action='store_true', dest='dirty',
+ '--keep-prefix', action='store_true', dest='keep_prefix',
help="Don't clean up staging area when install completes.")
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
@@ -51,10 +51,8 @@ def install(parser, args):
if args.no_checksum:
spack.do_checksum = False
- spack.ignore_dependencies = args.ignore_dependencies
specs = spack.cmd.parse_specs(args.packages, concretize=True)
-
for spec in specs:
package = spack.db.get(spec)
- package.dirty = args.dirty
- package.do_install()
+ package.do_install(keep_prefix=args.keep_prefix,
+ ignore_deps=args.ignore_deps)
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index 7982892d81..df208b3a6a 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -68,4 +68,4 @@ def uninstall(parser, args):
# Uninstall packages in order now.
for pkg in pkgs:
- pkg.do_uninstall()
+ pkg.do_uninstall(force=args.force)
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index ad9f669f90..28719521d2 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -86,8 +86,11 @@ class DirectoryLayout(object):
shutil.rmtree(path, True)
path = os.path.dirname(path)
- while not os.listdir(path) and path != self.root:
- os.rmdir(path)
+ while path != self.root:
+ if os.path.isdir(path):
+ if os.listdir(path):
+ return
+ os.rmdir(path)
path = os.path.dirname(path)
diff --git a/lib/spack/spack/error.py b/lib/spack/spack/error.py
index 47fb858f3f..40e0e75fdb 100644
--- a/lib/spack/spack/error.py
+++ b/lib/spack/spack/error.py
@@ -41,5 +41,7 @@ class UnsupportedPlatformError(SpackError):
class NoNetworkConnectionError(SpackError):
"""Raised when an operation needs an internet connection."""
def __init__(self, message, url):
- super(NoNetworkConnectionError, self).__init__(message)
+ super(NoNetworkConnectionError, self).__init__(
+ "No network connection: " + str(message),
+ "URL was: " + str(url))
self.url = url
diff --git a/lib/spack/spack/globals.py b/lib/spack/spack/globals.py
index 4ff50a3e7e..9fc40845b0 100644
--- a/lib/spack/spack/globals.py
+++ b/lib/spack/spack/globals.py
@@ -23,6 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
+import tempfile
from llnl.util.filesystem import *
@@ -40,7 +41,7 @@ spack_file = join_path(prefix, "bin", "spack")
# spack directory hierarchy
lib_path = join_path(prefix, "lib", "spack")
-env_path = join_path(lib_path, "env")
+build_env_path = join_path(lib_path, "env")
module_path = join_path(lib_path, "spack")
compilers_path = join_path(module_path, "compilers")
test_path = join_path(module_path, "test")
@@ -92,9 +93,11 @@ use_tmp_stage = True
# Use a %u to add a username to the stage paths here, in case this
# is a shared filesystem. Spack will use the first of these paths
# that it can create.
-tmp_dirs = ['/nfs/tmp2/%u/spack-stage',
- '/var/tmp/%u/spack-stage',
- '/tmp/%u/spack-stage']
+tmp_dirs = []
+_default_tmp = tempfile.gettempdir()
+if _default_tmp != os.getcwd():
+ tmp_dirs.append(os.path.join(_default_tmp, 'spack-stage'))
+tmp_dirs.append('/nfs/tmp2/%u/spack-stage')
# Whether spack should allow installation of unsafe versions of
# software. "Unsafe" versions are ones it doesn't have a checksum
@@ -130,10 +133,3 @@ sys_type = None
#
mirrors = []
-# Important environment variables
-SPACK_NO_PARALLEL_MAKE = 'SPACK_NO_PARALLEL_MAKE'
-SPACK_LIB = 'SPACK_LIB'
-SPACK_ENV_PATH = 'SPACK_ENV_PATH'
-SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
-SPACK_PREFIX = 'SPACK_PREFIX'
-SPACK_BUILD_ROOT = 'SPACK_BUILD_ROOT'
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 27b7b8aef5..4e0c4c87a9 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -33,30 +33,27 @@ Homebrew makes it very easy to create packages. For a complete
rundown on spack and how it differs from homebrew, look at the
README.
"""
-import inspect
import os
import re
+import inspect
import subprocess
import platform as py_platform
-import shutil
import multiprocessing
from urlparse import urlparse
import llnl.util.tty as tty
-from llnl.util.tty.color import cwrite
from llnl.util.filesystem import *
from llnl.util.lang import *
import spack
import spack.spec
import spack.error
+import spack.build_environment as build_env
import spack.url as url
import spack.util.crypto as crypto
from spack.version import *
from spack.stage import Stage
from spack.util.web import get_pages
-from spack.util.environment import *
-from spack.util.executable import Executable, which
from spack.util.compression import allowed_archive
"""Allowed URL schemes for spack packages."""
@@ -320,12 +317,6 @@ class Package(object):
"""By default we build in parallel. Subclasses can override this."""
parallel = True
- """Remove tarball and build by default. If this is true, leave them."""
- dirty = False
-
- """Controls whether install and uninstall check deps before running."""
- ignore_dependencies = False
-
"""Dirty hack for forcing packages with uninterpretable URLs
TODO: get rid of this.
"""
@@ -390,8 +381,9 @@ class Package(object):
try:
return url.parse_version(self.__class__.url)
except UndetectableVersionError:
- tty.die("Couldn't extract a default version from %s. You " +
- "must specify it explicitly in the package." % self.url)
+ raise PackageError(
+ "Couldn't extract a default version from %s." % self.url,
+ " You must specify it explicitly in the package file.")
@property
@@ -413,46 +405,6 @@ class Package(object):
return self._stage
- def add_commands_to_module(self):
- """Populate the module scope of install() with some useful functions.
- This makes things easier for package writers.
- """
- m = self.module
-
- m.make = MakeExecutable('make', self.parallel)
- m.gmake = MakeExecutable('gmake', self.parallel)
-
- # number of jobs spack prefers to build with.
- m.make_jobs = multiprocessing.cpu_count()
-
- # Find the configure script in the archive path
- # Don't use which for this; we want to find it in the current dir.
- m.configure = Executable('./configure')
- m.cmake = which("cmake")
-
- # standard CMake arguments
- m.std_cmake_args = ['-DCMAKE_INSTALL_PREFIX=%s' % self.prefix,
- '-DCMAKE_BUILD_TYPE=None']
- if py_platform.mac_ver()[0]:
- m.std_cmake_args.append('-DCMAKE_FIND_FRAMEWORK=LAST')
-
- # Emulate some shell commands for convenience
- m.cd = os.chdir
- m.mkdir = os.mkdir
- m.makedirs = os.makedirs
- m.remove = os.remove
- m.removedirs = os.removedirs
-
- m.mkdirp = mkdirp
- m.install = install
- m.rmtree = shutil.rmtree
- m.move = shutil.move
-
- # Useful directories within the prefix are encapsulated in
- # a Prefix object.
- m.prefix = self.prefix
-
-
def preorder_traversal(self, visited=None, **kwargs):
"""This does a preorder traversal of the package's dependence DAG."""
virtual = kwargs.get("virtual", False)
@@ -533,7 +485,7 @@ class Package(object):
@property
def installed(self):
- return os.path.exists(self.prefix)
+ return os.path.isdir(self.prefix)
@property
@@ -574,8 +526,6 @@ class Package(object):
def remove_prefix(self):
"""Removes the prefix for a package along with any empty parent directories."""
- if self.dirty:
- return
spack.install_layout.remove_path_for_spec(self.spec)
@@ -587,10 +537,11 @@ class Package(object):
raise ValueError("Can only fetch concrete packages.")
if spack.do_checksum and not self.version in self.versions:
- tty.die("Cannot fetch %s@%s safely; there is no checksum on file for this "
- "version." % (self.name, self.version),
- "Add a checksum to the package file, or use --no-checksum to "
- "skip this check.")
+ raise ChecksumError(
+ "Cannot fetch %s safely; there is no checksum on file for version %s."
+ % self.version,
+ "Add a checksum to the package file, or use --no-checksum to "
+ "skip this check.")
self.stage.fetch()
@@ -600,8 +551,9 @@ class Package(object):
if checker.check(self.stage.archive_file):
tty.msg("Checksum passed for %s" % self.name)
else:
- tty.die("%s checksum failed for %s. Expected %s but got %s."
- % (checker.hash_name, self.name, digest, checker.sum))
+ raise ChecksumError(
+ "%s checksum failed for %s." % checker.hash_name,
+ "Expected %s but got %s." % (self.name, digest, checker.sum))
def do_stage(self):
@@ -616,8 +568,9 @@ class Package(object):
if not archive_dir:
tty.msg("Staging archive: %s" % self.stage.archive_file)
self.stage.expand_archive()
+ tty.msg("Created stage directory in %s." % self.stage.path)
else:
- tty.msg("Already staged %s" % self.name)
+ tty.msg("Already staged %s in %s." % (self.name, self.stage.path))
self.stage.chdir_to_archive()
@@ -667,80 +620,84 @@ class Package(object):
touch(good_file)
- def do_install(self):
+ def do_install(self, **kwargs):
"""This class should call this version of the install method.
Package implementations should override install().
"""
+ # whether to keep the prefix on failure. Default is to destroy it.
+ keep_prefix = kwargs.get('keep_prefix', False)
+ ignore_deps = kwargs.get('ignore_deps', False)
+
if not self.spec.concrete:
raise ValueError("Can only install concrete packages.")
if os.path.exists(self.prefix):
- tty.msg("%s is already installed." % self.name)
+ tty.msg("%s is already installed in %s." % (self.name, self.prefix))
return
- if not self.ignore_dependencies:
+ if not ignore_deps:
self.do_install_dependencies()
self.do_patch()
- self.setup_install_environment()
-
- # Add convenience commands to the package's module scope to
- # make building easier.
- self.add_commands_to_module()
-
- tty.msg("Building %s." % self.name)
-
- # create the install directory (allow the layout to handle this in
- # case it needs to add extra files)
- spack.install_layout.make_path_for_spec(self.spec)
+ # Fork a child process to do the build. This allows each
+        # package author to have full control over their environment,
+        # etc. without affecting other builds that might be executed
+ # in the same spack call.
try:
- self.install(self.spec, self.prefix)
- if not os.path.isdir(self.prefix):
- tty.die("Install failed for %s. No install dir created." % self.name)
+ pid = os.fork()
+ except OSError, e:
+ raise InstallError("Unable to fork build process: %s" % e)
- tty.msg("Successfully installed %s" % self.name)
- print_pkg(self.prefix)
+ if pid == 0:
+ tty.msg("Building %s." % self.name)
- except Exception, e:
- self.remove_prefix()
- raise
+ # create the install directory (allow the layout to handle
+ # this in case it needs to add extra files)
+ spack.install_layout.make_path_for_spec(self.spec)
- finally:
- # Once the install is done, destroy the stage where we built it,
- # unless the user wants it kept around.
- if not self.dirty:
- self.stage.destroy()
+ # Set up process's build environment before running install.
+ build_env.set_build_environment_variables(self)
+ build_env.set_module_variables_for_package(self)
+ try:
+ # Subclasses implement install() to do the build &
+ # install work.
+ self.install(self.spec, self.prefix)
- def setup_install_environment(self):
- """This ensures a clean install environment when we build packages."""
- pop_keys(os.environ, "LD_LIBRARY_PATH", "LD_RUN_PATH", "DYLD_LIBRARY_PATH")
+ if not os.listdir(self.prefix):
+ raise InstallError(
+ "Install failed for %s. Nothing was installed!"
+ % self.name)
- # Add spack environment at front of path and pass the
- # lib location along so the compiler script can find spack
- os.environ[spack.SPACK_LIB] = spack.lib_path
+ # On successful install, remove the stage.
+ # Leave if there is an error
+ self.stage.destroy()
- # Fix for case-insensitive file systems. Conflicting links are
- # in directories called "case*" within the env directory.
- env_paths = [spack.env_path]
- for file in os.listdir(spack.env_path):
- path = join_path(spack.env_path, file)
- if file.startswith("case") and os.path.isdir(path):
- env_paths.append(path)
- path_put_first("PATH", env_paths)
- path_set(spack.SPACK_ENV_PATH, env_paths)
+ tty.msg("Successfully installed %s" % self.name)
+ print_pkg(self.prefix)
- # Pass along prefixes of dependencies here
- path_set(
- spack.SPACK_DEPENDENCIES,
- [dep.package.prefix for dep in self.spec.dependencies.values()])
+ # Use os._exit here to avoid raising a SystemExit exception,
+ # which interferes with unit tests.
+ os._exit(0)
- # Install location
- os.environ[spack.SPACK_PREFIX] = self.prefix
+ except:
+ if not keep_prefix:
+ # If anything goes wrong, remove the install prefix
+ self.remove_prefix()
+ else:
+ tty.warn("Keeping install prefix in place despite error.",
+ "Spack will think this package is installed." +
+ "Manually remove this directory to fix:",
+ self.prefix)
+ raise
- # Build root for logging.
- os.environ[spack.SPACK_BUILD_ROOT] = self.stage.expanded_archive_path
+ # Parent process just waits for the child to complete. If the
+ # child exited badly, assume it already printed an appropriate
+ # message. Just make the parent exit with an error code.
+ pid, returncode = os.waitpid(pid, 0)
+ if returncode != 0:
+ sys.exit(1)
def do_install_dependencies(self):
@@ -760,16 +717,18 @@ class Package(object):
def install(self, spec, prefix):
"""Package implementations override this with their own build configuration."""
- tty.die("Packages must provide an install method!")
+ raise InstallError("Package %s provides no install method!" % self.name)
+
+ def do_uninstall(self, **kwargs):
+ force = kwargs.get('force', False)
- def do_uninstall(self):
if not self.installed:
- tty.die(self.name + " is not installed.")
+ raise InstallError(self.name + " is not installed.")
- if not self.ignore_dependencies:
+ if not force:
deps = self.installed_dependents
- if deps: tty.die(
+ if deps: raise InstallError(
"Cannot uninstall %s. The following installed packages depend on it: %s"
% (self.name, deps))
@@ -786,7 +745,8 @@ class Package(object):
def clean(self):
"""By default just runs make clean. Override if this isn't good."""
try:
- make = MakeExecutable('make', self.parallel)
+            # TODO: should we really call make clean, or just blow away the directory?
+ make = build_env.MakeExecutable('make', self.parallel)
make('clean')
tty.msg("Successfully cleaned %s" % self.name)
except subprocess.CalledProcessError, e:
@@ -871,30 +831,6 @@ def find_versions_of_archive(archive_url, **kwargs):
return versions
-class MakeExecutable(Executable):
- """Special Executable for make so the user can specify parallel or
- not on a per-invocation basis. Using 'parallel' as a kwarg will
- override whatever the package's global setting is, so you can
- either default to true or false and override particular calls.
-
- Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides
- everything.
- """
- def __init__(self, name, parallel):
- super(MakeExecutable, self).__init__(name)
- self.parallel = parallel
-
- def __call__(self, *args, **kwargs):
- parallel = kwargs.get('parallel', self.parallel)
- disable_parallel = env_flag(spack.SPACK_NO_PARALLEL_MAKE)
-
- if parallel and not disable_parallel:
- jobs = "-j%d" % multiprocessing.cpu_count()
- args = (jobs,) + args
-
- super(MakeExecutable, self).__call__(*args, **kwargs)
-
-
def validate_package_url(url_string):
"""Determine whether spack can handle a particular URL or not."""
url = urlparse(url_string)
@@ -911,11 +847,37 @@ def print_pkg(message):
if mac_ver and Version(mac_ver) >= Version('10.7'):
print u"\U0001F4E6" + tty.indent,
else:
+ from llnl.util.tty.color import cwrite
cwrite('@*g{[+]} ')
print message
-class InvalidPackageDependencyError(spack.error.SpackError):
+
+class FetchError(spack.error.SpackError):
+ """Raised when something goes wrong during fetch."""
+ def __init__(self, message, long_msg=None):
+ super(FetchError, self).__init__(message, long_msg)
+
+
+class ChecksumError(FetchError):
+ """Raised when archive fails to checksum."""
+ def __init__(self, message, long_msg):
+ super(ChecksumError, self).__init__(message, long_msg)
+
+
+class InstallError(spack.error.SpackError):
+ """Raised when something goes wrong during install or uninstall."""
+ def __init__(self, message, long_msg=None):
+ super(InstallError, self).__init__(message, long_msg)
+
+
+class PackageError(spack.error.SpackError):
+ """Raised when something is wrong with a package definition."""
+ def __init__(self, message, long_msg=None):
+ super(PackageError, self).__init__(message, long_msg)
+
+
+class InvalidPackageDependencyError(PackageError):
"""Raised when package specification is inconsistent with requirements of
its dependencies."""
def __init__(self, message):
diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py
index e567f6f9b5..ac3753c948 100644
--- a/lib/spack/spack/test/install.py
+++ b/lib/spack/spack/test/install.py
@@ -93,6 +93,6 @@ class InstallTest(MockPackagesTest):
try:
pkg.do_install()
pkg.do_uninstall()
- except:
+ except Exception, e:
if pkg: pkg.remove_prefix()
raise
diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py
index f56aaee493..deac156571 100644
--- a/lib/spack/spack/url.py
+++ b/lib/spack/spack/url.py
@@ -206,7 +206,7 @@ def wildcard_version(path):
ver, start, end = parse_version_string_with_indices(path)
v = Version(ver)
- parts = list(re.escape(p) for p in path.split(str(v)))
+ parts = [re.escape(p) for p in re.split(v.wildcard(), path)]
# Make a group for the wildcard, so it will be captured by the regex.
version_group = '(%s)' % v.wildcard()
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index b5104eb076..ba42cb37b5 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -23,11 +23,12 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import re
+import sys
import subprocess
import urllib2
import urlparse
from multiprocessing import Pool
-from HTMLParser import HTMLParser
+from HTMLParser import HTMLParser, HTMLParseError
import llnl.util.tty as tty
@@ -67,7 +68,7 @@ def _spider(args):
pool. Firing off all the child links at once makes the fetch MUCH
faster for pages with lots of children.
"""
- url, depth, max_depth = args
+ url, depth, max_depth, raise_on_error = args
pages = {}
try:
@@ -81,11 +82,12 @@ def _spider(args):
resp = urllib2.urlopen(req, timeout=TIMEOUT)
if not "Content-type" in resp.headers:
- print "ignoring page " + url
+ tty.warn("ignoring page " + url)
return pages
if not resp.headers["Content-type"].startswith('text/html'):
- print "ignoring page " + url + " with content type " + resp.headers["Content-type"]
+ tty.warn("ignoring page " + url + " with content type " +
+ resp.headers["Content-type"])
return pages
# Do the real GET request when we know it's just HTML.
@@ -100,9 +102,9 @@ def _spider(args):
# If we're not at max depth, parse out the links in the page
if depth < max_depth:
link_parser = LinkParser()
-
subcalls = []
link_parser.feed(page)
+
while link_parser.links:
raw_link = link_parser.links.pop()
@@ -112,7 +114,7 @@ def _spider(args):
# Evaluate the link relative to the page it came from.
abs_link = urlparse.urljoin(response_url, raw_link)
- subcalls.append((abs_link, depth+1, max_depth))
+ subcalls.append((abs_link, depth+1, max_depth, raise_on_error))
if subcalls:
pool = Pool(processes=len(subcalls))
@@ -121,13 +123,21 @@ def _spider(args):
pages.update(d)
except urllib2.URLError, e:
- # Only report it if it's the root page. We ignore errors when spidering.
- if depth == 1:
- raise spack.error.NoNetworkConnectionError(e.reason, url)
+ if raise_on_error:
+ raise spack.error.NoNetworkConnectionError(str(e), url)
+
+ except HTMLParseError, e:
+ # This error indicates that Python's HTML parser sucks.
+ msg = "Got an error parsing HTML."
+
+ # Pre-2.7.3 Pythons in particular have rather prickly HTML parsing.
+ if sys.version_info[:3] < (2,7,3):
+ msg += " Use Python 2.7.3 or newer for better HTML parsing."
+
+ tty.warn(msg, url, "HTMLParseError: " + str(e))
except Exception, e:
- # Other types of errors are completely ignored.
- pass
+ pass # Other types of errors are completely ignored.
return pages
@@ -141,5 +151,5 @@ def get_pages(root_url, **kwargs):
performance over a sequential fetch.
"""
max_depth = kwargs.setdefault('depth', 1)
- pages = _spider((root_url, 1, max_depth))
+ pages = _spider((root_url, 1, max_depth, False))
return pages
diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py
index 1f44c5f39b..0b5125fdf0 100644
--- a/lib/spack/spack/version.py
+++ b/lib/spack/spack/version.py
@@ -152,21 +152,24 @@ class Version(object):
return r'[a-zA-Z]+'
version = self.version
- separators = ('',) + self.separators
+
+ # Use a wildcard for separators, in case a version is written
+ # two different ways (e.g., boost writes 1_55_0 and 1.55.0)
+ sep_re = '[_.-]'
+ separators = ('',) + (sep_re,) * len(self.separators)
version += (version[-1],) * 2
- separators += (separators[-1],) * 2
+ separators += (sep_re,) * 2
- sep_res = [re.escape(sep) for sep in separators]
- seg_res = [a_or_n(seg) for seg in version]
+ segments = [a_or_n(seg) for seg in version]
- wc = seg_res[0]
- for i in xrange(1, len(sep_res)):
- wc += '(?:' + sep_res[i] + seg_res[i]
+ wc = segments[0]
+ for i in xrange(1, len(separators)):
+ wc += '(?:' + separators[i] + segments[i]
# Add possible alpha or beta indicator at the end of each segment
# We treat these specially b/c they're so common.
- wc += '[ab]?)?' * (len(seg_res) - 1)
+ wc += '[ab]?)?' * (len(segments) - 1)
return wc