Diffstat (limited to 'lib')
 -rw-r--r--   lib/spack/docs/packaging_guide.rst    |  38
 -rw-r--r--   lib/spack/llnl/util/filesystem.py     |   9
 -rw-r--r--   lib/spack/spack/build_environment.py  |   9
 -rw-r--r--   lib/spack/spack/cmd/checksum.py       |  38
 -rw-r--r--   lib/spack/spack/cmd/diy.py            |   4
 -rw-r--r--   lib/spack/spack/cmd/md5.py            |  54
 -rw-r--r--   lib/spack/spack/cmd/patch.py          |   1
 -rw-r--r--   lib/spack/spack/directory_layout.py   |  56
 -rw-r--r--   lib/spack/spack/fetch_strategy.py     |   7
 -rw-r--r--   lib/spack/spack/mirror.py             |  13
 -rw-r--r--   lib/spack/spack/package.py            | 115
 -rw-r--r--   lib/spack/spack/stage.py              |  55
12 files changed, 244 insertions(+), 155 deletions(-)
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 07a17ebd0a..b7e0b6a4f3 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -401,6 +401,35 @@ construct the new one for ``8.2.1``.
 When you supply a custom URL for a version, Spack uses that URL
 *verbatim* and does not perform extrapolation.
 
+Skipping the expand step
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Spack normally expands archives automatically after downloading
+them. If you want to skip this step (e.g., for self-extracting
+executables and other custom archive types), you can add
+``expand=False`` to a ``version`` directive.
+
+.. code-block:: python
+
+   version('8.2.1', '4136d7b4c04df68b686570afa26988ac',
+           url='http://example.com/foo-8.2.1-special-version.tar.gz', expand=False)
+
+When ``expand`` is set to ``False``, Spack sets the current working
+directory to the directory containing the downloaded archive before it
+calls your ``install`` method. Within ``install``, the path to the
+downloaded archive is available as ``self.stage.archive_file``.
+
+Here is an example snippet for packages distributed as self-extracting
+archives. The example sets permissions on the downloaded file to make
+it executable, then runs it with some arguments.
+
+.. code-block:: python
+
+   def install(self, spec, prefix):
+       set_executable(self.stage.archive_file)
+       installer = Executable(self.stage.archive_file)
+       installer('--prefix=%s' % prefix, 'arg1', 'arg2', 'etc.')
+
 Checksums
 ~~~~~~~~~~~~~~~~~
 
@@ -2108,6 +2137,15 @@ Filtering functions
 
    Examples:
 
+   #. Filtering a Makefile to force it to use Spack's compiler wrappers:
+
+      .. code-block:: python
+
+         filter_file(r'^CC\s*=.*', spack_cc, 'Makefile')
+         filter_file(r'^CXX\s*=.*', spack_cxx, 'Makefile')
+         filter_file(r'^F77\s*=.*', spack_f77, 'Makefile')
+         filter_file(r'^FC\s*=.*', spack_fc, 'Makefile')
+
    #. Replacing ``#!/usr/bin/perl`` with ``#!/usr/bin/env perl`` in ``bib2xhtml``:
 
       .. code-block:: python
diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py
index 9ba662d0e3..c4665c284c 100644
--- a/lib/spack/llnl/util/filesystem.py
+++ b/lib/spack/llnl/util/filesystem.py
@@ -26,7 +26,8 @@ __all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree'
            'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp',
            'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file',
            'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink',
-           'remove_dead_links', 'remove_linked_tree', 'copy_mode', 'unset_executable_mode']
+           'set_executable', 'copy_mode', 'unset_executable_mode',
+           'remove_dead_links', 'remove_linked_tree']
 
 import os
 import sys
@@ -354,6 +355,12 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
     if order == 'post':
         yield (source_path, dest_path)
 
+
+def set_executable(path):
+    st = os.stat(path)
+    os.chmod(path, st.st_mode | stat.S_IEXEC)
+
+
 def remove_dead_links(root):
     """
     Removes any dead link that is present in root
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 1b87778080..87fc310b5a 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -177,8 +177,6 @@ def set_module_variables_for_package(pkg, m):
     """Populate the module scope of install() with some useful functions.
        This makes things easier for package writers.
     """
-    m = pkg.module
-
     # number of jobs spack will to build with.
     jobs = multiprocessing.cpu_count()
     if not pkg.parallel:
@@ -214,6 +212,13 @@ def set_module_variables_for_package(pkg, m):
         m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE')
         m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(pkg)))
 
+    # Put spack compiler paths in module scope.
+    link_dir = spack.build_env_path
+    m.spack_cc = join_path(link_dir, pkg.compiler.link_paths['cc'])
+    m.spack_cxx = join_path(link_dir, pkg.compiler.link_paths['cxx'])
+    m.spack_f77 = join_path(link_dir, pkg.compiler.link_paths['f77'])
+    m.spack_f90 = join_path(link_dir, pkg.compiler.link_paths['fc'])
+
     # Emulate some shell commands for convenience
     m.pwd = os.getcwd
     m.cd = os.chdir
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index 966ff9a5e9..518d2703dc 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -22,23 +22,18 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import os
-import re
 import argparse
 import hashlib
-from pprint import pprint
-from subprocess import CalledProcessError
 
 import llnl.util.tty as tty
-from llnl.util.tty.colify import colify
-
 import spack
 import spack.cmd
 import spack.util.crypto
 from spack.stage import Stage, FailedDownloadError
 from spack.version import *
 
-description ="Checksum available versions of a package."
+description = "Checksum available versions of a package."
+
 
 def setup_parser(subparser):
     subparser.add_argument(
@@ -60,30 +55,23 @@ def get_checksums(versions, urls, **kwargs):
 
     hashes = []
     i = 0
     for url, version in zip(urls, versions):
-        stage = Stage(url)
         try:
-            stage.fetch()
-            if i == 0 and first_stage_function:
-                first_stage_function(stage)
-
-            hashes.append((version,
-                           spack.util.crypto.checksum(hashlib.md5, stage.archive_file)))
-        except FailedDownloadError, e:
+            with Stage(url, keep=keep_stage) as stage:
+                stage.fetch()
+                if i == 0 and first_stage_function:
+                    first_stage_function(stage)
+
+                hashes.append((version,
+                               spack.util.crypto.checksum(hashlib.md5, stage.archive_file)))
+            i += 1
+        except FailedDownloadError as e:
             tty.msg("Failed to fetch %s" % url)
-            continue
-        except Exception, e:
+        except Exception as e:
             tty.msg('Something failed on %s, skipping.\n    (%s)' % (url, e))
-            continue
-
-        finally:
-            if not keep_stage:
-                stage.destroy()
-        i += 1
 
     return hashes
 
-
 def checksum(parser, args):
     # get the package we're going to generate checksums for
     pkg = spack.repo.get(args.package)
@@ -106,7 +94,7 @@ def checksum(parser, args):
 
     tty.msg("Found %s versions of %s" % (len(versions), pkg.name),
             *spack.cmd.elide_list(
-                ["%-10s%s" % (v, versions[v]) for v in sorted_versions]))
+            ["%-10s%s" % (v, versions[v]) for v in sorted_versions]))
     print
     archives_to_fetch = tty.get_number(
         "How many would you like to checksum?", default=5, abort='q')
diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py
index 9df53312f8..2c3a8761ab 100644
--- a/lib/spack/spack/cmd/diy.py
+++ b/lib/spack/spack/cmd/diy.py
@@ -46,6 +46,9 @@ def setup_parser(subparser):
         '--skip-patch', action='store_true',
         help="Skip patching for the DIY build.")
     subparser.add_argument(
+        '-q', '--quiet', action='store_true', dest='quiet',
+        help="Do not display verbose build output while installing.")
+    subparser.add_argument(
         'spec', nargs=argparse.REMAINDER,
         help="specs to use for install. Must contain package AND verison.")
 
@@ -92,4 +95,5 @@ def diy(self, args):
     package.do_install(
         keep_prefix=args.keep_prefix,
         ignore_deps=args.ignore_deps,
+        verbose=not args.quiet,
         keep_stage=True)   # don't remove source dir for DIY.
diff --git a/lib/spack/spack/cmd/md5.py b/lib/spack/spack/cmd/md5.py
index 879ef9f7b7..f99fc0f8c2 100644
--- a/lib/spack/spack/cmd/md5.py
+++ b/lib/spack/spack/cmd/md5.py
@@ -22,51 +22,51 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 ##############################################################################
-import os
 import argparse
 import hashlib
-
-from contextlib import contextmanager
+import os
 
 import llnl.util.tty as tty
-from llnl.util.filesystem import *
-
 import spack.util.crypto
 from spack.stage import Stage, FailedDownloadError
 
 description = "Calculate md5 checksums for files/urls."
 
-@contextmanager
-def stager(url):
-    _cwd = os.getcwd()
-    _stager = Stage(url)
-    try:
-        _stager.fetch()
-        yield _stager
-    except FailedDownloadError:
-        tty.msg("Failed to fetch %s" % url)
-    finally:
-        _stager.destroy()
-        os.chdir(_cwd)  # the Stage class changes the current working dir so it has to be restored
 
 def setup_parser(subparser):
     setup_parser.parser = subparser
     subparser.add_argument('files', nargs=argparse.REMAINDER,
                            help="Files to checksum.")
 
+
+def compute_md5_checksum(url):
+    if not os.path.isfile(url):
+        with Stage(url) as stage:
+            stage.fetch()
+            value = spack.util.crypto.checksum(hashlib.md5, stage.archive_file)
+    else:
+        value = spack.util.crypto.checksum(hashlib.md5, url)
+    return value
+
+
 def md5(parser, args):
     if not args.files:
         setup_parser.parser.print_help()
         return 1
 
-    for f in args.files:
-        if not os.path.isfile(f):
-            with stager(f) as stage:
-                checksum = spack.util.crypto.checksum(hashlib.md5, stage.archive_file)
-            print "%s %s" % (checksum, f)
-        else:
-            if not can_access(f):
-                tty.die("Cannot read file: %s" % f)
+    results = []
+    for url in args.files:
+        try:
+            checksum = compute_md5_checksum(url)
+            results.append((checksum, url))
+        except FailedDownloadError as e:
+            tty.warn("Failed to fetch %s" % url)
+            tty.warn("%s" % e)
+        except IOError as e:
+            tty.warn("Error when reading %s" % url)
+            tty.warn("%s" % e)
 
-            checksum = spack.util.crypto.checksum(hashlib.md5, f)
-            print "%s %s" % (checksum, f)
+    # Dump the MD5s at last without interleaving them with downloads
+    tty.msg("%d MD5 checksums:" % len(results))
+    for checksum, url in results:
+        print "%s %s" % (checksum, url)
diff --git a/lib/spack/spack/cmd/patch.py b/lib/spack/spack/cmd/patch.py
index 44fc8696db..b04b402738 100644
--- a/lib/spack/spack/cmd/patch.py
+++ b/lib/spack/spack/cmd/patch.py
@@ -24,6 +24,7 @@
 ##############################################################################
 import argparse
 
+import llnl.util.tty as tty
 import spack.cmd
 import spack
 
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index 08c23627f4..242eb1afa0 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -85,6 +85,16 @@ class DirectoryLayout(object):
         raise NotImplementedError()
 
 
+    def check_installed(self, spec):
+        """Checks whether a spec is installed.
+
+        Return the spec's prefix, if it is installed, None otherwise.
+
+        Raise an exception if the install is inconsistent or corrupt.
+        """
+        raise NotImplementedError()
+
+
     def extension_map(self, spec):
         """Get a dict of currently installed extension packages for a spec.
@@ -246,26 +256,36 @@ class YamlDirectoryLayout(DirectoryLayout):
 
     def create_install_directory(self, spec):
         _check_concrete(spec)
+
+        prefix = self.check_installed(spec)
+        if prefix:
+            raise InstallDirectoryAlreadyExistsError(prefix)
+
+        mkdirp(self.metadata_path(spec))
+        self.write_spec(spec, self.spec_file_path(spec))
+
+
+    def check_installed(self, spec):
+        _check_concrete(spec)
         path = self.path_for_spec(spec)
         spec_file_path = self.spec_file_path(spec)
 
-        if os.path.isdir(path):
-            if not os.path.isfile(spec_file_path):
-                raise InconsistentInstallDirectoryError(
-                    'No spec file found at path %s' % spec_file_path)
+        if not os.path.isdir(path):
+            return None
 
-            installed_spec = self.read_spec(spec_file_path)
-            if installed_spec == self.spec:
-                raise InstallDirectoryAlreadyExistsError(path)
+        if not os.path.isfile(spec_file_path):
+            raise InconsistentInstallDirectoryError(
+                'Inconsistent state: install prefix exists but contains no spec.yaml:',
+                "  " + path)
 
-            if spec.dag_hash() == installed_spec.dag_hash():
-                raise SpecHashCollisionError(installed_hash, spec_hash)
-            else:
-                raise InconsistentInstallDirectoryError(
-                    'Spec file in %s does not match hash!' % spec_file_path)
+        installed_spec = self.read_spec(spec_file_path)
+        if installed_spec == spec:
+            return path
 
-        mkdirp(self.metadata_path(spec))
-        self.write_spec(spec, spec_file_path)
+        if spec.dag_hash() == installed_spec.dag_hash():
+            raise SpecHashCollisionError(installed_hash, spec_hash)
+        else:
+            raise InconsistentInstallDirectoryError(
+                'Spec file in %s does not match hash!' % spec_file_path)
 
 
     def all_specs(self):
@@ -399,8 +419,8 @@ class YamlDirectoryLayout(DirectoryLayout):
 
 class DirectoryLayoutError(SpackError):
     """Superclass for directory layout errors."""
-    def __init__(self, message):
-        super(DirectoryLayoutError, self).__init__(message)
+    def __init__(self, message, long_msg=None):
+        super(DirectoryLayoutError, self).__init__(message, long_msg)
 
 
 class SpecHashCollisionError(DirectoryLayoutError):
@@ -422,8 +442,8 @@ class RemoveFailedError(DirectoryLayoutError):
 
 class InconsistentInstallDirectoryError(DirectoryLayoutError):
     """Raised when a package seems to be installed to the wrong place."""
-    def __init__(self, message):
-        super(InconsistentInstallDirectoryError, self).__init__(message)
+    def __init__(self, message, long_msg=None):
+        super(InconsistentInstallDirectoryError, self).__init__(message, long_msg)
 
 
 class InstallDirectoryAlreadyExistsError(DirectoryLayoutError):
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index ec17cb97f1..0d0a7db8a9 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -82,7 +82,6 @@ class FetchStrategy(object):
 
     class __metaclass__(type):
         """This metaclass registers all fetch strategies in a list."""
-
         def __init__(cls, name, bases, dict):
             type.__init__(cls, name, bases, dict)
             if cls.enabled:
                 all_strategies.append(cls)
@@ -145,6 +144,8 @@ class URLFetchStrategy(FetchStrategy):
         self.digest = kwargs.get('md5', None)
         if not self.digest:
             self.digest = digest
 
+        self.expand_archive = kwargs.get('expand', True)
+
         if not self.url:
             raise ValueError("URLFetchStrategy requires a url for fetching.")
@@ -218,6 +219,10 @@ class URLFetchStrategy(FetchStrategy):
 
     @_needs_stage
     def expand(self):
+        if not self.expand_archive:
+            tty.msg("Skipping expand step for %s" % self.archive_file)
+            return
+
         tty.msg("Staging archive: %s" % self.archive_file)
         self.stage.chdir()
diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py
index fdc4e7967f..6981f69ac0 100644
--- a/lib/spack/spack/mirror.py
+++ b/lib/spack/spack/mirror.py
@@ -51,13 +51,20 @@ def mirror_archive_filename(spec, fetcher):
         raise ValueError("mirror.path requires spec with concrete version.")
 
     if isinstance(fetcher, fs.URLFetchStrategy):
-        # If we fetch this version with a URLFetchStrategy, use URL's archive type
-        ext = url.downloaded_file_extension(fetcher.url)
+        if fetcher.expand_archive:
+            # If we fetch this version with a URLFetchStrategy, use URL's archive type
+            ext = url.downloaded_file_extension(fetcher.url)
+        else:
+            # If the archive shouldn't be expanded, don't check for its extension.
+            ext = None
     else:
         # Otherwise we'll make a .tar.gz ourselves
         ext = 'tar.gz'
 
-    return "%s-%s.%s" % (spec.package.name, spec.version, ext)
+    filename = "%s-%s" % (spec.package.name, spec.version)
+    if ext:
+        filename += ".%s" % ext
+    return filename
 
 
 def mirror_archive_path(spec, fetcher):
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index d4acbf5024..972a0410b9 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -845,7 +845,8 @@ class Package(object):
         if not self.spec.concrete:
             raise ValueError("Can only install concrete packages.")
 
-        if os.path.exists(self.prefix):
+        # Ensure package is not already installed
+        if spack.install_layout.check_installed(self.spec):
             tty.msg("%s is already installed in %s" % (self.name, self.prefix))
             return
 
@@ -857,18 +858,11 @@ class Package(object):
             keep_prefix=keep_prefix, keep_stage=keep_stage, ignore_deps=ignore_deps,
             fake=fake, skip_patch=skip_patch, verbose=verbose, make_jobs=make_jobs)
 
-        def cleanup():
-            """Handles removing install prefix on error."""
-            if not keep_prefix:
-                self.remove_prefix()
-            else:
-                tty.warn("Keeping install prefix in place despite error.",
-                         "Spack will think this package is installed. " +
-                         "Manually remove this directory to fix:",
-                         self.prefix, wrap=True)
+        # Set parallelism before starting build.
+        self.make_jobs = make_jobs
 
         # Then install the package itself.
-        def real_work():
+        def build_process():
             """Forked for each build. Has its own process and python module space
                set up by build_environment.fork()."""
             start_time = time.time()
@@ -878,30 +872,24 @@ class Package(object):
             else:
                 self.do_stage()
 
-            # create the install directory.  The install layout
-            # handles this in case so that it can use whatever
-            # package naming scheme it likes.
-            spack.install_layout.create_install_directory(self.spec)
-
-            try:
-                tty.msg("Building %s" % self.name)
+            tty.msg("Building %s" % self.name)
 
-                self.stage.keep = keep_stage
-                with self.stage:
-                    # Run the pre-install hook in the child process after
-                    # the directory is created.
-                    spack.hooks.pre_install(self)
+            self.stage.keep = keep_stage
+            with self.stage:
+                # Run the pre-install hook in the child process after
+                # the directory is created.
+                spack.hooks.pre_install(self)
 
-                    if fake:
-                        self.do_fake_install()
-
-                    else:
-                        # Do the real install in the source directory.
-                        self.stage.chdir_to_source()
+                if fake:
+                    self.do_fake_install()
+                else:
+                    # Do the real install in the source directory.
+                    self.stage.chdir_to_source()
 
-                        # Save the build environment in a file before building.
-                        env_path = join_path(os.getcwd(), 'spack-build.env')
+                    # Save the build environment in a file before building.
+                    env_path = join_path(os.getcwd(), 'spack-build.env')
 
+                    try:
                         # Redirect I/O to a build log (and optionally to the terminal)
                         log_path = join_path(os.getcwd(), 'spack-build.out')
                         log_file = open(log_path, 'w')
@@ -909,43 +897,46 @@ class Package(object):
                             dump_environment(env_path)
                             self.install(self.spec, self.prefix)
 
-                        # Ensure that something was actually installed.
-                        self._sanity_check_install()
+                    except ProcessError as e:
+                        # Annotate ProcessErrors with the location of the build log.
+                        e.build_log = log_path
+                        raise e
 
-                        # Copy provenance into the install directory on success
-                        log_install_path = spack.install_layout.build_log_path(self.spec)
-                        env_install_path = spack.install_layout.build_env_path(self.spec)
-                        packages_dir = spack.install_layout.build_packages_path(self.spec)
+                    # Ensure that something was actually installed.
+                    self._sanity_check_install()
 
-                        install(log_path, log_install_path)
-                        install(env_path, env_install_path)
-                        dump_packages(self.spec, packages_dir)
+                    # Copy provenance into the install directory on success
+                    log_install_path = spack.install_layout.build_log_path(self.spec)
+                    env_install_path = spack.install_layout.build_env_path(self.spec)
+                    packages_dir = spack.install_layout.build_packages_path(self.spec)
 
-                # Stop timer.
-                self._total_time = time.time() - start_time
-                build_time = self._total_time - self._fetch_time
+                    install(log_path, log_install_path)
+                    install(env_path, env_install_path)
+                    dump_packages(self.spec, packages_dir)
 
-                tty.msg("Successfully installed %s" % self.name,
-                        "Fetch: %s.  Build: %s.  Total: %s."
-                        % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)))
-                print_pkg(self.prefix)
+            # Stop timer.
+            self._total_time = time.time() - start_time
+            build_time = self._total_time - self._fetch_time
 
-            except ProcessError as e:
-                # Annotate with location of build log.
-                e.build_log = log_path
-                cleanup()
-                raise e
+            tty.msg("Successfully installed %s" % self.name,
+                    "Fetch: %s.  Build: %s.  Total: %s."
+                    % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)))
+            print_pkg(self.prefix)
 
-            except:
-                # other exceptions just clean up and raise.
-                cleanup()
-                raise
-
-        # Set parallelism before starting build.
-        self.make_jobs = make_jobs
-
-        # Do the build.
-        spack.build_environment.fork(self, real_work)
+        try:
+            # Create the install prefix and fork the build process.
+            spack.install_layout.create_install_directory(self.spec)
+            spack.build_environment.fork(self, build_process)
+        except:
+            # remove the install prefix if anything went wrong during install.
+            if not keep_prefix:
+                self.remove_prefix()
+            else:
+                tty.warn("Keeping install prefix in place despite error.",
+                         "Spack will think this package is installed. " +
+                         "Manually remove this directory to fix:",
+                         self.prefix, wrap=True)
+            raise
 
         # note: PARENT of the build process adds the new package to
         # the database, so that we don't need to re-read from file.
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index b117c76aa1..5354135e6a 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -229,13 +229,22 @@ class Stage(object):
 
     @property
     def source_path(self):
-        """Returns the path to the expanded/checked out source code
-        within this fetch strategy's path.
+        """Returns the path to the expanded/checked out source code.
 
-        This assumes nothing else is going ot be put in the
-        FetchStrategy's path.  It searches for the first
-        subdirectory of the path it can find, then returns that.
+        To find the source code, this method searches for the first
+        subdirectory of the stage that it can find, and returns it.
+        This assumes nothing besides the archive file will be in the
+        stage path, but it has the advantage that we don't need to
+        know the name of the archive or its contents.
+
+        If the fetch strategy is not supposed to expand the downloaded
+        file, it will just return the stage path. If the archive needs
+        to be expanded, it will return None when no archive is found.
         """
+        if isinstance(self.fetcher, fs.URLFetchStrategy):
+            if not self.fetcher.expand_archive:
+                return self.path
+
         for p in [os.path.join(self.path, f) for f in os.listdir(self.path)]:
             if os.path.isdir(p):
                 return p
@@ -416,21 +425,15 @@ class ResourceStage(Stage):
         shutil.move(source_path, destination_path)
 
 
-@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive', 'restage', 'destroy'])
+@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive', 'restage', 'destroy'])
 class StageComposite:
     """
     Composite for Stage type objects. The first item in this composite is
     considered to be the root package, and operations that return a value are
     forwarded to it.
     """
-
-    @property
-    def source_path(self):
-        return self[0].source_path
-
-    @property
-    def path(self):
-        return self[0].path
-
+    #
+    # __enter__ and __exit__ delegate to all stages in the composite.
+    #
     def __enter__(self):
         for item in self:
             item.__enter__()
@@ -438,11 +441,27 @@ class StageComposite:
 
     def __exit__(self, exc_type, exc_val, exc_tb):
         for item in reversed(self):
+            item.keep = getattr(self, 'keep', None)
             item.__exit__(exc_type, exc_val, exc_tb)
 
+    #
+    # Below functions act only on the *first* stage in the composite.
+    #
+    @property
+    def source_path(self):
+        return self[0].source_path
+
+    @property
+    def path(self):
+        return self[0].path
+
     def chdir_to_source(self):
         return self[0].chdir_to_source()
 
+    @property
+    def archive_file(self):
+        return self[0].archive_file
+
 
 class DIYStage(object):
     """Simple class that allows any directory to be a spack stage."""
@@ -458,10 +477,14 @@ class DIYStage(object):
         else:
             raise ChdirError("Setup failed: no such directory: " + self.path)
 
+    # DIY stages do nothing as context managers.
+    def __enter__(self):
+        pass
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
+
     def chdir_to_source(self):
         self.chdir()
 
-    def fetch(self):
+    def fetch(self, mirror_only):
         tty.msg("No need to fetch for DIY.")
 
     def check(self):
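
Taken together, the changes above let a package ship as a self-extracting installer: a version marked expand=False is downloaded but never unpacked, self.stage.archive_file points at the raw download, and the new set_executable helper plus Executable run it from install(). A minimal package sketch using those pieces might look like the following; the class name, homepage, URL, and md5 are hypothetical placeholders, not part of this commit.

    # Hypothetical example only -- FooInstaller, the URL, and the md5 are
    # placeholders.  The pieces it relies on (version(..., expand=False),
    # set_executable, Executable, self.stage.archive_file) are the ones
    # added or documented in the diff above.
    from spack import *


    class FooInstaller(Package):
        """Package distributed as a self-extracting installer."""

        homepage = "http://example.com"
        url      = "http://example.com/foo-installer-1.0.run"

        # expand=False tells the URLFetchStrategy to keep the download as-is.
        version('1.0', '0123456789abcdef0123456789abcdef', expand=False)

        def install(self, spec, prefix):
            # With expand=False the stage directory holds only the raw
            # download; make it executable and run it against the prefix.
            set_executable(self.stage.archive_file)
            installer = Executable(self.stage.archive_file)
            installer('--prefix=%s' % prefix)

Keeping the installer invocation inside the package's install() method, rather than in the fetch strategy, leaves each package free to decide how its own installer is run.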