author | Alfredo Gimenez <gimenez1@llnl.gov> | 2015-02-06 16:27:33 -0800
committer | Alfredo Gimenez <gimenez1@llnl.gov> | 2015-02-06 16:27:33 -0800
commit | a4ac1977a48ad2f8dddf5f26964e4c93bec490d1 (patch)
tree | 108cf1b354faee2659a6dba68cd97669e54647ea /lib
parent | 81a4d89e94a53b087bee18c900f6de19142602f2 (diff)
parent | 5bde8359e8378bf8595a4bef343d1f50258f663d (diff)
download | spack-a4ac1977a48ad2f8dddf5f26964e4c93bec490d1.tar.gz spack-a4ac1977a48ad2f8dddf5f26964e4c93bec490d1.tar.bz2 spack-a4ac1977a48ad2f8dddf5f26964e4c93bec490d1.tar.xz spack-a4ac1977a48ad2f8dddf5f26964e4c93bec490d1.zip
merge with python-modules
Diffstat (limited to 'lib')
-rw-r--r-- | lib/spack/llnl/util/filesystem.py | 15
-rw-r--r-- | lib/spack/llnl/util/link_tree.py | 193
-rw-r--r-- | lib/spack/spack/__init__.py | 2
-rw-r--r-- | lib/spack/spack/cmd/__init__.py | 15
-rw-r--r-- | lib/spack/spack/cmd/activate.py | 50
-rw-r--r-- | lib/spack/spack/cmd/deactivate.py | 50
-rw-r--r-- | lib/spack/spack/cmd/extensions.py | 97
-rw-r--r-- | lib/spack/spack/cmd/find.py | 69
-rw-r--r-- | lib/spack/spack/cmd/location.py | 48
-rw-r--r-- | lib/spack/spack/cmd/uninstall.py | 1
-rw-r--r-- | lib/spack/spack/directory_layout.py | 122
-rw-r--r-- | lib/spack/spack/hooks/__init__.py | 7
-rw-r--r-- | lib/spack/spack/hooks/extensions.py | 40
-rw-r--r-- | lib/spack/spack/mirror.py | 13
-rw-r--r-- | lib/spack/spack/modules.py | 8
-rw-r--r-- | lib/spack/spack/package.py | 235
-rw-r--r-- | lib/spack/spack/packages.py | 13
-rw-r--r-- | lib/spack/spack/relations.py | 33
-rw-r--r-- | lib/spack/spack/spec.py | 7
-rw-r--r-- | lib/spack/spack/test/__init__.py | 3
-rw-r--r-- | lib/spack/spack/test/link_tree.py | 153
-rw-r--r-- | lib/spack/spack/test/mirror.py | 8
22 files changed, 1096 insertions, 86 deletions
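
The heart of this merge is the new `llnl/util/link_tree.py` module and the extension machinery built on top of it (the `activate`, `deactivate`, and `extensions` commands, the directory-layout bookkeeping, and the pre/post install hooks). As a reading aid for the raw diff below, here is a minimal, hypothetical sketch — not part of the patch — of how the new `LinkTree` class is driven, mirroring what `Package.activate()`/`deactivate()` do further down; the temporary directories and the `bin/tool` file are stand-ins for an extension prefix and an extendee prefix.

```python
# Editor's sketch (not part of the patch): exercising the new LinkTree class
# the way Package.activate()/deactivate() do below.  All paths here are
# illustrative stand-ins for an extension prefix and an extendee prefix.
import os
import shutil
import tempfile

from llnl.util.filesystem import touchp      # touchp() is added by this commit
from llnl.util.link_tree import LinkTree     # new module added by this commit

src = tempfile.mkdtemp()                     # pretend extension install prefix
dst = tempfile.mkdtemp()                     # pretend extendee install prefix
touchp(os.path.join(src, 'bin', 'tool'))     # a file the "extension" provides

# The directory layout's metadata files (.spec, .extensions) are never linked.
ignore = lambda f: f in ('.spec', '.extensions')

tree = LinkTree(src)
conflict = tree.find_conflict(dst, ignore=ignore)
if conflict:
    raise RuntimeError("activation would clobber %s" % conflict)

tree.merge(dst, ignore=ignore)               # symlink files, create directories
assert os.path.islink(os.path.join(dst, 'bin', 'tool'))

tree.unmerge(dst, ignore=ignore)             # remove links, prune empty dirs
assert not os.path.exists(os.path.join(dst, 'bin', 'tool'))

shutil.rmtree(src)
shutil.rmtree(dst)
```

Note that `merge()` drops a `.spack-empty` marker in destination directories that were already empty, so that `unmerge()` knows not to delete them.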
diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 9f08832598..576aeb16bd 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -23,8 +23,8 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## __all__ = ['set_install_permissions', 'install', 'expand_user', 'working_dir', - 'touch', 'mkdirp', 'force_remove', 'join_path', 'ancestor', - 'can_access', 'filter_file', 'change_sed_delimiter'] + 'touch', 'touchp', 'mkdirp', 'force_remove', 'join_path', 'ancestor', + 'can_access', 'filter_file', 'change_sed_delimiter', 'is_exe'] import os import sys @@ -154,6 +154,11 @@ def install(src, dest): os.chmod(dest, dest_mode) +def is_exe(path): + """True if path is an executable file.""" + return os.path.isfile(path) and os.access(path, os.X_OK) + + def expand_user(path): """Find instances of '%u' in a path and replace with the current user's username.""" @@ -199,6 +204,12 @@ def touch(path): os.utime(path, None) +def touchp(path): + """Like touch, but creates any parent directories needed for the file.""" + mkdirp(os.path.dirname(path)) + touch(path) + + def join_path(prefix, *args): path = str(prefix) for elt in args: diff --git a/lib/spack/llnl/util/link_tree.py b/lib/spack/llnl/util/link_tree.py new file mode 100644 index 0000000000..4e4e48316e --- /dev/null +++ b/lib/spack/llnl/util/link_tree.py @@ -0,0 +1,193 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +"""LinkTree class for setting up trees of symbolic links.""" +__all__ = ['LinkTree'] + +import os +import shutil +from llnl.util.filesystem import * + +empty_file_name = '.spack-empty' + + +def traverse_tree(source_root, dest_root, rel_path='', **kwargs): + """Traverse two filesystem trees simultaneously. + + Walks the LinkTree directory in pre or post order. Yields each + file in the source directory with a matching path from the dest + directory, along with whether the file is a directory. 
+ e.g., for this tree:: + + root/ + a/ + file1 + file2 + b/ + file3 + + When called on dest, this yields:: + + ('root', 'dest') + ('root/a', 'dest/a') + ('root/a/file1', 'dest/a/file1') + ('root/a/file2', 'dest/a/file2') + ('root/b', 'dest/b') + ('root/b/file3', 'dest/b/file3') + + Optional args: + + order=[pre|post] -- Whether to do pre- or post-order traveral. + + ignore=<predicate> -- Predicate indicating which files to ignore. + + follow_nonexisting -- Whether to descend into directories in + src that do not exit in dest. Default True. + + follow_links -- Whether to descend into symlinks in src. + + """ + follow_nonexisting = kwargs.get('follow_nonexisting', True) + follow_links = kwargs.get('follow_link', False) + + # Yield in pre or post order? + order = kwargs.get('order', 'pre') + if order not in ('pre', 'post'): + raise ValueError("Order must be 'pre' or 'post'.") + + # List of relative paths to ignore under the src root. + ignore = kwargs.get('ignore', lambda filename: False) + + # Don't descend into ignored directories + if ignore(rel_path): + return + + source_path = os.path.join(source_root, rel_path) + dest_path = os.path.join(dest_root, rel_path) + + # preorder yields directories before children + if order == 'pre': + yield (source_path, dest_path) + + for f in os.listdir(source_path): + source_child = os.path.join(source_path, f) + dest_child = os.path.join(dest_path, f) + rel_child = os.path.join(rel_path, f) + + # Treat as a directory + if os.path.isdir(source_child) and ( + follow_links or not os.path.islink(source_child)): + + # When follow_nonexisting isn't set, don't descend into dirs + # in source that do not exist in dest + if follow_nonexisting or os.path.exists(dest_child): + tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs) + for t in tuples: yield t + + # Treat as a file. + elif not ignore(os.path.join(rel_path, f)): + yield (source_child, dest_child) + + if order == 'post': + yield (source_path, dest_path) + + + +class LinkTree(object): + """Class to create trees of symbolic links from a source directory. + + LinkTree objects are constructed with a source root. Their + methods allow you to create and delete trees of symbolic links + back to the source tree in specific destination directories. + Trees comprise symlinks only to files; directries are never + symlinked to, to prevent the source directory from ever being + modified. + + """ + def __init__(self, source_root): + if not os.path.exists(source_root): + raise IOError("No such file or directory: '%s'", source_root) + + self._root = source_root + + + def find_conflict(self, dest_root, **kwargs): + """Returns the first file in dest that conflicts with src""" + kwargs['follow_nonexisting'] = False + for src, dest in traverse_tree(self._root, dest_root, **kwargs): + if os.path.isdir(src): + if os.path.exists(dest) and not os.path.isdir(dest): + return dest + elif os.path.exists(dest): + return dest + return None + + + def merge(self, dest_root, **kwargs): + """Link all files in src into dest, creating directories if necessary.""" + kwargs['order'] = 'pre' + for src, dest in traverse_tree(self._root, dest_root, **kwargs): + if os.path.isdir(src): + if not os.path.exists(dest): + mkdirp(dest) + continue + + if not os.path.isdir(dest): + raise ValueError("File blocks directory: %s" % dest) + + # mark empty directories so they aren't removed on unmerge. 
+ if not os.listdir(dest): + marker = os.path.join(dest, empty_file_name) + touch(marker) + + else: + assert(not os.path.exists(dest)) + os.symlink(src, dest) + + + def unmerge(self, dest_root, **kwargs): + """Unlink all files in dest that exist in src. + + Unlinks directories in dest if they are empty. + + """ + kwargs['order'] = 'post' + for src, dest in traverse_tree(self._root, dest_root, **kwargs): + if os.path.isdir(src): + if not os.path.isdir(dest): + raise ValueError("File blocks directory: %s" % dest) + + # remove directory if it is empty. + if not os.listdir(dest): + shutil.rmtree(dest, ignore_errors=True) + + # remove empty dir marker if present. + marker = os.path.join(dest, empty_file_name) + if os.path.exists(marker): + os.remove(marker) + + elif os.path.exists(dest): + if not os.path.islink(dest): + raise ValueError("%s is not a link tree!" % dest) + os.remove(dest) diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 6697e00e40..6763411f7d 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -138,7 +138,7 @@ sys_type = None # should live. This file is overloaded for spack core vs. for packages. # __all__ = ['Package', 'Version', 'when', 'ver'] -from spack.package import Package +from spack.package import Package, ExtensionConflictError from spack.version import Version, ver from spack.multimethod import when diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py index 537db536dd..b96ac5af51 100644 --- a/lib/spack/spack/cmd/__init__.py +++ b/lib/spack/spack/cmd/__init__.py @@ -121,3 +121,18 @@ def elide_list(line_list, max_num=10): return line_list[:max_num-1] + ['...'] + line_list[-1:] else: return line_list + + +def disambiguate_spec(spec): + matching_specs = spack.db.get_installed(spec) + if not matching_specs: + tty.die("Spec '%s' matches no installed packages." % spec) + + elif len(matching_specs) > 1: + args = ["%s matches multiple packages." % spec, + "Matching packages:"] + args += [" " + str(s) for s in matching_specs] + args += ["Use a more specific spec."] + tty.die(*args) + + return matching_specs[0] diff --git a/lib/spack/spack/cmd/activate.py b/lib/spack/spack/cmd/activate.py new file mode 100644 index 0000000000..c1e23852d6 --- /dev/null +++ b/lib/spack/spack/cmd/activate.py @@ -0,0 +1,50 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from external import argparse +import llnl.util.tty as tty +import spack +import spack.cmd + +description = "Activate a package extension." + +def setup_parser(subparser): + subparser.add_argument( + 'spec', nargs=argparse.REMAINDER, help="spec of package extension to activate.") + + +def activate(parser, args): + specs = spack.cmd.parse_specs(args.spec, concretize=True) + if len(specs) != 1: + tty.die("activate requires one spec. %d given." % len(specs)) + + # TODO: remove this hack when DAG info is stored in dir layout. + # This ensures the ext spec is always normalized properly. + spack.db.get(specs[0]) + + spec = spack.cmd.disambiguate_spec(specs[0]) + if spec.package.activated: + tty.die("Package %s is already activated." % specs[0].short_spec) + + spec.package.do_activate() diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py new file mode 100644 index 0000000000..fd13f051df --- /dev/null +++ b/lib/spack/spack/cmd/deactivate.py @@ -0,0 +1,50 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from external import argparse +import llnl.util.tty as tty +import spack +import spack.cmd + +description = "Deactivate a package extension." + +def setup_parser(subparser): + subparser.add_argument( + 'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.") + + +def deactivate(parser, args): + specs = spack.cmd.parse_specs(args.spec, concretize=True) + if len(specs) != 1: + tty.die("deactivate requires one spec. %d given." % len(specs)) + + # TODO: remove this hack when DAG info is stored in dir layout. + # This ensures the ext spec is always normalized properly. + spack.db.get(specs[0]) + + spec = spack.cmd.disambiguate_spec(specs[0]) + if not spec.package.activated: + tty.die("Package %s is not activated." 
% specs[0].short_spec) + + spec.package.do_deactivate() diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py new file mode 100644 index 0000000000..f6ccd7b515 --- /dev/null +++ b/lib/spack/spack/cmd/extensions.py @@ -0,0 +1,97 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import sys +from external import argparse + +import llnl.util.tty as tty +from llnl.util.tty.colify import colify + +import spack +import spack.cmd +import spack.cmd.find + +description = "List extensions for package." + +def setup_parser(subparser): + format_group = subparser.add_mutually_exclusive_group() + format_group.add_argument( + '-l', '--long', action='store_const', dest='mode', const='long', + help='Show dependency hashes as well as versions.') + format_group.add_argument( + '-p', '--paths', action='store_const', dest='mode', const='paths', + help='Show paths to extension install directories') + format_group.add_argument( + '-d', '--deps', action='store_const', dest='mode', const='deps', + help='Show full dependency DAG of extensions') + + subparser.add_argument( + 'spec', nargs=argparse.REMAINDER, help='Spec of package to list extensions for') + + +def extensions(parser, args): + if not args.spec: + tty.die("extensions requires a package spec.") + + # Checks + spec = spack.cmd.parse_specs(args.spec) + if len(spec) > 1: + tty.die("Can only list extensions for one package.") + + if not spec[0].package.extendable: + tty.die("%s is not an extendable package." % spec[0].name) + + spec = spack.cmd.disambiguate_spec(spec[0]) + + if not spec.package.extendable: + tty.die("%s does not have extensions." % spec.short_spec) + + if not args.mode: + args.mode = 'short' + + # List package names of extensions + extensions = spack.db.extensions_for(spec) + if not extensions: + tty.msg("%s has no extensions." % spec.cshort_spec) + return + tty.msg("%s extensions:" % spec.cshort_spec) + colify(ext.name for ext in extensions) + + # List specs of installed extensions. + installed = [s.spec for s in spack.db.installed_extensions_for(spec)] + print + if not installed: + tty.msg("None activated.") + return + tty.msg("%d installed:" % len(installed)) + spack.cmd.find.display_specs(installed, mode=args.mode) + + # List specs of activated extensions. 
+ activated = spack.install_layout.get_extensions(spec) + print + if not activated: + tty.msg("None activated.") + return + tty.msg("%d currently activated:" % len(exts)) + spack.cmd.find.display_specs(installed, mode=args.mode) diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py index 1de3413d42..f6f503afe5 100644 --- a/lib/spack/spack/cmd/find.py +++ b/lib/spack/spack/cmd/find.py @@ -41,13 +41,13 @@ description ="Find installed spack packages" def setup_parser(subparser): format_group = subparser.add_mutually_exclusive_group() format_group.add_argument( - '-l', '--long', action='store_true', dest='long', + '-l', '--long', action='store_const', dest='mode', const='long', help='Show dependency hashes as well as versions.') format_group.add_argument( - '-p', '--paths', action='store_true', dest='paths', + '-p', '--paths', action='store_const', dest='mode', const='paths', help='Show paths to package install directories') format_group.add_argument( - '-d', '--deps', action='store_true', dest='full_deps', + '-d', '--deps', action='store_const', dest='mode', const='deps', help='Show full dependency DAG of installed packages') subparser.add_argument( @@ -55,26 +55,8 @@ def setup_parser(subparser): help='optional specs to filter results') -def find(parser, args): - # Filter out specs that don't exist. - query_specs = spack.cmd.parse_specs(args.query_specs) - query_specs, nonexisting = partition_list( - query_specs, lambda s: spack.db.exists(s.name)) - - if nonexisting: - msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '') - msg += ", ".join(s.name for s in nonexisting) - tty.msg(msg) - - if not query_specs: - return - - # Get all the specs the user asked for - if not query_specs: - specs = set(spack.db.installed_package_specs()) - else: - results = [set(spack.db.get_installed(qs)) for qs in query_specs] - specs = set.union(*results) +def display_specs(specs, **kwargs): + mode = kwargs.get('mode', 'short') # Make a dict with specs keyed by architecture and compiler. index = index_by(specs, ('architecture', 'compiler')) @@ -92,7 +74,7 @@ def find(parser, args): specs.sort() abbreviated = [s.format('$_$@$+', color=True) for s in specs] - if args.paths: + if mode == 'paths': # Print one spec per line along with prefix path width = max(len(s) for s in abbreviated) width += 2 @@ -101,11 +83,44 @@ def find(parser, args): for abbrv, spec in zip(abbreviated, specs): print format % (abbrv, spec.prefix) - elif args.full_deps: + elif mode == 'deps': for spec in specs: print spec.tree(indent=4, format='$_$@$+', color=True), - else: + + elif mode in ('short', 'long'): fmt = '$-_$@$+' - if args.long: + if mode == 'long': fmt += '$#' colify(s.format(fmt, color=True) for s in specs) + + else: + raise ValueError( + "Invalid mode for display_specs: %s. Must be one of (paths, deps, short)." % mode) + + + +def find(parser, args): + # Filter out specs that don't exist. 
+ query_specs = spack.cmd.parse_specs(args.query_specs) + query_specs, nonexisting = partition_list( + query_specs, lambda s: spack.db.exists(s.name)) + + if nonexisting: + msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '') + msg += ", ".join(s.name for s in nonexisting) + tty.msg(msg) + + if not query_specs: + return + + # Get all the specs the user asked for + if not query_specs: + specs = set(spack.db.installed_package_specs()) + else: + results = [set(spack.db.get_installed(qs)) for qs in query_specs] + specs = set.union(*results) + + if not args.mode: + args.mode = 'short' + display_specs(specs, mode=args.mode) + diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py index e422eaf966..709e894b7f 100644 --- a/lib/spack/spack/cmd/location.py +++ b/lib/spack/spack/cmd/location.py @@ -78,38 +78,30 @@ def location(parser, args): tty.die("You must supply a spec.") if len(specs) != 1: tty.die("Too many specs. Supply only one.") - spec = specs[0] if args.install_dir: # install_dir command matches against installed specs. - matching_specs = spack.db.get_installed(spec) - if not matching_specs: - tty.die("Spec '%s' matches no installed packages." % spec) + spec = spack.cmd.disambiguate_spec(specs[0]) + print spec.prefix - elif len(matching_specs) > 1: - tty.error("%s matches multiple packages:" % spec) - for s in matching_specs: - sys.stderr.write(s.tree(color=True)) - sys.stderr.write("\n") - sys.stderr.write("Use a more specific spec.\n") - sys.exit(1) + else: + spec = specs[0] - print matching_specs[0].prefix + if args.package_dir: + # This one just needs the spec name. + print join_path(spack.db.root, spec.name) - elif args.package_dir: - # This one just needs the spec name. - print join_path(spack.db.root, spec.name) + else: + # These versions need concretized specs. + spec.concretize() + pkg = spack.db.get(spec) + + if args.stage_dir: + print pkg.stage.path + + else: # args.build_dir is the default. + if not pkg.stage.source_path: + tty.die("Build directory does not exist yet. Run this to create it:", + "spack stage " + " ".join(args.spec)) + print pkg.stage.source_path - else: - # These versions need concretized specs. - spec.concretize() - pkg = spack.db.get(spec) - - if args.stage_dir: - print pkg.stage.path - - else: # args.build_dir is the default. - if not pkg.stage.source_path: - tty.die("Build directory does not exist yet. Run this to create it:", - "spack stage " + " ".join(args.spec)) - print pkg.stage.source_path diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index e787c460ad..0962942f43 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -65,7 +65,6 @@ def uninstall(parser, args): " b) use a more specific spec."] tty.die(*args) - if len(matching_specs) == 0: tty.die("%s does not match any installed packages." % spec) diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 42cac0c9d2..efc40a17a4 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -53,6 +53,19 @@ class DirectoryLayout(object): self.root = root + @property + def hidden_file_paths(self): + """Return a list of hidden files used by the directory layout. + + Paths are relative to the root of an install directory. + + If the directory layout uses no hidden files to maintain + state, this should return an empty container, e.g. [] or (,). 
+ + """ + raise NotImplementedError() + + def all_specs(self): """To be implemented by subclasses to traverse all specs for which there is a directory within the root. @@ -71,6 +84,21 @@ class DirectoryLayout(object): raise NotImplementedError() + def get_extensions(self, spec): + """Get a set of currently installed extension packages for a spec.""" + raise NotImplementedError() + + + def add_extension(self, spec, extension_spec): + """Add to the list of currently installed extensions.""" + raise NotImplementedError() + + + def remove_extension(self, spec, extension_spec): + """Remove from the list of currently installed extensions.""" + raise NotImplementedError() + + def path_for_spec(self, spec): """Return an absolute path from the root to a directory for the spec.""" _check_concrete(spec) @@ -134,9 +162,16 @@ class SpecHashDirectoryLayout(DirectoryLayout): """Prefix size is number of characters in the SHA-1 prefix to use to make each hash unique. """ - spec_file_name = kwargs.get('spec_file_name', '.spec') + spec_file_name = kwargs.get('spec_file_name', '.spec') + extension_file_name = kwargs.get('extension_file_name', '.extensions') super(SpecHashDirectoryLayout, self).__init__(root) self.spec_file_name = spec_file_name + self.extension_file_name = extension_file_name + + + @property + def hidden_file_paths(self): + return ('.spec', '.extensions') def relative_path_for_spec(self, spec): @@ -225,6 +260,62 @@ class SpecHashDirectoryLayout(DirectoryLayout): yield spec + def extension_file_path(self, spec): + """Gets full path to an installed package's extension file""" + _check_concrete(spec) + return join_path(self.path_for_spec(spec), self.extension_file_name) + + + def get_extensions(self, spec): + _check_concrete(spec) + + extensions = set() + path = self.extension_file_path(spec) + if os.path.exists(path): + with closing(open(path)) as ext_file: + for line in ext_file: + try: + extensions.add(Spec(line.strip())) + except spack.error.SpackError, e: + raise InvalidExtensionSpecError(str(e)) + return extensions + + + def write_extensions(self, spec, extensions): + path = self.extension_file_path(spec) + with closing(open(path, 'w')) as spec_file: + for extension in sorted(extensions): + spec_file.write("%s\n" % extension) + + + def add_extension(self, spec, extension_spec): + _check_concrete(spec) + _check_concrete(extension_spec) + + exts = self.get_extensions(spec) + if extension_spec in exts: + raise ExtensionAlreadyInstalledError(spec, extension_spec) + else: + for already_installed in exts: + if spec.name == extension_spec.name: + raise ExtensionConflictError(spec, extension_spec, already_installed) + + exts.add(extension_spec) + self.write_extensions(spec, exts) + + + def remove_extension(self, spec, extension_spec): + _check_concrete(spec) + _check_concrete(extension_spec) + + exts = self.get_extensions(spec) + if not extension_spec in exts: + raise NoSuchExtensionError(spec, extension_spec) + + exts.remove(extension_spec) + self.write_extensions(spec, exts) + + class DirectoryLayoutError(SpackError): """Superclass for directory layout errors.""" def __init__(self, message): @@ -250,3 +341,32 @@ class InstallDirectoryAlreadyExistsError(DirectoryLayoutError): def __init__(self, path): super(InstallDirectoryAlreadyExistsError, self).__init__( "Install path %s already exists!") + + +class InvalidExtensionSpecError(DirectoryLayoutError): + """Raised when an extension file has a bad spec in it.""" + def __init__(self, message): + super(InvalidExtensionSpecError, 
self).__init__(message) + + +class ExtensionAlreadyInstalledError(DirectoryLayoutError): + """Raised when an extension is added to a package that already has it.""" + def __init__(self, spec, extension_spec): + super(ExtensionAlreadyInstalledError, self).__init__( + "%s is already installed in %s" % (extension_spec.short_spec, spec.short_spec)) + + +class ExtensionConflictError(DirectoryLayoutError): + """Raised when an extension is added to a package that already has it.""" + def __init__(self, spec, extension_spec, conflict): + super(ExtensionConflictError, self).__init__( + "%s cannot be installed in %s because it conflicts with %s."% ( + extension_spec.short_spec, spec.short_spec, conflict.short_spec)) + + +class NoSuchExtensionError(DirectoryLayoutError): + """Raised when an extension isn't there on remove.""" + def __init__(self, spec, extension_spec): + super(NoSuchExtensionError, self).__init__( + "%s cannot be removed from %s because it's not installed."% ( + extension_spec.short_spec, spec.short_spec)) diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py index 98b7f2323f..1c44e8abaa 100644 --- a/lib/spack/spack/hooks/__init__.py +++ b/lib/spack/spack/hooks/__init__.py @@ -31,7 +31,9 @@ Currently the following hooks are supported: + * pre_install() * post_install() + * pre_uninstall() * post_uninstall() This can be used to implement support for things like module @@ -70,5 +72,8 @@ class HookRunner(object): # # Define some functions that can be called to fire off hooks. # -post_install = HookRunner('post_install') +pre_install = HookRunner('pre_install') +post_install = HookRunner('post_install') + +pre_uninstall = HookRunner('pre_uninstall') post_uninstall = HookRunner('post_uninstall') diff --git a/lib/spack/spack/hooks/extensions.py b/lib/spack/spack/hooks/extensions.py new file mode 100644 index 0000000000..718b24b965 --- /dev/null +++ b/lib/spack/spack/hooks/extensions.py @@ -0,0 +1,40 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## + +import spack + + +def post_install(pkg): + if pkg.is_extension: + pkg.do_activate() + + +def pre_uninstall(pkg): + # Need to do this b/c uninstall does not automatically do it. + # TODO: store full graph info in stored .spec file. 
+ pkg.spec.normalize() + + if pkg.is_extension: + pkg.do_deactivate() diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 929c514b61..114c7b6a35 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -46,7 +46,7 @@ from spack.util.compression import extension def mirror_archive_filename(spec): - """Get the path that this spec will live at within a mirror.""" + """Get the name of the spec's archive in the mirror.""" if not spec.version.concrete: raise ValueError("mirror.path requires spec with concrete version.") @@ -61,6 +61,11 @@ def mirror_archive_filename(spec): return "%s-%s.%s" % (spec.package.name, spec.version, ext) +def mirror_archive_path(spec): + """Get the relative path to the spec's archive within a mirror.""" + return join_path(spec.name, mirror_archive_filename(spec)) + + def get_matching_versions(specs, **kwargs): """Get a spec for EACH known version matching any spec in the list.""" matching = [] @@ -141,12 +146,10 @@ def create(path, specs, **kwargs): stage = None try: # create a subdirectory for the current package@version - subdir = join_path(mirror_root, pkg.name) + archive_path = join_path(path, mirror_archive_path(spec)) + subdir = os.path.dirname(archive_path) mkdirp(subdir) - archive_file = mirror_archive_filename(spec) - archive_path = join_path(subdir, archive_file) - if os.path.exists(archive_path): tty.msg("Already added %s" % spec.format("$_$@")) present.append(spec) diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index 755e9ea900..7d2ca97a62 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -49,6 +49,7 @@ import os import re import textwrap import shutil +from glob import glob from contextlib import closing import llnl.util.tty as tty @@ -123,6 +124,13 @@ class EnvModule(object): if os.path.isdir(directory): add_path(var, directory) + # Add python path unless it's an actual python installation + # TODO: is there a better way to do this? + if self.spec.name != 'python': + site_packages = glob(join_path(self.spec.prefix.lib, "python*/site-packages")) + if site_packages: + add_path('PYTHONPATH', site_packages[0]) + # short description is just the package + version # TODO: maybe packages can optionally provide it. self.short_description = self.spec.format("$_ $@") diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 1a797e88b1..b905968540 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -45,6 +45,7 @@ import textwrap from StringIO import StringIO import llnl.util.tty as tty +from llnl.util.link_tree import LinkTree from llnl.util.filesystem import * from llnl.util.lang import * @@ -320,12 +321,21 @@ class Package(object): """Patches to apply to newly expanded source, if any.""" patches = {} + """Specs of package this one extends, or None. + + Currently, ppackages can extend at most one other package. + """ + extendees = {} + # # These are default values for instance variables. # """By default we build in parallel. Subclasses can override this.""" parallel = True + """Most packages are NOT extendable. Set to True if you want extensions.""" + extendable = False + def __init__(self, spec): # this determines how the package should be built. 
@@ -395,6 +405,9 @@ class Package(object): self._fetch_time = 0.0 self._total_time = 0.0 + if self.is_extension: + spack.db.get(self.extendee_spec)._check_extendable() + @property def version(self): @@ -459,7 +472,7 @@ class Package(object): raise ValueError("Can only get a stage for a concrete package.") if self._stage is None: - mp = spack.mirror.mirror_archive_filename(self.spec) + mp = spack.mirror.mirror_archive_path(self.spec) self._stage = Stage( self.fetcher, mirror_path=mp, name=self.spec.short_spec) return self._stage @@ -481,6 +494,49 @@ class Package(object): self._fetcher = f + @property + def extendee_spec(self): + """Spec of the extendee of this package, or None if it is not an extension.""" + if not self.extendees: + return None + name = next(iter(self.extendees)) + if not name in self.spec: + spec, kwargs = self.extendees[name] + return spec + + # Need to do this to get the concrete version of the spec + return self.spec[name] + + + @property + def extendee_args(self): + """Spec of the extendee of this package, or None if it is not an extension.""" + if not self.extendees: + return None + name = next(iter(self.extendees)) + return self.extendees[name][1] + + + @property + def is_extension(self): + return len(self.extendees) > 0 + + + def extends(self, spec): + return (spec.name in self.extendees and + spec.satisfies(self.extendees[spec.name][0])) + + + @property + def activated(self): + if not self.spec.concrete: + raise ValueError("Only concrete package extensions can be activated.") + if not self.is_extension: + raise ValueError("is_extension called on package that is not an extension.") + + return self.spec in spack.install_layout.get_extensions(self.extendee_spec) + + def preorder_traversal(self, visited=None, **kwargs): """This does a preorder traversal of the package's dependence DAG.""" virtual = kwargs.get("virtual", False) @@ -713,6 +769,14 @@ class Package(object): tty.msg("Patched %s" % self.name) + def do_fake_install(self): + """Make a fake install directory contaiing a 'fake' file in bin.""" + mkdirp(self.prefix.bin) + touch(join_path(self.prefix.bin, 'fake')) + mkdirp(self.prefix.lib) + mkdirp(self.prefix.man1) + + def do_install(self, **kwargs): """This class should call this version of the install method. Package implementations should override install(). @@ -733,7 +797,7 @@ class Package(object): tty.msg("Installing %s" % self.name) if not ignore_deps: - self.do_install_dependencies() + self.do_install_dependencies(**kwargs) start_time = time.time() if not fake_install: @@ -757,23 +821,27 @@ class Package(object): # package naming scheme it likes. spack.install_layout.make_path_for_spec(self.spec) + # Run the pre-install hook in the child process after + # the directory is created. + spack.hooks.pre_install(self) + # Set up process's build environment before running install. + self.stage.chdir_to_source() build_env.setup_package(self) + # Allow extendees to further set up the environment. + if self.is_extension: + self.extendee_spec.package.setup_extension_environment( + self.module, self.extendee_spec, self.spec) + if fake_install: - mkdirp(self.prefix.bin) - touch(join_path(self.prefix.bin, 'fake')) - mkdirp(self.prefix.lib) - mkdirp(self.prefix.man1) + self.do_fake_install() else: # Subclasses implement install() to do the real work. self.install(self.spec, self.prefix) # Ensure that something was actually installed. - if not os.listdir(self.prefix): - raise InstallError( - "Install failed for %s. Nothing was installed!" 
- % self.name) + self._sanity_check_install() # On successful install, remove the stage. if not keep_stage: @@ -814,15 +882,23 @@ class Package(object): if returncode != 0: sys.exit(1) - # Once everything else is done, run post install hooks spack.hooks.post_install(self) - def do_install_dependencies(self): + + def _sanity_check_install(self): + installed = set(os.listdir(self.prefix)) + installed.difference_update(spack.install_layout.hidden_file_paths) + if not installed: + raise InstallError( + "Install failed for %s. Nothing was installed!" % self.name) + + + def do_install_dependencies(self, **kwargs): # Pass along paths of dependencies here for dep in self.spec.dependencies.values(): - dep.package.do_install() + dep.package.do_install(**kwargs) @property @@ -834,6 +910,30 @@ class Package(object): fromlist=[self.__class__.__name__]) + def setup_extension_environment(self, module, spec, ext_spec): + """Called before the install() method of extensions. + + Default implementation does nothing, but this can be + overridden by an extendable package to set up the install + environment for its extensions. This is useful if there are + some common steps to installing all extensions for a + certain package. + + Some examples: + + 1. Installing python modules generally requires PYTHONPATH to + point to the lib/pythonX.Y/site-packages directory in the + module's install prefix. This could set that variable. + + 2. Extensions often need to invoke the 'python' interpreter + from the Python installation being extended. This routine can + put a 'python' Execuable object in the module scope for the + extension package to simplify extension installs. + + """ + pass + + def install(self, spec, prefix): """Package implementations override this with their own build configuration.""" raise InstallError("Package %s provides no install method!" % self.name) @@ -853,6 +953,10 @@ class Package(object): "The following installed packages depend on it: %s" % ' '.join(formatted_deps)) + # Pre-uninstall hook runs first. + spack.hooks.pre_uninstall(self) + + # Uninstalling in Spack only requires removing the prefix. self.remove_prefix() tty.msg("Successfully uninstalled %s." % self.spec.short_spec) @@ -860,6 +964,88 @@ class Package(object): spack.hooks.post_uninstall(self) + def _check_extendable(self): + if not self.extendable: + raise ValueError("Package %s is not extendable!" % self.name) + + + def _sanity_check_extension(self): + if not self.is_extension: + raise ValueError("This package is not an extension.") + extendee_package = self.extendee_spec.package + extendee_package._check_extendable() + + if not extendee_package.installed: + raise ValueError("Can only (de)activate extensions for installed packages.") + if not self.installed: + raise ValueError("Extensions must first be installed.") + if not self.extendee_spec.name in self.extendees: + raise ValueError("%s does not extend %s!" % (self.name, self.extendee.name)) + + + def do_activate(self): + """Called on an etension to invoke the extendee's activate method. + + Commands should call this routine, and should not call + activate() directly. + """ + self._sanity_check_extension() + self.extendee_spec.package.activate(self, **self.extendee_args) + + spack.install_layout.add_extension(self.extendee_spec, self.spec) + tty.msg("Activated extension %s for %s." + % (self.spec.short_spec, self.extendee_spec.short_spec)) + + + def activate(self, extension, **kwargs): + """Symlinks all files from the extension into extendee's install dir. 
+ + Package authors can override this method to support other + extension mechanisms. Spack internals (commands, hooks, etc.) + should call do_activate() method so that proper checks are + always executed. + + """ + def ignore(filename): + return (filename in spack.install_layout.hidden_file_paths or + kwargs.get('ignore', lambda f: False)(filename)) + + tree = LinkTree(extension.prefix) + conflict = tree.find_conflict(self.prefix, ignore=ignore) + if conflict: + raise ExtensionConflictError(conflict) + tree.merge(self.prefix, ignore=ignore) + + + def do_deactivate(self): + """Called on the extension to invoke extendee's deactivate() method.""" + self._sanity_check_extension() + self.extendee_spec.package.deactivate(self, **self.extendee_args) + + if self.spec in spack.install_layout.get_extensions(self.extendee_spec): + spack.install_layout.remove_extension(self.extendee_spec, self.spec) + + tty.msg("Deactivated extension %s for %s." + % (self.spec.short_spec, self.extendee_spec.short_spec)) + + + def deactivate(self, extension, **kwargs): + """Unlinks all files from extension out of this package's install dir. + + Package authors can override this method to support other + extension mechanisms. Spack internals (commands, hooks, etc.) + should call do_deactivate() method so that proper checks are + always executed. + + """ + def ignore(filename): + return (filename in spack.install_layout.hidden_file_paths or + kwargs.get('ignore', lambda f: False)(filename)) + + tree = LinkTree(extension.prefix) + tree.unmerge(self.prefix, ignore=ignore) + + def do_clean(self): if self.stage.expanded_archive_path: self.stage.chdir_to_source() @@ -925,6 +1111,23 @@ class Package(object): e.url, e.message) + @property + def rpath(self): + """Get the rpath this package links with, as a list of paths.""" + rpaths = [self.prefix.lib, self.prefix.lib64] + rpaths.extend(d.prefix.lib for d in self.spec.traverse(root=False) + if os.path.isdir(d.prefix.lib)) + rpaths.extend(d.prefix.lib64 for d in self.spec.traverse(root=False) + if os.path.isdir(d.prefix.lib64)) + return rpaths + + + @property + def rpath_args(self): + """Get the rpath args as a string, with -Wl,-rpath= for each element.""" + return " ".join("-Wl,-rpath=%s" % p for p in self.rpath) + + def find_versions_of_archive(*archive_urls, **kwargs): list_url = kwargs.get('list_url', None) list_depth = kwargs.get('list_depth', 1) @@ -1034,3 +1237,9 @@ class NoURLError(PackageError): def __init__(self, cls): super(NoURLError, self).__init__( "Package %s has no version with a URL." 
% cls.__name__) + + +class ExtensionConflictError(PackageError): + def __init__(self, path): + super(ExtensionConflictError, self).__init__( + "Extension blocked by file: %s" % path) diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py index db43d3909a..7ef8135c1a 100644 --- a/lib/spack/spack/packages.py +++ b/lib/spack/spack/packages.py @@ -77,6 +77,8 @@ class PackageDB(object): copy = spec.copy() self.instances[copy] = package_class(copy) except Exception, e: + if spack.debug: + sys.excepthook(*sys.exc_info()) raise FailedConstructorError(spec.name, e) return self.instances[spec] @@ -110,6 +112,17 @@ class PackageDB(object): return providers + @_autospec + def extensions_for(self, extendee_spec): + return [p for p in self.all_packages() if p.extends(extendee_spec)] + + + @_autospec + def installed_extensions_for(self, extendee_spec): + return [s.package for s in self.installed_package_specs() + if s.package.extends(extendee_spec)] + + def dirname_for_package_name(self, pkg_name): """Get the directory name for a particular package. This is the directory that contains its package.py file.""" diff --git a/lib/spack/spack/relations.py b/lib/spack/spack/relations.py index b1f4348945..a0c7723473 100644 --- a/lib/spack/spack/relations.py +++ b/lib/spack/spack/relations.py @@ -68,7 +68,7 @@ provides spack install mpileaks ^mvapich spack install mpileaks ^mpich """ -__all__ = [ 'depends_on', 'provides', 'patch', 'version' ] +__all__ = [ 'depends_on', 'extends', 'provides', 'patch', 'version' ] import re import inspect @@ -107,8 +107,9 @@ def depends_on(*specs): """Adds a dependencies local variable in the locals of the calling class, based on args. """ pkg = get_calling_package_name() + clocals = caller_locals() + dependencies = clocals.setdefault('dependencies', {}) - dependencies = caller_locals().setdefault('dependencies', {}) for string in specs: for spec in spack.spec.parse(string): if pkg == spec.name: @@ -116,6 +117,34 @@ def depends_on(*specs): dependencies[spec.name] = spec +def extends(spec, **kwargs): + """Same as depends_on, but dependency is symlinked into parent prefix. + + This is for Python and other language modules where the module + needs to be installed into the prefix of the Python installation. + Spack handles this by installing modules into their own prefix, + but allowing ONE module version to be symlinked into a parent + Python install at a time. + + keyword arguments can be passed to extends() so that extension + packages can pass parameters to the extendee's extension + mechanism. + + """ + pkg = get_calling_package_name() + clocals = caller_locals() + dependencies = clocals.setdefault('dependencies', {}) + extendees = clocals.setdefault('extendees', {}) + if extendees: + raise RelationError("Packages can extend at most one other package.") + + spec = Spec(spec) + if pkg == spec.name: + raise CircularReferenceError('extends', pkg) + dependencies[spec.name] = spec + extendees[spec.name] = (spec, kwargs) + + def provides(*specs, **kwargs): """Allows packages to provide a virtual dependency. 
If a package provides 'mpi', other packages can declare that they depend on "mpi", and spack diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 2f4fe9ca24..dffdccaddb 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -553,6 +553,13 @@ class Spec(object): @property + def cshort_spec(self): + """Returns a version of the spec with the dependencies hashed + instead of completely enumerated.""" + return self.format('$_$@$%@$+$=$#', color=True) + + + @property def prefix(self): return Prefix(spack.install_layout.path_for_spec(self)) diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 0eda667abc..c53e6774fc 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -51,7 +51,8 @@ test_names = ['versions', 'hg_fetch', 'mirror', 'url_extrapolate', - 'cc'] + 'cc', + 'link_tree'] def list_tests(): diff --git a/lib/spack/spack/test/link_tree.py b/lib/spack/spack/test/link_tree.py new file mode 100644 index 0000000000..bc7c2c6b5e --- /dev/null +++ b/lib/spack/spack/test/link_tree.py @@ -0,0 +1,153 @@ +############################################################################## +# Copyright (c) 2013, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://scalability-llnl.github.io/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License (as published by +# the Free Software Foundation) version 2.1 dated February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +import os +import unittest +import shutil +import tempfile +from contextlib import closing + +from llnl.util.filesystem import * +from llnl.util.link_tree import LinkTree + +from spack.stage import Stage + + +class LinkTreeTest(unittest.TestCase): + """Tests Spack's LinkTree class.""" + + def setUp(self): + self.stage = Stage('link-tree-test') + + with working_dir(self.stage.path): + touchp('source/1') + touchp('source/a/b/2') + touchp('source/a/b/3') + touchp('source/c/4') + touchp('source/c/d/5') + touchp('source/c/d/6') + touchp('source/c/d/e/7') + + source_path = os.path.join(self.stage.path, 'source') + self.link_tree = LinkTree(source_path) + + + def tearDown(self): + if self.stage: + self.stage.destroy() + + + def check_file_link(self, filename): + self.assertTrue(os.path.isfile(filename)) + self.assertTrue(os.path.islink(filename)) + + + def check_dir(self, filename): + self.assertTrue(os.path.isdir(filename)) + + + def test_merge_to_new_directory(self): + with working_dir(self.stage.path): + self.link_tree.merge('dest') + + self.check_file_link('dest/1') + self.check_file_link('dest/a/b/2') + self.check_file_link('dest/a/b/3') + self.check_file_link('dest/c/4') + self.check_file_link('dest/c/d/5') + self.check_file_link('dest/c/d/6') + self.check_file_link('dest/c/d/e/7') + + self.link_tree.unmerge('dest') + + self.assertFalse(os.path.exists('dest')) + + + def test_merge_to_existing_directory(self): + with working_dir(self.stage.path): + + touchp('dest/x') + touchp('dest/a/b/y') + + self.link_tree.merge('dest') + + self.check_file_link('dest/1') + self.check_file_link('dest/a/b/2') + self.check_file_link('dest/a/b/3') + self.check_file_link('dest/c/4') + self.check_file_link('dest/c/d/5') + self.check_file_link('dest/c/d/6') + self.check_file_link('dest/c/d/e/7') + + self.assertTrue(os.path.isfile('dest/x')) + self.assertTrue(os.path.isfile('dest/a/b/y')) + + self.link_tree.unmerge('dest') + + self.assertTrue(os.path.isfile('dest/x')) + self.assertTrue(os.path.isfile('dest/a/b/y')) + + self.assertFalse(os.path.isfile('dest/1')) + self.assertFalse(os.path.isfile('dest/a/b/2')) + self.assertFalse(os.path.isfile('dest/a/b/3')) + self.assertFalse(os.path.isfile('dest/c/4')) + self.assertFalse(os.path.isfile('dest/c/d/5')) + self.assertFalse(os.path.isfile('dest/c/d/6')) + self.assertFalse(os.path.isfile('dest/c/d/e/7')) + + + def test_merge_with_empty_directories(self): + with working_dir(self.stage.path): + mkdirp('dest/f/g') + mkdirp('dest/a/b/h') + + self.link_tree.merge('dest') + self.link_tree.unmerge('dest') + + self.assertFalse(os.path.exists('dest/1')) + self.assertFalse(os.path.exists('dest/a/b/2')) + self.assertFalse(os.path.exists('dest/a/b/3')) + self.assertFalse(os.path.exists('dest/c/4')) + self.assertFalse(os.path.exists('dest/c/d/5')) + self.assertFalse(os.path.exists('dest/c/d/6')) + self.assertFalse(os.path.exists('dest/c/d/e/7')) + + self.assertTrue(os.path.isdir('dest/a/b/h')) + self.assertTrue(os.path.isdir('dest/f/g')) + + + def test_ignore(self): + with working_dir(self.stage.path): + touchp('source/.spec') + touchp('dest/.spec') + + self.link_tree.merge('dest', ignore=lambda x: x == '.spec') + self.link_tree.unmerge('dest', ignore=lambda x: x == '.spec') + + 
self.assertFalse(os.path.exists('dest/1')) + self.assertFalse(os.path.exists('dest/a')) + self.assertFalse(os.path.exists('dest/c')) + + self.assertTrue(os.path.isfile('source/.spec')) + self.assertTrue(os.path.isfile('dest/.spec')) diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py index 51334198ec..89ab14359e 100644 --- a/lib/spack/spack/test/mirror.py +++ b/lib/spack/spack/test/mirror.py @@ -44,7 +44,7 @@ class MirrorTest(MockPackagesTest): self.repos = {} - def set_up_package(self, name, mock_repo_class, url_attr): + def set_up_package(self, name, MockRepoClass, url_attr): """Use this to set up a mock package to be mirrored. Each package needs us to: 1. Set up a mock repo/archive to fetch from. @@ -56,7 +56,7 @@ class MirrorTest(MockPackagesTest): # Get the package and fix its fetch args to point to a mock repo pkg = spack.db.get(spec) - repo = mock_repo_class() + repo = MockRepoClass() self.repos[name] = repo # change the fetch args of the first (only) version. @@ -71,7 +71,7 @@ class MirrorTest(MockPackagesTest): for name, repo in self.repos.items(): if repo.stage: - repo.stage.destroy() + pass #repo.stage.destroy() self.repos.clear() @@ -129,7 +129,7 @@ class MirrorTest(MockPackagesTest): self.assertTrue(all(l in exclude for l in dcmp.left_only)) finally: - stage.destroy() + pass #stage.destroy() def test_git_mirror(self): |
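
For readers approaching this merge from the packaging side, the following is a hypothetical package file — not taken from the Spack repository — sketching how the new `extends()` directive (relations.py) and the `extendable` flag (package.py) fit together. The package name, version, checksum, URL, and build command are invented for illustration.

```python
# Hypothetical extension package; names, URL, and checksum are made up.
# Real package files normally do `from spack import *`; explicit imports are
# used here only to show where the new pieces live.
from spack import Package
from spack.relations import extends, version


class PyExample(Package):
    """Example Python module installed as a Spack extension."""
    homepage = "https://example.com/py-example"
    url      = "https://example.com/py-example-1.0.tar.gz"

    version('1.0', '00000000000000000000000000000000')   # placeholder md5

    # Declares this package as an extension of python.  Keyword arguments
    # would be forwarded to python's activate()/deactivate() through
    # Package.extendee_args; the extendee must set `extendable = True`.
    extends('python')

    def install(self, spec, prefix):
        # Schematic build step.  An extendable python package could instead
        # prepare this environment (PYTHONPATH, a `python` Executable in the
        # module scope, ...) in setup_extension_environment(), as suggested
        # by the docstring added to package.py above.
        import subprocess
        subprocess.check_call(
            ['python', 'setup.py', 'install', '--prefix=%s' % prefix])
```

Installing such a package fires the new `pre_install`/`post_install` hooks; `hooks/extensions.py` then calls `do_activate()`, which symlinks the extension's prefix into python's prefix with a `LinkTree` and records the spec in the extendee's `.extensions` file, so `spack extensions python`, `spack activate`, and `spack deactivate` can manage it afterwards.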