Diffstat (limited to 'lib')
35 files changed, 877 insertions, 293 deletions
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index 50c48b802b..ec193e767d 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -24,12 +24,29 @@ Spack can install: .. command-output:: spack list -The packages are listed by name in alphabetical order. You can also -do wildcats searches using ``*``: +The packages are listed by name in alphabetical order. If you specify a +pattern to match, it will follow this set of rules. A pattern with no +wildcards, ``*`` or ``?``, will be treated as though it started and ended with +``*``, so ``util`` is equivalent to ``*util*``. A pattern with no capital +letters will be treated as case-insensitive. You can also add the ``-i`` flag +to specify a case insensitive search, or ``-d`` to search the description of +the package in addition to the name. Some examples: -.. command-output:: spack list m* +All packages whose names contain "sql" case insensitive: -.. command-output:: spack list *util* +.. command-output:: spack list sql + +All packages whose names start with a capital M: + +.. command-output:: spack list 'M*' + +All packages whose names or descriptions contain Documentation: + +.. command-output:: spack list -d Documentation + +All packages whose names contain documentation case insensitive: + +.. command-output:: spack list -d documentation .. _spack-info: diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst index c613071c65..a6f876b2aa 100644 --- a/lib/spack/docs/configuration.rst +++ b/lib/spack/docs/configuration.rst @@ -140,7 +140,6 @@ Here's an example packages.yaml file that sets preferred packages: packages: dyninst: compiler: [gcc@4.9] - variants: +debug gperftools: version: [2.2, 2.4, 2.3] all: @@ -150,8 +149,8 @@ Here's an example packages.yaml file that sets preferred packages: At a high level, this example is specifying how packages should be -concretized. The dyninst package should prefer using gcc 4.9 and -be built with debug options. The gperftools package should prefer version +concretized. The dyninst package should prefer using gcc 4.9. +The gperftools package should prefer version 2.2 over 2.4. Every package on the system should prefer mvapich for its MPI and gcc 4.4.7 (except for Dyninst, which overrides this by preferring gcc 4.9). These options are used to fill in implicit defaults. Any of them can be overwritten @@ -160,7 +159,7 @@ on the command line if explicitly requested. Each packages.yaml file begins with the string ``packages:`` and package names are specified on the next level. The special string ``all`` applies settings to each package. Underneath each package name is -one or more components: ``compiler``, ``variants``, ``version``, +one or more components: ``compiler``, ``version``, or ``providers``. Each component has an ordered list of spec ``constraints``, with earlier entries in the list being preferred over later entries. diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 1f83f611b0..54b886310a 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -1950,6 +1950,19 @@ instead of hard-coding ``join_path(self.spec['mpi'].prefix.bin, 'mpicc')`` for the reasons outlined above. +Blas and Lapack libraries +~~~~~~~~~~~~~~~~~~~~~~~~~ +Different packages provide implementation of ``Blas`` and ``Lapack`` routines. +The names of the resulting static and/or shared libraries differ from package +to package. 
In order to make ``install()`` method indifferent to the +choice of ``Blas`` implementation, each package which provides it +sets up ``self.spec.blas_shared_lib`` and ``self.spec.blas_static_lib `` to +point to the shared and static ``Blas`` libraries, respectively. The same +applies to packages which provide ``Lapack``. Package developers are advised to +use these variables, for example ``spec['blas'].blas_shared_lib`` instead of +hard-coding ``join_path(spec['blas'].prefix.lib, 'libopenblas.so')``. + + Forking ``install()`` ~~~~~~~~~~~~~~~~~~~~~ diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 6661a80f27..e800c6717a 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -22,33 +22,33 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -__all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree', - 'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp', - 'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file', - 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', - 'set_executable', 'copy_mode', 'unset_executable_mode', - 'remove_dead_links', 'remove_linked_tree', 'find_library_path', - 'fix_darwin_install_name'] - import os import glob -import sys import re import shutil import stat import errno import getpass from contextlib import contextmanager, closing -from tempfile import NamedTemporaryFile import subprocess import llnl.util.tty as tty -from spack.util.compression import ALLOWED_ARCHIVE_TYPES + +__all__ = ['set_install_permissions', 'install', 'install_tree', + 'traverse_tree', + 'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp', + 'force_remove', 'join_path', 'ancestor', 'can_access', + 'filter_file', + 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', + 'set_executable', 'copy_mode', 'unset_executable_mode', + 'remove_dead_links', 'remove_linked_tree', 'find_library_path', + 'fix_darwin_install_name', 'to_link_flags'] + def filter_file(regex, repl, *filenames, **kwargs): """Like sed, but uses python regular expressions. - Filters every line of file through regex and replaces the file + Filters every line of each file through regex and replaces the file with a filtered version. Preserves mode of filtered files. As with re.sub, ``repl`` can be either a string or a callable. @@ -59,7 +59,7 @@ def filter_file(regex, repl, *filenames, **kwargs): Keyword Options: string[=False] If True, treat regex as a plain string. - backup[=True] Make a backup files suffixed with ~ + backup[=True] Make backup file(s) suffixed with ~ ignore_absent[=False] Ignore any files that don't exist. """ string = kwargs.get('string', False) @@ -69,6 +69,7 @@ def filter_file(regex, repl, *filenames, **kwargs): # Allow strings to use \1, \2, etc. 
for replacement, like sed if not callable(repl): unescaped = repl.replace(r'\\', '\\') + def replace_groups_with_groupid(m): def groupid_to_group(x): return m.group(int(x.group(1))) @@ -79,26 +80,26 @@ def filter_file(regex, repl, *filenames, **kwargs): regex = re.escape(regex) for filename in filenames: - backup = filename + "~" + backup_filename = filename + "~" if ignore_absent and not os.path.exists(filename): continue - shutil.copy(filename, backup) + shutil.copy(filename, backup_filename) try: - with closing(open(backup)) as infile: + with closing(open(backup_filename)) as infile: with closing(open(filename, 'w')) as outfile: for line in infile: foo = re.sub(regex, repl, line) outfile.write(foo) except: # clean up the original file on failure. - shutil.move(backup, filename) + shutil.move(backup_filename, filename) raise finally: if not backup: - shutil.rmtree(backup, ignore_errors=True) + os.remove(backup_filename) class FileFilter(object): @@ -113,7 +114,7 @@ class FileFilter(object): def change_sed_delimiter(old_delim, new_delim, *filenames): """Find all sed search/replace commands and change the delimiter. e.g., if the file contains seds that look like 's///', you can - call change_sed_delimeter('/', '@', file) to change the + call change_sed_delimiter('/', '@', file) to change the delimiter to '@'. NOTE that this routine will fail if the delimiter is ' or ". @@ -157,9 +158,12 @@ def set_install_permissions(path): def copy_mode(src, dest): src_mode = os.stat(src).st_mode dest_mode = os.stat(dest).st_mode - if src_mode & stat.S_IXUSR: dest_mode |= stat.S_IXUSR - if src_mode & stat.S_IXGRP: dest_mode |= stat.S_IXGRP - if src_mode & stat.S_IXOTH: dest_mode |= stat.S_IXOTH + if src_mode & stat.S_IXUSR: + dest_mode |= stat.S_IXUSR + if src_mode & stat.S_IXGRP: + dest_mode |= stat.S_IXGRP + if src_mode & stat.S_IXOTH: + dest_mode |= stat.S_IXOTH os.chmod(dest, dest_mode) @@ -175,7 +179,7 @@ def install(src, dest): """Manually install a file to a particular location.""" tty.debug("Installing %s to %s" % (src, dest)) - # Expand dsst to its eventual full path if it is a directory. + # Expand dest to its eventual full path if it is a directory. 
if os.path.isdir(dest): dest = join_path(dest, os.path.basename(src)) @@ -215,7 +219,7 @@ def mkdirp(*paths): if not os.path.exists(path): os.makedirs(path) elif not os.path.isdir(path): - raise OSError(errno.EEXIST, "File alredy exists", path) + raise OSError(errno.EEXIST, "File already exists", path) def force_remove(*paths): @@ -224,9 +228,10 @@ def force_remove(*paths): for path in paths: try: os.remove(path) - except OSError, e: + except OSError: pass + @contextmanager def working_dir(dirname, **kwargs): if kwargs.get('create', False): @@ -240,7 +245,7 @@ def working_dir(dirname, **kwargs): def touch(path): """Creates an empty file at the specified path.""" - with open(path, 'a') as file: + with open(path, 'a'): os.utime(path, None) @@ -253,7 +258,7 @@ def touchp(path): def force_symlink(src, dest): try: os.symlink(src, dest) - except OSError as e: + except OSError: os.remove(dest) os.symlink(src, dest) @@ -275,7 +280,7 @@ def ancestor(dir, n=1): def can_access(file_name): """True if we have read/write access to the file.""" - return os.access(file_name, os.R_OK|os.W_OK) + return os.access(file_name, os.R_OK | os.W_OK) def traverse_tree(source_root, dest_root, rel_path='', **kwargs): @@ -304,7 +309,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs): Optional args: - order=[pre|post] -- Whether to do pre- or post-order traveral. + order=[pre|post] -- Whether to do pre- or post-order traversal. ignore=<predicate> -- Predicate indicating which files to ignore. @@ -343,13 +348,14 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs): # Treat as a directory if os.path.isdir(source_child) and ( - follow_links or not os.path.islink(source_child)): + follow_links or not os.path.islink(source_child)): # When follow_nonexisting isn't set, don't descend into dirs # in source that do not exist in dest if follow_nonexisting or os.path.exists(dest_child): - tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs) - for t in tuples: yield t + tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs) # NOQA: ignore=E501 + for t in tuples: + yield t # Treat as a file. elif not ignore(os.path.join(rel_path, f)): @@ -379,6 +385,7 @@ def remove_dead_links(root): if not os.path.exists(real_path): os.unlink(path) + def remove_linked_tree(path): """ Removes a directory and its contents. If the directory is a @@ -402,28 +409,41 @@ def fix_darwin_install_name(path): Fix install name of dynamic libraries on Darwin to have full path. There are two parts of this task: (i) use install_name('-id',...) to change install name of a single lib; - (ii) use install_name('-change',...) to change the cross linking between libs. - The function assumes that all libraries are in one folder and currently won't - follow subfolders. + (ii) use install_name('-change',...) to change the cross linking between + libs. The function assumes that all libraries are in one folder and + currently won't follow subfolders. 
Args: - path: directory in which .dylib files are alocated + path: directory in which .dylib files are located """ - libs = glob.glob(join_path(path,"*.dylib")) + libs = glob.glob(join_path(path, "*.dylib")) for lib in libs: # fix install name first: - subprocess.Popen(["install_name_tool", "-id",lib,lib], stdout=subprocess.PIPE).communicate()[0] - long_deps = subprocess.Popen(["otool", "-L",lib], stdout=subprocess.PIPE).communicate()[0].split('\n') + subprocess.Popen(["install_name_tool", "-id", lib, lib], stdout=subprocess.PIPE).communicate()[0] # NOQA: ignore=E501 + long_deps = subprocess.Popen(["otool", "-L", lib], stdout=subprocess.PIPE).communicate()[0].split('\n') # NOQA: ignore=E501 deps = [dep.partition(' ')[0][1::] for dep in long_deps[2:-1]] # fix all dependencies: for dep in deps: for loc in libs: if dep == os.path.basename(loc): - subprocess.Popen(["install_name_tool", "-change",dep,loc,lib], stdout=subprocess.PIPE).communicate()[0] + subprocess.Popen(["install_name_tool", "-change", dep, loc, lib], stdout=subprocess.PIPE).communicate()[0] # NOQA: ignore=E501 break +def to_link_flags(library): + """Transforms a path to a <library> into linking flags -L<dir> -l<name>. + + Return: + A string of linking flags. + """ + dir = os.path.dirname(library) + # Assume libXYZ.suffix + name = os.path.basename(library)[3:].split(".")[0] + res = '-L%s -l%s' % (dir, name) + return res + + def find_library_path(libname, *paths): """Searches for a file called <libname> in each path. diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py index cbac7b41d6..a7cda2bf68 100644 --- a/lib/spack/spack/architecture.py +++ b/lib/spack/spack/architecture.py @@ -91,16 +91,10 @@ from spack.util.multiproc import parmap import spack.error as serr -class InvalidSysTypeError(serr.SpackError): - def __init__(self, sys_type): - super(InvalidSysTypeError, self).__init__( - "Invalid sys_type value for Spack: " + sys_type) - - -class NoSysTypeError(serr.SpackError): +class NoPlatformError(serr.SpackError): def __init__(self): - super(NoSysTypeError, self).__init__( - "Could not determine sys_type for this machine.") + super(NoPlatformError, self).__init__( + "Could not determine a platform for this machine.") @key_ordering @@ -345,14 +339,17 @@ class OperatingSystem(object): @key_ordering class Arch(object): - "Architecture is now a class to help with setting attributes" - - def __init__(self, platform=None, platform_os=None, target=None): - self.platform = platform - if platform and platform_os: - platform_os = self.platform.operating_system(platform_os) - self.platform_os = platform_os - if platform and target: + """Architecture is now a class to help with setting attributes. + + TODO: refactor so that we don't need this class. 
+ """ + + def __init__(self, plat=None, os=None, target=None): + self.platform = plat + if plat and os: + os = self.platform.operating_system(os) + self.platform_os = os + if plat and target: target = self.platform.target(target) self.target = target @@ -409,27 +406,27 @@ class Arch(object): return d -def _target_from_dict(target_name, platform=None): +def _target_from_dict(target_name, plat=None): """ Creates new instance of target and assigns all the attributes of that target from the dictionary """ - if not platform: - platform = sys_type() - return platform.target(target_name) + if not plat: + plat = platform() + return plat.target(target_name) -def _operating_system_from_dict(os_name, platform=None): +def _operating_system_from_dict(os_name, plat=None): """ uses platform's operating system method to grab the constructed operating systems that are valid on the platform. """ - if not platform: - platform = sys_type() + if not plat: + plat = platform() if isinstance(os_name, dict): name = os_name['name'] version = os_name['version'] - return platform.operating_system(name+version) + return plat.operating_system(name+version) else: - return platform.operating_system(os_name) + return plat.operating_system(os_name) def _platform_from_dict(platform_name): @@ -504,16 +501,35 @@ def all_platforms(): @memoized -def sys_type(): - """ Gather a list of all available subclasses of platforms. - Sorts the list according to their priority looking. Priority is - an arbitrarily set number. Detects platform either using uname or - a file path (/opt/cray...) +def platform(): + """Detects the platform for this machine. + + Gather a list of all available subclasses of platforms. + Sorts the list according to their priority looking. Priority is + an arbitrarily set number. Detects platform either using uname or + a file path (/opt/cray...) """ # Try to create a Platform object using the config file FIRST platform_list = all_platforms() platform_list.sort(key=lambda a: a.priority) - for platform in platform_list: - if platform.detect(): - return platform() + for platform_cls in platform_list: + if platform_cls.detect(): + return platform_cls() + + +@memoized +def sys_type(): + """Print out the "default" platform-os-target tuple for this machine. + + On machines with only one target OS/target, prints out the + platform-os-target for the frontend. For machines with a frontend + and a backend, prints the default backend. + + TODO: replace with use of more explicit methods to get *all* the + backends, as client code should really be aware of cross-compiled + architectures. + + """ + arch = Arch(platform(), 'default_os', 'default_target') + return str(arch) diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 7c65091d49..99c4cae020 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -75,20 +75,19 @@ SPACK_NO_PARALLEL_MAKE = 'SPACK_NO_PARALLEL_MAKE' # set_build_environment_variables and used to pass parameters to # Spack's compiler wrappers. 
# -SPACK_ENV_PATH = 'SPACK_ENV_PATH' -SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES' -SPACK_PREFIX = 'SPACK_PREFIX' -SPACK_INSTALL = 'SPACK_INSTALL' -SPACK_DEBUG = 'SPACK_DEBUG' -SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC' -SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR' +SPACK_ENV_PATH = 'SPACK_ENV_PATH' +SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES' +SPACK_PREFIX = 'SPACK_PREFIX' +SPACK_INSTALL = 'SPACK_INSTALL' +SPACK_DEBUG = 'SPACK_DEBUG' +SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC' +SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR' # Platform-specific library suffix. dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so' - class MakeExecutable(Executable): """Special callable executable object for make so the user can specify parallel or not on a per-invocation basis. Using @@ -99,6 +98,7 @@ class MakeExecutable(Executable): Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides everything. """ + def __init__(self, name, jobs): super(MakeExecutable, self).__init__(name) self.jobs = jobs @@ -113,12 +113,13 @@ class MakeExecutable(Executable): return super(MakeExecutable, self).__call__(*args, **kwargs) + def load_module(mod): """Takes a module name and removes modules until it is possible to load that module. It then loads the provided module. Depends on the modulecmd implementation of modules used in cray and lmod. """ - #Create an executable of the module command that will output python code + # Create an executable of the module command that will output python code modulecmd = which('modulecmd') modulecmd.add_default_arg('python') @@ -129,11 +130,13 @@ def load_module(mod): text = modulecmd('show', mod, output=str, error=str).split() for i, word in enumerate(text): if word == 'conflict': - exec(compile(modulecmd('unload', text[i+1], output=str, error=str), '<string>', 'exec')) + exec(compile(modulecmd('unload', text[i + 1], output=str, + error=str), '<string>', 'exec')) # Load the module now that there are no conflicts load = modulecmd('load', mod, output=str, error=str) exec(compile(load, '<string>', 'exec')) + def get_path_from_module(mod): """Inspects a TCL module for entries that indicate the absolute path at which the library supported by said module can be found. 
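The hunk that follows scans the output of modulecmd show for several clues in order: a <MOD>_DIR variable, an -rpath entry, an -L flag, and finally LD_LIBRARY_PATH, keeping everything before '/lib' as the package prefix. A minimal sketch of the LD_LIBRARY_PATH case, using an invented module line (module name and path are hypothetical), would be:

    # Hypothetical line from `modulecmd show <mod>`; get_path_from_module()
    # keeps everything before '/lib' as the package prefix.
    line = 'prepend-path  LD_LIBRARY_PATH  /opt/cray/hdf5/1.8.16/lib'
    words = line.split()
    path = words[2]                      # '/opt/cray/hdf5/1.8.16/lib'
    prefix = path[:path.find('/lib')]    # '/opt/cray/hdf5/1.8.16'
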
@@ -146,7 +149,7 @@ def get_path_from_module(mod): text = modulecmd('show', mod, output=str, error=str).split('\n') # If it lists its package directory, return that for line in text: - if line.find(mod.upper()+'_DIR') >= 0: + if line.find(mod.upper() + '_DIR') >= 0: words = line.split() return words[2] @@ -154,49 +157,52 @@ def get_path_from_module(mod): for line in text: rpath = line.find('-rpath/') if rpath >= 0: - return line[rpath+6:line.find('/lib')] + return line[rpath + 6:line.find('/lib')] # If it lists a -L instruction, use that for line in text: L = line.find('-L/') if L >= 0: - return line[L+2:line.find('/lib')] + return line[L + 2:line.find('/lib')] # If it sets the LD_LIBRARY_PATH or CRAY_LD_LIBRARY_PATH, use that for line in text: - if line.find('LD_LIBRARY_PATH') >= 0: + if line.find('LD_LIBRARY_PATH') >= 0: words = line.split() path = words[2] return path[:path.find('/lib')] # Unable to find module path return None + def set_compiler_environment_variables(pkg, env): assert(pkg.spec.concrete) compiler = pkg.compiler flags = pkg.spec.compiler_flags # Set compiler variables used by CMake and autotools - assert all(key in compiler.link_paths for key in ('cc', 'cxx', 'f77', 'fc')) + assert all(key in compiler.link_paths for key in ( + 'cc', 'cxx', 'f77', 'fc')) # Populate an object with the list of environment modifications # and return it - # TODO : add additional kwargs for better diagnostics, like requestor, ttyout, ttyerr, etc. + # TODO : add additional kwargs for better diagnostics, like requestor, + # ttyout, ttyerr, etc. link_dir = spack.build_env_path - env.set('CC', join_path(link_dir, compiler.link_paths['cc'])) - env.set('CXX', join_path(link_dir, compiler.link_paths['cxx'])) - env.set('F77', join_path(link_dir, compiler.link_paths['f77'])) - env.set('FC', join_path(link_dir, compiler.link_paths['fc'])) # Set SPACK compiler variables so that our wrapper knows what to call if compiler.cc: env.set('SPACK_CC', compiler.cc) + env.set('CC', join_path(link_dir, compiler.link_paths['cc'])) if compiler.cxx: env.set('SPACK_CXX', compiler.cxx) + env.set('CXX', join_path(link_dir, compiler.link_paths['cxx'])) if compiler.f77: env.set('SPACK_F77', compiler.f77) + env.set('F77', join_path(link_dir, compiler.link_paths['f77'])) if compiler.fc: env.set('SPACK_FC', compiler.fc) + env.set('FC', join_path(link_dir, compiler.link_paths['fc'])) # Set SPACK compiler rpath flags so that our wrapper knows what to use env.set('SPACK_CC_RPATH_ARG', compiler.cc_rpath_arg) @@ -233,7 +239,8 @@ def set_build_environment_variables(pkg, env): # handled by putting one in the <build_env_path>/case-insensitive # directory. Add that to the path too. 
env_paths = [] - for item in [spack.build_env_path, join_path(spack.build_env_path, pkg.compiler.name)]: + compiler_specific = join_path(spack.build_env_path, pkg.compiler.name) + for item in [spack.build_env_path, compiler_specific]: env_paths.append(item) ci = join_path(item, 'case-insensitive') if os.path.isdir(ci): @@ -246,7 +253,8 @@ def set_build_environment_variables(pkg, env): # Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES dep_prefixes = [d.prefix for d in pkg.spec.traverse(root=False)] env.set_path(SPACK_DEPENDENCIES, dep_prefixes) - env.set_path('CMAKE_PREFIX_PATH', dep_prefixes) # Add dependencies to CMAKE_PREFIX_PATH + # Add dependencies to CMAKE_PREFIX_PATH + env.set_path('CMAKE_PREFIX_PATH', dep_prefixes) # Install prefix env.set(SPACK_PREFIX, pkg.prefix) @@ -262,7 +270,8 @@ def set_build_environment_variables(pkg, env): env.unset('DYLD_LIBRARY_PATH') # Add bin directories from dependencies to the PATH for the build. - bin_dirs = reversed(filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes])) + bin_dirs = reversed( + filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes])) for item in bin_dirs: env.prepend_path('PATH', item) @@ -277,8 +286,7 @@ def set_build_environment_variables(pkg, env): for directory in ('lib', 'lib64', 'share'): pcdir = join_path(pre, directory, 'pkgconfig') if os.path.isdir(pcdir): - #pkg_config_dirs.append(pcdir) - env.prepend_path('PKG_CONFIG_PATH',pcdir) + env.prepend_path('PKG_CONFIG_PATH', pcdir) if pkg.spec.architecture.target.module_name: load_module(pkg.spec.architecture.target.module_name) @@ -290,7 +298,7 @@ def set_module_variables_for_package(pkg, module): """Populate the module scope of install() with some useful functions. This makes things easier for package writers. """ - # number of jobs spack will to build with. + # number of jobs spack will build with. jobs = multiprocessing.cpu_count() if not pkg.parallel: jobs = 1 @@ -301,8 +309,9 @@ def set_module_variables_for_package(pkg, module): m.make_jobs = jobs # TODO: make these build deps that can be installed if not found. - m.make = MakeExecutable('make', jobs) + m.make = MakeExecutable('make', jobs) m.gmake = MakeExecutable('gmake', jobs) + m.scons = MakeExecutable('scons', jobs) # easy shortcut to os.environ m.env = os.environ @@ -316,6 +325,7 @@ def set_module_variables_for_package(pkg, module): # TODO: Currently, everything is a link dependency, but tools like # TODO: this shouldn't be. m.cmake = Executable('cmake') + m.ctest = Executable('ctest') # standard CMake arguments m.std_cmake_args = ['-DCMAKE_INSTALL_PREFIX=%s' % pkg.prefix, @@ -325,33 +335,34 @@ def set_module_variables_for_package(pkg, module): # Set up CMake rpath m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE') - m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(pkg))) + m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % + ":".join(get_rpaths(pkg))) # Put spack compiler paths in module scope. 
link_dir = spack.build_env_path - m.spack_cc = join_path(link_dir, pkg.compiler.link_paths['cc']) + m.spack_cc = join_path(link_dir, pkg.compiler.link_paths['cc']) m.spack_cxx = join_path(link_dir, pkg.compiler.link_paths['cxx']) m.spack_f77 = join_path(link_dir, pkg.compiler.link_paths['f77']) - m.spack_fc = join_path(link_dir, pkg.compiler.link_paths['fc']) + m.spack_fc = join_path(link_dir, pkg.compiler.link_paths['fc']) # Emulate some shell commands for convenience - m.pwd = os.getcwd - m.cd = os.chdir - m.mkdir = os.mkdir - m.makedirs = os.makedirs - m.remove = os.remove - m.removedirs = os.removedirs - m.symlink = os.symlink - - m.mkdirp = mkdirp - m.install = install + m.pwd = os.getcwd + m.cd = os.chdir + m.mkdir = os.mkdir + m.makedirs = os.makedirs + m.remove = os.remove + m.removedirs = os.removedirs + m.symlink = os.symlink + + m.mkdirp = mkdirp + m.install = install m.install_tree = install_tree - m.rmtree = shutil.rmtree - m.move = shutil.move + m.rmtree = shutil.rmtree + m.move = shutil.move # Useful directories within the prefix are encapsulated in # a Prefix object. - m.prefix = pkg.prefix + m.prefix = pkg.prefix # Platform-specific library suffix. m.dso_suffix = dso_suffix @@ -365,20 +376,22 @@ def get_rpaths(pkg): rpaths.extend(d.prefix.lib64 for d in pkg.spec.dependencies.values() if os.path.isdir(d.prefix.lib64)) # Second module is our compiler mod name. We use that to get rpaths from - # module show output. + # module show output. if pkg.compiler.modules and len(pkg.compiler.modules) > 1: rpaths.append(get_path_from_module(pkg.compiler.modules[1])) return rpaths def parent_class_modules(cls): - """Get list of super class modules that are all descend from spack.Package""" + """ + Get list of super class modules that are all descend from spack.Package + """ if not issubclass(cls, spack.Package) or issubclass(spack.Package, cls): return [] result = [] module = sys.modules.get(cls.__module__) if module: - result = [ module ] + result = [module] for c in cls.__bases__: result.extend(parent_class_modules(c)) return result @@ -390,11 +403,12 @@ def load_external_modules(pkg): for dep in list(pkg.spec.traverse()): if dep.external_module: load_module(dep.external_module) - + + def setup_package(pkg): """Execute all environment setup routines.""" spack_env = EnvironmentModifications() - run_env = EnvironmentModifications() + run_env = EnvironmentModifications() # Before proceeding, ensure that specs and packages are consistent # @@ -410,7 +424,8 @@ def setup_package(pkg): # throwaway environment, but it is kind of dirty. # # TODO: Think about how to avoid this fix and do something cleaner. - for s in pkg.spec.traverse(): s.package.spec = s + for s in pkg.spec.traverse(): + s.package.spec = s set_compiler_environment_variables(pkg, spack_env) set_build_environment_variables(pkg, spack_env) @@ -498,7 +513,9 @@ def fork(pkg, function): # message. Just make the parent exit with an error code. 
pid, returncode = os.waitpid(pid, 0) if returncode != 0: - raise InstallError("Installation process had nonzero exit code.".format(str(returncode))) + message = "Installation process had nonzero exit code : {code}" + strcode = str(returncode) + raise InstallError(message.format(code=strcode)) class InstallError(spack.error.SpackError): diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index c95045ef85..c325628ebf 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -76,10 +76,16 @@ def compiler_find(args): if not paths: paths = get_path('PATH') - compilers = [c for c in spack.compilers.find_compilers(*args.add_paths) - if c.spec not in spack.compilers.all_compilers(scope=args.scope)] + # Don't initialize compilers config via compilers.get_compiler_config. + # Just let compiler_find do the + # entire process and return an empty config from all_compilers + # Default for any other process is init_config=True + compilers = [c for c in spack.compilers.find_compilers(*paths) + if c.spec not in spack.compilers.all_compilers( + scope=args.scope, init_config=False)] if compilers: - spack.compilers.add_compilers_to_config(compilers, scope=args.scope) + spack.compilers.add_compilers_to_config(compilers, scope=args.scope, + init_config=False) n = len(compilers) s = 's' if n > 1 else '' filename = spack.config.get_config_filename(args.scope, 'compilers') diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py index 41bfa741f6..8cbb367f86 100644 --- a/lib/spack/spack/cmd/create.py +++ b/lib/spack/spack/cmd/create.py @@ -1,4 +1,3 @@ -_copyright = """\ ############################################################################## # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. @@ -23,10 +22,8 @@ _copyright = """\ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -""" import string import os -import hashlib import re from ordereddict_backport import OrderedDict @@ -41,16 +38,37 @@ import spack.util.web from spack.spec import Spec from spack.util.naming import * from spack.repository import Repo, RepoError -import spack.util.crypto as crypto from spack.util.executable import which -from spack.stage import Stage description = "Create a new package file from an archive URL" -package_template = string.Template( - _copyright + """ +package_template = string.Template("""\ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## # # This is a template package file for Spack. We've put "FIXME" # next to all the things you'll want to change. Once you've handled @@ -68,24 +86,21 @@ package_template = string.Template( # from spack import * + class ${class_name}(Package): - ""\"FIXME: put a proper description of your package here.""\" - # FIXME: add a proper url for your package's homepage here. + ""\"FIXME: Put a proper description of your package here.""\" + + # FIXME: Add a proper url for your package's homepage here. homepage = "http://www.example.com" url = "${url}" ${versions} - +${extends} # FIXME: Add dependencies if this package requires them. # depends_on("foo") def install(self, spec, prefix): - # FIXME: Modify the configure line to suit your build system here. - ${configure} - - # FIXME: Add logic to build and install here - make() - make("install") +${install} """) @@ -120,41 +135,95 @@ def setup_parser(subparser): class ConfigureGuesser(object): def __call__(self, stage): - """Try to guess the type of build system used by the project, and return - an appropriate configure line. - """ - autotools = "configure('--prefix=%s' % prefix)" - cmake = "cmake('.', *std_cmake_args)" - python = "python('setup.py', 'install', '--prefix=%s' % prefix)" - r = "R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)" - - config_lines = ((r'/configure$', 'autotools', autotools), - (r'/CMakeLists.txt$', 'cmake', cmake), - (r'/setup.py$', 'python', python), - (r'/NAMESPACE$', 'r', r)) - - # Peek inside the tarball. - tar = which('tar') - output = tar( - "--exclude=*/*/*", "-tf", stage.archive_file, output=str) - lines = output.split("\n") - - # Set the configure line to the one that matched. - for pattern, bs, cl in config_lines: + """Try to guess the type of build system used by the project. + Set the appropriate default installation instructions and any + necessary extensions for Python and R.""" + + # Default installation instructions + installDict = { + 'autotools': """\ + # FIXME: Modify the configure line to suit your build system here. + configure('--prefix={0}'.format(prefix)) + + # FIXME: Add logic to build and install here. + make() + make('install')""", + + 'cmake': """\ + with working_dir('spack-build', create=True): + # FIXME: Modify the cmake line to suit your build system here. + cmake('..', *std_cmake_args) + + # FIXME: Add logic to build and install here. + make() + make('install')""", + + 'scons': """\ + # FIXME: Add logic to build and install here. + scons('prefix={0}'.format(prefix)) + scons('install')""", + + 'python': """\ + # FIXME: Add logic to build and install here. + python('setup.py', 'install', '--prefix={0}'.format(prefix))""", + + 'R': """\ + # FIXME: Add logic to build and install here. + R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir), + self.stage.source_path)""", + + 'unknown': """\ + # FIXME: Unknown build system + make() + make('install')""" + } + + # A list of clues that give us an idea of the build system a package + # uses. If the regular expression matches a file contained in the + # archive, the corresponding build system is assumed. 
+ clues = [ + (r'/configure$', 'autotools'), + (r'/CMakeLists.txt$', 'cmake'), + (r'/SConstruct$', 'scons'), + (r'/setup.py$', 'python'), + (r'/NAMESPACE$', 'R') + ] + + # Peek inside the compressed file. + if stage.archive_file.endswith('.zip'): + try: + unzip = which('unzip') + output = unzip('-l', stage.archive_file, output=str) + except: + output = '' + else: + try: + tar = which('tar') + output = tar('--exclude=*/*/*', '-tf', + stage.archive_file, output=str) + except: + output = '' + lines = output.split('\n') + + # Determine the build system based on the files contained + # in the archive. + build_system = 'unknown' + for pattern, bs in clues: if any(re.search(pattern, l) for l in lines): - config_line = cl build_system = bs - break - else: - # None matched -- just put both, with cmake commented out - config_line = "# FIXME: Spack couldn't guess one, so here are some options:\n" - config_line += " # " + autotools + "\n" - config_line += " # " + cmake - build_system = 'unknown' - self.configure = config_line self.build_system = build_system + # Set the appropriate default installation instructions + self.install = installDict[build_system] + + # Set any necessary extensions for Python and R + extensions = '' + if build_system in ['python', 'R']: + extensions = "\n extends('{0}')\n".format(build_system) + + self.extends = extensions + def guess_name_and_version(url, args): # Try to deduce name and version of the new package from the URL @@ -168,7 +237,7 @@ def guess_name_and_version(url, args): else: try: name = spack.url.parse_name(url, version) - except spack.url.UndetectableNameError, e: + except spack.url.UndetectableNameError: # Use a user-supplied name if one is present tty.die("Couldn't guess a name for this package. Try running:", "", "spack create --name <name> <url>") @@ -182,7 +251,8 @@ def guess_name_and_version(url, args): def find_repository(spec, args): # figure out namespace for spec if spec.namespace and args.namespace and spec.namespace != args.namespace: - tty.die("Namespaces '%s' and '%s' do not match." % (spec.namespace, args.namespace)) + tty.die("Namespaces '%s' and '%s' do not match." % (spec.namespace, + args.namespace)) if not spec.namespace and args.namespace: spec.namespace = args.namespace @@ -193,8 +263,8 @@ def find_repository(spec, args): try: repo = Repo(repo_path) if spec.namespace and spec.namespace != repo.namespace: - tty.die("Can't create package with namespace %s in repo with namespace %s" - % (spec.namespace, repo.namespace)) + tty.die("Can't create package with namespace %s in repo with " + "namespace %s" % (spec.namespace, repo.namespace)) except RepoError as e: tty.die(str(e)) else: @@ -214,11 +284,7 @@ def find_repository(spec, args): def fetch_tarballs(url, name, version): """Try to find versions of the supplied archive by scraping the web. - - Prompts the user to select how many to download if many are found. 
- - - """ + Prompts the user to select how many to download if many are found.""" versions = spack.util.web.find_versions_of_archive(url) rkeys = sorted(versions.keys(), reverse=True) versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys))) @@ -226,11 +292,11 @@ def fetch_tarballs(url, name, version): archives_to_fetch = 1 if not versions: # If the fetch failed for some reason, revert to what the user provided - versions = { version : url } + versions = {version: url} elif len(versions) > 1: tty.msg("Found %s versions of %s:" % (len(versions), name), *spack.cmd.elide_list( - ["%-10s%s" % (v,u) for v, u in versions.iteritems()])) + ["%-10s%s" % (v, u) for v, u in versions.iteritems()])) print archives_to_fetch = tty.get_number( "Include how many checksums in the package file?", @@ -277,7 +343,7 @@ def create(parser, args): name = 'py-%s' % name # Prepend 'r-' to R package names, by convention. - if guesser.build_system == 'r': + if guesser.build_system == 'R': name = 'r-%s' % name # Create a directory for the new package. @@ -292,10 +358,11 @@ def create(parser, args): pkg_file.write( package_template.substitute( name=name, - configure=guesser.configure, class_name=mod_to_class(name), url=url, - versions=make_version_calls(ver_hash_tuples))) + versions=make_version_calls(ver_hash_tuples), + extends=guesser.extends, + install=guesser.install)) # If everything checks out, go ahead and edit. spack.editor(pkg_path) diff --git a/lib/spack/spack/cmd/list.py b/lib/spack/spack/cmd/list.py index 1e3699cee0..c921efd1bd 100644 --- a/lib/spack/spack/cmd/list.py +++ b/lib/spack/spack/cmd/list.py @@ -29,36 +29,62 @@ from llnl.util.tty.colify import colify import spack import fnmatch +import re + +description = "List available spack packages" -description ="List available spack packages" def setup_parser(subparser): subparser.add_argument( 'filter', nargs=argparse.REMAINDER, help='Optional glob patterns to filter results.') subparser.add_argument( - '-i', '--insensitive', action='store_true', default=False, - help='Filtering will be case insensitive.') + '-s', '--sensitive', action='store_true', default=False, + help='Use case-sensitive filtering. Default is case sensitive, ' + 'unless the query contains a capital letter.') + subparser.add_argument( + '-d', '--search-description', action='store_true', default=False, + help='Filtering will also search the description for a match.') def list(parser, args): # Start with all package names. - pkgs = spack.repo.all_package_names() + pkgs = set(spack.repo.all_package_names()) # filter if a filter arg was provided if args.filter: - def match(p, f): - if args.insensitive: - p = p.lower() - f = f.lower() - return fnmatch.fnmatchcase(p, f) - pkgs = [p for p in pkgs if any(match(p, f) for f in args.filter)] + res = [] + for f in args.filter: + if '*' not in f and '?' not in f: + r = fnmatch.translate('*' + f + '*') + else: + r = fnmatch.translate(f) + + re_flags = re.I + if any(l.isupper for l in f) or args.sensitive: + re_flags = 0 + rc = re.compile(r, flags=re_flags) + res.append(rc) + + if args.search_description: + def match(p, f): + if f.match(p): + return True + + pkg = spack.repo.get(p) + if pkg.__doc__: + return f.match(pkg.__doc__) + return False + else: + def match(p, f): + return f.match(p) + pkgs = [p for p in pkgs if any(match(p, f) for f in res)] # sort before displaying. 
- sorted_packages = sorted(pkgs, key=lambda s:s.lower()) + sorted_packages = sorted(pkgs, key=lambda s: s.lower()) # Print all the package names in columns - indent=0 + indent = 0 if sys.stdout.isatty(): tty.msg("%d packages." % len(sorted_packages)) colify(sorted_packages, indent=indent) diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py index 399237b169..cbd8f4784e 100644 --- a/lib/spack/spack/cmd/repo.py +++ b/lib/spack/spack/cmd/repo.py @@ -26,7 +26,7 @@ import os import re import shutil -from external import argparse +import argparse import llnl.util.tty as tty from llnl.util.filesystem import join_path, mkdirp diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index ae72b743b2..0ba94741da 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -83,7 +83,7 @@ def _to_dict(compiler): return {'compiler': d} -def get_compiler_config(scope=None): +def get_compiler_config(scope=None, init_config=True): """Return the compiler configuration for the specified architecture. """ def init_compiler_config(): @@ -98,7 +98,7 @@ def get_compiler_config(scope=None): # Update the configuration if there are currently no compilers # configured. Avoid updating automatically if there ARE site # compilers configured but no user ones. - if not config: + if not config and init_config: if scope is None: # We know no compilers were configured in any scope. init_compiler_config() @@ -117,14 +117,14 @@ def get_compiler_config(scope=None): return [] # Return empty list which we will later append to. -def add_compilers_to_config(compilers, scope=None): +def add_compilers_to_config(compilers, scope=None, init_config=True): """Add compilers to the config for the specified architecture. Arguments: - compilers: a list of Compiler objects. - scope: configuration scope to modify. """ - compiler_config = get_compiler_config(scope) + compiler_config = get_compiler_config(scope, init_config) for compiler in compilers: compiler_config.append(_to_dict(compiler)) global _cache_config_file @@ -142,8 +142,8 @@ def remove_compiler_from_config(compiler_spec, scope=None): """ compiler_config = get_compiler_config(scope) config_length = len(compiler_config) - - filtered_compiler_config = [comp for comp in compiler_config + + filtered_compiler_config = [comp for comp in compiler_config if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec] # Need a better way for this global _cache_config_file @@ -153,23 +153,23 @@ def remove_compiler_from_config(compiler_spec, scope=None): spack.config.update_config('compilers', filtered_compiler_config, scope) -def all_compilers_config(scope=None): +def all_compilers_config(scope=None, init_config=True): """Return a set of specs for all the compiler versions currently available to build with. These are instances of CompilerSpec. """ # Get compilers for this architecture. global _cache_config_file #Create a cache of the config file so we don't load all the time. if not _cache_config_file: - _cache_config_file = get_compiler_config(scope) + _cache_config_file = get_compiler_config(scope, init_config) return _cache_config_file else: return _cache_config_file -def all_compilers(scope=None): +def all_compilers(scope=None, init_config=True): # Return compiler specs from the merged config. 
return [spack.spec.CompilerSpec(s['compiler']['spec']) - for s in all_compilers_config(scope)] + for s in all_compilers_config(scope, init_config)] def default_compiler(): @@ -315,7 +315,7 @@ def all_os_classes(): """ classes = [] - platform = spack.architecture.sys_type() + platform = spack.architecture.platform() for os_class in platform.operating_sys.values(): classes.append(os_class) diff --git a/lib/spack/spack/compilers/gcc.py b/lib/spack/spack/compilers/gcc.py index 3f552eaece..2fae6688db 100644 --- a/lib/spack/spack/compilers/gcc.py +++ b/lib/spack/spack/compilers/gcc.py @@ -77,9 +77,9 @@ class Gcc(Compiler): return get_compiler_version( fc, '-dumpversion', # older gfortran versions don't have simple dumpversion output. - r'(?:GNU Fortran \(GCC\))?(\d+\.\d+(?:\.\d+)?)', module) + r'(?:GNU Fortran \(GCC\))?(\d+\.\d+(?:\.\d+)?)') @classmethod def f77_version(cls, f77): - return cls.fc_version(f77, module) + return cls.fc_version(f77) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 1f37455c77..f792008c49 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -221,7 +221,7 @@ class DefaultConcretizer(object): if isinstance(spec.root.architecture.platform,spack.architecture.Platform): spec.architecture.platform = spec.root.architecture.platform else: - spec.architecture.platform = spack.architecture.sys_type() + spec.architecture.platform = spack.architecture.platform() return True #changed? def concretize_architecture(self, spec): @@ -334,7 +334,7 @@ class DefaultConcretizer(object): Default specs set at the compiler level will still be added later. """ - + if not spec.architecture.platform_os: #Although this usually means changed, this means awaiting other changes return True @@ -347,7 +347,7 @@ class DefaultConcretizer(object): and flag in p.compiler_flags)) if not flag in spec.compiler_flags or \ not (sorted(spec.compiler_flags[flag]) >= sorted(nearest.compiler_flags[flag])): - if flag in spec.compiler_flags: + if flag in spec.compiler_flags: spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) | set(nearest.compiler_flags[flag])) else: diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index db0787edc6..84179e1469 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -307,6 +307,8 @@ section_schemas = { 'autoload': {'$ref': '#/definitions/dependency_selection'}, 'prerequisites': {'$ref': '#/definitions/dependency_selection'}, 'conflict': {'$ref': '#/definitions/array_of_strings'}, + 'load': {'$ref': '#/definitions/array_of_strings'}, + 'suffixes': {'$ref': '#/definitions/dictionary_of_strings'}, 'environment': { 'type': 'object', 'default': {}, diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index f941346bb1..a4bbff3d5a 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -631,11 +631,13 @@ class WriteTransaction(_Transaction): class CorruptDatabaseError(SpackError): def __init__(self, path, msg=''): super(CorruptDatabaseError, self).__init__( - "Spack database is corrupt: %s. %s" % (path, msg)) + "Spack database is corrupt: %s. %s." + \ + "Try running `spack reindex` to fix." % (path, msg)) class InvalidDatabaseVersionError(SpackError): def __init__(self, expected, found): super(InvalidDatabaseVersionError, self).__init__( - "Expected database version %s but found version %s" % + "Expected database version %s but found version %s." + \ + "Try running `spack reindex` to fix." 
% (expected, found)) diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py index 7e20365b0f..ee13e2dcbc 100644 --- a/lib/spack/spack/directory_layout.py +++ b/lib/spack/spack/directory_layout.py @@ -165,7 +165,7 @@ class DirectoryLayout(object): class YamlDirectoryLayout(DirectoryLayout): """Lays out installation directories like this:: <install root>/ - <target>/ + <platform-os-target>/ <compiler>-<compiler version>/ <name>-<version>-<variants>-<hash> diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index af642dcc9b..30c6228ca4 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -1,4 +1,4 @@ -############################################################################## +# # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # @@ -21,14 +21,17 @@ # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## +# import collections import inspect +import json import os import os.path +import subprocess class NameModifier(object): + def __init__(self, name, **kwargs): self.name = name self.args = {'name': name} @@ -36,6 +39,7 @@ class NameModifier(object): class NameValueModifier(object): + def __init__(self, name, value, **kwargs): self.name = name self.value = value @@ -45,23 +49,27 @@ class NameValueModifier(object): class SetEnv(NameValueModifier): + def execute(self): os.environ[self.name] = str(self.value) class UnsetEnv(NameModifier): + def execute(self): # Avoid throwing if the variable was not set os.environ.pop(self.name, None) class SetPath(NameValueModifier): + def execute(self): string_path = concatenate_paths(self.value, separator=self.separator) os.environ[self.name] = string_path class AppendPath(NameValueModifier): + def execute(self): environment_value = os.environ.get(self.name, '') directories = environment_value.split( @@ -71,6 +79,7 @@ class AppendPath(NameValueModifier): class PrependPath(NameValueModifier): + def execute(self): environment_value = os.environ.get(self.name, '') directories = environment_value.split( @@ -80,6 +89,7 @@ class PrependPath(NameValueModifier): class RemovePath(NameValueModifier): + def execute(self): environment_value = os.environ.get(self.name, '') directories = environment_value.split( @@ -90,6 +100,7 @@ class RemovePath(NameValueModifier): class EnvironmentModifications(object): + """ Keeps track of requests to modify the current environment. 
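Each modifier class above implements a single execute() action; EnvironmentModifications records such actions and replays them on request. A minimal sketch of how client code might use it, assuming methods named set, unset, prepend_path, and apply_modifications (the first three are used by from_sourcing_files below; apply_modifications and the example values are assumptions for illustration):

    from spack.environment import EnvironmentModifications

    env = EnvironmentModifications()
    env.set('CC', '/usr/bin/gcc')              # recorded as a SetEnv action
    env.prepend_path('PATH', '/opt/tool/bin')  # recorded as a PrependPath action
    env.unset('LD_PRELOAD')                    # recorded as an UnsetEnv action

    # os.environ is only touched when the recorded actions are applied:
    env.apply_modifications()
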
@@ -240,6 +251,126 @@ class EnvironmentModifications(object): for x in actions: x.execute() + @staticmethod + def from_sourcing_files(*args, **kwargs): + """ + Creates an instance of EnvironmentModifications that, if executed, + has the same effect on the environment as sourcing the files passed as + parameters + + Args: + *args: list of files to be sourced + + Returns: + instance of EnvironmentModifications + """ + env = EnvironmentModifications() + # Check if the files are actually there + if not all(os.path.isfile(file) for file in args): + raise RuntimeError('trying to source non-existing files') + # Relevant kwd parameters and formats + info = dict(kwargs) + info.setdefault('shell', '/bin/bash') + info.setdefault('shell_options', '-c') + info.setdefault('source_command', 'source') + info.setdefault('suppress_output', '&> /dev/null') + info.setdefault('concatenate_on_success', '&&') + + shell = '{shell}'.format(**info) + shell_options = '{shell_options}'.format(**info) + source_file = '{source_command} {file} {concatenate_on_success}' + dump_environment = 'python -c "import os, json; print json.dumps(dict(os.environ))"' # NOQA: ignore=E501 + # Construct the command that will be executed + command = [source_file.format(file=file, **info) for file in args] + command.append(dump_environment) + command = ' '.join(command) + command = [ + shell, + shell_options, + command + ] + + # Try to source all the files, + proc = subprocess.Popen( + command, stdout=subprocess.PIPE, env=os.environ) + proc.wait() + if proc.returncode != 0: + raise RuntimeError('sourcing files returned a non-zero exit code') + output = ''.join([line for line in proc.stdout]) + # Construct a dictionary with all the variables in the new environment + after_source_env = dict(json.loads(output)) + this_environment = dict(os.environ) + + # Filter variables that are not related to sourcing a file + to_be_filtered = 'SHLVL', '_', 'PWD', 'OLDPWD' + for d in after_source_env, this_environment: + for name in to_be_filtered: + d.pop(name, None) + + # Fill the EnvironmentModifications instance + + # New variables + new_variables = set(after_source_env) - set(this_environment) + for x in new_variables: + env.set(x, after_source_env[x]) + # Variables that have been unset + unset_variables = set(this_environment) - set(after_source_env) + for x in unset_variables: + env.unset(x) + # Variables that have been modified + common_variables = set(this_environment).intersection(set(after_source_env)) # NOQA: ignore=E501 + modified_variables = [x for x in common_variables if this_environment[x] != after_source_env[x]] # NOQA: ignore=E501 + + def return_separator_if_any(first_value, second_value): + separators = ':', ';' + for separator in separators: + if separator in first_value and separator in second_value: + return separator + return None + + for x in modified_variables: + current = this_environment[x] + modified = after_source_env[x] + sep = return_separator_if_any(current, modified) + if sep is None: + # We just need to set the variable to the new value + env.set(x, after_source_env[x]) + else: + current_list = current.split(sep) + modified_list = modified.split(sep) + # Paths that have been removed + remove_list = [ + ii for ii in current_list if ii not in modified_list] + # Check that nothing has been added in the middle of vurrent + # list + remaining_list = [ + ii for ii in current_list if ii in modified_list] + start = modified_list.index(remaining_list[0]) + end = modified_list.index(remaining_list[-1]) + search = 
sep.join(modified_list[start:end + 1]) + if search not in current: + # We just need to set the variable to the new value + env.set(x, after_source_env[x]) + break + else: + try: + prepend_list = modified_list[:start] + except KeyError: + prepend_list = [] + try: + append_list = modified_list[end + 1:] + except KeyError: + append_list = [] + + for item in remove_list: + env.remove_path(x, item) + for item in append_list: + env.append_path(x, item) + for item in prepend_list: + env.prepend_path(x, item) + + return env + def concatenate_paths(paths, separator=':'): """ diff --git a/lib/spack/spack/hooks/sbang.py b/lib/spack/spack/hooks/sbang.py index 83d67ea225..3a957c6e0e 100644 --- a/lib/spack/spack/hooks/sbang.py +++ b/lib/spack/spack/hooks/sbang.py @@ -23,8 +23,8 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os +import re -from llnl.util.filesystem import * import llnl.util.tty as tty import spack @@ -34,6 +34,7 @@ import spack.modules # here, as it is the shortest I could find on a modern OS. shebang_limit = 127 + def shebang_too_long(path): """Detects whether a file has a shebang line that is too long.""" with open(path, 'r') as script: @@ -57,17 +58,15 @@ def filter_shebang(path): if original.startswith(new_sbang_line): return - backup = path + ".shebang.bak" - os.rename(path, backup) + # Use --! instead of #! on second line for lua. + if re.search(r'^#!(/[^/]*)*lua\b', original): + original = re.sub(r'^#', '--', original) with open(path, 'w') as new_file: new_file.write(new_sbang_line) new_file.write(original) - copy_mode(backup, path) - unset_executable_mode(backup) - - tty.warn("Patched overly long shebang in %s" % path) + tty.warn("Patched overlong shebang in %s" % path) def filter_shebangs_in_directory(directory): diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index d2b819e80a..ce46047fa3 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -285,11 +285,18 @@ class EnvModule(object): naming_tokens = self.tokens naming_scheme = self.naming_scheme name = naming_scheme.format(**naming_tokens) - name += '-' + self.spec.dag_hash( - ) # Always append the hash to make the module file unique # Not everybody is working on linux... 
parts = name.split('/') name = join_path(*parts) + # Add optional suffixes based on constraints + configuration, _ = parse_config_options(self) + suffixes = [name] + for constraint, suffix in configuration.get('suffixes', {}).items(): + if constraint in self.spec: + suffixes.append(suffix) + # Always append the hash to make the module file unique + suffixes.append(self.spec.dag_hash()) + name = '-'.join(suffixes) return name @property @@ -381,6 +388,8 @@ class EnvModule(object): for x in filter_blacklisted( module_configuration.pop('autoload', []), self.name): module_file_content += self.autoload(x) + for x in module_configuration.pop('load', []): + module_file_content += self.autoload(x) for x in filter_blacklisted( module_configuration.pop('prerequisites', []), self.name): module_file_content += self.prerequisite(x) @@ -402,8 +411,12 @@ class EnvModule(object): return tuple() def autoload(self, spec): - m = type(self)(spec) - return self.autoload_format.format(module_file=m.use_name) + if not isinstance(spec, str): + m = type(self)(spec) + module_file = m.use_name + else: + module_file = spec + return self.autoload_format.format(module_file=module_file) def prerequisite(self, spec): m = type(self)(spec) @@ -441,7 +454,6 @@ class EnvModule(object): class Dotkit(EnvModule): name = 'dotkit' - path = join_path(spack.share_path, "dotkit") environment_modifications_formats = { PrependPath: 'dk_alter {name} {value}\n', @@ -454,7 +466,7 @@ class Dotkit(EnvModule): @property def file_name(self): - return join_path(Dotkit.path, self.spec.architecture, + return join_path(spack.share_path, "dotkit", self.spec.architecture, '%s.dk' % self.use_name) @property @@ -482,7 +494,6 @@ class Dotkit(EnvModule): class TclModule(EnvModule): name = 'tcl' - path = join_path(spack.share_path, "modules") environment_modifications_formats = { PrependPath: 'prepend-path --delim "{delim}" {name} \"{value}\"\n', @@ -503,7 +514,7 @@ class TclModule(EnvModule): @property def file_name(self): - return join_path(TclModule.path, self.spec.architecture, self.use_name) + return join_path(spack.share_path, "modules", self.spec.architecture, self.use_name) @property def header(self): diff --git a/lib/spack/spack/multimethod.py b/lib/spack/spack/multimethod.py index 170ef3cea2..0818f9092f 100644 --- a/lib/spack/spack/multimethod.py +++ b/lib/spack/spack/multimethod.py @@ -149,7 +149,7 @@ class when(object): @when('arch=chaos_5_x86_64_ib') def install(self, prefix): # This will be executed instead of the default install if - # the package's sys_type() is chaos_5_x86_64_ib. + # the package's platform() is chaos_5_x86_64_ib. 
@when('arch=bgqos_0") def install(self, prefix): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index c5250d17c0..b73056fd30 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -398,6 +398,12 @@ class Package(object): spack.repo.get(self.extendee_spec)._check_extendable() @property + def package_dir(self): + """Return the directory where the package.py file lives.""" + return os.path.dirname(self.module.__file__) + + + @property def global_license_dir(self): """Returns the directory where global license files for all packages are stored.""" @@ -687,7 +693,7 @@ class Package(object): """Get the spack.compiler.Compiler object used to build this package""" if not self.spec.concrete: raise ValueError("Can only get a compiler for a concrete package.") - return spack.compilers.compiler_for_spec(self.spec.compiler, + return spack.compilers.compiler_for_spec(self.spec.compiler, self.spec.architecture) def url_version(self, version): diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py index 70134964ad..2c160a5f45 100644 --- a/lib/spack/spack/repository.py +++ b/lib/spack/spack/repository.py @@ -30,7 +30,7 @@ import imp import re import traceback from bisect import bisect_left -from external import yaml +import yaml import llnl.util.tty as tty from llnl.util.filesystem import * diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 54219ec1b4..77bc57147d 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -2216,7 +2216,7 @@ class SpecParser(spack.parse.Parser): for spec in specs: for s in spec.traverse(): if s.architecture.os_string or s.architecture.target_string: - s._set_platform(spack.architecture.sys_type()) + s._set_platform(spack.architecture.platform()) return specs def parse_compiler(self, text): diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py index a6847c5744..ae3f08deed 100644 --- a/lib/spack/spack/test/architecture.py +++ b/lib/spack/spack/test/architecture.py @@ -5,7 +5,7 @@ import unittest import os import platform as py_platform import spack -from spack.architecture import * +import spack.architecture from spack.spec import * from spack.platforms.cray_xc import CrayXc from spack.platforms.linux import Linux @@ -19,14 +19,14 @@ class ArchitectureTest(MockPackagesTest): def setUp(self): super(ArchitectureTest, self).setUp() - self.platform = sys_type() + self.platform = spack.architecture.platform() def tearDown(self): super(ArchitectureTest, self).tearDown() def test_dict_functions_for_architecture(self): - arch = Arch() - arch.platform = spack.architecture.sys_type() + arch = spack.architecture.Arch() + arch.platform = spack.architecture.platform() arch.platform_os = arch.platform.operating_system('default_os') arch.target = arch.platform.target('default_target') @@ -36,18 +36,23 @@ class ArchitectureTest(MockPackagesTest): self.assertEqual(arch, new_arch) - self.assertTrue( isinstance(arch, Arch) ) - self.assertTrue( isinstance(arch.platform, Platform) ) - self.assertTrue( isinstance(arch.platform_os, OperatingSystem) ) - self.assertTrue( isinstance(arch.target, Target) ) - self.assertTrue( isinstance(new_arch, Arch) ) - self.assertTrue( isinstance(new_arch.platform, Platform) ) - self.assertTrue( isinstance(new_arch.platform_os, OperatingSystem) ) - self.assertTrue( isinstance(new_arch.target, Target) ) - - - def test_sys_type(self): - output_platform_class = sys_type() + self.assertTrue( isinstance(arch, spack.architecture.Arch) ) + 
self.assertTrue( isinstance(arch.platform, spack.architecture.Platform) ) + self.assertTrue( isinstance(arch.platform_os, + spack.architecture.OperatingSystem) ) + self.assertTrue( isinstance(arch.target, + spack.architecture.Target) ) + self.assertTrue( isinstance(new_arch, spack.architecture.Arch) ) + self.assertTrue( isinstance(new_arch.platform, + spack.architecture.Platform) ) + self.assertTrue( isinstance(new_arch.platform_os, + spack.architecture.OperatingSystem) ) + self.assertTrue( isinstance(new_arch.target, + spack.architecture.Target) ) + + + def test_platform(self): + output_platform_class = spack.architecture.platform() my_arch_class = None if os.path.exists('/opt/cray/craype'): my_platform_class = CrayXc() diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py index ab201f406a..ce02b08bc3 100644 --- a/lib/spack/spack/test/concretize.py +++ b/lib/spack/spack/test/concretize.py @@ -257,8 +257,8 @@ class ConcretizeTest(MockPackagesTest): def test_external_package_module(self): # No tcl modules on darwin/linux machines # TODO: improved way to check for this. - if (spack.architecture.sys_type().name == 'darwin' or - spack.architecture.sys_type().name == 'linux'): + if (spack.architecture.platform().name == 'darwin' or + spack.architecture.platform().name == 'linux'): return spec = Spec('externalmodule') diff --git a/lib/spack/spack/test/data/sourceme_first.sh b/lib/spack/spack/test/data/sourceme_first.sh new file mode 100644 index 0000000000..800f639ac8 --- /dev/null +++ b/lib/spack/spack/test/data/sourceme_first.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash +export NEW_VAR='new' +export UNSET_ME='overridden' diff --git a/lib/spack/spack/test/data/sourceme_second.sh b/lib/spack/spack/test/data/sourceme_second.sh new file mode 100644 index 0000000000..9955a0e6d6 --- /dev/null +++ b/lib/spack/spack/test/data/sourceme_second.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash +export PATH_LIST='/path/first:/path/second:/path/fourth' +unset EMPTY_PATH_LIST
\ No newline at end of file diff --git a/lib/spack/spack/test/environment.py b/lib/spack/spack/test/environment.py index a0d959db2f..2396961888 100644 --- a/lib/spack/spack/test/environment.py +++ b/lib/spack/spack/test/environment.py @@ -24,16 +24,21 @@ ############################################################################## import unittest import os -import copy + +from spack import spack_root +from llnl.util.filesystem import join_path from spack.environment import EnvironmentModifications +from spack.environment import SetEnv, UnsetEnv +from spack.environment import RemovePath, PrependPath, AppendPath class EnvironmentTest(unittest.TestCase): + def setUp(self): os.environ['UNSET_ME'] = 'foo' os.environ['EMPTY_PATH_LIST'] = '' os.environ['PATH_LIST'] = '/path/second:/path/third' - os.environ['REMOVE_PATH_LIST'] = '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g' + os.environ['REMOVE_PATH_LIST'] = '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g' # NOQA: ignore=E501 def tearDown(self): pass @@ -77,9 +82,18 @@ class EnvironmentTest(unittest.TestCase): env.remove_path('REMOVE_PATH_LIST', '/duplicate/') env.apply_modifications() - self.assertEqual('/path/first:/path/second:/path/third:/path/last', os.environ['PATH_LIST']) - self.assertEqual('/path/first:/path/middle:/path/last', os.environ['EMPTY_PATH_LIST']) - self.assertEqual('/path/first:/path/middle:/path/last', os.environ['NEWLY_CREATED_PATH_LIST']) + self.assertEqual( + '/path/first:/path/second:/path/third:/path/last', + os.environ['PATH_LIST'] + ) + self.assertEqual( + '/path/first:/path/middle:/path/last', + os.environ['EMPTY_PATH_LIST'] + ) + self.assertEqual( + '/path/first:/path/middle:/path/last', + os.environ['NEWLY_CREATED_PATH_LIST'] + ) self.assertEqual('/a/b:/a/c:/a/d:/f/g', os.environ['REMOVE_PATH_LIST']) def test_extra_arguments(self): @@ -98,3 +112,46 @@ class EnvironmentTest(unittest.TestCase): self.assertEqual(len(copy_construct), 2) for x, y in zip(env, copy_construct): assert x is y + + def test_source_files(self): + datadir = join_path(spack_root, 'lib', 'spack', + 'spack', 'test', 'data') + files = [ + join_path(datadir, 'sourceme_first.sh'), + join_path(datadir, 'sourceme_second.sh') + ] + env = EnvironmentModifications.from_sourcing_files(*files) + modifications = env.group_by_name() + + # This is sensitive to the user's environment; can include + # spurious entries for things like PS1 + # + # TODO: figure out how to make a bit more robust. 
+ self.assertTrue(len(modifications) >= 4) + + # Set new variables + self.assertEqual(len(modifications['NEW_VAR']), 1) + self.assertTrue(isinstance(modifications['NEW_VAR'][0], SetEnv)) + self.assertEqual(modifications['NEW_VAR'][0].value, 'new') + # Unset variables + self.assertEqual(len(modifications['EMPTY_PATH_LIST']), 1) + self.assertTrue(isinstance( + modifications['EMPTY_PATH_LIST'][0], UnsetEnv)) + # Modified variables + self.assertEqual(len(modifications['UNSET_ME']), 1) + self.assertTrue(isinstance(modifications['UNSET_ME'][0], SetEnv)) + self.assertEqual(modifications['UNSET_ME'][0].value, 'overridden') + + self.assertEqual(len(modifications['PATH_LIST']), 3) + self.assertTrue( + isinstance(modifications['PATH_LIST'][0], RemovePath) + ) + self.assertEqual(modifications['PATH_LIST'][0].value, '/path/third') + self.assertTrue( + isinstance(modifications['PATH_LIST'][1], AppendPath) + ) + self.assertEqual(modifications['PATH_LIST'][1].value, '/path/fourth') + self.assertTrue( + isinstance(modifications['PATH_LIST'][2], PrependPath) + ) + self.assertEqual(modifications['PATH_LIST'][2].value, '/path/first') diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py index cfe6ea9b27..390ec096a9 100644 --- a/lib/spack/spack/test/install.py +++ b/lib/spack/spack/test/install.py @@ -28,6 +28,7 @@ import tempfile import spack from llnl.util.filesystem import * from spack.directory_layout import YamlDirectoryLayout +from spack.database import Database from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite from spack.test.mock_packages_test import * from spack.test.mock_repo import MockArchive @@ -49,7 +50,10 @@ class InstallTest(MockPackagesTest): # installed pkgs and mock packages. self.tmpdir = tempfile.mkdtemp() self.orig_layout = spack.install_layout + self.orig_db = spack.installed_db + spack.install_layout = YamlDirectoryLayout(self.tmpdir) + spack.installed_db = Database(self.tmpdir) def tearDown(self): @@ -61,6 +65,7 @@ class InstallTest(MockPackagesTest): # restore spack's layout. spack.install_layout = self.orig_layout + spack.installed_db = self.orig_db shutil.rmtree(self.tmpdir, ignore_errors=True) @@ -71,7 +76,7 @@ class InstallTest(MockPackagesTest): pkg.fetcher = fetcher - def ztest_install_and_uninstall(self): + def test_install_and_uninstall(self): # Get a basic concrete spec for the trivial install package. spec = Spec('trivial_install_test_package') spec.concretize() diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py index a56bd8ebdc..e1acc32cb7 100644 --- a/lib/spack/spack/test/mock_packages_test.py +++ b/lib/spack/spack/test/mock_packages_test.py @@ -34,7 +34,7 @@ from ordereddict_backport import OrderedDict from spack.repository import RepoPath from spack.spec import Spec -platform = spack.architecture.sys_type() +platform = spack.architecture.platform() linux_os_name = 'debian' linux_os_version = '6' @@ -56,7 +56,7 @@ compilers: fc: None modules: 'None' - compiler: - spec: gcc@4.5.0 + spec: gcc@4.5.0 operating_system: {0}{1} paths: cc: /path/to/gcc @@ -144,7 +144,7 @@ compilers: fc: /path/to/gfortran operating_system: elcapitan spec: gcc@4.5.0 - modules: 'None' + modules: 'None' - compiler: spec: clang@3.3 operating_system: elcapitan @@ -201,6 +201,10 @@ class MockPackagesTest(unittest.TestCase): spack.config.ConfigScope('site', self.mock_site_config) spack.config.ConfigScope('user', self.mock_user_config) + # Keep tests from interfering with the actual module path. 
+ self.real_share_path = spack.share_path + spack.share_path = tempfile.mkdtemp() + # Store changes to the package's dependencies so we can # restore later. self.saved_deps = {} @@ -235,6 +239,9 @@ class MockPackagesTest(unittest.TestCase): pkg.dependencies.clear() pkg.dependencies.update(deps) + shutil.rmtree(spack.share_path, ignore_errors=True) + spack.share_path = self.real_share_path + def setUp(self): self.initmock() diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py index 582e067860..6d2e3705bd 100644 --- a/lib/spack/spack/test/modules.py +++ b/lib/spack/spack/test/modules.py @@ -27,6 +27,7 @@ from contextlib import contextmanager import StringIO import spack.modules +import unittest from spack.test.mock_packages_test import MockPackagesTest FILE_REGISTRY = collections.defaultdict(StringIO.StringIO) @@ -67,6 +68,24 @@ configuration_autoload_all = { } } +configuration_prerequisites_direct = { + 'enable': ['tcl'], + 'tcl': { + 'all': { + 'prerequisites': 'direct' + } + } +} + +configuration_prerequisites_all = { + 'enable': ['tcl'], + 'tcl': { + 'all': { + 'prerequisites': 'all' + } + } +} + configuration_alter_environment = { 'enable': ['tcl'], 'tcl': { @@ -74,8 +93,13 @@ configuration_alter_environment = { 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']} }, 'platform=test target=x86_64': { - 'environment': {'set': {'FOO': 'foo'}, - 'unset': ['BAR']} + 'environment': { + 'set': {'FOO': 'foo'}, + 'unset': ['BAR'] + } + }, + 'platform=test target=x86_32': { + 'load': ['foo/bar'] } } } @@ -83,7 +107,8 @@ configuration_alter_environment = { configuration_blacklist = { 'enable': ['tcl'], 'tcl': { - 'blacklist': ['callpath'], + 'whitelist': ['zmpi'], + 'blacklist': ['callpath', 'mpi'], 'all': { 'autoload': 'direct' } @@ -100,8 +125,68 @@ configuration_conflicts = { } } +configuration_wrong_conflicts = { + 'enable': ['tcl'], + 'tcl': { + 'naming_scheme': '{name}/{version}-{compiler.name}', + 'all': { + 'conflict': ['{name}/{compiler.name}'] + } + } +} + +configuration_suffix = { + 'enable': ['tcl'], + 'tcl': { + 'mpileaks': { + 'suffixes': { + '+debug': 'foo', + '~debug': 'bar' + } + } + } +} + + +class HelperFunctionsTests(MockPackagesTest): + + def test_update_dictionary_extending_list(self): + target = { + 'foo': { + 'a': 1, + 'b': 2, + 'd': 4 + }, + 'bar': [1, 2, 4], + 'baz': 'foobar' + } + update = { + 'foo': { + 'c': 3, + }, + 'bar': [3], + 'baz': 'foobaz', + 'newkey': { + 'd': 4 + } + } + spack.modules.update_dictionary_extending_lists(target, update) + self.assertTrue(len(target) == 4) + self.assertTrue(len(target['foo']) == 4) + self.assertTrue(len(target['bar']) == 4) + self.assertEqual(target['baz'], 'foobaz') + + def test_inspect_path(self): + env = spack.modules.inspect_path('/usr') + names = [item.name for item in env] + self.assertTrue('PATH' in names) + self.assertTrue('LIBRARY_PATH' in names) + self.assertTrue('LD_LIBRARY_PATH' in names) + self.assertTrue('CPATH' in names) + class TclTests(MockPackagesTest): + def setUp(self): super(TclTests, self).setUp() self.configuration_obj = spack.modules.CONFIGURATION @@ -116,7 +201,6 @@ class TclTests(MockPackagesTest): def get_modulefile_content(self, spec): spec.concretize() - print spec, '&&&&&' generator = spack.modules.TclModule(spec) generator.write() content = FILE_REGISTRY[generator.file_name].split('\n') @@ -127,6 +211,8 @@ class TclTests(MockPackagesTest): spec = spack.spec.Spec('mpich@3.0.4') content = self.get_modulefile_content(spec) self.assertTrue('module-whatis "mpich @3.0.4"' in 
content) + self.assertRaises(TypeError, spack.modules.dependencies, + spec, 'non-existing-tag') def test_autoload(self): spack.modules.CONFIGURATION = configuration_autoload_direct @@ -141,11 +227,21 @@ class TclTests(MockPackagesTest): self.assertEqual(len([x for x in content if 'is-loaded' in x]), 5) self.assertEqual(len([x for x in content if 'module load ' in x]), 5) + def test_prerequisites(self): + spack.modules.CONFIGURATION = configuration_prerequisites_direct + spec = spack.spec.Spec('mpileaks arch=x86-linux') + content = self.get_modulefile_content(spec) + self.assertEqual(len([x for x in content if 'prereq' in x]), 2) + + spack.modules.CONFIGURATION = configuration_prerequisites_all + spec = spack.spec.Spec('mpileaks arch=x86-linux') + content = self.get_modulefile_content(spec) + self.assertEqual(len([x for x in content if 'prereq' in x]), 5) + def test_alter_environment(self): spack.modules.CONFIGURATION = configuration_alter_environment spec = spack.spec.Spec('mpileaks platform=test target=x86_64') content = self.get_modulefile_content(spec) - print content self.assertEqual( len([x for x in content @@ -156,7 +252,6 @@ class TclTests(MockPackagesTest): spec = spack.spec.Spec('libdwarf %clang platform=test target=x86_32') content = self.get_modulefile_content(spec) - print content self.assertEqual( len([x for x in content @@ -164,6 +259,10 @@ class TclTests(MockPackagesTest): self.assertEqual( len([x for x in content if 'setenv FOO "foo"' in x]), 0) self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 0) + self.assertEqual( + len([x for x in content if 'is-loaded foo/bar' in x]), 1) + self.assertEqual( + len([x for x in content if 'module load foo/bar' in x]), 1) def test_blacklist(self): spack.modules.CONFIGURATION = configuration_blacklist @@ -171,6 +270,13 @@ class TclTests(MockPackagesTest): content = self.get_modulefile_content(spec) self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1) self.assertEqual(len([x for x in content if 'module load ' in x]), 1) + spec = spack.spec.Spec('callpath arch=x86-linux') + # Returns a StringIO instead of a string as no module file was written + self.assertRaises(AttributeError, self.get_modulefile_content, spec) + spec = spack.spec.Spec('zmpi arch=x86-linux') + content = self.get_modulefile_content(spec) + self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1) + self.assertEqual(len([x for x in content if 'module load ' in x]), 1) def test_conflicts(self): spack.modules.CONFIGURATION = configuration_conflicts @@ -182,3 +288,57 @@ class TclTests(MockPackagesTest): len([x for x in content if x == 'conflict mpileaks']), 1) self.assertEqual( len([x for x in content if x == 'conflict intel/14.0.1']), 1) + + spack.modules.CONFIGURATION = configuration_wrong_conflicts + self.assertRaises(SystemExit, self.get_modulefile_content, spec) + + def test_suffixes(self): + spack.modules.CONFIGURATION = configuration_suffix + spec = spack.spec.Spec('mpileaks+debug arch=x86-linux') + spec.concretize() + generator = spack.modules.TclModule(spec) + self.assertTrue('foo' in generator.use_name) + + spec = spack.spec.Spec('mpileaks~debug arch=x86-linux') + spec.concretize() + generator = spack.modules.TclModule(spec) + self.assertTrue('bar' in generator.use_name) + + +configuration_dotkit = { + 'enable': ['dotkit'], + 'dotkit': { + 'all': { + 'prerequisites': 'direct' + } + } +} + + +class DotkitTests(MockPackagesTest): + + def setUp(self): + super(DotkitTests, self).setUp() + self.configuration_obj = 
spack.modules.CONFIGURATION + spack.modules.open = mock_open + # Make sure that a non-mocked configuration will trigger an error + spack.modules.CONFIGURATION = None + + def tearDown(self): + del spack.modules.open + spack.modules.CONFIGURATION = self.configuration_obj + super(DotkitTests, self).tearDown() + + def get_modulefile_content(self, spec): + spec.concretize() + generator = spack.modules.Dotkit(spec) + generator.write() + content = FILE_REGISTRY[generator.file_name].split('\n') + return content + + def test_dotkit(self): + spack.modules.CONFIGURATION = configuration_dotkit + spec = spack.spec.Spec('mpileaks arch=x86-linux') + content = self.get_modulefile_content(spec) + self.assertTrue('#c spack' in content) + self.assertTrue('#d mpileaks @2.3' in content) diff --git a/lib/spack/spack/test/multimethod.py b/lib/spack/spack/test/multimethod.py index 034e6b3923..c233ea4fd6 100644 --- a/lib/spack/spack/test/multimethod.py +++ b/lib/spack/spack/test/multimethod.py @@ -93,7 +93,7 @@ class MultiMethodTest(MockPackagesTest): def test_target_match(self): - platform = spack.architecture.sys_type() + platform = spack.architecture.platform() targets = platform.targets.values() for target in targets[:-1]: pkg = spack.repo.get('multimethod target='+target.name) diff --git a/lib/spack/spack/test/sbang.py b/lib/spack/spack/test/sbang.py index 6aea1a68c7..ed54ff90b0 100644 --- a/lib/spack/spack/test/sbang.py +++ b/lib/spack/spack/test/sbang.py @@ -34,10 +34,12 @@ from llnl.util.filesystem import * from spack.hooks.sbang import filter_shebangs_in_directory import spack -short_line = "#!/this/is/short/bin/bash\n" -long_line = "#!/this/" + ('x' * 200) + "/is/long\n" -sbang_line = '#!/bin/bash %s/bin/sbang\n' % spack.spack_root -last_line = "last!\n" +short_line = "#!/this/is/short/bin/bash\n" +long_line = "#!/this/" + ('x' * 200) + "/is/long\n" +lua_line = "#!/this/" + ('x' * 200) + "/is/lua\n" +lua_line_patched = "--!/this/" + ('x' * 200) + "/is/lua\n" +sbang_line = '#!/bin/bash %s/bin/sbang\n' % spack.spack_root +last_line = "last!\n" class SbangTest(unittest.TestCase): def setUp(self): @@ -59,6 +61,12 @@ class SbangTest(unittest.TestCase): f.write(long_line) f.write(last_line) + # Lua script with long shebang + self.lua_shebang = os.path.join(self.tempdir, 'lua') + with open(self.lua_shebang, 'w') as f: + f.write(lua_line) + f.write(last_line) + # Script already using sbang. self.has_shebang = os.path.join(self.tempdir, 'shebang') with open(self.has_shebang, 'w') as f: @@ -71,7 +79,6 @@ class SbangTest(unittest.TestCase): shutil.rmtree(self.tempdir, ignore_errors=True) - def test_shebang_handling(self): filter_shebangs_in_directory(self.tempdir) @@ -86,6 +93,12 @@ class SbangTest(unittest.TestCase): self.assertEqual(f.readline(), long_line) self.assertEqual(f.readline(), last_line) + # Make sure this got patched. 
+ with open(self.lua_shebang, 'r') as f: + self.assertEqual(f.readline(), sbang_line) + self.assertEqual(f.readline(), lua_line_patched) + self.assertEqual(f.readline(), last_line) + # Make sure this is untouched with open(self.has_shebang, 'r') as f: self.assertEqual(f.readline(), sbang_line) diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index 712f07ac4d..c56c70b1fe 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -242,7 +242,7 @@ class SpecDagTest(MockPackagesTest): def test_unsatisfiable_architecture(self): - platform = spack.architecture.sys_type() + platform = spack.architecture.platform() self.set_pkg_dep('mpileaks', 'mpich platform=test target=be') spec = Spec('mpileaks ^mpich platform=test target=fe ^callpath ^dyninst ^libelf ^libdwarf') diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py index 9876bfd5a8..b174e5305c 100644 --- a/lib/spack/spack/test/spec_semantics.py +++ b/lib/spack/spack/test/spec_semantics.py @@ -141,7 +141,6 @@ class SpecSematicsTest(MockPackagesTest): def test_satisfies_architecture(self): - platform = spack.architecture.sys_type() self.check_satisfies( 'foo platform=test target=frontend os=frontend', 'platform=test target=frontend os=frontend') @@ -396,7 +395,7 @@ class SpecSematicsTest(MockPackagesTest): self.check_constrain_changed('libelf', 'debug=2') self.check_constrain_changed('libelf', 'cppflags="-O3"') - platform = spack.architecture.sys_type() + platform = spack.architecture.platform() self.check_constrain_changed('libelf', 'target='+platform.target('default_target').name) self.check_constrain_changed('libelf', 'os='+platform.operating_system('default_os').name) @@ -412,7 +411,7 @@ class SpecSematicsTest(MockPackagesTest): self.check_constrain_not_changed('libelf debug=2', 'debug=2') self.check_constrain_not_changed('libelf cppflags="-O3"', 'cppflags="-O3"') - platform = spack.architecture.sys_type() + platform = spack.architecture.platform() default_target = platform.target('default_target').name self.check_constrain_not_changed('libelf target='+default_target, 'target='+default_target) @@ -425,7 +424,7 @@ class SpecSematicsTest(MockPackagesTest): self.check_constrain_changed('libelf^foo', 'libelf^foo+debug') self.check_constrain_changed('libelf^foo', 'libelf^foo~debug') - platform = spack.architecture.sys_type() + platform = spack.architecture.platform() default_target = platform.target('default_target').name self.check_constrain_changed('libelf^foo', 'libelf^foo target='+default_target) @@ -439,6 +438,6 @@ class SpecSematicsTest(MockPackagesTest): self.check_constrain_not_changed('libelf^foo~debug', 'libelf^foo~debug') self.check_constrain_not_changed('libelf^foo cppflags="-O3"', 'libelf^foo cppflags="-O3"') - platform = spack.architecture.sys_type() + platform = spack.architecture.platform() default_target = platform.target('default_target').name self.check_constrain_not_changed('libelf^foo target='+default_target, 'libelf^foo target='+default_target) |
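For orientation, here is a minimal, hypothetical usage sketch of the new EnvironmentModifications.from_sourcing_files helper exercised by test_source_files above. The fixture paths and the group_by_name()/apply_modifications() calls are taken from this diff; running the snippet standalone from a Spack checkout and printing a summary are illustrative assumptions, not part of the commit.

    # Hypothetical sketch (not part of this diff): source the two fixture
    # scripts added above and inspect the resulting modifications.
    from spack.environment import EnvironmentModifications

    files = [
        'lib/spack/spack/test/data/sourceme_first.sh',   # sets NEW_VAR, overrides UNSET_ME
        'lib/spack/spack/test/data/sourceme_second.sh',  # rewrites PATH_LIST, unsets EMPTY_PATH_LIST
    ]

    # Each file is sourced in /bin/bash and the resulting environment is
    # diffed against os.environ, yielding SetEnv/UnsetEnv/AppendPath/
    # PrependPath/RemovePath objects.
    env = EnvironmentModifications.from_sourcing_files(*files)

    # Group the pending modifications by variable name, as the test does ...
    for name, modifications in env.group_by_name().items():
        print('{0}: {1}'.format(
            name, [type(m).__name__ for m in modifications]))

    # ... and, if desired, apply them to the current process environment.
    env.apply_modifications()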