Diffstat (limited to 'lib')
-rw-r--r--  lib/spack/docs/packaging_guide.rst            |  13
-rwxr-xr-x  lib/spack/env/cc                              |   4
-rw-r--r--  lib/spack/llnl/util/filesystem.py             |  76
-rw-r--r--  lib/spack/llnl/util/tty/__init__.py           |   4
-rw-r--r--  lib/spack/spack/build_environment.py          | 114
-rw-r--r--  lib/spack/spack/cmd/compiler.py               |  15
-rw-r--r--  lib/spack/spack/compilers/__init__.py         |  43
-rw-r--r--  lib/spack/spack/database.py                   |   6
-rw-r--r--  lib/spack/spack/environment.py                | 135
-rw-r--r--  lib/spack/spack/hooks/licensing.py            |   5
-rw-r--r--  lib/spack/spack/hooks/sbang.py                |   8
-rw-r--r--  lib/spack/spack/package.py                    |  14
-rw-r--r--  lib/spack/spack/test/__init__.py              |   2
-rw-r--r--  lib/spack/spack/test/cmd/test_compiler_cmd.py |  81
-rw-r--r--  lib/spack/spack/test/data/sourceme_first.sh   |   3
-rw-r--r--  lib/spack/spack/test/data/sourceme_second.sh  |   3
-rw-r--r--  lib/spack/spack/test/environment.py           |  62
17 files changed, 459 insertions, 129 deletions
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 1f83f611b0..54b886310a 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -1950,6 +1950,19 @@ instead of hard-coding ``join_path(self.spec['mpi'].prefix.bin, 'mpicc')`` for the reasons outlined above. +Blas and Lapack libraries +~~~~~~~~~~~~~~~~~~~~~~~~~ +Different packages provide implementation of ``Blas`` and ``Lapack`` routines. +The names of the resulting static and/or shared libraries differ from package +to package. In order to make ``install()`` method indifferent to the +choice of ``Blas`` implementation, each package which provides it +sets up ``self.spec.blas_shared_lib`` and ``self.spec.blas_static_lib `` to +point to the shared and static ``Blas`` libraries, respectively. The same +applies to packages which provide ``Lapack``. Package developers are advised to +use these variables, for example ``spec['blas'].blas_shared_lib`` instead of +hard-coding ``join_path(spec['blas'].prefix.lib, 'libopenblas.so')``. + + Forking ``install()`` ~~~~~~~~~~~~~~~~~~~~~ diff --git a/lib/spack/env/cc b/lib/spack/env/cc index 1e405ae6e9..bf98b4c354 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -324,8 +324,8 @@ fi if [[ $SPACK_DEBUG == TRUE ]]; then input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.in.log" output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.out.log" - echo "[$mode] $command $input_command" >> $input_log - echo "[$mode] ${full_command[@]}" >> $output_log + echo "[$mode] $command $input_command" >> "$input_log" + echo "[$mode] ${full_command[@]}" >> "$output_log" fi exec "${full_command[@]}" diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index 6661a80f27..d72e8bae92 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -22,28 +22,28 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## -__all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree', - 'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp', - 'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file', - 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', - 'set_executable', 'copy_mode', 'unset_executable_mode', - 'remove_dead_links', 'remove_linked_tree', 'find_library_path', - 'fix_darwin_install_name'] - import os import glob -import sys import re import shutil import stat import errno import getpass from contextlib import contextmanager, closing -from tempfile import NamedTemporaryFile import subprocess import llnl.util.tty as tty -from spack.util.compression import ALLOWED_ARCHIVE_TYPES + +__all__ = ['set_install_permissions', 'install', 'install_tree', + 'traverse_tree', + 'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp', + 'force_remove', 'join_path', 'ancestor', 'can_access', + 'filter_file', + 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', + 'set_executable', 'copy_mode', 'unset_executable_mode', + 'remove_dead_links', 'remove_linked_tree', 'find_library_path', + 'fix_darwin_install_name', 'to_link_flags'] + def filter_file(regex, repl, *filenames, **kwargs): """Like sed, but uses python regular expressions. @@ -69,6 +69,7 @@ def filter_file(regex, repl, *filenames, **kwargs): # Allow strings to use \1, \2, etc. 
for replacement, like sed if not callable(repl): unescaped = repl.replace(r'\\', '\\') + def replace_groups_with_groupid(m): def groupid_to_group(x): return m.group(int(x.group(1))) @@ -157,9 +158,12 @@ def set_install_permissions(path): def copy_mode(src, dest): src_mode = os.stat(src).st_mode dest_mode = os.stat(dest).st_mode - if src_mode & stat.S_IXUSR: dest_mode |= stat.S_IXUSR - if src_mode & stat.S_IXGRP: dest_mode |= stat.S_IXGRP - if src_mode & stat.S_IXOTH: dest_mode |= stat.S_IXOTH + if src_mode & stat.S_IXUSR: + dest_mode |= stat.S_IXUSR + if src_mode & stat.S_IXGRP: + dest_mode |= stat.S_IXGRP + if src_mode & stat.S_IXOTH: + dest_mode |= stat.S_IXOTH os.chmod(dest, dest_mode) @@ -224,9 +228,10 @@ def force_remove(*paths): for path in paths: try: os.remove(path) - except OSError, e: + except OSError: pass + @contextmanager def working_dir(dirname, **kwargs): if kwargs.get('create', False): @@ -240,7 +245,7 @@ def working_dir(dirname, **kwargs): def touch(path): """Creates an empty file at the specified path.""" - with open(path, 'a') as file: + with open(path, 'a'): os.utime(path, None) @@ -253,7 +258,7 @@ def touchp(path): def force_symlink(src, dest): try: os.symlink(src, dest) - except OSError as e: + except OSError: os.remove(dest) os.symlink(src, dest) @@ -275,7 +280,7 @@ def ancestor(dir, n=1): def can_access(file_name): """True if we have read/write access to the file.""" - return os.access(file_name, os.R_OK|os.W_OK) + return os.access(file_name, os.R_OK | os.W_OK) def traverse_tree(source_root, dest_root, rel_path='', **kwargs): @@ -343,13 +348,14 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs): # Treat as a directory if os.path.isdir(source_child) and ( - follow_links or not os.path.islink(source_child)): + follow_links or not os.path.islink(source_child)): # When follow_nonexisting isn't set, don't descend into dirs # in source that do not exist in dest if follow_nonexisting or os.path.exists(dest_child): - tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs) - for t in tuples: yield t + tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs) # NOQA: ignore=E501 + for t in tuples: + yield t # Treat as a file. elif not ignore(os.path.join(rel_path, f)): @@ -379,6 +385,7 @@ def remove_dead_links(root): if not os.path.exists(real_path): os.unlink(path) + def remove_linked_tree(path): """ Removes a directory and its contents. If the directory is a @@ -402,28 +409,41 @@ def fix_darwin_install_name(path): Fix install name of dynamic libraries on Darwin to have full path. There are two parts of this task: (i) use install_name('-id',...) to change install name of a single lib; - (ii) use install_name('-change',...) to change the cross linking between libs. - The function assumes that all libraries are in one folder and currently won't - follow subfolders. + (ii) use install_name('-change',...) to change the cross linking between + libs. The function assumes that all libraries are in one folder and + currently won't follow subfolders. 
Args: path: directory in which .dylib files are alocated """ - libs = glob.glob(join_path(path,"*.dylib")) + libs = glob.glob(join_path(path, "*.dylib")) for lib in libs: # fix install name first: - subprocess.Popen(["install_name_tool", "-id",lib,lib], stdout=subprocess.PIPE).communicate()[0] - long_deps = subprocess.Popen(["otool", "-L",lib], stdout=subprocess.PIPE).communicate()[0].split('\n') + subprocess.Popen(["install_name_tool", "-id", lib, lib], stdout=subprocess.PIPE).communicate()[0] # NOQA: ignore=E501 + long_deps = subprocess.Popen(["otool", "-L", lib], stdout=subprocess.PIPE).communicate()[0].split('\n') # NOQA: ignore=E501 deps = [dep.partition(' ')[0][1::] for dep in long_deps[2:-1]] # fix all dependencies: for dep in deps: for loc in libs: if dep == os.path.basename(loc): - subprocess.Popen(["install_name_tool", "-change",dep,loc,lib], stdout=subprocess.PIPE).communicate()[0] + subprocess.Popen(["install_name_tool", "-change", dep, loc, lib], stdout=subprocess.PIPE).communicate()[0] # NOQA: ignore=E501 break +def to_link_flags(library): + """Transforms a path to a <library> into linking flags -L<dir> -l<name>. + + Return: + A string of linking flags. + """ + dir = os.path.dirname(library) + # Asume libXYZ.suffix + name = os.path.basename(library)[3:].split(".")[0] + res = '-L%s -l%s' % (dir, name) + return res + + def find_library_path(libname, *paths): """Searches for a file called <libname> in each path. diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py index c638b113fd..ee81e11a20 100644 --- a/lib/spack/llnl/util/tty/__init__.py +++ b/lib/spack/llnl/util/tty/__init__.py @@ -64,12 +64,14 @@ def info(message, *args, **kwargs): format = kwargs.get('format', '*b') stream = kwargs.get('stream', sys.stdout) wrap = kwargs.get('wrap', False) + break_long_words = kwargs.get('break_long_words', False) cprint("@%s{==>} %s" % (format, cescape(str(message))), stream=stream) for arg in args: if wrap: lines = textwrap.wrap( - str(arg), initial_indent=indent, subsequent_indent=indent) + str(arg), initial_indent=indent, subsequent_indent=indent, + break_long_words=break_long_words) for line in lines: stream.write(line + '\n') else: diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index 7c65091d49..3fcfb151b8 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -75,20 +75,19 @@ SPACK_NO_PARALLEL_MAKE = 'SPACK_NO_PARALLEL_MAKE' # set_build_environment_variables and used to pass parameters to # Spack's compiler wrappers. # -SPACK_ENV_PATH = 'SPACK_ENV_PATH' -SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES' -SPACK_PREFIX = 'SPACK_PREFIX' -SPACK_INSTALL = 'SPACK_INSTALL' -SPACK_DEBUG = 'SPACK_DEBUG' -SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC' -SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR' +SPACK_ENV_PATH = 'SPACK_ENV_PATH' +SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES' +SPACK_PREFIX = 'SPACK_PREFIX' +SPACK_INSTALL = 'SPACK_INSTALL' +SPACK_DEBUG = 'SPACK_DEBUG' +SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC' +SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR' # Platform-specific library suffix. dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so' - class MakeExecutable(Executable): """Special callable executable object for make so the user can specify parallel or not on a per-invocation basis. Using @@ -99,6 +98,7 @@ class MakeExecutable(Executable): Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides everything. 
""" + def __init__(self, name, jobs): super(MakeExecutable, self).__init__(name) self.jobs = jobs @@ -113,12 +113,13 @@ class MakeExecutable(Executable): return super(MakeExecutable, self).__call__(*args, **kwargs) + def load_module(mod): """Takes a module name and removes modules until it is possible to load that module. It then loads the provided module. Depends on the modulecmd implementation of modules used in cray and lmod. """ - #Create an executable of the module command that will output python code + # Create an executable of the module command that will output python code modulecmd = which('modulecmd') modulecmd.add_default_arg('python') @@ -129,11 +130,13 @@ def load_module(mod): text = modulecmd('show', mod, output=str, error=str).split() for i, word in enumerate(text): if word == 'conflict': - exec(compile(modulecmd('unload', text[i+1], output=str, error=str), '<string>', 'exec')) + exec(compile(modulecmd('unload', text[ + i + 1], output=str, error=str), '<string>', 'exec')) # Load the module now that there are no conflicts load = modulecmd('load', mod, output=str, error=str) exec(compile(load, '<string>', 'exec')) + def get_path_from_module(mod): """Inspects a TCL module for entries that indicate the absolute path at which the library supported by said module can be found. @@ -146,7 +149,7 @@ def get_path_from_module(mod): text = modulecmd('show', mod, output=str, error=str).split('\n') # If it lists its package directory, return that for line in text: - if line.find(mod.upper()+'_DIR') >= 0: + if line.find(mod.upper() + '_DIR') >= 0: words = line.split() return words[2] @@ -154,49 +157,52 @@ def get_path_from_module(mod): for line in text: rpath = line.find('-rpath/') if rpath >= 0: - return line[rpath+6:line.find('/lib')] + return line[rpath + 6:line.find('/lib')] # If it lists a -L instruction, use that for line in text: L = line.find('-L/') if L >= 0: - return line[L+2:line.find('/lib')] + return line[L + 2:line.find('/lib')] # If it sets the LD_LIBRARY_PATH or CRAY_LD_LIBRARY_PATH, use that for line in text: - if line.find('LD_LIBRARY_PATH') >= 0: + if line.find('LD_LIBRARY_PATH') >= 0: words = line.split() path = words[2] return path[:path.find('/lib')] # Unable to find module path return None + def set_compiler_environment_variables(pkg, env): assert(pkg.spec.concrete) compiler = pkg.compiler flags = pkg.spec.compiler_flags # Set compiler variables used by CMake and autotools - assert all(key in compiler.link_paths for key in ('cc', 'cxx', 'f77', 'fc')) + assert all(key in compiler.link_paths for key in ( + 'cc', 'cxx', 'f77', 'fc')) # Populate an object with the list of environment modifications # and return it - # TODO : add additional kwargs for better diagnostics, like requestor, ttyout, ttyerr, etc. + # TODO : add additional kwargs for better diagnostics, like requestor, + # ttyout, ttyerr, etc. 
link_dir = spack.build_env_path - env.set('CC', join_path(link_dir, compiler.link_paths['cc'])) - env.set('CXX', join_path(link_dir, compiler.link_paths['cxx'])) - env.set('F77', join_path(link_dir, compiler.link_paths['f77'])) - env.set('FC', join_path(link_dir, compiler.link_paths['fc'])) # Set SPACK compiler variables so that our wrapper knows what to call if compiler.cc: env.set('SPACK_CC', compiler.cc) + env.set('CC', join_path(link_dir, compiler.link_paths['cc'])) if compiler.cxx: env.set('SPACK_CXX', compiler.cxx) + env.set('CXX', join_path(link_dir, compiler.link_paths['cxx'])) if compiler.f77: env.set('SPACK_F77', compiler.f77) + env.set('F77', join_path(link_dir, compiler.link_paths['f77'])) if compiler.fc: env.set('SPACK_FC', compiler.fc) + env.set('FC', join_path(link_dir, compiler.link_paths['fc'])) # Set SPACK compiler rpath flags so that our wrapper knows what to use env.set('SPACK_CC_RPATH_ARG', compiler.cc_rpath_arg) @@ -233,7 +239,8 @@ def set_build_environment_variables(pkg, env): # handled by putting one in the <build_env_path>/case-insensitive # directory. Add that to the path too. env_paths = [] - for item in [spack.build_env_path, join_path(spack.build_env_path, pkg.compiler.name)]: + compiler_specific = join_path(spack.build_env_path, pkg.compiler.name) + for item in [spack.build_env_path, compiler_specific]: env_paths.append(item) ci = join_path(item, 'case-insensitive') if os.path.isdir(ci): @@ -246,7 +253,8 @@ def set_build_environment_variables(pkg, env): # Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES dep_prefixes = [d.prefix for d in pkg.spec.traverse(root=False)] env.set_path(SPACK_DEPENDENCIES, dep_prefixes) - env.set_path('CMAKE_PREFIX_PATH', dep_prefixes) # Add dependencies to CMAKE_PREFIX_PATH + # Add dependencies to CMAKE_PREFIX_PATH + env.set_path('CMAKE_PREFIX_PATH', dep_prefixes) # Install prefix env.set(SPACK_PREFIX, pkg.prefix) @@ -262,7 +270,8 @@ def set_build_environment_variables(pkg, env): env.unset('DYLD_LIBRARY_PATH') # Add bin directories from dependencies to the PATH for the build. - bin_dirs = reversed(filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes])) + bin_dirs = reversed( + filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes])) for item in bin_dirs: env.prepend_path('PATH', item) @@ -277,8 +286,8 @@ def set_build_environment_variables(pkg, env): for directory in ('lib', 'lib64', 'share'): pcdir = join_path(pre, directory, 'pkgconfig') if os.path.isdir(pcdir): - #pkg_config_dirs.append(pcdir) - env.prepend_path('PKG_CONFIG_PATH',pcdir) + # pkg_config_dirs.append(pcdir) + env.prepend_path('PKG_CONFIG_PATH', pcdir) if pkg.spec.architecture.target.module_name: load_module(pkg.spec.architecture.target.module_name) @@ -301,7 +310,7 @@ def set_module_variables_for_package(pkg, module): m.make_jobs = jobs # TODO: make these build deps that can be installed if not found. - m.make = MakeExecutable('make', jobs) + m.make = MakeExecutable('make', jobs) m.gmake = MakeExecutable('gmake', jobs) # easy shortcut to os.environ @@ -325,33 +334,34 @@ def set_module_variables_for_package(pkg, module): # Set up CMake rpath m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE') - m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(pkg))) + m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % + ":".join(get_rpaths(pkg))) # Put spack compiler paths in module scope. 
link_dir = spack.build_env_path - m.spack_cc = join_path(link_dir, pkg.compiler.link_paths['cc']) + m.spack_cc = join_path(link_dir, pkg.compiler.link_paths['cc']) m.spack_cxx = join_path(link_dir, pkg.compiler.link_paths['cxx']) m.spack_f77 = join_path(link_dir, pkg.compiler.link_paths['f77']) - m.spack_fc = join_path(link_dir, pkg.compiler.link_paths['fc']) + m.spack_fc = join_path(link_dir, pkg.compiler.link_paths['fc']) # Emulate some shell commands for convenience - m.pwd = os.getcwd - m.cd = os.chdir - m.mkdir = os.mkdir - m.makedirs = os.makedirs - m.remove = os.remove - m.removedirs = os.removedirs - m.symlink = os.symlink - - m.mkdirp = mkdirp - m.install = install + m.pwd = os.getcwd + m.cd = os.chdir + m.mkdir = os.mkdir + m.makedirs = os.makedirs + m.remove = os.remove + m.removedirs = os.removedirs + m.symlink = os.symlink + + m.mkdirp = mkdirp + m.install = install m.install_tree = install_tree - m.rmtree = shutil.rmtree - m.move = shutil.move + m.rmtree = shutil.rmtree + m.move = shutil.move # Useful directories within the prefix are encapsulated in # a Prefix object. - m.prefix = pkg.prefix + m.prefix = pkg.prefix # Platform-specific library suffix. m.dso_suffix = dso_suffix @@ -365,20 +375,22 @@ def get_rpaths(pkg): rpaths.extend(d.prefix.lib64 for d in pkg.spec.dependencies.values() if os.path.isdir(d.prefix.lib64)) # Second module is our compiler mod name. We use that to get rpaths from - # module show output. + # module show output. if pkg.compiler.modules and len(pkg.compiler.modules) > 1: rpaths.append(get_path_from_module(pkg.compiler.modules[1])) return rpaths def parent_class_modules(cls): - """Get list of super class modules that are all descend from spack.Package""" + """ + Get list of super class modules that are all descend from spack.Package + """ if not issubclass(cls, spack.Package) or issubclass(spack.Package, cls): return [] result = [] module = sys.modules.get(cls.__module__) if module: - result = [ module ] + result = [module] for c in cls.__bases__: result.extend(parent_class_modules(c)) return result @@ -390,11 +402,12 @@ def load_external_modules(pkg): for dep in list(pkg.spec.traverse()): if dep.external_module: load_module(dep.external_module) - + + def setup_package(pkg): """Execute all environment setup routines.""" spack_env = EnvironmentModifications() - run_env = EnvironmentModifications() + run_env = EnvironmentModifications() # Before proceeding, ensure that specs and packages are consistent # @@ -410,7 +423,8 @@ def setup_package(pkg): # throwaway environment, but it is kind of dirty. # # TODO: Think about how to avoid this fix and do something cleaner. - for s in pkg.spec.traverse(): s.package.spec = s + for s in pkg.spec.traverse(): + s.package.spec = s set_compiler_environment_variables(pkg, spack_env) set_build_environment_variables(pkg, spack_env) @@ -498,7 +512,9 @@ def fork(pkg, function): # message. Just make the parent exit with an error code. 
pid, returncode = os.waitpid(pid, 0) if returncode != 0: - raise InstallError("Installation process had nonzero exit code.".format(str(returncode))) + message = "Installation process had nonzero exit code : {code}" + strcode = str(returncode) + raise InstallError(message.format(code=strcode)) class InstallError(spack.error.SpackError): diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py index 030aa77c30..c95045ef85 100644 --- a/lib/spack/spack/cmd/compiler.py +++ b/lib/spack/spack/cmd/compiler.py @@ -69,7 +69,7 @@ def setup_parser(subparser): help="Configuration scope to read from.") -def compiler_add(args): +def compiler_find(args): """Search either $PATH or a list of paths OR MODULES for compilers and add them to Spack's configuration.""" paths = args.add_paths @@ -78,7 +78,6 @@ def compiler_add(args): compilers = [c for c in spack.compilers.find_compilers(*args.add_paths) if c.spec not in spack.compilers.all_compilers(scope=args.scope)] - if compilers: spack.compilers.add_compilers_to_config(compilers, scope=args.scope) n = len(compilers) @@ -93,7 +92,6 @@ def compiler_add(args): def compiler_remove(args): cspec = CompilerSpec(args.compiler_spec) compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope) - if not compilers: tty.die("No compilers match spec %s" % cspec) elif not args.all and len(compilers) > 1: @@ -137,9 +135,10 @@ def compiler_list(args): def compiler(parser, args): - action = { 'add' : compiler_add, - 'remove' : compiler_remove, - 'rm' : compiler_remove, - 'info' : compiler_info, - 'list' : compiler_list } + action = {'add' : compiler_find, + 'find' : compiler_find, + 'remove' : compiler_remove, + 'rm' : compiler_remove, + 'info' : compiler_info, + 'list' : compiler_list } action[args.compiler_command](args) diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py index 4b546c2cbf..ae72b743b2 100644 --- a/lib/spack/spack/compilers/__init__.py +++ b/lib/spack/spack/compilers/__init__.py @@ -52,6 +52,7 @@ from spack.util.environment import get_path _imported_compilers_module = 'spack.compilers' _path_instance_vars = ['cc', 'cxx', 'f77', 'fc'] _other_instance_vars = ['modules', 'operating_system'] +_cache_config_file = [] # TODO: customize order in config file if platform.system() == 'Darwin': @@ -79,9 +80,7 @@ def _to_dict(compiler): if compiler.alias: d['alias'] = compiler.alias - return { - 'compiler': d - } + return {'compiler': d} def get_compiler_config(scope=None): @@ -99,7 +98,6 @@ def get_compiler_config(scope=None): # Update the configuration if there are currently no compilers # configured. Avoid updating automatically if there ARE site # compilers configured but no user ones. -# if (isinstance(arch, basestring) or arch == my_arch) and arch not in config: if not config: if scope is None: # We know no compilers were configured in any scope. @@ -112,8 +110,11 @@ def get_compiler_config(scope=None): if not site_config: init_compiler_config() config = spack.config.get_config('compilers', scope=scope) - - return config + return config + elif config: + return config + else: + return [] # Return empty list which we will later append to. 
def add_compilers_to_config(compilers, scope=None): @@ -126,7 +127,8 @@ def add_compilers_to_config(compilers, scope=None): compiler_config = get_compiler_config(scope) for compiler in compilers: compiler_config.append(_to_dict(compiler)) - + global _cache_config_file + _cache_config_file = compiler_config spack.config.update_config('compilers', compiler_config, scope) @@ -139,15 +141,17 @@ def remove_compiler_from_config(compiler_spec, scope=None): - scope: configuration scope to modify. """ compiler_config = get_compiler_config(scope) - matches = [(a,c) for (a,c) in compiler_config.items() if c['spec'] == compiler_spec] - if len(matches) == 1: - del compiler_config[matches[0][0]] - else: + config_length = len(compiler_config) + + filtered_compiler_config = [comp for comp in compiler_config + if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec] + # Need a better way for this + global _cache_config_file + _cache_config_file = filtered_compiler_config # Update the cache for changes + if len(filtered_compiler_config) == config_length: # No items removed CompilerSpecInsufficientlySpecificError(compiler_spec) + spack.config.update_config('compilers', filtered_compiler_config, scope) - spack.config.update_config('compilers', compiler_config, scope) - -_cache_config_file = {} def all_compilers_config(scope=None): """Return a set of specs for all the compiler versions currently @@ -155,14 +159,13 @@ def all_compilers_config(scope=None): """ # Get compilers for this architecture. global _cache_config_file #Create a cache of the config file so we don't load all the time. - if not _cache_config_file: _cache_config_file = get_compiler_config(scope) return _cache_config_file - else: return _cache_config_file + def all_compilers(scope=None): # Return compiler specs from the merged config. return [spack.spec.CompilerSpec(s['compiler']['spec']) @@ -181,7 +184,7 @@ def default_compiler(): return sorted(versions)[-1] -def find_compilers(): +def find_compilers(*paths): """Return a list of compilers found in the suppied paths. This invokes the find_compilers() method for each operating system associated with the host platform, and appends @@ -190,11 +193,11 @@ def find_compilers(): # Find compilers for each operating system class oss = all_os_classes() compiler_lists = [] - for os in oss: - compiler_lists.extend(os.find_compilers()) - + for o in oss: + compiler_lists.extend(o.find_compilers(*paths)) return compiler_lists + def supported_compilers(): """Return a set of names of compilers supported by Spack. diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 38bb7541e0..f941346bb1 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -311,7 +311,11 @@ class Database(object): for spec in directory_layout.all_specs(): # Create a spec for each known package and add it. path = directory_layout.path_for_spec(spec) - self._add(spec, path, directory_layout) + old_info = old_data.get(spec.dag_hash()) + explicit = False + if old_info is not None: + explicit = old_info.explicit + self._add(spec, path, directory_layout, explicit=explicit) self._check_ref_counts() diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index af642dcc9b..30c6228ca4 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -1,4 +1,4 @@ -############################################################################## +# # Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. 
# Produced at the Lawrence Livermore National Laboratory. # @@ -21,14 +21,17 @@ # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## +# import collections import inspect +import json import os import os.path +import subprocess class NameModifier(object): + def __init__(self, name, **kwargs): self.name = name self.args = {'name': name} @@ -36,6 +39,7 @@ class NameModifier(object): class NameValueModifier(object): + def __init__(self, name, value, **kwargs): self.name = name self.value = value @@ -45,23 +49,27 @@ class NameValueModifier(object): class SetEnv(NameValueModifier): + def execute(self): os.environ[self.name] = str(self.value) class UnsetEnv(NameModifier): + def execute(self): # Avoid throwing if the variable was not set os.environ.pop(self.name, None) class SetPath(NameValueModifier): + def execute(self): string_path = concatenate_paths(self.value, separator=self.separator) os.environ[self.name] = string_path class AppendPath(NameValueModifier): + def execute(self): environment_value = os.environ.get(self.name, '') directories = environment_value.split( @@ -71,6 +79,7 @@ class AppendPath(NameValueModifier): class PrependPath(NameValueModifier): + def execute(self): environment_value = os.environ.get(self.name, '') directories = environment_value.split( @@ -80,6 +89,7 @@ class PrependPath(NameValueModifier): class RemovePath(NameValueModifier): + def execute(self): environment_value = os.environ.get(self.name, '') directories = environment_value.split( @@ -90,6 +100,7 @@ class RemovePath(NameValueModifier): class EnvironmentModifications(object): + """ Keeps track of requests to modify the current environment. 
@@ -240,6 +251,126 @@ class EnvironmentModifications(object): for x in actions: x.execute() + @staticmethod + def from_sourcing_files(*args, **kwargs): + """ + Creates an instance of EnvironmentModifications that, if executed, + has the same effect on the environment as sourcing the files passed as + parameters + + Args: + *args: list of files to be sourced + + Returns: + instance of EnvironmentModifications + """ + env = EnvironmentModifications() + # Check if the files are actually there + if not all(os.path.isfile(file) for file in args): + raise RuntimeError('trying to source non-existing files') + # Relevant kwd parameters and formats + info = dict(kwargs) + info.setdefault('shell', '/bin/bash') + info.setdefault('shell_options', '-c') + info.setdefault('source_command', 'source') + info.setdefault('suppress_output', '&> /dev/null') + info.setdefault('concatenate_on_success', '&&') + + shell = '{shell}'.format(**info) + shell_options = '{shell_options}'.format(**info) + source_file = '{source_command} {file} {concatenate_on_success}' + dump_environment = 'python -c "import os, json; print json.dumps(dict(os.environ))"' # NOQA: ignore=E501 + # Construct the command that will be executed + command = [source_file.format(file=file, **info) for file in args] + command.append(dump_environment) + command = ' '.join(command) + command = [ + shell, + shell_options, + command + ] + + # Try to source all the files, + proc = subprocess.Popen( + command, stdout=subprocess.PIPE, env=os.environ) + proc.wait() + if proc.returncode != 0: + raise RuntimeError('sourcing files returned a non-zero exit code') + output = ''.join([line for line in proc.stdout]) + # Construct a dictionary with all the variables in the new environment + after_source_env = dict(json.loads(output)) + this_environment = dict(os.environ) + + # Filter variables that are not related to sourcing a file + to_be_filtered = 'SHLVL', '_', 'PWD', 'OLDPWD' + for d in after_source_env, this_environment: + for name in to_be_filtered: + d.pop(name, None) + + # Fill the EnvironmentModifications instance + + # New variables + new_variables = set(after_source_env) - set(this_environment) + for x in new_variables: + env.set(x, after_source_env[x]) + # Variables that have been unset + unset_variables = set(this_environment) - set(after_source_env) + for x in unset_variables: + env.unset(x) + # Variables that have been modified + common_variables = set(this_environment).intersection(set(after_source_env)) # NOQA: ignore=E501 + modified_variables = [x for x in common_variables if this_environment[x] != after_source_env[x]] # NOQA: ignore=E501 + + def return_separator_if_any(first_value, second_value): + separators = ':', ';' + for separator in separators: + if separator in first_value and separator in second_value: + return separator + return None + + for x in modified_variables: + current = this_environment[x] + modified = after_source_env[x] + sep = return_separator_if_any(current, modified) + if sep is None: + # We just need to set the variable to the new value + env.set(x, after_source_env[x]) + else: + current_list = current.split(sep) + modified_list = modified.split(sep) + # Paths that have been removed + remove_list = [ + ii for ii in current_list if ii not in modified_list] + # Check that nothing has been added in the middle of vurrent + # list + remaining_list = [ + ii for ii in current_list if ii in modified_list] + start = modified_list.index(remaining_list[0]) + end = modified_list.index(remaining_list[-1]) + search = 
sep.join(modified_list[start:end + 1]) + if search not in current: + # We just need to set the variable to the new value + env.set(x, after_source_env[x]) + break + else: + try: + prepend_list = modified_list[:start] + except KeyError: + prepend_list = [] + try: + append_list = modified_list[end + 1:] + except KeyError: + append_list = [] + + for item in remove_list: + env.remove_path(x, item) + for item in append_list: + env.append_path(x, item) + for item in prepend_list: + env.prepend_path(x, item) + + return env + def concatenate_paths(paths, separator=':'): """ diff --git a/lib/spack/spack/hooks/licensing.py b/lib/spack/spack/hooks/licensing.py index 0f63b0e05a..9010b84154 100644 --- a/lib/spack/spack/hooks/licensing.py +++ b/lib/spack/spack/hooks/licensing.py @@ -26,7 +26,7 @@ import os import spack import llnl.util.tty as tty -from llnl.util.filesystem import join_path +from llnl.util.filesystem import join_path, mkdirp def pre_install(pkg): @@ -154,6 +154,9 @@ def symlink_license(pkg): target = pkg.global_license_file for filename in pkg.license_files: link_name = join_path(pkg.prefix, filename) + license_dir = os.path.dirname(link_name) + if not os.path.exists(license_dir): + mkdirp(license_dir) if os.path.exists(target): os.symlink(target, link_name) tty.msg("Added local symlink %s to global license file" % diff --git a/lib/spack/spack/hooks/sbang.py b/lib/spack/spack/hooks/sbang.py index 83d67ea225..cb0ad42b14 100644 --- a/lib/spack/spack/hooks/sbang.py +++ b/lib/spack/spack/hooks/sbang.py @@ -24,7 +24,6 @@ ############################################################################## import os -from llnl.util.filesystem import * import llnl.util.tty as tty import spack @@ -34,6 +33,7 @@ import spack.modules # here, as it is the shortest I could find on a modern OS. 
shebang_limit = 127 + def shebang_too_long(path): """Detects whether a file has a shebang line that is too long.""" with open(path, 'r') as script: @@ -57,16 +57,10 @@ def filter_shebang(path): if original.startswith(new_sbang_line): return - backup = path + ".shebang.bak" - os.rename(path, backup) - with open(path, 'w') as new_file: new_file.write(new_sbang_line) new_file.write(original) - copy_mode(backup, path) - unset_executable_mode(backup) - tty.warn("Patched overly long shebang in %s" % path) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 1a6c289bc7..98fd51b262 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -398,13 +398,19 @@ class Package(object): spack.repo.get(self.extendee_spec)._check_extendable() @property + def global_license_dir(self): + """Returns the directory where global license files for all + packages are stored.""" + spack_root = ancestor(__file__, 4) + return join_path(spack_root, 'etc', 'spack', 'licenses') + + @property def global_license_file(self): - """Returns the path where a global license file should be stored.""" + """Returns the path where a global license file for this + particular package should be stored.""" if not self.license_files: return - spack_root = ancestor(__file__, 4) - global_license_dir = join_path(spack_root, 'etc', 'spack', 'licenses') - return join_path(global_license_dir, self.name, + return join_path(self.global_license_dir, self.name, os.path.basename(self.license_files[0])) @property diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 97f142e746..fb91f24721 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -40,7 +40,7 @@ test_names = ['architecture', 'versions', 'url_parse', 'url_substitution', 'pack 'cc', 'link_tree', 'spec_yaml', 'optional_deps', 'make_executable', 'configure_guess', 'lock', 'database', 'namespace_trie', 'yaml', 'sbang', 'environment', 'cmd.find', - 'cmd.uninstall', 'cmd.test_install'] + 'cmd.uninstall', 'cmd.test_install', 'cmd.test_compiler_cmd'] def list_tests(): diff --git a/lib/spack/spack/test/cmd/test_compiler_cmd.py b/lib/spack/spack/test/cmd/test_compiler_cmd.py new file mode 100644 index 0000000000..d89814154b --- /dev/null +++ b/lib/spack/spack/test/cmd/test_compiler_cmd.py @@ -0,0 +1,81 @@ +import os +import shutil +from tempfile import mkdtemp + +from llnl.util.filesystem import set_executable, mkdirp + +import spack.spec +import spack.cmd.compiler +import spack.compilers +from spack.version import Version +from spack.test.mock_packages_test import * + +test_version = '4.5-spacktest' + +class MockArgs(object): + def __init__(self, add_paths=[], scope=None, compiler_spec=None, all=None): + self.add_paths = add_paths + self.scope = scope + self.compiler_spec = compiler_spec + self.all = all + + +def make_mock_compiler(): + """Make a directory containing a fake, but detectable compiler.""" + mock_compiler_dir = mkdtemp() + bin_dir = os.path.join(mock_compiler_dir, 'bin') + mkdirp(bin_dir) + + gcc_path = os.path.join(bin_dir, 'gcc') + gxx_path = os.path.join(bin_dir, 'g++') + gfortran_path = os.path.join(bin_dir, 'gfortran') + + with open(gcc_path, 'w') as f: + f.write("""\ +#!/bin/sh + +for arg in "$@"; do + if [ "$arg" = -dumpversion ]; then + echo '%s' + fi +done +""" % test_version) + + # Create some mock compilers in the temporary directory + set_executable(gcc_path) + shutil.copy(gcc_path, gxx_path) + shutil.copy(gcc_path, gfortran_path) + + return mock_compiler_dir + + 
+class CompilerCmdTest(MockPackagesTest): + """ Test compiler commands for add and remove """ + + + def test_compiler_remove(self): + args = MockArgs(all=True, compiler_spec='gcc@4.5.0') + spack.cmd.compiler.compiler_remove(args) + compilers = spack.compilers.all_compilers() + self.assertTrue(spack.spec.CompilerSpec("gcc@4.5.0") not in compilers) + + + def test_compiler_add(self): + # compilers available by default. + old_compilers = set(spack.compilers.all_compilers()) + + # add our new compiler and find again. + compiler_dir = make_mock_compiler() + + try: + args = MockArgs(add_paths=[compiler_dir]) + spack.cmd.compiler.compiler_find(args) + + # ensure new compiler is in there + new_compilers = set(spack.compilers.all_compilers()) + new_compiler = new_compilers - old_compilers + self.assertTrue(new_compiler) + self.assertTrue(new_compiler.pop().version == Version(test_version)) + + finally: + shutil.rmtree(compiler_dir, ignore_errors=True) diff --git a/lib/spack/spack/test/data/sourceme_first.sh b/lib/spack/spack/test/data/sourceme_first.sh new file mode 100644 index 0000000000..800f639ac8 --- /dev/null +++ b/lib/spack/spack/test/data/sourceme_first.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash +export NEW_VAR='new' +export UNSET_ME='overridden' diff --git a/lib/spack/spack/test/data/sourceme_second.sh b/lib/spack/spack/test/data/sourceme_second.sh new file mode 100644 index 0000000000..9955a0e6d6 --- /dev/null +++ b/lib/spack/spack/test/data/sourceme_second.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash +export PATH_LIST='/path/first:/path/second:/path/fourth' +unset EMPTY_PATH_LIST
\ No newline at end of file diff --git a/lib/spack/spack/test/environment.py b/lib/spack/spack/test/environment.py index a0d959db2f..f3644cb0b7 100644 --- a/lib/spack/spack/test/environment.py +++ b/lib/spack/spack/test/environment.py @@ -24,16 +24,21 @@ ############################################################################## import unittest import os -import copy + +from spack import spack_root +from llnl.util.filesystem import join_path from spack.environment import EnvironmentModifications +from spack.environment import SetEnv, UnsetEnv +from spack.environment import RemovePath, PrependPath, AppendPath class EnvironmentTest(unittest.TestCase): + def setUp(self): os.environ['UNSET_ME'] = 'foo' os.environ['EMPTY_PATH_LIST'] = '' os.environ['PATH_LIST'] = '/path/second:/path/third' - os.environ['REMOVE_PATH_LIST'] = '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g' + os.environ['REMOVE_PATH_LIST'] = '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g' # NOQA: ignore=E501 def tearDown(self): pass @@ -77,9 +82,18 @@ class EnvironmentTest(unittest.TestCase): env.remove_path('REMOVE_PATH_LIST', '/duplicate/') env.apply_modifications() - self.assertEqual('/path/first:/path/second:/path/third:/path/last', os.environ['PATH_LIST']) - self.assertEqual('/path/first:/path/middle:/path/last', os.environ['EMPTY_PATH_LIST']) - self.assertEqual('/path/first:/path/middle:/path/last', os.environ['NEWLY_CREATED_PATH_LIST']) + self.assertEqual( + '/path/first:/path/second:/path/third:/path/last', + os.environ['PATH_LIST'] + ) + self.assertEqual( + '/path/first:/path/middle:/path/last', + os.environ['EMPTY_PATH_LIST'] + ) + self.assertEqual( + '/path/first:/path/middle:/path/last', + os.environ['NEWLY_CREATED_PATH_LIST'] + ) self.assertEqual('/a/b:/a/c:/a/d:/f/g', os.environ['REMOVE_PATH_LIST']) def test_extra_arguments(self): @@ -98,3 +112,41 @@ class EnvironmentTest(unittest.TestCase): self.assertEqual(len(copy_construct), 2) for x, y in zip(env, copy_construct): assert x is y + + def test_source_files(self): + datadir = join_path(spack_root, 'lib', 'spack', + 'spack', 'test', 'data') + files = [ + join_path(datadir, 'sourceme_first.sh'), + join_path(datadir, 'sourceme_second.sh') + ] + env = EnvironmentModifications.from_sourcing_files(*files) + modifications = env.group_by_name() + + self.assertEqual(len(modifications), 4) + # Set new variables + self.assertEqual(len(modifications['NEW_VAR']), 1) + self.assertTrue(isinstance(modifications['NEW_VAR'][0], SetEnv)) + self.assertEqual(modifications['NEW_VAR'][0].value, 'new') + # Unset variables + self.assertEqual(len(modifications['EMPTY_PATH_LIST']), 1) + self.assertTrue(isinstance( + modifications['EMPTY_PATH_LIST'][0], UnsetEnv)) + # Modified variables + self.assertEqual(len(modifications['UNSET_ME']), 1) + self.assertTrue(isinstance(modifications['UNSET_ME'][0], SetEnv)) + self.assertEqual(modifications['UNSET_ME'][0].value, 'overridden') + + self.assertEqual(len(modifications['PATH_LIST']), 3) + self.assertTrue( + isinstance(modifications['PATH_LIST'][0], RemovePath) + ) + self.assertEqual(modifications['PATH_LIST'][0].value, '/path/third') + self.assertTrue( + isinstance(modifications['PATH_LIST'][1], AppendPath) + ) + self.assertEqual(modifications['PATH_LIST'][1].value, '/path/fourth') + self.assertTrue( + isinstance(modifications['PATH_LIST'][2], PrependPath) + ) + self.assertEqual(modifications['PATH_LIST'][2].value, '/path/first') |
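
The packaging-guide hunk above documents the ``spec['blas'].blas_shared_lib`` / ``blas_static_lib`` convention (with equivalents for Lapack providers). A minimal sketch of a consumer package follows; it is illustrative only — the package name, configure flags, and the ``lapack_shared_lib`` attribute name are assumptions, not taken verbatim from the diff, while the build commands follow the usual Spack package idiom.

```python
# Hypothetical consumer package illustrating the documented convention:
# read the library location from the provider instead of hard-coding
# something like join_path(spec['blas'].prefix.lib, 'libopenblas.so').
from spack import *  # Spack package DSL (Package, depends_on, configure, make, ...)


class ExampleSolver(Package):  # hypothetical package name
    """Toy package that links against whichever Blas/Lapack was chosen."""
    # version()/url() declarations omitted for brevity

    depends_on('blas')
    depends_on('lapack')

    def install(self, spec, prefix):
        configure('--prefix=%s' % prefix,
                  '--with-blas=%s' % spec['blas'].blas_shared_lib,
                  '--with-lapack=%s' % spec['lapack'].lapack_shared_lib)
        make()
        make('install')
```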
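
The largest addition, ``EnvironmentModifications.from_sourcing_files`` in ``lib/spack/spack/environment.py``, sources the given shell files in a bash subshell and diffs the resulting environment against the current one, recording the set/unset/append/prepend/remove operations needed to reproduce the change. A minimal usage sketch, assuming a Spack checkout on ``PYTHONPATH``; the two script paths are hypothetical, and only ``from_sourcing_files``, ``group_by_name``, and ``apply_modifications`` are taken from the diff.

```python
# Minimal sketch of the new helper added in spack.environment.
from spack.environment import EnvironmentModifications

# Source both files in a subshell and record the environment changes
# they would make (new, unset, and modified variables).
env = EnvironmentModifications.from_sourcing_files(
    '/path/to/setup_compiler.sh',   # hypothetical path
    '/path/to/setup_mpi.sh')        # hypothetical path

# Inspect what was detected, grouped by variable name...
for name, modifications in env.group_by_name().items():
    print("%s: %d modification(s)" % (name, len(modifications)))

# ...and replay the recorded changes onto os.environ of this process.
env.apply_modifications()
```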