-rw-r--r--  etc/spack/modules.yaml            |  10
-rw-r--r--  lib/spack/spack/cmd/module.py     |  17
-rw-r--r--  lib/spack/spack/config.py         | 297
-rw-r--r--  lib/spack/spack/environment.py    |  65
-rw-r--r--  lib/spack/spack/modules.py        | 478
-rw-r--r--  lib/spack/spack/test/__init__.py  |  68
-rw-r--r--  lib/spack/spack/test/modules.py   | 157
-rwxr-xr-x  share/spack/setup-env.sh          |   6
8 files changed, 835 insertions, 263 deletions
diff --git a/etc/spack/modules.yaml b/etc/spack/modules.yaml
index aa2a2c3fe2..8f8f88e908 100644
--- a/etc/spack/modules.yaml
+++ b/etc/spack/modules.yaml
@@ -5,4 +5,14 @@
# although users can override these settings in their ~/.spack/modules.yaml.
# -------------------------------------------------------------------------
modules:
+ prefix_inspections: {
+ bin: ['PATH'],
+ man: ['MANPATH'],
+ lib: ['LIBRARY_PATH', 'LD_LIBRARY_PATH'],
+ lib64: ['LIBRARY_PATH', 'LD_LIBRARY_PATH'],
+ include: ['CPATH'],
+ lib/pkgconfig: ['PKGCONFIG'],
+ lib64/pkgconfig: ['PKGCONFIG'],
+ '': ['CMAKE_PREFIX_PATH']
+ }
enable: ['tcl', 'dotkit']
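
The prefix_inspections table added here replaces the dictionary that used to be hard-coded in lib/spack/spack/modules.py; inspect_path now reads it from the 'modules' config section and prepends each listed variable for every subdirectory that actually exists under an install prefix. A minimal sketch of that lookup, assuming a hypothetical install prefix (this is just the core loop, not the real Spack entry point):

    import os.path

    # Illustrative only: mirrors the new loop in spack.modules.inspect_path,
    # applied to a hypothetical prefix and a subset of the defaults above.
    prefix = '/opt/spack/linux-x86_64/zlib-1.2.8'       # hypothetical path
    prefix_inspections = {
        'bin': ['PATH'],
        'lib64': ['LIBRARY_PATH', 'LD_LIBRARY_PATH'],
        '': ['CMAKE_PREFIX_PATH'],                      # '' means the prefix itself
    }

    for relative_path, variables in prefix_inspections.items():
        expected = os.path.join(prefix, relative_path)
        if os.path.isdir(expected):                     # only existing directories count
            for variable in variables:
                print('prepend-path %s %s' % (variable, expected))
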
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py
index a67f5c0c13..cfe59c8d98 100644
--- a/lib/spack/spack/cmd/module.py
+++ b/lib/spack/spack/cmd/module.py
@@ -32,18 +32,21 @@ from llnl.util.filesystem import mkdirp
from spack.modules import module_types
from spack.util.string import *
-description ="Manipulate modules and dotkits."
+description = "Manipulate modules and dotkits."
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command')
- refresh_parser = sp.add_parser('refresh', help='Regenerate all module files.')
+ sp.add_parser('refresh', help='Regenerate all module files.')
find_parser = sp.add_parser('find', help='Find module files for packages.')
- find_parser.add_argument(
- 'module_type', help="Type of module to find file for. [" + '|'.join(module_types) + "]")
- find_parser.add_argument('spec', nargs='+', help='spec to find a module file for.')
+ find_parser.add_argument('module_type',
+ help="Type of module to find file for. [" +
+ '|'.join(module_types) + "]")
+ find_parser.add_argument('spec',
+ nargs='+',
+ help='spec to find a module file for.')
def module_find(mtype, spec_array):
@@ -53,7 +56,8 @@ def module_find(mtype, spec_array):
should type to use that package's module.
"""
if mtype not in module_types:
- tty.die("Invalid module type: '%s'. Options are %s" % (mtype, comma_or(module_types)))
+ tty.die("Invalid module type: '%s'. Options are %s" %
+ (mtype, comma_or(module_types)))
specs = spack.cmd.parse_specs(spec_array)
if len(specs) > 1:
@@ -89,7 +93,6 @@ def module_refresh():
shutil.rmtree(cls.path, ignore_errors=False)
mkdirp(cls.path)
for spec in specs:
- tty.debug(" Writing file for %s" % spec)
cls(spec).write()
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 336d47cbb7..684a420b3b 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -117,27 +117,23 @@ Will make Spack take compilers *only* from the user configuration, and
the site configuration will be ignored.
"""
+import copy
import os
import re
import sys
-import copy
-import jsonschema
-from jsonschema import Draft4Validator, validators
-import yaml
-from yaml.error import MarkedYAMLError
-from ordereddict_backport import OrderedDict
+import jsonschema
import llnl.util.tty as tty
-from llnl.util.filesystem import mkdirp
-import copy
-
import spack
+import yaml
+from jsonschema import Draft4Validator, validators
+from llnl.util.filesystem import mkdirp
+from ordereddict_backport import OrderedDict
from spack.error import SpackError
+from yaml.error import MarkedYAMLError
# Hacked yaml for configuration files preserves line numbers.
import spack.util.spack_yaml as syaml
-
-
"""Dict from section names -> schema for that section."""
section_schemas = {
'compilers': {
@@ -146,30 +142,36 @@ section_schemas = {
'type': 'object',
'additionalProperties': False,
'patternProperties': {
- 'compilers:?': { # optional colon for overriding site config.
+ 'compilers:?': { # optional colon for overriding site config.
'type': 'object',
'default': {},
'additionalProperties': False,
'patternProperties': {
- r'\w[\w-]*': { # architecture
+ r'\w[\w-]*': { # architecture
'type': 'object',
'additionalProperties': False,
'patternProperties': {
- r'\w[\w-]*@\w[\w-]*': { # compiler spec
+ r'\w[\w-]*@\w[\w-]*': { # compiler spec
'type': 'object',
'additionalProperties': False,
'required': ['cc', 'cxx', 'f77', 'fc'],
'properties': {
- 'cc': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- 'cxx': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- 'f77': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- 'fc': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- },},},},},},},},
-
+ 'cc': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'cxx': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'f77': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'fc': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
'mirrors': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack mirror configuration file schema',
@@ -182,8 +184,12 @@ section_schemas = {
'additionalProperties': False,
'patternProperties': {
r'\w[\w-]*': {
- 'type': 'string'},},},},},
-
+ 'type': 'string'
+ },
+ },
+ },
+ },
+ },
'repos': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack repository configuration file schema',
@@ -194,7 +200,11 @@ section_schemas = {
'type': 'array',
'default': [],
'items': {
- 'type': 'string'},},},},
+ 'type': 'string'
+ },
+ },
+ },
+ },
'packages': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack package configuration file schema',
@@ -206,62 +216,179 @@ section_schemas = {
'default': {},
'additionalProperties': False,
'patternProperties': {
- r'\w[\w-]*': { # package name
+ r'\w[\w-]*': { # package name
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
'version': {
- 'type' : 'array',
- 'default' : [],
- 'items' : { 'anyOf' : [ { 'type' : 'string' },
- { 'type' : 'number'}]}}, #version strings
+ 'type': 'array',
+ 'default': [],
+ 'items': {'anyOf': [{'type': 'string'},
+ {'type': 'number'}]}
+ }, # version strings
'compiler': {
- 'type' : 'array',
- 'default' : [],
- 'items' : { 'type' : 'string' } }, #compiler specs
+ 'type': 'array',
+ 'default': [],
+ 'items': {'type': 'string'}
+ }, # compiler specs
'buildable': {
- 'type': 'boolean',
+ 'type': 'boolean',
'default': True,
- },
+ },
'providers': {
- 'type': 'object',
+ 'type': 'object',
'default': {},
'additionalProperties': False,
'patternProperties': {
r'\w[\w-]*': {
- 'type' : 'array',
- 'default' : [],
- 'items' : { 'type' : 'string' },},},},
+ 'type': 'array',
+ 'default': [],
+ 'items': {'type': 'string'},
+ },
+ },
+ },
'paths': {
- 'type' : 'object',
- 'default' : {},
+ 'type': 'object',
+ 'default': {},
}
- },},},},},},
+ },
+ },
+ },
+ },
+ },
+ },
'modules': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack module file configuration file schema',
'type': 'object',
'additionalProperties': False,
+ 'definitions': {
+ 'array_of_strings': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {
+ 'type': 'string'
+ }
+ },
+ 'dictionary_of_strings': {
+ 'type': 'object',
+ 'patternProperties': {
+ r'\w[\w-]*': { # key
+ 'type': 'string'
+ }
+ }
+ },
+ 'dependency_selection': {
+ 'type': 'string',
+ 'enum': ['none', 'direct', 'all']
+ },
+ 'module_file_configuration': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'filter': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'environment_blacklist': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {
+ 'type': 'string'
+ }
+ }
+ }
+ },
+ 'autoload': {'$ref': '#/definitions/dependency_selection'},
+ 'prerequisites':
+ {'$ref': '#/definitions/dependency_selection'},
+ 'conflict': {'$ref': '#/definitions/array_of_strings'},
+ 'environment': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'set':
+ {'$ref': '#/definitions/dictionary_of_strings'},
+ 'unset':
+ {'$ref': '#/definitions/array_of_strings'},
+ 'prepend_path':
+ {'$ref': '#/definitions/dictionary_of_strings'},
+ 'append_path':
+ {'$ref': '#/definitions/dictionary_of_strings'}
+ }
+ }
+ }
+ },
+ 'module_type_configuration': {
+ 'type': 'object',
+ 'default': {},
+ 'anyOf': [
+ {
+ 'properties': {
+ 'whitelist':
+ {'$ref': '#/definitions/array_of_strings'},
+ 'blacklist':
+ {'$ref': '#/definitions/array_of_strings'},
+ 'naming_scheme': {
+ 'type':
+ 'string' # Can we be more specific here?
+ }
+ }
+ },
+ {
+ 'patternProperties':
+ {r'\w[\w-]*':
+ {'$ref': '#/definitions/module_file_configuration'}}
+ }
+ ]
+ }
+ },
'patternProperties': {
r'modules:?': {
'type': 'object',
'default': {},
'additionalProperties': False,
'properties': {
+ 'prefix_inspections': {
+ 'type': 'object',
+ 'patternProperties': {
+ r'\w[\w-]*':
+ { # path to be inspected (relative to prefix)
+ '$ref': '#/definitions/array_of_strings'
+ }
+ }
+ },
'enable': {
'type': 'array',
'default': [],
'items': {
- 'type': 'string'
+ 'type': 'string',
+ 'enum': ['tcl', 'dotkit']
}
- }
+ },
+ 'tcl': {
+ 'allOf': [
+ {'$ref': '#/definitions/module_type_configuration'
+ }, # Base configuration
+ {} # Specific tcl extensions
+ ]
+ },
+ 'dotkit': {
+ 'allOf': [
+ {'$ref': '#/definitions/module_type_configuration'
+ }, # Base configuration
+ {} # Specific dotkit extensions
+ ]
+ },
}
},
},
},
}
-
"""OrderedDict of config scopes keyed by name.
Later scopes will override earlier scopes.
"""
@@ -271,12 +398,13 @@ config_scopes = OrderedDict()
def validate_section_name(section):
"""Raise a ValueError if the section is not a valid section."""
if section not in section_schemas:
- raise ValueError("Invalid config section: '%s'. Options are %s"
- % (section, section_schemas))
+ raise ValueError("Invalid config section: '%s'. Options are %s" %
+ (section, section_schemas))
def extend_with_default(validator_class):
- """Add support for the 'default' attribute for properties and patternProperties.
+ """Add support for the 'default' attribute for
+ properties and patternProperties
jsonschema does not handle this out of the box -- it only
validates. This allows us to set default values for configs
@@ -285,13 +413,15 @@ def extend_with_default(validator_class):
"""
validate_properties = validator_class.VALIDATORS["properties"]
- validate_pattern_properties = validator_class.VALIDATORS["patternProperties"]
+ validate_pattern_properties = validator_class.VALIDATORS[
+ "patternProperties"]
def set_defaults(validator, properties, instance, schema):
for property, subschema in properties.iteritems():
if "default" in subschema:
instance.setdefault(property, subschema["default"])
- for err in validate_properties(validator, properties, instance, schema):
+ for err in validate_properties(validator, properties, instance,
+ schema):
yield err
def set_pp_defaults(validator, properties, instance, schema):
@@ -302,17 +432,19 @@ def extend_with_default(validator_class):
if re.match(property, key) and val is None:
instance[key] = subschema["default"]
- for err in validate_pattern_properties(validator, properties, instance, schema):
+ for err in validate_pattern_properties(validator, properties, instance,
+ schema):
yield err
return validators.extend(validator_class, {
- "properties" : set_defaults,
- "patternProperties" : set_pp_defaults
+ "properties": set_defaults,
+ "patternProperties": set_pp_defaults
})
DefaultSettingValidator = extend_with_default(Draft4Validator)
+
def validate_section(data, schema):
"""Validate data read in from a Spack YAML file.
@@ -334,9 +466,9 @@ class ConfigScope(object):
"""
def __init__(self, name, path):
- self.name = name # scope name.
- self.path = path # path to directory containing configs.
- self.sections = {} # sections read from config files.
+ self.name = name # scope name.
+ self.path = path # path to directory containing configs.
+ self.sections = {} # sections read from config files.
# Register in a dict of all ConfigScopes
# TODO: make this cleaner. Mocking up for testing is brittle.
@@ -347,16 +479,14 @@ class ConfigScope(object):
validate_section_name(section)
return os.path.join(self.path, "%s.yaml" % section)
-
def get_section(self, section):
- if not section in self.sections:
- path = self.get_section_filename(section)
+ if section not in self.sections:
+ path = self.get_section_filename(section)
schema = section_schemas[section]
- data = _read_config_file(path, schema)
+ data = _read_config_file(path, schema)
self.sections[section] = data
return self.sections[section]
-
def write_section(self, section):
filename = self.get_section_filename(section)
data = self.get_section(section)
@@ -368,8 +498,8 @@ class ConfigScope(object):
except jsonschema.ValidationError as e:
raise ConfigSanityError(e, data)
except (yaml.YAMLError, IOError) as e:
- raise ConfigFileError("Error writing to config file: '%s'" % str(e))
-
+ raise ConfigFileError("Error writing to config file: '%s'" %
+ str(e))
def clear(self):
"""Empty cached config information."""
@@ -401,8 +531,8 @@ def validate_scope(scope):
return config_scopes[scope]
else:
- raise ValueError("Invalid config scope: '%s'. Must be one of %s"
- % (scope, config_scopes.keys()))
+ raise ValueError("Invalid config scope: '%s'. Must be one of %s" %
+ (scope, config_scopes.keys()))
def _read_config_file(filename, schema):
@@ -413,7 +543,7 @@ def _read_config_file(filename, schema):
elif not os.path.isfile(filename):
raise ConfigFileError(
- "Invlaid configuration. %s exists but is not a file." % filename)
+ "Invalid configuration. %s exists but is not a file." % filename)
elif not os.access(filename, os.R_OK):
raise ConfigFileError("Config file is not readable: %s" % filename)
@@ -428,12 +558,12 @@ def _read_config_file(filename, schema):
return data
except MarkedYAMLError as e:
- raise ConfigFileError(
- "Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
+ raise ConfigFileError("Error parsing yaml%s: %s" %
+ (str(e.context_mark), e.problem))
except IOError as e:
- raise ConfigFileError(
- "Error reading configuration file %s: %s" % (filename, str(e)))
+ raise ConfigFileError("Error reading configuration file %s: %s" %
+ (filename, str(e)))
def clear_config_caches():
@@ -456,6 +586,7 @@ def _merge_yaml(dest, source):
parent instead of merging.
"""
+
def they_are(t):
return isinstance(dest, t) and isinstance(source, t)
@@ -476,7 +607,7 @@ def _merge_yaml(dest, source):
# Source dict is merged into dest.
elif they_are(dict):
for sk, sv in source.iteritems():
- if not sk in dest:
+ if sk not in dest:
dest[sk] = copy.copy(sv)
else:
dest[sk] = _merge_yaml(dest[sk], source[sk])
@@ -558,7 +689,7 @@ def print_section(section):
data = syaml.syaml_dict()
data[section] = get_config(section)
syaml.dump(data, stream=sys.stdout, default_flow_style=False)
- except (yaml.YAMLError, IOError) as e:
+ except (yaml.YAMLError, IOError):
raise ConfigError("Error reading configuration: %s" % section)
@@ -588,15 +719,22 @@ def is_spec_buildable(spec):
"""Return true if the spec pkgspec is configured as buildable"""
allpkgs = get_config('packages')
name = spec.name
- if not spec.name in allpkgs:
+ if name not in allpkgs:
return True
- if not 'buildable' in allpkgs[spec.name]:
+ if 'buildable' not in allpkgs[name]:
return True
return allpkgs[spec.name]['buildable']
-class ConfigError(SpackError): pass
-class ConfigFileError(ConfigError): pass
+class ConfigError(SpackError):
+
+ pass
+
+
+class ConfigFileError(ConfigError):
+
+ pass
+
def get_path(path, data):
if path:
@@ -604,8 +742,10 @@ def get_path(path, data):
else:
return data
+
class ConfigFormatError(ConfigError):
"""Raised when a configuration format does not match its schema."""
+
def __init__(self, validation_error, data):
# Try to get line number from erroneous instance and its parent
instance_mark = getattr(validation_error.instance, '_start_mark', None)
@@ -638,5 +778,6 @@ class ConfigFormatError(ConfigError):
message = '%s: %s' % (location, validation_error.message)
super(ConfigError, self).__init__(message)
+
class ConfigSanityError(ConfigFormatError):
"""Same as ConfigFormatError, raised when config is written by Spack."""
diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py
index 72aafa4e2d..9748b4033a 100644
--- a/lib/spack/spack/environment.py
+++ b/lib/spack/spack/environment.py
@@ -26,7 +26,8 @@ class SetEnv(NameValueModifier):
class UnsetEnv(NameModifier):
def execute(self):
- os.environ.pop(self.name, None) # Avoid throwing if the variable was not set
+ # Avoid throwing if the variable was not set
+ os.environ.pop(self.name, None)
class SetPath(NameValueModifier):
@@ -55,7 +56,9 @@ class RemovePath(NameValueModifier):
def execute(self):
environment_value = os.environ.get(self.name, '')
directories = environment_value.split(':') if environment_value else []
- directories = [os.path.normpath(x) for x in directories if x != os.path.normpath(self.value)]
+ directories = [os.path.normpath(x)
+ for x in directories
+ if x != os.path.normpath(self.value)]
os.environ[self.name] = ':'.join(directories)
@@ -63,7 +66,8 @@ class EnvironmentModifications(object):
"""
Keeps track of requests to modify the current environment.
- Each call to a method to modify the environment stores the extra information on the caller in the request:
+ Each call to a method to modify the environment stores the extra
+ information on the caller in the request:
- 'filename' : filename of the module where the caller is defined
- 'lineno': line number where the request occurred
- 'context' : line of code that issued the request that failed
@@ -71,10 +75,10 @@ class EnvironmentModifications(object):
def __init__(self, other=None):
"""
- Initializes a new instance, copying commands from other if it is not None
+ Initializes a new instance, copying commands from other if not None
Args:
- other: another instance of EnvironmentModifications from which (optional)
+ other: another instance of EnvironmentModifications
"""
self.env_modifications = []
if other is not None:
@@ -93,7 +97,7 @@ class EnvironmentModifications(object):
@staticmethod
def _check_other(other):
if not isinstance(other, EnvironmentModifications):
- raise TypeError('other must be an instance of EnvironmentModifications')
+ raise TypeError('not an instance of EnvironmentModifications')
def _get_outside_caller_attributes(self):
stack = inspect.stack()
@@ -101,12 +105,10 @@ class EnvironmentModifications(object):
_, filename, lineno, _, context, index = stack[2]
context = context[index].strip()
except Exception:
- filename, lineno, context = 'unknown file', 'unknown line', 'unknown context'
- args = {
- 'filename': filename,
- 'lineno': lineno,
- 'context': context
- }
+ filename = 'unknown file'
+ lineno = 'unknown line'
+ context = 'unknown context'
+ args = {'filename': filename, 'lineno': lineno, 'context': context}
return args
def set(self, name, value, **kwargs):
@@ -170,7 +172,7 @@ class EnvironmentModifications(object):
def remove_path(self, name, path, **kwargs):
"""
- Stores in the current object a request to remove a path from a path list
+ Stores in the current object a request to remove a path from a list
Args:
name: name of the path list in the environment
@@ -185,7 +187,8 @@ class EnvironmentModifications(object):
Returns a dict of the modifications grouped by variable name
Returns:
- dict mapping the environment variable name to the modifications to be done on it
+ dict mapping the environment variable name to the modifications
+ to be done on it
"""
modifications = collections.defaultdict(list)
for item in self:
@@ -203,7 +206,8 @@ class EnvironmentModifications(object):
Applies the modifications and clears the list
"""
modifications = self.group_by_name()
- # Apply the modifications to the environment variables one variable at a time
+ # Apply the modifications to the environment variables one variable
+ # at a time
for name, actions in sorted(modifications.items()):
for x in actions:
x.execute()
@@ -224,13 +228,17 @@ def concatenate_paths(paths):
def set_or_unset_not_first(variable, changes, errstream):
"""
- Check if we are going to set or unset something after other modifications have already been requested
+ Check if we are going to set or unset something after other modifications
+ have already been requested
"""
- indexes = [ii for ii, item in enumerate(changes) if ii != 0 and type(item) in [SetEnv, UnsetEnv]]
+ indexes = [ii
+ for ii, item in enumerate(changes)
+ if ii != 0 and type(item) in [SetEnv, UnsetEnv]]
if indexes:
good = '\t \t{context} at {filename}:{lineno}'
nogood = '\t--->\t{context} at {filename}:{lineno}'
- errstream('Suspicious requests to set or unset the variable \'{var}\' found'.format(var=variable))
+ message = 'Suspicious requests to set or unset the variable \'{var}\' found' # NOQA: ignore=E501
+ errstream(message.format(var=variable))
for ii, item in enumerate(changes):
print_format = nogood if ii in indexes else good
errstream(print_format.format(**item.args))
@@ -238,8 +246,8 @@ def set_or_unset_not_first(variable, changes, errstream):
def validate(env, errstream):
"""
- Validates the environment modifications to check for the presence of suspicious patterns. Prompts a warning for
- everything that was found
+ Validates the environment modifications to check for the presence of
+ suspicious patterns. Prompts a warning for everything that was found
Current checks:
- set or unset variables after other changes on the same variable
@@ -250,3 +258,20 @@ def validate(env, errstream):
modifications = env.group_by_name()
for variable, list_of_changes in sorted(modifications.items()):
set_or_unset_not_first(variable, list_of_changes, errstream)
+
+
+def filter_environment_blacklist(env, variables):
+ """
+ Generator that filters out any change to environment variables present in
+ the input list
+
+ Args:
+ env: list of environment modifications
+ variables: list of variable names to be filtered
+
+ Yields:
+ items in env if they are not in variables
+ """
+ for item in env:
+ if item.name not in variables:
+ yield item
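
A small usage sketch for the new filter_environment_blacklist generator, using only calls that appear in this file (the variable names and paths are arbitrary):

    from spack.environment import (EnvironmentModifications,
                                   filter_environment_blacklist)

    env = EnvironmentModifications()
    env.set('FOO', 'foo')                       # will be filtered out below
    env.prepend_path('PATH', '/opt/tools/bin')  # hypothetical directory

    # Keep only the modifications whose variable is not blacklisted.
    kept = list(filter_environment_blacklist(env, ['FOO']))
    print([mod.name for mod in kept])           # -> ['PATH']
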
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
index 61624fbd70..0dc6f06f55 100644
--- a/lib/spack/spack/modules.py
+++ b/lib/spack/spack/modules.py
@@ -23,33 +23,34 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""
-This module contains code for creating environment modules, which can include dotkits, tcl modules, lmod, and others.
+This module contains code for creating environment modules, which can include
+dotkits, tcl modules, lmod, and others.
-The various types of modules are installed by post-install hooks and removed after an uninstall by post-uninstall hooks.
-This class consolidates the logic for creating an abstract description of the information that module systems need.
-Currently that includes a number of directories to be appended to paths in the user's environment:
+The various types of modules are installed by post-install hooks and removed
+after an uninstall by post-uninstall hooks. This class consolidates the logic
+for creating an abstract description of the information that module systems
+need.
- * /bin directories to be appended to PATH
- * /lib* directories for LD_LIBRARY_PATH
- * /include directories for CPATH
- * /man* and /share/man* directories for MANPATH
- * the package prefix for CMAKE_PREFIX_PATH
+This module also includes logic for coming up with unique names for the module
+files so that they can be found by the various shell-support files in
+$SPACK/share/spack/setup-env.*.
-This module also includes logic for coming up with unique names for the module files so that they can be found by the
-various shell-support files in $SPACK/share/spack/setup-env.*.
-
-Each hook in hooks/ implements the logic for writing its specific type of module file.
+Each hook implements the logic for writing its specific type of module.
"""
+import copy
+import datetime
import os
import os.path
import re
-import shutil
+import string
import textwrap
import llnl.util.tty as tty
import spack
import spack.config
from llnl.util.filesystem import join_path, mkdirp
+from spack.build_environment import parent_class_modules
+from spack.build_environment import set_module_variables_for_package
from spack.environment import *
__all__ = ['EnvModule', 'Dotkit', 'TclModule']
@@ -61,56 +62,183 @@ CONFIGURATION = spack.config.get_config('modules')
def print_help():
- """For use by commands to tell user how to activate shell support."""
-
- tty.msg("This command requires spack's shell integration.",
- "",
+ """
+ For use by commands to tell user how to activate shell support.
+ """
+ tty.msg("This command requires spack's shell integration.", "",
"To initialize spack's shell commands, you must run one of",
"the commands below. Choose the right command for your shell.",
- "",
- "For bash and zsh:",
- " . %s/setup-env.sh" % spack.share_path,
- "",
- "For csh and tcsh:",
- " setenv SPACK_ROOT %s" % spack.prefix,
- " source %s/setup-env.csh" % spack.share_path,
- "")
+ "", "For bash and zsh:",
+ " . %s/setup-env.sh" % spack.share_path, "",
+ "For csh and tcsh:", " setenv SPACK_ROOT %s" % spack.prefix,
+ " source %s/setup-env.csh" % spack.share_path, "")
def inspect_path(prefix):
"""
- Inspects the prefix of an installation to search for common layouts. Issues a request to modify the environment
- accordingly when an item is found.
+ Inspects the prefix of an installation to search for common layouts.
+ Issues a request to modify the environment when an item is found.
Args:
prefix: prefix of the installation
Returns:
- instance of EnvironmentModifications containing the requested modifications
+ instance of EnvironmentModifications containing the requested
+ modifications
"""
env = EnvironmentModifications()
# Inspect the prefix to check for the existence of common directories
- prefix_inspections = {
- 'bin': ('PATH',),
- 'man': ('MANPATH',),
- 'lib': ('LIBRARY_PATH', 'LD_LIBRARY_PATH'),
- 'lib64': ('LIBRARY_PATH', 'LD_LIBRARY_PATH'),
- 'include': ('CPATH',)
- }
- for attribute, variables in prefix_inspections.items():
- expected = getattr(prefix, attribute)
+ prefix_inspections = CONFIGURATION.get('prefix_inspections', {})
+ for relative_path, variables in prefix_inspections.items():
+ expected = join_path(prefix, relative_path)
if os.path.isdir(expected):
for variable in variables:
env.prepend_path(variable, expected)
- # PKGCONFIG
- for expected in (join_path(prefix.lib, 'pkgconfig'), join_path(prefix.lib64, 'pkgconfig')):
- if os.path.isdir(expected):
- env.prepend_path('PKG_CONFIG_PATH', expected)
- # CMake related variables
- env.prepend_path('CMAKE_PREFIX_PATH', prefix)
return env
+def dependencies(spec, request='all'):
+ """
+ Returns the list of dependent specs for a given spec, according to the
+ given request
+
+ Args:
+ spec: target spec
+ request: either 'none', 'direct' or 'all'
+
+ Returns:
+ empty list if 'none', direct dependency list if 'direct', all
+ dependencies if 'all'
+ """
+ if request not in ('none', 'direct', 'all'):
+ message = "Wrong value for argument 'request' : "
+ message += "should be one of ('none', 'direct', 'all')"
+ message += " [current value is '{0}']"
+ raise tty.error(message.format(request))
+
+ if request == 'none':
+ return []
+
+ if request == 'direct':
+ return [xx for _, xx in spec.dependencies.items()]
+
+ # FIXME : during module file creation nodes seem to be visited
+ # FIXME : multiple times even if cover='nodes' is given. This work around
+ # FIXME : permits to get a unique list of spec anyhow. Maybe we miss a
+ # FIXME : merge step among nodes that refer to the same package?
+ seen = set()
+ seen_add = seen.add
+ l = [xx
+ for xx in sorted(
+ spec.traverse(order='post',
+ depth=True,
+ cover='nodes',
+ root=False),
+ reverse=True)]
+ return [xx for ii, xx in l if not (xx in seen or seen_add(xx))]
+
+
+def update_dictionary_extending_lists(target, update):
+ for key in update:
+ value = target.get(key, None)
+ if isinstance(value, list):
+ target[key].extend(update[key])
+ elif isinstance(value, dict):
+ update_dictionary_extending_lists(target[key], update[key])
+ else:
+ target[key] = update[key]
+
+
+def parse_config_options(module_generator):
+ """
+ Parse the configuration file and returns a bunch of items that will be
+ needed during module file generation
+
+ Args:
+ module_generator: module generator for a given spec
+
+ Returns:
+ autoloads: list of specs to be autoloaded
+ prerequisites: list of specs to be marked as prerequisite
+ filters: list of environment variables whose modification is
+ blacklisted in module files
+ env: list of custom environment modifications to be applied in the
+ module file
+ """
+ # Get the configuration for this kind of generator
+ module_configuration = copy.deepcopy(CONFIGURATION.get(
+ module_generator.name, {}))
+
+ #####
+ # Merge all the rules
+ #####
+ module_file_actions = module_configuration.pop('all', {})
+ for spec, conf in module_configuration.items():
+ override = False
+ if spec.endswith(':'):
+ spec = spec.strip(':')
+ override = True
+ if module_generator.spec.satisfies(spec):
+ if override:
+ module_file_actions = {}
+ update_dictionary_extending_lists(module_file_actions, conf)
+
+ #####
+ # Process the common rules
+ #####
+
+ # Automatic loading loads
+ module_file_actions['autoload'] = dependencies(
+ module_generator.spec, module_file_actions.get('autoload', 'none'))
+ # Prerequisites
+ module_file_actions['prerequisites'] = dependencies(
+ module_generator.spec, module_file_actions.get('prerequisites',
+ 'none'))
+ # Environment modifications
+ environment_actions = module_file_actions.pop('environment', {})
+ env = EnvironmentModifications()
+
+ def process_arglist(arglist):
+ if method == 'unset':
+ for x in arglist:
+ yield (x, )
+ else:
+ for x in arglist.iteritems():
+ yield x
+
+ for method, arglist in environment_actions.items():
+ for args in process_arglist(arglist):
+ getattr(env, method)(*args)
+
+ # for item in arglist:
+ # if method == 'unset':
+ # args = [item]
+ # else:
+ # args = item.split(',')
+ # getattr(env, method)(*args)
+
+ return module_file_actions, env
+
+
+def filter_blacklisted(specs, module_name):
+ """
+ Given a sequence of specs, filters the ones that are blacklisted in the
+ module configuration file.
+
+ Args:
+ specs: sequence of spec instances
+ module_name: type of module file objects
+
+ Yields:
+ non blacklisted specs
+ """
+ for x in specs:
+ if module_types[module_name](x).blacklisted:
+ tty.debug('\tFILTER : %s' % x)
+ continue
+ yield x
+
+
class EnvModule(object):
name = 'env_module'
formats = {}
@@ -118,7 +246,8 @@ class EnvModule(object):
class __metaclass__(type):
def __init__(cls, name, bases, dict):
type.__init__(cls, name, bases, dict)
- if cls.name != 'env_module' and cls.name in CONFIGURATION['enable']:
+ if cls.name != 'env_module' and cls.name in CONFIGURATION[
+ 'enable']:
module_types[cls.name] = cls
def __init__(self, spec=None):
@@ -134,8 +263,41 @@ class EnvModule(object):
# long description is the docstring with reduced whitespace.
self.long_description = None
if self.spec.package.__doc__:
- self.long_description = re.sub(r'\s+', ' ', self.spec.package.__doc__)
+ self.long_description = re.sub(r'\s+', ' ',
+ self.spec.package.__doc__)
+
+ @property
+ def naming_scheme(self):
+ try:
+ naming_scheme = CONFIGURATION[self.name]['naming_scheme']
+ except KeyError:
+ naming_scheme = self.default_naming_format
+ return naming_scheme
+
+ @property
+ def tokens(self):
+ tokens = {
+ 'name': self.spec.name,
+ 'version': self.spec.version,
+ 'compiler': self.spec.compiler
+ }
+ return tokens
+ @property
+ def use_name(self):
+ """
+ Subclasses should implement this to return the name the module command
+ uses to refer to the package.
+ """
+ naming_tokens = self.tokens
+ naming_scheme = self.naming_scheme
+ name = naming_scheme.format(**naming_tokens)
+ name += '-' + self.spec.dag_hash(
+ ) # Always append the hash to make the module file unique
+ # Not everybody is working on linux...
+ parts = name.split('/')
+ name = join_path(*parts)
+ return name
@property
def category(self):
@@ -144,13 +306,51 @@ class EnvModule(object):
return self.pkg.category
# Extensions
for extendee in self.pkg.extendees:
- return '{extendee} extension'.format(extendee=extendee)
+ return '{extendee}_extension'.format(extendee=extendee)
# Not very descriptive fallback
- return 'spack installed package'
+ return 'spack'
+ @property
+ def blacklisted(self):
+ configuration = CONFIGURATION.get(self.name, {})
+ whitelist_matches = [x
+ for x in configuration.get('whitelist', [])
+ if self.spec.satisfies(x)]
+ blacklist_matches = [x
+ for x in configuration.get('blacklist', [])
+ if self.spec.satisfies(x)]
+ if whitelist_matches:
+ message = '\tWHITELIST : %s [matches : ' % self.spec.cshort_spec
+ for rule in whitelist_matches:
+ message += '%s ' % rule
+ message += ' ]'
+ tty.debug(message)
+
+ if blacklist_matches:
+ message = '\tBLACKLIST : %s [matches : ' % self.spec.cshort_spec
+ for rule in blacklist_matches:
+ message += '%s ' % rule
+ message += ' ]'
+ tty.debug(message)
+
+ if not whitelist_matches and blacklist_matches:
+ return True
+
+ return False
def write(self):
- """Write out a module file for this object."""
+ """
+ Writes out a module file for this object.
+
+ This method employs a template pattern and expects derived classes to:
+ - override the header property
+ - provide formats for autoload, prerequisites and environment changes
+ """
+ if self.blacklisted:
+ return
+ tty.debug("\tWRITE : %s [%s]" %
+ (self.spec.cshort_spec, self.file_name))
+
module_dir = os.path.dirname(self.file_name)
if not os.path.exists(module_dir):
mkdirp(module_dir)
@@ -159,42 +359,73 @@ class EnvModule(object):
# installation prefix
env = inspect_path(self.spec.prefix)
- # Let the extendee modify their extensions before asking for
- # package-specific modifications
+ # Let the extendee/dependency modify their extensions/dependencies
+ # before asking for package-specific modifications
spack_env = EnvironmentModifications()
- for item in self.pkg.extendees:
- try:
- package = self.spec[item].package
- package.setup_dependent_package(self.pkg.module, self.spec)
- package.setup_dependent_environment(spack_env, env, self.spec)
- except:
- # The extends was conditional, so it doesn't count here
- # eg: extends('python', when='+python')
- pass
+ # TODO : the code down below is quite similar to
+ # TODO : build_environment.setup_package and needs to be factored out
+ # TODO : to a single place
+ for item in dependencies(self.spec, 'all'):
+ package = self.spec[item.name].package
+ modules = parent_class_modules(package.__class__)
+ for mod in modules:
+ set_module_variables_for_package(package, mod)
+ set_module_variables_for_package(package, package.module)
+ package.setup_dependent_package(self.pkg.module, self.spec)
+ package.setup_dependent_environment(spack_env, env, self.spec)
# Package-specific environment modifications
+ set_module_variables_for_package(self.pkg, self.pkg.module)
self.spec.package.setup_environment(spack_env, env)
- # TODO : implement site-specific modifications and filters
- if not env:
- return
-
+ # Parse configuration file
+ module_configuration, conf_env = parse_config_options(self)
+ env.extend(conf_env)
+ filters = module_configuration.get('filter', {}).get(
+ 'environment_blacklist', {})
+ # Build up the module file content
+ module_file_content = self.header
+ for x in filter_blacklisted(
+ module_configuration.pop('autoload', []), self.name):
+ module_file_content += self.autoload(x)
+ for x in filter_blacklisted(
+ module_configuration.pop('prerequisites', []), self.name):
+ module_file_content += self.prerequisite(x)
+ for line in self.process_environment_command(
+ filter_environment_blacklist(env, filters)):
+ module_file_content += line
+ for line in self.module_specific_content(module_configuration):
+ module_file_content += line
+
+ # Dump to file
with open(self.file_name, 'w') as f:
- self.write_header(f)
- for line in self.process_environment_command(env):
- f.write(line)
+ f.write(module_file_content)
- def write_header(self, stream):
+ @property
+ def header(self):
raise NotImplementedError()
+ def module_specific_content(self, configuration):
+ return tuple()
+
+ def autoload(self, spec):
+ m = type(self)(spec)
+ return self.autoload_format.format(module_file=m.use_name)
+
+ def prerequisite(self, spec):
+ m = type(self)(spec)
+ return self.prerequisite_format.format(module_file=m.use_name)
+
def process_environment_command(self, env):
for command in env:
try:
- yield self.formats[type(command)].format(**command.args)
+ yield self.environment_modifications_formats[type(
+ command)].format(**command.args)
except KeyError:
- tty.warn('Cannot handle command of type {command} : skipping request'.format(command=type(command)))
- tty.warn('{context} at {filename}:{lineno}'.format(**command.args))
-
+ message = 'Cannot handle command of type {command} : skipping request' # NOQA: ignore=E501
+ tty.warn(message.format(command=type(command)))
+ context = '{context} at {filename}:{lineno}'
+ tty.warn(context.format(**command.args))
@property
def file_name(self):
@@ -202,62 +433,65 @@ class EnvModule(object):
where this module lives."""
raise NotImplementedError()
- @property
- def use_name(self):
- """Subclasses should implement this to return the name the
- module command uses to refer to the package."""
- raise NotImplementedError()
-
def remove(self):
mod_file = self.file_name
if os.path.exists(mod_file):
try:
os.remove(mod_file) # Remove the module file
- os.removedirs(os.path.dirname(mod_file)) # Remove all the empty directories from the leaf up
+ os.removedirs(
+ os.path.dirname(mod_file)
+ ) # Remove all the empty directories from the leaf up
except OSError:
- pass # removedirs throws OSError on first non-empty directory found
+ # removedirs throws OSError on first non-empty directory found
+ pass
class Dotkit(EnvModule):
name = 'dotkit'
path = join_path(spack.share_path, "dotkit")
- formats = {
+ environment_modifications_formats = {
PrependPath: 'dk_alter {name} {value}\n',
SetEnv: 'dk_setenv {name} {value}\n'
}
+ autoload_format = 'dk_op {module_file}\n'
+
+ default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501
+
@property
def file_name(self):
- return join_path(Dotkit.path, self.spec.architecture, '%s.dk' % self.use_name)
+ return join_path(Dotkit.path, self.spec.architecture,
+ '%s.dk' % self.use_name)
@property
- def use_name(self):
- return "%s-%s-%s-%s-%s" % (self.spec.name, self.spec.version,
- self.spec.compiler.name,
- self.spec.compiler.version,
- self.spec.dag_hash())
-
- def write_header(self, dk_file):
+ def header(self):
# Category
+ header = ''
if self.category:
- dk_file.write('#c %s\n' % self.category)
+ header += '#c %s\n' % self.category
# Short description
if self.short_description:
- dk_file.write('#d %s\n' % self.short_description)
+ header += '#d %s\n' % self.short_description
# Long description
if self.long_description:
for line in textwrap.wrap(self.long_description, 72):
- dk_file.write("#h %s\n" % line)
+ header += '#h %s\n' % line
+ return header
+
+ def prerequisite(self, spec):
+ tty.warn('prerequisites: not supported by dotkit module files')
+ tty.warn('\tYou may want to check ~/.spack/modules.yaml')
+ return ''
class TclModule(EnvModule):
name = 'tcl'
path = join_path(spack.share_path, "modules")
- formats = {
+ environment_modifications_formats = {
PrependPath: 'prepend-path {name} \"{value}\"\n',
AppendPath: 'append-path {name} \"{value}\"\n',
RemovePath: 'remove-path {name} \"{value}\"\n',
@@ -265,28 +499,62 @@ class TclModule(EnvModule):
UnsetEnv: 'unsetenv {name}\n'
}
+ autoload_format = ('if ![ is-loaded {module_file} ] {{\n'
+ ' puts stderr "Autoloading {module_file}"\n'
+ ' module load {module_file}\n'
+ '}}\n\n')
+
+ prerequisite_format = 'prereq {module_file}\n'
+
+ default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501
+
@property
def file_name(self):
return join_path(TclModule.path, self.spec.architecture, self.use_name)
@property
- def use_name(self):
- return "%s-%s-%s-%s-%s" % (self.spec.name, self.spec.version,
- self.spec.compiler.name,
- self.spec.compiler.version,
- self.spec.dag_hash())
-
- def write_header(self, module_file):
+ def header(self):
+ timestamp = datetime.datetime.now()
# TCL Modulefile header
- module_file.write('#%Module1.0\n')
+ header = '#%Module1.0\n'
+ header += '## Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp # NOQA: ignore=E501
+ header += '##\n'
+ header += '## %s\n' % self.spec.short_spec
+ header += '##\n'
+
# TODO : category ?
# Short description
if self.short_description:
- module_file.write('module-whatis \"%s\"\n\n' % self.short_description)
+ header += 'module-whatis \"%s\"\n\n' % self.short_description
# Long description
if self.long_description:
- module_file.write('proc ModulesHelp { } {\n')
+ header += 'proc ModulesHelp { } {\n'
for line in textwrap.wrap(self.long_description, 72):
- module_file.write("puts stderr \"%s\"\n" % line)
- module_file.write('}\n\n')
+ header += 'puts stderr "%s"\n' % line
+ header += '}\n\n'
+ return header
+
+ def module_specific_content(self, configuration):
+ naming_tokens = self.tokens
+ # Conflict
+ conflict_format = configuration.get('conflict', [])
+ f = string.Formatter()
+ for item in conflict_format:
+ line = 'conflict ' + item + '\n'
+ if len([x for x in f.parse(line)
+ ]) > 1: # We do have placeholder to substitute
+ for naming_dir, conflict_dir in zip(
+ self.naming_scheme.split('/'), item.split('/')):
+ if naming_dir != conflict_dir:
+ message = 'conflict scheme does not match naming'
+ message += ' [{spec}]\n\n'
+ message += 'naming scheme : "{nformat}"\n'
+ message += 'conflict scheme : "{cformat}"\n\n'
+ message += '** You may want to check your `modules.yaml` configuration file **\n' # NOQA: ignore=E501
+ tty.error(message.format(spec=self.spec,
+ nformat=self.naming_scheme,
+ cformat=item))
+ raise SystemExit('Module generation aborted.')
+ line = line.format(**naming_tokens)
+ yield line
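
The configurable naming_scheme above is plain str.format over the small token dictionary returned by EnvModule.tokens, with the DAG hash appended afterwards. A standalone sketch with hardcoded stand-ins for the spec attributes (all values invented):

    import collections

    # Stand-ins for self.spec.name / .version / .compiler used by EnvModule.tokens.
    Compiler = collections.namedtuple('Compiler', ['name', 'version'])
    tokens = {
        'name': 'mpileaks',                     # hypothetical package
        'version': '1.0',
        'compiler': Compiler(name='gcc', version='4.9.2'),
    }

    naming_scheme = '{name}/{version}-{compiler.name}'  # e.g. from modules.yaml
    use_name = naming_scheme.format(**tokens)
    use_name += '-' + 'abc1234'                 # dag_hash() keeps the name unique
    # EnvModule.use_name then splits on '/' and re-joins with join_path so that
    # hierarchical names work on every platform.
    print(use_name)                             # mpileaks/1.0-gcc-abc1234
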
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py
index 3c5edde66b..395ca0c87a 100644
--- a/lib/spack/spack/test/__init__.py
+++ b/lib/spack/spack/test/__init__.py
@@ -23,52 +23,23 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
-import unittest
-import nose
-from spack.test.tally_plugin import Tally
-from llnl.util.filesystem import join_path
import llnl.util.tty as tty
-from llnl.util.tty.colify import colify
-
+import nose
import spack
-
+from llnl.util.filesystem import join_path
+from llnl.util.tty.colify import colify
+from spack.test.tally_plugin import Tally
"""Names of tests to be included in Spack's test suite"""
-test_names = ['versions',
- 'url_parse',
- 'url_substitution',
- 'packages',
- 'stage',
- 'spec_syntax',
- 'spec_semantics',
- 'spec_dag',
- 'concretize',
- 'multimethod',
- 'install',
- 'package_sanity',
- 'config',
- 'directory_layout',
- 'pattern',
- 'python_version',
- 'git_fetch',
- 'svn_fetch',
- 'hg_fetch',
- 'mirror',
- 'url_extrapolate',
- 'cc',
- 'link_tree',
- 'spec_yaml',
- 'optional_deps',
- 'make_executable',
- 'configure_guess',
- 'lock',
- 'database',
- 'namespace_trie',
- 'yaml',
- 'sbang',
- 'environment',
- 'cmd.uninstall',
- 'cmd.test_install']
+test_names = ['versions', 'url_parse', 'url_substitution', 'packages', 'stage',
+ 'spec_syntax', 'spec_semantics', 'spec_dag', 'concretize',
+ 'multimethod', 'install', 'package_sanity', 'config',
+ 'directory_layout', 'pattern', 'python_version', 'git_fetch',
+ 'svn_fetch', 'hg_fetch', 'mirror', 'modules', 'url_extrapolate',
+ 'cc', 'link_tree', 'spec_yaml', 'optional_deps',
+ 'make_executable', 'configure_guess', 'lock', 'database',
+ 'namespace_trie', 'yaml', 'sbang', 'environment',
+ 'cmd.uninstall', 'cmd.test_install']
def list_tests():
@@ -79,7 +50,6 @@ def list_tests():
def run(names, outputDir, verbose=False):
"""Run tests with the supplied names. Names should be a list. If
it's empty, run ALL of Spack's tests."""
- verbosity = 1 if not verbose else 2
if not names:
names = test_names
@@ -94,7 +64,7 @@ def run(names, outputDir, verbose=False):
tally = Tally()
for test in names:
module = 'spack.test.' + test
- print module
+ print(module)
tty.msg("Running test: %s" % test)
@@ -104,15 +74,13 @@ def run(names, outputDir, verbose=False):
xmlOutputFname = "unittests-{0}.xml".format(test)
xmlOutputPath = join_path(outputDir, xmlOutputFname)
runOpts += ["--with-xunit",
- "--xunit-file={0}".format(xmlOutputPath)]
+ "--xunit-file={0}".format(xmlOutputPath)]
argv = [""] + runOpts + [module]
- result = nose.run(argv=argv, addplugins=[tally])
+ nose.run(argv=argv, addplugins=[tally])
succeeded = not tally.failCount and not tally.errorCount
- tty.msg("Tests Complete.",
- "%5d tests run" % tally.numberOfTestsRun,
- "%5d failures" % tally.failCount,
- "%5d errors" % tally.errorCount)
+ tty.msg("Tests Complete.", "%5d tests run" % tally.numberOfTestsRun,
+ "%5d failures" % tally.failCount, "%5d errors" % tally.errorCount)
if succeeded:
tty.info("OK", format='g')
diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py
new file mode 100644
index 0000000000..f0ff778a10
--- /dev/null
+++ b/lib/spack/spack/test/modules.py
@@ -0,0 +1,157 @@
+import collections
+from contextlib import contextmanager
+
+import StringIO
+import spack.modules
+from spack.test.mock_packages_test import MockPackagesTest
+
+FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
+
+
+# Monkey-patch open to write module files to a StringIO instance
+@contextmanager
+def mock_open(filename, mode):
+ if not mode == 'w':
+ message = 'test.modules : unexpected opening mode [mock_open]'
+ raise RuntimeError(message)
+
+ FILE_REGISTRY[filename] = StringIO.StringIO()
+
+ try:
+ yield FILE_REGISTRY[filename]
+ finally:
+ handle = FILE_REGISTRY[filename]
+ FILE_REGISTRY[filename] = handle.getvalue()
+ handle.close()
+
+
+configuration_autoload_direct = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'all': {
+ 'autoload': 'direct'
+ }
+ }
+}
+
+configuration_autoload_all = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'all': {
+ 'autoload': 'all'
+ }
+ }
+}
+
+configuration_alter_environment = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'all': {
+ 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']}
+ },
+ '=x86-linux': {
+ 'environment': {'set': {'FOO': 'foo'},
+ 'unset': ['BAR']}
+ }
+ }
+}
+
+configuration_blacklist = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'blacklist': ['callpath'],
+ 'all': {
+ 'autoload': 'direct'
+ }
+ }
+}
+
+configuration_conflicts = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'naming_scheme': '{name}/{version}-{compiler.name}',
+ 'all': {
+ 'conflict': ['{name}', 'intel/14.0.1']
+ }
+ }
+}
+
+
+class TclTests(MockPackagesTest):
+ def setUp(self):
+ super(TclTests, self).setUp()
+ self.configuration_obj = spack.modules.CONFIGURATION
+ spack.modules.open = mock_open
+ # Make sure that a non-mocked configuration will trigger an error
+ spack.modules.CONFIGURATION = None
+
+ def tearDown(self):
+ del spack.modules.open
+ spack.modules.CONFIGURATION = self.configuration_obj
+ super(TclTests, self).tearDown()
+
+ def get_modulefile_content(self, spec):
+ spec.concretize()
+ generator = spack.modules.TclModule(spec)
+ generator.write()
+ content = FILE_REGISTRY[generator.file_name].split('\n')
+ return content
+
+ def test_simple_case(self):
+ spack.modules.CONFIGURATION = configuration_autoload_direct
+ spec = spack.spec.Spec('mpich@3.0.4=x86-linux')
+ content = self.get_modulefile_content(spec)
+ self.assertTrue('module-whatis "mpich @3.0.4"' in content)
+
+ def test_autoload(self):
+ spack.modules.CONFIGURATION = configuration_autoload_direct
+ spec = spack.spec.Spec('mpileaks=x86-linux')
+ content = self.get_modulefile_content(spec)
+ self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2)
+ self.assertEqual(len([x for x in content if 'module load ' in x]), 2)
+
+ spack.modules.CONFIGURATION = configuration_autoload_all
+ spec = spack.spec.Spec('mpileaks=x86-linux')
+ content = self.get_modulefile_content(spec)
+ self.assertEqual(len([x for x in content if 'is-loaded' in x]), 5)
+ self.assertEqual(len([x for x in content if 'module load ' in x]), 5)
+
+ def test_alter_environment(self):
+ spack.modules.CONFIGURATION = configuration_alter_environment
+ spec = spack.spec.Spec('mpileaks=x86-linux')
+ content = self.get_modulefile_content(spec)
+ self.assertEqual(
+ len([x
+ for x in content
+ if x.startswith('prepend-path CMAKE_PREFIX_PATH')]), 0)
+ self.assertEqual(
+ len([x for x in content if 'setenv FOO "foo"' in x]), 1)
+ self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 1)
+
+ spec = spack.spec.Spec('libdwarf=x64-linux')
+ content = self.get_modulefile_content(spec)
+ self.assertEqual(
+ len([x
+ for x in content
+ if x.startswith('prepend-path CMAKE_PREFIX_PATH')]), 0)
+ self.assertEqual(
+ len([x for x in content if 'setenv FOO "foo"' in x]), 0)
+ self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 0)
+
+ def test_blacklist(self):
+ spack.modules.CONFIGURATION = configuration_blacklist
+ spec = spack.spec.Spec('mpileaks=x86-linux')
+ content = self.get_modulefile_content(spec)
+ self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1)
+ self.assertEqual(len([x for x in content if 'module load ' in x]), 1)
+
+ def test_conflicts(self):
+ spack.modules.CONFIGURATION = configuration_conflicts
+ spec = spack.spec.Spec('mpileaks=x86-linux')
+ content = self.get_modulefile_content(spec)
+ self.assertEqual(
+ len([x for x in content if x.startswith('conflict')]), 2)
+ self.assertEqual(
+ len([x for x in content if x == 'conflict mpileaks']), 1)
+ self.assertEqual(
+ len([x for x in content if x == 'conflict intel/14.0.1']), 1)
diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh
index 11a4c0a70c..dba6f1eff4 100755
--- a/share/spack/setup-env.sh
+++ b/share/spack/setup-env.sh
@@ -41,7 +41,7 @@
# commands. This allows the user to use packages without knowing all
# their installation details.
#
-# e.g., rather than requring a full spec for libelf, the user can type:
+# e.g., rather than requiring a full spec for libelf, the user can type:
#
# spack use libelf
#
@@ -113,11 +113,11 @@ function spack {
unuse $_sp_module_args $_sp_full_spec
fi ;;
"load")
- if _sp_full_spec=$(command spack $_sp_flags module find dotkit $_sp_spec); then
+ if _sp_full_spec=$(command spack $_sp_flags module find tcl $_sp_spec); then
module load $_sp_module_args $_sp_full_spec
fi ;;
"unload")
- if _sp_full_spec=$(command spack $_sp_flags module find dotkit $_sp_spec); then
+ if _sp_full_spec=$(command spack $_sp_flags module find tcl $_sp_spec); then
module unload $_sp_module_args $_sp_full_spec
fi ;;
esac