author     alalazo <massimiliano.culpo@googlemail.com>  2016-05-11 16:09:47 +0200
committer  alalazo <massimiliano.culpo@googlemail.com>  2016-05-11 16:09:47 +0200
commit     22bb0562fea525afb329d5710970785189d3af63 (patch)
tree       8e5152373ce2af0955851e1beec0a9e354d0a981
parent     b215b19cae1eaf665dfbec4e20c36497c57dd182 (diff)
Revert "flake8 : fixed all issues?"
This reverts commit 71e49e289a849b8aaa4f0d9a195d07569051ca88.
-rw-r--r--  lib/spack/spack/cmd/module.py     |  16
-rw-r--r--  lib/spack/spack/config.py         | 204
-rw-r--r--  lib/spack/spack/environment.py    |  51
-rw-r--r--  lib/spack/spack/modules.py        | 202
-rw-r--r--  lib/spack/spack/test/__init__.py  |  69
-rw-r--r--  lib/spack/spack/test/modules.py   |  42
6 files changed, 254 insertions, 330 deletions
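For orientation before the raw diff: the largest hunk (config.py) only re-wraps code around Spack's jsonschema-based section validation, including the extend_with_default helper that makes a validator fill in schema "default" values while it validates. The following is a minimal, self-contained sketch of that pattern, assuming only that the jsonschema package is installed; it mirrors the shape of Spack's extend_with_default but omits the patternProperties handling, and DefaultSettingValidator is reused here purely as an illustrative name.

    from jsonschema import Draft4Validator, validators


    def extend_with_default(validator_class):
        """Return a validator class that also fills in schema 'default' values."""
        validate_properties = validator_class.VALIDATORS["properties"]

        def set_defaults(validator, properties, instance, schema):
            # Insert any missing keys that declare a default, then validate as usual.
            for prop, subschema in properties.items():
                if "default" in subschema:
                    instance.setdefault(prop, subschema["default"])
            for error in validate_properties(validator, properties, instance, schema):
                yield error

        return validators.extend(validator_class, {"properties": set_defaults})


    DefaultSettingValidator = extend_with_default(Draft4Validator)

    schema = {
        "type": "object",
        "properties": {
            "repos": {"type": "array", "default": []},
        },
    }

    data = {}
    DefaultSettingValidator(schema).validate(data)
    print(data)  # {'repos': []} -- the default was filled in during validation

With this in mind, the config.py hunk below is easier to read as pure formatting churn: the schema dictionaries and the default-setting validator are identical in behavior on both sides of the revert.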
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py index cfe59c8d98..f996f4eb84 100644 --- a/lib/spack/spack/cmd/module.py +++ b/lib/spack/spack/cmd/module.py @@ -32,21 +32,18 @@ from llnl.util.filesystem import mkdirp from spack.modules import module_types from spack.util.string import * -description = "Manipulate modules and dotkits." +description ="Manipulate modules and dotkits." def setup_parser(subparser): sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command') - sp.add_parser('refresh', help='Regenerate all module files.') + refresh_parser = sp.add_parser('refresh', help='Regenerate all module files.') find_parser = sp.add_parser('find', help='Find module files for packages.') - find_parser.add_argument('module_type', - help="Type of module to find file for. [" + - '|'.join(module_types) + "]") - find_parser.add_argument('spec', - nargs='+', - help='spec to find a module file for.') + find_parser.add_argument( + 'module_type', help="Type of module to find file for. [" + '|'.join(module_types) + "]") + find_parser.add_argument('spec', nargs='+', help='spec to find a module file for.') def module_find(mtype, spec_array): @@ -56,8 +53,7 @@ def module_find(mtype, spec_array): should type to use that package's module. """ if mtype not in module_types: - tty.die("Invalid module type: '%s'. Options are %s" % - (mtype, comma_or(module_types))) + tty.die("Invalid module type: '%s'. Options are %s" % (mtype, comma_or(module_types))) specs = spack.cmd.parse_specs(spec_array) if len(specs) > 1: diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py index 684a420b3b..6ddf07776b 100644 --- a/lib/spack/spack/config.py +++ b/lib/spack/spack/config.py @@ -134,6 +134,8 @@ from yaml.error import MarkedYAMLError # Hacked yaml for configuration files preserves line numbers. 
import spack.util.spack_yaml as syaml + + """Dict from section names -> schema for that section.""" section_schemas = { 'compilers': { @@ -147,31 +149,25 @@ section_schemas = { 'default': {}, 'additionalProperties': False, 'patternProperties': { - r'\w[\w-]*': { # architecture + r'\w[\w-]*': { # architecture 'type': 'object', 'additionalProperties': False, 'patternProperties': { - r'\w[\w-]*@\w[\w-]*': { # compiler spec + r'\w[\w-]*@\w[\w-]*': { # compiler spec 'type': 'object', 'additionalProperties': False, 'required': ['cc', 'cxx', 'f77', 'fc'], 'properties': { - 'cc': {'anyOf': [{'type': 'string'}, - {'type': 'null'}]}, - 'cxx': {'anyOf': [{'type': 'string'}, - {'type': 'null'}]}, - 'f77': {'anyOf': [{'type': 'string'}, - {'type': 'null'}]}, - 'fc': {'anyOf': [{'type': 'string'}, - {'type': 'null'}]}, - }, - }, - }, - }, - }, - }, - }, - }, + 'cc': { 'anyOf': [ {'type' : 'string' }, + {'type' : 'null' }]}, + 'cxx': { 'anyOf': [ {'type' : 'string' }, + {'type' : 'null' }]}, + 'f77': { 'anyOf': [ {'type' : 'string' }, + {'type' : 'null' }]}, + 'fc': { 'anyOf': [ {'type' : 'string' }, + {'type' : 'null' }]}, + },},},},},},},}, + 'mirrors': { '$schema': 'http://json-schema.org/schema#', 'title': 'Spack mirror configuration file schema', @@ -184,12 +180,8 @@ section_schemas = { 'additionalProperties': False, 'patternProperties': { r'\w[\w-]*': { - 'type': 'string' - }, - }, - }, - }, - }, + 'type': 'string'},},},},}, + 'repos': { '$schema': 'http://json-schema.org/schema#', 'title': 'Spack repository configuration file schema', @@ -200,11 +192,8 @@ section_schemas = { 'type': 'array', 'default': [], 'items': { - 'type': 'string' - }, - }, - }, - }, + 'type': 'string'},},},}, + 'packages': { '$schema': 'http://json-schema.org/schema#', 'title': 'Spack package configuration file schema', @@ -216,48 +205,39 @@ section_schemas = { 'default': {}, 'additionalProperties': False, 'patternProperties': { - r'\w[\w-]*': { # package name + r'\w[\w-]*': { # package name 'type': 'object', 'default': {}, 'additionalProperties': False, 'properties': { 'version': { - 'type': 'array', - 'default': [], - 'items': {'anyOf': [{'type': 'string'}, - {'type': 'number'}]} - }, # version strings + 'type' : 'array', + 'default' : [], + 'items' : { 'anyOf' : [ { 'type' : 'string' }, + { 'type' : 'number'}]}}, #version strings 'compiler': { - 'type': 'array', - 'default': [], - 'items': {'type': 'string'} - }, # compiler specs + 'type' : 'array', + 'default' : [], + 'items' : { 'type' : 'string' } }, #compiler specs 'buildable': { - 'type': 'boolean', + 'type': 'boolean', 'default': True, - }, + }, 'providers': { - 'type': 'object', + 'type': 'object', 'default': {}, 'additionalProperties': False, 'patternProperties': { r'\w[\w-]*': { - 'type': 'array', - 'default': [], - 'items': {'type': 'string'}, - }, - }, - }, + 'type' : 'array', + 'default' : [], + 'items' : { 'type' : 'string' },},},}, 'paths': { - 'type': 'object', - 'default': {}, + 'type' : 'object', + 'default' : {}, } - }, - }, - }, - }, - }, - }, + },},},},},}, + 'modules': { '$schema': 'http://json-schema.org/schema#', 'title': 'Spack module file configuration file schema', @@ -303,22 +283,17 @@ section_schemas = { } }, 'autoload': {'$ref': '#/definitions/dependency_selection'}, - 'prerequisites': - {'$ref': '#/definitions/dependency_selection'}, + 'prerequisites': {'$ref': '#/definitions/dependency_selection'}, 'conflict': {'$ref': '#/definitions/array_of_strings'}, 'environment': { 'type': 'object', 'default': {}, 'additionalProperties': False, 
'properties': { - 'set': - {'$ref': '#/definitions/dictionary_of_strings'}, - 'unset': - {'$ref': '#/definitions/array_of_strings'}, - 'prepend_path': - {'$ref': '#/definitions/dictionary_of_strings'}, - 'append_path': - {'$ref': '#/definitions/dictionary_of_strings'} + 'set': {'$ref': '#/definitions/dictionary_of_strings'}, + 'unset': {'$ref': '#/definitions/array_of_strings'}, + 'prepend_path': {'$ref': '#/definitions/dictionary_of_strings'}, + 'append_path': {'$ref': '#/definitions/dictionary_of_strings'} } } } @@ -329,20 +304,15 @@ section_schemas = { 'anyOf': [ { 'properties': { - 'whitelist': - {'$ref': '#/definitions/array_of_strings'}, - 'blacklist': - {'$ref': '#/definitions/array_of_strings'}, + 'whitelist': {'$ref': '#/definitions/array_of_strings'}, + 'blacklist': {'$ref': '#/definitions/array_of_strings'}, 'naming_scheme': { - 'type': - 'string' # Can we be more specific here? + 'type': 'string' # Can we be more specific here? } } }, { - 'patternProperties': - {r'\w[\w-]*': - {'$ref': '#/definitions/module_file_configuration'}} + 'patternProperties': {r'\w[\w-]*': {'$ref': '#/definitions/module_file_configuration'}} } ] } @@ -356,8 +326,7 @@ section_schemas = { 'prefix_inspections': { 'type': 'object', 'patternProperties': { - r'\w[\w-]*': - { # path to be inspected (relative to prefix) + r'\w[\w-]*': { # path to be inspected for existence (relative to prefix) '$ref': '#/definitions/array_of_strings' } } @@ -372,15 +341,13 @@ section_schemas = { }, 'tcl': { 'allOf': [ - {'$ref': '#/definitions/module_type_configuration' - }, # Base configuration + {'$ref': '#/definitions/module_type_configuration'}, # Base configuration {} # Specific tcl extensions ] }, 'dotkit': { 'allOf': [ - {'$ref': '#/definitions/module_type_configuration' - }, # Base configuration + {'$ref': '#/definitions/module_type_configuration'}, # Base configuration {} # Specific dotkit extensions ] }, @@ -389,6 +356,7 @@ section_schemas = { }, }, } + """OrderedDict of config scopes keyed by name. Later scopes will override earlier scopes. """ @@ -398,13 +366,12 @@ config_scopes = OrderedDict() def validate_section_name(section): """Raise a ValueError if the section is not a valid section.""" if section not in section_schemas: - raise ValueError("Invalid config section: '%s'. Options are %s" % - (section, section_schemas)) + raise ValueError("Invalid config section: '%s'. Options are %s" + % (section, section_schemas)) def extend_with_default(validator_class): - """Add support for the 'default' attribute for - properties and patternProperties + """Add support for the 'default' attribute for properties and patternProperties. jsonschema does not handle this out of the box -- it only validates. 
This allows us to set default values for configs @@ -413,15 +380,13 @@ def extend_with_default(validator_class): """ validate_properties = validator_class.VALIDATORS["properties"] - validate_pattern_properties = validator_class.VALIDATORS[ - "patternProperties"] + validate_pattern_properties = validator_class.VALIDATORS["patternProperties"] def set_defaults(validator, properties, instance, schema): for property, subschema in properties.iteritems(): if "default" in subschema: instance.setdefault(property, subschema["default"]) - for err in validate_properties(validator, properties, instance, - schema): + for err in validate_properties(validator, properties, instance, schema): yield err def set_pp_defaults(validator, properties, instance, schema): @@ -432,19 +397,17 @@ def extend_with_default(validator_class): if re.match(property, key) and val is None: instance[key] = subschema["default"] - for err in validate_pattern_properties(validator, properties, instance, - schema): + for err in validate_pattern_properties(validator, properties, instance, schema): yield err return validators.extend(validator_class, { - "properties": set_defaults, - "patternProperties": set_pp_defaults + "properties" : set_defaults, + "patternProperties" : set_pp_defaults }) DefaultSettingValidator = extend_with_default(Draft4Validator) - def validate_section(data, schema): """Validate data read in from a Spack YAML file. @@ -466,9 +429,9 @@ class ConfigScope(object): """ def __init__(self, name, path): - self.name = name # scope name. - self.path = path # path to directory containing configs. - self.sections = {} # sections read from config files. + self.name = name # scope name. + self.path = path # path to directory containing configs. + self.sections = {} # sections read from config files. # Register in a dict of all ConfigScopes # TODO: make this cleaner. Mocking up for testing is brittle. @@ -479,14 +442,16 @@ class ConfigScope(object): validate_section_name(section) return os.path.join(self.path, "%s.yaml" % section) + def get_section(self, section): - if section not in self.sections: - path = self.get_section_filename(section) + if not section in self.sections: + path = self.get_section_filename(section) schema = section_schemas[section] - data = _read_config_file(path, schema) + data = _read_config_file(path, schema) self.sections[section] = data return self.sections[section] + def write_section(self, section): filename = self.get_section_filename(section) data = self.get_section(section) @@ -498,8 +463,8 @@ class ConfigScope(object): except jsonschema.ValidationError as e: raise ConfigSanityError(e, data) except (yaml.YAMLError, IOError) as e: - raise ConfigFileError("Error writing to config file: '%s'" % - str(e)) + raise ConfigFileError("Error writing to config file: '%s'" % str(e)) + def clear(self): """Empty cached config information.""" @@ -531,8 +496,8 @@ def validate_scope(scope): return config_scopes[scope] else: - raise ValueError("Invalid config scope: '%s'. Must be one of %s" % - (scope, config_scopes.keys())) + raise ValueError("Invalid config scope: '%s'. 
Must be one of %s" + % (scope, config_scopes.keys())) def _read_config_file(filename, schema): @@ -558,12 +523,12 @@ def _read_config_file(filename, schema): return data except MarkedYAMLError as e: - raise ConfigFileError("Error parsing yaml%s: %s" % - (str(e.context_mark), e.problem)) + raise ConfigFileError( + "Error parsing yaml%s: %s" % (str(e.context_mark), e.problem)) except IOError as e: - raise ConfigFileError("Error reading configuration file %s: %s" % - (filename, str(e))) + raise ConfigFileError( + "Error reading configuration file %s: %s" % (filename, str(e))) def clear_config_caches(): @@ -586,7 +551,6 @@ def _merge_yaml(dest, source): parent instead of merging. """ - def they_are(t): return isinstance(dest, t) and isinstance(source, t) @@ -607,7 +571,7 @@ def _merge_yaml(dest, source): # Source dict is merged into dest. elif they_are(dict): for sk, sv in source.iteritems(): - if sk not in dest: + if not sk in dest: dest[sk] = copy.copy(sv) else: dest[sk] = _merge_yaml(dest[sk], source[sk]) @@ -689,7 +653,7 @@ def print_section(section): data = syaml.syaml_dict() data[section] = get_config(section) syaml.dump(data, stream=sys.stdout, default_flow_style=False) - except (yaml.YAMLError, IOError): + except (yaml.YAMLError, IOError) as e: raise ConfigError("Error reading configuration: %s" % section) @@ -719,22 +683,15 @@ def is_spec_buildable(spec): """Return true if the spec pkgspec is configured as buildable""" allpkgs = get_config('packages') name = spec.name - if name not in allpkgs: + if not spec.name in allpkgs: return True - if 'buildable' not in allpkgs[name]: + if not 'buildable' in allpkgs[spec.name]: return True return allpkgs[spec.name]['buildable'] -class ConfigError(SpackError): - - pass - - -class ConfigFileError(ConfigError): - - pass - +class ConfigError(SpackError): pass +class ConfigFileError(ConfigError): pass def get_path(path, data): if path: @@ -742,10 +699,8 @@ def get_path(path, data): else: return data - class ConfigFormatError(ConfigError): """Raised when a configuration format does not match its schema.""" - def __init__(self, validation_error, data): # Try to get line number from erroneous instance and its parent instance_mark = getattr(validation_error.instance, '_start_mark', None) @@ -778,6 +733,5 @@ class ConfigFormatError(ConfigError): message = '%s: %s' % (location, validation_error.message) super(ConfigError, self).__init__(message) - class ConfigSanityError(ConfigFormatError): """Same as ConfigFormatError, raised when config is written by Spack.""" diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index 9748b4033a..92ab4e6bea 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -26,8 +26,7 @@ class SetEnv(NameValueModifier): class UnsetEnv(NameModifier): def execute(self): - # Avoid throwing if the variable was not set - os.environ.pop(self.name, None) + os.environ.pop(self.name, None) # Avoid throwing if the variable was not set class SetPath(NameValueModifier): @@ -56,9 +55,7 @@ class RemovePath(NameValueModifier): def execute(self): environment_value = os.environ.get(self.name, '') directories = environment_value.split(':') if environment_value else [] - directories = [os.path.normpath(x) - for x in directories - if x != os.path.normpath(self.value)] + directories = [os.path.normpath(x) for x in directories if x != os.path.normpath(self.value)] os.environ[self.name] = ':'.join(directories) @@ -66,8 +63,7 @@ class EnvironmentModifications(object): """ Keeps track of requests to 
modify the current environment. - Each call to a method to modify the environment stores the extra - information on the caller in the request: + Each call to a method to modify the environment stores the extra information on the caller in the request: - 'filename' : filename of the module where the caller is defined - 'lineno': line number where the request occurred - 'context' : line of code that issued the request that failed @@ -75,10 +71,10 @@ class EnvironmentModifications(object): def __init__(self, other=None): """ - Initializes a new instance, copying commands from other if not None + Initializes a new instance, copying commands from other if it is not None Args: - other: another instance of EnvironmentModifications + other: another instance of EnvironmentModifications from which (optional) """ self.env_modifications = [] if other is not None: @@ -97,7 +93,7 @@ class EnvironmentModifications(object): @staticmethod def _check_other(other): if not isinstance(other, EnvironmentModifications): - raise TypeError('not an instance of EnvironmentModifications') + raise TypeError('other must be an instance of EnvironmentModifications') def _get_outside_caller_attributes(self): stack = inspect.stack() @@ -105,10 +101,12 @@ class EnvironmentModifications(object): _, filename, lineno, _, context, index = stack[2] context = context[index].strip() except Exception: - filename = 'unknown file' - lineno = 'unknown line' - context = 'unknown context' - args = {'filename': filename, 'lineno': lineno, 'context': context} + filename, lineno, context = 'unknown file', 'unknown line', 'unknown context' + args = { + 'filename': filename, + 'lineno': lineno, + 'context': context + } return args def set(self, name, value, **kwargs): @@ -172,7 +170,7 @@ class EnvironmentModifications(object): def remove_path(self, name, path, **kwargs): """ - Stores in the current object a request to remove a path from a list + Stores in the current object a request to remove a path from a path list Args: name: name of the path list in the environment @@ -187,8 +185,7 @@ class EnvironmentModifications(object): Returns a dict of the modifications grouped by variable name Returns: - dict mapping the environment variable name to the modifications - to be done on it + dict mapping the environment variable name to the modifications to be done on it """ modifications = collections.defaultdict(list) for item in self: @@ -206,8 +203,7 @@ class EnvironmentModifications(object): Applies the modifications and clears the list """ modifications = self.group_by_name() - # Apply the modifications to the environment variables one variable - # at a time + # Apply the modifications to the environment variables one variable at a time for name, actions in sorted(modifications.items()): for x in actions: x.execute() @@ -228,17 +224,13 @@ def concatenate_paths(paths): def set_or_unset_not_first(variable, changes, errstream): """ - Check if we are going to set or unset something after other modifications - have already been requested + Check if we are going to set or unset something after other modifications have already been requested """ - indexes = [ii - for ii, item in enumerate(changes) - if ii != 0 and type(item) in [SetEnv, UnsetEnv]] + indexes = [ii for ii, item in enumerate(changes) if ii != 0 and type(item) in [SetEnv, UnsetEnv]] if indexes: good = '\t \t{context} at {filename}:{lineno}' nogood = '\t--->\t{context} at {filename}:{lineno}' - message = 'Suspicious requests to set or unset the variable \'{var}\' found' # NOQA: ignore=E501 
- errstream(message.format(var=variable)) + errstream('Suspicious requests to set or unset the variable \'{var}\' found'.format(var=variable)) for ii, item in enumerate(changes): print_format = nogood if ii in indexes else good errstream(print_format.format(**item.args)) @@ -246,8 +238,8 @@ def set_or_unset_not_first(variable, changes, errstream): def validate(env, errstream): """ - Validates the environment modifications to check for the presence of - suspicious patterns. Prompts a warning for everything that was found + Validates the environment modifications to check for the presence of suspicious patterns. Prompts a warning for + everything that was found Current checks: - set or unset variables after other changes on the same variable @@ -262,8 +254,7 @@ def validate(env, errstream): def filter_environment_blacklist(env, variables): """ - Generator that filters out any change to environment variables present in - the input list + Generator that filters out any change to environment variables present in the input list Args: env: list of environment modifications diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py index 0dc6f06f55..ffed469b20 100644 --- a/lib/spack/spack/modules.py +++ b/lib/spack/spack/modules.py @@ -23,34 +23,36 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## """ -This module contains code for creating environment modules, which can include -dotkits, tcl modules, lmod, and others. +This module contains code for creating environment modules, which can include dotkits, tcl modules, lmod, and others. -The various types of modules are installed by post-install hooks and removed -after an uninstall by post-uninstall hooks. This class consolidates the logic -for creating an abstract description of the information that module systems -need. +The various types of modules are installed by post-install hooks and removed after an uninstall by post-uninstall hooks. +This class consolidates the logic for creating an abstract description of the information that module systems need. +Currently that includes a number of directories to be appended to paths in the user's environment: -This module also includes logic for coming up with unique names for the module -files so that they can be found by the various shell-support files in -$SPACK/share/spack/setup-env.*. + * /bin directories to be appended to PATH + * /lib* directories for LD_LIBRARY_PATH + * /include directories for CPATH + * /man* and /share/man* directories for MANPATH + * the package prefix for CMAKE_PREFIX_PATH -Each hook implements the logic for writing its specific type of module. +This module also includes logic for coming up with unique names for the module files so that they can be found by the +various shell-support files in $SPACK/share/spack/setup-env.*. + +Each hook in hooks/ implements the logic for writing its specific type of module file. 
""" import copy import datetime import os import os.path import re -import string import textwrap +import string import llnl.util.tty as tty import spack import spack.config from llnl.util.filesystem import join_path, mkdirp -from spack.build_environment import parent_class_modules -from spack.build_environment import set_module_variables_for_package +from spack.build_environment import parent_class_modules, set_module_variables_for_package from spack.environment import * __all__ = ['EnvModule', 'Dotkit', 'TclModule'] @@ -65,26 +67,30 @@ def print_help(): """ For use by commands to tell user how to activate shell support. """ - tty.msg("This command requires spack's shell integration.", "", + tty.msg("This command requires spack's shell integration.", + "", "To initialize spack's shell commands, you must run one of", "the commands below. Choose the right command for your shell.", - "", "For bash and zsh:", - " . %s/setup-env.sh" % spack.share_path, "", - "For csh and tcsh:", " setenv SPACK_ROOT %s" % spack.prefix, - " source %s/setup-env.csh" % spack.share_path, "") + "", + "For bash and zsh:", + " . %s/setup-env.sh" % spack.share_path, + "", + "For csh and tcsh:", + " setenv SPACK_ROOT %s" % spack.prefix, + " source %s/setup-env.csh" % spack.share_path, + "") def inspect_path(prefix): """ - Inspects the prefix of an installation to search for common layouts. - Issues a request to modify the environment when an item is found. + Inspects the prefix of an installation to search for common layouts. Issues a request to modify the environment + accordingly when an item is found. Args: prefix: prefix of the installation Returns: - instance of EnvironmentModifications containing the requested - modifications + instance of EnvironmentModifications containing the requested modifications """ env = EnvironmentModifications() # Inspect the prefix to check for the existence of common directories @@ -99,22 +105,18 @@ def inspect_path(prefix): def dependencies(spec, request='all'): """ - Returns the list of dependent specs for a given spec, according to the - given request + Returns the list of dependent specs for a given spec, according to the given request Args: spec: target spec request: either 'none', 'direct' or 'all' Returns: - empty list if 'none', direct dependency list if 'direct', all - dependencies if 'all' + empty list if 'none', direct dependency list if 'direct', all dependencies if 'all' """ if request not in ('none', 'direct', 'all'): - message = "Wrong value for argument 'request' : " - message += "should be one of ('none', 'direct', 'all')" - message += " [current value is '{0}']" - raise tty.error(message.format(request)) + raise tty.error("Wrong value for argument 'request' : should be one of ('none', 'direct', 'all') " + " [current value is '%s']" % request) if request == 'none': return [] @@ -122,19 +124,12 @@ def dependencies(spec, request='all'): if request == 'direct': return [xx for _, xx in spec.dependencies.items()] - # FIXME : during module file creation nodes seem to be visited - # FIXME : multiple times even if cover='nodes' is given. This work around - # FIXME : permits to get a unique list of spec anyhow. Maybe we miss a - # FIXME : merge step among nodes that refer to the same package? + # FIXME : during module file creation nodes seem to be visited multiple times even if cover='nodes' + # FIXME : is given. This work around permits to get a unique list of spec anyhow. + # FIXME : Possibly we miss a merge step among nodes that refer to the same package. 
seen = set() seen_add = seen.add - l = [xx - for xx in sorted( - spec.traverse(order='post', - depth=True, - cover='nodes', - root=False), - reverse=True)] + l = [xx for xx in sorted(spec.traverse(order='post', depth=True, cover='nodes', root=False), reverse=True)] return [xx for ii, xx in l if not (xx in seen or seen_add(xx))] @@ -151,8 +146,7 @@ def update_dictionary_extending_lists(target, update): def parse_config_options(module_generator): """ - Parse the configuration file and returns a bunch of items that will be - needed during module file generation + Parse the configuration file and returns a bunch of items that will be needed during module file generation Args: module_generator: module generator for a given spec @@ -160,14 +154,11 @@ def parse_config_options(module_generator): Returns: autoloads: list of specs to be autoloaded prerequisites: list of specs to be marked as prerequisite - filters: list of environment variables whose modification is - blacklisted in module files - env: list of custom environment modifications to be applied in the - module file + filters: list of environment variables whose modification is blacklisted in module files + env: list of custom environment modifications to be applied in the module file """ # Get the configuration for this kind of generator - module_configuration = copy.deepcopy(CONFIGURATION.get( - module_generator.name, {})) + module_configuration = copy.deepcopy(CONFIGURATION.get(module_generator.name, {})) ##### # Merge all the rules @@ -188,12 +179,9 @@ def parse_config_options(module_generator): ##### # Automatic loading loads - module_file_actions['autoload'] = dependencies( - module_generator.spec, module_file_actions.get('autoload', 'none')) + module_file_actions['autoload'] = dependencies(module_generator.spec, module_file_actions.get('autoload', 'none')) # Prerequisites - module_file_actions['prerequisites'] = dependencies( - module_generator.spec, module_file_actions.get('prerequisites', - 'none')) + module_file_actions['prerequisites'] = dependencies(module_generator.spec, module_file_actions.get('prerequisites', 'none')) # Environment modifications environment_actions = module_file_actions.pop('environment', {}) env = EnvironmentModifications() @@ -201,7 +189,7 @@ def parse_config_options(module_generator): def process_arglist(arglist): if method == 'unset': for x in arglist: - yield (x, ) + yield (x,) else: for x in arglist.iteritems(): yield x @@ -210,20 +198,19 @@ def parse_config_options(module_generator): for args in process_arglist(arglist): getattr(env, method)(*args) - # for item in arglist: - # if method == 'unset': - # args = [item] - # else: - # args = item.split(',') - # getattr(env, method)(*args) + # for item in arglist: + # if method == 'unset': + # args = [item] + # else: + # args = item.split(',') + # getattr(env, method)(*args) return module_file_actions, env def filter_blacklisted(specs, module_name): """ - Given a sequence of specs, filters the ones that are blacklisted in the - module configuration file. + Given a sequence of specs, filters the ones that are blacklisted in the module configuration file. 
Args: specs: sequence of spec instances @@ -246,8 +233,7 @@ class EnvModule(object): class __metaclass__(type): def __init__(cls, name, bases, dict): type.__init__(cls, name, bases, dict) - if cls.name != 'env_module' and cls.name in CONFIGURATION[ - 'enable']: + if cls.name != 'env_module' and cls.name in CONFIGURATION['enable']: module_types[cls.name] = cls def __init__(self, spec=None): @@ -263,8 +249,7 @@ class EnvModule(object): # long description is the docstring with reduced whitespace. self.long_description = None if self.spec.package.__doc__: - self.long_description = re.sub(r'\s+', ' ', - self.spec.package.__doc__) + self.long_description = re.sub(r'\s+', ' ', self.spec.package.__doc__) @property def naming_scheme(self): @@ -286,14 +271,12 @@ class EnvModule(object): @property def use_name(self): """ - Subclasses should implement this to return the name the module command - uses to refer to the package. + Subclasses should implement this to return the name the module command uses to refer to the package. """ naming_tokens = self.tokens naming_scheme = self.naming_scheme name = naming_scheme.format(**naming_tokens) - name += '-' + self.spec.dag_hash( - ) # Always append the hash to make the module file unique + name += '-' + self.spec.dag_hash() # Always append the hash to make the module file unique # Not everybody is working on linux... parts = name.split('/') name = join_path(*parts) @@ -313,12 +296,8 @@ class EnvModule(object): @property def blacklisted(self): configuration = CONFIGURATION.get(self.name, {}) - whitelist_matches = [x - for x in configuration.get('whitelist', []) - if self.spec.satisfies(x)] - blacklist_matches = [x - for x in configuration.get('blacklist', []) - if self.spec.satisfies(x)] + whitelist_matches = [x for x in configuration.get('whitelist', []) if self.spec.satisfies(x)] + blacklist_matches = [x for x in configuration.get('blacklist', []) if self.spec.satisfies(x)] if whitelist_matches: message = '\tWHITELIST : %s [matches : ' % self.spec.cshort_spec for rule in whitelist_matches: @@ -348,8 +327,7 @@ class EnvModule(object): """ if self.blacklisted: return - tty.debug("\tWRITE : %s [%s]" % - (self.spec.cshort_spec, self.file_name)) + tty.debug("\tWRITE : %s [%s]" % (self.spec.cshort_spec, self.file_name)) module_dir = os.path.dirname(self.file_name) if not os.path.exists(module_dir): @@ -359,12 +337,11 @@ class EnvModule(object): # installation prefix env = inspect_path(self.spec.prefix) - # Let the extendee/dependency modify their extensions/dependencies - # before asking for package-specific modifications + # Let the extendee/dependency modify their extensions/dependencies before asking for + # package-specific modifications spack_env = EnvironmentModifications() - # TODO : the code down below is quite similar to - # TODO : build_environment.setup_package and needs to be factored out - # TODO : to a single place + # TODO : the code down below is quite similar to build_environment.setup_package and needs to be + # TODO : factored out to a single place for item in dependencies(self.spec, 'all'): package = self.spec[item.name].package modules = parent_class_modules(package.__class__) @@ -381,18 +358,14 @@ class EnvModule(object): # Parse configuration file module_configuration, conf_env = parse_config_options(self) env.extend(conf_env) - filters = module_configuration.get('filter', {}).get( - 'environment_blacklist', {}) + filters = module_configuration.get('filter', {}).get('environment_blacklist',{}) # Build up the module file content 
module_file_content = self.header - for x in filter_blacklisted( - module_configuration.pop('autoload', []), self.name): + for x in filter_blacklisted(module_configuration.pop('autoload', []), self.name): module_file_content += self.autoload(x) - for x in filter_blacklisted( - module_configuration.pop('prerequisites', []), self.name): + for x in filter_blacklisted(module_configuration.pop('prerequisites', []), self.name): module_file_content += self.prerequisite(x) - for line in self.process_environment_command( - filter_environment_blacklist(env, filters)): + for line in self.process_environment_command(filter_environment_blacklist(env, filters)): module_file_content += line for line in self.module_specific_content(module_configuration): module_file_content += line @@ -419,13 +392,10 @@ class EnvModule(object): def process_environment_command(self, env): for command in env: try: - yield self.environment_modifications_formats[type( - command)].format(**command.args) + yield self.environment_modifications_formats[type(command)].format(**command.args) except KeyError: - message = 'Cannot handle command of type {command} : skipping request' # NOQA: ignore=E501 - tty.warn(message.format(command=type(command))) - context = '{context} at {filename}:{lineno}' - tty.warn(context.format(**command.args)) + tty.warn('Cannot handle command of type {command} : skipping request'.format(command=type(command))) + tty.warn('{context} at {filename}:{lineno}'.format(**command.args)) @property def file_name(self): @@ -438,12 +408,9 @@ class EnvModule(object): if os.path.exists(mod_file): try: os.remove(mod_file) # Remove the module file - os.removedirs( - os.path.dirname(mod_file) - ) # Remove all the empty directories from the leaf up + os.removedirs(os.path.dirname(mod_file)) # Remove all the empty directories from the leaf up except OSError: - # removedirs throws OSError on first non-empty directory found - pass + pass # removedirs throws OSError on first non-empty directory found class Dotkit(EnvModule): @@ -457,12 +424,13 @@ class Dotkit(EnvModule): autoload_format = 'dk_op {module_file}\n' - default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501 + prerequisite_format = None # TODO : does something like prerequisite exist for dotkit? 
+ + default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' @property def file_name(self): - return join_path(Dotkit.path, self.spec.architecture, - '%s.dk' % self.use_name) + return join_path(Dotkit.path, self.spec.architecture, '%s.dk' % self.use_name) @property def header(self): @@ -506,7 +474,7 @@ class TclModule(EnvModule): prerequisite_format = 'prereq {module_file}\n' - default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501 + default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' @property def file_name(self): @@ -514,10 +482,9 @@ class TclModule(EnvModule): @property def header(self): - timestamp = datetime.datetime.now() # TCL Modulefile header header = '#%Module1.0\n' - header += '## Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp # NOQA: ignore=E501 + header += '## Module file created by spack (https://github.com/LLNL/spack) on %s\n' % datetime.datetime.now() header += '##\n' header += '## %s\n' % self.spec.short_spec header += '##\n' @@ -542,19 +509,16 @@ class TclModule(EnvModule): f = string.Formatter() for item in conflict_format: line = 'conflict ' + item + '\n' - if len([x for x in f.parse(line) - ]) > 1: # We do have placeholder to substitute - for naming_dir, conflict_dir in zip( - self.naming_scheme.split('/'), item.split('/')): + if len([x for x in f.parse(line)]) > 1: # We do have placeholder to substitute + for naming_dir, conflict_dir in zip(self.naming_scheme.split('/'), item.split('/')): if naming_dir != conflict_dir: - message = 'conflict scheme does not match naming' - message += ' [{spec}]\n\n' + message = 'conflict scheme does not match naming scheme [{spec}]\n\n' message += 'naming scheme : "{nformat}"\n' message += 'conflict scheme : "{cformat}"\n\n' - message += '** You may want to check your `modules.yaml` configuration file **\n' # NOQA: ignore=E501 - tty.error(message.format(spec=self.spec, - nformat=self.naming_scheme, - cformat=item)) + message += '** You may want to check your `modules.yaml` configuration file **\n' + tty.error( + message.format(spec=self.spec, nformat=self.naming_scheme, cformat=item) + ) raise SystemExit('Module generation aborted.') line = line.format(**naming_tokens) yield line diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py index 395ca0c87a..05f58ab7b1 100644 --- a/lib/spack/spack/test/__init__.py +++ b/lib/spack/spack/test/__init__.py @@ -23,23 +23,53 @@ # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import sys - -import llnl.util.tty as tty +import unittest import nose -import spack + +from spack.test.tally_plugin import Tally from llnl.util.filesystem import join_path +import llnl.util.tty as tty from llnl.util.tty.colify import colify -from spack.test.tally_plugin import Tally + +import spack + """Names of tests to be included in Spack's test suite""" -test_names = ['versions', 'url_parse', 'url_substitution', 'packages', 'stage', - 'spec_syntax', 'spec_semantics', 'spec_dag', 'concretize', - 'multimethod', 'install', 'package_sanity', 'config', - 'directory_layout', 'pattern', 'python_version', 'git_fetch', - 'svn_fetch', 'hg_fetch', 'mirror', 'modules', 'url_extrapolate', - 'cc', 'link_tree', 'spec_yaml', 'optional_deps', - 'make_executable', 'configure_guess', 'lock', 'database', - 'namespace_trie', 'yaml', 'sbang', 'environment', - 'cmd.uninstall', 'cmd.test_install'] 
+test_names = ['versions', + 'url_parse', + 'url_substitution', + 'packages', + 'stage', + 'spec_syntax', + 'spec_semantics', + 'spec_dag', + 'concretize', + 'multimethod', + 'install', + 'package_sanity', + 'config', + 'directory_layout', + 'pattern', + 'python_version', + 'git_fetch', + 'svn_fetch', + 'hg_fetch', + 'mirror', + 'modules', + 'url_extrapolate', + 'cc', + 'link_tree', + 'spec_yaml', + 'optional_deps', + 'make_executable', + 'configure_guess', + 'lock', + 'database', + 'namespace_trie', + 'yaml', + 'sbang', + 'environment', + 'cmd.uninstall', + 'cmd.test_install'] def list_tests(): @@ -50,6 +80,7 @@ def list_tests(): def run(names, outputDir, verbose=False): """Run tests with the supplied names. Names should be a list. If it's empty, run ALL of Spack's tests.""" + verbosity = 1 if not verbose else 2 if not names: names = test_names @@ -64,7 +95,7 @@ def run(names, outputDir, verbose=False): tally = Tally() for test in names: module = 'spack.test.' + test - print(module) + print module tty.msg("Running test: %s" % test) @@ -74,13 +105,15 @@ def run(names, outputDir, verbose=False): xmlOutputFname = "unittests-{0}.xml".format(test) xmlOutputPath = join_path(outputDir, xmlOutputFname) runOpts += ["--with-xunit", - "--xunit-file={0}".format(xmlOutputPath)] + "--xunit-file={0}".format(xmlOutputPath)] argv = [""] + runOpts + [module] - nose.run(argv=argv, addplugins=[tally]) + result = nose.run(argv=argv, addplugins=[tally]) succeeded = not tally.failCount and not tally.errorCount - tty.msg("Tests Complete.", "%5d tests run" % tally.numberOfTestsRun, - "%5d failures" % tally.failCount, "%5d errors" % tally.errorCount) + tty.msg("Tests Complete.", + "%5d tests run" % tally.numberOfTestsRun, + "%5d failures" % tally.failCount, + "%5d errors" % tally.errorCount) if succeeded: tty.info("OK", format='g') diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py index f0ff778a10..b8b0d6fc6a 100644 --- a/lib/spack/spack/test/modules.py +++ b/lib/spack/spack/test/modules.py @@ -2,18 +2,14 @@ import collections from contextlib import contextmanager import StringIO -import spack.modules -from spack.test.mock_packages_test import MockPackagesTest FILE_REGISTRY = collections.defaultdict(StringIO.StringIO) - # Monkey-patch open to write module files to a StringIO instance @contextmanager def mock_open(filename, mode): if not mode == 'w': - message = 'test.modules : unexpected opening mode [mock_open]' - raise RuntimeError(message) + raise RuntimeError('test.modules : unexpected opening mode for monkey-patched open') FILE_REGISTRY[filename] = StringIO.StringIO() @@ -24,6 +20,7 @@ def mock_open(filename, mode): FILE_REGISTRY[filename] = handle.getvalue() handle.close() +import spack.modules configuration_autoload_direct = { 'enable': ['tcl'], @@ -50,8 +47,7 @@ configuration_alter_environment = { 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']} }, '=x86-linux': { - 'environment': {'set': {'FOO': 'foo'}, - 'unset': ['BAR']} + 'environment': {'set': {'FOO': 'foo'}, 'unset': ['BAR']} } } } @@ -76,14 +72,15 @@ configuration_conflicts = { } } +from spack.test.mock_packages_test import MockPackagesTest + class TclTests(MockPackagesTest): def setUp(self): super(TclTests, self).setUp() self.configuration_obj = spack.modules.CONFIGURATION spack.modules.open = mock_open - # Make sure that a non-mocked configuration will trigger an error - spack.modules.CONFIGURATION = None + spack.modules.CONFIGURATION = None # Make sure that a non-mocked configuration will trigger an error 
def tearDown(self): del spack.modules.open @@ -101,7 +98,7 @@ class TclTests(MockPackagesTest): spack.modules.CONFIGURATION = configuration_autoload_direct spec = spack.spec.Spec('mpich@3.0.4=x86-linux') content = self.get_modulefile_content(spec) - self.assertTrue('module-whatis "mpich @3.0.4"' in content) + self.assertTrue('module-whatis "mpich @3.0.4"' in content ) def test_autoload(self): spack.modules.CONFIGURATION = configuration_autoload_direct @@ -120,22 +117,14 @@ class TclTests(MockPackagesTest): spack.modules.CONFIGURATION = configuration_alter_environment spec = spack.spec.Spec('mpileaks=x86-linux') content = self.get_modulefile_content(spec) - self.assertEqual( - len([x - for x in content - if x.startswith('prepend-path CMAKE_PREFIX_PATH')]), 0) - self.assertEqual( - len([x for x in content if 'setenv FOO "foo"' in x]), 1) + self.assertEqual(len([x for x in content if x.startswith('prepend-path CMAKE_PREFIX_PATH')]), 0) + self.assertEqual(len([x for x in content if 'setenv FOO "foo"' in x]), 1) self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 1) spec = spack.spec.Spec('libdwarf=x64-linux') content = self.get_modulefile_content(spec) - self.assertEqual( - len([x - for x in content - if x.startswith('prepend-path CMAKE_PREFIX_PATH')]), 0) - self.assertEqual( - len([x for x in content if 'setenv FOO "foo"' in x]), 0) + self.assertEqual(len([x for x in content if x.startswith('prepend-path CMAKE_PREFIX_PATH')]), 0) + self.assertEqual(len([x for x in content if 'setenv FOO "foo"' in x]), 0) self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 0) def test_blacklist(self): @@ -149,9 +138,6 @@ class TclTests(MockPackagesTest): spack.modules.CONFIGURATION = configuration_conflicts spec = spack.spec.Spec('mpileaks=x86-linux') content = self.get_modulefile_content(spec) - self.assertEqual( - len([x for x in content if x.startswith('conflict')]), 2) - self.assertEqual( - len([x for x in content if x == 'conflict mpileaks']), 1) - self.assertEqual( - len([x for x in content if x == 'conflict intel/14.0.1']), 1) + self.assertEqual(len([x for x in content if x.startswith('conflict')]), 2) + self.assertEqual(len([x for x in content if x == 'conflict mpileaks']), 1) + self.assertEqual(len([x for x in content if x == 'conflict intel/14.0.1']), 1) |
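The environment.py hunk above likewise only re-wraps Spack's EnvironmentModifications class, which records requests to change the environment (set, unset, prepend_path, append_path, remove_path), groups them per variable, and applies them later. The record-then-apply idea is sketched below in a deliberately simplified form; EnvChanges is a hypothetical name, only two operations are supported, and none of Spack's caller tracking or validation is reproduced.

    import collections
    import os


    class EnvChanges(object):  # hypothetical name, not Spack's class
        def __init__(self):
            self.requests = []  # list of (action, name, value) tuples

        def set(self, name, value):
            self.requests.append(('set', name, value))

        def prepend_path(self, name, path):
            self.requests.append(('prepend_path', name, path))

        def group_by_name(self):
            # Group the requested changes per environment variable, preserving order.
            grouped = collections.defaultdict(list)
            for action, name, value in self.requests:
                grouped[name].append((action, value))
            return grouped

        def apply_modifications(self):
            # Apply the recorded changes to os.environ, one variable at a time.
            for name, actions in sorted(self.group_by_name().items()):
                for action, value in actions:
                    if action == 'set':
                        os.environ[name] = value
                    elif action == 'prepend_path':
                        current = os.environ.get(name, '')
                        parts = [value] + ([current] if current else [])
                        os.environ[name] = ':'.join(parts)


    changes = EnvChanges()
    changes.set('FOO', 'foo')
    changes.prepend_path('PATH', '/opt/tool/bin')
    changes.apply_modifications()

Deferring the changes this way is what lets the modules code above turn the same request list either into live environment updates or into lines of a generated module file.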