flake8 : fixed all issues?

alalazo 2016-05-10 17:19:22 +02:00
parent c3f3f26632
commit 71e49e289a
6 changed files with 330 additions and 254 deletions

View File

@ -38,12 +38,15 @@
def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command')
-   refresh_parser = sp.add_parser('refresh', help='Regenerate all module files.')
+   sp.add_parser('refresh', help='Regenerate all module files.')
    find_parser = sp.add_parser('find', help='Find module files for packages.')
-   find_parser.add_argument(
-       'module_type', help="Type of module to find file for. [" + '|'.join(module_types) + "]")
-   find_parser.add_argument('spec', nargs='+', help='spec to find a module file for.')
+   find_parser.add_argument('module_type',
+                            help="Type of module to find file for. [" +
+                            '|'.join(module_types) + "]")
+   find_parser.add_argument('spec',
+                            nargs='+',
+                            help='spec to find a module file for.')

def module_find(mtype, spec_array):

@ -53,7 +56,8 @@ def module_find(mtype, spec_array):
    should type to use that package's module.
    """
    if mtype not in module_types:
-       tty.die("Invalid module type: '%s'. Options are %s" % (mtype, comma_or(module_types)))
+       tty.die("Invalid module type: '%s'. Options are %s" %
+               (mtype, comma_or(module_types)))
    specs = spack.cmd.parse_specs(spec_array)
    if len(specs) > 1:
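
For reference, the argparse wiring being rewrapped above boils down to the following self-contained sketch; module_types is stubbed with hypothetical values here, while the real mapping lives in spack.modules.

    import argparse

    # Hypothetical stand-in for spack.modules.module_types.
    module_types = {'tcl': None, 'dotkit': None}

    parser = argparse.ArgumentParser(prog='spack module')
    sp = parser.add_subparsers(metavar='SUBCOMMAND', dest='module_command')
    sp.add_parser('refresh', help='Regenerate all module files.')
    find_parser = sp.add_parser('find', help='Find module files for packages.')
    find_parser.add_argument('module_type',
                             help="Type of module to find file for. [" +
                             '|'.join(module_types) + "]")
    find_parser.add_argument('spec',
                             nargs='+',
                             help='spec to find a module file for.')

    args = parser.parse_args(['find', 'tcl', 'mpileaks'])
    print(args)  # Namespace(module_command='find', module_type='tcl', spec=['mpileaks'])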

View File

@ -134,8 +134,6 @@
# Hacked yaml for configuration files preserves line numbers. # Hacked yaml for configuration files preserves line numbers.
import spack.util.spack_yaml as syaml import spack.util.spack_yaml as syaml
"""Dict from section names -> schema for that section.""" """Dict from section names -> schema for that section."""
section_schemas = { section_schemas = {
'compilers': { 'compilers': {
@ -166,8 +164,14 @@
{'type': 'null'}]}, {'type': 'null'}]},
'fc': {'anyOf': [{'type': 'string'}, 'fc': {'anyOf': [{'type': 'string'},
{'type': 'null'}]}, {'type': 'null'}]},
},},},},},},},}, },
},
},
},
},
},
},
},
'mirrors': { 'mirrors': {
'$schema': 'http://json-schema.org/schema#', '$schema': 'http://json-schema.org/schema#',
'title': 'Spack mirror configuration file schema', 'title': 'Spack mirror configuration file schema',
@ -180,8 +184,12 @@
'additionalProperties': False, 'additionalProperties': False,
'patternProperties': { 'patternProperties': {
r'\w[\w-]*': { r'\w[\w-]*': {
'type': 'string'},},},},}, 'type': 'string'
},
},
},
},
},
'repos': { 'repos': {
'$schema': 'http://json-schema.org/schema#', '$schema': 'http://json-schema.org/schema#',
'title': 'Spack repository configuration file schema', 'title': 'Spack repository configuration file schema',
@ -192,8 +200,11 @@
'type': 'array', 'type': 'array',
'default': [], 'default': [],
'items': { 'items': {
'type': 'string'},},},}, 'type': 'string'
},
},
},
},
'packages': { 'packages': {
'$schema': 'http://json-schema.org/schema#', '$schema': 'http://json-schema.org/schema#',
'title': 'Spack package configuration file schema', 'title': 'Spack package configuration file schema',
@ -214,11 +225,13 @@
'type': 'array', 'type': 'array',
'default': [], 'default': [],
'items': {'anyOf': [{'type': 'string'}, 'items': {'anyOf': [{'type': 'string'},
{ 'type' : 'number'}]}}, #version strings {'type': 'number'}]}
}, # version strings
'compiler': { 'compiler': {
'type': 'array', 'type': 'array',
'default': [], 'default': [],
'items' : { 'type' : 'string' } }, #compiler specs 'items': {'type': 'string'}
}, # compiler specs
'buildable': { 'buildable': {
'type': 'boolean', 'type': 'boolean',
'default': True, 'default': True,
@ -231,13 +244,20 @@
r'\w[\w-]*': { r'\w[\w-]*': {
'type': 'array', 'type': 'array',
'default': [], 'default': [],
'items' : { 'type' : 'string' },},},}, 'items': {'type': 'string'},
},
},
},
'paths': { 'paths': {
'type': 'object', 'type': 'object',
'default': {}, 'default': {},
} }
},},},},},}, },
},
},
},
},
},
'modules': { 'modules': {
'$schema': 'http://json-schema.org/schema#', '$schema': 'http://json-schema.org/schema#',
'title': 'Spack module file configuration file schema', 'title': 'Spack module file configuration file schema',
@ -283,17 +303,22 @@
} }
}, },
'autoload': {'$ref': '#/definitions/dependency_selection'}, 'autoload': {'$ref': '#/definitions/dependency_selection'},
'prerequisites': {'$ref': '#/definitions/dependency_selection'}, 'prerequisites':
{'$ref': '#/definitions/dependency_selection'},
'conflict': {'$ref': '#/definitions/array_of_strings'}, 'conflict': {'$ref': '#/definitions/array_of_strings'},
'environment': { 'environment': {
'type': 'object', 'type': 'object',
'default': {}, 'default': {},
'additionalProperties': False, 'additionalProperties': False,
'properties': { 'properties': {
'set': {'$ref': '#/definitions/dictionary_of_strings'}, 'set':
'unset': {'$ref': '#/definitions/array_of_strings'}, {'$ref': '#/definitions/dictionary_of_strings'},
'prepend_path': {'$ref': '#/definitions/dictionary_of_strings'}, 'unset':
'append_path': {'$ref': '#/definitions/dictionary_of_strings'} {'$ref': '#/definitions/array_of_strings'},
'prepend_path':
{'$ref': '#/definitions/dictionary_of_strings'},
'append_path':
{'$ref': '#/definitions/dictionary_of_strings'}
} }
} }
} }
@ -304,15 +329,20 @@
'anyOf': [ 'anyOf': [
{ {
'properties': { 'properties': {
'whitelist': {'$ref': '#/definitions/array_of_strings'}, 'whitelist':
'blacklist': {'$ref': '#/definitions/array_of_strings'}, {'$ref': '#/definitions/array_of_strings'},
'blacklist':
{'$ref': '#/definitions/array_of_strings'},
'naming_scheme': { 'naming_scheme': {
'type': 'string' # Can we be more specific here? 'type':
'string' # Can we be more specific here?
} }
} }
}, },
{ {
'patternProperties': {r'\w[\w-]*': {'$ref': '#/definitions/module_file_configuration'}} 'patternProperties':
{r'\w[\w-]*':
{'$ref': '#/definitions/module_file_configuration'}}
} }
] ]
} }
@ -326,7 +356,8 @@
'prefix_inspections': { 'prefix_inspections': {
'type': 'object', 'type': 'object',
'patternProperties': { 'patternProperties': {
r'\w[\w-]*': { # path to be inspected for existence (relative to prefix) r'\w[\w-]*':
{ # path to be inspected (relative to prefix)
'$ref': '#/definitions/array_of_strings' '$ref': '#/definitions/array_of_strings'
} }
} }
@ -341,13 +372,15 @@
}, },
'tcl': { 'tcl': {
'allOf': [ 'allOf': [
{'$ref': '#/definitions/module_type_configuration'}, # Base configuration {'$ref': '#/definitions/module_type_configuration'
}, # Base configuration
{} # Specific tcl extensions {} # Specific tcl extensions
] ]
}, },
'dotkit': { 'dotkit': {
'allOf': [ 'allOf': [
{'$ref': '#/definitions/module_type_configuration'}, # Base configuration {'$ref': '#/definitions/module_type_configuration'
}, # Base configuration
{} # Specific dotkit extensions {} # Specific dotkit extensions
] ]
}, },
@ -356,7 +389,6 @@
}, },
}, },
} }
"""OrderedDict of config scopes keyed by name. """OrderedDict of config scopes keyed by name.
Later scopes will override earlier scopes. Later scopes will override earlier scopes.
""" """
@ -366,12 +398,13 @@
def validate_section_name(section): def validate_section_name(section):
"""Raise a ValueError if the section is not a valid section.""" """Raise a ValueError if the section is not a valid section."""
if section not in section_schemas: if section not in section_schemas:
raise ValueError("Invalid config section: '%s'. Options are %s" raise ValueError("Invalid config section: '%s'. Options are %s" %
% (section, section_schemas)) (section, section_schemas))
def extend_with_default(validator_class): def extend_with_default(validator_class):
"""Add support for the 'default' attribute for properties and patternProperties. """Add support for the 'default' attribute for
properties and patternProperties
jsonschema does not handle this out of the box -- it only jsonschema does not handle this out of the box -- it only
validates. This allows us to set default values for configs validates. This allows us to set default values for configs
@ -380,13 +413,15 @@ def extend_with_default(validator_class):
""" """
validate_properties = validator_class.VALIDATORS["properties"] validate_properties = validator_class.VALIDATORS["properties"]
validate_pattern_properties = validator_class.VALIDATORS["patternProperties"] validate_pattern_properties = validator_class.VALIDATORS[
"patternProperties"]
def set_defaults(validator, properties, instance, schema): def set_defaults(validator, properties, instance, schema):
for property, subschema in properties.iteritems(): for property, subschema in properties.iteritems():
if "default" in subschema: if "default" in subschema:
instance.setdefault(property, subschema["default"]) instance.setdefault(property, subschema["default"])
for err in validate_properties(validator, properties, instance, schema): for err in validate_properties(validator, properties, instance,
schema):
yield err yield err
def set_pp_defaults(validator, properties, instance, schema): def set_pp_defaults(validator, properties, instance, schema):
@ -397,7 +432,8 @@ def set_pp_defaults(validator, properties, instance, schema):
if re.match(property, key) and val is None: if re.match(property, key) and val is None:
instance[key] = subschema["default"] instance[key] = subschema["default"]
for err in validate_pattern_properties(validator, properties, instance, schema): for err in validate_pattern_properties(validator, properties, instance,
schema):
yield err yield err
return validators.extend(validator_class, { return validators.extend(validator_class, {
@ -408,6 +444,7 @@ def set_pp_defaults(validator, properties, instance, schema):
DefaultSettingValidator = extend_with_default(Draft4Validator) DefaultSettingValidator = extend_with_default(Draft4Validator)
def validate_section(data, schema): def validate_section(data, schema):
"""Validate data read in from a Spack YAML file. """Validate data read in from a Spack YAML file.
@ -442,16 +479,14 @@ def get_section_filename(self, section):
validate_section_name(section) validate_section_name(section)
return os.path.join(self.path, "%s.yaml" % section) return os.path.join(self.path, "%s.yaml" % section)
def get_section(self, section): def get_section(self, section):
if not section in self.sections: if section not in self.sections:
path = self.get_section_filename(section) path = self.get_section_filename(section)
schema = section_schemas[section] schema = section_schemas[section]
data = _read_config_file(path, schema) data = _read_config_file(path, schema)
self.sections[section] = data self.sections[section] = data
return self.sections[section] return self.sections[section]
def write_section(self, section): def write_section(self, section):
filename = self.get_section_filename(section) filename = self.get_section_filename(section)
data = self.get_section(section) data = self.get_section(section)
@ -463,8 +498,8 @@ def write_section(self, section):
except jsonschema.ValidationError as e: except jsonschema.ValidationError as e:
raise ConfigSanityError(e, data) raise ConfigSanityError(e, data)
except (yaml.YAMLError, IOError) as e: except (yaml.YAMLError, IOError) as e:
raise ConfigFileError("Error writing to config file: '%s'" % str(e)) raise ConfigFileError("Error writing to config file: '%s'" %
str(e))
def clear(self): def clear(self):
"""Empty cached config information.""" """Empty cached config information."""
@ -496,8 +531,8 @@ def validate_scope(scope):
return config_scopes[scope] return config_scopes[scope]
else: else:
raise ValueError("Invalid config scope: '%s'. Must be one of %s" raise ValueError("Invalid config scope: '%s'. Must be one of %s" %
% (scope, config_scopes.keys())) (scope, config_scopes.keys()))
def _read_config_file(filename, schema): def _read_config_file(filename, schema):
@ -523,12 +558,12 @@ def _read_config_file(filename, schema):
return data return data
except MarkedYAMLError as e: except MarkedYAMLError as e:
raise ConfigFileError( raise ConfigFileError("Error parsing yaml%s: %s" %
"Error parsing yaml%s: %s" % (str(e.context_mark), e.problem)) (str(e.context_mark), e.problem))
except IOError as e: except IOError as e:
raise ConfigFileError( raise ConfigFileError("Error reading configuration file %s: %s" %
"Error reading configuration file %s: %s" % (filename, str(e))) (filename, str(e)))
def clear_config_caches(): def clear_config_caches():
@ -551,6 +586,7 @@ def _merge_yaml(dest, source):
parent instead of merging. parent instead of merging.
""" """
def they_are(t): def they_are(t):
return isinstance(dest, t) and isinstance(source, t) return isinstance(dest, t) and isinstance(source, t)
@ -571,7 +607,7 @@ def they_are(t):
    # Source dict is merged into dest.
    elif they_are(dict):
        for sk, sv in source.iteritems():
-           if not sk in dest:
+           if sk not in dest:
                dest[sk] = copy.copy(sv)
            else:
                dest[sk] = _merge_yaml(dest[sk], source[sk])
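
As a reading aid for the hunk above, a simplified, self-contained sketch of the same merge strategy (dicts merged key by key, everything else overwritten) is shown below; it is not the exact Spack implementation, which also special-cases lists and YAML line-number metadata.

    import copy

    def merge_config(dest, source):
        """Recursively merge source into dest; dict values are merged key
        by key, any other type is overwritten by a copy of source."""
        if isinstance(dest, dict) and isinstance(source, dict):
            for sk, sv in source.items():
                if sk not in dest:
                    dest[sk] = copy.copy(sv)
                else:
                    dest[sk] = merge_config(dest[sk], sv)
            return dest
        return copy.copy(source)

    print(merge_config({'packages': {'mpich': {'buildable': True}}},
                       {'packages': {'mpich': {'buildable': False}}}))
    # {'packages': {'mpich': {'buildable': False}}}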
@ -653,7 +689,7 @@ def print_section(section):
data = syaml.syaml_dict() data = syaml.syaml_dict()
data[section] = get_config(section) data[section] = get_config(section)
syaml.dump(data, stream=sys.stdout, default_flow_style=False) syaml.dump(data, stream=sys.stdout, default_flow_style=False)
except (yaml.YAMLError, IOError) as e: except (yaml.YAMLError, IOError):
raise ConfigError("Error reading configuration: %s" % section) raise ConfigError("Error reading configuration: %s" % section)
@ -683,15 +719,22 @@ def is_spec_buildable(spec):
"""Return true if the spec pkgspec is configured as buildable""" """Return true if the spec pkgspec is configured as buildable"""
allpkgs = get_config('packages') allpkgs = get_config('packages')
name = spec.name name = spec.name
-   if not spec.name in allpkgs:
+   if name not in allpkgs:
        return True
-   if not 'buildable' in allpkgs[spec.name]:
+   if 'buildable' not in allpkgs[name]:
        return True
    return allpkgs[spec.name]['buildable']


-class ConfigError(SpackError): pass
-class ConfigFileError(ConfigError): pass
+class ConfigError(SpackError):
+    pass
+
+
+class ConfigFileError(ConfigError):
+    pass
def get_path(path, data): def get_path(path, data):
if path: if path:
@ -699,8 +742,10 @@ def get_path(path, data):
else: else:
return data return data
class ConfigFormatError(ConfigError): class ConfigFormatError(ConfigError):
"""Raised when a configuration format does not match its schema.""" """Raised when a configuration format does not match its schema."""
def __init__(self, validation_error, data): def __init__(self, validation_error, data):
# Try to get line number from erroneous instance and its parent # Try to get line number from erroneous instance and its parent
instance_mark = getattr(validation_error.instance, '_start_mark', None) instance_mark = getattr(validation_error.instance, '_start_mark', None)
@ -733,5 +778,6 @@ def __init__(self, validation_error, data):
message = '%s: %s' % (location, validation_error.message) message = '%s: %s' % (location, validation_error.message)
super(ConfigError, self).__init__(message) super(ConfigError, self).__init__(message)
class ConfigSanityError(ConfigFormatError): class ConfigSanityError(ConfigFormatError):
"""Same as ConfigFormatError, raised when config is written by Spack.""" """Same as ConfigFormatError, raised when config is written by Spack."""

View File

@ -26,7 +26,8 @@ def execute(self):
class UnsetEnv(NameModifier):
    def execute(self):
-       os.environ.pop(self.name, None)  # Avoid throwing if the variable was not set
+       # Avoid throwing if the variable was not set
+       os.environ.pop(self.name, None)
class SetPath(NameValueModifier): class SetPath(NameValueModifier):
@ -55,7 +56,9 @@ class RemovePath(NameValueModifier):
    def execute(self):
        environment_value = os.environ.get(self.name, '')
        directories = environment_value.split(':') if environment_value else []
-       directories = [os.path.normpath(x) for x in directories if x != os.path.normpath(self.value)]
+       directories = [os.path.normpath(x)
+                      for x in directories
+                      if x != os.path.normpath(self.value)]
        os.environ[self.name] = ':'.join(directories)
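
The RemovePath rewrap above is purely cosmetic; the behaviour it preserves can be exercised in isolation as below (DEMO_PATH is a made-up variable, and both sides of the comparison are normalized here for clarity).

    import os

    def remove_from_path_list(name, value):
        environment_value = os.environ.get(name, '')
        directories = environment_value.split(':') if environment_value else []
        # Keep every entry that does not normalize to the path being removed.
        directories = [os.path.normpath(x)
                       for x in directories
                       if os.path.normpath(x) != os.path.normpath(value)]
        os.environ[name] = ':'.join(directories)

    os.environ['DEMO_PATH'] = '/usr/bin:/opt/demo/bin/:/usr/local/bin'
    remove_from_path_list('DEMO_PATH', '/opt/demo/bin')
    print(os.environ['DEMO_PATH'])  # /usr/bin:/usr/local/bin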
@ -63,7 +66,8 @@ class EnvironmentModifications(object):
""" """
Keeps track of requests to modify the current environment. Keeps track of requests to modify the current environment.
Each call to a method to modify the environment stores the extra information on the caller in the request: Each call to a method to modify the environment stores the extra
information on the caller in the request:
- 'filename' : filename of the module where the caller is defined - 'filename' : filename of the module where the caller is defined
- 'lineno': line number where the request occurred - 'lineno': line number where the request occurred
- 'context' : line of code that issued the request that failed - 'context' : line of code that issued the request that failed
@ -71,10 +75,10 @@ class EnvironmentModifications(object):
def __init__(self, other=None): def __init__(self, other=None):
""" """
Initializes a new instance, copying commands from other if it is not None Initializes a new instance, copying commands from other if not None
Args: Args:
other: another instance of EnvironmentModifications from which (optional) other: another instance of EnvironmentModifications
""" """
self.env_modifications = [] self.env_modifications = []
if other is not None: if other is not None:
@ -93,7 +97,7 @@ def extend(self, other):
@staticmethod @staticmethod
def _check_other(other): def _check_other(other):
if not isinstance(other, EnvironmentModifications): if not isinstance(other, EnvironmentModifications):
raise TypeError('other must be an instance of EnvironmentModifications') raise TypeError('not an instance of EnvironmentModifications')
def _get_outside_caller_attributes(self): def _get_outside_caller_attributes(self):
stack = inspect.stack() stack = inspect.stack()
@ -101,12 +105,10 @@ def _get_outside_caller_attributes(self):
            _, filename, lineno, _, context, index = stack[2]
            context = context[index].strip()
        except Exception:
-           filename, lineno, context = 'unknown file', 'unknown line', 'unknown context'
-       args = {
-           'filename': filename,
-           'lineno': lineno,
-           'context': context
-       }
+           filename = 'unknown file'
+           lineno = 'unknown line'
+           context = 'unknown context'
+       args = {'filename': filename, 'lineno': lineno, 'context': context}
        return args
def set(self, name, value, **kwargs): def set(self, name, value, **kwargs):
@ -170,7 +172,7 @@ def prepend_path(self, name, path, **kwargs):
def remove_path(self, name, path, **kwargs): def remove_path(self, name, path, **kwargs):
""" """
Stores in the current object a request to remove a path from a path list Stores in the current object a request to remove a path from a list
Args: Args:
name: name of the path list in the environment name: name of the path list in the environment
@ -185,7 +187,8 @@ def group_by_name(self):
Returns a dict of the modifications grouped by variable name Returns a dict of the modifications grouped by variable name
Returns: Returns:
dict mapping the environment variable name to the modifications to be done on it dict mapping the environment variable name to the modifications
to be done on it
""" """
modifications = collections.defaultdict(list) modifications = collections.defaultdict(list)
for item in self: for item in self:
@ -203,7 +206,8 @@ def apply_modifications(self):
Applies the modifications and clears the list Applies the modifications and clears the list
""" """
modifications = self.group_by_name() modifications = self.group_by_name()
# Apply the modifications to the environment variables one variable at a time # Apply the modifications to the environment variables one variable
# at a time
for name, actions in sorted(modifications.items()): for name, actions in sorted(modifications.items()):
for x in actions: for x in actions:
x.execute() x.execute()
@ -224,13 +228,17 @@ def concatenate_paths(paths):
def set_or_unset_not_first(variable, changes, errstream):
    """
-   Check if we are going to set or unset something after other modifications have already been requested
+   Check if we are going to set or unset something after other modifications
+   have already been requested
    """
-   indexes = [ii for ii, item in enumerate(changes) if ii != 0 and type(item) in [SetEnv, UnsetEnv]]
+   indexes = [ii
+              for ii, item in enumerate(changes)
+              if ii != 0 and type(item) in [SetEnv, UnsetEnv]]
    if indexes:
        good = '\t \t{context} at {filename}:{lineno}'
        nogood = '\t--->\t{context} at {filename}:{lineno}'
-       errstream('Suspicious requests to set or unset the variable \'{var}\' found'.format(var=variable))
+       message = 'Suspicious requests to set or unset the variable \'{var}\' found'  # NOQA: ignore=E501
+       errstream(message.format(var=variable))
        for ii, item in enumerate(changes):
            print_format = nogood if ii in indexes else good
            errstream(print_format.format(**item.args))
@ -238,8 +246,8 @@ def set_or_unset_not_first(variable, changes, errstream):
def validate(env, errstream): def validate(env, errstream):
""" """
Validates the environment modifications to check for the presence of suspicious patterns. Prompts a warning for Validates the environment modifications to check for the presence of
everything that was found suspicious patterns. Prompts a warning for everything that was found
Current checks: Current checks:
- set or unset variables after other changes on the same variable - set or unset variables after other changes on the same variable
@ -254,7 +262,8 @@ def validate(env, errstream):
def filter_environment_blacklist(env, variables):
    """
-   Generator that filters out any change to environment variables present in the input list
+   Generator that filters out any change to environment variables present in
+   the input list
    Args:
        env: list of environment modifications
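
The class reformatted in this file records modification requests first and applies them later; a stripped-down stand-in (not the real EnvironmentModifications API surface) shows the record-then-apply pattern:

    import collections
    import os

    class EnvMods(object):
        def __init__(self):
            self.requests = []

        def set(self, name, value):
            self.requests.append(('set', name, value))

        def prepend_path(self, name, path):
            self.requests.append(('prepend', name, path))

        def apply_modifications(self):
            grouped = collections.defaultdict(list)
            for action, name, value in self.requests:
                grouped[name].append((action, value))
            # One variable at a time, in a deterministic order.
            for name, actions in sorted(grouped.items()):
                for action, value in actions:
                    if action == 'set':
                        os.environ[name] = value
                    else:
                        current = os.environ.get(name, '')
                        os.environ[name] = value + (':' + current if current else '')

    env = EnvMods()
    env.set('DEMO_VAR', 'hello')
    env.prepend_path('DEMO_PATH', '/opt/demo/bin')
    env.apply_modifications()
    print(os.environ['DEMO_VAR'] + ' ' + os.environ['DEMO_PATH'])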

View File

@ -23,36 +23,34 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
""" """
This module contains code for creating environment modules, which can include dotkits, tcl modules, lmod, and others. This module contains code for creating environment modules, which can include
dotkits, tcl modules, lmod, and others.
The various types of modules are installed by post-install hooks and removed after an uninstall by post-uninstall hooks. The various types of modules are installed by post-install hooks and removed
This class consolidates the logic for creating an abstract description of the information that module systems need. after an uninstall by post-uninstall hooks. This class consolidates the logic
Currently that includes a number of directories to be appended to paths in the user's environment: for creating an abstract description of the information that module systems
need.
* /bin directories to be appended to PATH This module also includes logic for coming up with unique names for the module
* /lib* directories for LD_LIBRARY_PATH files so that they can be found by the various shell-support files in
* /include directories for CPATH $SPACK/share/spack/setup-env.*.
* /man* and /share/man* directories for MANPATH
* the package prefix for CMAKE_PREFIX_PATH
This module also includes logic for coming up with unique names for the module files so that they can be found by the Each hook implements the logic for writing its specific type of module.
various shell-support files in $SPACK/share/spack/setup-env.*.
Each hook in hooks/ implements the logic for writing its specific type of module file.
""" """
import copy import copy
import datetime import datetime
import os import os
import os.path import os.path
import re import re
import textwrap
import string import string
import textwrap
import llnl.util.tty as tty import llnl.util.tty as tty
import spack import spack
import spack.config import spack.config
from llnl.util.filesystem import join_path, mkdirp from llnl.util.filesystem import join_path, mkdirp
from spack.build_environment import parent_class_modules, set_module_variables_for_package from spack.build_environment import parent_class_modules
from spack.build_environment import set_module_variables_for_package
from spack.environment import * from spack.environment import *
__all__ = ['EnvModule', 'Dotkit', 'TclModule'] __all__ = ['EnvModule', 'Dotkit', 'TclModule']
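
The docstring above mentions coming up with unique module file names; the naming_scheme and use_name machinery later in this file boils down to str.format over spec tokens plus the DAG hash, roughly as sketched here (the tokens and the hash are hypothetical placeholders, not values produced by Spack):

    import collections

    Compiler = collections.namedtuple('Compiler', ['name', 'version'])

    naming_tokens = {'name': 'mpileaks',
                     'version': '2.3',
                     'compiler': Compiler(name='gcc', version='4.9.2')}
    naming_scheme = '{name}-{version}-{compiler.name}-{compiler.version}'

    use_name = naming_scheme.format(**naming_tokens)
    # Spack appends spec.dag_hash() so that two configurations of the same
    # package never collide; a fixed fake hash stands in for it here.
    use_name += '-' + 'abcdef1234567890abcdef1234567890'
    print(use_name)  # mpileaks-2.3-gcc-4.9.2-abcdef1234567890abcdef1234567890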
@ -67,30 +65,26 @@ def print_help():
""" """
For use by commands to tell user how to activate shell support. For use by commands to tell user how to activate shell support.
""" """
tty.msg("This command requires spack's shell integration.", tty.msg("This command requires spack's shell integration.", "",
"",
"To initialize spack's shell commands, you must run one of", "To initialize spack's shell commands, you must run one of",
"the commands below. Choose the right command for your shell.", "the commands below. Choose the right command for your shell.",
"", "", "For bash and zsh:",
"For bash and zsh:", " . %s/setup-env.sh" % spack.share_path, "",
" . %s/setup-env.sh" % spack.share_path, "For csh and tcsh:", " setenv SPACK_ROOT %s" % spack.prefix,
"", " source %s/setup-env.csh" % spack.share_path, "")
"For csh and tcsh:",
" setenv SPACK_ROOT %s" % spack.prefix,
" source %s/setup-env.csh" % spack.share_path,
"")
def inspect_path(prefix): def inspect_path(prefix):
""" """
Inspects the prefix of an installation to search for common layouts. Issues a request to modify the environment Inspects the prefix of an installation to search for common layouts.
accordingly when an item is found. Issues a request to modify the environment when an item is found.
Args: Args:
prefix: prefix of the installation prefix: prefix of the installation
Returns: Returns:
instance of EnvironmentModifications containing the requested modifications instance of EnvironmentModifications containing the requested
modifications
""" """
env = EnvironmentModifications() env = EnvironmentModifications()
# Inspect the prefix to check for the existence of common directories # Inspect the prefix to check for the existence of common directories
@ -105,18 +99,22 @@ def inspect_path(prefix):
def dependencies(spec, request='all'):
    """
-   Returns the list of dependent specs for a given spec, according to the given request
+   Returns the list of dependent specs for a given spec, according to the
+   given request
    Args:
        spec: target spec
        request: either 'none', 'direct' or 'all'
    Returns:
-       empty list if 'none', direct dependency list if 'direct', all dependencies if 'all'
+       empty list if 'none', direct dependency list if 'direct', all
+       dependencies if 'all'
    """
    if request not in ('none', 'direct', 'all'):
-       raise tty.error("Wrong value for argument 'request' : should be one of ('none', 'direct', 'all') "
-                       " [current value is '%s']" % request)
+       message = "Wrong value for argument 'request' : "
+       message += "should be one of ('none', 'direct', 'all')"
+       message += " [current value is '{0}']"
+       raise tty.error(message.format(request))
    if request == 'none':
        return []

@ -124,12 +122,19 @@ def dependencies(spec, request='all'):
    if request == 'direct':
        return [xx for _, xx in spec.dependencies.items()]
-   # FIXME : during module file creation nodes seem to be visited multiple times even if cover='nodes'
-   # FIXME : is given. This work around permits to get a unique list of spec anyhow.
-   # FIXME : Possibly we miss a merge step among nodes that refer to the same package.
+   # FIXME : during module file creation nodes seem to be visited
+   # FIXME : multiple times even if cover='nodes' is given. This work around
+   # FIXME : permits to get a unique list of spec anyhow. Maybe we miss a
+   # FIXME : merge step among nodes that refer to the same package?
    seen = set()
    seen_add = seen.add
-   l = [xx for xx in sorted(spec.traverse(order='post', depth=True, cover='nodes', root=False), reverse=True)]
+   l = [xx
+        for xx in sorted(
+            spec.traverse(order='post',
+                          depth=True,
+                          cover='nodes',
+                          root=False),
+            reverse=True)]
    return [xx for ii, xx in l if not (xx in seen or seen_add(xx))]
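
The seen/seen_add idiom kept by the rewrap above is the usual order-preserving de-duplication trick; in isolation:

    def unique_everseen(items):
        seen = set()
        seen_add = seen.add
        # seen_add(x) returns None, so the `or` both records x and keeps it
        # the first time it shows up.
        return [x for x in items if not (x in seen or seen_add(x))]

    print(unique_everseen(['zlib', 'mpich', 'zlib', 'callpath', 'mpich']))
    # ['zlib', 'mpich', 'callpath']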
@ -146,7 +151,8 @@ def update_dictionary_extending_lists(target, update):
def parse_config_options(module_generator): def parse_config_options(module_generator):
""" """
Parse the configuration file and returns a bunch of items that will be needed during module file generation Parse the configuration file and returns a bunch of items that will be
needed during module file generation
Args: Args:
module_generator: module generator for a given spec module_generator: module generator for a given spec
@ -154,11 +160,14 @@ def parse_config_options(module_generator):
Returns: Returns:
autoloads: list of specs to be autoloaded autoloads: list of specs to be autoloaded
prerequisites: list of specs to be marked as prerequisite prerequisites: list of specs to be marked as prerequisite
filters: list of environment variables whose modification is blacklisted in module files filters: list of environment variables whose modification is
env: list of custom environment modifications to be applied in the module file blacklisted in module files
env: list of custom environment modifications to be applied in the
module file
""" """
# Get the configuration for this kind of generator # Get the configuration for this kind of generator
module_configuration = copy.deepcopy(CONFIGURATION.get(module_generator.name, {})) module_configuration = copy.deepcopy(CONFIGURATION.get(
module_generator.name, {}))
##### #####
# Merge all the rules # Merge all the rules
@ -179,9 +188,12 @@ def parse_config_options(module_generator):
##### #####
# Automatic loading loads # Automatic loading loads
module_file_actions['autoload'] = dependencies(module_generator.spec, module_file_actions.get('autoload', 'none')) module_file_actions['autoload'] = dependencies(
module_generator.spec, module_file_actions.get('autoload', 'none'))
# Prerequisites # Prerequisites
module_file_actions['prerequisites'] = dependencies(module_generator.spec, module_file_actions.get('prerequisites', 'none')) module_file_actions['prerequisites'] = dependencies(
module_generator.spec, module_file_actions.get('prerequisites',
'none'))
# Environment modifications # Environment modifications
environment_actions = module_file_actions.pop('environment', {}) environment_actions = module_file_actions.pop('environment', {})
env = EnvironmentModifications() env = EnvironmentModifications()
@ -210,7 +222,8 @@ def process_arglist(arglist):
def filter_blacklisted(specs, module_name): def filter_blacklisted(specs, module_name):
""" """
Given a sequence of specs, filters the ones that are blacklisted in the module configuration file. Given a sequence of specs, filters the ones that are blacklisted in the
module configuration file.
Args: Args:
specs: sequence of spec instances specs: sequence of spec instances
@ -233,7 +246,8 @@ class EnvModule(object):
class __metaclass__(type): class __metaclass__(type):
def __init__(cls, name, bases, dict): def __init__(cls, name, bases, dict):
type.__init__(cls, name, bases, dict) type.__init__(cls, name, bases, dict)
if cls.name != 'env_module' and cls.name in CONFIGURATION['enable']: if cls.name != 'env_module' and cls.name in CONFIGURATION[
'enable']:
module_types[cls.name] = cls module_types[cls.name] = cls
def __init__(self, spec=None): def __init__(self, spec=None):
@ -249,7 +263,8 @@ def __init__(self, spec=None):
# long description is the docstring with reduced whitespace. # long description is the docstring with reduced whitespace.
self.long_description = None self.long_description = None
if self.spec.package.__doc__: if self.spec.package.__doc__:
self.long_description = re.sub(r'\s+', ' ', self.spec.package.__doc__) self.long_description = re.sub(r'\s+', ' ',
self.spec.package.__doc__)
@property @property
def naming_scheme(self): def naming_scheme(self):
@ -271,12 +286,14 @@ def tokens(self):
@property @property
def use_name(self): def use_name(self):
""" """
Subclasses should implement this to return the name the module command uses to refer to the package. Subclasses should implement this to return the name the module command
uses to refer to the package.
""" """
naming_tokens = self.tokens naming_tokens = self.tokens
naming_scheme = self.naming_scheme naming_scheme = self.naming_scheme
name = naming_scheme.format(**naming_tokens) name = naming_scheme.format(**naming_tokens)
name += '-' + self.spec.dag_hash() # Always append the hash to make the module file unique name += '-' + self.spec.dag_hash(
) # Always append the hash to make the module file unique
# Not everybody is working on linux... # Not everybody is working on linux...
parts = name.split('/') parts = name.split('/')
name = join_path(*parts) name = join_path(*parts)
@ -296,8 +313,12 @@ def category(self):
@property @property
def blacklisted(self): def blacklisted(self):
configuration = CONFIGURATION.get(self.name, {}) configuration = CONFIGURATION.get(self.name, {})
whitelist_matches = [x for x in configuration.get('whitelist', []) if self.spec.satisfies(x)] whitelist_matches = [x
blacklist_matches = [x for x in configuration.get('blacklist', []) if self.spec.satisfies(x)] for x in configuration.get('whitelist', [])
if self.spec.satisfies(x)]
blacklist_matches = [x
for x in configuration.get('blacklist', [])
if self.spec.satisfies(x)]
if whitelist_matches: if whitelist_matches:
message = '\tWHITELIST : %s [matches : ' % self.spec.cshort_spec message = '\tWHITELIST : %s [matches : ' % self.spec.cshort_spec
for rule in whitelist_matches: for rule in whitelist_matches:
@ -327,7 +348,8 @@ def write(self):
""" """
if self.blacklisted: if self.blacklisted:
return return
tty.debug("\tWRITE : %s [%s]" % (self.spec.cshort_spec, self.file_name)) tty.debug("\tWRITE : %s [%s]" %
(self.spec.cshort_spec, self.file_name))
module_dir = os.path.dirname(self.file_name) module_dir = os.path.dirname(self.file_name)
if not os.path.exists(module_dir): if not os.path.exists(module_dir):
@ -337,11 +359,12 @@ def write(self):
# installation prefix # installation prefix
env = inspect_path(self.spec.prefix) env = inspect_path(self.spec.prefix)
# Let the extendee/dependency modify their extensions/dependencies before asking for # Let the extendee/dependency modify their extensions/dependencies
# package-specific modifications # before asking for package-specific modifications
spack_env = EnvironmentModifications() spack_env = EnvironmentModifications()
# TODO : the code down below is quite similar to build_environment.setup_package and needs to be # TODO : the code down below is quite similar to
# TODO : factored out to a single place # TODO : build_environment.setup_package and needs to be factored out
# TODO : to a single place
for item in dependencies(self.spec, 'all'): for item in dependencies(self.spec, 'all'):
package = self.spec[item.name].package package = self.spec[item.name].package
modules = parent_class_modules(package.__class__) modules = parent_class_modules(package.__class__)
@ -358,14 +381,18 @@ def write(self):
# Parse configuration file # Parse configuration file
module_configuration, conf_env = parse_config_options(self) module_configuration, conf_env = parse_config_options(self)
env.extend(conf_env) env.extend(conf_env)
filters = module_configuration.get('filter', {}).get('environment_blacklist',{}) filters = module_configuration.get('filter', {}).get(
'environment_blacklist', {})
# Build up the module file content # Build up the module file content
module_file_content = self.header module_file_content = self.header
for x in filter_blacklisted(module_configuration.pop('autoload', []), self.name): for x in filter_blacklisted(
module_configuration.pop('autoload', []), self.name):
module_file_content += self.autoload(x) module_file_content += self.autoload(x)
for x in filter_blacklisted(module_configuration.pop('prerequisites', []), self.name): for x in filter_blacklisted(
module_configuration.pop('prerequisites', []), self.name):
module_file_content += self.prerequisite(x) module_file_content += self.prerequisite(x)
for line in self.process_environment_command(filter_environment_blacklist(env, filters)): for line in self.process_environment_command(
filter_environment_blacklist(env, filters)):
module_file_content += line module_file_content += line
for line in self.module_specific_content(module_configuration): for line in self.module_specific_content(module_configuration):
module_file_content += line module_file_content += line
@ -392,10 +419,13 @@ def prerequisite(self, spec):
def process_environment_command(self, env): def process_environment_command(self, env):
for command in env: for command in env:
try: try:
yield self.environment_modifications_formats[type(command)].format(**command.args) yield self.environment_modifications_formats[type(
command)].format(**command.args)
except KeyError: except KeyError:
tty.warn('Cannot handle command of type {command} : skipping request'.format(command=type(command))) message = 'Cannot handle command of type {command} : skipping request' # NOQA: ignore=E501
tty.warn('{context} at {filename}:{lineno}'.format(**command.args)) tty.warn(message.format(command=type(command)))
context = '{context} at {filename}:{lineno}'
tty.warn(context.format(**command.args))
@property @property
def file_name(self): def file_name(self):
@ -408,9 +438,12 @@ def remove(self):
if os.path.exists(mod_file): if os.path.exists(mod_file):
try: try:
os.remove(mod_file) # Remove the module file os.remove(mod_file) # Remove the module file
os.removedirs(os.path.dirname(mod_file)) # Remove all the empty directories from the leaf up os.removedirs(
os.path.dirname(mod_file)
) # Remove all the empty directories from the leaf up
except OSError: except OSError:
pass # removedirs throws OSError on first non-empty directory found # removedirs throws OSError on first non-empty directory found
pass
class Dotkit(EnvModule): class Dotkit(EnvModule):
@ -424,13 +457,12 @@ class Dotkit(EnvModule):
autoload_format = 'dk_op {module_file}\n' autoload_format = 'dk_op {module_file}\n'
prerequisite_format = None # TODO : does something like prerequisite exist for dotkit? default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501
default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}'
@property @property
def file_name(self): def file_name(self):
return join_path(Dotkit.path, self.spec.architecture, '%s.dk' % self.use_name) return join_path(Dotkit.path, self.spec.architecture,
'%s.dk' % self.use_name)
@property @property
def header(self): def header(self):
@ -474,7 +506,7 @@ class TclModule(EnvModule):
prerequisite_format = 'prereq {module_file}\n' prerequisite_format = 'prereq {module_file}\n'
default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501
@property @property
def file_name(self): def file_name(self):
@ -482,9 +514,10 @@ def file_name(self):
@property @property
def header(self): def header(self):
timestamp = datetime.datetime.now()
# TCL Modulefile header # TCL Modulefile header
header = '#%Module1.0\n' header = '#%Module1.0\n'
header += '## Module file created by spack (https://github.com/LLNL/spack) on %s\n' % datetime.datetime.now() header += '## Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp # NOQA: ignore=E501
header += '##\n' header += '##\n'
header += '## %s\n' % self.spec.short_spec header += '## %s\n' % self.spec.short_spec
header += '##\n' header += '##\n'
@ -509,16 +542,19 @@ def module_specific_content(self, configuration):
f = string.Formatter() f = string.Formatter()
for item in conflict_format: for item in conflict_format:
line = 'conflict ' + item + '\n' line = 'conflict ' + item + '\n'
-           if len([x for x in f.parse(line)]) > 1:  # We do have placeholder to substitute
-               for naming_dir, conflict_dir in zip(self.naming_scheme.split('/'), item.split('/')):
+           if len([x for x in f.parse(line)
+                   ]) > 1:  # We do have placeholder to substitute
+               for naming_dir, conflict_dir in zip(
+                       self.naming_scheme.split('/'), item.split('/')):
                    if naming_dir != conflict_dir:
-                       message = 'conflict scheme does not match naming scheme [{spec}]\n\n'
+                       message = 'conflict scheme does not match naming'
+                       message += ' [{spec}]\n\n'
                        message += 'naming scheme : "{nformat}"\n'
                        message += 'conflict scheme : "{cformat}"\n\n'
-                       message += '** You may want to check your `modules.yaml` configuration file **\n'
-                       tty.error(
-                           message.format(spec=self.spec, nformat=self.naming_scheme, cformat=item)
-                       )
+                       message += '** You may want to check your `modules.yaml` configuration file **\n'  # NOQA: ignore=E501
+                       tty.error(message.format(spec=self.spec,
+                                                nformat=self.naming_scheme,
+                                                cformat=item))
                        raise SystemExit('Module generation aborted.')
            line = line.format(**naming_tokens)
            yield line
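
Putting the TclModule pieces from this file together, module file generation is essentially templated text assembly; a toy renderer with hypothetical command templates (the real ones are keyed by modifier class in environment_modifications_formats) could look like this:

    import datetime

    formats = {'setenv': 'setenv {name} "{value}"\n',
               'prepend-path': 'prepend-path {name} "{value}"\n'}

    def render_tcl(short_spec, actions):
        header = '#%Module1.0\n'
        header += '## Module file created by spack on %s\n' % datetime.datetime.now()
        header += '## %s\n' % short_spec
        body = ''
        for action, name, value in actions:
            body += formats[action].format(name=name, value=value)
        return header + body

    print(render_tcl('mpileaks@2.3%gcc@4.9.2',
                     [('prepend-path', 'PATH', '/opt/mpileaks/bin'),
                      ('setenv', 'MPILEAKS_ROOT', '/opt/mpileaks')]))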

View File

@ -23,53 +23,23 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
############################################################################## ##############################################################################
import sys
-import unittest
-import nose
-from spack.test.tally_plugin import Tally
-from llnl.util.filesystem import join_path
import llnl.util.tty as tty
-from llnl.util.tty.colify import colify
+import nose
import spack
+from llnl.util.filesystem import join_path
+from llnl.util.tty.colify import colify
+from spack.test.tally_plugin import Tally

"""Names of tests to be included in Spack's test suite"""
-test_names = ['versions',
-              'url_parse',
-              'url_substitution',
-              'packages',
-              'stage',
-              'spec_syntax',
-              'spec_semantics',
-              'spec_dag',
-              'concretize',
-              'multimethod',
-              'install',
-              'package_sanity',
-              'config',
-              'directory_layout',
-              'pattern',
-              'python_version',
-              'git_fetch',
-              'svn_fetch',
-              'hg_fetch',
-              'mirror',
-              'modules',
-              'url_extrapolate',
-              'cc',
-              'link_tree',
-              'spec_yaml',
-              'optional_deps',
-              'make_executable',
-              'configure_guess',
-              'lock',
-              'database',
-              'namespace_trie',
-              'yaml',
-              'sbang',
-              'environment',
-              'cmd.uninstall',
-              'cmd.test_install']
+test_names = ['versions', 'url_parse', 'url_substitution', 'packages', 'stage',
+              'spec_syntax', 'spec_semantics', 'spec_dag', 'concretize',
+              'multimethod', 'install', 'package_sanity', 'config',
+              'directory_layout', 'pattern', 'python_version', 'git_fetch',
+              'svn_fetch', 'hg_fetch', 'mirror', 'modules', 'url_extrapolate',
+              'cc', 'link_tree', 'spec_yaml', 'optional_deps',
+              'make_executable', 'configure_guess', 'lock', 'database',
+              'namespace_trie', 'yaml', 'sbang', 'environment',
+              'cmd.uninstall', 'cmd.test_install']
def list_tests(): def list_tests():
@ -80,7 +50,6 @@ def list_tests():
def run(names, outputDir, verbose=False):
    """Run tests with the supplied names. Names should be a list. If
    it's empty, run ALL of Spack's tests."""
-   verbosity = 1 if not verbose else 2
    if not names:
        names = test_names

@ -95,7 +64,7 @@ def run(names, outputDir, verbose=False):
    tally = Tally()
    for test in names:
        module = 'spack.test.' + test
-       print module
+       print(module)
        tty.msg("Running test: %s" % test)

@ -107,13 +76,11 @@ def run(names, outputDir, verbose=False):
            runOpts += ["--with-xunit",
                        "--xunit-file={0}".format(xmlOutputPath)]
        argv = [""] + runOpts + [module]
-       result = nose.run(argv=argv, addplugins=[tally])
+       nose.run(argv=argv, addplugins=[tally])
    succeeded = not tally.failCount and not tally.errorCount
-   tty.msg("Tests Complete.",
-           "%5d tests run" % tally.numberOfTestsRun,
-           "%5d failures" % tally.failCount,
-           "%5d errors" % tally.errorCount)
+   tty.msg("Tests Complete.", "%5d tests run" % tally.numberOfTestsRun,
+           "%5d failures" % tally.failCount, "%5d errors" % tally.errorCount)
    if succeeded:
        tty.info("OK", format='g')
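
run() above drives nose programmatically; stripped of the Spack plumbing, the invocation pattern is as follows (the module name and output path are placeholders):

    import nose

    module = 'spack.test.versions'             # placeholder test module
    xml_output_path = '/tmp/test-results.xml'  # placeholder report path

    runOpts = ["--with-xunit",
               "--xunit-file={0}".format(xml_output_path)]
    argv = [""] + runOpts + [module]

    # nose.run returns True only if every selected test passed.
    succeeded = nose.run(argv=argv)
    print('OK' if succeeded else 'FAIL')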

View File

@ -2,14 +2,18 @@
from contextlib import contextmanager

import StringIO

+import spack.modules
+from spack.test.mock_packages_test import MockPackagesTest

FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)

# Monkey-patch open to write module files to a StringIO instance
@contextmanager
def mock_open(filename, mode):
    if not mode == 'w':
-       raise RuntimeError('test.modules : unexpected opening mode for monkey-patched open')
+       message = 'test.modules : unexpected opening mode [mock_open]'
+       raise RuntimeError(message)
    FILE_REGISTRY[filename] = StringIO.StringIO()

@ -20,7 +24,6 @@ def mock_open(filename, mode):
    FILE_REGISTRY[filename] = handle.getvalue()
    handle.close()

-import spack.modules
configuration_autoload_direct = { configuration_autoload_direct = {
'enable': ['tcl'], 'enable': ['tcl'],
@ -47,7 +50,8 @@ def mock_open(filename, mode):
'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']} 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']}
}, },
'=x86-linux': { '=x86-linux': {
'environment': {'set': {'FOO': 'foo'}, 'unset': ['BAR']} 'environment': {'set': {'FOO': 'foo'},
'unset': ['BAR']}
} }
} }
} }
@ -72,15 +76,14 @@ def mock_open(filename, mode):
} }
} }
from spack.test.mock_packages_test import MockPackagesTest
class TclTests(MockPackagesTest): class TclTests(MockPackagesTest):
def setUp(self): def setUp(self):
super(TclTests, self).setUp() super(TclTests, self).setUp()
self.configuration_obj = spack.modules.CONFIGURATION self.configuration_obj = spack.modules.CONFIGURATION
spack.modules.open = mock_open spack.modules.open = mock_open
spack.modules.CONFIGURATION = None # Make sure that a non-mocked configuration will trigger an error # Make sure that a non-mocked configuration will trigger an error
spack.modules.CONFIGURATION = None
def tearDown(self): def tearDown(self):
del spack.modules.open del spack.modules.open
@ -117,14 +120,22 @@ def test_alter_environment(self):
spack.modules.CONFIGURATION = configuration_alter_environment spack.modules.CONFIGURATION = configuration_alter_environment
spec = spack.spec.Spec('mpileaks=x86-linux') spec = spack.spec.Spec('mpileaks=x86-linux')
content = self.get_modulefile_content(spec) content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if x.startswith('prepend-path CMAKE_PREFIX_PATH')]), 0) self.assertEqual(
self.assertEqual(len([x for x in content if 'setenv FOO "foo"' in x]), 1) len([x
for x in content
if x.startswith('prepend-path CMAKE_PREFIX_PATH')]), 0)
self.assertEqual(
len([x for x in content if 'setenv FOO "foo"' in x]), 1)
self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 1) self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 1)
spec = spack.spec.Spec('libdwarf=x64-linux') spec = spack.spec.Spec('libdwarf=x64-linux')
content = self.get_modulefile_content(spec) content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if x.startswith('prepend-path CMAKE_PREFIX_PATH')]), 0) self.assertEqual(
self.assertEqual(len([x for x in content if 'setenv FOO "foo"' in x]), 0) len([x
for x in content
if x.startswith('prepend-path CMAKE_PREFIX_PATH')]), 0)
self.assertEqual(
len([x for x in content if 'setenv FOO "foo"' in x]), 0)
self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 0) self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 0)
def test_blacklist(self): def test_blacklist(self):
@ -138,6 +149,9 @@ def test_conflicts(self):
spack.modules.CONFIGURATION = configuration_conflicts spack.modules.CONFIGURATION = configuration_conflicts
spec = spack.spec.Spec('mpileaks=x86-linux') spec = spack.spec.Spec('mpileaks=x86-linux')
content = self.get_modulefile_content(spec) content = self.get_modulefile_content(spec)
-       self.assertEqual(len([x for x in content if x.startswith('conflict')]), 2)
-       self.assertEqual(len([x for x in content if x == 'conflict mpileaks']), 1)
-       self.assertEqual(len([x for x in content if x == 'conflict intel/14.0.1']), 1)
+       self.assertEqual(
+           len([x for x in content if x.startswith('conflict')]), 2)
+       self.assertEqual(
+           len([x for x in content if x == 'conflict mpileaks']), 1)
+       self.assertEqual(
+           len([x for x in content if x == 'conflict intel/14.0.1']), 1)
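
The monkey-patched open used by these tests is handy on its own for capturing generated files in memory (Python 2, matching the code above); a self-contained version:

    import collections
    import StringIO
    from contextlib import contextmanager

    FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)

    @contextmanager
    def mock_open(filename, mode):
        if not mode == 'w':
            message = 'mock_open : unexpected opening mode'
            raise RuntimeError(message)
        FILE_REGISTRY[filename] = StringIO.StringIO()
        try:
            yield FILE_REGISTRY[filename]
        finally:
            # Store the final text so tests can inspect it after the close.
            handle = FILE_REGISTRY[filename]
            FILE_REGISTRY[filename] = handle.getvalue()
            handle.close()

    with mock_open('/fake/prefix/module', 'w') as f:
        f.write('#%Module1.0\n')
        f.write('prepend-path PATH "/fake/prefix/bin"\n')

    print(FILE_REGISTRY['/fake/prefix/module'].splitlines())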