Merge branch 'develop' of https://github.com/LLNL/spack into features/custom_modulefile_from_config

Conflicts:
	lib/spack/spack/config.py

Author: alalazo
Date:   2016-05-11 16:11:19 +02:00
22 changed files with 386 additions and 44 deletions

View File

@@ -1456,6 +1456,51 @@ several variants:
spack deactivate -a python
Filesystem requirements
--------------------------
Spack currently needs to be run from a filesystem that supports
``flock`` locking semantics. Nearly all local filesystems and recent
versions of NFS support this, but parallel filesystems may be mounted
without ``flock`` support enabled. You can determine how your
filesystems are mounted with ``mount``. The output for a Lustre
filesystem might look like this:

.. code-block:: sh

   $ mount -l | grep lscratch
   pilsner-mds1-lnet0@o2ib100:/lsd on /p/lscratchd type lustre (rw,nosuid,noauto,_netdev,lazystatfs,flock)
   porter-mds1-lnet0@o2ib100:/lse on /p/lscratche type lustre (rw,nosuid,noauto,_netdev,lazystatfs,flock)

Note the ``flock`` option on both Lustre mounts. If you do not see
this or a similar option for your filesystem, you may need to ask your
system administrator to enable ``flock``.

This issue typically manifests with the error below:

.. code-block:: sh

   $ ./spack find
   Traceback (most recent call last):
     File "./spack", line 176, in <module>
       main()
     File "./spack", line 154, in main
       return_val = command(parser, args)
     File "./spack/lib/spack/spack/cmd/find.py", line 170, in find
       specs = set(spack.installed_db.query(**q_args))
     File "./spack/lib/spack/spack/database.py", line 551, in query
       with self.read_transaction():
     File "./spack/lib/spack/spack/database.py", line 598, in __enter__
       if self._enter() and self._acquire_fn:
     File "./spack/lib/spack/spack/database.py", line 608, in _enter
       return self._db.lock.acquire_read(self._timeout)
     File "./spack/lib/spack/llnl/util/lock.py", line 103, in acquire_read
       self._lock(fcntl.LOCK_SH, timeout) # can raise LockError.
     File "./spack/lib/spack/llnl/util/lock.py", line 64, in _lock
       fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
   IOError: [Errno 38] Function not implemented

A nicer error message for this case is planned for a future version of Spack.
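
If you are not sure whether a particular directory supports ``flock``,
you can try taking a lock on a scratch file there. The snippet below is a
minimal sketch of such a check; the ``supports_flock`` helper and the path
passed to it are illustrative only and not part of Spack:

.. code-block:: python

   import fcntl
   import os
   import tempfile

   def supports_flock(directory):
       """Return True if files in `directory` accept fcntl locks."""
       fd, path = tempfile.mkstemp(dir=directory)
       try:
           # Same call Spack's lock code makes; on filesystems mounted
           # without flock support this typically raises
           # IOError: [Errno 38] Function not implemented.
           fcntl.lockf(fd, fcntl.LOCK_SH | fcntl.LOCK_NB)
           fcntl.lockf(fd, fcntl.LOCK_UN)
           return True
       except IOError:
           return False
       finally:
           os.close(fd)
           os.remove(path)

   print(supports_flock('/p/lscratchd'))
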
Getting Help
-----------------------

View File

@@ -34,14 +34,14 @@
class InvalidSysTypeError(serr.SpackError):
def __init__(self, sys_type):
super(InvalidSysTypeError, self).__init__(
"Invalid sys_type value for Spack: " + sys_type)
super(InvalidSysTypeError,
self).__init__("Invalid sys_type value for Spack: " + sys_type)
class NoSysTypeError(serr.SpackError):
def __init__(self):
super(NoSysTypeError, self).__init__(
"Could not determine sys_type for this machine.")
super(NoSysTypeError,
self).__init__("Could not determine sys_type for this machine.")
def get_sys_type_from_spack_globals():
@@ -69,15 +69,15 @@ def get_sys_type_from_platform():
@memoized
def sys_type():
"""Returns a SysType for the current machine."""
methods = [get_sys_type_from_spack_globals,
get_sys_type_from_environment,
methods = [get_sys_type_from_spack_globals, get_sys_type_from_environment,
get_sys_type_from_platform]
# search for a method that doesn't return None
sys_type = None
for method in methods:
sys_type = method()
if sys_type: break
if sys_type:
break
# Couldn't determine the sys_type for this machine.
if sys_type is None:

View File

@@ -1,3 +1,4 @@
# flake8: noqa
##############################################################################
# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -401,13 +402,14 @@ def set_pp_defaults(validator, properties, instance, schema):
yield err
return validators.extend(validator_class, {
"properties" : set_defaults,
"patternProperties" : set_pp_defaults
"properties": set_defaults,
"patternProperties": set_pp_defaults
})
DefaultSettingValidator = extend_with_default(Draft4Validator)
def validate_section(data, schema):
"""Validate data read in from a Spack YAML file.
@@ -442,16 +444,14 @@ def get_section_filename(self, section):
validate_section_name(section)
return os.path.join(self.path, "%s.yaml" % section)
def get_section(self, section):
if not section in self.sections:
if section not in self.sections:
path = self.get_section_filename(section)
schema = section_schemas[section]
data = _read_config_file(path, schema)
self.sections[section] = data
return self.sections[section]
def write_section(self, section):
filename = self.get_section_filename(section)
data = self.get_section(section)
@@ -465,7 +465,6 @@ def write_section(self, section):
except (yaml.YAMLError, IOError) as e:
raise ConfigFileError("Error writing to config file: '%s'" % str(e))
def clear(self):
"""Empty cached config information."""
self.sections = {}
@@ -571,7 +570,7 @@ def they_are(t):
# Source dict is merged into dest.
elif they_are(dict):
for sk, sv in source.iteritems():
if not sk in dest:
if sk not in dest:
dest[sk] = copy.copy(sv)
else:
dest[sk] = _merge_yaml(dest[sk], source[sk])
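
The hunk above is the dict case of Spack's recursive configuration merge:
keys present only in the source are copied over, and keys present in both
are merged by recursing. Below is a simplified, standalone sketch of that
idea; it covers only the dict case, assumes the higher-priority source
value wins on scalar conflicts, and omits the list handling that
``_merge_yaml`` also performs:

.. code-block:: python

   import copy

   def merge_dicts(dest, source):
       """Recursively merge `source` into `dest`; source entries win."""
       for sk, sv in source.items():
           if sk not in dest:
               dest[sk] = copy.copy(sv)
           elif isinstance(dest[sk], dict) and isinstance(sv, dict):
               merge_dicts(dest[sk], sv)
           else:
               dest[sk] = copy.copy(sv)
       return dest

   low = {'gcc': {'version': '4.9.3'}}
   high = {'gcc': {'cc': '/usr/bin/gcc'}}
   print(merge_dicts(low, high))
   # {'gcc': {'version': '4.9.3', 'cc': '/usr/bin/gcc'}}
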
@@ -640,7 +639,10 @@ def update_config(section, update_data, scope=None):
# read in the config to ensure we've got current data
configuration = get_config(section)
configuration.update(update_data)
if isinstance(update_data, list):
configuration = update_data
else:
configuration.update(update_data)
# read only the requested section's data.
scope.sections[section] = {section: configuration}
@@ -682,16 +684,20 @@ def spec_externals(spec):
def is_spec_buildable(spec):
"""Return true if the spec pkgspec is configured as buildable"""
allpkgs = get_config('packages')
name = spec.name
if not spec.name in allpkgs:
if spec.name not in allpkgs:
return True
if not 'buildable' in allpkgs[spec.name]:
if 'buildable' not in allpkgs[spec.name]:
return True
return allpkgs[spec.name]['buildable']
class ConfigError(SpackError): pass
class ConfigFileError(ConfigError): pass
class ConfigError(SpackError):
pass
class ConfigFileError(ConfigError):
pass
def get_path(path, data):
if path:
@@ -699,6 +705,7 @@ def get_path(path, data):
else:
return data
class ConfigFormatError(ConfigError):
"""Raised when a configuration format does not match its schema."""
def __init__(self, validation_error, data):
@@ -733,5 +740,6 @@ def __init__(self, validation_error, data):
message = '%s: %s' % (location, validation_error.message)
super(ConfigError, self).__init__(message)
class ConfigSanityError(ConfigFormatError):
"""Same as ConfigFormatError, raised when config is written by Spack."""

View File

@@ -72,6 +72,10 @@
}
}
# Some Sample repo data
repos_low = [ "/some/path" ]
repos_high = [ "/some/other/path" ]
class ConfigTest(MockPackagesTest):
def setUp(self):
@@ -95,6 +99,12 @@ def check_config(self, comps, arch, *compiler_names):
actual = config[arch][key][c]
self.assertEqual(expected, actual)
def test_write_list_in_memory(self):
spack.config.update_config('repos', repos_low, 'test_low_priority')
spack.config.update_config('repos', repos_high, 'test_high_priority')
config = spack.config.get_config('repos')
self.assertEqual(config, repos_high+repos_low)
def test_write_key_in_memory(self):
# Write b_comps "on top of" a_comps.
spack.config.update_config('compilers', a_comps, 'test_low_priority')
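
``test_write_list_in_memory`` above pins down the intended precedence for
list-valued sections such as ``repos``: entries from the higher-priority
scope come first in the merged result returned by ``get_config``. A minimal
model of that behaviour follows; the ``merge_list_scopes`` helper is
illustrative only and not part of Spack's API:

.. code-block:: python

   def merge_list_scopes(scopes_high_to_low):
       """Concatenate list-valued config, highest-priority scope first."""
       merged = []
       for scope_list in scopes_high_to_low:
           merged.extend(scope_list)
       return merged

   repos_low = ["/some/path"]
   repos_high = ["/some/other/path"]
   assert merge_list_scopes([repos_high, repos_low]) == repos_high + repos_low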