Save changes to external repo integration

commit a2f2e6a4ff (parent 92f398a897)
lib/spack/spack/__init__.py

@@ -47,11 +47,17 @@
 share_path = join_path(prefix, "share", "spack")
 
 #
-# Set up the packages database.
+# Setup the spack.repos namespace
+#
+from spack.repo_loader import RepoNamespace
+repos = RepoNamespace()
+
+#
+# Set up the default packages database.
 #
 from spack.packages import PackageDB
 packages_path = join_path(var_path, "packages")
-db = PackageDB(packages_path)
+db = PackageDB()
 
 #
 # Paths to mock files for testing.

@@ -62,12 +68,6 @@
 mock_site_config = join_path(mock_config_path, "site_spackconfig")
 mock_user_config = join_path(mock_config_path, "user_spackconfig")
 
-#
-# Setup the spack.repos namespace
-#
-from spack.repo_loader import RepoNamespace
-repos = RepoNamespace()
-
 #
 # This controls how spack lays out install prefixes and
 # stage directories.
lib/spack/spack/cmd/repo.py

@@ -32,7 +32,7 @@
 import spack.spec
 import spack.config
 from spack.util.environment import get_path
-from spack.packages import packagerepo_filename
+from spack.packages import repo_config
 
 import os
 import exceptions

@@ -42,43 +42,48 @@
 
 def setup_parser(subparser):
     sp = subparser.add_subparsers(
-        metavar='SUBCOMMAND', dest='packagerepo_command')
+        metavar='SUBCOMMAND', dest='repo_command')
 
-    add_parser = sp.add_parser('add', help=packagerepo_add.__doc__)
+    add_parser = sp.add_parser('add', help=repo_add.__doc__)
     add_parser.add_argument('directory', help="Directory containing the packages.")
 
-    create_parser = sp.add_parser('create', help=packagerepo_create.__doc__)
+    create_parser = sp.add_parser('create', help=repo_create.__doc__)
     create_parser.add_argument('directory', help="Directory containing the packages.")
     create_parser.add_argument('name', help="Name of new package repository.")
+<<<<<<< HEAD:lib/spack/spack/cmd/packagerepo.py
 
     remove_parser = sp.add_parser('remove', help=packagerepo_remove.__doc__)
+=======
+
+    remove_parser = sp.add_parser('remove', help=repo_remove.__doc__)
+>>>>>>> Save changes to external repo integration:lib/spack/spack/cmd/repo.py
     remove_parser.add_argument('name')
 
-    list_parser = sp.add_parser('list', help=packagerepo_list.__doc__)
+    list_parser = sp.add_parser('list', help=repo_list.__doc__)
 
 
 def add_to_config(dir):
     config = spack.config.get_config()
     user_config = spack.config.get_config('user')
     orig = None
-    if config.has_value('packagerepo', '', 'directories'):
-        orig = config.get_value('packagerepo', '', 'directories')
+    if config.has_value('repo', '', 'directories'):
+        orig = config.get_value('repo', '', 'directories')
     if orig and dir in orig.split(':'):
         return False
 
     newsetting = orig + ':' + dir if orig else dir
-    user_config.set_value('packagerepo', '', 'directories', newsetting)
+    user_config.set_value('repo', '', 'directories', newsetting)
     user_config.write()
     return True
 
 
-def packagerepo_add(args):
+def repo_add(args):
     """Add package sources to the Spack configuration."""
     if not add_to_config(args.directory):
         tty.die('Repo directory %s already exists in the repo list' % dir)
 
 
-def packagerepo_create(args):
+def repo_create(args):
     """Create a new package repo at a directory and name"""
     dir = args.directory
     name = args.name

@@ -90,7 +95,7 @@ def packagerepo_create(args):
         mkdirp(dir)
     except exceptions.OSError, e:
         tty.die('Failed to create new directory %s' % dir)
-    path = os.path.join(dir, packagerepo_filename)
+    path = os.path.join(dir, repo_config)
     try:
         with closing(open(path, 'w')) as repofile:
             repofile.write(name + '\n')

@@ -101,12 +106,12 @@ def packagerepo_create(args):
         tty.warn('Repo directory %s already exists in the repo list' % dir)
 
 
-def packagerepo_remove(args):
+def repo_remove(args):
     """Remove a package source from the Spack configuration"""
     pass
 
 
-def packagerepo_list(args):
+def repo_list(args):
     """List package sources and their mnemoics"""
     root_names = spack.db.repos
     max_len = max(len(s[0]) for s in root_names)

@@ -116,9 +121,9 @@ def packagerepo_list(args):
 
 
 
-def packagerepo(parser, args):
-    action = { 'add'    : packagerepo_add,
-               'create' : packagerepo_create,
-               'remove' : packagerepo_remove,
-               'list'   : packagerepo_list }
-    action[args.packagerepo_command](args)
+def repo(parser, args):
+    action = { 'add'    : repo_add,
+               'create' : repo_create,
+               'remove' : repo_remove,
+               'list'   : repo_list }
+    action[args.repo_command](args)
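Note: the rewritten command keeps Spack's usual subcommand shape — a `setup_parser` that registers subparsers, then a dispatch dict keyed on `args.repo_command`. A standalone sketch of that pattern (demo names only, not Spack's actual command plumbing):

```python
import argparse

def repo_add(args):
    """Add package sources to the configuration."""
    print("adding %s" % args.directory)

def repo_list(args):
    """List package sources."""
    print("listing repos")

parser = argparse.ArgumentParser(prog='spack-repo-demo')
sp = parser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command')

add_parser = sp.add_parser('add', help=repo_add.__doc__)
add_parser.add_argument('directory')
sp.add_parser('list', help=repo_list.__doc__)

args = parser.parse_args(['add', '/path/to/repo'])
action = {'add': repo_add, 'list': repo_list}
action[args.repo_command](args)   # prints: adding /path/to/repo
```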
lib/spack/spack/config.py

@@ -45,11 +45,11 @@
 Configuration file format
 ===============================
 
-Configuration files are formatted using YAML syntax.
-This format is implemented by Python's
-yaml class, and it's easy to read and versatile.
+Configuration files are formatted using YAML syntax.  This format is
+implemented by libyaml (included with Spack as an external module),
+and it's easy to read and versatile.
 
-The config files are structured as trees, like this ``compiler`` section::
+Config files are structured as trees, like this ``compiler`` section::
 
   compilers:
     chaos_5_x86_64_ib:
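Note: to make the docstring's claim concrete, a config tree like the ``compilers`` section above parses into plain nested dicts. A sketch using PyYAML in place of the `external.yaml` module Spack vendors (the `gcc@4.4.7` entry is illustrative):

```python
import yaml  # stand-in for Spack's vendored `external.yaml`

text = """
compilers:
  chaos_5_x86_64_ib:
    gcc@4.4.7:
      cc: /usr/bin/gcc
      cxx: /usr/bin/g++
"""
tree = yaml.load(text)  # yaml.safe_load in modern PyYAML
print(tree['compilers']['chaos_5_x86_64_ib']['gcc@4.4.7']['cc'])
# -> /usr/bin/gcc
```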
@@ -83,62 +83,73 @@
   }
 }
 
-Some routines, like get_mirrors_config and get_compilers_config may strip
-off the top-levels of the tree and return subtrees.
+Some convenience functions, like get_mirrors_config and
+``get_compilers_config`` may strip off the top-levels of the tree and
+return subtrees.
 
 """
 import os
-import exceptions
 import sys
+import copy
-from external.ordereddict import OrderedDict
-from llnl.util.lang import memoized
-import spack.error
 
 from external import yaml
 from external.yaml.error import MarkedYAMLError
 
 import llnl.util.tty as tty
 from llnl.util.filesystem import mkdirp
+from llnl.util.lang import memoized
+
+import spack
 
 
 _config_sections = {}
 class _ConfigCategory:
     name = None
     filename = None
     merge = True
-    def __init__(self, n, f, m):
-        self.name = n
-        self.filename = f
-        self.merge = m
+    def __init__(self, name, filename, merge, strip):
+        self.name = name
+        self.filename = filename
+        self.merge = merge
+        self.strip = strip
         self.files_read_from = []
         self.result_dict = {}
-        _config_sections[n] = self
+        _config_sections[name] = self
 
-_ConfigCategory('compilers', 'compilers.yaml', True)
-_ConfigCategory('mirrors', 'mirrors.yaml', True)
-_ConfigCategory('view', 'views.yaml', True)
-_ConfigCategory('order', 'orders.yaml', True)
+_ConfigCategory('config', 'config.yaml', True, False)
+_ConfigCategory('compilers', 'compilers.yaml', True, True)
+_ConfigCategory('mirrors', 'mirrors.yaml', True, True)
+_ConfigCategory('view', 'views.yaml', True, True)
+_ConfigCategory('order', 'orders.yaml', True, True)
 
 """Names of scopes and their corresponding configuration files."""
 config_scopes = [('site', os.path.join(spack.etc_path, 'spack')),
                  ('user', os.path.expanduser('~/.spack'))]
 
 _compiler_by_arch = {}
-_read_config_file_result = {}
+
+@memoized
 def _read_config_file(filename):
-    """Read a given YAML configuration file"""
-    global _read_config_file_result
-    if filename in _read_config_file_result:
-        return _read_config_file_result[filename]
+    """Read a YAML configuration file"""
+    # Ignore nonexisting files.
+    if not os.path.exists(filename):
+        return None
+
+    elif not os.path.isfile(filename):
+        tty.die("Invlaid configuration. %s exists but is not a file." % filename)
+
+    elif not os.access(filename, os.R_OK):
+        tty.die("Configuration file %s is not readable." % filename)
 
     try:
         with open(filename) as f:
-            ydict = yaml.load(f)
+            return yaml.load(f)
 
     except MarkedYAMLError, e:
         tty.die("Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
-    except exceptions.IOError, e:
-        _read_config_file_result[filename] = None
-        return None
-    _read_config_file_result[filename] = ydict
-    return ydict
 
 
 def clear_config_caches():
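Note: the hand-rolled `_read_config_file_result` cache is replaced by the `@memoized` decorator from `llnl.util.lang`. Later hunks call `_read_config_file.clear()` and assign into `_read_config_file.cache[path]` directly, so the decorator must expose its cache. A minimal sketch of such a decorator — the real `llnl.util.lang.memoized` may differ in detail:

```python
import functools

def memoized(func):
    """Cache a single-argument function; expose the cache dict and a
       clear() method, as the config code above relies on."""
    cache = {}

    @functools.wraps(func)
    def wrapper(arg):
        if arg not in cache:
            cache[arg] = func(arg)
        return cache[arg]

    wrapper.cache = cache
    wrapper.clear = cache.clear
    return wrapper

@memoized
def square(x):
    print("computing %d" % x)
    return x * x

square(3)                 # computes and prints
square(3)                 # served from cache, no print
print(square.cache[3])    # -> 9
square.clear()            # next call recomputes
```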
@@ -147,41 +158,66 @@ def clear_config_caches():
     for key,s in _config_sections.iteritems():
         s.files_read_from = []
         s.result_dict = {}
-    spack.config._read_config_file_result = {}
+    _read_config_file.clear()
     spack.config._compiler_by_arch = {}
     spack.compilers._cached_default_compiler = None
 
 
-def _merge_dicts(d1, d2):
-    """Recursively merges two configuration trees, with entries
-       in d2 taking precedence over d1"""
-    if not d1:
-        return d2.copy()
-    if not d2:
-        return d1
-
-    for key2, val2 in d2.iteritems():
-        if not key2 in d1:
-            d1[key2] = val2
-            continue
-        val1 = d1[key2]
-        if isinstance(val1, dict) and isinstance(val2, dict):
-            d1[key2] = _merge_dicts(val1, val2)
-            continue
-        if isinstance(val1, list) and isinstance(val2, list):
-            val1.extend(val2)
-            seen = set()
-            d1[key2] = [ x for x in val1 if not (x in seen or seen.add(x)) ]
-            continue
-        d1[key2] = val2
-    return d1
+def _merge_yaml(dest, source):
+    """Merges source into dest; entries in source take precedence over dest.
+
+       Config file authors can optionally end any attribute in a dict
+       with `::` instead of `:`, and the key will override that of the
+       parent instead of merging.
+    """
+    def they_are(t):
+        return isinstance(dest, t) and isinstance(source, t)
+
+    # If both are None, handle specially and return None.
+    if source is None and dest is None:
+        return None
+
+    # If source is None, overwrite with source.
+    elif source is None:
+        return None
+
+    # Source list is prepended (for precedence)
+    if they_are(list):
+        seen = set(source)
+        dest[:] = source + [x for x in dest if x not in seen]
+        return dest
+
+    # Source dict is merged into dest.  Extra ':' means overwrite.
+    elif they_are(dict):
+        for sk, sv in source.iteritems():
+            # allow total override with, e.g., repos::
+            override = sk.endswith(':')
+            if override:
+                sk = sk.rstrip(':')
+
+            if override or not sk in dest:
+                dest[sk] = copy.copy(sv)
+            else:
+                dest[sk] = _merge_yaml(dest[sk], source[sk])
+        return dest
+
+    # In any other case, overwrite with a copy of the source value.
+    else:
+        return copy.copy(source)
 
 
-def get_config(category_name):
-    """Get the confguration tree for the names category. Strips off the
-       top-level category entry from the dict"""
-    global config_scopes
-    category = _config_sections[category_name]
+def substitute_spack_prefix(path):
+    """Replaces instances of $spack with Spack's prefix."""
+    return path.replace('$spack', spack.prefix)
+
+
+def get_config(category='config'):
+    """Get the confguration tree for a category.
+
+       Strips off the top-level category entry from the dict
+    """
+    category = _config_sections[category]
     if category.result_dict:
         return category.result_dict
 
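Note: to see what the new merge semantics buy, here is the behavior `_merge_yaml` implements, re-implemented in simplified form (None handling and in-place details elided; Python 3 spelling of `iteritems`):

```python
import copy

def merge_yaml(dest, source):
    """Simplified sketch of _merge_yaml: source wins over dest."""
    if isinstance(dest, list) and isinstance(source, list):
        # source entries are prepended, duplicates dropped from dest
        seen = set(source)
        return source + [x for x in dest if x not in seen]
    if isinstance(dest, dict) and isinstance(source, dict):
        for sk, sv in source.items():
            override = sk.endswith(':')   # 'repos::' in YAML -> 'repos:' key
            if override:
                sk = sk.rstrip(':')
            if override or sk not in dest:
                dest[sk] = copy.copy(sv)
            else:
                dest[sk] = merge_yaml(dest[sk], sv)
        return dest
    return copy.copy(source)

site = {'repos': ['/site/repo'], 'mirrors': {'a': 'http://a'}}
user = {'repos': ['/home/me/repo'], 'mirrors': {'b': 'http://b'}}
print(merge_yaml(site, user))
# repos from the higher-precedence scope are *prepended*:
#   ['/home/me/repo', '/site/repo'];  mirrors are merged: {'a': ..., 'b': ...}

site = {'repos': ['/site/repo']}
print(merge_yaml(site, {'repos:': ['/only/me']}))
# trailing ':' overrides instead of merging: {'repos': ['/only/me']}
```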
@@ -191,14 +227,18 @@ def get_config(category_name):
         result = _read_config_file(path)
         if not result:
             continue
-        if not category_name in result:
-            continue
+
+        if category.strip:
+            if not category.name in result:
+                continue
+            result = result[category.name]
+
         category.files_read_from.insert(0, path)
-        result = result[category_name]
         if category.merge:
-            category.result_dict = _merge_dicts(category.result_dict, result)
+            category.result_dict = _merge_yaml(category.result_dict, result)
         else:
             category.result_dict = result
 
     return category.result_dict
 
@@ -215,7 +255,7 @@ def get_compilers_config(arch=None):
     cc_config = get_config('compilers')
     if arch in cc_config and 'all' in cc_config:
         arch_compiler = dict(cc_config[arch])
-        _compiler_by_arch[arch] = _merge_dict(arch_compiler, cc_config['all'])
+        _compiler_by_arch[arch] = _merge_yaml(arch_compiler, cc_config['all'])
     elif arch in cc_config:
         _compiler_by_arch[arch] = cc_config[arch]
     elif 'all' in cc_config:

@@ -225,6 +265,13 @@ def get_compilers_config(arch=None):
     return _compiler_by_arch[arch]
 
 
+def get_repos_config():
+    config = get_config()
+    if 'repos' not in config:
+        return []
+    return config['repos']
+
+
 def get_mirror_config():
     """Get the mirror configuration from config files"""
     return get_config('mirrors')
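Note: `get_repos_config` reads the new top-level ``repos`` list from the unstripped ``config`` category, and `PackageDB` (further down) runs each entry through `substitute_spack_prefix`. A sketch of the round trip, with a hypothetical config.yaml and PyYAML standing in for `external.yaml`:

```python
import yaml

spack_prefix = '/opt/spack'   # stand-in for spack.prefix

config_yaml = """
repos:
  - $spack/var/spack/packages
  - /home/me/my-packages
"""

def substitute_spack_prefix(path):
    return path.replace('$spack', spack_prefix)

config = yaml.load(config_yaml)
repo_dirs = [substitute_spack_prefix(rd) for rd in config.get('repos', [])]
print(repo_dirs)
# ['/opt/spack/var/spack/packages', '/home/me/my-packages']
```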
@@ -232,7 +279,6 @@ def get_mirror_config():
 
 def get_config_scope_dirname(scope):
     """For a scope return the config directory"""
-    global config_scopes
     for s,p in config_scopes:
         if s == scope:
             return p

@@ -251,16 +297,16 @@ def get_config_scope_filename(scope, category_name):
 def add_to_config(category_name, addition_dict, scope=None):
     """Merge a new dict into a configuration tree and write the new
        configuration to disk"""
-    global _read_config_file_result
     get_config(category_name)
     category = _config_sections[category_name]
 
-    #If scope is specified, use it. Otherwise use the last config scope that
-    #we successfully parsed data from.
+    # If scope is specified, use it.  Otherwise use the last config scope that
+    # we successfully parsed data from.
     file = None
     path = None
     if not scope and not category.files_read_from:
         scope = 'user'
 
     if scope:
         try:
             dir = get_config_scope_dirname(scope)

@@ -268,32 +314,37 @@ def add_to_config(category_name, addition_dict, scope=None):
             mkdirp(dir)
             path = os.path.join(dir, category.filename)
             file = open(path, 'w')
-        except exceptions.IOError, e:
+        except IOError, e:
             pass
     else:
         for p in category.files_read_from:
             try:
                 file = open(p, 'w')
-            except exceptions.IOError, e:
+            except IOError, e:
                 pass
             if file:
                 path = p
                 break;
 
     if not file:
         tty.die('Unable to write to config file %s' % path)
 
-    #Merge the new information into the existing file info, then write to disk
-    new_dict = _read_config_file_result[path]
+    # Merge the new information into the existing file info, then write to disk
+    new_dict = _read_config_file(path)
+
     if new_dict and category_name in new_dict:
         new_dict = new_dict[category_name]
-    new_dict = _merge_dicts(new_dict, addition_dict)
+
+    new_dict = _merge_yaml(new_dict, addition_dict)
     new_dict = { category_name : new_dict }
-    _read_config_file_result[path] = new_dict
+
+    # Install new dict as memoized value, and dump to disk
+    _read_config_file.cache[path] = new_dict
     yaml.dump(new_dict, stream=file, default_flow_style=False)
     file.close()
 
-    #Merge the new information into the cached results
-    category.result_dict = _merge_dicts(category.result_dict, addition_dict)
+    # Merge the new information into the cached results
+    category.result_dict = _merge_yaml(category.result_dict, addition_dict)
 
 
 def add_to_mirror_config(addition_dict, scope=None):
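Note: `add_to_config` now re-reads the target file through the memoized `_read_config_file`, merges the addition in with `_merge_yaml`, installs the merged dict back into the memo cache via `_read_config_file.cache[path]`, and dumps it in block style. For reference, this is roughly what the final `yaml.dump(..., default_flow_style=False)` emits (sketch with PyYAML rather than Spack's vendored `external.yaml`):

```python
import yaml

new_dict = {'compilers': {'gcc@4.5.0': {'cc': '/gcc450'}}}
print(yaml.dump(new_dict, default_flow_style=False))
# compilers:
#   gcc@4.5.0:
#     cc: /gcc450
```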
@@ -311,7 +362,6 @@ def add_to_compiler_config(addition_dict, scope=None, arch=None):
 
 def remove_from_config(category_name, key_to_rm, scope=None):
     """Remove a configuration key and write a new configuration to disk"""
-    global config_scopes
     get_config(category_name)
     scopes_to_rm_from = [scope] if scope else [s for s,p in config_scopes]
     category = _config_sections[category_name]
lib/spack/spack/packages.py

@@ -30,7 +30,9 @@
 import imp
 import spack.config
 import re
-from contextlib import closing
+import itertools
+import traceback
+from external import yaml
 
 import llnl.util.tty as tty
 from llnl.util.filesystem import join_path

@@ -44,7 +46,7 @@
 from spack.repo_loader import RepoLoader, imported_packages_module, package_file_name
 
 # Filename for package repo names
-packagerepo_filename = 'reponame'
+repo_config = 'repo.yaml'
 
 def _autospec(function):
     """Decorator that automatically converts the argument of a single-arg
@@ -56,56 +58,85 @@ def converter(self, spec_like, **kwargs):
     return converter
 
 
+def sliding_window(seq, n):
+    it = iter(seq)
+    result = tuple(itertools.islice(it, n))
+    if len(result) == n:
+        yield result
+    for elem in it:
+        result = result[1:] + (elem,)
+        yield result
+
+
 class PackageDB(object):
-    def __init__(self, default_root):
-        """Construct a new package database from a root directory."""
-
-        #Collect the repos from the config file and read their names from the file system
-        repo_dirs = self._repo_list_from_config()
-        repo_dirs.append(default_root)
-        self.repos = [(self._read_reponame_from_directory(dir), dir) for dir in repo_dirs]
-
-        # Check for duplicate repo names
-        s = set()
-        dups = set(r for r in self.repos if r[0] in s or s.add(r[0]))
-        if dups:
-            reponame = list(dups)[0][0]
-            dir1 = list(dups)[0][1]
-            dir2 = dict(s)[reponame]
-            tty.die("Package repo %s in directory %s has the same name as the "
-                    "repo in directory %s" %
-                    (reponame, dir1, dir2))
+    def __init__(self, *repo_dirs):
+        """Construct a new package database from a list of directories.
+
+           Args:
+             repo_dirs   List of directories containing packages.
+
+           If ``repo_dirs`` is empty, gets repository list from Spack configuration.
+        """
+        if not repo_dirs:
+            repo_dirs = spack.config.get_repos_config()
+            if not repo_dirs:
+                tty.die("Spack configuration contains no package repositories.")
+
+        # Collect the repos from the config file and read their names
+        # from the file system
+        repo_dirs = [spack.config.substitute_spack_prefix(rd) for rd in repo_dirs]
+
+        self.repos = []
+        for rdir in repo_dirs:
+            rname = self._read_reponame_from_directory(rdir)
+            if rname:
+                self.repos.append((self._read_reponame_from_directory(rdir), rdir))
+
+        by_path = sorted(self.repos, key=lambda r:r[1])
+        by_name = sorted(self.repos, key=lambda r:r[0])
+
+        for r1, r2 in by_path:
+            if r1[1] == r2[1]:
+                tty.die("Package repos are the same:",
+                        "  %20s  %s" % r1, "  %20s  %s" % r2)
+
+        for r1, r2 in by_name:
+            if r1[0] == r2[0]:
+                tty.die("Package repos cannot have the same name:",
+                        "  %20s  %s" % r1, "  %20s  %s" % r2)
 
         # For each repo, create a RepoLoader
-        self.repo_loaders = dict([(r[0], RepoLoader(r[0], r[1])) for r in self.repos])
+        self.repo_loaders = dict((name, RepoLoader(name, path))
+                                 for name, path in self.repos)
 
         self.instances = {}
         self.provider_index = None
 
 
     def _read_reponame_from_directory(self, dir):
-        """For a packagerepo directory, read the repo name from the dir/reponame file"""
-        path = os.path.join(dir, packagerepo_filename)
+        """For a packagerepo directory, read the repo name from the
+           $root/repo.yaml file"""
+        path = os.path.join(dir, repo_config)
+
         try:
-            with closing(open(path, 'r')) as reponame_file:
-                name = reponame_file.read().lstrip().rstrip()
-                if not re.match(r'[a-zA-Z][a-zA-Z0-9]+', name):
-                    tty.die("Package repo name '%s', read from %s, is an invalid name. "
-                            "Repo names must began with a letter and only contain letters "
-                            "and numbers." % (name, path))
+            with open(path) as reponame_file:
+                yaml_data = yaml.load(reponame_file)
+
+                if (not yaml_data or
+                    'repo' not in yaml_data or
+                    'namespace' not in yaml_data['repo']):
+                    tty.die("Invalid %s in %s" % (repo_config, dir))
+
+                name = yaml_data['repo']['namespace']
+                if not re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', name):
+                    tty.die(
+                        "Package repo name '%s', read from %s, is an invalid name. "
+                        "Repo names must began with a letter and only contain "
+                        "letters and numbers." % (name, path))
                 return name
         except exceptions.IOError, e:
-            tty.die("Could not read from package repo name file %s" % path)
-
-
-    def _repo_list_from_config(self):
-        """Read through the spackconfig and return the list of packagerepo directories"""
-        config = spack.config.get_config()
-        if not config.has_option('packagerepo', 'directories'): return []
-        dir_string = config.get('packagerepo', 'directories')
-        return dir_string.split(':')
 
 
 @_autospec
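Note: the new `sliding_window` helper yields consecutive n-tuples of a sequence, which pairs naturally with the `by_path`/`by_name` sorts above for comparing adjacent entries. (As committed, the loops iterate the sorted lists directly, which unpacks each (name, path) tuple rather than comparing neighbors — presumably `sliding_window(..., 2)` was the intent.) A sketch of duplicate detection with it:

```python
import itertools

def sliding_window(seq, n):
    it = iter(seq)
    result = tuple(itertools.islice(it, n))
    if len(result) == n:
        yield result
    for elem in it:
        result = result[1:] + (elem,)
        yield result

repos = [('mock', '/a/mock'), ('mock', '/b/mock'), ('gov.llnl.spack', '/a/pkgs')]
by_name = sorted(repos, key=lambda r: r[0])
for r1, r2 in sliding_window(by_name, 2):
    if r1[0] == r2[0]:
        print("duplicate repo name: %s (%s, %s)" % (r1[0], r1[1], r2[1]))
# duplicate repo name: mock (/a/mock, /b/mock)
```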
@@ -125,7 +156,7 @@ def get(self, spec, **kwargs):
             except Exception, e:
                 if spack.debug:
                     sys.excepthook(*sys.exc_info())
                raise FailedConstructorError(spec.name, e)
-                raise FailedConstructorError(spec.name, e)
+                raise FailedConstructorError(spec.name, *sys.exc_info())
 
         return self.instances[spec]
 

@@ -304,8 +335,10 @@ def __init__(self, name, repo=None):
 
 class FailedConstructorError(spack.error.SpackError):
     """Raised when a package's class constructor fails."""
-    def __init__(self, name, reason):
+    def __init__(self, name, exc_type, exc_obj, exc_tb):
         super(FailedConstructorError, self).__init__(
             "Class constructor failed for package '%s'." % name,
-            str(reason))
+            '\nCaused by:\n' +
+            ('%s: %s\n' % (exc_type.__name__, exc_obj)) +
+            ''.join(traceback.format_tb(exc_tb)))
         self.name = name
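Note: `FailedConstructorError` now receives the full `sys.exc_info()` triple instead of just the exception object, so it can embed the original traceback in its message. The mechanism, sketched standalone:

```python
import sys
import traceback

def risky():
    raise ValueError("bad package")

try:
    risky()
except Exception:
    exc_type, exc_obj, exc_tb = sys.exc_info()
    message = ('Caused by:\n' +
               '%s: %s\n' % (exc_type.__name__, exc_obj) +
               ''.join(traceback.format_tb(exc_tb)))
    print(message)
# Caused by:
# ValueError: bad package
#   File "...", line 8, in <module> ...
```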
|
@ -1,8 +1,10 @@
|
|||||||
import spack
|
|
||||||
import spack.repos
|
|
||||||
import re
|
import re
|
||||||
|
import sys
|
||||||
import types
|
import types
|
||||||
|
import traceback
|
||||||
|
|
||||||
from llnl.util.lang import *
|
from llnl.util.lang import *
|
||||||
|
import spack
|
||||||
|
|
||||||
# Name of module under which packages are imported
|
# Name of module under which packages are imported
|
||||||
imported_packages_module = 'spack.repos'
|
imported_packages_module = 'spack.repos'
|
||||||
@ -13,14 +15,30 @@
|
|||||||
import sys
|
import sys
|
||||||
class LazyLoader:
|
class LazyLoader:
|
||||||
"""The LazyLoader handles cases when repo modules or classes
|
"""The LazyLoader handles cases when repo modules or classes
|
||||||
are imported. It watches for 'spack.repos.*' loads, then
|
are imported. It watches for 'spack.repos.*' loads, then
|
||||||
redirects the load to the appropriate module."""
|
redirects the load to the appropriate module."""
|
||||||
def find_module(self, fullname, pathname):
|
def find_module(self, fullname, pathname):
|
||||||
if not fullname.startswith(imported_packages_module):
|
if not fullname.startswith(imported_packages_module):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
print "HERE ==="
|
||||||
|
print
|
||||||
|
for line in traceback.format_stack():
|
||||||
|
print " ", line.strip()
|
||||||
|
print
|
||||||
|
print "full: ", fullname
|
||||||
|
print "path: ", pathname
|
||||||
|
print
|
||||||
|
|
||||||
partial_name = fullname[len(imported_packages_module)+1:]
|
partial_name = fullname[len(imported_packages_module)+1:]
|
||||||
repo = partial_name.split('.')[0]
|
|
||||||
module = partial_name.split('.')[1]
|
print "partial: ", partial_name
|
||||||
|
print
|
||||||
|
|
||||||
|
last_dot = partial_name.rfind('.')
|
||||||
|
repo = partial_name[:last_dot]
|
||||||
|
module = partial_name[last_dot+1:]
|
||||||
|
|
||||||
repo_loader = spack.db.repo_loaders.get(repo)
|
repo_loader = spack.db.repo_loaders.get(repo)
|
||||||
if repo_loader:
|
if repo_loader:
|
||||||
try:
|
try:
|
||||||
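Note: `LazyLoader` works as a `sys.meta_path` import hook: anything imported under `spack.repos.<repo>.<module>` is intercepted and redirected to the right `RepoLoader`. (The `print` statements added in this hunk look like leftover debugging.) A minimal sketch of the old-style PEP 302 protocol it uses — this matches the Python 2-era code above; Python 3.12+ dropped the `find_module` fallback in favor of `find_spec`:

```python
import sys
import types

class DemoFinder(object):
    """Old-style (PEP 302) meta_path finder: find_module + load_module."""
    prefix = 'virtualpkgs'   # hypothetical namespace, like 'spack.repos'

    def find_module(self, fullname, path=None):
        if fullname != self.prefix and not fullname.startswith(self.prefix + '.'):
            return None              # not ours; let normal machinery run
        return self                  # claim the import

    def load_module(self, fullname):
        if fullname in sys.modules:
            return sys.modules[fullname]
        module = types.ModuleType(fullname)
        module.__path__ = []         # mark as a package so submodules resolve
        sys.modules[fullname] = module
        return module

sys.meta_path.insert(0, DemoFinder())

import virtualpkgs.libelf            # intercepted by DemoFinder
print(sys.modules['virtualpkgs.libelf'])
```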
@@ -43,7 +61,7 @@ class RepoNamespace(types.ModuleType):
     def __init__(self):
         import sys
         sys.modules[imported_packages_module] = self
 
     def __getattr__(self, name):
         if name in _reponames:
             return _reponames[name]

@@ -62,7 +80,7 @@ class RepoLoader(types.ModuleType):
     """Each RepoLoader is associated with a repository, and the RepoLoader is
        responsible for loading packages out of that repository.  For example,
        a RepoLoader may be responsible for spack.repos.original, and when someone
        references spack.repos.original.libelf that RepoLoader will load the
        libelf package."""
     def __init__(self, reponame, repopath):
         self.path = repopath

@@ -70,7 +88,6 @@ def __init__(self, reponame, repopath):
         self.module_name = imported_packages_module + '.' + reponame
         if not reponame in _reponames:
             _reponames[reponame] = self
-            spack.repos.add_repo(reponame, self)
 
         import sys
         sys.modules[self.module_name] = self

@@ -111,5 +128,3 @@ def get_module(self, pkg_name):
                     pkg_name, file_path, e.message))
 
         return module
-
-
lib/spack/spack/test/config.py

@@ -30,45 +30,85 @@
 from spack.packages import PackageDB
 from spack.test.mock_packages_test import *
 
+# Some sample compiler config data
+a_comps = {
+    "gcc@4.7.3" : {
+        "cc" : "/gcc473",
+        "cxx" : "/g++473",
+        "f77" : None,
+        "f90" : None },
+    "gcc@4.5.0" : {
+        "cc" : "/gcc450",
+        "cxx" : "/g++450",
+        "f77" : "/gfortran",
+        "f90" : "/gfortran" },
+    "clang@3.3" : {
+        "cc" : "<overwritten>",
+        "cxx" : "<overwritten>",
+        "f77" : "<overwritten>",
+        "f90" : "<overwritten>" }
+}
+
+b_comps = {
+    "icc@10.0" : {
+        "cc" : "/icc100",
+        "cxx" : "/icc100",
+        "f77" : None,
+        "f90" : None },
+    "icc@11.1" : {
+        "cc" : "/icc111",
+        "cxx" : "/icp111",
+        "f77" : "/ifort",
+        "f90" : "/ifort" },
+    "clang@3.3" : {
+        "cc" : "/clang",
+        "cxx" : "/clang++",
+        "f77" : None,
+        "f90" : None}
+}
+
 class ConfigTest(MockPackagesTest):
 
     def setUp(self):
-        self.initmock()
+        super(ConfigTest, self).setUp()
         self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')
-        spack.config.config_scopes = [('test_low_priority', os.path.join(self.tmp_dir, 'low')),
-                                      ('test_high_priority', os.path.join(self.tmp_dir, 'high'))]
+        spack.config.config_scopes = [
+            ('test_low_priority', os.path.join(self.tmp_dir, 'low')),
+            ('test_high_priority', os.path.join(self.tmp_dir, 'high'))]
 
     def tearDown(self):
-        self.cleanmock()
+        super(ConfigTest, self).tearDown()
         shutil.rmtree(self.tmp_dir, True)
 
-    def check_config(self, comps):
+
+    def check_config(self, comps, *compiler_names):
+        """Check that named compilers in comps match Spack's config."""
         config = spack.config.get_compilers_config()
         compiler_list = ['cc', 'cxx', 'f77', 'f90']
-        for key in comps:
+        for key in compiler_names:
             for c in compiler_list:
-                if comps[key][c] == '/bad':
-                    continue
                 self.assertEqual(comps[key][c], config[key][c])
 
 
-    def test_write_key(self):
-        a_comps = {"gcc@4.7.3" : { "cc" : "/gcc473", "cxx" : "/g++473", "f77" : None, "f90" : None },
-                   "gcc@4.5.0" : { "cc" : "/gcc450", "cxx" : "/g++450", "f77" : "/gfortran", "f90" : "/gfortran" },
-                   "clang@3.3" : { "cc" : "/bad", "cxx" : "/bad", "f77" : "/bad", "f90" : "/bad" }}
-
-        b_comps = {"icc@10.0" : { "cc" : "/icc100", "cxx" : "/icc100", "f77" : None, "f90" : None },
-                   "icc@11.1" : { "cc" : "/icc111", "cxx" : "/icp111", "f77" : "/ifort", "f90" : "/ifort" },
-                   "clang@3.3" : { "cc" : "/clang", "cxx" : "/clang++", "f77" : None, "f90" : None}}
-
+    def test_write_key_in_memory(self):
+        # Write b_comps "on top of" a_comps.
         spack.config.add_to_compiler_config(a_comps, 'test_low_priority')
         spack.config.add_to_compiler_config(b_comps, 'test_high_priority')
 
-        self.check_config(a_comps)
-        self.check_config(b_comps)
+        # Make sure the config looks how we expect.
+        self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+        self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
+
+
+    def test_write_key_to_disk(self):
+        # Write b_comps "on top of" a_comps.
+        spack.config.add_to_compiler_config(a_comps, 'test_low_priority')
+        spack.config.add_to_compiler_config(b_comps, 'test_high_priority')
+
+        # Clear caches so we're forced to read from disk.
         spack.config.clear_config_caches()
 
-        self.check_config(a_comps)
-        self.check_config(b_comps)
+        # Same check again, to ensure consistency.
+        self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+        self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
var/spack/mock_packages/repo.yaml (new file, 2 lines)

@@ -0,0 +1,2 @@
+repo:
+  namespace: mock

var/spack/packages/repo.yaml (new file, 2 lines)

@@ -0,0 +1,2 @@
+repo:
+  namespace: gov.llnl.spack
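Note: these two `repo.yaml` files replace the old one-line `reponame` file, and `_read_reponame_from_directory` above expects exactly this shape. A sketch of reading one, with PyYAML standing in for `external.yaml`:

```python
import re
import yaml

text = """\
repo:
  namespace: mock
"""

yaml_data = yaml.load(text)
assert yaml_data and 'repo' in yaml_data and 'namespace' in yaml_data['repo']

name = yaml_data['repo']['namespace']
if not re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', name):
    raise ValueError("invalid repo name: %s" % name)
print(name)   # -> mock
```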
(deleted file, 1 line)

@@ -1 +0,0 @@
-original