Compare commits
39 Commits
| SHA1 |
|---|
| f70d2c7ccb |
| 62d18d9af7 |
| 8824040eda |
| d744b83584 |
| 14601b59ef |
| 8d7360106f |
| 881f184eab |
| 63bec85a24 |
| 159975a561 |
| 25ba7d071a |
| 54b79d5661 |
| 235f93c241 |
| a73c5ffb0b |
| ce53ce284b |
| ae68318475 |
| bd63c19b94 |
| 3f3bcacd16 |
| d349d4ab0b |
| e61b3c96f6 |
| bcbb0a3b85 |
| 3c556ab318 |
| 563ae5188e |
| f4b3561f71 |
| 0748a1b290 |
| 0bfece0c5e |
| 6e9b16279a |
| cf71baff30 |
| b9831acb44 |
| 5f2edb1860 |
| 8b6809cf66 |
| a93a4274aa |
| 59bb42ee7f |
| 11d382bff1 |
| 57889ec446 |
| 66bda49c44 |
| 94a2ab5359 |
| 6d0044f703 |
| 3fa447a84a |
| d501ce0c7e |
```diff
@@ -11,9 +11,7 @@
 import os
 import re
 import sys
-import traceback
 from datetime import datetime, timedelta
-from typing import List, Tuple

 import six
 from six import string_types
@@ -1011,64 +1009,3 @@ def __repr__(self):

     def __str__(self):
         return str(self.data)
-
-
-class GroupedExceptionHandler(object):
-    """A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""
-
-    def __init__(self):
-        self.exceptions = []  # type: List[Tuple[str, Exception, List[str]]]
-
-    def __bool__(self):
-        """Whether any exceptions were handled."""
-        return bool(self.exceptions)
-
-    def forward(self, context):
-        # type: (str) -> GroupedExceptionForwarder
-        """Return a contextmanager which extracts tracebacks and prefixes a message."""
-        return GroupedExceptionForwarder(context, self)
-
-    def _receive_forwarded(self, context, exc, tb):
-        # type: (str, Exception, List[str]) -> None
-        self.exceptions.append((context, exc, tb))
-
-    def grouped_message(self, with_tracebacks=True):
-        # type: (bool) -> str
-        """Print out an error message coalescing all the forwarded errors."""
-        each_exception_message = [
-            '{0} raised {1}: {2}{3}'.format(
-                context,
-                exc.__class__.__name__,
-                exc,
-                '\n{0}'.format(''.join(tb)) if with_tracebacks else '',
-            )
-            for context, exc, tb in self.exceptions
-        ]
-        return 'due to the following failures:\n{0}'.format(
-            '\n'.join(each_exception_message)
-        )
-
-
-class GroupedExceptionForwarder(object):
-    """A contextmanager to capture exceptions and forward them to a
-    GroupedExceptionHandler."""
-
-    def __init__(self, context, handler):
-        # type: (str, GroupedExceptionHandler) -> None
-        self._context = context
-        self._handler = handler
-
-    def __enter__(self):
-        return None
-
-    def __exit__(self, exc_type, exc_value, tb):
-        if exc_value is not None:
-            self._handler._receive_forwarded(
-                self._context,
-                exc_value,
-                traceback.format_tb(tb),
-            )
-
-        # Suppress any exception from being re-raised:
-        # https://docs.python.org/3/reference/datamodel.html#object.__exit__.
-        return True
```
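For context, here is a minimal usage sketch of the `GroupedExceptionHandler` API removed above, assuming the class definitions shown in this hunk; the step names and callables are hypothetical:

```python
# Minimal usage sketch, assuming the GroupedExceptionHandler and
# GroupedExceptionForwarder definitions from the hunk above.
handler = GroupedExceptionHandler()

for name, action in [('step1', lambda: 1 / 0), ('step2', lambda: None)]:
    with handler.forward(name):
        action()  # any exception raised here is captured, not propagated

if handler:  # truthy once at least one exception was forwarded
    print('failed ' + handler.grouped_message(with_tracebacks=False))
```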
```diff
@@ -21,7 +21,6 @@

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.lang import GroupedExceptionHandler

 import spack.binary_distribution
 import spack.config
@@ -418,10 +417,11 @@ def _make_bootstrapper(conf):
     return _bootstrap_methods[btype](conf)


-def _validate_source_is_trusted(conf):
+def _source_is_trusted(conf):
     trusted, name = spack.config.get('bootstrap:trusted'), conf['name']
     if name not in trusted:
-        raise ValueError('source is not trusted')
+        return False
+    return trusted[name]


 def spec_for_current_python():
@@ -488,25 +488,34 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):
     abstract_spec = abstract_spec or module
     source_configs = spack.config.get('bootstrap:sources', [])

-    h = GroupedExceptionHandler()
+    errors = {}

     for current_config in source_configs:
-        with h.forward(current_config['name']):
-            _validate_source_is_trusted(current_config)
+        if not _source_is_trusted(current_config):
+            msg = ('[BOOTSTRAP MODULE {0}] Skipping source "{1}" since it is '
+                   'not trusted').format(module, current_config['name'])
+            tty.debug(msg)
+            continue

-            b = _make_bootstrapper(current_config)
+        b = _make_bootstrapper(current_config)
+        try:
             if b.try_import(module, abstract_spec):
                 return
+        except Exception as e:
+            msg = '[BOOTSTRAP MODULE {0}] Unexpected error "{1}"'
+            tty.debug(msg.format(module, str(e)))
+            errors[current_config['name']] = e

-    assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(module)  # noqa: E501
-    msg = 'cannot bootstrap the "{0}" Python module '.format(module)
+    # We couldn't import in any way, so raise an import error
+    msg = 'cannot bootstrap the "{0}" Python module'.format(module)
     if abstract_spec:
-        msg += 'from spec "{0}" '.format(abstract_spec)
-    if tty.is_debug():
-        msg += h.grouped_message(with_tracebacks=True)
-    else:
-        msg += h.grouped_message(with_tracebacks=False)
-        msg += '\nRun `spack --debug ...` for more detailed errors'
+        msg += ' from spec "{0}"'.format(abstract_spec)
+    msg += ' due to the following failures:\n'
+    for method in errors:
+        err = errors[method]
+        msg += "    '{0}' raised {1}: {2}\n".format(
+            method, err.__class__.__name__, str(err))
+    msg += '    Please run `spack -d spec zlib` for more verbose error messages'
     raise ImportError(msg)
@@ -530,14 +539,15 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):

     executables_str = ', '.join(executables)
     source_configs = spack.config.get('bootstrap:sources', [])

-    h = GroupedExceptionHandler()
-
     for current_config in source_configs:
-        with h.forward(current_config['name']):
-            _validate_source_is_trusted(current_config)
+        if not _source_is_trusted(current_config):
+            msg = ('[BOOTSTRAP EXECUTABLES {0}] Skipping source "{1}" since it is '
+                   'not trusted').format(executables_str, current_config['name'])
+            tty.debug(msg)
+            continue

-            b = _make_bootstrapper(current_config)
+        b = _make_bootstrapper(current_config)
+        try:
             if b.try_search_path(executables, abstract_spec):
                 # Additional environment variables needed
                 concrete_spec, cmd = b.last_search['spec'], b.last_search['command']
@@ -552,16 +562,14 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
                 )
                 cmd.add_default_envmod(env_mods)
                 return cmd
+        except Exception as e:
+            msg = '[BOOTSTRAP EXECUTABLES {0}] Unexpected error "{1}"'
+            tty.debug(msg.format(executables_str, str(e)))

-    assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(executables_str)  # noqa: E501
-    msg = 'cannot bootstrap any of the {0} executables '.format(executables_str)
+    # We couldn't import in any way, so raise an import error
+    msg = 'cannot bootstrap any of the {0} executables'.format(executables_str)
     if abstract_spec:
-        msg += 'from spec "{0}" '.format(abstract_spec)
-    if tty.is_debug():
-        msg += h.grouped_message(with_tracebacks=True)
-    else:
-        msg += h.grouped_message(with_tracebacks=False)
-        msg += '\nRun `spack --debug ...` for more detailed errors'
+        msg += ' from spec "{0}"'.format(abstract_spec)
     raise RuntimeError(msg)
```
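The replacement code above switches to a plain try/except loop that records one exception per source and raises a single aggregated error at the end. A self-contained sketch of that pattern, with made-up source names:

```python
# Self-contained sketch of the per-source error aggregation used above.
# `sources` and `action` are hypothetical stand-ins for bootstrap sources.
def first_working(sources, action):
    errors = {}
    for name, source in sources:
        try:
            return action(source)
        except Exception as e:
            errors[name] = e  # remember the failure, try the next source
    details = '\n'.join(
        "    '{0}' raised {1}: {2}".format(n, e.__class__.__name__, e)
        for n, e in errors.items())
    raise ImportError('all sources failed due to the following failures:\n'
                      + details)
```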
```diff
@@ -136,13 +136,13 @@ def solve(parser, args):
         )

     fmt = " @K{%%-8d} %%-%ds%%9s %%7s" % maxlen
-    for i, (idx, build_idx, name) in enumerate(result.criteria, 1):
+    for i, (installed_cost, build_cost, name) in enumerate(result.criteria, 1):
         color.cprint(
             fmt % (
                 i,
                 name,
-                "-" if build_idx is None else opt[idx],
-                opt[idx] if build_idx is None else opt[build_idx],
+                "-" if build_cost is None else installed_cost,
+                installed_cost if build_cost is None else build_cost,
             )
         )
     print()
```
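The `fmt` line above uses two rounds of %-formatting: the first bakes the computed column width into the template (the doubled `%%` survive as literal `%`), the second fills in the row values. A small demonstration with a made-up width:

```python
maxlen = 20  # hypothetical: width of the longest criterion name

fmt = " @K{%%-8d} %%-%ds%%9s %%7s" % maxlen
# after the first round: " @K{%-8d} %-20s%9s %7s"

print(fmt % (1, "number of packages to build", "-", 7))
```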
```diff
@@ -27,6 +27,12 @@ def setup_parser(subparser):


 def stage(parser, args):
+    # We temporarily modify the working directory when setting up a stage, so we need to
+    # convert this to an absolute path here in order for it to remain valid later.
+    custom_path = os.path.abspath(args.path) if args.path else None
+    if custom_path:
+        spack.stage.create_stage_root(custom_path)
+
     if not args.specs:
         env = ev.active_environment()
         if env:
@@ -48,10 +54,6 @@ def stage(parser, args):

     specs = spack.cmd.parse_specs(args.specs, concretize=False)

-    # We temporarily modify the working directory when setting up a stage, so we need to
-    # convert this to an absolute path here in order for it to remain valid later.
-    custom_path = os.path.abspath(args.path) if args.path else None
-
     # prevent multiple specs from extracting in the same folder
     if len(specs) > 1 and custom_path:
         tty.die("`--path` requires a single spec, but multiple were provided")
```
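Moving the `os.path.abspath` call to the top of `stage()` matters because a relative path silently changes meaning after any `chdir`. A quick illustration with throwaway directories:

```python
import os

os.makedirs('/tmp/demo/work', exist_ok=True)  # throwaway example dirs
rel = 'stage-dir'                              # hypothetical relative --path

os.chdir('/tmp/demo')
early = os.path.abspath(rel)   # /tmp/demo/stage-dir, what the user meant

os.chdir('/tmp/demo/work')     # analogous to the stage changing directories
late = os.path.abspath(rel)    # /tmp/demo/work/stage-dir, somewhere else

print(early == late)           # False
```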
```diff
@@ -235,7 +235,7 @@ def is_ready(dag_hash):

         # If this spec is only used as a build dependency, we can uninstall
         return all(
-            dspec.deptypes == ("build",) or not dspec.parent.installed
+            dspec.deptypes == ("build",)
            for dspec in record.spec.edges_from_dependents()
        )
```
```diff
@@ -10,9 +10,9 @@

 import llnl.util.tty as tty

-#: whether we should write stack traces or short error messages
+#: at what level we should write stack traces or short error messages
 #: this is module-scoped because it needs to be set very early
-debug = False
+debug = 0


 class SpackError(Exception):
```
```diff
@@ -375,13 +375,6 @@ def make_argument_parser(**kwargs):
     # stat names in groups of 7, for nice wrapping.
     stat_lines = list(zip(*(iter(stat_names),) * 7))

-    # help message for --show-cores
-    show_cores_help = 'provide additional information on concretization failures\n'
-    show_cores_help += 'off (default): show only the violated rule\n'
-    show_cores_help += 'full: show raw unsat cores from clingo\n'
-    show_cores_help += 'minimized: show subset-minimal unsat cores '
-    show_cores_help += '(Warning: this may take hours for some specs)'
-
     parser.add_argument(
         '-h', '--help',
         dest='help', action='store_const', const='short', default=None,
@@ -405,9 +398,6 @@ def make_argument_parser(**kwargs):
         '-d', '--debug', action='count', default=0,
         help="write out debug messages "
              "(more d's for more verbosity: -d, -dd, -ddd, etc.)")
-    parser.add_argument(
-        '--show-cores', choices=["off", "full", "minimized"], default="off",
-        help=show_cores_help)
     parser.add_argument(
         '--timestamp', action='store_true',
         help="Add a timestamp to tty output")
@@ -490,18 +480,11 @@ def setup_main_options(args):
     # errors raised by spack.config.

     if args.debug:
-        spack.error.debug = True
+        spack.error.debug = args.debug
         spack.util.debug.register_interrupt_handler()
         spack.config.set('config:debug', True, scope='command_line')
         spack.util.environment.tracing_enabled = True

-    if args.show_cores != "off":
-        # minimize_cores defaults to true, turn it off if we're showing full core
-        # but don't want to wait to minimize it.
-        spack.solver.asp.full_cores = True
-        if args.show_cores == 'full':
-            spack.solver.asp.minimize_cores = False
-
     if args.timestamp:
         tty.set_timestamp(True)
```
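With `action='count'`, repeated `-d` flags accumulate, and `spack.error.debug` becomes an integer level rather than a boolean, while existing truthiness checks keep working. A minimal sketch:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-d', '--debug', action='count', default=0)

debug = parser.parse_args(['-dd']).debug   # 2

if debug:                      # same truthiness as the old boolean flag
    detailed = debug >= 2      # deeper verbosity only at -dd and beyond
    print(debug, detailed)     # 2 True
```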
```diff
@@ -9,6 +9,7 @@
 import itertools
 import os
 import pprint
+import re
 import types
 import warnings

@@ -55,14 +56,6 @@
 parse_files = None


-#: whether we should write ASP unsat cores quickly in debug mode when the cores
-#: may be very large or take the time (sometimes hours) to minimize them
-minimize_cores = True
-
-#: whether we should include all facts in the unsat cores or only error messages
-full_cores = False
-
-
 # backward compatibility functions for clingo ASTs
 def ast_getter(*names):
     def getter(node):
```
```diff
@@ -114,7 +107,7 @@ def getter(node):


 def build_criteria_names(costs, tuples):
-    """Construct an ordered mapping from criteria names to indices in the cost list."""
+    """Construct an ordered mapping from criteria names to costs."""
     # pull optimization criteria names out of the solution
     priorities_names = []

@@ -141,7 +134,10 @@ def build_criteria_names(costs, tuples):
     # sort the criteria by priority
     priorities_names = sorted(priorities_names, reverse=True)

-    assert len(priorities_names) == len(costs), "Wrong number of optimization criteria!"
+    # We only have opt-criterion values for non-error types
+    # error type criteria are excluded (they come first)
+    error_criteria = len(costs) - len(priorities_names)
+    costs = costs[error_criteria:]

     # split list into three parts: build criteria, fixed criteria, non-build criteria
     num_criteria = len(priorities_names)
@@ -154,12 +150,12 @@ def build_criteria_names(costs, tuples):
     # mapping from priority to index in cost list
     indices = dict((p, i) for i, (p, n) in enumerate(priorities_names))

-    # make a list that has each name with its build and non-build priority
+    # make a list that has each name with its build and non-build costs
     criteria = [
-        (p - fixed_priority_offset + num_build, None, name) for p, name in fixed
+        (costs[p - fixed_priority_offset + num_build], None, name) for p, name in fixed
     ]
     for (i, name), (b, _) in zip(installed, build):
-        criteria.append((indices[i], indices[b], name))
+        criteria.append((costs[indices[i]], costs[indices[b]], name))

     return criteria
```
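After this change `build_criteria_names` returns the cost values themselves instead of indices into the cost list, so callers such as `spack solve` no longer need the `opt` lookup. A toy illustration (numbers and names invented):

```python
costs = [30, 5, 2, 0]                      # hypothetical solver costs
priorities_names = [(4, 'version badness'), (3, 'variant mismatches'),
                    (2, 'version badness'), (1, 'variant mismatches')]

indices = dict((p, i) for i, (p, n) in enumerate(priorities_names))

# before: indices into `costs`, resolved later by the caller
old_entry = (indices[2], indices[4], 'version badness')                # (2, 0, ...)
# after: the installed/build costs themselves, directly printable
new_entry = (costs[indices[2]], costs[indices[4]], 'version badness')  # (2, 30, ...)
print(old_entry, new_entry)
```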
```diff
@@ -331,9 +327,6 @@ def format_core(self, core):
         core_symbols = []
         for atom in core:
             sym = symbols[atom]
-            if sym.name in ("rule", "error"):
-                # these are special symbols we use to get messages in the core
-                sym = sym.arguments[0].string
             core_symbols.append(sym)

         return sorted(str(symbol) for symbol in core_symbols)
@@ -392,7 +385,7 @@ def raise_if_unsat(self):
         """
         Raise an appropriate error if the result is unsatisfiable.

-        The error is a UnsatisfiableSpecError, and includes the minimized cores
+        The error is an InternalConcretizerError, and includes the minimized cores
         resulting from the solve, formatted to be human readable.
         """
         if self.satisfiable:
@@ -402,12 +395,8 @@ def raise_if_unsat(self):
         if len(constraints) == 1:
             constraints = constraints[0]

-        if minimize_cores:
-            conflicts = self.format_minimal_cores()
-        else:
-            conflicts = self.format_cores()
-
-        raise UnsatisfiableSpecError(constraints, conflicts=conflicts)
+        conflicts = self.format_minimal_cores()
+        raise InternalConcretizerError(constraints, conflicts=conflicts)

     @property
     def specs(self):
```
```diff
@@ -507,13 +496,11 @@ def h2(self, name):
     def newline(self):
         self.out.write('\n')

-    def fact(self, head, assumption=False):
+    def fact(self, head):
         """ASP fact (a rule without a body).

         Arguments:
             head (AspFunction): ASP function to generate as fact
-            assumption (bool): If True and using cores, use this fact as a
-                choice point in ASP and include it in unsatisfiable cores
         """
         symbol = head.symbol() if hasattr(head, 'symbol') else head

@@ -521,10 +508,9 @@ def fact(self, head, assumption=False):

         atom = self.backend.add_atom(symbol)

-        # with `--show-cores=full or --show-cores=minimized, make all facts
-        # choices/assumptions, otherwise only if assumption=True
-        choice = self.cores and (full_cores or assumption)
-
+        # Only functions relevant for constructing bug reports for bad error messages
+        # are assumptions, and only when using cores.
+        choice = self.cores and symbol.name == 'internal_error'
         self.backend.add_rule([atom], [], choice=choice)
         if choice:
             self.assumptions.append(atom)
@@ -582,9 +568,10 @@ def visit(node):
             for term in node.body:
                 if ast_type(term) == ASTType.Literal:
                     if ast_type(term.atom) == ASTType.SymbolicAtom:
-                        if ast_sym(term.atom).name == "error":
+                        name = ast_sym(term.atom).name
+                        if name == 'internal_error':
                             arg = ast_sym(ast_sym(term.atom).arguments[0])
-                            self.fact(fn.error(arg.string), assumption=True)
+                            self.fact(AspFunction(name)(arg.string))

         path = os.path.join(parent_dir, 'concretize.lp')
         parse_files([path], visit)
```
```diff
@@ -737,7 +724,7 @@ def spec_versions(self, spec):

         # record all version constraints for later
         self.version_constraints.add((spec.name, spec.versions))
-        return [fn.version_satisfies(spec.name, spec.versions)]
+        return [fn.node_version_satisfies(spec.name, spec.versions)]

     def target_ranges(self, spec, single_target_fn):
         target = spec.architecture.target
@@ -750,13 +737,24 @@ def target_ranges(self, spec, single_target_fn):
         return [fn.node_target_satisfies(spec.name, target)]

     def conflict_rules(self, pkg):
+        default_msg = "{0} '{1}' conflicts with '{2}'"
+        no_constraint_msg = "{0} conflicts with '{1}'"
         for trigger, constraints in pkg.conflicts.items():
-            trigger_id = self.condition(spack.spec.Spec(trigger), name=pkg.name)
-            self.gen.fact(fn.conflict_trigger(trigger_id))
+            trigger_msg = "conflict trigger %s" % str(trigger)
+            trigger_id = self.condition(
+                spack.spec.Spec(trigger), name=pkg.name, msg=trigger_msg)

-            for constraint, _ in constraints:
-                constraint_id = self.condition(constraint, name=pkg.name)
-                self.gen.fact(fn.conflict(pkg.name, trigger_id, constraint_id))
+            for constraint, conflict_msg in constraints:
+                if conflict_msg is None:
+                    if constraint == spack.spec.Spec():
+                        conflict_msg = no_constraint_msg.format(pkg.name, trigger)
+                    else:
+                        conflict_msg = default_msg.format(pkg.name, trigger, constraint)
+                constraint_msg = "conflict constraint %s" % str(constraint)
+                constraint_id = self.condition(
+                    constraint, name=pkg.name, msg=constraint_msg)
+                self.gen.fact(
+                    fn.conflict(pkg.name, trigger_id, constraint_id, conflict_msg))
             self.gen.newline()

     def available_compilers(self):
```
```diff
@@ -840,9 +838,18 @@ def pkg_rules(self, pkg, tests):
         for name, entry in sorted(pkg.variants.items()):
             variant, when = entry

-            for w in when:
-                cond_id = self.condition(w, name=pkg.name)
-                self.gen.fact(fn.variant_condition(cond_id, pkg.name, name))
+            if spack.spec.Spec() in when:
+                # unconditional variant
+                self.gen.fact(fn.variant(pkg.name, name))
+            else:
+                # conditional variant
+                for w in when:
+                    msg = "%s has variant %s" % (pkg.name, name)
+                    if str(w):
+                        msg += " when %s" % w
+
+                    cond_id = self.condition(w, name=pkg.name, msg=msg)
+                    self.gen.fact(fn.variant_condition(cond_id, pkg.name, name))

             single_value = not variant.multi
             if single_value:
@@ -885,7 +892,9 @@ def pkg_rules(self, pkg, tests):
                     imposed = spack.spec.Spec(value.when)
                     imposed.name = pkg.name
                     self.condition(
-                        required_spec=required, imposed_spec=imposed, name=pkg.name
+                        required_spec=required, imposed_spec=imposed, name=pkg.name,
+                        msg="%s variant %s value %s when %s" % (
+                            pkg.name, name, value, when)
                     )

                 if variant.sticky:
```
```diff
@@ -913,7 +922,7 @@ def pkg_rules(self, pkg, tests):
             )
         )

-    def condition(self, required_spec, imposed_spec=None, name=None):
+    def condition(self, required_spec, imposed_spec=None, name=None, msg=None):
         """Generate facts for a dependency or virtual provider condition.

         Arguments:
@@ -922,7 +931,7 @@ def condition(self, required_spec, imposed_spec=None, name=None):
                 are imposed when this condition is triggered
             name (str or None): name for `required_spec` (required if
                 required_spec is anonymous, ignored if not)
-
+            msg (str or None): description of the condition
         Returns:
             int: id of the condition created by this function
         """
@@ -931,7 +940,7 @@ def condition(self, required_spec, imposed_spec=None, name=None):
         assert named_cond.name, "must provide name for anonymous condtions!"

         condition_id = next(self._condition_id_counter)
-        self.gen.fact(fn.condition(condition_id))
+        self.gen.fact(fn.condition(condition_id, msg))

         # requirements trigger the condition
         requirements = self.spec_clauses(
```
```diff
@@ -963,7 +972,8 @@ def package_provider_rules(self, pkg):

         for provided, whens in pkg.provided.items():
             for when in whens:
-                condition_id = self.condition(when, provided, pkg.name)
+                msg = '%s provides %s when %s' % (pkg.name, provided, when)
+                condition_id = self.condition(when, provided, pkg.name, msg)
                 self.gen.fact(fn.provider_condition(
                     condition_id, when.name, provided.name
                 ))
@@ -987,7 +997,11 @@ def package_dependencies_rules(self, pkg):
                 if not deptypes:
                     continue

-                condition_id = self.condition(cond, dep.spec, pkg.name)
+                msg = '%s depends on %s' % (pkg.name, dep.spec.name)
+                if cond != spack.spec.Spec():
+                    msg += ' when %s' % cond
+
+                condition_id = self.condition(cond, dep.spec, pkg.name, msg)
                 self.gen.fact(fn.dependency_condition(
                     condition_id, pkg.name, dep.spec.name
                 ))
@@ -1067,7 +1081,8 @@ def external_packages(self):

         # Declare external conditions with a local index into packages.yaml
         for local_idx, spec in enumerate(external_specs):
-            condition_id = self.condition(spec)
+            msg = '%s available as external when satisfying %s' % (spec.name, spec)
+            condition_id = self.condition(spec, msg=msg)
             self.gen.fact(
                 fn.possible_external(condition_id, pkg_name, local_idx)
             )
```
```diff
@@ -1920,6 +1935,17 @@ def node_os(self, pkg, os):
     def node_target(self, pkg, target):
         self._arch(pkg).target = target

+    def error(self, priority, msg, *args):
+        msg = msg.format(*args)
+
+        # For variant formatting, we sometimes have to construct specs
+        # to format values properly. Find/replace all occurances of
+        # Spec(...) with the string representation of the spec mentioned
+        specs_to_construct = re.findall(r'Spec\(([^)]*)\)', msg)
+        for spec_str in specs_to_construct:
+            msg = msg.replace('Spec(%s)' % spec_str, str(spack.spec.Spec(spec_str)))
+        raise UnsatisfiableSpecError(msg)
+
     def variant_value(self, pkg, name, value):
         # FIXME: is there a way not to special case 'dev_path' everywhere?
         if name == 'dev_path':
```
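The `Spec(...)` placeholders let the grounded error messages defer spec formatting to Python. A standalone rendering of that find/replace trick, with a stub in place of `spack.spec.Spec`:

```python
import re

class Spec(object):
    """Hypothetical stand-in for spack.spec.Spec, just for this demo."""
    def __init__(self, text):
        self.text = text
    def __str__(self):
        return self.text.strip()

msg = "No version for '{0}' satisfies 'Spec({1})' and 'Spec({2})'".format(
    'zlib', 'zlib@1.2.11', 'zlib@1.2.12')

for spec_str in re.findall(r'Spec\(([^)]*)\)', msg):
    msg = msg.replace('Spec(%s)' % spec_str, str(Spec(spec_str)))

print(msg)  # No version for 'zlib' satisfies 'zlib@1.2.11' and 'zlib@1.2.12'
```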
```diff
@@ -2042,15 +2068,27 @@ def deprecated(self, pkg, version):
             msg = 'using "{0}@{1}" which is a deprecated version'
             tty.warn(msg.format(pkg, version))

+    @staticmethod
+    def sort_fn(function_tuple):
+        name = function_tuple[0]
+        if name == 'error':
+            priority = function_tuple[1][0]
+            return (-4, priority)
+        elif name == 'hash':
+            return (-3, 0)
+        elif name == 'node':
+            return (-2, 0)
+        elif name == 'node_compiler':
+            return (-1, 0)
+        else:
+            return (0, 0)
+
     def build_specs(self, function_tuples):
         # Functions don't seem to be in particular order in output. Sort
         # them here so that directives that build objects (like node and
         # node_compiler) are called in the right order.
-        function_tuples.sort(key=lambda f: {
-            "hash": -3,
-            "node": -2,
-            "node_compiler": -1,
-        }.get(f[0], 0))
+        self.function_tuples = function_tuples
+        self.function_tuples.sort(key=self.sort_fn)

         self._specs = {}
         for name, args in function_tuples:
```
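The new `sort_fn` keeps the old hash/node/node_compiler ordering but puts `error` atoms first, ordered among themselves by their priority argument. A toy run:

```python
def sort_fn(function_tuple):
    name = function_tuple[0]
    if name == 'error':
        return (-4, function_tuple[1][0])  # errors first, by priority value
    elif name == 'hash':
        return (-3, 0)
    elif name == 'node':
        return (-2, 0)
    elif name == 'node_compiler':
        return (-1, 0)
    return (0, 0)

tuples = [('variant_value', ('zlib', 'shared', 'True')),
          ('node', ('zlib',)),
          ('error', (0, 'a conflict was triggered')),
          ('hash', ('zlib', 'abc123'))]
print([t[0] for t in sorted(tuples, key=sort_fn)])
# ['error', 'hash', 'node', 'variant_value']
```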
```diff
@@ -2058,7 +2096,6 @@ def build_specs(self, function_tuples):
                 continue

             action = getattr(self, name, None)
-
             # print out unknown actions so we can display them for debugging
             if not action:
                 msg = "%s(%s)" % (name, ", ".join(str(a) for a in args))
@@ -2068,16 +2105,18 @@ def build_specs(self, function_tuples):
             assert action and callable(action)

             # ignore predicates on virtual packages, as they're used for
-            # solving but don't construct anything
-            pkg = args[0]
-            if spack.repo.path.is_virtual(pkg):
-                continue
+            # solving but don't construct anything. Do not ignore error
+            # predicates on virtual packages.
+            if name != 'error':
+                pkg = args[0]
+                if spack.repo.path.is_virtual(pkg):
+                    continue

-            # if we've already gotten a concrete spec for this pkg,
-            # do not bother calling actions on it.
-            spec = self._specs.get(pkg)
-            if spec and spec.concrete:
-                continue
+                # if we've already gotten a concrete spec for this pkg,
+                # do not bother calling actions on it.
+                spec = self._specs.get(pkg)
+                if spec and spec.concrete:
+                    continue

             action(*args)
```
```diff
@@ -2201,25 +2240,27 @@ def solve(


 class UnsatisfiableSpecError(spack.error.UnsatisfiableSpecError):
     """
     Subclass for new constructor signature for new concretizer
     """
+    def __init__(self, msg):
+        super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
+        self.provided = None
+        self.required = None
+        self.constraint_type = None
+
+
+class InternalConcretizerError(spack.error.UnsatisfiableSpecError):
+    """
+    Subclass for new constructor signature for new concretizer
+    """
     def __init__(self, provided, conflicts):
         indented = ['    %s\n' % conflict for conflict in conflicts]
-        conflict_msg = ''.join(indented)
-        issue = 'conflicts' if full_cores else 'errors'
-        msg = '%s is unsatisfiable, %s are:\n%s' % (provided, issue, conflict_msg)
-
-        newline_indent = '\n    '
-        if not full_cores:
-            msg += newline_indent + 'To see full clingo unsat cores, '
-            msg += 're-run with `spack --show-cores=full`'
-        if not minimize_cores or not full_cores:
-            # not solver.minimalize_cores and not solver.full_cores impossible
-            msg += newline_indent + 'For full, subset-minimal unsat cores, '
-            msg += 're-run with `spack --show-cores=minimized'
-            msg += newline_indent
-            msg += 'Warning: This may take (up to) hours for some specs'
+        error_msg = ''.join(indented)
+        msg = 'Spack concretizer internal error. Please submit a bug report'
+        msg += '\n    Please include the command, environment if applicable,'
+        msg += '\n    and the following error message.'
+        msg = '\n    %s is unsatisfiable, errors are:\n%s' % (provided, error_msg)

         super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
```
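For a toy conflict, the new `InternalConcretizerError` constructor produces the message below. Note that the final `msg = ...` assignment appears to replace, rather than append to, the bug-report preamble built just before it:

```python
provided = 'hdf5%gcc@4.5.0'    # hypothetical unsatisfiable input
conflicts = ["'hdf5' requires 'gcc@9:' but got 'gcc@4.5.0'"]

indented = ['    %s\n' % conflict for conflict in conflicts]
error_msg = ''.join(indented)
msg = 'Spack concretizer internal error. Please submit a bug report'
msg += '\n    Please include the command, environment if applicable,'
msg += '\n    and the following error message.'
msg = '\n    %s is unsatisfiable, errors are:\n%s' % (provided, error_msg)
print(msg)
```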
```diff
@@ -7,22 +7,6 @@
 % This logic program implements Spack's concretizer
 %=============================================================================

-%-----------------------------------------------------------------------------
-% Generic constraints on nodes
-%-----------------------------------------------------------------------------
-
-% each node must have a single version
-:- not 1 { version(Package, _) } 1, node(Package).
-
-% each node must have a single platform, os and target
-:- not 1 { node_platform(Package, _) } 1, node(Package), error("A node must have exactly one platform").
-:- not 1 { node_os(Package, _) } 1, node(Package).
-:- not 1 { node_target(Package, _) } 1, node(Package).
-
-% each node has a single compiler associated with it
-:- not 1 { node_compiler(Package, _) } 1, node(Package).
-:- not 1 { node_compiler_version(Package, _, _) } 1, node(Package).
-
 %-----------------------------------------------------------------------------
 % Version semantics
 %-----------------------------------------------------------------------------
@@ -35,7 +19,7 @@ version_declared(Package, Version, Weight) :- version_declared(Package, Version,
 :- version_declared(Package, Version, Weight, Origin1),
    version_declared(Package, Version, Weight, Origin2),
    Origin1 < Origin2,
-   error("Internal error: two versions with identical weights").
+   internal_error("Two versions with identical weights").

 % We cannot use a version declared for an installed package if we end up building it
 :- version_declared(Package, Version, Weight, "installed"),
```
```diff
@@ -48,11 +32,27 @@ version_declared(Package, Version) :- version_declared(Package, Version, _).

-% If something is a package, it has only one version and that must be a
-% declared version.
-1 { version(Package, Version) : version_declared(Package, Version) } 1
- :- node(Package), error("Each node must have exactly one version").
+% We allow clingo to choose any version(s), and infer an error if there
+% is not precisely one version chosen. Error facts are heavily optimized
+% against to ensure they cannot be inferred when a non-error solution is
+% possible
+{ version(Package, Version) : version_declared(Package, Version) }
+  :- node(Package).
+error(2, "No version for '{0}' satisfies '@{1}' and '@{2}'", Package, Version1, Version2)
+  :- node(Package),
+     version(Package, Version1),
+     version(Package, Version2),
+     Version1 < Version2. % see[1]

-% A virtual package may have or not a version, but never has more than one
-:- virtual_node(Package), 2 { version(Package, _) }.
+error(2, "No versions available for package '{0}'", Package)
+  :- node(Package), not version(Package, _).
+
+% A virtual package may or may not have a version, but never has more than one
+error(2, "No version for '{0}' satisfies '@{1}' and '@{2}'", Virtual, Version1, Version2)
+  :- virtual_node(Virtual),
+     version(Virtual, Version1),
+     version(Virtual, Version2),
+     Version1 < Version2. % see[1]

 % If we select a deprecated version, mark the package as deprecated
 deprecated(Package, Version) :- version(Package, Version), deprecated_version(Package, Version).
@@ -61,14 +61,27 @@ possible_version_weight(Package, Weight)
   :- version(Package, Version),
      version_declared(Package, Version, Weight).

-1 { version_weight(Package, Weight) : possible_version_weight(Package, Weight) } 1 :- node(Package), error("Internal error: Package version must have a unique weight").
+version_weight(Package, Weight)
+  :- version(Package, Version),
+     node(Package),
+     Weight = #min{W : version_declared(Package, Version, W)}.

-% version_satisfies implies that exactly one of the satisfying versions
+% node_version_satisfies implies that exactly one of the satisfying versions
 % is the package's version, and vice versa.
-1 { version(Package, Version) : version_satisfies(Package, Constraint, Version) } 1
-  :- version_satisfies(Package, Constraint),
-     error("no version satisfies the given constraints").
-
-version_satisfies(Package, Constraint)
+% While this choice rule appears redundant with the initial choice rule for
+% versions, virtual nodes with version constraints require this rule to be
+% able to choose versions
+{ version(Package, Version) : version_satisfies(Package, Constraint, Version) }
+  :- node_version_satisfies(Package, Constraint).
+
+% More specific error message if the version cannot satisfy some constraint
+% Otherwise covered by `no_version_error` and `versions_conflict_error`.
+error(1, "No valid version for '{0}' satisfies '@{1}'", Package, Constraint)
+  :- node_version_satisfies(Package, Constraint),
+     C = #count{ Version : version(Package, Version), version_satisfies(Package, Constraint, Version)},
+     C < 1.
+
+node_version_satisfies(Package, Constraint)
   :- version(Package, Version), version_satisfies(Package, Constraint, Version).

 #defined version_satisfies/3.
```
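The pattern in these rules is to let the solver choose permissively and then infer prioritized `error` facts instead of failing on hard cardinality constraints. A rough Python analogue of that generate-then-diagnose idea:

```python
def diagnose(package, chosen_versions):
    """Mimic the error(Priority, Msg, ...) facts for version choices."""
    errors = []
    if not chosen_versions:
        errors.append((2, "No versions available for package '%s'" % package))
    for v1 in chosen_versions:
        for v2 in chosen_versions:
            if v1 < v2:  # symmetry-broken pair, like `Version1 < Version2`
                errors.append((2, "No version for '%s' satisfies '@%s' and '@%s'"
                                  % (package, v1, v2)))
    return sorted(errors)

print(diagnose('zlib', ['1.2.11', '1.2.12']))
print(diagnose('zlib', []))
```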
```diff
@@ -87,7 +100,7 @@ version_satisfies(Package, Constraint)
 % conditions are specified with `condition_requirement` and hold when
 % corresponding spec attributes hold.
 condition_holds(ID) :-
-  condition(ID);
+  condition(ID, _);
   attr(Name, A1) : condition_requirement(ID, Name, A1);
   attr(Name, A1, A2) : condition_requirement(ID, Name, A1, A2);
   attr(Name, A1, A2, A3) : condition_requirement(ID, Name, A1, A2, A3).
@@ -106,7 +119,7 @@ attr(Name, A1, A2, A3) :- impose(ID), imposed_constraint(ID, Name, A1, A2, A3).
    variant_value(Package, Variant, Value),
    not imposed_constraint(Hash, "variant_value", Package, Variant, Value).

-#defined condition/1.
+#defined condition/2.
 #defined condition_requirement/3.
 #defined condition_requirement/4.
 #defined condition_requirement/5.
@@ -133,9 +146,7 @@ depends_on(Package, Dependency) :- depends_on(Package, Dependency, _).
 dependency_holds(Package, Dependency, Type) :-
   dependency_condition(ID, Package, Dependency),
   dependency_type(ID, Type),
-  condition_holds(ID),
-  build(Package),
-  not external(Package).
+  condition_holds(ID).

 % We cut off dependencies of externals (as we don't really know them).
 % Don't impose constraints on dependencies that don't exist.
```
```diff
@@ -161,17 +172,18 @@ node(Dependency) :- node(Package), depends_on(Package, Dependency).
 % dependencies) and get a two-node unconnected graph
 needed(Package) :- root(Package).
 needed(Dependency) :- needed(Package), depends_on(Package, Dependency).
-:- node(Package), not needed(Package),
-   error("All dependencies must be reachable from root").
+error(1, "'{0}' is not a valid dependency for any package in the DAG", Package)
+  :- node(Package),
+     not needed(Package).

 % Avoid cycles in the DAG
 % some combinations of conditional dependencies can result in cycles;
 % this ensures that we solve around them
 path(Parent, Child) :- depends_on(Parent, Child).
 path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
-:- path(A, B), path(B, A), error("Cyclic dependencies are not allowed").
-
-#defined error/1.
+error(2, "Cyclic dependency detected between '{0}' and '{1}'\n    Consider changing variants to avoid the cycle", A, B)
+  :- path(A, B),
+     path(B, A).

 #defined dependency_type/2.
 #defined dependency_condition/3.
@@ -179,14 +191,13 @@ path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
 %-----------------------------------------------------------------------------
 % Conflicts
 %-----------------------------------------------------------------------------
-:- node(Package),
-   conflict(Package, TriggerID, ConstraintID),
+error(0, Msg) :- node(Package),
+   conflict(Package, TriggerID, ConstraintID, Msg),
    condition_holds(TriggerID),
    condition_holds(ConstraintID),
-   not external(Package),  % ignore conflicts for externals
-   error("A conflict was triggered").
+   not external(Package).  % ignore conflicts for externals

-#defined conflict/3.
+#defined conflict/4.

 %-----------------------------------------------------------------------------
 % Virtual dependencies
```
```diff
@@ -206,8 +217,17 @@ virtual_node(Virtual)

 % If there's a virtual node, we must select one and only one provider.
 % The provider must be selected among the possible providers.
-1 { provider(Package, Virtual) : possible_provider(Package, Virtual) } 1
- :- virtual_node(Virtual), error("Virtual packages must be satisfied by a unique provider").
+{ provider(Package, Virtual) : possible_provider(Package, Virtual) }
+  :- virtual_node(Virtual).
+error(2, "Cannot find valid provider for virtual {0}", Virtual)
+  :- virtual_node(Virtual),
+     P = #count{ Package : provider(Package, Virtual)},
+     P < 1.
+error(2, "Spec cannot include multiple providers for virtual '{0}'\n    Requested '{1}' and '{2}'", Virtual, P1, P2)
+  :- virtual_node(Virtual),
+     provider(P1, Virtual),
+     provider(P2, Virtual),
+     P1 < P2.

 % virtual roots imply virtual nodes, and that one provider is a root
 virtual_node(Virtual) :- virtual_root(Virtual).
@@ -232,7 +252,7 @@ virtual_condition_holds(Provider, Virtual) :-
 % A package cannot be the actual provider for a virtual if it does not
 % fulfill the conditions to provide that virtual
 :- provider(Package, Virtual), not virtual_condition_holds(Package, Virtual),
-   error("Internal error: virtual when provides not respected").
+   internal_error("Virtual when provides not respected").

 #defined possible_provider/2.

@@ -245,7 +265,7 @@ virtual_condition_holds(Provider, Virtual) :-
 % we select the weight, among the possible ones, that minimizes the overall objective function.
 1 { provider_weight(Dependency, Virtual, Weight, Reason) :
     possible_provider_weight(Dependency, Virtual, Weight, Reason) } 1
- :- provider(Dependency, Virtual), error("Internal error: package provider weights must be unique").
+ :- provider(Dependency, Virtual), internal_error("Package provider weights must be unique").

 % Get rid or the reason for enabling the possible weight (useful for debugging)
 provider_weight(Dependency, Virtual, Weight) :- provider_weight(Dependency, Virtual, Weight, _).
```
```diff
@@ -291,7 +311,7 @@ node(Package) :- attr("node", Package).
 virtual_node(Virtual) :- attr("virtual_node", Virtual).
 hash(Package, Hash) :- attr("hash", Package, Hash).
 version(Package, Version) :- attr("version", Package, Version).
-version_satisfies(Package, Constraint) :- attr("version_satisfies", Package, Constraint).
+node_version_satisfies(Package, Constraint) :- attr("node_version_satisfies", Package, Constraint).
 node_platform(Package, Platform) :- attr("node_platform", Package, Platform).
 node_os(Package, OS) :- attr("node_os", Package, OS).
 node_target(Package, Target) :- attr("node_target", Package, Target).
@@ -310,7 +330,7 @@ attr("node", Package) :- node(Package).
 attr("virtual_node", Virtual) :- virtual_node(Virtual).
 attr("hash", Package, Hash) :- hash(Package, Hash).
 attr("version", Package, Version) :- version(Package, Version).
-attr("version_satisfies", Package, Constraint) :- version_satisfies(Package, Constraint).
+attr("node_version_satisfies", Package, Constraint) :- node_version_satisfies(Package, Constraint).
 attr("node_platform", Package, Platform) :- node_platform(Package, Platform).
 attr("node_os", Package, OS) :- node_os(Package, OS).
 attr("node_target", Package, Target) :- node_target(Package, Target).
@@ -338,7 +358,7 @@ attr("node_compiler_version_satisfies", Package, Compiler, Version)
 #defined external_only/1.
 #defined pkg_provider_preference/4.
 #defined default_provider_preference/3.
-#defined version_satisfies/2.
+#defined node_version_satisfies/2.
 #defined node_compiler_version_satisfies/3.
 #defined root/1.
```
```diff
@@ -347,9 +367,17 @@ attr("node_compiler_version_satisfies", Package, Compiler, Version)
 %-----------------------------------------------------------------------------

 % if a package is external its version must be one of the external versions
-1 { external_version(Package, Version, Weight):
-    version_declared(Package, Version, Weight, "external") } 1
- :- external(Package), error("External package version does not satisfy external spec").
+{ external_version(Package, Version, Weight):
+    version_declared(Package, Version, Weight, "external") }
+  :- external(Package).
+error(2, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
+  :- external(Package),
+     not external_version(Package, _, _).
+error(2, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
+  :- external(Package),
+     external_version(Package, Version1, Weight1),
+     external_version(Package, Version2, Weight2),
+     (Version1, Weight1) < (Version2, Weight2). % see[1]

 version_weight(Package, Weight) :- external_version(Package, Version, Weight).
 version(Package, Version) :- external_version(Package, Version, Weight).
@@ -369,7 +397,7 @@ external(Package) :- external_spec_selected(Package, _).
    version_weight(Package, Weight),
    version_declared(Package, Version, Weight, "external"),
    not external(Package),
-   error("Internal error: external weight used for internal spec").
+   internal_error("External weight used for internal spec").

 % determine if an external spec has been selected
 external_spec_selected(Package, LocalIndex) :-
@@ -381,8 +409,9 @@ external_conditions_hold(Package, LocalIndex) :-

 % it cannot happen that a spec is external, but none of the external specs
 % conditions hold.
-:- external(Package), not external_conditions_hold(Package, _),
-   error("External package does not satisfy external spec").
+error(2, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
+  :- external(Package),
+     not external_conditions_hold(Package, _).

 #defined possible_external/3.
 #defined external_spec_index/3.
```
```diff
@@ -399,16 +428,16 @@ variant(Package, Variant) :- variant_condition(ID, Package, Variant),
     condition_holds(ID).

 % a variant cannot be set if it is not a variant on the package
-:- variant_set(Package, Variant),
-   not variant(Package, Variant),
-   build(Package),
-   error("Unsatisfied conditional variants cannot be set").
+error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Package, Variant)
+  :- variant_set(Package, Variant),
+     not variant(Package, Variant),
+     build(Package).

 % a variant cannot take on a value if it is not a variant of the package
-:- variant_value(Package, Variant, _),
-   not variant(Package, Variant),
-   build(Package),
-   error("Unsatisfied conditional variants cannot take on a variant value").
+error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Package, Variant)
+  :- variant_value(Package, Variant, _),
+     not variant(Package, Variant),
+     build(Package).
@@ -418,27 +447,30 @@ variant_value(Package, Variant, Value) :-
     variant_default_value(Package, Variant, Value),
     build(Package).

-% one variant value for single-valued variants.
-1 {
+% at most one variant value for single-valued variants.
+{
   variant_value(Package, Variant, Value)
   : variant_possible_value(Package, Variant, Value)
 } 1
  :- node(Package),
     variant(Package, Variant),
     variant_single_value(Package, Variant),
-    build(Package),
-    error("Single valued variants must have a single value").
-
-% at least one variant value for multi-valued variants.
-1 {
-  variant_value(Package, Variant, Value)
-  : variant_possible_value(Package, Variant, Value)
-}
-  :- node(Package),
-     variant(Package, Variant),
-     not variant_single_value(Package, Variant),
-     build(Package),
-     error("Internal error: All variants must have a value").
+    build(Package).
+
+
+error(2, "'{0}' required multiple values for single-valued variant '{1}'\n    Requested 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2)
+  :- node(Package),
+     variant(Package, Variant),
+     variant_single_value(Package, Variant),
+     build(Package),
+     variant_value(Package, Variant, Value1),
+     variant_value(Package, Variant, Value2),
+     Value1 < Value2. % see[1]
+error(2, "No valid value for variant '{1}' of package '{0}'", Package, Variant)
+  :- node(Package),
+     variant(Package, Variant),
+     build(Package),
+     C = #count{ Value : variant_value(Package, Variant, Value) },
+     C < 1.

 % if a variant is set to anything, it is considered 'set'.
 variant_set(Package, Variant) :- variant_set(Package, Variant, _).
@@ -446,21 +478,21 @@ variant_set(Package, Variant) :- variant_set(Package, Variant, _).
 % A variant cannot have a value that is not also a possible value
 % This only applies to packages we need to build -- concrete packages may
 % have been built w/different variants from older/different package versions.
-:- variant_value(Package, Variant, Value),
-   not variant_possible_value(Package, Variant, Value),
-   build(Package),
-   error("Variant set to invalid value").
+error(1, "'Spec({1}={2})' is not a valid value for '{0}' variant '{1}'", Package, Variant, Value)
+  :- variant_value(Package, Variant, Value),
+     not variant_possible_value(Package, Variant, Value),
+     build(Package).

 % Some multi valued variants accept multiple values from disjoint sets.
 % Ensure that we respect that constraint and we don't pick values from more
 % than one set at once
-:- variant_value(Package, Variant, Value1),
-   variant_value(Package, Variant, Value2),
-   variant_value_from_disjoint_sets(Package, Variant, Value1, Set1),
-   variant_value_from_disjoint_sets(Package, Variant, Value2, Set2),
-   Set1 < Set2,
-   build(Package),
-   error("Variant values selected from multiple disjoint sets").
+error(2, "{0} variant '{1}' cannot have values '{2}' and '{3}' as they come from disjoing value sets", Package, Variant, Value1, Value2)
+  :- variant_value(Package, Variant, Value1),
+     variant_value(Package, Variant, Value2),
+     variant_value_from_disjoint_sets(Package, Variant, Value1, Set1),
+     variant_value_from_disjoint_sets(Package, Variant, Value2, Set2),
+     Set1 < Set2, % see[1]
+     build(Package).

 % variant_set is an explicitly set variant value. If it's not 'set',
 % we revert to the default value. If it is set, we force the set value
```
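A Python rendering of the disjoint-sets rule above: all values chosen for a variant must come from a single set. The example sets are hypothetical:

```python
disjoint_sets = [{'none'}, {'mpich', 'openmpi', 'mvapich2'}]

def from_multiple_sets(chosen):
    owning = set(i for v in chosen
                 for i, s in enumerate(disjoint_sets) if v in s)
    return len(owning) > 1

print(from_multiple_sets({'mpich', 'openmpi'}))  # False: one set
print(from_multiple_sets({'none', 'mpich'}))     # True: would be an error
```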
```diff
@@ -518,12 +550,11 @@ variant_default_value(Package, Variant, Value) :- variant_default_value_from_cli

 % Treat 'none' in a special way - it cannot be combined with other
 % values even if the variant is multi-valued
-:- 2 {
-     variant_value(Package, Variant, Value) : variant_possible_value(Package, Variant, Value)
-   },
-   variant_value(Package, Variant, "none"),
-   build(Package),
-   error("Variant value 'none' cannot be combined with any other value").
+error(2, "{0} variant '{1}' cannot have values '{2}' and 'none'", Package, Variant, Value)
+  :- variant_value(Package, Variant, Value),
+     variant_value(Package, Variant, "none"),
+     Value != "none",
+     build(Package).

 % patches and dev_path are special variants -- they don't have to be
 % declared in the package, so we just allow them to spring into existence
```
```diff
@@ -567,6 +598,18 @@ node_platform(Package, Platform)
 % platform is set if set to anything
 node_platform_set(Package) :- node_platform_set(Package, _).

+% each node must have a single platform
+error(2, "No valid platform found for {0}", Package)
+  :- node(Package),
+     C = #count{ Platform : node_platform(Package, Platform)},
+     C < 1.
+
+error(2, "Cannot concretize {0} with multiple platforms\n    Requested 'platform={1}' and 'platform={2}'", Package, Platform1, Platform2)
+  :- node(Package),
+     node_platform(Package, Platform1),
+     node_platform(Package, Platform2),
+     Platform1 < Platform2. % see[1]
+
 #defined node_platform_set/2. % avoid warnings

 %-----------------------------------------------------------------------------
```
```diff
@@ -576,20 +619,32 @@ node_platform_set(Package) :- node_platform_set(Package, _).
 os(OS) :- os(OS, _).

 % one os per node
-1 { node_os(Package, OS) : os(OS) } 1 :-
-    node(Package), error("Each node must have exactly one OS").
+{ node_os(Package, OS) : os(OS) } :- node(Package).
+
+error(2, "Cannot find valid operating system for '{0}'", Package)
+  :- node(Package),
+     C = #count{ OS : node_os(Package, OS)},
+     C < 1.
+
+error(2, "Cannot concretize {0} with multiple operating systems\n    Requested 'os={1}' and 'os={2}'", Package, OS1, OS2)
+  :- node(Package),
+     node_os(Package, OS1),
+     node_os(Package, OS2),
+     OS1 < OS2. %see [1]

 % can't have a non-buildable OS on a node we need to build
-:- build(Package), node_os(Package, OS), not buildable_os(OS),
-   error("No available OS can be built for").
+error(2, "Cannot concretize '{0} os={1}'. Operating system '{1}' is not buildable", Package, OS)
+  :- build(Package),
+     node_os(Package, OS),
+     not buildable_os(OS).

 % can't have dependencies on incompatible OS's
-:- depends_on(Package, Dependency),
-   node_os(Package, PackageOS),
-   node_os(Dependency, DependencyOS),
-   not os_compatible(PackageOS, DependencyOS),
-   build(Package),
-   error("Dependencies must have compatible OS's with their dependents").
+error(2, "{0} and dependency {1} have incompatible operating systems 'os={2}' and 'os={3}'", Package, Dependency, PackageOS, DependencyOS)
+  :- depends_on(Package, Dependency),
+     node_os(Package, PackageOS),
+     node_os(Dependency, DependencyOS),
+     not os_compatible(PackageOS, DependencyOS),
+     build(Package).

 % give OS choice weights according to os declarations
 node_os_weight(Package, Weight)
```
```diff
@@ -621,14 +676,24 @@ node_os(Package, OS) :- node_os_set(Package, OS), node(Package).
 %-----------------------------------------------------------------------------

 % Each node has only one target chosen among the known targets
-1 { node_target(Package, Target) : target(Target) } 1 :- node(Package), error("Each node must have exactly one target").
+{ node_target(Package, Target) : target(Target) } :- node(Package).
+
+error(2, "Cannot find valid target for '{0}'", Package)
+  :- node(Package),
+     C = #count{Target : node_target(Package, Target)},
+     C < 1.
+
+error(2, "Cannot concretize '{0}' with multiple targets\n    Requested 'target={1}' and 'target={2}'", Package, Target1, Target2)
+  :- node(Package),
+     node_target(Package, Target1),
+     node_target(Package, Target2),
+     Target1 < Target2. % see[1]

 % If a node must satisfy a target constraint, enforce it
-:- node_target(Package, Target),
-   node_target_satisfies(Package, Constraint),
-   not target_satisfies(Constraint, Target),
-   error("Node targets must satisfy node target constraints").
+error(1, "'{0} target={1}' cannot satisfy constraint 'target={2}'", Package, Target, Constraint)
+  :- node_target(Package, Target),
+     node_target_satisfies(Package, Constraint),
+     not target_satisfies(Constraint, Target).

 % If a node has a target and the target satisfies a constraint, then the target
 % associated with the node satisfies the same constraint
@@ -636,10 +701,10 @@ node_target_satisfies(Package, Constraint)
   :- node_target(Package, Target), target_satisfies(Constraint, Target).

 % If a node has a target, all of its dependencies must be compatible with that target
-:- depends_on(Package, Dependency),
-   node_target(Package, Target),
-   not node_target_compatible(Dependency, Target),
-   error("Dependency node targets must be compatible with dependent targets").
+error(2, "Cannot find compatible targets for {0} and {1}", Package, Dependency)
+  :- depends_on(Package, Dependency),
+     node_target(Package, Target),
+     not node_target_compatible(Dependency, Target).

 % Intermediate step for performance reasons
 % When the integrity constraint above was formulated including this logic
```
```diff
@@ -680,12 +745,12 @@ target_weight(Target, Package, Weight)
   :- package_target_weight(Target, Package, Weight).

 % can't use targets on node if the compiler for the node doesn't support them
-:- node_target(Package, Target),
-   not compiler_supports_target(Compiler, Version, Target),
-   node_compiler(Package, Compiler),
-   node_compiler_version(Package, Compiler, Version),
-   build(Package),
-   error("No satisfying compiler available is compatible with a satisfying target").
+error(2, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Target, Compiler, Version)
+  :- node_target(Package, Target),
+     not compiler_supports_target(Compiler, Version, Target),
+     node_compiler(Package, Compiler),
+     node_compiler_version(Package, Compiler, Version),
+     build(Package).

 % if a target is set explicitly, respect it
 node_target(Package, Target)
@@ -712,8 +777,10 @@ node_target_mismatch(Parent, Dependency)
     not node_target_match(Parent, Dependency).

 % disallow reusing concrete specs that don't have a compatible target
-:- node(Package), node_target(Package, Target), not target(Target),
-   error("No satisfying package's target is compatible with this machine").
+error(2, "'{0} target={1}' is not compatible with this machine", Package, Target)
+  :- node(Package),
+     node_target(Package, Target),
+     not target(Target).

 #defined node_target_set/2.
 #defined package_target_weight/3.
```
```diff
@@ -725,10 +792,19 @@ compiler(Compiler) :- compiler_version(Compiler, _).

 % There must be only one compiler set per built node. The compiler
 % is chosen among available versions.
-1 { node_compiler_version(Package, Compiler, Version) : compiler_version(Compiler, Version) } 1 :-
+{ node_compiler_version(Package, Compiler, Version) : compiler_version(Compiler, Version) } :-
     node(Package),
-    build(Package),
-    error("Each node must have exactly one compiler").
+    build(Package).

+error(2, "No valid compiler version found for '{0}'", Package)
+  :- node(Package),
+     C = #count{ Version : node_compiler_version(Package, _, Version)},
+     C < 1.
+error(2, "'{0}' compiler constraints '%{1}@{2}' and '%{3}@{4}' are incompatible", Package, Compiler1, Version1, Compiler2, Version2)
+  :- node(Package),
+     node_compiler_version(Package, Compiler1, Version1),
+     node_compiler_version(Package, Compiler2, Version2),
+     (Compiler1, Version1) < (Compiler2, Version2). % see[1]

 % Sometimes we just need to know the compiler and not the version
 node_compiler(Package, Compiler) :- node_compiler_version(Package, Compiler, _).
@@ -737,14 +813,22 @@ node_compiler(Package, Compiler) :- node_compiler_version(Package, Compiler, _).
 :- node_compiler(Package, Compiler1),
    node_compiler_version(Package, Compiler2, _),
    Compiler1 != Compiler2,
-   error("Internal error: mismatch between selected compiler and compiler version").
+   internal_error("Mismatch between selected compiler and compiler version").

+% If the compiler of a node cannot be satisfied, raise
+error(1, "No valid compiler for {0} satisfies '%{1}'", Package, Compiler)
+  :- node(Package),
+     node_compiler_version_satisfies(Package, Compiler, ":"),
+     C = #count{ Version : node_compiler_version(Package, Compiler, Version), compiler_version_satisfies(Compiler, ":", Version) },
+     C < 1.
+
 % If the compiler of a node must satisfy a constraint, then its version
 % must be chosen among the ones that satisfy said constraint
-1 { node_compiler_version(Package, Compiler, Version)
-    : compiler_version_satisfies(Compiler, Constraint, Version) } 1 :-
-    node_compiler_version_satisfies(Package, Compiler, Constraint),
-    error("Internal error: node compiler version mismatch").
+error(2, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
+  :- node(Package),
+     node_compiler_version_satisfies(Package, Compiler, Constraint),
+     C = #count{ Version : node_compiler_version(Package, Compiler, Version), compiler_version_satisfies(Compiler, Constraint, Version) },
+     C < 1.

 % If the node is associated with a compiler and the compiler satisfy a constraint, then
 % the compiler associated with the node satisfy the same constraint
@@ -762,11 +846,12 @@ node_compiler_version(Package, Compiler, Version) :- node_compiler_version_set(P
 % Cannot select a compiler if it is not supported on the OS
 % Compilers that are explicitly marked as allowed
 % are excluded from this check
-:- node_compiler_version(Package, Compiler, Version), node_os(Package, OS),
-   not compiler_supports_os(Compiler, Version, OS),
-   not allow_compiler(Compiler, Version),
-   build(Package),
-   error("No satisfying compiler available is compatible with a satisfying os").
+error(2, "{0} compiler '%{1}@{2}' incompatible with 'os={3}'", Package, Compiler, Version, OS)
+  :- node_compiler_version(Package, Compiler, Version),
+     node_os(Package, OS),
+     not compiler_supports_os(Compiler, Version, OS),
+     not allow_compiler(Compiler, Version),
+     build(Package).

 % If a package and one of its dependencies don't have the
 % same compiler there's a mismatch.
@@ -859,7 +944,7 @@ no_flags(Package, FlagType)
```
|
||||
%-----------------------------------------------------------------------------
|
||||
% the solver is free to choose at most one installed hash for each package
|
||||
{ hash(Package, Hash) : installed_hash(Package, Hash) } 1
|
||||
:- node(Package), error("Internal error: package must resolve to at most one hash").
|
||||
:- node(Package), internal_error("Package must resolve to at most one hash").
|
||||
|
||||
% you can't choose an installed hash for a dev spec
|
||||
:- hash(Package, Hash), variant_value(Package, "dev_path", _).
|
||||
@@ -909,6 +994,23 @@ build_priority(Package, 0) :- node(Package), not optimize_for_reuse().
|
||||
|
||||
#defined installed_hash/2.
|
||||
|
||||
%-----------------------------------------------------------------
|
||||
% Optimization to avoid errors
|
||||
%-----------------------------------------------------------------
|
||||
% Some errors are handled as rules instead of constraints because
|
||||
% it allows us to explain why something failed. Here we optimize
|
||||
% HEAVILY against the facts generated by those rules.
|
||||
#minimize{ 0@1000: #true}.
|
||||
#minimize{ 0@1001: #true}.
|
||||
#minimize{ 0@1002: #true}.
|
||||
|
||||
#minimize{ 1000@1000+Priority,Msg: error(Priority, Msg) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1: error(Priority, Msg, Arg1) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2: error(Priority, Msg, Arg1, Arg2) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3: error(Priority, Msg, Arg1, Arg2, Arg3) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3,Arg4: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3,Arg4,Arg5: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4, Arg5) }.
|
||||
|
||||
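The block above is the heart of the change: violated rules now produce weighted `error` facts instead of making the model unsatisfiable, and the `#minimize` directives penalize those facts so heavily that error-free models always win. A minimal, self-contained sketch of how a driver could ground such a program and render the `error(Priority, Msg, ...)` facts; the tiny program, fact names, and formatting are illustrative assumptions, not Spack's actual driver code:

```python
# Hypothetical driver sketch: ground a small program that reports failures as
# error/N facts (as in the rules above) and format them for the user.
import clingo

PROGRAM = """
#defined has_compiler/1.
node("hdf5").
% rule-based error: fires instead of making the model unsatisfiable
error(2, "no valid compiler version found for '{0}'", P) :- node(P), not has_compiler(P).
#minimize{ 1000@1000+Priority,Msg,Arg : error(Priority, Msg, Arg) }.
#show error/3.
"""


def format_error(sym):
    """Render an error(Priority, Msg, Arg1, ...) symbol as a message."""
    msg = sym.arguments[1].string
    args = [str(a).strip('"') for a in sym.arguments[2:]]
    return msg.format(*args)


ctl = clingo.Control()
ctl.add("base", [], PROGRAM)
ctl.ground([("base", [])])
ctl.solve(on_model=lambda model: print(
    "\n".join(format_error(s)
              for s in model.symbols(shown=True) if s.name == "error")))
```

Because the penalties sit at a very high optimization priority, the solver only keeps `error` facts when no clean model exists, and the facts double as human-readable diagnostics.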
%-----------------------------------------------------------------------------
% How to optimize the spec (high to low priority)
%-----------------------------------------------------------------------------
@@ -1088,3 +1190,11 @@ opt_criterion(1, "non-preferred targets").
#heuristic variant_value(Package, Variant, Value) : variant_default_value(Package, Variant, Value), node(Package). [10, true]
#heuristic provider(Package, Virtual) : possible_provider_weight(Package, Virtual, 0, _), virtual_node(Virtual). [10, true]
#heuristic node(Package) : possible_provider_weight(Package, Virtual, 0, _), virtual_node(Virtual). [10, true]

%-----------
% Notes
%-----------

% [1] Clingo ensures a total ordering among all atoms. We rely on that total ordering
% to reduce symmetry in the solution by checking `<` instead of `!=` in symmetric
% cases. These choices are made without loss of generality.
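Note [1] is the classic symmetry-breaking trick, and its effect is easy to see outside ASP: a strict `<` test visits each unordered pair once, while `!=` visits it twice. A hedged Python illustration (the version strings are made up; any total order works):

```python
# '<' halves the symmetric cases that '!=' would generate.
from itertools import product

versions = ['clang@13', 'gcc@10', 'gcc@9']

ordered = [(a, b) for a, b in product(versions, versions) if a < b]
symmetric = [(a, b) for a, b in product(versions, versions) if a != b]

assert len(symmetric) == 2 * len(ordered)
print(ordered)  # each conflicting pair would be reported exactly once
```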
@@ -34,3 +34,13 @@

% deprecated packages
#show deprecated/2.

% error types
#show error/2.
#show error/3.
#show error/4.
#show error/5.
#show error/6.
#show error/7.

% debug
@@ -62,22 +62,6 @@ def test_raising_exception_if_bootstrap_disabled(mutable_config):
        spack.bootstrap.store_path()


def test_raising_exception_module_importable():
    with pytest.raises(
        ImportError,
        match='cannot bootstrap the "asdf" Python module',
    ):
        spack.bootstrap.ensure_module_importable_or_raise("asdf")


def test_raising_exception_executables_in_path():
    with pytest.raises(
        RuntimeError,
        match="cannot bootstrap any of the asdf, fdsa executables",
    ):
        spack.bootstrap.ensure_executables_in_path_or_raise(["asdf", "fdsa"], "python")


@pytest.mark.regression('25603')
def test_bootstrap_deactivates_environments(active_mock_environment):
    assert spack.environment.active_environment() == active_mock_environment

@@ -532,7 +532,7 @@ def test_cdash_report_concretization_error(tmpdir, mock_fetch, install_mockery,
    # new or the old concretizer
    expected_messages = (
        'Conflicts in concretized spec',
        'A conflict was triggered',
        'conflicts with',
    )
    assert any(x in content for x in expected_messages)
@@ -41,6 +41,7 @@ def check_stage_path(monkeypatch, tmpdir):

    def fake_stage(pkg, mirror_only=False):
        assert pkg.path == expected_path
        assert os.path.isdir(expected_path), expected_path

    monkeypatch.setattr(spack.package.PackageBase, 'do_stage', fake_stage)


@@ -638,14 +638,11 @@ def test_conflicts_show_cores(self, conflict_spec, monkeypatch):
        if spack.config.get('config:concretizer') == 'original':
            pytest.skip('Testing debug statements specific to new concretizer')

        monkeypatch.setattr(spack.solver.asp, 'full_cores', True)
        monkeypatch.setattr(spack.solver.asp, 'minimize_cores', False)

        s = Spec(conflict_spec)
        with pytest.raises(spack.error.SpackError) as e:
            s.concretize()

        assert "conflict_trigger(" in e.value.message
        assert "conflict" in e.value.message

    def test_conflict_in_all_directives_true(self):
        s = Spec('when-directives-true')
@@ -6,7 +6,6 @@
import os.path
import sys
from datetime import datetime, timedelta
from textwrap import dedent

import pytest

@@ -271,37 +270,3 @@ def f(*args, **kwargs):
def test_dedupe():
    assert [x for x in dedupe([1, 2, 1, 3, 2])] == [1, 2, 3]
    assert [x for x in dedupe([1, -2, 1, 3, 2], key=abs)] == [1, -2, 3]


def test_grouped_exception():
    h = llnl.util.lang.GroupedExceptionHandler()

    def inner():
        raise ValueError('wow!')

    with h.forward('inner method'):
        inner()

    with h.forward('top-level'):
        raise TypeError('ok')

    assert h.grouped_message(with_tracebacks=False) == dedent("""\
        due to the following failures:
        inner method raised ValueError: wow!
        top-level raised TypeError: ok""")

    assert h.grouped_message(with_tracebacks=True) == dedent("""\
        due to the following failures:
        inner method raised ValueError: wow!
          File "{0}", \
line 283, in test_grouped_exception
            inner()
          File "{0}", \
line 280, in inner
            raise ValueError('wow!')

        top-level raised TypeError: ok
          File "{0}", \
line 286, in test_grouped_exception
            raise TypeError('ok')
        """).format(__file__)
@@ -5,7 +5,6 @@

import os
import re
import sys
from itertools import product

from spack.util.executable import which

@@ -19,8 +18,6 @@
ALLOWED_ARCHIVE_TYPES = [".".join(ext) for ext in product(
    PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + NOTAR_EXTS

is_windows = sys.platform == 'win32'


def allowed_archive(path):
    return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)

@@ -51,14 +48,15 @@ def _unzip(archive_file):
    Args:
        archive_file (str): absolute path of the file to be decompressed
    """
    exe = 'unzip'
    arg = '-q'
    if is_windows:
        exe = 'tar'
        arg = '-xf'
    unzip = which(exe, required=True)
    unzip.add_default_arg(arg)
    unzip(archive_file)
    try:
        from zipfile import ZipFile
        destination_abspath = os.getcwd()
        with ZipFile(archive_file, 'r') as zf:
            zf.extractall(destination_abspath)
    except ImportError:
        unzip = which('unzip', required=True)
        unzip.add_default_arg('-q')
        return unzip


def decompressor_for(path, extension=None):
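The rewritten `_unzip` above prefers the standard library's `zipfile` module and only falls back to an external tool when that import fails. The same pattern in isolation; the helper name, destination handling, and fallback command below are illustrative assumptions, not Spack's exact code:

```python
import os
import subprocess


def extract_zip(archive_file, dest=None):
    """Extract archive_file into dest (defaults to the current directory)."""
    dest = dest or os.getcwd()
    try:
        from zipfile import ZipFile  # may be unavailable on minimal builds
        with ZipFile(archive_file, 'r') as zf:
            zf.extractall(dest)
    except ImportError:
        # fall back to the external 'unzip' tool, as the code above does
        subprocess.check_call(['unzip', '-q', '-d', dest, archive_file])
```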
@@ -60,8 +60,8 @@ default:
.develop-build:
  extends: [ ".develop", ".build" ]
  variables:
    AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID}
    AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY}
    AWS_ACCESS_KEY_ID: ${MIRRORS_AWS_ACCESS_KEY_ID}
    AWS_SECRET_ACCESS_KEY: ${MIRRORS_AWS_SECRET_ACCESS_KEY}
    SPACK_SIGNING_KEY: ${PACKAGE_SIGNING_KEY}

########################################
@@ -335,7 +335,7 @@ _spacktivate() {
_spack() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --show-cores --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -b --bootstrap -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
        SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -b --bootstrap -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
    else
        SPACK_COMPREPLY="activate add analyze arch audit blame bootstrap build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find gc gpg graph help info install license list load location log-parse maintainers make-installer mark mirror module monitor patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style tags test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
    fi
@@ -1,99 +0,0 @@
diff --git a/src/cmake/thirdparty/SetupADIOS.cmake b/src/cmake/thirdparty/SetupADIOS.cmake
index 7a7f038d..4b56b7e0 100644
--- a/src/cmake/thirdparty/SetupADIOS.cmake
+++ b/src/cmake/thirdparty/SetupADIOS.cmake
@@ -10,32 +10,32 @@
 #
 ###############################################################################

-# first Check for ADIOS_DIR
+# first Check for ADIOS_ROOT

-if(NOT ADIOS_DIR)
-    MESSAGE(FATAL_ERROR "ADIOS support needs explicit ADIOS_DIR")
+if(NOT ADIOS_ROOT)
+    MESSAGE(FATAL_ERROR "ADIOS support needs explicit ADIOS_ROOT")
 endif()

-MESSAGE(STATUS "Looking for ADIOS using ADIOS_DIR = ${ADIOS_DIR}")
+MESSAGE(STATUS "Looking for ADIOS using ADIOS_ROOT = ${ADIOS_ROOT}")

 # CMake's FindADIOS module uses the ADIOS_ROOT env var
-set(ADIOS_ROOT ${ADIOS_DIR})
+set(ADIOS_ROOT ${ADIOS_ROOT})
 set(ENV{ADIOS_ROOT} ${ADIOS_ROOT})

 # Use CMake's FindADIOS module, which uses hdf5's compiler wrappers to extract
 # all the info about the hdf5 install
-include(${ADIOS_DIR}/etc/FindADIOS.cmake)
+include(${ADIOS_ROOT}/etc/FindADIOS.cmake)

-# FindADIOS sets ADIOS_DIR to it's installed CMake info if it exists
-# we want to keep ADIOS_DIR as the root dir of the install to be
+# FindADIOS sets ADIOS_ROOT to it's installed CMake info if it exists
+# we want to keep ADIOS_ROOT as the root dir of the install to be
 # consistent with other packages

-set(ADIOS_DIR ${ADIOS_ROOT} CACHE PATH "" FORCE)
+set(ADIOS_ROOT ${ADIOS_ROOT} CACHE PATH "" FORCE)
 # not sure why we need to set this, but we do
 #set(ADIOS_FOUND TRUE CACHE PATH "" FORCE)

 if(NOT ADIOS_FOUND)
-    message(FATAL_ERROR "ADIOS_DIR is not a path to a valid ADIOS install")
+    message(FATAL_ERROR "ADIOS_ROOT is not a path to a valid ADIOS install")
 endif()

 blt_register_library(NAME adios
diff --git a/src/cmake/thirdparty/SetupADIOS2.cmake b/src/cmake/thirdparty/SetupADIOS2.cmake
index d93c3e5b..3133c72c 100644
--- a/src/cmake/thirdparty/SetupADIOS2.cmake
+++ b/src/cmake/thirdparty/SetupADIOS2.cmake
@@ -10,21 +10,39 @@
 #
 ###############################################################################

-# first Check for ADIOS_DIR
-if(NOT ADIOS2_DIR)
-    MESSAGE(FATAL_ERROR "ADIOS2 support needs explicit ADIOS2_DIR")
+# Handle legacy usage of ADIOS2_DIR
+if (ADIOS2_DIR AND NOT ADIOS2_ROOT)
+    # If find_package(ADIOS2) has already been called this will fail
+    if (NOT EXISTS ${ADIOS2_DIR}/include)
+        get_filename_component(tmp "${ADIOS2_DIR}" DIRECTORY)
+        get_filename_component(tmp "${tmp}" DIRECTORY)
+        get_filename_component(tmp "${tmp}" DIRECTORY)
+        if (EXISTS ${tmp}/include)
+            set(ADIOS2_ROOT "${tmp}" CACHE PATH "")
+        else ()
+            message(FATAL_ERROR "Could not determine ADIOS2_ROOT from ADIOS2_DIR")
+        endif ()
+    else ()
+        set(ADIOS2_ROOT "${ADIOS2_DIR}" CACHE PATH "")
+    endif ()
+endif ()
+
+# Check for ADIOS_ROOT
+if(NOT ADIOS2_ROOT)
+    MESSAGE(FATAL_ERROR "ADIOS2 support needs explicit ADIOS2_ROOT")
 endif()

-MESSAGE(STATUS "Looking for ADIOS2 using ADIOS2_DIR = ${ADIOS2_DIR}")
+MESSAGE(STATUS "Looking for ADIOS2 using ADIOS2_ROOT = ${ADIOS2_ROOT}")

-set(ADIOS2_DIR_ORIG ${ADIOS2_DIR})
+set(ADIOS2_DIR_ORIG ${ADIOS2_ROOT})

 find_package(ADIOS2 REQUIRED
              NO_DEFAULT_PATH
-             PATHS ${ADIOS2_DIR}/lib/cmake/adios2)
+             PATHS ${ADIOS2_ROOT})

+# ADIOS2_DIR is set by find_package
 message(STATUS "FOUND ADIOS2 at ${ADIOS2_DIR}")

 blt_register_library(NAME adios2
-                     INCLUDES ${ADIOS2_INCLUDE_DIR}
-                     LIBRARIES ${ADIOS2_LIB_DIRS} ${ADIOS2_LIBRARIES} )
+                     INCLUDES ${ADIOS2_INCLUDE_DIR}
+                     LIBRARIES ${ADIOS2_LIB_DIRS} ${ADIOS2_LIBRARIES} )
@@ -102,9 +102,6 @@ class Ascent(CMakePackage, CudaPackage):
    # patch for allowing +shared+cuda
    # https://github.com/Alpine-DAV/ascent/pull/903
    patch('ascent-shared-cuda-pr903.patch', when='@0.8.0')
    # patch for finding ADIOS2 more reliably
    # https://github.com/Alpine-DAV/ascent/pull/922
    patch('ascent-find-adios2-pr922.patch', when='@0.8.0')

    ##########################################################################
    # package dependencies
@@ -1,88 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Bricks(CMakePackage):

    """Bricks is a data layout and code generation framework,
    enabling performance-portable stencil computations across
    a multitude of architectures."""

    # url for your package's homepage here.
    homepage = "https://bricks.run/"
    git = 'https://github.com/CtopCsUtahEdu/bricklib.git'

    test_requires_compiler = True

    # List of GitHub accounts to notify when the package is updated.
    maintainers = ['ztuowen', 'drhansj']

    version('r0.1', branch='r0.1')

    variant('cuda', default=False, description='Build bricks with CUDA enabled')

    # Building a variant of cmake without openssl is to match how the
    # ECP E4S project builds cmake in their e4s-base-cuda Docker image
    depends_on('cmake', type='build')
    depends_on('autoconf', type='build')
    depends_on('automake', type='build')
    depends_on('libtool', type='build')
    depends_on('opencl-clhpp', when='+cuda')
    depends_on('cuda', when='+cuda')
    depends_on('mpi')

    def cmake_args(self):
        """CMake arguments for configure stage"""
        args = []

        return args

    def flag_handler(self, name, flags):
        """Set build flags as needed"""
        if name in ['cflags', 'cxxflags', 'cppflags']:
            # There are many vector instrinsics used in this package. If
            # the package is built on a native architecture, then it likely
            # will not run (illegal instruction fault) on a less feature-
            # rich architecture.
            # If you intend to use this package in an architecturally-
            # heterogeneous environment, then the package should be build
            # with "target=x86_64". This will ensure that all Intel
            # architectures can use the libraries and tests in this
            # project by forceing the AVX2 flag in gcc.
            if name == 'cxxflags' and self.spec.target == 'x86_64':
                flags.append('-mavx2')
                return (None, flags, None)
        return(flags, None, None)

    @run_after('install')
    def copy_test_sources(self):
        """Files to copy into test cache"""
        srcs = [join_path('examples', 'external', 'CMakeLists.txt'),
                join_path('examples', 'external', 'main.cpp'),
                join_path('examples', 'external', '7pt.py')]
        self.cache_extra_test_sources(srcs)

    def test(self):
        """Test bricklib package"""
        # Test prebuilt binary
        source_dir = join_path(self.test_suite.current_test_cache_dir,
                               'examples', 'external')

        self.run_test(exe='cmake',
                      options=['.'],
                      purpose='Configure bricklib example',
                      work_dir=source_dir)

        self.run_test(exe='cmake',
                      options=['--build', '.'],
                      purpose='Build bricklib example',
                      work_dir=source_dir)

        self.run_test(exe=join_path(source_dir, 'example'),
                      options=[],
                      purpose='Execute bricklib example',
                      work_dir=source_dir)
@@ -18,7 +18,6 @@ class Ceed(BundlePackage, CudaPackage, ROCmPackage):

    maintainers = ['jedbrown', 'v-dobrev', 'tzanio']

    version('5.0.0')
    version('4.0.0')
    version('3.0.0')
    version('2.0.0')
@@ -33,24 +32,11 @@ class Ceed(BundlePackage, CudaPackage, ROCmPackage):
            description='Build PETSc and HPGMG')
    variant('pumi', default=True,
            description='Build PUMI')
    variant('omega-h', default=True,
            description='Build Omega_h')
    variant('quickbuild', default=True,
            description='Speed-up the build by disabling variants in packages')
    # TODO: Add 'int64' variant?

    # LibCEED
    # ceed 5.0
    with when('@5.0.0'):
        depends_on('libceed@0.10~occa')
        depends_on('libceed~cuda', when='~cuda')
        for arch in CudaPackage.cuda_arch_values:
            depends_on('libceed+cuda+magma cuda_arch={0}'.format(arch),
                       when='+cuda cuda_arch={0}'.format(arch))
        depends_on('libceed~rocm', when='~rocm')
        for target in ROCmPackage.amdgpu_targets:
            depends_on('libceed+rocm amdgpu_target={0}'.format(target),
                       when='+rocm amdgpu_target={0}'.format(target))
    # ceed 4.0
    depends_on('libceed@0.8~cuda', when='@4.0.0~cuda')
    for arch in CudaPackage.cuda_arch_values:
@@ -78,15 +64,7 @@ class Ceed(BundlePackage, CudaPackage, ROCmPackage):
    depends_on('libceed@0.2+occa', when='@1.0.0+occa')
    depends_on('libceed@0.2~occa', when='@1.0.0~occa')

    # FMS
    # ceed-5.0
    depends_on('libfms@0.2.0', when='@5.0.0')
    depends_on('libfms@0.2.0~conduit', when='@5.0.0+quickbuild')

    # OCCA
    # ceed-5.0
    depends_on('occa@1.1.0~cuda', when='@5.0.0+occa~cuda')
    depends_on('occa@1.1.0+cuda', when='@5.0.0+occa+cuda')
    # ceed-4.0
    depends_on('occa@1.1.0~cuda', when='@4.0.0+occa~cuda')
    depends_on('occa@1.1.0+cuda', when='@4.0.0+occa+cuda')
@@ -101,24 +79,22 @@ class Ceed(BundlePackage, CudaPackage, ROCmPackage):
    depends_on('occa@1.0.0-alpha.5+cuda', when='@1.0.0+occa+cuda')

    # NekRS
    # ceed-4.0 and ceed-5.0
    depends_on('nekrs@21.0%gcc', when='@4.0.0:5+nek')
    # ceed-4.0
    depends_on('nekrs@21.0', when='@4.0.0+nek')
    for arch in CudaPackage.cuda_arch_values:
        depends_on('nekrs@21.0+cuda cuda_arch={0}'.format(arch),
                   when='@4.0.0:5+nek+cuda cuda_arch={0}'.format(arch))
                   when='@4.0.0+nek+cuda cuda_arch={0}'.format(arch))
    for target in ROCmPackage.amdgpu_targets:
        depends_on('nekrs@21.0+rocm amdgpu_target={0}'.format(target),
                   when='@4.0.0:5+nek+rocm amdgpu_target={0}'.format(target))
                   when='@4.0.0+nek+rocm amdgpu_target={0}'.format(target))

    # Nek5000, GSLIB, Nekbone, and NekCEM
    # ceed-5.0 - specific
    depends_on('gslib@1.0.7', when='@5.0.0+nek')
    # ceed-3.0, ceed-4.0, and ceed-5.0
    depends_on('nek5000@19.0', when='@3.0.0:5+nek')
    depends_on('nektools@19.0%gcc', when='@3.0.0:5+nek')
    # ceed-3.0 and ceed-4.0
    depends_on('nek5000@19.0', when='@3.0.0:4+nek')
    depends_on('nektools@19.0%gcc', when='@3.0.0:4+nek')
    depends_on('gslib@1.0.6', when='@3.0.0:4+nek')
    depends_on('nekbone@17.0', when='@3.0.0:5+nek')
    depends_on('nekcem@c8db04b', when='@3.0.0:5+nek')
    depends_on('nekbone@17.0', when='@3.0.0:4+nek')
    depends_on('nekcem@c8db04b', when='@3.0.0:4+nek')
    # ceed-2.0
    depends_on('nek5000@17.0', when='@2.0.0+nek')
    depends_on('nektools@17.0%gcc', when='@2.0.0+nek')
@@ -133,21 +109,6 @@ class Ceed(BundlePackage, CudaPackage, ROCmPackage):
    depends_on('nekcem@0b8bedd', when='@1.0.0+nek')

    # PETSc
    # ceed 5.0
    with when('@5.0.0+petsc'):
        depends_on('petsc@3.17')
        depends_on('ratel@0.1.2')
        for arch in CudaPackage.cuda_arch_values:
            depends_on('petsc+cuda cuda_arch={0}'.format(arch),
                       when='+cuda cuda_arch={0}'.format(arch))
            depends_on('ratel+cuda cuda_arch={0}'.format(arch),
                       when='+cuda cuda_arch={0}'.format(arch))
        for target in ROCmPackage.amdgpu_targets:
            depends_on('petsc+rocm amdgpu_target={0}'.format(target),
                       when='+rocm amdgpu_target={0}'.format(target))
            depends_on('ratel+rocm amdgpu_target={0}'.format(target),
                       when='+rocm amdgpu_target={0}'.format(target))
        depends_on('petsc~hdf5~superlu-dist', when='+quickbuild')
    # ceed 4.0
    depends_on('petsc@3.15.0:3.15', when='@4.0.0:4+petsc')
    for arch in CudaPackage.cuda_arch_values:
@@ -198,13 +159,6 @@ class Ceed(BundlePackage, CudaPackage, ROCmPackage):
    depends_on('hpgmg@a0a5510df23b+fe', when='@1.0.0+petsc')

    # MAGMA
    # ceed 5.0
    for arch in CudaPackage.cuda_arch_values:
        depends_on('magma@2.6.2+cuda cuda_arch={0}'.format(arch),
                   when='@5.0.0+cuda cuda_arch={0}'.format(arch))
    for target in ROCmPackage.amdgpu_targets:
        depends_on('magma@2.6.2~cuda+rocm amdgpu_target={0}'.format(target),
                   when='@5.0.0+rocm amdgpu_target={0}'.format(target))
    # ceed-4.0
    for arch in CudaPackage.cuda_arch_values:
        depends_on('magma@2.5.4 cuda_arch={0}'.format(arch),
@@ -217,8 +171,6 @@ class Ceed(BundlePackage, CudaPackage, ROCmPackage):
    depends_on('magma@2.3.0', when='@1.0.0+cuda')

    # PUMI
    # ceed-5.0
    depends_on('pumi@2.2.7', when='@5.0.0+pumi')
    # ceed-4.0
    depends_on('pumi@2.2.5', when='@4.0.0+pumi')
    # ceed-3.0
@@ -228,28 +180,7 @@ class Ceed(BundlePackage, CudaPackage, ROCmPackage):
    # ceed-1.0
    depends_on('pumi@2.1.0', when='@1.0.0+pumi')

    # Omega_h
    # ceed-5.0
    depends_on('omega-h@scorec.10.1.0', when='@5.0.0+omega-h')
    depends_on('omega-h~trilinos', when='@5.0.0+omega-h+quickbuild')

    # MFEM, Laghos, Remhos
    # ceed 5.0
    with when('@5.0.0+mfem'):
        depends_on('mfem@4.4.0+mpi+examples+miniapps')
        depends_on('mfem+petsc', when='+petsc')
        depends_on('mfem+pumi', when='+pumi')
        depends_on('mfem+gslib', when='+nek')
        depends_on('mfem+libceed+fms')
        for arch in CudaPackage.cuda_arch_values:
            depends_on('mfem+cuda cuda_arch={0}'.format(arch),
                       when='+cuda cuda_arch={0}'.format(arch))
        for target in ROCmPackage.amdgpu_targets:
            depends_on('mfem+rocm amdgpu_target={0}'.format(target),
                       when='+rocm amdgpu_target={0}'.format(target))
        depends_on('mfem+occa', when='+occa')
        depends_on('laghos@3.1')
        depends_on('remhos@1.0')
    # ceed-4.0
    depends_on('mfem@4.2.0+mpi+examples+miniapps', when='@4.0.0+mfem~petsc')
    depends_on('mfem@4.2.0+mpi+petsc+examples+miniapps',
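Throughout the `ceed` changes above, version-specific dependencies are grouped under `with when('@5.0.0'):` blocks, so every `depends_on` inside inherits that condition (and composes with any `when=` keyword), which keeps the per-arch loops readable. A minimal sketch of the idiom; the package and dependency names are made up:

```python
from spack import *


class Example(BundlePackage, CudaPackage):
    """Hypothetical bundle showing conditional dependency grouping."""

    version('2.0.0')
    version('1.0.0')

    # every directive in this block implicitly carries when='@2.0.0'
    with when('@2.0.0'):
        depends_on('somelib@2.0')
        depends_on('somelib~cuda', when='~cuda')
        for arch in CudaPackage.cuda_arch_values:
            depends_on('somelib+cuda cuda_arch={0}'.format(arch),
                       when='+cuda cuda_arch={0}'.format(arch))
```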
@@ -13,7 +13,6 @@ class Gslib(Package):
    git = "https://github.com/gslib/gslib.git"

    version('develop', branch='master')
    version('1.0.7', tag='v1.0.7')
    version('1.0.6', tag='v1.0.6')
    version('1.0.5', tag='v1.0.5')
    version('1.0.4', tag='v1.0.4')

@@ -34,7 +34,7 @@ class Laghos(MakefilePackage):
    depends_on('mfem+mpi~metis', when='~metis')

    depends_on('mfem@develop', when='@develop')
    depends_on('mfem@4.2.0:', when='@3.1')
    depends_on('mfem@4.2.0', when='@3.1')
    depends_on('mfem@4.1.0:4.1', when='@3.0')
    # Recommended mfem version for laghos v2.0 is: ^mfem@3.4.1-laghos-v2.0
    depends_on('mfem@3.4.0:', when='@2.0')

@@ -12,7 +12,7 @@ class Libceed(MakefilePackage, CudaPackage, ROCmPackage):
    homepage = "https://github.com/CEED/libCEED"
    git = "https://github.com/CEED/libCEED.git"

    maintainers = ['jedbrown', 'v-dobrev', 'tzanio', 'jeremylt']
    maintainers = ['jedbrown', 'v-dobrev', 'tzanio']

    version('develop', branch='main')
    version('0.10.1', tag='v0.10.1')
@@ -107,8 +107,6 @@ def common_make_opts(self):

        if '+cuda' in spec:
            makeopts += ['CUDA_DIR=%s' % spec['cuda'].prefix]
            makeopts += ['CUDA_ARCH=sm_%s' %
                         spec.variants['cuda_arch'].value]
            if spec.satisfies('@:0.4'):
                nvccflags = ['-ccbin %s -Xcompiler "%s" -Xcompiler %s' %
                             (compiler.cxx, opt, compiler.cc_pic_flag)]
@@ -120,8 +118,6 @@ def common_make_opts(self):

        if '+rocm' in spec:
            makeopts += ['HIP_DIR=%s' % spec['hip'].prefix]
            amdgpu_target = ','.join(spec.variants['amdgpu_target'].value)
            makeopts += ['HIP_ARCH=%s' % amdgpu_target]
            if spec.satisfies('@0.8'):
                makeopts += ['HIPBLAS_DIR=%s' % spec['hipblas'].prefix]

@@ -142,8 +138,7 @@ def build_targets(self):

    @property
    def install_targets(self):
        return ['install', 'prefix={0}'.format(self.prefix)] + \
            self.common_make_opts
        return ['prefix={0}'.format(self.prefix)] + self.common_make_opts

    def check(self):
        make('prove', *self.common_make_opts, parallel=False)
@@ -1,59 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Libfms(CMakePackage):
    """Field and Mesh Specification (FMS) library"""

    homepage = "https://github.com/CEED/FMS"
    git = "https://github.com/CEED/FMS.git"

    tags = ['FEM', 'Meshes', 'Fields', 'High-order', 'I/O', 'Data-exchange']

    maintainers = ['v-dobrev', 'tzanio', 'cwsmith']

    version('develop', branch='master')
    version('0.2.0', tag='v0.2')

    variant('conduit', default=True,
            description='Build with Conduit I/O support')
    variant('shared', default=True,
            description='Build shared libraries')

    depends_on('cmake@3.1:', type='build')
    depends_on('conduit@0.7.1:', when='+conduit')

    def cmake_args(self):
        args = []
        args.extend([
            self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
        ])
        if '+conduit' in self.spec:
            args.extend([
                self.define('CONDUIT_DIR', self.spec['conduit'].prefix)
            ])

        return args

    @property
    def headers(self):
        """Export the FMS headers.
        Sample usage: spec['libfms'].headers.cpp_flags
        """
        fms_h_names = ['fms', 'fmsio']
        hdrs = find_headers(fms_h_names, self.prefix.include, recursive=False)
        return hdrs or None  # Raise an error if no headers are found

    @property
    def libs(self):
        """Export the FMS library.
        Sample usage: spec['libfms'].libs.ld_flags
        """
        is_shared = '+shared' in self.spec
        libs = find_libraries('libfms', root=self.prefix, shared=is_shared,
                              recursive=True)
        return libs or None  # Raise an error if no libs are found
@@ -102,10 +102,10 @@ class Mfem(Package, CudaPackage, ROCmPackage):
            description='Build static library')
    variant('shared', default=False,
            description='Build shared library')
    variant('mpi', default=True, sticky=True,
    variant('mpi', default=True,
            description='Enable MPI parallelism')
    # Can we make the default value for 'metis' to depend on the 'mpi' value?
    variant('metis', default=True, sticky=True,
    variant('metis', default=True,
            description='Enable METIS support')
    variant('openmp', default=False,
            description='Enable OpenMP parallelism')
@@ -153,8 +153,6 @@ class Mfem(Package, CudaPackage, ROCmPackage):
            description='Enable secure sockets using GnuTLS')
    variant('libunwind', default=False,
            description='Enable backtrace on error support using Libunwind')
    variant('fms', default=False, when='@4.3.0:',
            description='Enable FMS I/O support')
    # TODO: SIMD, Ginkgo, ADIOS2, HiOp, MKL CPardiso, Axom/Sidre
    variant('timer', default='auto',
            values=('auto', 'std', 'posix', 'mac', 'mpi'),
@@ -289,7 +287,6 @@ class Mfem(Package, CudaPackage, ROCmPackage):
    depends_on('gnutls', when='+gnutls')
    depends_on('conduit@0.3.1:,master:', when='+conduit')
    depends_on('conduit+mpi', when='+conduit+mpi')
    depends_on('libfms@0.2.0:', when='+fms')

    # The MFEM 4.0.0 SuperLU interface fails when using hypre@2.16.0 and
    # superlu-dist@6.1.1. See https://github.com/mfem/mfem/issues/983.
@@ -489,7 +486,6 @@ def find_optional_library(name, prefix):
            'MFEM_USE_AMGX=%s' % yes_no('+amgx'),
            'MFEM_USE_CEED=%s' % yes_no('+libceed'),
            'MFEM_USE_UMPIRE=%s' % yes_no('+umpire'),
            'MFEM_USE_FMS=%s' % yes_no('+fms'),
            'MFEM_MPIEXEC=%s' % mfem_mpiexec,
            'MFEM_MPIEXEC_NP=%s' % mfem_mpiexec_np]

@@ -834,12 +830,6 @@ def find_optional_library(name, prefix):
                'CONDUIT_OPT=%s' % conduit_opt_flags,
                'CONDUIT_LIB=%s' % ld_flags_from_library_list(libs)]

        if '+fms' in spec:
            libfms = spec['libfms']
            options += [
                'FMS_OPT=%s' % libfms.headers.cpp_flags,
                'FMS_LIB=%s' % ld_flags_from_library_list(libfms.libs)]

        make('config', *options, parallel=False)
        make('info', parallel=False)
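The `mfem` hunks above touch two variant features worth noting: `sticky=True`, which forbids the concretizer from silently flipping a default to satisfy constraints, and `when='@4.3.0:'`, which makes the `fms` variant exist only on matching versions. A hedged sketch of a version-gated variant wired into build options; the class and option names are illustrative, not MFEM's real package:

```python
from spack import *


class Example(MakefilePackage):
    """Hypothetical package with a version-gated variant."""

    version('4.4.0')
    version('4.2.0')

    # only versions >= 4.3.0 know about FMS; older specs reject '+fms'
    variant('fms', default=False, when='@4.3.0:',
            description='Enable FMS I/O support')

    depends_on('libfms@0.2.0:', when='+fms')

    @property
    def build_targets(self):
        use_fms = 'YES' if '+fms' in self.spec else 'NO'
        return ['MFEM_USE_FMS=%s' % use_fms]
```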
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


class OmegaH(CMakePackage, CudaPackage):
class OmegaH(CMakePackage):
    """Omega_h is a C++11 library providing data structures and algorithms
    for adaptive discretizations. Its specialty is anisotropic triangle and
    tetrahedral mesh adaptation. It runs efficiently on most modern HPC
@@ -45,12 +45,6 @@ class OmegaH(CMakePackage, CudaPackage):
    depends_on('mpi', when='+mpi')
    depends_on('trilinos +kokkos', when='+trilinos')
    depends_on('zlib', when='+zlib')
    # Note: '+cuda' and 'cuda_arch' variants are added by the CudaPackage
    depends_on('cuda', when='+cuda')
    conflicts('cuda@11.2:', when='@scorec.10.1.0:', msg='Thrust is broken in CUDA >= 11.2.* see https://github.com/sandialabs/omega_h/issues/366')
    # the sandia repo has a fix for cuda > 11.2 support
    # see github.com/sandialabs/omega_h/pull/373
    conflicts('cuda@11.2:', when='@:9.34.4', msg='Thrust is broken in CUDA >= 11.2.* see https://github.com/sandialabs/omega_h/issues/366')

    # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=86610
    conflicts('%gcc@8:8.2', when='@:9.22.1')
@@ -72,21 +66,10 @@ def cmake_args(self):
            args.append('-DBUILD_SHARED_LIBS:BOOL=OFF')
        if '+mpi' in self.spec:
            args.append('-DOmega_h_USE_MPI:BOOL=ON')
            ver = self.spec.version
            # old versions don't call find_package(MPI)
            if ver < Version('9.33.2') and 'scorec' not in str(ver):
                args.append('-DCMAKE_CXX_COMPILER:FILEPATH={0}'.format(
                    self.spec['mpi'].mpicxx))
            args.append('-DCMAKE_CXX_COMPILER:FILEPATH={0}'.format(
                self.spec['mpi'].mpicxx))
        else:
            args.append('-DOmega_h_USE_MPI:BOOL=OFF')
        if '+cuda' in self.spec:
            args.append('-DOmega_h_USE_CUDA:BOOL=ON')
            cuda_arch_list = self.spec.variants['cuda_arch'].value
            cuda_arch = cuda_arch_list[0]
            if cuda_arch != 'none':
                args.append('-DOmega_h_CUDA_ARCH={0}'.format(cuda_arch))
        else:
            args.append('-DOmega_h_USE_CUDA:BOOL=OFF')
        if '+trilinos' in self.spec:
            args.append('-DOmega_h_USE_Trilinos:BOOL=ON')
        if '+zlib' in self.spec:
@@ -1,22 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyPyworld(PythonPackage):
    """PyWorld wrappers WORLD, which is a free software for high-quality speech
    analysis, manipulation and synthesis. It can estimate fundamental frequency
    (F0), aperiodicity and spectral envelope and also generate the speech like
    input speech with only estimated parameters.i"""

    homepage = "https://github.com/JeremyCCHsu/Python-Wrapper-for-World-Vocoder"
    pypi = "pyworld/pyworld-0.3.0.tar.gz"

    version('0.3.0', sha256='e19b5d8445e0c4fc45ded71863aeaaf2680064b4626b0e7c90f72e9ace9f6b5b')

    depends_on('py-setuptools', type='build')
    depends_on('py-numpy@:1.19', type=('build', 'run'))
    depends_on('py-cython@0.24.0:', type='build')
@@ -30,7 +30,6 @@ class Qt(Package):

    phases = ['configure', 'build', 'install']

    version('5.15.4', sha256='615ff68d7af8eef3167de1fd15eac1b150e1fd69d1e2f4239e54447e7797253b')
    version('5.15.3', sha256='b7412734698a87f4a0ae20751bab32b1b07fdc351476ad8e35328dbe10efdedb')
    version('5.15.2', sha256='3a530d1b243b5dec00bc54937455471aaa3e56849d2593edb8ded07228202240')
    version('5.14.2', sha256='c6fcd53c744df89e7d3223c02838a33309bd1c291fcb6f9341505fe99f7f19fa')

@@ -64,8 +64,7 @@ def build_targets(self):

    @property
    def install_targets(self):
        return ['install', 'prefix={0}'.format(self.prefix)] + \
            self.common_make_opts
        return ['prefix={0}'.format(self.prefix)] + self.common_make_opts

    def check(self):
        make('prove', *self.common_make_opts, parallel=False)

@@ -28,7 +28,6 @@ class Tau(Package):
    tags = ['e4s']

    version('master', branch='master')
    version('2.31.1', sha256='bf445b9d4fe40a5672a7b175044d2133791c4dfb36a214c1a55a931aebc06b9d')
    version('2.31', sha256='27e73c395dd2a42b91591ce4a76b88b1f67663ef13aa19ef4297c68f45d946c2')
    version('2.30.2', sha256='43f84a15b71a226f8a64d966f0cb46022bcfbaefb341295ecc6fa80bb82bbfb4')
    version('2.30.1', sha256='9c20ca1b4f4e80d885f24491cee598068871f0e9dd67906a5e47e4b4147d08fc')
var/spack/repos/builtin/packages/warpx/2626.patch (new file, 34 lines)
@@ -0,0 +1,34 @@
From 9785e706229622626133c4b03c7abd004f62023f Mon Sep 17 00:00:00 2001
From: Axel Huebl <axel.huebl@plasma.ninja>
Date: Sat, 4 Dec 2021 15:28:13 -0800
Subject: [PATCH] Fix: Installed Symlink LIB

The latest patch to these routines broke our library alias in installs.

By default, this variable is relative and needs the prefix appended.
In some cases, e.g., if externally set, it can already be absolute. In that
case, we skip adding the prefix.
---
 CMakeLists.txt | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 04092ba962..a549546ab9 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -343,9 +343,14 @@ if(WarpX_LIB)
     else()
         set(mod_ext "so")
     endif()
+    if(IS_ABSOLUTE ${CMAKE_INSTALL_LIBDIR})
+        set(ABS_INSTALL_LIB_DIR ${CMAKE_INSTALL_LIBDIR})
+    else()
+        set(ABS_INSTALL_LIB_DIR ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR})
+    endif()
     install(CODE "file(CREATE_LINK
         $<TARGET_FILE_NAME:shared>
-        ${CMAKE_INSTALL_LIBDIR}/libwarpx.${lib_suffix}.${mod_ext}
+        ${ABS_INSTALL_LIB_DIR}/libwarpx.${lib_suffix}.${mod_ext}
         COPY_ON_ERROR SYMBOLIC)")
 endif()
@@ -132,22 +132,7 @@ class Warpx(CMakePackage):
    # The symbolic aliases for our +lib target were missing in the install
    # location
    # https://github.com/ECP-WarpX/WarpX/pull/2626
    patch('https://github.com/ECP-WarpX/WarpX/pull/2626.patch?full_index=1',
          sha256='a431d4664049d6dcb6454166d6a948d8069322a111816ca5ce01553800607544',
          when='@21.12')

    # Workaround for AMReX<=22.06 no-MPI Gather
    # https://github.com/ECP-WarpX/WarpX/pull/3134
    # https://github.com/AMReX-Codes/amrex/pull/2793
    patch('https://github.com/ECP-WarpX/WarpX/pull/3134.patch?full_index=1',
          sha256='b786ce64a3c2c2b96ff2e635f0ee48532e4ae7ad9637dbf03f11c0768c290690',
          when='@22.02:22.05')

    # Forgot to install ABLASTR library
    # https://github.com/ECP-WarpX/WarpX/pull/3141
    patch('https://github.com/ECP-WarpX/WarpX/pull/3141.patch?full_index=1',
          sha256='dab6fb44556ee1fd466a4cb0e20f89bde1ce445c9a51a2c0f59d1740863b5e7d',
          when='@22.04,22.05')
    patch('2626.patch', when='@21.12')

    def cmake_args(self):
        spec = self.spec
@@ -183,15 +168,10 @@ def cmake_args(self):
    def libs(self):
        libsuffix = {'1': '1d', '2': '2d', '3': '3d', 'rz': 'rz'}
        dims = self.spec.variants['dims'].value
        libs = find_libraries(
        return find_libraries(
            ['libwarpx.' + libsuffix[dims]], root=self.prefix, recursive=True,
            shared=True
        )
        libs += find_libraries(
            ['libablastr'], root=self.prefix, recursive=True,
            shared=self.spec.variants['shared']
        )
        return libs

    # WarpX has many examples to serve as a suitable smoke check. One
    # that is typical was chosen here
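The `warpx` change above swaps `patch()` calls that fetch from GitHub (pinned by `sha256`) for a single patch file vendored next to `package.py`. Both forms in one hedged sketch; the class name, URL, checksum, and file name are placeholders:

```python
from spack import *


class Example(CMakePackage):
    """Hypothetical package showing the two patch() styles."""

    version('21.12', sha256='0' * 64)  # placeholder checksum

    # remote patch: fetched at install time, must be pinned with sha256
    patch('https://example.com/fix.patch?full_index=1',
          sha256='a' * 64,  # placeholder
          when='@21.12')

    # vendored patch: shipped in the package directory, no network needed
    patch('fix.patch', when='@21.12')
```

Vendoring trades a little repository size for reproducibility: the build no longer depends on the remote host serving the same bytes.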