Compare commits: `develop-20...features/c` (142 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 54e5439dd6 | |
| | 9f1a30d3b5 | |
| | 1340995249 | |
| | afebc11742 | |
| | 34e9fc612c | |
| | 1d8ff7f742 | |
| | 0e27f05611 | |
| | 19aaa97ff2 | |
| | 990309355f | |
| | 2cb66e6e44 | |
| | cfaade098a | |
| | ed65532e27 | |
| | 696d4a1b85 | |
| | 8def75b414 | |
| | 5389db821d | |
| | 0d5ae3a809 | |
| | b61ad8d2a8 | |
| | b35db020eb | |
| | ca1d15101e | |
| | c9ec5fb9ac | |
| | 71abb8c7f0 | |
| | 4dafae8d17 | |
| | b2b00df5cc | |
| | 114e5d4767 | |
| | fd70e7fb31 | |
| | 77760c8ea4 | |
| | 737a6dcc73 | |
| | 3826fe3765 | |
| | edb11941b2 | |
| | 1bd58a8026 | |
| | f8e0c8caed | |
| | d0412c1578 | |
| | ec500adb50 | |
| | 30f5c74614 | |
| | 713eb210ac | |
| | a022e45866 | |
| | 82685a68d9 | |
| | b19691d503 | |
| | 54ea860b37 | |
| | fb598baa53 | |
| | 02763e967a | |
| | 2846be315b | |
| | 4818b75814 | |
| | b613bf3855 | |
| | 3347372a7b | |
| | c417a77a19 | |
| | 90d0d0176c | |
| | 72b9f89504 | |
| | a89f1b1bf4 | |
| | c6e26251a1 | |
| | 190a1bf523 | |
| | e381e166ec | |
| | 2f145b2684 | |
| | 4c7748e954 | |
| | 86485dea14 | |
| | 00f8f5898a | |
| | f41d7a89f3 | |
| | 4f07205c63 | |
| | 08f9c7670e | |
| | b451791336 | |
| | 47f176d635 | |
| | b6ae751657 | |
| | 9bb5cffc73 | |
| | 135b44ca59 | |
| | d3aca68e8f | |
| | fb83f8ef31 | |
| | f69c18a922 | |
| | b95a9d2e47 | |
| | def4d19980 | |
| | 1db91e0ccd | |
| | 34ebe7f53c | |
| | d07d5410f3 | |
| | 1db73eb1f2 | |
| | 2da34de519 | |
| | d237430f47 | |
| | 3f0adae9ef | |
| | 3b4d7bf119 | |
| | b3087b32c6 | |
| | ad9c90cb2e | |
| | 1b0e113a9d | |
| | 6df5738482 | |
| | 927d831612 | |
| | 3f3c75e56a | |
| | 9733bb3da8 | |
| | 1de5117ef1 | |
| | cf8f44ae5a | |
| | 006e69265e | |
| | eaec3062a1 | |
| | d5eb5106b0 | |
| | 9f8edbf6bf | |
| | a4301badef | |
| | 4565811556 | |
| | b94d54e4d9 | |
| | a410b22098 | |
| | c1a73878ea | |
| | ae553051c8 | |
| | b94e22b284 | |
| | e25dcf73cd | |
| | b7cc4bd247 | |
| | 22c95923e3 | |
| | c050b99a06 | |
| | 60f82685ae | |
| | 27ab53b68a | |
| | 907a80ca71 | |
| | a53cc93016 | |
| | 6ad0dc3722 | |
| | 87d4bdaa02 | |
| | 36394aab2f | |
| | 358947fc03 | |
| | 477a3c0ef6 | |
| | c6c5e11353 | |
| | 29e2997bd5 | |
| | 41bd6a75d5 | |
| | 0976ad3184 | |
| | fc1d9ba550 | |
| | 61f0088a27 | |
| | c202a045e6 | |
| | 843e1e80f0 | |
| | 643c028308 | |
| | d823037c40 | |
| | 4d945be955 | |
| | a4ac3f2767 | |
| | 6e31676b29 | |
| | 1fff0241f2 | |
| | a2a52dfb21 | |
| | f0ed159a1b | |
| | 9bf7fa0067 | |
| | fbaea0336e | |
| | 1673d3e322 | |
| | c7cca3aa8d | |
| | da46b63a34 | |
| | c882214273 | |
| | 2bacab0402 | |
| | 0681d9a157 | |
| | 887847610e | |
| | 282a01ef76 | |
| | 151c551781 | |
| | abbd1abc1a | |
| | 49c505cc14 | |
| | 237a56a305 | |
| | 7e7e6c2797 | |
| | e67c61aac0 | |
```diff
@@ -51,65 +51,43 @@ setlocal enabledelayedexpansion
 :: subcommands will never start with '-'
 :: everything after the subcommand is an arg
 
 :: we cannot allow batch "for" loop to directly process CL args
 :: a number of batch reserved characters are commonly passed to
 :: spack and allowing batch's "for" method to process the raw inputs
 :: results in a large number of formatting issues
 :: instead, treat the entire CLI as one string
 :: and split by space manually
 :: capture cl args in variable named cl_args
 set cl_args=%*
 
 :process_cl_args
-rem tokens=1* returns the first processed token produced
-rem by tokenizing the input string cl_args on spaces into
-rem the named variable %%g
-rem While this make look like a for loop, it only
-rem executes a single time for each of the cl args
-rem the actual iterative loop is performed by the
-rem goto process_cl_args stanza
-rem we are simply leveraging the "for" method's string
-rem tokenization
-for /f "tokens=1*" %%g in ("%cl_args%") do (
-    set t=%%~g
-    rem remainder of string is composed into %%h
-    rem these are the cl args yet to be processed
-    rem assign cl_args var to only the args to be processed
-    rem effectively discarding the current arg %%g
-    rem this will be nul when we have no further tokens to process
-    set cl_args=%%h
-    rem process the first space delineated cl arg
-    rem of this iteration
-    if "!t:~0,1!" == "-" (
-        if defined _sp_subcommand (
-            rem We already have a subcommand, processing args now
-            if not defined _sp_args (
-                set "_sp_args=!t!"
-            ) else (
-                set "_sp_args=!_sp_args! !t!"
-            )
-        ) else (
-            if not defined _sp_flags (
-                set "_sp_flags=!t!"
-                shift
-            ) else (
-                set "_sp_flags=!_sp_flags! !t!"
-                shift
-            )
-        )
-    ) else if not defined _sp_subcommand (
-        set "_sp_subcommand=!t!"
-        shift
-    ) else (
+rem Set first cl argument (denoted by %1) to be processed
+set t=%1
+rem shift moves all cl positional arguments left by one
+rem meaning %2 is now %1, this allows us to iterate over each
+rem argument
+shift
+rem assign next "first" cl argument to cl_args, will be null when
+rem there are now further arguments to process
+set cl_args=%1
+if "!t:~0,1!" == "-" (
+    if defined _sp_subcommand (
+        rem We already have a subcommand, processing args now
         if not defined _sp_args (
             set "_sp_args=!t!"
-            shift
         ) else (
             set "_sp_args=!_sp_args! !t!"
-            shift
         )
+    ) else (
+        if not defined _sp_flags (
+            set "_sp_flags=!t!"
+        ) else (
+            set "_sp_flags=!_sp_flags! !t!"
+        )
     )
-)
-rem if this is not nil, we have more tokens to process
+) else if not defined _sp_subcommand (
+    set "_sp_subcommand=!t!"
+) else (
+    if not defined _sp_args (
+        set "_sp_args=!t!"
+    ) else (
+        set "_sp_args=!_sp_args! !t!"
+    )
+)
+
+rem if this is not nu;ll, we have more tokens to process
 rem start above process again with remaining unprocessed cl args
 if defined cl_args goto :process_cl_args
```
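The hunk above replaces the batch `for /f` tokenizer with a `%1`/`shift` loop but keeps the same classification rule. As an illustrative aside (not part of the diff), the same rule re-expressed in Python: leading `-` tokens seen before a subcommand are Spack flags, the first non-flag token is the subcommand, and everything after it is an argument to the subcommand.

```python
# Illustrative sketch only: mirrors the flag/subcommand/args split done by
# the batch loop above. Token values are made up for the example.
def classify(tokens):
    flags, subcommand, args = [], None, []
    for t in tokens:
        if t.startswith("-") and subcommand is None:
            flags.append(t)          # global flag before the subcommand
        elif subcommand is None:
            subcommand = t           # first non-flag token is the subcommand
        else:
            args.append(t)           # everything else belongs to the subcommand
    return flags, subcommand, args

print(classify(["-d", "install", "--verbose", "zlib"]))
# (['-d'], 'install', ['--verbose', 'zlib'])
```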
```diff
@@ -36,3 +36,9 @@ concretizer:
   # on each root spec, allowing different versions and variants of the same package in
   # an environment.
   unify: true
+  # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
+  duplicates:
+    # "none": allows a single node for any package in the DAG.
+    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
+    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
+    strategy: none
```
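The new YAML key above is consumed later in this diff (see `_create_counter` in the solver changes near the end), where it is read back by the colon-separated path `concretizer:duplicates:strategy` with a `"none"` default. A minimal sketch of that lookup using plain dicts instead of Spack's config API:

```python
# Sketch only: emulates a colon-path config lookup with a default, as used
# by _create_counter later in this diff. The user_config dict is hypothetical.
user_config = {"concretizer": {"duplicates": {"strategy": "minimal"}}}

def get(path, default=None, config=user_config):
    node = config
    for key in path.split(":"):
        node = node.get(key, {}) if isinstance(node, dict) else {}
    return node or default

print(get("concretizer:duplicates:strategy", "none"))  # -> "minimal"
```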
```diff
@@ -60,7 +60,7 @@ packages:
       xxd: [xxd-standalone, vim]
       yacc: [bison, byacc]
       ziglang: [zig]
-      zlib-api: [zlib, zlib-ng+compat]
+      zlib-api: [zlib-ng+compat, zlib]
     permissions:
       read: world
       write: user
@@ -1,7 +1,7 @@
-sphinx==6.2.1
+sphinx==7.2.2
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.5.0
-sphinx-rtd-theme==1.2.2
+sphinx-rtd-theme==1.3.0
 python-levenshtein==0.21.1
 docutils==0.18.1
 pygments==2.16.1
@@ -10,4 +10,4 @@ pytest==7.4.0
 isort==5.12.0
 black==23.7.0
 flake8==6.1.0
-mypy==1.5.0
+mypy==1.5.1
@@ -1754,9 +1754,14 @@ def find(root, files, recursive=True):
         files = [files]
 
     if recursive:
-        return _find_recursive(root, files)
+        tty.debug(f"Find (recursive): {root} {str(files)}")
+        result = _find_recursive(root, files)
     else:
-        return _find_non_recursive(root, files)
+        tty.debug(f"Find (not recursive): {root} {str(files)}")
+        result = _find_non_recursive(root, files)
 
+    tty.debug(f"Find complete: {root} {str(files)}")
+    return result
 
 
 @system_path_filter
```
```diff
@@ -780,7 +780,7 @@ def __enter__(self):
             raise RuntimeError("file argument must be set by __init__ ")
 
         # Open both write and reading on logfile
-        if type(self.logfile) == io.StringIO:
+        if isinstance(self.logfile, io.StringIO):
            self._ioflag = True
            # cannot have two streams on tempfile, so we must make our own
            sys.stdout = self.logfile
```
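Why the `isinstance()` form in the hunk above is the safer check (illustrative example, not from the diff): an exact `type(x) == C` comparison is `False` for instances of subclasses, while `isinstance()` honors inheritance.

```python
import io

class TaggedLog(io.StringIO):
    """Hypothetical StringIO subclass used only for this example."""

log = TaggedLog()
print(type(log) == io.StringIO)      # False: exact-type check misses subclasses
print(isinstance(log, io.StringIO))  # True: subclass instances still match
```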
```diff
@@ -2383,22 +2383,12 @@ def __init__(self, all_architectures):
 
         self.possible_specs = specs
 
-    def __call__(self, spec, **kwargs):
+    def __call__(self, spec: Spec, **kwargs):
         """
         Args:
-            spec (str): The spec being searched for in its string representation or hash.
+            spec: The spec being searched for
         """
-        matches = []
-        if spec.startswith("/"):
-            # Matching a DAG hash
-            query_hash = spec.replace("/", "")
-            for candidate_spec in self.possible_specs:
-                if candidate_spec.dag_hash().startswith(query_hash):
-                    matches.append(candidate_spec)
-        else:
-            # Matching a spec constraint
-            matches = [s for s in self.possible_specs if s.satisfies(spec)]
-        return matches
+        return [s for s in self.possible_specs if s.satisfies(spec)]
 
 
 class FetchIndexError(Exception):
```
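For context, what the removed branch above did in isolation: a query beginning with `/` is a DAG-hash prefix, matched with `startswith` against each candidate's hash. A standalone sketch (hash values below are made up for illustration):

```python
# Sketch only: prefix-matching a "/hash" query against candidate DAG hashes,
# as the deleted branch did with candidate_spec.dag_hash().startswith(...).
def match_hash_prefix(query, dag_hashes):
    prefix = query.replace("/", "")
    return [h for h in dag_hashes if h.startswith(prefix)]

print(match_hash_prefix("/4daf", ["4dafae8d17", "b2b00df5cc"]))  # ['4dafae8d17']
```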
```diff
@@ -124,9 +124,9 @@ def _read_and_sanitize_configuration() -> Dict[str, Any]:
 def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
     tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
     config_scopes: MutableSequence["spack.config.ConfigScope"] = [
-        spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)
+        spack.config.InternalConfigScope("_builtin", spack.config.CONFIG_DEFAULTS)
     ]
-    configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
+    configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
     for name, path in configuration_paths:
         platform = spack.platforms.host().name
         platform_scope = spack.config.ConfigScope(
@@ -480,11 +480,18 @@ def _add_externals_if_missing() -> None:
         spack.repo.PATH.get_pkg_class("bison"),
         # GnuPG
         spack.repo.PATH.get_pkg_class("gawk"),
+        # develop deps
+        spack.repo.PATH.get_pkg_class("git"),
     ]
     if IS_WINDOWS:
         search_list.append(spack.repo.PATH.get_pkg_class("winbison"))
-    detected_packages = spack.detection.by_executable(search_list)
-    spack.detection.update_configuration(detected_packages, scope="bootstrap")
+    externals = spack.detection.by_executable(search_list)
+    # System git is typically deprecated, so mark as non-buildable to force it as external
+    non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
+    spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)
+    spack.detection.update_configuration(
+        non_buildable_externals, scope="bootstrap", buildable=False
+    )
 
 
 def clingo_root_spec() -> str:
@@ -23,6 +23,7 @@
 
 from ._common import _root_spec
 from .config import root_path, spec_for_current_python, store_path
+from .core import _add_externals_if_missing
 
 
 class BootstrapEnvironment(spack.environment.Environment):
@@ -185,6 +186,7 @@ def pytest_root_spec() -> str:
 
 def ensure_environment_dependencies() -> None:
     """Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
+    _add_externals_if_missing()
     with BootstrapEnvironment() as env:
         env.update_installations()
         env.update_syspath_and_environ()
```
```diff
@@ -1027,7 +1027,7 @@ def get_cmake_prefix_path(pkg):
 
 
 def _setup_pkg_and_run(
-    serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
+    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
 ):
     context = kwargs.get("context", "build")
 
@@ -1048,12 +1048,12 @@ def _setup_pkg_and_run(
             pkg, dirty=kwargs.get("dirty", False), context=context
         )
         return_value = function(pkg, kwargs)
-        child_pipe.send(return_value)
+        write_pipe.send(return_value)
 
     except StopPhase as e:
         # Do not create a full ChildError from this, it's not an error
         # it's a control statement.
-        child_pipe.send(e)
+        write_pipe.send(e)
     except BaseException:
         # catch ANYTHING that goes wrong in the child process
         exc_type, exc, tb = sys.exc_info()
@@ -1102,10 +1102,10 @@ def _setup_pkg_and_run(
             context,
             package_context,
         )
-        child_pipe.send(ce)
+        write_pipe.send(ce)
 
     finally:
-        child_pipe.close()
+        write_pipe.close()
         if input_multiprocess_fd is not None:
             input_multiprocess_fd.close()
 
@@ -1149,7 +1149,7 @@ def child_fun():
    For more information on `multiprocessing` child process creation
    mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
    """
-    parent_pipe, child_pipe = multiprocessing.Pipe()
+    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    input_multiprocess_fd = None
    jobserver_fd1 = None
    jobserver_fd2 = None
@@ -1174,7 +1174,7 @@ def child_fun():
            serialized_pkg,
            function,
            kwargs,
-            child_pipe,
+            write_pipe,
            input_multiprocess_fd,
            jobserver_fd1,
            jobserver_fd2,
@@ -1183,6 +1183,12 @@ def child_fun():
 
        p.start()
 
+        # We close the writable end of the pipe now to be sure that p is the
+        # only process which owns a handle for it. This ensures that when p
+        # closes its handle for the writable end, read_pipe.recv() will
+        # promptly report the readable end as being ready.
+        write_pipe.close()
+
    except InstallError as e:
        e.pkg = pkg
        raise
@@ -1192,7 +1198,16 @@ def child_fun():
 
    if input_multiprocess_fd is not None:
        input_multiprocess_fd.close()
 
-    child_result = parent_pipe.recv()
+    def exitcode_msg(p):
+        typ = "exit" if p.exitcode >= 0 else "signal"
+        return f"{typ} {abs(p.exitcode)}"
+
+    try:
+        child_result = read_pipe.recv()
+    except EOFError:
+        p.join()
+        raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
+
    p.join()
 
    # If returns a StopPhase, raise it
@@ -1212,6 +1227,10 @@ def child_fun():
        child_result.print_context()
        raise child_result
 
+    # Fallback. Usually caught beforehand in EOFError above.
+    if p.exitcode != 0:
+        raise InstallError(f"The process failed unexpectedly ({exitcode_msg(p)})")
+
    return child_result
 
 
```
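The hunks above all implement one pattern: a one-way pipe, the parent closing its copy of the write end immediately after `start()`, and `EOFError` on the read end signaling that the child died without ever sending a result. A minimal self-contained sketch of that pattern (names here are illustrative, not Spack's):

```python
# Sketch of the duplex=False pipe pattern adopted above.
import multiprocessing

def worker(write_pipe):
    write_pipe.send("ok")
    write_pipe.close()

if __name__ == "__main__":
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    p = multiprocessing.Process(target=worker, args=(write_pipe,))
    p.start()
    write_pipe.close()  # parent's handle: the child now owns the only writer
    try:
        result = read_pipe.recv()
    except EOFError:
        # child exited without sending anything
        p.join()
        raise RuntimeError(f"child stopped unexpectedly (exit {p.exitcode})")
    p.join()
    print(result)
```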
```diff
@@ -55,7 +55,8 @@ def flags_to_build_system_args(self, flags):
         setattr(self, "configure_flag_args", [])
         for flag, values in flags.items():
             if values:
-                values_str = "{0}={1}".format(flag.upper(), " ".join(values))
+                var_name = "LIBS" if flag == "ldlibs" else flag.upper()
+                values_str = "{0}={1}".format(var_name, " ".join(values))
                 self.configure_flag_args.append(values_str)
         # Spack's fflags are meant for both F77 and FC, therefore we
         # additionaly set FCFLAGS if required.
```
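The effect of the one-line change above, shown in isolation (illustrative flag values): the `ldlibs` flag group now lands in the conventional Autotools `LIBS` variable rather than an uppercased `LDLIBS` one.

```python
# Sketch only: reproduces the mapping from the hunk above on sample input.
flags = {"cflags": ["-O2"], "ldlibs": ["-lfoo", "-lbar"]}

configure_args = []
for flag, values in flags.items():
    if values:
        var_name = "LIBS" if flag == "ldlibs" else flag.upper()
        configure_args.append("{0}={1}".format(var_name, " ".join(values)))

print(configure_args)  # ['CFLAGS=-O2', 'LIBS=-lfoo -lbar']
```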
```diff
@@ -20,9 +20,9 @@
 
 
 def misc_cache_location():
-    """The ``misc_cache`` is Spack's cache for small data.
+    """The ``MISC_CACHE`` is Spack's cache for small data.
 
-    Currently the ``misc_cache`` stores indexes for virtual dependency
+    Currently the ``MISC_CACHE`` stores indexes for virtual dependency
     providers and for which packages provide which tags.
     """
     path = spack.config.get("config:misc_cache", spack.paths.default_misc_cache_path)
@@ -35,7 +35,7 @@ def _misc_cache():
 
 
 #: Spack's cache for small data
-misc_cache: Union[
+MISC_CACHE: Union[
     spack.util.file_cache.FileCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_misc_cache)
 
@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):
 
 
 #: Spack's local cache for downloaded source archives
-fetch_cache: Union[
+FETCH_CACHE: Union[
     spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_fetch_cache)
```
```diff
@@ -69,11 +69,10 @@
 
 def _add_scope_option(parser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
     parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )
 
@@ -170,7 +169,7 @@ def _reset(args):
     if not ok_to_continue:
         raise RuntimeError("Aborting")
 
-    for scope in spack.config.config.file_scopes:
+    for scope in spack.config.CONFIG.file_scopes:
         # The default scope should stay untouched
         if scope.name == "defaults":
             continue
@@ -187,7 +186,7 @@ def _reset(args):
         if os.path.exists(bootstrap_yaml):
             shutil.move(bootstrap_yaml, backup_file)
 
-    spack.config.config.clear_caches()
+    spack.config.CONFIG.clear_caches()
 
 
 def _root(args):
```
```diff
@@ -149,12 +149,11 @@ def setup_parser(subparser: argparse.ArgumentParser):
 
     # used to construct scope arguments below
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
 
     check.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope containing mirrors to check",
     )
@@ -118,7 +118,7 @@ def clean(parser, args):
 
     if args.downloads:
         tty.msg("Removing cached downloads")
-        spack.caches.fetch_cache.destroy()
+        spack.caches.FETCH_CACHE.destroy()
 
     if args.failures:
         tty.msg("Removing install failure marks")
@@ -126,7 +126,7 @@ def clean(parser, args):
 
     if args.misc_cache:
         tty.msg("Removing cached information on repositories")
-        spack.caches.misc_cache.destroy()
+        spack.caches.MISC_CACHE.destroy()
 
     if args.python_cache:
         tty.msg("Removing python cache files")
```
```diff
@@ -24,7 +24,6 @@ def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="compiler_command")
 
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
 
     # Find
     find_parser = sp.add_parser(
@@ -36,7 +35,7 @@ def setup_parser(subparser):
     find_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope("compilers"),
         help="configuration scope to modify",
     )
@@ -50,7 +49,7 @@ def setup_parser(subparser):
     remove_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=None,
         help="configuration scope to modify",
     )
@@ -60,7 +59,7 @@ def setup_parser(subparser):
     list_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )
@@ -71,7 +70,7 @@ def setup_parser(subparser):
     info_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )
@@ -93,7 +92,7 @@ def compiler_find(args):
     n = len(new_compilers)
     s = "s" if n > 1 else ""
 
-    config = spack.config.config
+    config = spack.config.CONFIG
     filename = config.get_config_filename(args.scope, "compilers")
     tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
     colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
```
```diff
@@ -13,12 +13,11 @@
 
 def setup_parser(subparser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
 
     subparser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )
 
@@ -27,13 +27,12 @@
 
 def setup_parser(subparser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
 
     # User can only choose one
     subparser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )
 
@@ -45,7 +44,7 @@ def setup_parser(subparser):
         help="configuration section to print\n\noptions: %(choices)s",
         nargs="?",
         metavar="section",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )
 
     blame_parser = sp.add_parser(
@@ -55,7 +54,7 @@ def setup_parser(subparser):
         "section",
         help="configuration section to print\n\noptions: %(choices)s",
         metavar="section",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )
 
     edit_parser = sp.add_parser("edit", help="edit configuration file")
@@ -64,7 +63,7 @@ def setup_parser(subparser):
         help="configuration section to edit\n\noptions: %(choices)s",
         metavar="section",
         nargs="?",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )
     edit_parser.add_argument(
         "--print-file", action="store_true", help="print the file name that would be edited"
@@ -146,10 +145,10 @@ def config_get(args):
     scope, section = _get_scope_and_section(args)
 
     if section is not None:
-        spack.config.config.print_section(section)
+        spack.config.CONFIG.print_section(section)
 
     elif scope and scope.startswith("env:"):
-        config_file = spack.config.config.get_config_filename(scope, section)
+        config_file = spack.config.CONFIG.get_config_filename(scope, section)
         if os.path.exists(config_file):
             with open(config_file) as f:
                 print(f.read())
@@ -162,7 +161,7 @@ def config_get(args):
 
 def config_blame(args):
     """Print out line-by-line blame of merged YAML."""
-    spack.config.config.print_section(args.section, blame=True)
+    spack.config.CONFIG.print_section(args.section, blame=True)
 
 
 def config_edit(args):
@@ -181,7 +180,7 @@ def config_edit(args):
     scope, section = _get_scope_and_section(args)
     if not scope and not section:
         tty.die("`spack config edit` requires a section argument or an active environment.")
-    config_file = spack.config.config.get_config_filename(scope, section)
+    config_file = spack.config.CONFIG.get_config_filename(scope, section)
 
     if args.print_file:
         print(config_file)
@@ -194,7 +193,7 @@ def config_list(args):
 
     Used primarily for shell tab completion scripts.
     """
-    print(" ".join(list(spack.config.section_schemas)))
+    print(" ".join(list(spack.config.SECTION_SCHEMAS)))
 
 
 def config_add(args):
@@ -251,19 +250,19 @@ def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):
 
 def config_update(args):
     # Read the configuration files
-    spack.config.config.get_config(args.section, scope=args.scope)
+    spack.config.CONFIG.get_config(args.section, scope=args.scope)
     updates: List[spack.config.ConfigScope] = list(
         filter(
             lambda s: not isinstance(
                 s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
             ),
-            spack.config.config.format_updates[args.section],
+            spack.config.CONFIG.format_updates[args.section],
         )
     )
 
     cannot_overwrite, skip_system_scope = [], False
     for scope in updates:
-        cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
         can_be_updated = _can_update_config_file(scope, cfg_file)
         if not can_be_updated:
             if scope.name == "system":
@@ -302,7 +301,7 @@ def config_update(args):
             " the latest schema format:\n\n"
         )
         for scope in updates:
-            cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
+            cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
             msg += "\t[scope={0}, file={1}]\n".format(scope.name, cfg_file)
         msg += (
             "\nIf the configuration files are updated, versions of Spack "
@@ -325,7 +324,7 @@ def config_update(args):
         # Make a backup copy and rewrite the file
         bkp_file = cfg_file + ".bkp"
         shutil.copy(cfg_file, bkp_file)
-        spack.config.config.update_config(args.section, data, scope=scope.name, force=True)
+        spack.config.CONFIG.update_config(args.section, data, scope=scope.name, force=True)
         tty.msg(f'File "{cfg_file}" update [backup={bkp_file}]')
 
 
@@ -337,13 +336,13 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):
 
 
 def config_revert(args):
-    scopes = [args.scope] if args.scope else [x.name for x in spack.config.config.file_scopes]
+    scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]
 
     # Search for backup files in the configuration scopes
     Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])
     to_be_restored, cannot_overwrite = [], []
     for scope in scopes:
-        cfg_file = spack.config.config.get_config_filename(scope, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope, args.section)
         bkp_file = cfg_file + ".bkp"
 
         # If the backup files doesn't exist move to the next scope
@@ -457,7 +456,7 @@ def config_prefer_upstream(args):
     existing = spack.config.get("packages", scope=scope)
     new = spack.config.merge_yaml(existing, pkgs)
     spack.config.set("packages", new, scope)
-    config_file = spack.config.config.get_config_filename(scope, section)
+    config_file = spack.config.CONFIG.get_config_filename(scope, section)
 
     tty.msg("Updated config at {0}".format(config_file))
 
```
```diff
@@ -13,6 +13,7 @@
 import spack
 import spack.cmd
+import spack.cmd.common.arguments
 import spack.config
 import spack.cray_manifest as cray_manifest
 import spack.detection
 import spack.error
@@ -27,7 +28,6 @@ def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="external_command")
 
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
 
     find_parser = sp.add_parser("find", help="add external packages to packages.yaml")
     find_parser.add_argument(
@@ -47,7 +47,7 @@ def setup_parser(subparser):
     find_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope("packages"),
         help="configuration scope to modify",
     )
@@ -165,7 +165,7 @@ def external_find(args):
         detected_packages, scope=args.scope, buildable=not args.not_buildable
     )
     if new_entries:
-        path = spack.config.config.get_config_filename(args.scope, "packages")
+        path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
         msg = "The following specs have been detected on this system and added to {0}"
         tty.msg(msg.format(path))
         spack.cmd.display_specs(new_entries)
@@ -90,7 +90,6 @@ def setup_parser(subparser):
 
     # used to construct scope arguments below
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
 
     # Add
     add_parser = sp.add_parser("add", help=mirror_add.__doc__)
@@ -99,7 +98,7 @@ def setup_parser(subparser):
     add_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -119,7 +118,7 @@ def setup_parser(subparser):
     remove_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -138,7 +137,7 @@ def setup_parser(subparser):
     set_url_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -167,7 +166,7 @@ def setup_parser(subparser):
     set_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -178,7 +177,7 @@ def setup_parser(subparser):
     list_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )
@@ -444,7 +443,7 @@ def mirror_create(args):
     )
 
     # When no directory is provided, the source dir is used
-    path = args.directory or spack.caches.fetch_cache_location()
+    path = args.directory or spack.caches.FETCH_CACHE_location()
 
     if args.all and not ev.active_environment():
         create_mirror_for_all_specs(
```
```diff
@@ -20,7 +20,6 @@
 def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="repo_command")
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
 
     # Create
     create_parser = sp.add_parser("create", help=repo_create.__doc__)
@@ -45,7 +44,7 @@ def setup_parser(subparser):
     list_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )
@@ -56,7 +55,7 @@ def setup_parser(subparser):
     add_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -69,7 +68,7 @@ def setup_parser(subparser):
     remove_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
 
@@ -209,12 +209,11 @@ def unit_test(parser, args, unknown_args):
     # mock configuration used by unit tests
-    # Note: skip on windows here because for the moment,
-    # clingo is wholly unsupported from bootstrap
-    if sys.platform != "win32":
-        with spack.bootstrap.ensure_bootstrap_configuration():
-            spack.bootstrap.ensure_core_dependencies()
-            if pytest is None:
-                spack.bootstrap.ensure_environment_dependencies()
-                import pytest
+    with spack.bootstrap.ensure_bootstrap_configuration():
+        spack.bootstrap.ensure_core_dependencies()
+        if pytest is None:
+            spack.bootstrap.ensure_environment_dependencies()
+            import pytest
 
     if args.pytest_help:
         # make the pytest.main help output more accurate
```
```diff
@@ -135,7 +135,7 @@ def _init_compiler_config(*, scope):
 
 def compiler_config_files():
     config_files = list()
-    config = spack.config.config
+    config = spack.config.CONFIG
     for scope in config.file_scopes:
         name = scope.name
         compiler_config = config.get("compilers", scope=name)
@@ -169,7 +169,7 @@ def remove_compiler_from_config(compiler_spec, scope=None):
     """
     candidate_scopes = [scope]
     if scope is None:
-        candidate_scopes = spack.config.config.scopes.keys()
+        candidate_scopes = spack.config.CONFIG.scopes.keys()
 
     removal_happened = False
     for current_scope in candidate_scopes:
@@ -523,7 +523,7 @@ def compiler_for_spec(compiler_spec, arch_spec):
 
 @_auto_compiler_spec
 def get_compiler_duplicates(compiler_spec, arch_spec):
-    config = spack.config.config
+    config = spack.config.CONFIG
 
     scope_to_compilers = {}
     for scope in config.scopes:
@@ -28,6 +28,7 @@
 
 import spack.abi
 import spack.compilers
+import spack.config
 import spack.environment
 import spack.error
 import spack.platforms
@@ -37,7 +38,6 @@
 import spack.tengine
 import spack.util.path
 import spack.variant as vt
-from spack.config import config
 from spack.package_prefs import PackagePrefs, is_spec_buildable, spec_externals
 from spack.version import ClosedOpenRange, VersionList, ver
 
@@ -76,7 +76,7 @@ class Concretizer:
 
     def __init__(self, abstract_spec=None):
         if Concretizer.check_for_compiler_existence is None:
-            Concretizer.check_for_compiler_existence = not config.get(
+            Concretizer.check_for_compiler_existence = not spack.config.get(
                 "config:install_missing_compilers", False
             )
         self.abstract_spec = abstract_spec
```
```diff
@@ -47,6 +47,8 @@
 import spack.platforms
 import spack.schema
+import spack.schema.bootstrap
 import spack.schema.cdash
+import spack.schema.ci
 import spack.schema.compilers
 import spack.schema.concretizer
 import spack.schema.config
@@ -64,7 +66,7 @@
 from spack.util.cpus import cpus_available
 
 #: Dict from section names -> schema for that section
-section_schemas = {
+SECTION_SCHEMAS = {
     "compilers": spack.schema.compilers.schema,
     "concretizer": spack.schema.concretizer.schema,
     "mirrors": spack.schema.mirrors.schema,
@@ -80,16 +82,16 @@
 
 # Same as above, but including keys for environments
 # this allows us to unify config reading between configs and environments
-all_schemas = copy.deepcopy(section_schemas)
-all_schemas.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
+_ALL_SCHEMAS = copy.deepcopy(SECTION_SCHEMAS)
+_ALL_SCHEMAS.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
 
 #: Path to the default configuration
-configuration_defaults_path = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
+CONFIGURATION_DEFAULTS_PATH = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
 
 #: Hard-coded default values for some key configuration options.
 #: This ensures that Spack will still work even if config.yaml in
 #: the defaults scope is removed.
-config_defaults = {
+CONFIG_DEFAULTS = {
     "config": {
         "debug": False,
         "connect_timeout": 10,
@@ -105,10 +107,10 @@
 
 #: metavar to use for commands that accept scopes
 #: this is shorter and more readable than listing all choices
-scopes_metavar = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
+SCOPES_METAVAR = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
 
 #: Base name for the (internal) overrides scope.
-overrides_base_name = "overrides-"
+_OVERRIDES_BASE_NAME = "overrides-"
 
 
 class ConfigScope:
@@ -134,7 +136,7 @@ def get_section_filename(self, section):
     def get_section(self, section):
         if section not in self.sections:
             path = self.get_section_filename(section)
-            schema = section_schemas[section]
+            schema = SECTION_SCHEMAS[section]
             data = read_config_file(path, schema)
             self.sections[section] = data
         return self.sections[section]
@@ -145,7 +147,7 @@ def _write_section(self, section):
 
         # We copy data here to avoid adding defaults at write time
         validate_data = copy.deepcopy(data)
-        validate(validate_data, section_schemas[section])
+        validate(validate_data, SECTION_SCHEMAS[section])
 
         try:
             mkdirp(self.path)
@@ -317,7 +319,7 @@ def __init__(self, name, data=None):
             data = InternalConfigScope._process_dict_keyname_overrides(data)
             for section in data:
                 dsec = data[section]
-                validate({section: dsec}, section_schemas[section])
+                validate({section: dsec}, SECTION_SCHEMAS[section])
                 self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)
 
     def get_section_filename(self, section):
@@ -333,7 +335,7 @@ def _write_section(self, section):
         """This only validates, as the data is already in memory."""
         data = self.get_section(section)
         if data is not None:
-            validate(data, section_schemas[section])
+            validate(data, SECTION_SCHEMAS[section])
             self.sections[section] = _mark_internal(data, self.name)
 
     def __repr__(self):
@@ -430,7 +432,7 @@ def file_scopes(self) -> List[ConfigScope]:
         return [
             s
             for s in self.scopes.values()
-            if (type(s) == ConfigScope or type(s) == SingleFileScope)
+            if (type(s) is ConfigScope or type(s) is SingleFileScope)
         ]
 
     def highest_precedence_scope(self) -> ConfigScope:
@@ -711,11 +713,11 @@ def override(path_or_scope, value=None):
     """
     if isinstance(path_or_scope, ConfigScope):
         overrides = path_or_scope
-        config.push_scope(path_or_scope)
+        CONFIG.push_scope(path_or_scope)
     else:
-        base_name = overrides_base_name
+        base_name = _OVERRIDES_BASE_NAME
         # Ensure the new override gets a unique scope name
-        current_overrides = [s.name for s in config.matching_scopes(r"^{0}".format(base_name))]
+        current_overrides = [s.name for s in CONFIG.matching_scopes(r"^{0}".format(base_name))]
         num_overrides = len(current_overrides)
         while True:
             scope_name = "{0}{1}".format(base_name, num_overrides)
@@ -725,19 +727,19 @@ def override(path_or_scope, value=None):
                 break
 
         overrides = InternalConfigScope(scope_name)
-        config.push_scope(overrides)
-        config.set(path_or_scope, value, scope=scope_name)
+        CONFIG.push_scope(overrides)
+        CONFIG.set(path_or_scope, value, scope=scope_name)
 
     try:
-        yield config
+        yield CONFIG
     finally:
-        scope = config.remove_scope(overrides.name)
+        scope = CONFIG.remove_scope(overrides.name)
         assert scope is overrides
 
 
 #: configuration scopes added on the command line
 #: set by ``spack.main.main()``.
-command_line_scopes: List[str] = []
+COMMAND_LINE_SCOPES: List[str] = []
 
 
 def _add_platform_scope(cfg, scope_type, name, path):
```
```diff
@@ -781,14 +783,14 @@ def create():
     cfg = Configuration()
 
     # first do the builtin, hardcoded defaults
-    builtin = InternalConfigScope("_builtin", config_defaults)
+    builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
     cfg.push_scope(builtin)
 
     # Builtin paths to configuration files in Spack
     configuration_paths = [
         # Default configuration scope is the lowest-level scope. These are
         # versioned with Spack and can be overridden by systems, sites or users
-        configuration_defaults_path
+        CONFIGURATION_DEFAULTS_PATH
     ]
 
     disable_local_config = "SPACK_DISABLE_LOCAL_CONFIG" in os.environ
@@ -815,7 +817,7 @@ def create():
         _add_platform_scope(cfg, ConfigScope, name, path)
 
     # add command-line scopes
-    _add_command_line_scopes(cfg, command_line_scopes)
+    _add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)
 
     # we make a special scope for spack commands so that they can
     # override configuration options.
@@ -825,7 +827,7 @@ def create():
 
 
 #: This is the singleton configuration instance for Spack.
-config: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
+CONFIG: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
 
 
 def add_from_file(filename, scope=None):
@@ -838,7 +840,7 @@ def add_from_file(filename, scope=None):
     # update all sections from config dict
     # We have to iterate on keys to keep overrides from the file
     for section in data.keys():
-        if section in section_schemas.keys():
+        if section in SECTION_SCHEMAS.keys():
             # Special handling for compiler scope difference
             # Has to be handled after we choose a section
             if scope is None:
@@ -849,7 +851,7 @@ def add_from_file(filename, scope=None):
             new = merge_yaml(existing, value)
 
             # We cannot call config.set directly (set is a type)
-            config.set(section, new, scope)
+            CONFIG.set(section, new, scope)
 
 
 def add(fullpath, scope=None):
@@ -897,12 +899,12 @@ def add(fullpath, scope=None):
 
     # merge value into existing
     new = merge_yaml(existing, value)
-    config.set(path, new, scope)
+    CONFIG.set(path, new, scope)
 
 
 def get(path, default=None, scope=None):
     """Module-level wrapper for ``Configuration.get()``."""
-    return config.get(path, default, scope)
+    return CONFIG.get(path, default, scope)
 
 
 def set(path, value, scope=None):
@@ -910,26 +912,26 @@ def set(path, value, scope=None):
 
     Accepts the path syntax described in ``get()``.
     """
-    return config.set(path, value, scope)
+    return CONFIG.set(path, value, scope)
 
 
 def add_default_platform_scope(platform):
     plat_name = os.path.join("defaults", platform)
-    plat_path = os.path.join(configuration_defaults_path[1], platform)
-    config.push_scope(ConfigScope(plat_name, plat_path))
+    plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
+    CONFIG.push_scope(ConfigScope(plat_name, plat_path))
 
 
 def scopes():
     """Convenience function to get list of configuration scopes."""
-    return config.scopes
+    return CONFIG.scopes
 
 
 def _validate_section_name(section):
     """Exit if the section is not a valid section."""
-    if section not in section_schemas:
+    if section not in SECTION_SCHEMAS:
         raise ConfigSectionError(
             "Invalid config section: '%s'. Options are: %s"
-            % (section, " ".join(section_schemas.keys()))
+            % (section, " ".join(SECTION_SCHEMAS.keys()))
         )
@@ -990,7 +992,7 @@ def read_config_file(filename, schema=None):
     if data:
         if not schema:
             key = next(iter(data))
-            schema = all_schemas[key]
+            schema = _ALL_SCHEMAS[key]
         validate(data, schema)
         return data
 
@@ -1089,7 +1091,7 @@ def get_valid_type(path):
             test_data = {component: test_data}
 
         try:
-            validate(test_data, section_schemas[section])
+            validate(test_data, SECTION_SCHEMAS[section])
         except (ConfigFormatError, AttributeError) as e:
             jsonschema_error = e.validation_error
             if jsonschema_error.validator == "type":
@@ -1278,9 +1280,9 @@ def default_modify_scope(section="config"):
     If this is not 'compilers', a general (non-platform) scope is used.
     """
     if section == "compilers":
-        return spack.config.config.highest_precedence_scope().name
+        return CONFIG.highest_precedence_scope().name
     else:
-        return spack.config.config.highest_precedence_non_platform_scope().name
+        return CONFIG.highest_precedence_non_platform_scope().name
 
 
 def default_list_scope():
@@ -1337,18 +1339,18 @@ def use_configuration(*scopes_or_paths):
     Returns:
         Configuration object associated with the scopes passed as arguments
     """
-    global config
+    global CONFIG
 
     # Normalize input and construct a Configuration object
     configuration = _config_from(scopes_or_paths)
-    config.clear_caches(), configuration.clear_caches()
+    CONFIG.clear_caches(), configuration.clear_caches()
 
-    saved_config, config = config, configuration
+    saved_config, CONFIG = CONFIG, configuration
 
     try:
         yield configuration
     finally:
-        config = saved_config
+        CONFIG = saved_config
 
 
 @llnl.util.lang.memoized
```
```diff
@@ -5,8 +5,8 @@
 """Writers for different kind of recipes and related
 convenience functions.
 """
-import collections
 import copy
+from collections import namedtuple
 from typing import Optional
 
 import spack.environment as ev
@@ -159,13 +159,13 @@ def depfile(self):
     @tengine.context_property
     def run(self):
         """Information related to the run image."""
-        Run = collections.namedtuple("Run", ["image"])
+        Run = namedtuple("Run", ["image"])
         return Run(image=self.final_image)
 
     @tengine.context_property
     def build(self):
         """Information related to the build image."""
-        Build = collections.namedtuple("Build", ["image"])
+        Build = namedtuple("Build", ["image"])
         return Build(image=self.build_image)
 
     @tengine.context_property
@@ -176,12 +176,13 @@ def strip(self):
     @tengine.context_property
     def paths(self):
         """Important paths in the image"""
-        Paths = collections.namedtuple("Paths", ["environment", "store", "hidden_view", "view"])
+        Paths = namedtuple("Paths", ["environment", "store", "view_parent", "view", "former_view"])
         return Paths(
             environment="/opt/spack-environment",
             store="/opt/software",
-            hidden_view="/opt/._view",
-            view="/opt/view",
+            view_parent="/opt/views",
+            view="/opt/views/view",
+            former_view="/opt/view",  # /opt/view -> /opt/views/view for backward compatibility
         )
 
     @tengine.context_property
@@ -257,7 +258,7 @@ def _package_info_from(self, package_list):
 
         update, install, clean = commands_for(os_pkg_manager)
 
-        Packages = collections.namedtuple("Packages", ["update", "install", "list", "clean"])
+        Packages = namedtuple("Packages", ["update", "install", "list", "clean"])
         return Packages(update=update, install=install, list=package_list, clean=clean)
 
     def _os_pkg_manager(self):
@@ -273,7 +274,7 @@ def _os_pkg_manager(self):
 
     @tengine.context_property
     def extra_instructions(self):
-        Extras = collections.namedtuple("Extra", ["build", "final"])
+        Extras = namedtuple("Extra", ["build", "final"])
         extras = self.container_config.get("extra_instructions", {})
         build, final = extras.get("build", None), extras.get("final", None)
         return Extras(build=build, final=final)
@@ -295,7 +296,7 @@ def bootstrap(self):
         context = {"bootstrap": {"image": self.bootstrap_image, "spack_checkout": command}}
         bootstrap_recipe = env.get_template(template_path).render(**context)
 
-        Bootstrap = collections.namedtuple("Bootstrap", ["image", "recipe"])
+        Bootstrap = namedtuple("Bootstrap", ["image", "recipe"])
         return Bootstrap(image=self.bootstrap_image, recipe=bootstrap_recipe)
 
     @tengine.context_property
@@ -303,7 +304,7 @@ def render_phase(self):
         render_bootstrap = bool(self.bootstrap_image)
         render_build = not (self.last_phase == "bootstrap")
         render_final = self.last_phase in (None, "final")
-        Render = collections.namedtuple("Render", ["bootstrap", "build", "final"])
+        Render = namedtuple("Render", ["bootstrap", "build", "final"])
         return Render(bootstrap=render_bootstrap, build=render_build, final=render_final)
 
     def __call__(self):
```
```diff
@@ -11,6 +11,7 @@
 import shutil
 import sys
 from contextlib import contextmanager
+from pathlib import Path
 
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -104,7 +105,7 @@ def relative_path_for_spec(self, spec):
 
         projection = spack.projections.get_projection(self.projections, spec)
         path = spec.format(projection)
-        return path
+        return str(Path(path))
 
     def write_spec(self, spec, path):
         """Write a spec out to a file."""
@@ -1994,14 +1994,10 @@ def get_one_by_hash(self, dag_hash):
 
     def all_matching_specs(self, *specs: spack.spec.Spec) -> List[Spec]:
         """Returns all concretized specs in the environment satisfying any of the input specs"""
-        # Look up abstract hashes ahead of time, to avoid O(n^2) traversal.
-        specs = [s.lookup_hash() for s in specs]
-
-        # Avoid double lookup by directly calling _satisfies.
         return [
             s
             for s in traverse.traverse_nodes(self.concrete_roots(), key=traverse.by_dag_hash)
-            if any(s._satisfies(t) for t in specs)
+            if any(s.satisfies(t) for t in specs)
         ]
 
     @spack.repo.autospec
@@ -2448,13 +2444,13 @@ def make_repo_path(root):
 
 def prepare_config_scope(env):
     """Add env's scope to the global configuration search path."""
     for scope in env.config_scopes():
-        spack.config.config.push_scope(scope)
+        spack.config.CONFIG.push_scope(scope)
 
 
 def deactivate_config_scope(env):
     """Remove any scopes from env from the global config path."""
     for scope in env.config_scopes():
-        spack.config.config.remove_scope(scope.name)
+        spack.config.CONFIG.remove_scope(scope.name)
 
 
 def manifest_file(env_name_or_dir):
```
```diff
@@ -602,10 +602,10 @@ def setup_main_options(args):
 
         key = syaml.syaml_str("repos")
         key.override = True
-        spack.config.config.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
+        spack.config.CONFIG.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
             [(key, [spack.paths.mock_packages_path])]
         )
-        spack.repo.PATH = spack.repo.create(spack.config.config)
+        spack.repo.PATH = spack.repo.create(spack.config.CONFIG)
 
     # If the user asked for it, don't check ssl certs.
     if args.insecure:
@@ -930,7 +930,7 @@ def _main(argv=None):
 
     # make spack.config aware of any command line configuration scopes
     if args.config_scopes:
-        spack.config.command_line_scopes = args.config_scopes
+        spack.config.COMMAND_LINE_SCOPES = args.config_scopes
 
     # ensure options on spack command come before everything
     setup_main_options(args)
@@ -833,7 +833,7 @@ def ensure_modules_are_enabled_or_warn():
         return
 
     # Check if we have custom TCL module sections
-    for scope in spack.config.config.file_scopes:
+    for scope in spack.config.CONFIG.file_scopes:
         # Skip default configuration
         if scope.name.startswith("default"):
             continue
@@ -236,7 +236,7 @@ def install(self, prefix):
 
         # Create a multimethod with this name if there is not one already
         original_method = MultiMethodMeta._locals.get(method.__name__)
-        if not type(original_method) == SpecMultiMethod:
+        if not isinstance(original_method, SpecMultiMethod):
             original_method = SpecMultiMethod(original_method)
 
         if self.spec is not None:
```
```diff
@@ -10,11 +10,12 @@
 dependencies.
 """
 import os
+from pathlib import PurePath
 
 import llnl.util.filesystem
 
 #: This file lives in $prefix/lib/spack/spack/__file__
-prefix = llnl.util.filesystem.ancestor(__file__, 4)
+prefix = str(PurePath(llnl.util.filesystem.ancestor(__file__, 4)))
 
 #: synonym for prefix
 spack_root = prefix
@@ -88,7 +89,7 @@ def _get_user_cache_path():
     return os.path.expanduser(os.getenv("SPACK_USER_CACHE_PATH") or "~%s.spack" % os.sep)
 
 
-user_cache_path = _get_user_cache_path()
+user_cache_path = str(PurePath(_get_user_cache_path()))
 
 #: junit, cdash, etc. reports about builds
 reports_path = os.path.join(user_cache_path, "reports")
```
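Why wrapping these paths in `str(PurePath(...))` helps (illustrative example, not from the diff): a pure path normalizes separators for its flavor, so forward-slash paths become backslashed under the Windows flavor while staying unchanged under POSIX.

```python
# Sketch only: pure paths render with their flavor's native separator.
from pathlib import PurePosixPath, PureWindowsPath

print(str(PureWindowsPath("~/.spack/reports")))  # ~\.spack\reports
print(str(PurePosixPath("~/.spack/reports")))    # ~/.spack/reports
```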
@@ -64,7 +64,7 @@ def use_platform(new_platform):
|
||||
host = _PickleableCallable(new_platform)
|
||||
|
||||
# Clear configuration and compiler caches
|
||||
spack.config.config.clear_caches()
|
||||
spack.config.CONFIG.clear_caches()
|
||||
spack.compilers._cache_config_files = []
|
||||
|
||||
yield new_platform
|
||||
@@ -73,5 +73,5 @@ def use_platform(new_platform):
|
||||
host = original_host_fn
|
||||
|
||||
# Clear configuration and compiler caches
|
||||
spack.config.config.clear_caches()
|
||||
spack.config.CONFIG.clear_caches()
|
||||
spack.compilers._cache_config_files = []
|
||||
|
||||
@@ -647,7 +647,7 @@ class RepoPath:
|
||||
"""
|
||||
|
||||
def __init__(self, *repos, **kwargs):
|
||||
cache = kwargs.get("cache", spack.caches.misc_cache)
|
||||
cache = kwargs.get("cache", spack.caches.MISC_CACHE)
|
||||
self.repos = []
|
||||
self.by_namespace = nm.NamespaceTrie()
|
||||
|
||||
@@ -966,7 +966,7 @@ def check(condition, msg):
|
||||
|
||||
# Indexes for this repository, computed lazily
|
||||
self._repo_index = None
|
||||
self._cache = cache or spack.caches.misc_cache
|
||||
self._cache = cache or spack.caches.MISC_CACHE
|
||||
|
||||
def real_name(self, import_name):
|
||||
"""Allow users to import Spack packages using Python identifiers.
|
||||
@@ -1357,7 +1357,7 @@ def create_or_construct(path, namespace=None):
|
||||
|
||||
def _path(configuration=None):
|
||||
"""Get the singleton RepoPath instance for Spack."""
|
||||
configuration = configuration or spack.config.config
|
||||
configuration = configuration or spack.config.CONFIG
|
||||
return create(configuration=configuration)
|
||||
|
||||
|
||||
@@ -1404,14 +1404,14 @@ def use_repositories(*paths_and_repos, **kwargs):
|
||||
paths = [getattr(x, "root", x) for x in paths_and_repos]
|
||||
scope_name = "use-repo-{}".format(uuid.uuid4())
|
||||
repos_key = "repos:" if override else "repos"
|
||||
spack.config.config.push_scope(
|
||||
spack.config.CONFIG.push_scope(
|
||||
spack.config.InternalConfigScope(name=scope_name, data={repos_key: paths})
|
||||
)
|
||||
PATH, saved = create(configuration=spack.config.config), PATH
|
||||
PATH, saved = create(configuration=spack.config.CONFIG), PATH
|
||||
try:
|
||||
yield PATH
|
||||
finally:
|
||||
spack.config.config.remove_scope(scope_name=scope_name)
|
||||
spack.config.CONFIG.remove_scope(scope_name=scope_name)
|
||||
PATH = saved
|
||||
|
||||
|
||||
|
||||
@@ -28,6 +28,12 @@
|
||||
"unify": {
|
||||
"oneOf": [{"type": "boolean"}, {"type": "string", "enum": ["when_possible"]}]
|
||||
},
|
||||
"duplicates": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
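# The new "duplicates" entry above can be checked against a config fragment with
# a minimal sketch (assuming jsonschema is importable; the schema dict below
# mirrors just this hunk, not Spack's full concretizer schema):
import jsonschema

duplicates_schema = {
    "type": "object",
    "properties": {
        "duplicates": {
            "type": "object",
            "properties": {
                "strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
            },
        }
    },
}

# accepted: one of the three declared strategies
jsonschema.validate({"duplicates": {"strategy": "minimal"}}, duplicates_schema)
# an unknown strategy would raise jsonschema.ValidationError instead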
|
||||
@@ -8,11 +8,12 @@
|
||||
import enum
|
||||
import itertools
|
||||
import os
|
||||
import pathlib
|
||||
import pprint
|
||||
import re
|
||||
import types
|
||||
import warnings
|
||||
from typing import List
|
||||
from typing import List, NamedTuple
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
@@ -50,6 +51,8 @@
|
||||
import spack.version as vn
|
||||
import spack.version.git_ref_lookup
|
||||
|
||||
from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter
|
||||
|
||||
# these are from clingo.ast and bootstrapped later
|
||||
ASTType = None
|
||||
parse_files = None
|
||||
@@ -77,9 +80,7 @@ def default_clingo_control():
|
||||
"""Return a control object with the default settings used in Spack"""
|
||||
control = clingo.Control()
|
||||
control.configuration.configuration = "tweety"
|
||||
control.configuration.solve.models = 0
|
||||
control.configuration.solver.heuristic = "Domain"
|
||||
control.configuration.solve.parallel_mode = "1"
|
||||
control.configuration.solver.opt_strategy = "usc,one"
|
||||
return control
|
||||
|
||||
@@ -266,12 +267,14 @@ def _id(thing):
|
||||
|
||||
@llnl.util.lang.key_ordering
|
||||
class AspFunction(AspObject):
|
||||
__slots__ = ["name", "args"]
|
||||
|
||||
def __init__(self, name, args=None):
|
||||
self.name = name
|
||||
self.args = () if args is None else tuple(args)
|
||||
|
||||
def _cmp_key(self):
|
||||
return (self.name, self.args)
|
||||
return self.name, self.args
|
||||
|
||||
def __call__(self, *args):
|
||||
"""Return a new instance of this function with added arguments.
|
||||
@@ -302,6 +305,8 @@ def argify(arg):
|
||||
return clingo.String(str(arg))
|
||||
elif isinstance(arg, int):
|
||||
return clingo.Number(arg)
|
||||
elif isinstance(arg, AspFunction):
|
||||
return clingo.Function(arg.name, [argify(x) for x in arg.args], positive=positive)
|
||||
else:
|
||||
return clingo.String(str(arg))
|
||||
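# The mapping performed by argify can be summarized in a short sketch
# (assuming the clingo Python module is importable):
import clingo

# ints become Number symbols; strings and any unhandled value fall through
# to a quoted String symbol
assert str(clingo.Number(7)) == "7"
assert str(clingo.String("x86_64")) == '"x86_64"'

# AspFunction values are converted recursively into Function symbols, e.g.
# clingo.Function("node", [clingo.String("0"), clingo.String("hdf5")])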
|
||||
@@ -322,6 +327,15 @@ def __getattr__(self, name):
|
||||
fn = AspFunctionBuilder()
|
||||
|
||||
|
||||
def _create_counter(specs, tests):
|
||||
strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
|
||||
if strategy == "full":
|
||||
return FullDuplicatesCounter(specs, tests=tests)
|
||||
if strategy == "minimal":
|
||||
return MinimalDuplicatesCounter(specs, tests=tests)
|
||||
return NoDuplicatesCounter(specs, tests=tests)
|
||||
|
||||
|
||||
def all_compilers_in_config():
|
||||
return spack.compilers.all_compilers()
|
||||
|
||||
@@ -513,15 +527,17 @@ def _compute_specs_from_answer_set(self):
|
||||
best = min(self.answers)
|
||||
opt, _, answer = best
|
||||
for input_spec in self.abstract_specs:
|
||||
key = input_spec.name
|
||||
node = SpecBuilder.make_node(pkg=input_spec.name)
|
||||
if input_spec.virtual:
|
||||
providers = [spec.name for spec in answer.values() if spec.package.provides(key)]
|
||||
key = providers[0]
|
||||
candidate = answer.get(key)
|
||||
providers = [
|
||||
spec.name for spec in answer.values() if spec.package.provides(input_spec.name)
|
||||
]
|
||||
node = SpecBuilder.make_node(pkg=providers[0])
|
||||
candidate = answer.get(node)
|
||||
|
||||
if candidate and candidate.satisfies(input_spec):
|
||||
self._concrete_specs.append(answer[key])
|
||||
self._concrete_specs_by_input[input_spec] = answer[key]
|
||||
self._concrete_specs.append(answer[node])
|
||||
self._concrete_specs_by_input[input_spec] = answer[node]
|
||||
else:
|
||||
self._unsolved_specs.append(input_spec)
|
||||
|
||||
@@ -572,16 +588,33 @@ def bootstrap_clingo():
|
||||
from clingo import parse_files
|
||||
|
||||
|
||||
def stringify(sym):
    """Stringify symbols from clingo models.

    This will turn a ``clingo.Symbol`` into a string, or a sequence of ``clingo.Symbol``
    objects into a tuple of strings.

class NodeArgument(NamedTuple):
    id: str
    pkg: str


def intermediate_repr(sym):
    """Returns an intermediate representation of clingo models for Spack's spec builder.

    Currently, transforms symbols from clingo models either to strings or to NodeArgument objects.

    Returns:
        This will turn a ``clingo.Symbol`` into a string or NodeArgument, or a sequence of
        ``clingo.Symbol`` objects into a tuple of those objects.
    """
|
||||
# TODO: simplify this when we no longer have to support older clingo versions.
|
||||
if isinstance(sym, (list, tuple)):
|
||||
return tuple(stringify(a) for a in sym)
|
||||
return tuple(intermediate_repr(a) for a in sym)
|
||||
|
||||
try:
|
||||
if sym.name == "node":
|
||||
return NodeArgument(
|
||||
id=intermediate_repr(sym.arguments[0]), pkg=intermediate_repr(sym.arguments[1])
|
||||
)
|
||||
except RuntimeError:
|
||||
# This happens when using clingo w/ CFFI and trying to access ".name" for symbols
|
||||
# that are not functions
|
||||
pass
|
||||
|
||||
if clingo_cffi:
|
||||
# Clingo w/ CFFI will throw an exception on failure
|
||||
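# A standalone sketch of the idea behind intermediate_repr (not Spack's exact
# code: it works on pre-parsed (name, args) tuples instead of clingo.Symbol
# objects, so it runs without clingo installed):
from typing import NamedTuple

class Node(NamedTuple):  # stand-in for the NodeArgument above
    id: str
    pkg: str

def to_intermediate(term):
    # node(Id, Pkg) terms become structured arguments for the spec builder
    if isinstance(term, tuple) and term and term[0] == "node":
        return Node(id=str(term[1]), pkg=str(term[2]))
    # sequences are converted element-wise, everything else is stringified
    if isinstance(term, list):
        return tuple(to_intermediate(t) for t in term)
    return str(term)

print(to_intermediate(("node", 0, "hdf5")))  # Node(id='0', pkg='hdf5')
print(to_intermediate(["1.2.3", 42]))        # ('1.2.3', '42')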
@@ -596,10 +629,10 @@ def stringify(sym):
|
||||
def extract_args(model, predicate_name):
|
||||
"""Extract the arguments to predicates with the provided name from a model.
|
||||
|
||||
Pull out all the predicates with name ``predicate_name`` from the model, and return
|
||||
their stringified arguments as tuples.
|
||||
Pull out all the predicates with name ``predicate_name`` from the model, and
|
||||
return their intermediate representation.
|
||||
"""
|
||||
return [stringify(sym.arguments) for sym in model if sym.name == predicate_name]
|
||||
return [intermediate_repr(sym.arguments) for sym in model if sym.name == predicate_name]
|
||||
|
||||
|
||||
class ErrorHandler:
|
||||
@@ -700,7 +733,9 @@ def fact(self, head):
|
||||
"""
|
||||
symbol = head.symbol() if hasattr(head, "symbol") else head
|
||||
|
||||
self.out.write("%s.\n" % str(symbol))
|
||||
# Skip the write entirely to avoid evaluating str(symbol) when we have no stream
|
||||
if not isinstance(self.out, llnl.util.lang.Devnull):
|
||||
self.out.write(f"{str(symbol)}.\n")
|
||||
|
||||
atom = self.backend.add_atom(symbol)
|
||||
|
||||
@@ -772,8 +807,11 @@ def visit(node):
|
||||
|
||||
# Load the file itself
|
||||
self.control.load(os.path.join(parent_dir, "concretize.lp"))
|
||||
self.control.load(os.path.join(parent_dir, "heuristic.lp"))
|
||||
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
|
||||
self.control.load(os.path.join(parent_dir, "display.lp"))
|
||||
if not setup.concretize_everything:
|
||||
self.control.load(os.path.join(parent_dir, "when_possible.lp"))
|
||||
timer.stop("load")
|
||||
|
||||
# Grounding is the first step in the solve -- it turns our facts
|
||||
@@ -801,6 +839,14 @@ def on_model(model):
|
||||
|
||||
timer.start("solve")
|
||||
solve_result = self.control.solve(**solve_kwargs)
|
||||
|
||||
if solve_result.satisfiable and self._model_has_cycles(models):
|
||||
tty.debug(f"cycles detected, falling back to slower algorithm [specs={specs}]")
|
||||
self.control.load(os.path.join(parent_dir, "cycle_detection.lp"))
|
||||
self.control.ground([("no_cycle", [])])
|
||||
models.clear()
|
||||
solve_result = self.control.solve(**solve_kwargs)
|
||||
|
||||
timer.stop("solve")
|
||||
|
||||
# once done, construct the solve result
|
||||
@@ -836,7 +882,8 @@ def on_model(model):
|
||||
for sym in best_model:
|
||||
if sym.name not in ("attr", "error", "opt_criterion"):
|
||||
tty.debug(
|
||||
"UNKNOWN SYMBOL: %s(%s)" % (sym.name, ", ".join(stringify(sym.arguments)))
|
||||
"UNKNOWN SYMBOL: %s(%s)"
|
||||
% (sym.name, ", ".join(intermediate_repr(sym.arguments)))
|
||||
)
|
||||
|
||||
elif cores:
|
||||
@@ -853,6 +900,26 @@ def on_model(model):
|
||||
|
||||
return result, timer, self.control.statistics
|
||||
|
||||
def _model_has_cycles(self, models):
|
||||
"""Returns true if the best model has cycles in it"""
|
||||
cycle_detection = clingo.Control()
|
||||
parent_dir = pathlib.Path(__file__).parent
|
||||
lp_file = parent_dir / "cycle_detection.lp"
|
||||
|
||||
min_cost, best_model = min(models)
|
||||
with cycle_detection.backend() as backend:
|
||||
for atom in best_model:
|
||||
if atom.name == "attr" and str(atom.arguments[0]) == '"depends_on"':
|
||||
symbol = fn.depends_on(atom.arguments[1], atom.arguments[2])
|
||||
atom_id = backend.add_atom(symbol.symbol())
|
||||
backend.add_rule([atom_id], [], choice=False)
|
||||
|
||||
cycle_detection.load(str(lp_file))
|
||||
cycle_detection.ground([("base", []), ("no_cycle", [])])
|
||||
cycle_result = cycle_detection.solve()
|
||||
|
||||
return cycle_result.unsatisfiable
|
||||
|
||||
|
||||
class SpackSolverSetup:
|
||||
"""Class to set up and run a Spack concretization solve."""
|
||||
@@ -883,6 +950,10 @@ def __init__(self, tests=False):
|
||||
|
||||
# id for dummy variables
|
||||
self._condition_id_counter = itertools.count()
|
||||
self._trigger_id_counter = itertools.count()
|
||||
self._trigger_cache = collections.defaultdict(dict)
|
||||
self._effect_id_counter = itertools.count()
|
||||
self._effect_cache = collections.defaultdict(dict)
|
||||
|
||||
# Caches to optimize the setup phase of the solver
|
||||
self.target_specs_cache = None
|
||||
@@ -919,15 +990,18 @@ def key_fn(version):
|
||||
|
||||
for weight, declared_version in enumerate(most_to_least_preferred):
|
||||
self.gen.fact(
|
||||
fn.version_declared(
|
||||
pkg.name, declared_version.version, weight, str(declared_version.origin)
|
||||
fn.pkg_fact(
|
||||
pkg.name,
|
||||
fn.version_declared(
|
||||
declared_version.version, weight, str(declared_version.origin)
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
# Declare deprecated versions for this package, if any
|
||||
deprecated = self.deprecated_versions[pkg.name]
|
||||
for v in sorted(deprecated):
|
||||
self.gen.fact(fn.deprecated_version(pkg.name, v))
|
||||
self.gen.fact(fn.pkg_fact(pkg.name, fn.deprecated_version(v)))
|
||||
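# The change above only re-shapes the emitted facts; illustrative before/after
# strings (package and version are made up):
old_fact = 'version_declared("hdf5", "1.14.1", 0, "package_py").'
new_fact = 'pkg_fact("hdf5", version_declared("1.14.1", 0, "package_py")).'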
|
||||
def spec_versions(self, spec):
|
||||
"""Return list of clauses expressing spec's version constraints."""
|
||||
@@ -960,7 +1034,10 @@ def conflict_rules(self, pkg):
|
||||
no_constraint_msg = "{0}: conflicts with '{1}'"
|
||||
for trigger, constraints in pkg.conflicts.items():
|
||||
trigger_msg = "conflict trigger %s" % str(trigger)
|
||||
trigger_id = self.condition(spack.spec.Spec(trigger), name=pkg.name, msg=trigger_msg)
|
||||
trigger_spec = spack.spec.Spec(trigger)
|
||||
trigger_id = self.condition(
|
||||
trigger_spec, name=trigger_spec.name or pkg.name, msg=trigger_msg
|
||||
)
|
||||
|
||||
for constraint, conflict_msg in constraints:
|
||||
if conflict_msg is None:
|
||||
@@ -970,7 +1047,9 @@ def conflict_rules(self, pkg):
|
||||
conflict_msg = default_msg.format(pkg.name, trigger, constraint)
|
||||
constraint_msg = "conflict constraint %s" % str(constraint)
|
||||
constraint_id = self.condition(constraint, name=pkg.name, msg=constraint_msg)
|
||||
self.gen.fact(fn.conflict(pkg.name, trigger_id, constraint_id, conflict_msg))
|
||||
self.gen.fact(
|
||||
fn.pkg_fact(pkg.name, fn.conflict(trigger_id, constraint_id, conflict_msg))
|
||||
)
|
||||
self.gen.newline()
|
||||
|
||||
def compiler_facts(self):
|
||||
@@ -1023,8 +1102,11 @@ def package_compiler_defaults(self, pkg):
|
||||
|
||||
for i, compiler in enumerate(reversed(matches)):
|
||||
self.gen.fact(
|
||||
fn.node_compiler_preference(
|
||||
pkg.name, compiler.spec.name, compiler.spec.version, -i * 100
|
||||
fn.pkg_fact(
|
||||
pkg.name,
|
||||
fn.node_compiler_preference(
|
||||
compiler.spec.name, compiler.spec.version, -i * 100
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
@@ -1114,12 +1196,65 @@ def pkg_rules(self, pkg, tests):
|
||||
self.gen.newline()
|
||||
|
||||
# variants
|
||||
self.variant_rules(pkg)
|
||||
|
||||
# conflicts
|
||||
self.conflict_rules(pkg)
|
||||
|
||||
# default compilers for this package
|
||||
self.package_compiler_defaults(pkg)
|
||||
|
||||
# virtuals
|
||||
self.package_provider_rules(pkg)
|
||||
|
||||
# dependencies
|
||||
self.package_dependencies_rules(pkg)
|
||||
|
||||
# virtual preferences
|
||||
self.virtual_preferences(
|
||||
pkg.name,
|
||||
lambda v, p, i: self.gen.fact(fn.pkg_fact(pkg.name, fn.provider_preference(v, p, i))),
|
||||
)
|
||||
|
||||
self.package_requirement_rules(pkg)
|
||||
|
||||
# trigger and effect tables
|
||||
self.trigger_rules()
|
||||
self.effect_rules()
|
||||
|
||||
def trigger_rules(self):
|
||||
"""Flushes all the trigger rules collected so far, and clears the cache."""
|
||||
self.gen.h2("Trigger conditions")
|
||||
for name in self._trigger_cache:
|
||||
cache = self._trigger_cache[name]
|
||||
for spec_str, (trigger_id, requirements) in cache.items():
|
||||
self.gen.fact(fn.pkg_fact(name, fn.trigger_id(trigger_id)))
|
||||
self.gen.fact(fn.pkg_fact(name, fn.trigger_msg(spec_str)))
|
||||
for predicate in requirements:
|
||||
self.gen.fact(fn.condition_requirement(trigger_id, *predicate.args))
|
||||
self.gen.newline()
|
||||
self._trigger_cache.clear()
|
||||
|
||||
def effect_rules(self):
|
||||
"""Flushes all the effect rules collected so far, and clears the cache."""
|
||||
self.gen.h2("Imposed requirements")
|
||||
for name in self._effect_cache:
|
||||
cache = self._effect_cache[name]
|
||||
for spec_str, (effect_id, requirements) in cache.items():
|
||||
self.gen.fact(fn.pkg_fact(name, fn.effect_id(effect_id)))
|
||||
self.gen.fact(fn.pkg_fact(name, fn.effect_msg(spec_str)))
|
||||
for predicate in requirements:
|
||||
self.gen.fact(fn.imposed_constraint(effect_id, *predicate.args))
|
||||
self.gen.newline()
|
||||
self._effect_cache.clear()
|
||||
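# A minimal sketch of the caching pattern used by trigger_rules/effect_rules
# above (simplified: one ID per distinct spec string per package, flushed in
# one pass; illustrative, not Spack's code):
import collections
import itertools

_trigger_ids = itertools.count()
_trigger_cache = collections.defaultdict(dict)  # pkg name -> {spec string: id}

def trigger_id_for(pkg, spec_str):
    """Return a stable ID for (pkg, spec_str), allocating one on first use."""
    cache = _trigger_cache[pkg]
    if spec_str not in cache:
        cache[spec_str] = next(_trigger_ids)
    return cache[spec_str]

assert trigger_id_for("hdf5", "hdf5@1.14") == trigger_id_for("hdf5", "hdf5@1.14")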
|
||||
def variant_rules(self, pkg):
|
||||
for name, entry in sorted(pkg.variants.items()):
|
||||
variant, when = entry
|
||||
|
||||
if spack.spec.Spec() in when:
|
||||
# unconditional variant
|
||||
self.gen.fact(fn.variant(pkg.name, name))
|
||||
self.gen.fact(fn.pkg_fact(pkg.name, fn.variant(name)))
|
||||
else:
|
||||
# conditional variant
|
||||
for w in when:
|
||||
@@ -1128,19 +1263,23 @@ def pkg_rules(self, pkg, tests):
|
||||
msg += " when %s" % w
|
||||
|
||||
cond_id = self.condition(w, name=pkg.name, msg=msg)
|
||||
self.gen.fact(fn.variant_condition(cond_id, pkg.name, name))
|
||||
self.gen.fact(fn.pkg_fact(pkg.name, fn.conditional_variant(cond_id, name)))
|
||||
|
||||
single_value = not variant.multi
|
||||
if single_value:
|
||||
self.gen.fact(fn.variant_single_value(pkg.name, name))
|
||||
self.gen.fact(fn.pkg_fact(pkg.name, fn.variant_single_value(name)))
|
||||
self.gen.fact(
|
||||
fn.variant_default_value_from_package_py(pkg.name, name, variant.default)
|
||||
fn.pkg_fact(
|
||||
pkg.name, fn.variant_default_value_from_package_py(name, variant.default)
|
||||
)
|
||||
)
|
||||
else:
|
||||
spec_variant = variant.make_default()
|
||||
defaults = spec_variant.value
|
||||
for val in sorted(defaults):
|
||||
self.gen.fact(fn.variant_default_value_from_package_py(pkg.name, name, val))
|
||||
self.gen.fact(
|
||||
fn.pkg_fact(pkg.name, fn.variant_default_value_from_package_py(name, val))
|
||||
)
|
||||
|
||||
values = variant.values
|
||||
if values is None:
|
||||
@@ -1151,7 +1290,9 @@ def pkg_rules(self, pkg, tests):
|
||||
for sid, s in enumerate(values.sets):
|
||||
for value in s:
|
||||
self.gen.fact(
|
||||
fn.variant_value_from_disjoint_sets(pkg.name, name, value, sid)
|
||||
fn.pkg_fact(
|
||||
pkg.name, fn.variant_value_from_disjoint_sets(name, value, sid)
|
||||
)
|
||||
)
|
||||
union.update(s)
|
||||
values = union
|
||||
@@ -1178,7 +1319,9 @@ def pkg_rules(self, pkg, tests):
|
||||
msg="empty (total) conflict constraint",
|
||||
)
|
||||
msg = "variant {0}={1} is conditionally disabled".format(name, value)
|
||||
self.gen.fact(fn.conflict(pkg.name, trigger_id, constraint_id, msg))
|
||||
self.gen.fact(
|
||||
fn.pkg_fact(pkg.name, fn.conflict(trigger_id, constraint_id, msg))
|
||||
)
|
||||
else:
|
||||
imposed = spack.spec.Spec(value.when)
|
||||
imposed.name = pkg.name
|
||||
@@ -1189,32 +1332,13 @@ def pkg_rules(self, pkg, tests):
|
||||
name=pkg.name,
|
||||
msg="%s variant %s value %s when %s" % (pkg.name, name, value, when),
|
||||
)
|
||||
self.gen.fact(fn.variant_possible_value(pkg.name, name, value))
|
||||
self.gen.fact(fn.pkg_fact(pkg.name, fn.variant_possible_value(name, value)))
|
||||
|
||||
if variant.sticky:
|
||||
self.gen.fact(fn.variant_sticky(pkg.name, name))
|
||||
self.gen.fact(fn.pkg_fact(pkg.name, fn.variant_sticky(name)))
|
||||
|
||||
self.gen.newline()
|
||||
|
||||
# conflicts
|
||||
self.conflict_rules(pkg)
|
||||
|
||||
# default compilers for this package
|
||||
self.package_compiler_defaults(pkg)
|
||||
|
||||
# virtuals
|
||||
self.package_provider_rules(pkg)
|
||||
|
||||
# dependencies
|
||||
self.package_dependencies_rules(pkg)
|
||||
|
||||
# virtual preferences
|
||||
self.virtual_preferences(
|
||||
pkg.name, lambda v, p, i: self.gen.fact(fn.pkg_provider_preference(pkg.name, v, p, i))
|
||||
)
|
||||
|
||||
self.package_requirement_rules(pkg)
|
||||
|
||||
def condition(self, required_spec, imposed_spec=None, name=None, msg=None, node=False):
|
||||
"""Generate facts for a dependency or virtual provider condition.
|
||||
|
||||
@@ -1232,21 +1356,41 @@ def condition(self, required_spec, imposed_spec=None, name=None, msg=None, node=
|
||||
"""
|
||||
named_cond = required_spec.copy()
|
||||
named_cond.name = named_cond.name or name
|
||||
assert named_cond.name, "must provide name for anonymous condtions!"
|
||||
assert named_cond.name, "must provide name for anonymous conditions!"
|
||||
|
||||
# Check if we can emit the requirements before updating the condition ID counter.
|
||||
# In this way, if a condition can't be emitted but the exception is handled in the caller,
|
||||
# we won't emit partial facts.
|
||||
requirements = self.spec_clauses(named_cond, body=True, required_from=name)
|
||||
|
||||
condition_id = next(self._condition_id_counter)
|
||||
self.gen.fact(fn.condition(condition_id, msg))
|
||||
for pred in requirements:
|
||||
self.gen.fact(fn.condition_requirement(condition_id, *pred.args))
|
||||
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition(condition_id)))
|
||||
self.gen.fact(fn.condition_reason(condition_id, msg))
|
||||
|
||||
if imposed_spec:
|
||||
self.impose(condition_id, imposed_spec, node=node, name=name)
|
||||
cache = self._trigger_cache[named_cond.name]
|
||||
|
||||
named_cond_key = str(named_cond)
|
||||
if named_cond_key not in cache:
|
||||
trigger_id = next(self._trigger_id_counter)
|
||||
requirements = self.spec_clauses(named_cond, body=True, required_from=name)
|
||||
cache[named_cond_key] = (trigger_id, requirements)
|
||||
trigger_id, requirements = cache[named_cond_key]
|
||||
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_trigger(condition_id, trigger_id)))
|
||||
|
||||
if not imposed_spec:
|
||||
return condition_id
|
||||
|
||||
cache = self._effect_cache[named_cond.name]
|
||||
imposed_spec_key = str(imposed_spec)
|
||||
if imposed_spec_key not in cache:
|
||||
effect_id = next(self._effect_id_counter)
|
||||
requirements = self.spec_clauses(imposed_spec, body=False, required_from=name)
|
||||
if not node:
|
||||
requirements = list(
|
||||
filter(lambda x: x.args[0] not in ("node", "virtual_node"), requirements)
|
||||
)
|
||||
cache[imposed_spec_key] = (effect_id, requirements)
|
||||
effect_id, requirements = cache[imposed_spec_key]
|
||||
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_effect(condition_id, effect_id)))
|
||||
return condition_id
|
||||
|
||||
def impose(self, condition_id, imposed_spec, node=True, name=None, body=False):
|
||||
@@ -1259,13 +1403,19 @@ def impose(self, condition_id, imposed_spec, node=True, name=None, body=False):
|
||||
|
||||
def package_provider_rules(self, pkg):
|
||||
for provider_name in sorted(set(s.name for s in pkg.provided.keys())):
|
||||
self.gen.fact(fn.possible_provider(pkg.name, provider_name))
|
||||
if provider_name not in self.possible_virtuals:
|
||||
continue
|
||||
self.gen.fact(fn.pkg_fact(pkg.name, fn.possible_provider(provider_name)))
|
||||
|
||||
for provided, whens in pkg.provided.items():
|
||||
if provided.name not in self.possible_virtuals:
|
||||
continue
|
||||
for when in whens:
|
||||
msg = "%s provides %s when %s" % (pkg.name, provided, when)
|
||||
condition_id = self.condition(when, provided, pkg.name, msg)
|
||||
self.gen.fact(fn.provider_condition(condition_id, when.name, provided.name))
|
||||
self.gen.fact(
|
||||
fn.pkg_fact(when.name, fn.provider_condition(condition_id, provided.name))
|
||||
)
|
||||
self.gen.newline()
|
||||
|
||||
def package_dependencies_rules(self, pkg):
|
||||
@@ -1289,9 +1439,13 @@ def package_dependencies_rules(self, pkg):
|
||||
msg = "%s depends on %s" % (pkg.name, dep.spec.name)
|
||||
if cond != spack.spec.Spec():
|
||||
msg += " when %s" % cond
|
||||
else:
|
||||
pass
|
||||
|
||||
condition_id = self.condition(cond, dep.spec, pkg.name, msg)
|
||||
self.gen.fact(fn.dependency_condition(condition_id, pkg.name, dep.spec.name))
|
||||
self.gen.fact(
|
||||
fn.pkg_fact(pkg.name, fn.dependency_condition(condition_id, dep.spec.name))
|
||||
)
|
||||
|
||||
for t in sorted(deptypes):
|
||||
# there is a declared dependency of type t
|
||||
@@ -1328,7 +1482,7 @@ def provider_requirements(self):
|
||||
"Internal Error: possible_virtuals is not populated. Please report to the spack"
|
||||
" maintainers"
|
||||
)
|
||||
packages_yaml = spack.config.config.get("packages")
|
||||
packages_yaml = spack.config.CONFIG.get("packages")
|
||||
assert self.possible_virtuals is not None, msg
|
||||
for virtual_str in sorted(self.possible_virtuals):
|
||||
requirements = packages_yaml.get(virtual_str, {}).get("require", [])
|
||||
@@ -1336,6 +1490,8 @@ def provider_requirements(self):
|
||||
virtual_str, requirements, kind=RequirementKind.VIRTUAL
|
||||
)
|
||||
self.emit_facts_from_requirement_rules(rules)
|
||||
self.trigger_rules()
|
||||
self.effect_rules()
|
||||
|
||||
def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
|
||||
"""Generate facts to enforce requirements.
|
||||
@@ -1449,10 +1605,12 @@ def external_packages(self):
|
||||
for local_idx, spec in enumerate(external_specs):
|
||||
msg = "%s available as external when satisfying %s" % (spec.name, spec)
|
||||
condition_id = self.condition(spec, msg=msg)
|
||||
self.gen.fact(fn.possible_external(condition_id, pkg_name, local_idx))
|
||||
self.gen.fact(fn.pkg_fact(pkg_name, fn.possible_external(condition_id, local_idx)))
|
||||
self.possible_versions[spec.name].add(spec.version)
|
||||
self.gen.newline()
|
||||
|
||||
self.trigger_rules()
|
||||
|
||||
def preferred_variants(self, pkg_name):
|
||||
"""Facts on concretization preferences, as read from packages.yaml"""
|
||||
preferences = spack.package_prefs.PackagePrefs
|
||||
@@ -1495,7 +1653,9 @@ def target_preferences(self, pkg_name):
|
||||
if str(preferred.architecture.target) == best_default and i != 0:
|
||||
offset = 100
|
||||
self.gen.fact(
|
||||
fn.target_weight(pkg_name, str(preferred.architecture.target), i + offset)
|
||||
fn.pkg_fact(
|
||||
pkg_name, fn.target_weight(str(preferred.architecture.target), i + offset)
|
||||
)
|
||||
)
|
||||
|
||||
def spec_clauses(self, *args, **kwargs):
|
||||
@@ -2041,11 +2201,11 @@ def define_version_constraints(self):
|
||||
# generate facts for each package constraint and the version
|
||||
# that satisfies it
|
||||
for v in sorted(v for v in self.possible_versions[pkg_name] if v.satisfies(versions)):
|
||||
self.gen.fact(fn.version_satisfies(pkg_name, versions, v))
|
||||
self.gen.fact(fn.pkg_fact(pkg_name, fn.version_satisfies(versions, v)))
|
||||
|
||||
self.gen.newline()
|
||||
|
||||
def define_virtual_constraints(self):
|
||||
def collect_virtual_constraints(self):
|
||||
"""Define versions for constraints on virtuals.
|
||||
|
||||
Must be called before define_version_constraints().
|
||||
@@ -2131,7 +2291,7 @@ def define_variant_values(self):
|
||||
# spec_clauses(). We might want to order these facts by pkg and name
|
||||
# if we are debugging.
|
||||
for pkg, variant, value in self.variant_values_from_specs:
|
||||
self.gen.fact(fn.variant_possible_value(pkg, variant, value))
|
||||
self.gen.fact(fn.pkg_fact(pkg, fn.variant_possible_value(variant, value)))
|
||||
|
||||
def _facts_from_concrete_spec(self, spec, possible):
|
||||
# tell the solver about any installed packages that could
|
||||
@@ -2191,20 +2351,19 @@ def setup(self, driver, specs, reuse=None):
|
||||
|
||||
# get list of all possible dependencies
|
||||
self.possible_virtuals = set(x.name for x in specs if x.virtual)
|
||||
possible = spack.package_base.possible_dependencies(
|
||||
*specs, virtuals=self.possible_virtuals, deptype=spack.dependency.all_deptypes
|
||||
)
|
||||
|
||||
node_counter = _create_counter(specs, tests=self.tests)
|
||||
self.possible_virtuals = node_counter.possible_virtuals()
|
||||
self.pkgs = node_counter.possible_dependencies()
|
||||
|
||||
# Fail if we already know an unreachable node is requested
|
||||
for spec in specs:
|
||||
missing_deps = [
|
||||
str(d) for d in spec.traverse() if d.name not in possible and not d.virtual
|
||||
str(d) for d in spec.traverse() if d.name not in self.pkgs and not d.virtual
|
||||
]
|
||||
if missing_deps:
|
||||
raise spack.spec.InvalidDependencyError(spec.name, missing_deps)
|
||||
|
||||
self.pkgs = set(possible)
|
||||
|
||||
# driver is used by all the functions below to add facts and
|
||||
# rules to generate an ASP program.
|
||||
self.gen = driver
|
||||
@@ -2228,13 +2387,16 @@ def setup(self, driver, specs, reuse=None):
|
||||
self.possible_compilers = self.generate_possible_compilers(specs)
|
||||
|
||||
self.gen.h1("Concrete input spec definitions")
|
||||
self.define_concrete_input_specs(specs, possible)
|
||||
self.define_concrete_input_specs(specs, self.pkgs)
|
||||
|
||||
if reuse:
|
||||
self.gen.h1("Reusable specs")
|
||||
self.gen.fact(fn.optimize_for_reuse())
|
||||
for reusable_spec in reuse:
|
||||
self._facts_from_concrete_spec(reusable_spec, possible)
|
||||
self._facts_from_concrete_spec(reusable_spec, self.pkgs)
|
||||
|
||||
self.gen.h1("Generic statements on possible packages")
|
||||
node_counter.possible_packages_facts(self.gen, fn)
|
||||
|
||||
self.gen.h1("Possible flags on nodes")
|
||||
for flag in spack.spec.FlagMap.valid_compiler_flags():
|
||||
@@ -2255,7 +2417,7 @@ def setup(self, driver, specs, reuse=None):
|
||||
self.external_packages()
|
||||
|
||||
# traverse all specs and packages to build dict of possible versions
|
||||
self.build_version_dict(possible)
|
||||
self.build_version_dict(self.pkgs)
|
||||
self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
|
||||
self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)
|
||||
|
||||
@@ -2270,9 +2432,12 @@ def setup(self, driver, specs, reuse=None):
|
||||
self.preferred_variants(pkg)
|
||||
self.target_preferences(pkg)
|
||||
|
||||
self.gen.h1("Develop specs")
|
||||
# Inject dev_path from environment
|
||||
for ds in dev_specs:
|
||||
self.condition(spack.spec.Spec(ds.name), ds, msg="%s is a develop spec" % ds.name)
|
||||
self.trigger_rules()
|
||||
self.effect_rules()
|
||||
|
||||
self.gen.h1("Spec Constraints")
|
||||
self.literal_specs(specs)
|
||||
@@ -2280,10 +2445,8 @@ def setup(self, driver, specs, reuse=None):
|
||||
self.gen.h1("Variant Values defined in specs")
|
||||
self.define_variant_values()
|
||||
|
||||
self.gen.h1("Virtual Constraints")
|
||||
self.define_virtual_constraints()
|
||||
|
||||
self.gen.h1("Version Constraints")
|
||||
self.collect_virtual_constraints()
|
||||
self.define_version_constraints()
|
||||
|
||||
self.gen.h1("Compiler Version Constraints")
|
||||
@@ -2305,8 +2468,8 @@ def literal_specs(self, specs):
|
||||
fn.literal(idx, "variant_default_value_from_cli", *clause.args[1:])
|
||||
)
|
||||
|
||||
if self.concretize_everything:
|
||||
self.gen.fact(fn.concretize_everything())
|
||||
if self.concretize_everything:
|
||||
self.gen.fact(fn.solve_literal(idx))
|
||||
|
||||
def _get_versioned_specs_from_pkg_requirements(self):
|
||||
"""If package requirements mention versions that are not mentioned
|
||||
@@ -2401,6 +2564,16 @@ class SpecBuilder:
|
||||
)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def make_node(*, pkg: str) -> NodeArgument:
|
||||
"""Given a package name, returns the string representation of the "min_dupe_id" node in
|
||||
the ASP encoding.
|
||||
|
||||
Args:
|
||||
pkg: name of a package
|
||||
"""
|
||||
return NodeArgument(id="0", pkg=pkg)
|
||||
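# Usage sketch for the new static method (assumes this branch is installed so
# that spack.solver.asp exposes both names; illustrative, not part of asp.py):
from spack.solver.asp import NodeArgument, SpecBuilder

node = SpecBuilder.make_node(pkg="hdf5")
assert node == NodeArgument(id="0", pkg="hdf5")
# SpecBuilder._specs is keyed by such nodes rather than by bare package names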
|
||||
def __init__(self, specs, hash_lookup=None):
|
||||
self._specs = {}
|
||||
self._result = None
|
||||
@@ -2413,101 +2586,105 @@ def __init__(self, specs, hash_lookup=None):
|
||||
# from this dictionary during reconstruction
|
||||
self._hash_lookup = hash_lookup or {}
|
||||
|
||||
def hash(self, pkg, h):
|
||||
if pkg not in self._specs:
|
||||
self._specs[pkg] = self._hash_lookup[h]
|
||||
self._hash_specs.append(pkg)
|
||||
def hash(self, node, h):
|
||||
if node not in self._specs:
|
||||
self._specs[node] = self._hash_lookup[h]
|
||||
self._hash_specs.append(node)
|
||||
|
||||
def node(self, pkg):
|
||||
if pkg not in self._specs:
|
||||
self._specs[pkg] = spack.spec.Spec(pkg)
|
||||
def node(self, node):
|
||||
if node not in self._specs:
|
||||
self._specs[node] = spack.spec.Spec(node.pkg)
|
||||
|
||||
def _arch(self, pkg):
|
||||
arch = self._specs[pkg].architecture
|
||||
def _arch(self, node):
|
||||
arch = self._specs[node].architecture
|
||||
if not arch:
|
||||
arch = spack.spec.ArchSpec()
|
||||
self._specs[pkg].architecture = arch
|
||||
self._specs[node].architecture = arch
|
||||
return arch
|
||||
|
||||
def node_platform(self, pkg, platform):
|
||||
self._arch(pkg).platform = platform
|
||||
def node_platform(self, node, platform):
|
||||
self._arch(node).platform = platform
|
||||
|
||||
def node_os(self, pkg, os):
|
||||
self._arch(pkg).os = os
|
||||
def node_os(self, node, os):
|
||||
self._arch(node).os = os
|
||||
|
||||
def node_target(self, pkg, target):
|
||||
self._arch(pkg).target = target
|
||||
def node_target(self, node, target):
|
||||
self._arch(node).target = target
|
||||
|
||||
def variant_value(self, pkg, name, value):
|
||||
def variant_value(self, node, name, value):
|
||||
# FIXME: is there a way not to special case 'dev_path' everywhere?
|
||||
if name == "dev_path":
|
||||
self._specs[pkg].variants.setdefault(
|
||||
self._specs[node].variants.setdefault(
|
||||
name, spack.variant.SingleValuedVariant(name, value)
|
||||
)
|
||||
return
|
||||
|
||||
if name == "patches":
|
||||
self._specs[pkg].variants.setdefault(
|
||||
self._specs[node].variants.setdefault(
|
||||
name, spack.variant.MultiValuedVariant(name, value)
|
||||
)
|
||||
return
|
||||
|
||||
self._specs[pkg].update_variant_validate(name, value)
|
||||
self._specs[node].update_variant_validate(name, value)
|
||||
|
||||
def version(self, pkg, version):
|
||||
self._specs[pkg].versions = vn.VersionList([vn.Version(version)])
|
||||
def version(self, node, version):
|
||||
self._specs[node].versions = vn.VersionList([vn.Version(version)])
|
||||
|
||||
def node_compiler_version(self, pkg, compiler, version):
|
||||
self._specs[pkg].compiler = spack.spec.CompilerSpec(compiler)
|
||||
self._specs[pkg].compiler.versions = vn.VersionList([vn.Version(version)])
|
||||
def node_compiler_version(self, node, compiler, version):
|
||||
self._specs[node].compiler = spack.spec.CompilerSpec(compiler)
|
||||
self._specs[node].compiler.versions = vn.VersionList([vn.Version(version)])
|
||||
|
||||
def node_flag_compiler_default(self, pkg):
|
||||
self._flag_compiler_defaults.add(pkg)
|
||||
def node_flag_compiler_default(self, node):
|
||||
self._flag_compiler_defaults.add(node)
|
||||
|
||||
def node_flag(self, pkg, flag_type, flag):
|
||||
self._specs[pkg].compiler_flags.add_flag(flag_type, flag, False)
|
||||
def node_flag(self, node, flag_type, flag):
|
||||
self._specs[node].compiler_flags.add_flag(flag_type, flag, False)
|
||||
|
||||
def node_flag_source(self, pkg, flag_type, source):
|
||||
self._flag_sources[(pkg, flag_type)].add(source)
|
||||
def node_flag_source(self, node, flag_type, source):
|
||||
self._flag_sources[(node, flag_type)].add(source)
|
||||
|
||||
def no_flags(self, pkg, flag_type):
|
||||
self._specs[pkg].compiler_flags[flag_type] = []
|
||||
def no_flags(self, node, flag_type):
|
||||
self._specs[node].compiler_flags[flag_type] = []
|
||||
|
||||
def external_spec_selected(self, pkg, idx):
|
||||
def external_spec_selected(self, node, idx):
|
||||
"""This means that the external spec and index idx
|
||||
has been selected for this package.
|
||||
"""
|
||||
|
||||
packages_yaml = spack.config.get("packages")
|
||||
packages_yaml = _normalize_packages_yaml(packages_yaml)
|
||||
spec_info = packages_yaml[pkg]["externals"][int(idx)]
|
||||
self._specs[pkg].external_path = spec_info.get("prefix", None)
|
||||
self._specs[pkg].external_modules = spack.spec.Spec._format_module_list(
|
||||
spec_info = packages_yaml[node.pkg]["externals"][int(idx)]
|
||||
self._specs[node].external_path = spec_info.get("prefix", None)
|
||||
self._specs[node].external_modules = spack.spec.Spec._format_module_list(
|
||||
spec_info.get("modules", None)
|
||||
)
|
||||
self._specs[pkg].extra_attributes = spec_info.get("extra_attributes", {})
|
||||
self._specs[node].extra_attributes = spec_info.get("extra_attributes", {})
|
||||
|
||||
# If this is an extension, update the dependencies to include the extendee
|
||||
package = self._specs[pkg].package_class(self._specs[pkg])
|
||||
package = self._specs[node].package_class(self._specs[node])
|
||||
extendee_spec = package.extendee_spec
|
||||
|
||||
if extendee_spec:
|
||||
package.update_external_dependencies(self._specs.get(extendee_spec.name, None))
|
||||
extendee_node = SpecBuilder.make_node(pkg=extendee_spec.name)
|
||||
package.update_external_dependencies(self._specs.get(extendee_node, None))
|
||||
|
||||
def depends_on(self, pkg, dep, type):
|
||||
dependencies = self._specs[pkg].edges_to_dependencies(name=dep)
|
||||
def depends_on(self, parent_node, dependency_node, type):
|
||||
dependency_spec = self._specs[dependency_node]
|
||||
edges = self._specs[parent_node].edges_to_dependencies(name=dependency_spec.name)
|
||||
edges = [x for x in edges if id(x.spec) == id(dependency_spec)]
|
||||
|
||||
# TODO: assertion to be removed when cross-compilation is handled correctly
|
||||
msg = "Current solver does not handle multiple dependency edges of the same name"
|
||||
assert len(dependencies) < 2, msg
|
||||
|
||||
if not dependencies:
|
||||
self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,), virtuals=())
|
||||
if not edges:
|
||||
self._specs[parent_node].add_dependency_edge(
|
||||
self._specs[dependency_node], deptypes=(type,), virtuals=()
|
||||
)
|
||||
else:
|
||||
# TODO: This assumes that each solve unifies dependencies
|
||||
dependencies[0].update_deptypes(deptypes=(type,))
|
||||
edges[0].update_deptypes(deptypes=(type,))
|
||||
|
||||
def virtual_on_edge(self, pkg, provider, virtual):
|
||||
dependencies = self._specs[pkg].edges_to_dependencies(name=provider)
|
||||
assert len(dependencies) == 1
|
||||
def virtual_on_edge(self, parent_node, provider_node, virtual):
|
||||
dependencies = self._specs[parent_node].edges_to_dependencies(name=(provider_node.pkg))
|
||||
provider_spec = self._specs[provider_node]
|
||||
dependencies = [x for x in dependencies if id(x.spec) == id(provider_spec)]
|
||||
assert len(dependencies) == 1, f"{virtual}: {provider_node.pkg}"
|
||||
dependencies[0].update_virtuals((virtual,))
|
||||
|
||||
def reorder_flags(self):
|
||||
@@ -2537,19 +2714,23 @@ def reorder_flags(self):
|
||||
|
||||
# order is determined by the DAG. A spec's flags come after any of its ancestors
|
||||
# on the compile line
|
||||
source_key = (spec.name, flag_type)
|
||||
node = SpecBuilder.make_node(pkg=spec.name)
|
||||
source_key = (node, flag_type)
|
||||
if source_key in self._flag_sources:
|
||||
order = [s.name for s in spec.traverse(order="post", direction="parents")]
|
||||
order = [
|
||||
SpecBuilder.make_node(pkg=s.name)
|
||||
for s in spec.traverse(order="post", direction="parents")
|
||||
]
|
||||
sorted_sources = sorted(
|
||||
self._flag_sources[source_key], key=lambda s: order.index(s)
|
||||
)
|
||||
|
||||
# add flags from each source, lowest to highest precedence
|
||||
for name in sorted_sources:
|
||||
for node in sorted_sources:
|
||||
all_src_flags = list()
|
||||
per_pkg_sources = [self._specs[name]]
|
||||
if name in cmd_specs:
|
||||
per_pkg_sources.append(cmd_specs[name])
|
||||
per_pkg_sources = [self._specs[node]]
|
||||
if node.pkg in cmd_specs:
|
||||
per_pkg_sources.append(cmd_specs[node.pkg])
|
||||
for source in per_pkg_sources:
|
||||
all_src_flags.extend(source.compiler_flags.get(flag_type, []))
|
||||
extend_flag_list(from_sources, all_src_flags)
|
||||
@@ -2620,14 +2801,15 @@ def build_specs(self, function_tuples):
|
||||
# solving but don't construct anything. Do not ignore error
|
||||
# predicates on virtual packages.
|
||||
if name != "error":
|
||||
pkg = args[0]
|
||||
node = args[0]
|
||||
pkg = node.pkg
|
||||
if spack.repo.PATH.is_virtual(pkg):
|
||||
continue
|
||||
|
||||
# if we've already gotten a concrete spec for this pkg,
|
||||
# do not bother calling actions on it except for node_flag_source,
|
||||
# since node_flag_source is tracking information not in the spec itself
|
||||
spec = self._specs.get(pkg)
|
||||
spec = self._specs.get(args[0])
|
||||
if spec and spec.concrete:
|
||||
if name != "node_flag_source":
|
||||
continue
|
||||
@@ -2645,10 +2827,12 @@ def build_specs(self, function_tuples):
|
||||
# fix flags after all specs are constructed
|
||||
self.reorder_flags()
|
||||
|
||||
# cycle detection
|
||||
roots = [spec.root for spec in self._specs.values() if not spec.root.installed]
|
||||
|
||||
# inject patches -- note that we can't use set() to unique the
|
||||
# roots here, because the specs aren't complete, and the hash
|
||||
# function will loop forever.
|
||||
roots = [spec.root for spec in self._specs.values() if not spec.root.installed]
|
||||
roots = dict((id(r), r) for r in roots)
|
||||
for root in roots.values():
|
||||
spack.spec.Spec.inject_patches_variant(root)
|
||||
@@ -2768,7 +2952,7 @@ def solve(self, specs, out=None, timers=False, stats=False, tests=False, setup_o
|
||||
Arguments:
|
||||
specs (list): List of ``Spec`` objects to solve for.
|
||||
out: Optionally write the generate ASP program to a file-like object.
|
||||
timers (bool): Print out coarse fimers for different solve phases.
|
||||
timers (bool): Print out coarse timers for different solve phases.
|
||||
stats (bool): Print out detailed stats from clingo.
|
||||
tests (bool or tuple): If True, concretize test dependencies for all packages.
|
||||
If a tuple of package names, concretize test dependencies for named
|
||||
@@ -2776,6 +2960,7 @@ def solve(self, specs, out=None, timers=False, stats=False, tests=False, setup_o
|
||||
setup_only (bool): if True, stop after setup and don't solve (default False).
|
||||
"""
|
||||
# Check upfront that the variants are admissible
|
||||
specs = [s.lookup_hash() for s in specs]
|
||||
reusable_specs = self._check_input_and_extract_concrete_specs(specs)
|
||||
reusable_specs.extend(self._reusable_specs(specs))
|
||||
setup = SpackSolverSetup(tests=tests)
|
||||
@@ -2799,6 +2984,7 @@ def solve_in_rounds(self, specs, out=None, timers=False, stats=False, tests=Fals
|
||||
stats (bool): print internal statistics if set to True
|
||||
tests (bool): add test dependencies to the solve
|
||||
"""
|
||||
specs = [s.lookup_hash() for s in specs]
|
||||
reusable_specs = self._check_input_and_extract_concrete_specs(specs)
|
||||
reusable_specs.extend(self._reusable_specs(specs))
|
||||
setup = SpackSolverSetup(tests=tests)
|
||||
|
||||
(File diff suppressed because it is too large)

lib/spack/spack/solver/counter.py (new file, 159 lines)
@@ -0,0 +1,159 @@
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import collections
|
||||
from typing import List, Set, Tuple
|
||||
|
||||
import spack.dependency
|
||||
import spack.package_base
import spack.repo  # used below via spack.repo.PATH
|
||||
|
||||
PossibleDependencies = Set[str]
|
||||
|
||||
|
||||
class Counter:
|
||||
"""Computes the possible packages and the maximum number of duplicates
|
||||
allowed for each of them.
|
||||
|
||||
Args:
|
||||
specs: abstract specs to concretize
|
||||
tests: if True, add test dependencies to the list of possible packages
|
||||
"""
|
||||
|
||||
def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
|
||||
self.specs = specs
|
||||
|
||||
self.link_run_types: Tuple[str, ...] = ("link", "run", "test")
|
||||
self.all_types: Tuple[str, ...] = spack.dependency.all_deptypes
|
||||
if not tests:
|
||||
self.link_run_types = ("link", "run")
|
||||
self.all_types = ("link", "run", "build")
|
||||
|
||||
self._possible_dependencies: PossibleDependencies = set()
|
||||
self._possible_virtuals: Set[str] = set(x.name for x in specs if x.virtual)
|
||||
|
||||
def possible_dependencies(self) -> PossibleDependencies:
|
||||
"""Returns the list of possible dependencies"""
|
||||
self.ensure_cache_values()
|
||||
return self._possible_dependencies
|
||||
|
||||
def possible_virtuals(self) -> Set[str]:
|
||||
"""Returns the list of possible virtuals"""
|
||||
self.ensure_cache_values()
|
||||
return self._possible_virtuals
|
||||
|
||||
def ensure_cache_values(self) -> None:
|
||||
"""Ensure the cache values have been computed"""
|
||||
if self._possible_dependencies:
|
||||
return
|
||||
self._compute_cache_values()
|
||||
|
||||
def possible_packages_facts(self, gen: "spack.solver.asp.PyclingoDriver", fn) -> None:
|
||||
"""Emit facts associated with the possible packages"""
|
||||
raise NotImplementedError("must be implemented by derived classes")
|
||||
|
||||
def _compute_cache_values(self):
|
||||
raise NotImplementedError("must be implemented by derived classes")
|
||||
|
||||
|
||||
class NoDuplicatesCounter(Counter):
|
||||
def _compute_cache_values(self):
|
||||
result = spack.package_base.possible_dependencies(
|
||||
*self.specs, virtuals=self._possible_virtuals, deptype=self.all_types
|
||||
)
|
||||
self._possible_dependencies = set(result)
|
||||
|
||||
def possible_packages_facts(self, gen, fn):
|
||||
gen.h2("Maximum number of nodes (packages)")
|
||||
for package_name in sorted(self.possible_dependencies()):
|
||||
gen.fact(fn.max_dupes(package_name, 1))
|
||||
gen.newline()
|
||||
gen.h2("Maximum number of nodes (virtual packages)")
|
||||
for package_name in sorted(self.possible_virtuals()):
|
||||
gen.fact(fn.max_dupes(package_name, 1))
|
||||
gen.newline()
|
||||
gen.h2("Possible package in link-run subDAG")
|
||||
for name in sorted(self.possible_dependencies()):
|
||||
gen.fact(fn.possible_in_link_run(name))
|
||||
gen.newline()
|
||||
|
||||
|
||||
class MinimalDuplicatesCounter(NoDuplicatesCounter):
|
||||
def __init__(self, specs, tests):
|
||||
super().__init__(specs, tests)
|
||||
self._link_run: PossibleDependencies = set()
|
||||
self._direct_build: PossibleDependencies = set()
|
||||
self._total_build: PossibleDependencies = set()
|
||||
self._link_run_virtuals: Set[str] = set()
|
||||
|
||||
def _compute_cache_values(self):
|
||||
self._link_run = set(
|
||||
spack.package_base.possible_dependencies(
|
||||
*self.specs, virtuals=self._possible_virtuals, deptype=self.link_run_types
|
||||
)
|
||||
)
|
||||
self._link_run_virtuals.update(self._possible_virtuals)
|
||||
for x in self._link_run:
|
||||
current = spack.repo.PATH.get_pkg_class(x).dependencies_of_type("build")
|
||||
self._direct_build.update(current)
|
||||
|
||||
self._total_build = set(
|
||||
spack.package_base.possible_dependencies(
|
||||
*self._direct_build, virtuals=self._possible_virtuals, deptype=self.all_types
|
||||
)
|
||||
)
|
||||
self._possible_dependencies = set(self._link_run) | set(self._total_build)
|
||||
|
||||
def possible_packages_facts(self, gen, fn):
|
||||
build_tools = set(spack.repo.PATH.packages_with_tags("build-tools"))
|
||||
gen.h2("Packages with at most a single node")
|
||||
for package_name in sorted(self.possible_dependencies() - build_tools):
|
||||
gen.fact(fn.max_dupes(package_name, 1))
|
||||
gen.newline()
|
||||
|
||||
gen.h2("Packages with at multiple possible nodes (build-tools)")
|
||||
for package_name in sorted(self.possible_dependencies() & build_tools):
|
||||
gen.fact(fn.max_dupes(package_name, 2))
|
||||
gen.fact(fn.multiple_unification_sets(package_name))
|
||||
gen.newline()
|
||||
|
||||
gen.h2("Maximum number of nodes (virtual packages)")
|
||||
for package_name in sorted(self.possible_virtuals()):
|
||||
gen.fact(fn.max_dupes(package_name, 1))
|
||||
gen.newline()
|
||||
|
||||
gen.h2("Possible package in link-run subDAG")
|
||||
for name in sorted(self._link_run):
|
||||
gen.fact(fn.possible_in_link_run(name))
|
||||
gen.newline()
|
||||
|
||||
|
||||
class FullDuplicatesCounter(MinimalDuplicatesCounter):
|
||||
def possible_packages_facts(self, gen, fn):
|
||||
build_tools = set(spack.repo.PATH.packages_with_tags("build-tools"))
|
||||
counter = collections.Counter(
|
||||
list(self._link_run) + list(self._total_build) + list(self._direct_build)
|
||||
)
|
||||
gen.h2("Maximum number of nodes")
|
||||
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
|
||||
count = min(count, 2)
|
||||
gen.fact(fn.max_dupes(pkg, count))
|
||||
gen.newline()
|
||||
|
||||
gen.h2("Build unification sets ")
|
||||
for name in sorted(self.possible_dependencies() & build_tools):
|
||||
gen.fact(fn.multiple_unification_sets(name))
|
||||
gen.newline()
|
||||
|
||||
gen.h2("Possible package in link-run subDAG")
|
||||
for name in sorted(self._link_run):
|
||||
gen.fact(fn.possible_in_link_run(name))
|
||||
gen.newline()
|
||||
|
||||
counter = collections.Counter(
|
||||
list(self._link_run_virtuals) + list(self._possible_virtuals)
|
||||
)
|
||||
gen.h2("Maximum number of virtual nodes")
|
||||
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
|
||||
gen.fact(fn.max_dupes(pkg, count))
|
||||
gen.newline()
|
||||
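# Hedged usage sketch for the new counters (requires a working Spack checkout
# with this branch; "hdf5" is only an example spec):
import spack.spec
from spack.solver.counter import MinimalDuplicatesCounter

counter = MinimalDuplicatesCounter([spack.spec.Spec("hdf5")], tests=False)
print(sorted(counter.possible_dependencies()))  # link/run closure plus build closure
print(sorted(counter.possible_virtuals()))      # virtuals reachable from the inputs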
lib/spack/spack/solver/cycle_detection.lp (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
% Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
%
|
||||
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
%=============================================================================
|
||||
% Avoid cycles in the DAG
|
||||
%
|
||||
% Some combinations of conditional dependencies can result in cycles;
|
||||
% this ensures that we solve around them. Note that these rules are quite
|
||||
% demanding on both grounding and solving, since they need to compute and
|
||||
% consider all possible paths between pair of nodes.
|
||||
%=============================================================================
|
||||
|
||||
|
||||
#program no_cycle.
|
||||
path(Parent, Child) :- depends_on(Parent, Child).
|
||||
path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
|
||||
:- path(A, A).
|
||||
|
||||
#defined depends_on/2.
|
||||
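# The same check in plain Python, for intuition (illustrative only; the ASP
# program above does this declaratively over depends_on/2 facts from the model):
def has_cycle(edges):
    """edges: iterable of (parent, child) pairs; True if any node reaches itself."""
    graph = {}
    for parent, child in edges:
        graph.setdefault(parent, set()).add(child)

    def reachable(start):
        seen, stack = set(), [start]
        while stack:
            node = stack.pop()
            for nxt in graph.get(node, ()):
                if nxt not in seen:
                    seen.add(nxt)
                    stack.append(nxt)
        return seen

    return any(node in reachable(node) for node in graph)

assert has_cycle([("a", "b"), ("b", "a")])
assert not has_cycle([("a", "b"), ("b", "c")])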
lib/spack/spack/solver/heuristic.lp (new file, 44 lines)
@@ -0,0 +1,44 @@
|
||||
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
% Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
%
|
||||
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
%=============================================================================
|
||||
% Heuristic to speed-up solves
|
||||
%=============================================================================
|
||||
|
||||
|
||||
%-----------------
|
||||
% Domain heuristic
|
||||
%-----------------
|
||||
#heuristic attr("hash", node(0, Package), Hash) : literal(_, "root", Package). [45, init]
|
||||
#heuristic attr("root", node(0, Package)) : literal(_, "root", Package). [45, true]
|
||||
#heuristic attr("node", node(0, Package)) : literal(_, "root", Package). [45, true]
|
||||
#heuristic attr("node", node(0, Package)) : literal(_, "node", Package). [45, true]
|
||||
|
||||
% Root node
|
||||
#heuristic attr("version", node(0, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
|
||||
#heuristic version_weight(node(0, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
|
||||
#heuristic attr("variant_value", node(0, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("root", node(0, Package)). [35, true]
|
||||
#heuristic attr("node_target", node(0, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("root", node(0, Package)). [35, true]
|
||||
#heuristic node_target_weight(node(0, Package), 0) : attr("root", node(0, Package)). [35, true]
|
||||
#heuristic node_compiler(node(0, Package), CompilerID) : default_compiler_preference(ID, 0), compiler_id(ID), attr("root", node(0, Package)). [35, true]
|
||||
|
||||
% Providers
|
||||
#heuristic attr("node", node(0, Package)) : default_provider_preference(Virtual, Package, 0), possible_in_link_run(Package). [30, true]
|
||||
|
||||
% node(ID, _)
|
||||
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)). [25-5*ID, true]
|
||||
#heuristic version_weight(node(ID, Package), ID) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)). [25-5*ID, true]
|
||||
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)). [25-5*ID, true]
|
||||
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, ID)), attr("node", node(ID, Package)). [25-5*ID, true]
|
||||
#heuristic node_target_weight(node(ID, Package), ID) : attr("node", node(ID, Package)). [25-5*ID, true]
|
||||
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(ID, ID), compiler_id(ID), attr("node", node(ID, Package)). [25-5*ID, true]
|
||||
|
||||
% node(ID, _), split build dependencies
|
||||
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
|
||||
#heuristic version_weight(node(ID, Package), ID) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
|
||||
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
|
||||
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, ID)), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
|
||||
#heuristic node_target_weight(node(ID, Package), ID) : attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
|
||||
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(ID, ID), compiler_id(ID), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
|
||||
@@ -3,9 +3,11 @@
|
||||
%
|
||||
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
%=============================================================================
|
||||
% OS compatibility rules for reusing solves.
|
||||
% os_compatible(RecentOS, OlderOS)
|
||||
% OlderOS binaries can be used on RecentOS
|
||||
%=============================================================================
|
||||
|
||||
% macOS
|
||||
os_compatible("monterey", "bigsur").
|
||||
|
||||
lib/spack/spack/solver/when_possible.lp (new file, 27 lines)
@@ -0,0 +1,27 @@
|
||||
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
% Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
%
|
||||
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
%=============================================================================
|
||||
% Minimize the number of literals that are not solved
|
||||
%
|
||||
% This minimization is used for the "when_possible" concretization mode,
|
||||
% otherwise we assume that all literals must be solved.
|
||||
%=============================================================================
|
||||
|
||||
% Give clingo the choice to solve an input spec or not
|
||||
{ solve_literal(ID) } :- literal(ID).
|
||||
literal_not_solved(ID) :- not solve_literal(ID), literal(ID).
|
||||
|
||||
% Make a problem with "zero literals solved" unsat. This is to trigger
|
||||
% looking for solutions to the ASP problem with "errors", which results
|
||||
% in better reporting for users. See #30669 for details.
|
||||
1 { solve_literal(ID) : literal(ID) }.
|
||||
|
||||
opt_criterion(300, "number of input specs not concretized").
|
||||
#minimize{ 0@300: #true }.
|
||||
#minimize { 1@300,ID : literal_not_solved(ID) }.
|
||||
|
||||
#heuristic literal_solved(ID) : literal(ID). [1, sign]
|
||||
#heuristic literal_solved(ID) : literal(ID). [50, init]
|
||||
@@ -985,16 +985,14 @@ def __iter__(self):
|
||||
def __len__(self):
|
||||
return len(self.edges)
|
||||
|
||||
def add(self, edge):
|
||||
"""Adds a new edge to this object.
|
||||
|
||||
Args:
|
||||
edge (DependencySpec): edge to be added
|
||||
"""
|
||||
def add(self, edge: DependencySpec):
|
||||
key = edge.spec.name if self.store_by_child else edge.parent.name
|
||||
current_list = self.edges.setdefault(key, [])
|
||||
current_list.append(edge)
|
||||
current_list.sort(key=_sort_by_dep_types)
|
||||
if key in self.edges:
|
||||
lst = self.edges[key]
|
||||
lst.append(edge)
|
||||
lst.sort(key=_sort_by_dep_types)
|
||||
else:
|
||||
self.edges[key] = [edge]
|
||||
|
||||
def __str__(self):
|
||||
return "{deps: %s}" % ", ".join(str(d) for d in sorted(self.values()))
|
||||
@@ -1927,19 +1925,15 @@ def _lookup_hash(self):
|
||||
store, or finally, binary caches."""
|
||||
import spack.environment
|
||||
|
||||
matches = []
|
||||
active_env = spack.environment.active_environment()
|
||||
|
||||
if active_env:
|
||||
env_matches = active_env.get_by_hash(self.abstract_hash) or []
|
||||
matches = [m for m in env_matches if m._satisfies(self)]
|
||||
if not matches:
|
||||
db_matches = spack.store.STORE.db.get_by_hash(self.abstract_hash) or []
|
||||
matches = [m for m in db_matches if m._satisfies(self)]
|
||||
if not matches:
|
||||
query = spack.binary_distribution.BinaryCacheQuery(True)
|
||||
remote_matches = query("/" + self.abstract_hash) or []
|
||||
matches = [m for m in remote_matches if m._satisfies(self)]
|
||||
# First env, then store, then binary cache
|
||||
matches = (
|
||||
(active_env.all_matching_specs(self) if active_env else [])
|
||||
or spack.store.STORE.db.query(self, installed=any)
|
||||
or spack.binary_distribution.BinaryCacheQuery(True)(self)
|
||||
)
|
||||
|
||||
if not matches:
|
||||
raise InvalidHashError(self, self.abstract_hash)
|
||||
|
||||
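# The rewritten lookup leans on `or` short-circuiting over possibly-empty
# result lists; the same pattern in isolation (illustrative only):
def first_nonempty(*providers):
    """Call each zero-argument provider in turn; return the first non-empty result."""
    for provide in providers:
        matches = provide()
        if matches:
            return matches
    return []

assert first_nonempty(lambda: [], lambda: ["env"], lambda: ["cache"]) == ["env"]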
@@ -1960,19 +1954,17 @@ def lookup_hash(self):
         spec = self.copy(deps=False)
         # root spec is replaced
         if spec.abstract_hash:
-            new = self._lookup_hash()
-            spec._dup(new)
+            spec._dup(self._lookup_hash())
             return spec

         # Get dependencies that need to be replaced
         for node in self.traverse(root=False):
             if node.abstract_hash:
-                new = node._lookup_hash()
-                spec._add_dependency(new, deptypes=(), virtuals=())
+                spec._add_dependency(node._lookup_hash(), deptypes=(), virtuals=())

         # reattach nodes that were not otherwise satisfied by new dependencies
         for node in self.traverse(root=False):
-            if not any(n._satisfies(node) for n in spec.traverse()):
+            if not any(n.satisfies(node) for n in spec.traverse()):
                 spec._add_dependency(node.copy(), deptypes=(), virtuals=())

         return spec

@@ -1985,9 +1977,7 @@ def replace_hash(self):
         if not any(node for node in self.traverse(order="post") if node.abstract_hash):
             return

-        spec_by_hash = self.lookup_hash()
-
-        self._dup(spec_by_hash)
+        self._dup(self.lookup_hash())

     def to_node_dict(self, hash=ht.dag_hash):
         """Create a dictionary representing the state of this Spec.
@@ -2983,9 +2973,12 @@ def _new_concretize(self, tests=False):
             providers = [spec.name for spec in answer.values() if spec.package.provides(name)]
             name = providers[0]

-        assert name in answer
+        node = spack.solver.asp.SpecBuilder.make_node(pkg=name)
+        assert (
+            node in answer
+        ), f"cannot find {name} in the list of specs {','.join([n.pkg for n in answer.keys()])}"

-        concretized = answer[name]
+        concretized = answer[node]
         self._dup(concretized)

     def concretize(self, tests=False):
@@ -3519,7 +3512,8 @@ def update_variant_validate(self, variant_name, values):
         for value in values:
             if self.variants.get(variant_name):
                 msg = (
-                    "Cannot append a value to a single-valued " "variant with an already set value"
+                    f"cannot append the new value '{value}' to the single-valued "
+                    f"variant '{self.variants[variant_name]}'"
                 )
                 assert pkg_variant.multi, msg
             self.variants[variant_name].append(value)
@@ -3719,15 +3713,19 @@ def intersects(self, other: "Spec", deps: bool = True) -> bool:
         """
         other = self._autospec(other)

-        lhs = self.lookup_hash() or self
-        rhs = other.lookup_hash() or other
-
-        return lhs._intersects(rhs, deps)
-
-    def _intersects(self, other: "Spec", deps: bool = True) -> bool:
         if other.concrete and self.concrete:
             return self.dag_hash() == other.dag_hash()

+        self_hash = self.dag_hash() if self.concrete else self.abstract_hash
+        other_hash = other.dag_hash() if other.concrete else other.abstract_hash
+
+        if (
+            self_hash
+            and other_hash
+            and not (self_hash.startswith(other_hash) or other_hash.startswith(self_hash))
+        ):
+            return False
+
         # If the names are different, we need to consider virtuals
         if self.name != other.name and self.name and other.name:
             if self.virtual and other.virtual:
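The hash-compatibility test added above treats two hash constraints as intersecting when either is a prefix of the other, mirroring how an abbreviated hash on the command line can match a full-length DAG hash. A small sketch of that predicate in isolation (hypothetical helper, not part of Spack):

    def hashes_intersect(self_hash, other_hash):
        """Two hash constraints are compatible iff one is a prefix of the other."""
        if self_hash and other_hash:
            return self_hash.startswith(other_hash) or other_hash.startswith(self_hash)
        return True  # a missing hash constrains nothing

    print(hashes_intersect("abc123", "abc"))   # True: "abc" abbreviates "abc123"
    print(hashes_intersect("abc123", "abd"))   # False: diverging prefixes
    print(hashes_intersect("abc123", None))    # True: no constraint on one side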
@@ -3787,19 +3785,8 @@ def _intersects(self, other: "Spec", deps: bool = True) -> bool:
         # If we need to descend into dependencies, do it, otherwise we're done.
         if deps:
             return self._intersects_dependencies(other)
-        else:
-            return True
-
-    def satisfies(self, other, deps=True):
-        """
-        This checks constraints on common dependencies against each other.
-        """
-        other = self._autospec(other)
-
-        lhs = self.lookup_hash() or self
-        rhs = other.lookup_hash() or other
-
-        return lhs._satisfies(rhs, deps=deps)
+        return True

     def _intersects_dependencies(self, other):
         if not other._dependencies or not self._dependencies:
@@ -3836,7 +3823,7 @@ def _intersects_dependencies(self, other):

         return True

-    def _satisfies(self, other: "Spec", deps: bool = True) -> bool:
+    def satisfies(self, other: "Spec", deps: bool = True) -> bool:
         """Return True if all concrete specs matching self also match other, otherwise False.

         Args:

@@ -3851,6 +3838,13 @@ def _satisfies(self, other: "Spec", deps: bool = True) -> bool:
             # objects.
             return self.concrete and self.dag_hash() == other.dag_hash()

+        # If the right-hand side has an abstract hash, make sure it's a prefix of the
+        # left-hand side's (abstract) hash.
+        if other.abstract_hash:
+            compare_hash = self.dag_hash() if self.concrete else self.abstract_hash
+            if not compare_hash or not compare_hash.startswith(other.abstract_hash):
+                return False
+
         # If the names are different, we need to consider virtuals
         if self.name != other.name and self.name and other.name:
             # A concrete provider can satisfy a virtual dependency.
@@ -4227,9 +4221,7 @@ def eq_node(self, other):
     def _cmp_iter(self):
         """Lazily yield components of self for comparison."""

-        cmp_spec = self.lookup_hash() or self
-
-        for item in cmp_spec._cmp_node():
+        for item in self._cmp_node():
             yield item

         # This needs to be in _cmp_iter so that no specs with different process hashes

@@ -4240,10 +4232,10 @@ def _cmp_iter(self):
         # TODO: they exist for speed. We should benchmark whether it's really worth
         # TODO: having two types of hashing now that we use `json` instead of `yaml` for
         # TODO: spec hashing.
-        yield cmp_spec.process_hash() if cmp_spec.concrete else None
+        yield self.process_hash() if self.concrete else None

         def deps():
-            for dep in sorted(itertools.chain.from_iterable(cmp_spec._dependencies.values())):
+            for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
                 yield dep.spec.name
                 yield tuple(sorted(dep.deptypes))
                 yield hash(dep.spec)
@@ -4315,6 +4307,19 @@ def format(self, format_string=default_format, **kwargs):
         ``\{`` and ``\}`` for literal braces, and ``\\`` for the
         literal ``\`` character.

+        The ``?`` sigil may be used to conditionally add a
+        value. Conditional format values are used if constructing the
+        value would not throw any error, and are ignored if it would
+        throw an error. For example, ``{?^mpi.name}`` will print
+        ``Spec["mpi"].name`` if such a node exists, and otherwise
+        prints nothing.
+
+        The ``?`` sigil may also be combined with a conditional
+        separator. This separator is prepended if anything is printed
+        for the conditional attribute. The syntax for this is
+        ``?sep?attribute``,
+        e.g. ``{name}-{version}{?/?^mpi.name}{?-?^mpi.version}``.
+
         Args:
             format_string (str): string containing the format to be expanded
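The `?sep?attribute` syntax documented above is parsed with a single regular expression further down in this diff. The following sketch exercises that regex on the three shapes an attribute can take (plain, conditional, conditional with separator):

    import re

    for attribute in ("?/?^mpi.name", "?^mpi.name", "^mpi.name"):
        m = re.match(r"^\?(.*)\?", attribute)  # matches only "?sep?..." forms
        conditional_sep = m.group(1) if m else ""
        conditional = bool(m) or attribute.startswith("?")
        print(f"{attribute!r}: conditional={conditional}, sep={conditional_sep!r}")

    # '?/?^mpi.name': conditional=True, sep='/'
    # '?^mpi.name':   conditional=True, sep=''
    # '^mpi.name':    conditional=False, sep=''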
@@ -4338,6 +4343,15 @@ def write(s, c=None):
         def write_attribute(spec, attribute, color):
             attribute = attribute.lower()

+            conditional = False
+            conditional_sep = ""
+            matches_conditional_sep = re.match(r"^\?(.*)\?", attribute)
+            if matches_conditional_sep:
+                conditional = True
+                conditional_sep = matches_conditional_sep.group(1)
+            if attribute.startswith("?"):
+                conditional = True
+
             sig = ""
             if attribute.startswith(("@", "%", "/")):
                 # color sigils that are inside braces

@@ -4369,6 +4383,9 @@ def write_attribute(spec, attribute, color):
             elif sig == " arch=" and attribute not in ("architecture", "arch"):
                 raise SpecFormatSigilError(sig, "the architecture", attribute)

+            # Now that we're done testing sig, combine it with conditional sep
+            sig = conditional_sep + sig
+
             # find the morph function for our attribute
             morph = transform.get(attribute, lambda s, x: x)
@@ -4398,7 +4415,12 @@ def write_attribute(spec, attribute, color):
                 else:
                     if isinstance(current, vt.VariantMap):
                         # subscript instead of getattr for variant names
-                        current = current[part]
+                        try:
+                            current = current[part]
+                        except KeyError:
+                            if conditional:
+                                return
+                            raise
                     else:
                         # aliases
                         if part == "arch":

@@ -4414,6 +4436,8 @@ def write_attribute(spec, attribute, color):
                         try:
                             current = getattr(current, part)
                         except AttributeError:
+                            if conditional:
+                                return
                             parent = ".".join(parts[:idx])
                             m = "Attempted to format attribute %s. " % attribute
                             m += "Spec %s has no attribute %s" % (parent, part)
@@ -197,7 +197,9 @@ def _expand_matrix_constraints(matrix_config):
     for combo in itertools.product(*expanded_rows):
         # Construct a combined spec to test against excludes
         flat_combo = [constraint for constraint_list in combo for constraint in constraint_list]
-        flat_combo = [Spec(x) for x in flat_combo]
+
+        # Resolve abstract hashes so we can exclude by their concrete properties
+        flat_combo = [Spec(x).lookup_hash() for x in flat_combo]

         test_spec = flat_combo[0].copy()
         for constraint in flat_combo[1:]:
@@ -484,7 +484,7 @@ def fetch(self, mirror_only=False, err_msg=None):

         if self.default_fetcher.cachable:
             for rel_path in reversed(list(self.mirror_paths)):
-                cache_fetcher = spack.caches.fetch_cache.fetcher(
+                cache_fetcher = spack.caches.FETCH_CACHE.fetcher(
                     rel_path, digest, expand=expand, extension=extension
                 )
                 fetchers.insert(0, cache_fetcher)

@@ -577,7 +577,7 @@ def check(self):
         self.fetcher.check()

     def cache_local(self):
-        spack.caches.fetch_cache.store(self.fetcher, self.mirror_paths.storage_path)
+        spack.caches.FETCH_CACHE.store(self.fetcher, self.mirror_paths.storage_path)

     def cache_mirror(self, mirror, stats):
         """Perform a fetch if the resource is not already cached
@@ -212,7 +212,7 @@ def create(configuration: ConfigurationType) -> Store:
     Args:
         configuration: configuration to create a store.
     """
-    configuration = configuration or spack.config.config
+    configuration = configuration or spack.config.CONFIG
     config_dict = configuration.get("config")
     root, unpadded_root, projections = parse_install_tree(config_dict)
     hash_length = configuration.get("config:install_hash_length")

@@ -234,7 +234,7 @@ def create(configuration: ConfigurationType) -> Store:


 def _create_global() -> Store:
-    result = create(configuration=spack.config.config)
+    result = create(configuration=spack.config.CONFIG)
     return result


@@ -372,10 +372,10 @@ def use_store(

     # Swap the store with the one just constructed and return it
     ensure_singleton_created()
-    spack.config.config.push_scope(
+    spack.config.CONFIG.push_scope(
         spack.config.InternalConfigScope(name=scope_name, data={"config": {"install_tree": data}})
     )
-    temporary_store = create(configuration=spack.config.config)
+    temporary_store = create(configuration=spack.config.CONFIG)
     original_store, STORE = STORE, temporary_store

     try:

@@ -383,7 +383,7 @@ def use_store(
     finally:
         # Restore the original store
         STORE = original_store
-        spack.config.config.remove_scope(scope_name=scope_name)
+        spack.config.CONFIG.remove_scope(scope_name=scope_name)


 class MatchError(spack.error.SpackError):
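`use_store` above swaps the module-level `STORE` singleton and a matching config scope, then restores both in `finally`. A stripped-down sketch of that swap-and-restore pattern (illustrative, not the real `spack.store` module):

    import contextlib

    STORE = "original-store"  # stands in for the module-level singleton

    @contextlib.contextmanager
    def use_store(temporary):
        global STORE
        original, STORE = STORE, temporary  # swap in the temporary store
        try:
            yield STORE
        finally:
            STORE = original  # always restored, even if the body raises

    with use_store("temporary-store") as store:
        print(store)  # temporary-store
    print(STORE)      # original-store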
@@ -94,14 +94,14 @@ class TestState:

     def __init__(self):
         if _SERIALIZE:
-            self.config = spack.config.config
+            self.config = spack.config.CONFIG
             self.platform = spack.platforms.host
             self.test_patches = store_patches()
             self.store = spack.store.STORE

     def restore(self):
         if _SERIALIZE:
-            spack.config.config = self.config
+            spack.config.CONFIG = self.config
             spack.repo.PATH = spack.repo.create(self.config)
             spack.platforms.host = self.platform
             spack.store.STORE = self.store
@@ -199,15 +199,11 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
     ],
 )
 @pytest.mark.usefixtures("mock_packages", "config")
+@pytest.mark.only_clingo("Fixing the parser broke this test for the original concretizer.")
 def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
     # Monkeypatch so that all concretization is done as if the machine is core2
     monkeypatch.setattr(spack.platforms.test.Test, "default", "core2")
-
-    # use foobar=bar to make the problem simpler for the old concretizer
-    # the new concretizer should not need that help
-    if spack.config.get("config:concretizer") == "original":
-        pytest.skip("Fixing the parser broke this test for the original concretizer.")
-
     spec_str = "a %%gcc@10 foobar=bar target=%s ^b target=%s" % (
         root_target_range,
         dep_target_range,
@@ -37,7 +37,7 @@
 from spack.paths import test_path
 from spack.spec import Spec

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")

 mirror_cmd = spack.main.SpackCommand("mirror")
 install_cmd = spack.main.SpackCommand("install")
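From here on, the diff replaces dozens of pytest.mark.skipif(sys.platform == "win32", ...) decorations with a custom not_on_windows(reason) marker, so each test file no longer needs `import sys`. The marker itself is defined elsewhere in the changeset; one plausible implementation (an assumption, not necessarily Spack's actual conftest code) turns it into a skip during collection:

    # conftest.py sketch: translate @pytest.mark.not_on_windows("why") into a skip.
    import sys

    import pytest

    def pytest_configure(config):
        config.addinivalue_line(
            "markers", "not_on_windows(reason): mark a test that cannot run on Windows"
        )

    def pytest_collection_modifyitems(config, items):
        if sys.platform != "win32":
            return  # the marker is a no-op everywhere else
        for item in items:
            marker = item.get_closest_marker("not_on_windows")
            if marker:
                item.add_marker(pytest.mark.skip(reason=marker.args[0]))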
@@ -51,7 +51,7 @@
 def cache_directory(tmpdir):
     fetch_cache_dir = tmpdir.ensure("fetch_cache", dir=True)
     fsc = spack.fetch_strategy.FsCache(str(fetch_cache_dir))
-    spack.config.caches, old_cache_path = fsc, spack.caches.fetch_cache
+    spack.config.caches, old_cache_path = fsc, spack.caches.FETCH_CACHE

     yield spack.config.caches

@@ -115,8 +115,8 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl
         ]
     )

-    spack.config.config, old_config = cfg, spack.config.config
-    spack.config.config.set("repos", [spack.paths.mock_packages_path])
+    spack.config.CONFIG, old_config = cfg, spack.config.CONFIG
+    spack.config.CONFIG.set("repos", [spack.paths.mock_packages_path])
     njobs = spack.config.get("config:build_jobs")
     if not njobs:
         spack.config.set("config:build_jobs", 4, scope="user")

@@ -138,9 +138,9 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl
     if not timeout:
         spack.config.set("config:connect_timeout", 10, scope="user")

-    yield spack.config.config
+    yield spack.config.CONFIG

-    spack.config.config = old_config
+    spack.config.CONFIG = old_config
     mutable_dir.remove()
@@ -26,11 +26,11 @@ def test_store_is_restored_correctly_after_bootstrap(mutable_config, tmpdir):
     user_path = str(tmpdir.join("store"))
     with spack.store.use_store(user_path):
         assert spack.store.STORE.root == user_path
-        assert spack.config.config.get("config:install_tree:root") == user_path
+        assert spack.config.CONFIG.get("config:install_tree:root") == user_path
         with spack.bootstrap.ensure_bootstrap_configuration():
             assert spack.store.STORE.root == spack.bootstrap.config.store_path()
         assert spack.store.STORE.root == user_path
-        assert spack.config.config.get("config:install_tree:root") == user_path
+        assert spack.config.CONFIG.get("config:install_tree:root") == user_path


 @pytest.mark.regression("38963")

@@ -40,11 +40,11 @@ def test_store_padding_length_is_zero_during_bootstrapping(mutable_config, tmpdi
     """
     user_path = str(tmpdir.join("store"))
     with spack.store.use_store(user_path, extra_data={"padded_length": 512}):
-        assert spack.config.config.get("config:install_tree:padded_length") == 512
+        assert spack.config.CONFIG.get("config:install_tree:padded_length") == 512
         with spack.bootstrap.ensure_bootstrap_configuration():
             assert spack.store.STORE.root == spack.bootstrap.config.store_path()
-            assert spack.config.config.get("config:install_tree:padded_length") == 0
-        assert spack.config.config.get("config:install_tree:padded_length") == 512
+            assert spack.config.CONFIG.get("config:install_tree:padded_length") == 0
+        assert spack.config.CONFIG.get("config:install_tree:padded_length") == 512


 @pytest.mark.regression("38963")

@@ -54,15 +54,15 @@ def test_install_tree_customization_is_respected(mutable_config, tmp_path):
     """
     spack.store.reinitialize()
     store_dir = tmp_path / "store"
-    spack.config.config.set("config:install_tree:root", str(store_dir))
+    spack.config.CONFIG.set("config:install_tree:root", str(store_dir))
     with spack.bootstrap.ensure_bootstrap_configuration():
         assert spack.store.STORE.root == spack.bootstrap.config.store_path()
         assert (
-            spack.config.config.get("config:install_tree:root")
+            spack.config.CONFIG.get("config:install_tree:root")
             == spack.bootstrap.config.store_path()
         )
-        assert spack.config.config.get("config:install_tree:padded_length") == 0
-    assert spack.config.config.get("config:install_tree:root") == str(store_dir)
+        assert spack.config.CONFIG.get("config:install_tree:padded_length") == 0
+    assert spack.config.CONFIG.get("config:install_tree:root") == str(store_dir)
     assert spack.store.STORE.root == str(store_dir)

@@ -185,12 +185,12 @@ def test_bootstrap_custom_store_in_environment(mutable_config, tmpdir):

 def test_nested_use_of_context_manager(mutable_config):
     """Test nested use of the context manager"""
-    user_config = spack.config.config
+    user_config = spack.config.CONFIG
     with spack.bootstrap.ensure_bootstrap_configuration():
-        assert spack.config.config != user_config
+        assert spack.config.CONFIG != user_config
         with spack.bootstrap.ensure_bootstrap_configuration():
-            assert spack.config.config != user_config
-    assert spack.config.config == user_config
+            assert spack.config.CONFIG != user_config
+    assert spack.config.CONFIG == user_config


 @pytest.mark.parametrize("expected_missing", [False, True])
@@ -5,7 +5,6 @@

 import os
 import os.path
-import sys

 import pytest

@@ -16,7 +15,7 @@

 install = spack.main.SpackCommand("install")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir):

@@ -6,7 +6,6 @@
 import os
 import platform
 import posixpath
-import sys

 import pytest

@@ -119,7 +118,7 @@ def __call__(self, *args, **kwargs):
     return mock_module_cmd


-@pytest.mark.skipif(sys.platform == "win32", reason="Static to Shared not supported on Win (yet)")
+@pytest.mark.not_on_windows("Static to Shared not supported on Win (yet)")
 def test_static_to_shared_library(build_environment):
     os.environ["SPACK_TEST_COMMAND"] = "dump-args"

@@ -5,7 +5,6 @@

 import glob
 import os
-import sys

 import py.path
 import pytest

@@ -43,7 +42,7 @@ def _func(dir_str):
     return _func


-@pytest.mark.skipif(sys.platform == "win32", reason="make not available on Windows")
+@pytest.mark.not_on_windows("make not available on Windows")
 @pytest.mark.usefixtures("config", "mock_packages", "working_env")
 class TestTargets:
     @pytest.mark.parametrize(

@@ -92,7 +91,7 @@ def test_negative_ninja_check(self, input_dir, test_dir, concretize_and_setup):
         s.package._if_ninja_target_execute("check")


-@pytest.mark.skipif(sys.platform == "win32", reason="autotools not available on windows")
+@pytest.mark.not_on_windows("autotools not available on windows")
 @pytest.mark.usefixtures("config", "mock_packages")
 class TestAutotoolsPackage:
     def test_with_or_without(self, default_mock_concretization):

@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os.path
-import sys

 import pytest

@@ -106,10 +105,7 @@ def test_old_style_compatibility_with_super(spec_str, method_name, expected):
     assert value == expected


-@pytest.mark.skipif(
-    sys.platform == "win32",
-    reason="log_output cannot currently be used outside of subprocess on Windows",
-)
+@pytest.mark.not_on_windows("log_output cannot currently be used outside of subprocess on Windows")
 @pytest.mark.regression("33928")
 @pytest.mark.usefixtures("builder_test_repository", "config", "working_env")
 @pytest.mark.disable_clean_stage_check

@@ -153,7 +149,7 @@ def test_monkey_patching_test_log_file():

 # Windows context manager's __exit__ fails with ValueError ("I/O operation
 # on closed file").
-@pytest.mark.skipif(sys.platform == "win32", reason="Does not run on windows")
+@pytest.mark.not_on_windows("Does not run on windows")
 def test_install_time_test_callback(tmpdir, config, mock_packages, mock_stage):
     """Confirm able to run stand-alone test as a post-install callback."""
     s = spack.spec.Spec("py-test-callback").concretized()
@@ -8,7 +8,6 @@
 arguments correctly.
 """
 import os
-import sys

 import pytest

@@ -145,7 +144,7 @@
     + test_args_without_paths
 )

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 @pytest.fixture(scope="function")

@@ -5,7 +5,6 @@
 import itertools
 import os
 import subprocess
-import sys

 import pytest

@@ -35,7 +34,7 @@ def test_urlencode_string():
     assert ci._url_encode_string("Spack Test Project") == "Spack+Test+Project"


-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.not_on_windows("Not supported on Windows (yet)")
 def test_import_signing_key(mock_gnupghome):
     signing_key_dir = spack_paths.mock_gpg_keys_path
     signing_key_path = os.path.join(signing_key_dir, "package-signing-key")

@@ -427,18 +426,14 @@ def test_affected_specs_on_first_concretization(mutable_mock_env_path, mock_pack
     assert len(mpileaks_specs) == 2, e.all_specs()


-@pytest.mark.skipif(
-    sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
-)
+@pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
 def test_ci_process_command(repro_dir):
     result = ci.process_command("help", commands=[], repro_dir=str(repro_dir))
     help_sh = repro_dir / "help.sh"
     assert help_sh.exists() and not result


-@pytest.mark.skipif(
-    sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
-)
+@pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
 def test_ci_process_command_fail(repro_dir, monkeypatch):
     msg = "subprocess wait exception"

@@ -489,9 +484,7 @@ def test_ci_run_standalone_tests_missing_requirements(
     assert "Reproduction directory is required" in err


-@pytest.mark.skipif(
-    sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
-)
+@pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
 def test_ci_run_standalone_tests_not_installed_junit(
     tmp_path, repro_dir, working_env, default_mock_concretization, mock_test_stage, capfd
 ):

@@ -509,9 +502,7 @@ def test_ci_run_standalone_tests_not_installed_junit(
     assert os.path.getsize(log_file) > 0


-@pytest.mark.skipif(
-    sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
-)
+@pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
 def test_ci_run_standalone_tests_not_installed_cdash(
     tmp_path, repro_dir, working_env, default_mock_concretization, mock_test_stage, capfd
 ):
@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import sys
-
 import pytest

 from llnl.util.filesystem import working_dir

@@ -35,7 +33,7 @@ def test_blame_by_percent(mock_packages):
     assert "EMAIL" in out


-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.not_on_windows("Not supported on Windows (yet)")
 def test_blame_file(mock_packages):
     """Sanity check the blame command to make sure it works."""
     with working_dir(spack.paths.prefix):

@@ -68,7 +66,7 @@ def test_blame_json(mock_packages):
     assert key in loaded["authors"][0]


-@pytest.mark.skipif(sys.platform == "win32", reason="git hangs")
+@pytest.mark.not_on_windows("git hangs")
 def test_blame_by_git(mock_packages, capfd):
     """Sanity check the blame command to make sure it works."""
     with capfd.disabled():

@@ -50,7 +50,7 @@ def test_reset_in_file_scopes(mutable_config, scopes):
     bootstrap_yaml_files = []
     for s in scopes:
         _bootstrap("disable", "--scope={0}".format(s))
-        scope_path = spack.config.config.scopes[s].path
+        scope_path = spack.config.CONFIG.scopes[s].path
         bootstrap_yaml = os.path.join(scope_path, "bootstrap.yaml")
         assert os.path.exists(bootstrap_yaml)
         bootstrap_yaml_files.append(bootstrap_yaml)

@@ -80,7 +80,7 @@ def test_reset_in_environment(mutable_mock_env_path, mutable_config):
 def test_reset_in_file_scopes_overwrites_backup_files(mutable_config):
     # Create a bootstrap.yaml with some config
     _bootstrap("disable", "--scope=site")
-    scope_path = spack.config.config.scopes["site"].path
+    scope_path = spack.config.CONFIG.scopes["site"].path
     bootstrap_yaml = os.path.join(scope_path, "bootstrap.yaml")
     assert os.path.exists(bootstrap_yaml)

@@ -174,7 +174,7 @@ def test_remove_and_add_a_source(mutable_config):


 @pytest.mark.maybeslow
-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.not_on_windows("Not supported on Windows (yet)")
 def test_bootstrap_mirror_metadata(mutable_config, linux_os, monkeypatch, tmpdir):
     """Test that `spack bootstrap mirror` creates a folder that can be ingested by
     `spack bootstrap add`. Here we don't download data, since that would be an

@@ -54,7 +54,7 @@ def test_pickle(tmpdir):
     with tmpdir.as_cwd():
         build_env("--pickle", _out_file, "zlib")
         environment = pickle.load(open(_out_file, "rb"))
-        assert type(environment) == dict
+        assert isinstance(environment, dict)
         assert "PATH" in environment
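The `isinstance` change in `test_pickle` above is the usual fix for flake8's E721: an exact `type(...) ==` comparison rejects subclasses, while `isinstance` accepts them. A short demonstration:

    from collections import OrderedDict

    env = OrderedDict(PATH="/usr/bin")  # a dict subclass
    print(type(env) == dict)            # False: exact type comparison fails
    print(isinstance(env, dict))        # True: subclasses count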
@@ -6,7 +6,6 @@
 import errno
 import os
 import shutil
-import sys

 import pytest

@@ -26,7 +25,7 @@
 mirror = spack.main.SpackCommand("mirror")
 uninstall = spack.main.SpackCommand("uninstall")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 @pytest.fixture()

@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import argparse
-import sys

 import pytest

@@ -36,7 +35,7 @@ def test_checksum_args(arguments, expected):
     assert check == expected


-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.not_on_windows("Not supported on Windows (yet)")
 @pytest.mark.parametrize(
     "arguments,expected",
     [

@@ -57,7 +56,7 @@ def test_checksum(arguments, expected, mock_packages, mock_clone_repo, mock_stag
     assert "version(" in output


-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.not_on_windows("Not supported on Windows (yet)")
 def test_checksum_interactive(mock_packages, mock_fetch, mock_stage, monkeypatch):
     # TODO: mock_fetch doesn't actually work with stage, working around with ignoring
     # fail_on_error for now

@@ -7,7 +7,6 @@
 import json
 import os
 import shutil
-import sys

 import jsonschema
 import pytest

@@ -41,10 +40,7 @@
 uninstall_cmd = spack.main.SpackCommand("uninstall")
 buildcache_cmd = spack.main.SpackCommand("buildcache")

-pytestmark = [
-    pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows"),
-    pytest.mark.maybeslow,
-]
+pytestmark = [pytest.mark.not_on_windows("does not run on windows"), pytest.mark.maybeslow]


 @pytest.fixture()

@@ -33,8 +33,8 @@ def __call__(self, *args, **kwargs):

     monkeypatch.setattr(spack.package_base.PackageBase, "do_clean", Counter("package"))
     monkeypatch.setattr(spack.stage, "purge", Counter("stages"))
-    monkeypatch.setattr(spack.caches.fetch_cache, "destroy", Counter("downloads"), raising=False)
-    monkeypatch.setattr(spack.caches.misc_cache, "destroy", Counter("caches"))
+    monkeypatch.setattr(spack.caches.FETCH_CACHE, "destroy", Counter("downloads"), raising=False)
+    monkeypatch.setattr(spack.caches.MISC_CACHE, "destroy", Counter("caches"))
     monkeypatch.setattr(spack.store.STORE.failure_tracker, "clear_all", Counter("failures"))
     monkeypatch.setattr(spack.cmd.clean, "remove_python_cache", Counter("python_cache"))

@@ -7,7 +7,6 @@
 import os
 import shutil
 import subprocess
-import sys

 import pytest

@@ -254,9 +253,7 @@ def test_update_completion_arg(shell, tmpdir, monkeypatch):


 # Note: this test is never expected to be supported on Windows
-@pytest.mark.skipif(
-    sys.platform == "win32", reason="shell completion script generator fails on windows"
-)
+@pytest.mark.not_on_windows("Shell completion script generator fails on windows")
 @pytest.mark.parametrize("shell", ["bash", "fish"])
 def test_updated_completion_scripts(shell, tmpdir):
     """Make sure our shell tab completion scripts remain up-to-date."""

@@ -64,7 +64,7 @@ def compilers_dir(mock_executable):
     return clang_path.parent


-@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
+@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
 @pytest.mark.regression("11678,13138")
 def test_compiler_find_without_paths(no_compilers_yaml, working_env, mock_executable):
     """Tests that 'spack compiler find' looks into PATH by default, if no specific path

@@ -127,7 +127,7 @@ def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
     assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()


-@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
+@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
 def test_compiler_add(mutable_config, mock_packages, mock_executable):
     """Tests that we can add a compiler to configuration."""
     expected_version = "4.5.3"

@@ -157,7 +157,7 @@ def test_compiler_add(mutable_config, mock_packages, mock_executable):
     assert new_compiler.version == spack.version.Version(expected_version)


-@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
+@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
 @pytest.mark.regression("17590")
 def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_dir):
     """Ensure that we'll mix compilers with different suffixes when necessary."""

@@ -189,7 +189,7 @@ def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_
     }


-@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
+@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
 @pytest.mark.regression("17590")
 def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compilers_dir):
     """Ensure that we'll pick 'clang' over 'clang-gpu' when there is a choice."""

@@ -210,7 +210,7 @@ def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compiler
     assert clang["paths"]["cxx"] == str(compilers_dir / "clang++")


-@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
+@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
 def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir):
     """Ensure that we look for compilers in the same order as PATH, when there are duplicates"""
     new_dir = compilers_dir / "first_in_path"

@@ -24,7 +24,7 @@

 def _create_config(scope=None, data={}, section="packages"):
     scope = scope or spack.config.default_modify_scope()
-    cfg_file = spack.config.config.get_config_filename(scope, section)
+    cfg_file = spack.config.CONFIG.get_config_filename(scope, section)
     with open(cfg_file, "w") as f:
         syaml.dump(data, stream=f)
     return cfg_file
@@ -80,8 +80,8 @@ def test_config_edit(mutable_config, working_env):
     """Ensure `spack config edit` edits the right paths."""

     dms = spack.config.default_modify_scope("compilers")
-    dms_path = spack.config.config.scopes[dms].path
-    user_path = spack.config.config.scopes["user"].path
+    dms_path = spack.config.CONFIG.scopes[dms].path
+    user_path = spack.config.CONFIG.scopes["user"].path

     comp_path = os.path.join(dms_path, "compilers.yaml")
     repos_path = os.path.join(user_path, "repos.yaml")

@@ -544,7 +544,7 @@ def test_config_update_not_needed(mutable_config):
 def test_config_update_can_handle_comments(mutable_config):
     # Create an outdated config file with comments
     scope = spack.config.default_modify_scope()
-    cfg_file = spack.config.config.get_config_filename(scope, "config")
+    cfg_file = spack.config.CONFIG.get_config_filename(scope, "config")
     with open(cfg_file, mode="w") as f:
         f.write(
             """

@@ -574,7 +574,7 @@ def test_config_update_can_handle_comments(mutable_config):
 @pytest.mark.regression("18050")
 def test_config_update_works_for_empty_paths(mutable_config):
     scope = spack.config.default_modify_scope()
-    cfg_file = spack.config.config.get_config_filename(scope, "config")
+    cfg_file = spack.config.CONFIG.get_config_filename(scope, "config")
     with open(cfg_file, mode="w") as f:
         f.write(
             """

@@ -627,7 +627,7 @@ def test_config_prefer_upstream(

     output = config("prefer-upstream")
     scope = spack.config.default_modify_scope("packages")
-    cfg_file = spack.config.config.get_config_filename(scope, "packages")
+    cfg_file = spack.config.CONFIG.get_config_filename(scope, "packages")
     packages = syaml.load(open(cfg_file))["packages"]

     # Make sure only the non-default variants are set.
@@ -6,7 +6,6 @@
 import os
 import os.path
 import platform
-import sys

 import pytest

@@ -17,7 +16,7 @@

 debug = SpackCommand("debug")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 @pytest.mark.db

@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import sys
-
 import pytest

 import spack.store

@@ -16,7 +14,7 @@
 deprecate = SpackCommand("deprecate")
 find = SpackCommand("find")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 def test_deprecate(mock_packages, mock_archive, mock_fetch, install_mockery):

@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os
-import sys

 import pytest

@@ -18,7 +17,7 @@
 install = SpackCommand("install")
 env = SpackCommand("env")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 def test_dev_build_basics(tmpdir, mock_packages, install_mockery):

@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
 import shutil
-import sys

 import pytest

@@ -17,7 +16,7 @@
 develop = SpackCommand("develop")
 env = SpackCommand("env")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 @pytest.mark.usefixtures("mutable_mock_env_path", "mock_packages", "mock_fetch", "config")

@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import sys
-
 import pytest

 import spack.cmd.diff

@@ -45,7 +43,7 @@ def test_diff_cmd(install_mockery, mock_fetch, mock_archive, mock_packages):
     assert ["hash", "mpileaks %s" % specB.dag_hash()] in c["b_not_a"]


-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.not_on_windows("Not supported on Windows (yet)")
 def test_load_first(install_mockery, mock_fetch, mock_archive, mock_packages):
     """Test with and without the --first option"""
     install_cmd("mpileaks")

@@ -8,7 +8,6 @@
 import os
 import pathlib
 import shutil
-import sys
 from argparse import Namespace

 import pytest

@@ -41,7 +40,7 @@
 pytestmark = [
     pytest.mark.usefixtures("mutable_mock_env_path", "config", "mutable_mock_repo"),
     pytest.mark.maybeslow,
-    pytest.mark.skipif(sys.platform == "win32", reason="Envs unsupported on Windows"),
+    pytest.mark.not_on_windows("Envs unsupported on Windows"),
 ]

 env = SpackCommand("env")

@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import sys

 import pytest

@@ -23,7 +22,7 @@ def python_database(mock_packages, mutable_database):
     yield


-@pytest.mark.skipif(sys.platform == "win32", reason="All Fetchers Failed")
+@pytest.mark.not_on_windows("All Fetchers Failed")
 @pytest.mark.db
 def test_extensions(mock_packages, python_database, config, capsys):
     ext2 = Spec("py-extension2").concretized()

@@ -212,7 +212,7 @@ def test_find_external_empty_default_manifest_dir(
     external("find")


-@pytest.mark.skipif(sys.platform == "win32", reason="Can't chmod on Windows")
+@pytest.mark.not_on_windows("Can't chmod on Windows")
 @pytest.mark.skipif(getuid() == 0, reason="user is root")
 def test_find_external_manifest_with_bad_permissions(
     mutable_config,

@@ -399,7 +399,7 @@ def test_use_tags_for_detection(command_args, mock_executable, mutable_config, m


 @pytest.mark.regression("38733")
-@pytest.mark.skipif(sys.platform == "win32", reason="the test uses bash scripts")
+@pytest.mark.not_on_windows("the test uses bash scripts")
 def test_failures_in_scanning_do_not_result_in_an_error(
     mock_executable, monkeypatch, mutable_config
 ):

@@ -332,7 +332,7 @@ def test_find_command_basic_usage(database):
     assert "mpileaks" in output


-@pytest.mark.skipif(sys.platform == "win32", reason="environment is not yet supported on windows")
+@pytest.mark.not_on_windows("environment is not yet supported on windows")
 @pytest.mark.regression("9875")
 def test_find_prefix_in_env(
     mutable_mock_env_path, install_mockery, mock_fetch, mock_packages, mock_archive, config

@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import sys

 import pytest

@@ -13,7 +12,7 @@

 gc = spack.main.SpackCommand("gc")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 @pytest.mark.db

@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os
-import sys

 import pytest

@@ -22,7 +21,7 @@
 bootstrap = SpackCommand("bootstrap")
 mirror = SpackCommand("mirror")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 # test gpg command detection

@@ -7,8 +7,8 @@
 import filecmp
 import itertools
 import os
+import pathlib
 import re
-import sys
 import time

 import pytest

@@ -38,8 +38,6 @@
 buildcache = SpackCommand("buildcache")
 find = SpackCommand("find")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-

 @pytest.fixture()
 def noop_install(monkeypatch):
@@ -204,7 +202,7 @@ def test_show_log_on_error(
     assert isinstance(install.error, spack.build_environment.ChildError)
     assert install.error.pkg.name == "build-error"

-    assert "==> Installing build-error" in out
+    assert "Installing build-error" in out
     assert "See build log for details:" in out


@@ -263,9 +261,9 @@ def test_install_commit(mock_git_version_info, install_mockery, mock_packages, m

     """
     repo_path, filename, commits = mock_git_version_info
-    monkeypatch.setattr(
-        spack.package_base.PackageBase, "git", "file://%s" % repo_path, raising=False
-    )
+    file_url = pathlib.Path(repo_path).as_uri()
+
+    monkeypatch.setattr(spack.package_base.PackageBase, "git", file_url, raising=False)

     # Use the earliest commit in the repository
     spec = Spec(f"git-test-commit@{commits[-1]}").concretized()

@@ -548,6 +546,7 @@ def test_cdash_report_concretization_error(
     assert any(x in content for x in expected_messages)


+@pytest.mark.not_on_windows("Windows log_output logs phase header out of order")
 @pytest.mark.disable_clean_stage_check
 def test_cdash_upload_build_error(tmpdir, mock_fetch, install_mockery, capfd):
     # capfd interferes with Spack's capturing

@@ -747,6 +746,7 @@ def test_install_deps_then_package(tmpdir, mock_fetch, install_mockery):
     assert os.path.exists(root.prefix)


+@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
 @pytest.mark.regression("12002")
 def test_install_only_dependencies_in_env(
     tmpdir, mock_fetch, install_mockery, mutable_mock_env_path

@@ -896,7 +896,7 @@ def test_install_help_does_not_show_cdash_options(capsys):
     assert "CDash URL" not in captured.out


-def test_install_help_cdash(capsys):
+def test_install_help_cdash():
     """Make sure `spack install --help-cdash` describes CDash arguments"""
     install_cmd = SpackCommand("install")
     out = install_cmd("--help-cdash")

@@ -913,6 +913,7 @@ def test_cdash_auth_token(tmpdir, mock_fetch, install_mockery, capfd):
     assert "Using CDash auth token from environment" in out


+@pytest.mark.not_on_windows("Windows log_output logs phase header out of order")
 @pytest.mark.disable_clean_stage_check
 def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd):
     # capfd interferes with Spack's capturing of e.g., Build.xml output

@@ -938,6 +939,7 @@ def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd):
     assert "foo: No such file or directory" in content


+@pytest.mark.not_on_windows("ArchSpec gives test platform debian rather than windows")
 def test_compiler_bootstrap(
     install_mockery_mutable_config,
     mock_packages,

@@ -954,6 +956,7 @@ def test_compiler_bootstrap(
     install("a%gcc@=12.0")


+@pytest.mark.not_on_windows("Binary mirrors not supported on windows")
 def test_compiler_bootstrap_from_binary_mirror(
     install_mockery_mutable_config,
     mock_packages,

@@ -994,6 +997,7 @@ def test_compiler_bootstrap_from_binary_mirror(
     install("--no-cache", "--only", "package", "b%gcc@10.2.0")


+@pytest.mark.not_on_windows("ArchSpec gives test platform debian rather than windows")
 @pytest.mark.regression("16221")
 def test_compiler_bootstrap_already_installed(
     install_mockery_mutable_config,

@@ -1037,6 +1041,7 @@ def test_install_fails_no_args_suggests_env_activation(tmpdir):
     assert "using the `spack.yaml` in this directory" in output


+@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
 def test_install_env_with_tests_all(
     tmpdir, mock_packages, mock_fetch, install_mockery, mutable_mock_env_path
 ):

@@ -1048,6 +1053,7 @@ def test_install_env_with_tests_all(
     assert os.path.exists(test_dep.prefix)


+@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
 def test_install_env_with_tests_root(
     tmpdir, mock_packages, mock_fetch, install_mockery, mutable_mock_env_path
 ):

@@ -1059,6 +1065,7 @@ def test_install_env_with_tests_root(
     assert not os.path.exists(test_dep.prefix)


+@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
 def test_install_empty_env(
     tmpdir, mock_packages, mock_fetch, install_mockery, mutable_mock_env_path
 ):

@@ -1072,6 +1079,7 @@ def test_install_empty_env(
     assert "no specs to install" in out


+@pytest.mark.not_on_windows("Windows logger I/O operation on closed file when install fails")
 @pytest.mark.disable_clean_stage_check
 @pytest.mark.parametrize(
     "name,method",

@@ -1095,6 +1103,7 @@ def test_installation_fail_tests(install_mockery, mock_fetch, name, method):
     assert "See test log for details" in output


+@pytest.mark.not_on_windows("Buildcache not supported on windows")
 def test_install_use_buildcache(
     capsys,
     mock_packages,

@@ -1172,6 +1181,7 @@ def install_use_buildcache(opt):
     install_use_buildcache(opt)


+@pytest.mark.not_on_windows("Windows logger I/O operation on closed file when install fails")
 @pytest.mark.regression("34006")
 @pytest.mark.disable_clean_stage_check
 def test_padded_install_runtests_root(install_mockery_mutable_config, mock_fetch):
@@ -5,7 +5,6 @@

 import os.path
 import re
-import sys

 import pytest

@@ -17,7 +16,7 @@

 license = SpackCommand("license")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 def test_list_files():

@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
 import re
-import sys

 import pytest

@@ -17,7 +16,7 @@
 install = SpackCommand("install")
 location = SpackCommand("location")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 def test_manpath_trailing_colon(

@@ -5,7 +5,6 @@

 import os
 import shutil
-import sys

 import pytest

@@ -19,7 +18,7 @@
 # Everything here uses (or can use) the mock config and database.
 pytestmark = [
     pytest.mark.usefixtures("config", "database"),
-    pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows"),
+    pytest.mark.not_on_windows("does not run on windows"),
 ]
 # location prints out "locations of packages and spack directories"
 location = SpackCommand("location")

@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os
-import sys

 import pytest

@@ -23,7 +22,7 @@
 buildcache = SpackCommand("buildcache")
 uninstall = SpackCommand("uninstall")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 @pytest.mark.disable_clean_stage_check

@@ -5,7 +5,6 @@

 import os.path
 import re
-import sys

 import pytest

@@ -16,7 +15,7 @@

 module = spack.main.SpackCommand("module")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 #: make sure module files are generated for all the tests here

@@ -5,7 +5,6 @@

 import re
 import shutil
-import sys

 import pytest

@@ -133,7 +132,7 @@ def test_pkg_add(git, mock_pkg_git_repo):
         pkg("add", "does-not-exist")


-@pytest.mark.skipif(sys.platform == "win32", reason="stdout format conflict")
+@pytest.mark.not_on_windows("stdout format conflict")
 def test_pkg_list(mock_pkg_git_repo, mock_pkg_names):
     out = split(pkg("list", "HEAD^^"))
     assert sorted(mock_pkg_names) == sorted(out)

@@ -149,7 +148,7 @@ def test_pkg_list(mock_pkg_git_repo, mock_pkg_names):
     assert sorted(mock_pkg_names) == sorted(out)


-@pytest.mark.skipif(sys.platform == "win32", reason="stdout format conflict")
+@pytest.mark.not_on_windows("stdout format conflict")
 def test_pkg_diff(mock_pkg_git_repo, mock_pkg_names):
     out = split(pkg("diff", "HEAD^^", "HEAD^"))
     assert out == ["HEAD^:", "pkg-a", "pkg-b", "pkg-c"]

@@ -161,7 +160,7 @@ def test_pkg_diff(mock_pkg_git_repo, mock_pkg_names):
     assert out == ["HEAD^:", "pkg-c", "HEAD:", "pkg-d"]


-@pytest.mark.skipif(sys.platform == "win32", reason="stdout format conflict")
+@pytest.mark.not_on_windows("stdout format conflict")
 def test_pkg_added(mock_pkg_git_repo):
     out = split(pkg("added", "HEAD^^", "HEAD^"))
     assert ["pkg-a", "pkg-b", "pkg-c"] == out

@@ -176,7 +175,7 @@ def test_pkg_added(mock_pkg_git_repo):
     assert out == []


-@pytest.mark.skipif(sys.platform == "win32", reason="stdout format conflict")
+@pytest.mark.not_on_windows("stdout format conflict")
 def test_pkg_removed(mock_pkg_git_repo):
     out = split(pkg("removed", "HEAD^^", "HEAD^"))
     assert out == []

@@ -188,7 +187,7 @@ def test_pkg_removed(mock_pkg_git_repo):
     assert out == ["pkg-c"]


-@pytest.mark.skipif(sys.platform == "win32", reason="stdout format conflict")
+@pytest.mark.not_on_windows("stdout format conflict")
 def test_pkg_changed(mock_pkg_git_repo):
     out = split(pkg("changed", "HEAD^^", "HEAD^"))
     assert out == []
@@ -4,17 +4,13 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
from spack.main import SpackCommand
|
||||
|
||||
providers = SpackCommand("providers")
|
||||
|
||||
pytestmark = pytest.mark.skipif(
|
||||
sys.platform == "win32", reason="Providers not currently supported on Windows"
|
||||
)
|
||||
pytestmark = pytest.mark.not_on_windows("Providers not currently supported on Windows")
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -14,7 +13,7 @@
|
||||
deprecate = SpackCommand("deprecate")
|
||||
reindex = SpackCommand("reindex")
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
|
||||
pytestmark = pytest.mark.not_on_windows("does not run on windows")
|
||||
|
||||
|
||||
def test_reindex_basic(mock_packages, mock_archive, mock_fetch, install_mockery):
|
||||
|
||||
@@ -32,6 +32,7 @@ def test_spec():
|
||||
assert "mpich@3.0.4" in output
|
||||
|
||||
|
||||
@pytest.mark.only_clingo("Known failure of the original concretizer")
|
||||
def test_spec_concretizer_args(mutable_config, mutable_database):
|
||||
"""End-to-end test of CLI concretizer prefs.
|
||||
|
||||
@@ -39,9 +40,6 @@ def test_spec_concretizer_args(mutable_config, mutable_database):
|
||||
options to `solver.py`, and that config options are not
|
||||
lost along the way.
|
||||
"""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.xfail("Known failure of the original concretizer")
|
||||
|
||||
# remove two non-preferred mpileaks installations
|
||||
# so that reuse will pick up the zmpi one
|
||||
uninstall = SpackCommand("uninstall")
|
||||
|
||||
@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import sys

import pytest

@@ -23,7 +22,7 @@
pytestmark = pytest.mark.usefixtures("install_mockery", "mock_packages")


@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
@pytest.mark.not_on_windows("not implemented on windows")
@pytest.mark.disable_clean_stage_check
def test_stage_spec(monkeypatch):
    """Verify that staging specs works."""
@@ -52,7 +51,7 @@ def fake_stage(pkg, mirror_only=False):
        return expected_path


@pytest.mark.skipif(sys.platform == "win32", reason="PermissionError")
@pytest.mark.not_on_windows("PermissionError")
def test_stage_path(check_stage_path):
    """Verify that --path only works with single specs."""
    stage("--path={0}".format(check_stage_path), "trivial-install-test-package")
@@ -64,7 +63,7 @@ def test_stage_path_errors_multiple_specs(check_stage_path):
        stage(f"--path={check_stage_path}", "trivial-install-test-package", "mpileaks")


@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
@pytest.mark.not_on_windows("not implemented on windows")
@pytest.mark.disable_clean_stage_check
def test_stage_with_env_outside_env(mutable_mock_env_path, monkeypatch):
    """Verify that stage concretizes specs not in environment instead of erroring."""
@@ -83,7 +82,7 @@ def fake_stage(pkg, mirror_only=False):
        stage("trivial-install-test-package")


@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
@pytest.mark.not_on_windows("not implemented on windows")
@pytest.mark.disable_clean_stage_check
def test_stage_with_env_inside_env(mutable_mock_env_path, monkeypatch):
    """Verify that stage filters specs in environment instead of reconcretizing."""
@@ -102,7 +101,7 @@ def fake_stage(pkg, mirror_only=False):
        stage("mpileaks")


@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
@pytest.mark.not_on_windows("not implemented on windows")
@pytest.mark.disable_clean_stage_check
def test_stage_full_env(mutable_mock_env_path, monkeypatch):
    """Verify that stage filters specs in environment."""

@@ -5,7 +5,6 @@

import argparse
import os
import sys

import pytest

@@ -22,7 +21,7 @@
install = SpackCommand("install")
spack_test = SpackCommand("test")

pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
pytestmark = pytest.mark.not_on_windows("does not run on windows")


def test_test_package_not_installed(

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import sys

import pytest

@@ -15,7 +14,7 @@
env = SpackCommand("env")
concretize = SpackCommand("concretize")

pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
pytestmark = pytest.mark.not_on_windows("does not run on windows")


def test_undevelop(tmpdir, config, mock_packages, mutable_mock_env_path):

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import sys

import pytest

@@ -206,7 +205,7 @@ def _warn(*args, **kwargs):

# Note: I want to use https://docs.pytest.org/en/7.1.x/how-to/skipping.html#skip-all-test-functions-of-a-class-or-module
# the style formatter insists on separating these two lines.
@pytest.mark.skipif(sys.platform == "win32", reason="Envs unsupported on Windows")
@pytest.mark.not_on_windows("Envs unsupported on Windows")
class TestUninstallFromEnv:
    """Tests an installation with two environments e1 and e2, which each have
    shared package installations:

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os.path
import sys

import pytest

@@ -16,7 +15,7 @@
install = SpackCommand("install")
view = SpackCommand("view")

pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
pytestmark = pytest.mark.not_on_windows("does not run on windows")


def create_projection_file(tmpdir, projection):

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Test basic behavior of compilers in Spack"""
import os
import sys
from copy import copy

import pytest
@@ -192,7 +191,7 @@ def call_compiler(exe, *args, **kwargs):
        return no_flag_output


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
@pytest.mark.parametrize(
    "exe,flagname",
    [
@@ -248,7 +247,7 @@ def test_get_compiler_link_paths_no_verbose_flag():
    assert dirs == []


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
@pytest.mark.enable_compiler_link_paths
def test_get_compiler_link_paths_load_env(working_env, monkeypatch, tmpdir):
    gcc = str(tmpdir.join("gcc"))
@@ -693,7 +692,7 @@ def test_raising_if_compiler_target_is_over_specific(config):
        spack.compilers.get_compilers(cfg, spack.spec.CompilerSpec("gcc@9.0.1"), arch_spec)


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
def test_compiler_get_real_version(working_env, monkeypatch, tmpdir):
    # Test variables
    test_version = "2.2.2"
@@ -796,9 +795,7 @@ def _call(*args, **kwargs):
    assert "SPACK_TEST_CMP_ON" not in os.environ


@pytest.mark.skipif(
    sys.platform == "win32", reason="Bash scripting unsupported on Windows (for now)"
)
@pytest.mark.not_on_windows("Bash scripting unsupported on Windows (for now)")
def test_compiler_flags_use_real_version(working_env, monkeypatch, tmpdir):
    # Create compiler
    gcc = str(tmpdir.join("gcc"))

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Test detection of compiler version"""
import os
import sys

import pytest

@@ -414,7 +413,7 @@ def test_xl_version_detection(version_str, expected_version):
    assert version == expected_version


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
@pytest.mark.parametrize(
    "compiler,version",
    [

@@ -152,7 +152,7 @@ class Root(Package):
    version("1.0", sha256="abcde")
    depends_on("changing")

    conflicts("changing~foo")
    conflicts("^changing~foo")
"""
    packages_dir.join("root", "package.py").write(root_pkg_str, ensure=True)

@@ -353,9 +353,8 @@ def test_concretize_compiler_flag_propagate(self):

        assert spec.satisfies("^openblas cflags='-g'")

    @pytest.mark.skipif(
        os.environ.get("SPACK_TEST_SOLVER") == "original",
        reason="Optional compiler propagation isn't deprecated for original concretizer",
    @pytest.mark.only_clingo(
        "Optional compiler propagation isn't deprecated for original concretizer"
    )
    def test_concretize_compiler_flag_does_not_propagate(self):
        spec = Spec("hypre cflags='-g' ^openblas")
@@ -363,9 +362,8 @@ def test_concretize_compiler_flag_does_not_propagate(self):

        assert not spec.satisfies("^openblas cflags='-g'")

    @pytest.mark.skipif(
        os.environ.get("SPACK_TEST_SOLVER") == "original",
        reason="Optional compiler propagation isn't deprecated for original concretizer",
    @pytest.mark.only_clingo(
        "Optional compiler propagation isn't deprecated for original concretizer"
    )
    def test_concretize_propagate_compiler_flag_not_passed_to_dependent(self):
        spec = Spec("hypre cflags=='-g' ^openblas cflags='-O3'")
@@ -394,14 +392,12 @@ def test_architecture_inheritance(self):
        spec.concretize()
        assert spec["cmake"].architecture == spec.architecture

    @pytest.mark.only_clingo("Fixing the parser broke this test for the original concretizer")
    def test_architecture_deep_inheritance(self, mock_targets):
        """Make sure that indirect dependencies receive architecture
        information from the root even when partial architecture information
        is provided by an intermediate dependency.
        """
        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Fixing the parser broke this test for the original concretizer.")

        spec_str = "mpileaks %gcc@4.5.0 os=CNL target=nocona" " ^dyninst os=CNL ^callpath os=CNL"
        spec = Spec(spec_str).concretized()
        for s in spec.traverse(root=False):
@@ -457,9 +453,8 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self):
        with pytest.raises(spack.error.SpackError):
            s.concretize()

    @pytest.mark.skipif(
        os.environ.get("SPACK_TEST_SOLVER") == "original",
        reason="Optional compiler propagation isn't deprecated for original concretizer",
    @pytest.mark.only_clingo(
        "Optional compiler propagation isn't deprecated for original concretizer"
    )
    def test_concretize_propagate_disabled_variant(self):
        """Test a package variant value was passed from its parent."""
@@ -619,10 +614,8 @@ def test_conflicts_in_spec(self, conflict_spec):
        with pytest.raises(spack.error.SpackError):
            s.concretize()

    @pytest.mark.only_clingo("Testing debug statements specific to new concretizer")
    def test_conflicts_show_cores(self, conflict_spec, monkeypatch):
        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Testing debug statements specific to new concretizer")

        s = Spec(conflict_spec)
        with pytest.raises(spack.error.SpackError) as e:
            s.concretize()
@@ -742,7 +735,7 @@ def test_noversion_pkg(self, spec):
        with pytest.raises(spack.error.SpackError):
            Spec(spec).concretized()

    @pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
    @pytest.mark.not_on_windows("Not supported on Windows (yet)")
    # Include targets to prevent regression on 20537
    @pytest.mark.parametrize(
        "spec, best_achievable",
@@ -795,10 +788,8 @@ def test_concretize_anonymous_dep(self, spec_str):
            ("bowtie@1.2.2 os=redhat6", "%gcc@11.1.0"),
        ],
    )
    @pytest.mark.only_clingo("Original concretizer cannot work around conflicts")
    def test_compiler_conflicts_in_package_py(self, spec_str, expected_str):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Original concretizer cannot work around conflicts")

        s = Spec(spec_str).concretized()
        assert s.satisfies(expected_str)

@@ -902,10 +893,8 @@ def test_patching_dependencies(self, spec_str, patched_deps):
            ("quantum-espresso~veritas", ["^libelf@0.8.13"]),
        ],
    )
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_working_around_conflicting_defaults(self, spec_str, expected):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Known failure of the original concretizer")

        s = Spec(spec_str).concretized()

        assert s.concrete
@@ -917,10 +906,8 @@ def test_working_around_conflicting_defaults(self, spec_str, expected):
        "spec_str,expected",
        [("cmake", ["%clang"]), ("cmake %gcc", ["%gcc"]), ("cmake %clang", ["%clang"])],
    )
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_external_package_and_compiler_preferences(self, spec_str, expected):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Known failure of the original concretizer")

        packages_yaml = {
            "all": {"compiler": ["clang", "gcc"]},
            "cmake": {
@@ -936,14 +923,12 @@ def test_external_package_and_compiler_preferences(self, spec_str, expected):
            assert s.satisfies(condition)

    @pytest.mark.regression("5651")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_package_with_constraint_not_met_by_external(self):
        """Check that if we have an external package A at version X.Y in
        packages.yaml, but our spec doesn't allow X.Y as a version, then
        a new version of A is built that meets the requirements.
        """
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Known failure of the original concretizer")

        packages_yaml = {"libelf": {"externals": [{"spec": "libelf@0.8.13", "prefix": "/usr"}]}}
        spack.config.set("packages", packages_yaml)

@@ -953,10 +938,8 @@ def test_package_with_constraint_not_met_by_external(self):
        assert not s["libelf"].external

    @pytest.mark.regression("9744")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_cumulative_version_ranges_with_different_length(self):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Known failure of the original concretizer")

        s = Spec("cumulative-vrange-root").concretized()
        assert s.concrete
        assert s.satisfies("^cumulative-vrange-bottom@2.2")
@@ -983,10 +966,8 @@ def test_dependency_conditional_on_another_dependency_state(self):
    @pytest.mark.parametrize(
        "spec_str,expected", [("cmake %gcc", "%gcc"), ("cmake %clang", "%clang")]
    )
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_compiler_constraint_with_external_package(self, spec_str, expected):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Known failure of the original concretizer")

        packages_yaml = {
            "cmake": {"externals": [{"spec": "cmake@3.4.3", "prefix": "/usr"}], "buildable": False}
        }
@@ -1035,10 +1016,8 @@ def test_compiler_in_nonbuildable_external_package(
        assert s.satisfies(expected)
        assert "external-common-perl" not in [d.name for d in s.dependencies()]

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_external_packages_have_consistent_hash(self):
        if spack.config.get("config:concretizer") == "original":
            pytest.skip("This tests needs the ASP-based concretizer")

        s, t = Spec("externaltool"), Spec("externaltool")
        s._old_concretize(), t._new_concretize()

@@ -1062,10 +1041,8 @@ def test_transitive_conditional_virtual_dependency(self):
        assert "externalvirtual" in s

    @pytest.mark.regression("20040")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_conditional_provides_or_depends_on(self):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Known failure of the original concretizer")

        # Check that we can concretize correctly a spec that can either
        # provide a virtual or depend on it based on the value of a variant
        s = Spec("conditional-provider +disable-v1").concretized()
@@ -1103,10 +1080,8 @@ def test_activating_test_dependencies(self, spec_str, tests_arg, with_dep, witho
            assert not node.dependencies(deptype="test"), msg.format(pkg_name)

    @pytest.mark.regression("20019")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_compiler_match_is_preferred_to_newer_version(self):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Known failure of the original concretizer")

        # This spec depends on openblas. Openblas has a conflict
        # that doesn't allow newer versions with gcc@4.4.0. Check
        # that an old version of openblas is selected, rather than
@@ -1122,9 +1097,8 @@ def test_target_ranges_in_conflicts(self):
        with pytest.raises(spack.error.SpackError):
            Spec("impossible-concretization").concretized()

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_target_compatibility(self):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Known failure of the original concretizer")
        with pytest.raises(spack.error.SpackError):
            Spec("libdwarf target=x86_64 ^libelf target=x86_64_v2").concretized()

@@ -1140,10 +1114,8 @@ def test_variant_not_default(self):
        assert "+foo+bar+baz" in d

    @pytest.mark.regression("20055")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_custom_compiler_version(self):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Known failure of the original concretizer")

        s = Spec("a %gcc@10foo os=redhat6").concretized()
        assert "%gcc@10foo" in s

@@ -1240,12 +1212,10 @@ def mock_fn(*args, **kwargs):
            {"add_variant": True, "delete_variant": True},
        ],
    )
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_reuse_installed_packages_when_package_def_changes(
        self, context, mutable_database, repo_with_changing_recipe
    ):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Known failure of the original concretizer")

        # Install a spec
        root = Spec("root").concretized()
        dependency = root["changing"].copy()
@@ -1269,10 +1239,8 @@ def test_reuse_installed_packages_when_package_def_changes(
        # Structure and package hash will be different without reuse
        assert root.dag_hash() != new_root_without_reuse.dag_hash()

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_reuse_with_flags(self, mutable_database, mutable_config):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Original concretizer does not reuse")

        spack.config.set("concretizer:reuse", True)
        spec = Spec("a cflags=-g cxxflags=-g").concretized()
        spack.store.STORE.db.add(spec, None)
@@ -1292,10 +1260,8 @@ def test_concretization_of_test_dependencies(self):
    @pytest.mark.parametrize(
        "spec_str", ["wrong-variant-in-conflicts", "wrong-variant-in-depends-on"]
    )
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_error_message_for_inconsistent_variants(self, spec_str):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Known failure of the original concretizer")

        s = Spec(spec_str)
        with pytest.raises(RuntimeError, match="not found in package"):
            s.concretize()
@@ -1392,12 +1358,9 @@ def test_multivalued_variants_from_cli(self, spec_str, expected_dict):
            ("deprecated-versions@1.1.0", ["deprecated-versions@1.1.0"]),
        ],
    )
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_deprecated_versions_not_selected(self, spec_str, expected):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Known failure of the original concretizer")

        s = Spec(spec_str).concretized()

        for abstract_spec in expected:
            assert abstract_spec in s

@@ -1456,12 +1419,10 @@ def test_non_default_provider_of_multiple_virtuals(self):
        "spec_str,expect_installed",
        [("mpich", True), ("mpich+debug", False), ("mpich~debug", True)],
    )
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_concrete_specs_are_not_modified_on_reuse(
        self, mutable_database, spec_str, expect_installed, config
    ):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Original concretizer cannot reuse specs")

        # Test the internal consistency of solve + DAG reconstruction
        # when reused specs are added to the mix. This prevents things
        # like additional constraints being added to concrete specs in
@@ -1472,10 +1433,8 @@ def test_concrete_specs_are_not_modified_on_reuse(
        assert s.satisfies(spec_str)

    @pytest.mark.regression("26721,19736")
    @pytest.mark.only_clingo("Original concretizer cannot use sticky variants")
    def test_sticky_variant_in_package(self):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Original concretizer cannot use sticky variants")

        # Here we test that a sticky variant cannot be changed from its default value
        # by the ASP solver if not set explicitly. The package used in the test needs
        # to have +allow-gcc set to be concretized with %gcc and clingo is not allowed
@@ -1489,10 +1448,8 @@ def test_sticky_variant_in_package(self):
        s = Spec("sticky-variant %clang").concretized()
        assert s.satisfies("%clang") and s.satisfies("~allow-gcc")

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_do_not_invent_new_concrete_versions_unless_necessary(self):
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Original concretizer doesn't resolve concrete versions to known ones")

        # ensure we select a known satisfying version rather than creating
        # a new '2.7' version.
        assert ver("=2.7.11") == Spec("python@2.7").concretized().version
@@ -1513,30 +1470,24 @@ def test_do_not_invent_new_concrete_versions_unless_necessary(self):
            ("conditional-values-in-variant foo=foo", True),
        ],
    )
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_conditional_values_in_variants(self, spec_str, valid):
        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Original concretizer doesn't resolve conditional values in variants")

        s = Spec(spec_str)
        raises = pytest.raises((RuntimeError, spack.error.UnsatisfiableSpecError))
        with llnl.util.lang.nullcontext() if valid else raises:
            s.concretize()

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_conditional_values_in_conditional_variant(self):
        """Test that conditional variants play well with conditional possible values"""
        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Original concretizer doesn't resolve conditional values in variants")

        s = Spec("conditional-values-in-variant@1.50.0").concretized()
        assert "cxxstd" not in s.variants

        s = Spec("conditional-values-in-variant@1.60.0").concretized()
        assert "cxxstd" in s.variants

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_target_granularity(self):
        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Original concretizer cannot account for target granularity")

        # The test architecture uses core2 as the default target. Check that when
        # we configure Spack for "generic" granularity we concretize for x86_64
        default_target = spack.platforms.test.Test.default
@@ -1546,10 +1497,8 @@ def test_target_granularity(self):
        with spack.config.override("concretizer:targets", {"granularity": "generic"}):
            assert s.concretized().satisfies("target=%s" % generic_target)

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_host_compatible_concretization(self):
        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Original concretizer cannot account for host compatibility")

        # Check that after setting "host_compatible" to false we cannot concretize.
        # Here we use "k10" to set a target non-compatible with the current host
        # to avoid a lot of boilerplate when mocking the test platform. The issue
@@ -1561,10 +1510,8 @@ def test_host_compatible_concretization(self):
        with pytest.raises(spack.error.SpackError):
            s.concretized()

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_add_microarchitectures_on_explicit_request(self):
        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Original concretizer cannot account for host compatibility")

        # Check that if we consider only "generic" targets, we can still solve for
        # specific microarchitectures on explicit requests
        with spack.config.override("concretizer:targets", {"granularity": "generic"}):
@@ -1572,13 +1519,11 @@ def test_add_microarchitectures_on_explicit_request(self):
        assert s.satisfies("target=k10")

    @pytest.mark.regression("29201")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_delete_version_and_reuse(self, mutable_database, repo_with_changing_recipe):
        """Test that we can reuse installed specs with versions not
        declared in package.py
        """
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Known failure of the original concretizer")

        root = Spec("root").concretized()
        root.package.do_install(fake=True, explicit=True)
        repo_with_changing_recipe.change({"delete_version": True})
@@ -1589,17 +1534,15 @@ def test_delete_version_and_reuse(self, mutable_database, repo_with_changing_rec
        assert root.dag_hash() == new_root.dag_hash()

    @pytest.mark.regression("29201")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_installed_version_is_selected_only_for_reuse(
        self, mutable_database, repo_with_changing_recipe
    ):
        """Test that a version coming from an installed spec is a possible
        version only for reuse
        """
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Known failure of the original concretizer")

        # Install a dependency that cannot be reused with "root"
        # because of a conflict a variant, then delete its version
        # because of a conflict in a variant, then delete its version
        dependency = Spec("changing@1.0~foo").concretized()
        dependency.package.do_install(fake=True, explicit=True)
        repo_with_changing_recipe.change({"delete_version": True})
@@ -1653,12 +1596,10 @@ def test_reuse_with_unknown_package_dont_raise(self, tmpdir, monkeypatch):
            (["mpi", "mpich"], 1),
        ],
    )
    @pytest.mark.only_clingo("Original concretizer cannot concretize in rounds")
    def test_best_effort_coconcretize(self, specs, expected):
        import spack.solver.asp

        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Original concretizer cannot concretize in rounds")

        specs = [Spec(s) for s in specs]
        solver = spack.solver.asp.Solver()
        solver.reuse = False
@@ -1699,13 +1640,11 @@ def test_best_effort_coconcretize(self, specs, expected):
            (["hdf5+mpi", "zmpi", "mpich"], "mpich", 2),
        ],
    )
    @pytest.mark.only_clingo("Original concretizer cannot concretize in rounds")
    def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occurances):
        """Test package preferences during coconcretization."""
        import spack.solver.asp

        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Original concretizer cannot concretize in rounds")

        specs = [Spec(s) for s in specs]
        solver = spack.solver.asp.Solver()
        solver.reuse = False
@@ -1719,20 +1658,16 @@ def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occura
                counter += 1
        assert counter == occurances, concrete_specs

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_coconcretize_reuse_and_virtuals(self):
        import spack.solver.asp

        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Original concretizer cannot reuse")

        reusable_specs = []
        for s in ["mpileaks ^mpich", "zmpi"]:
            reusable_specs.extend(Spec(s).concretized().traverse(root=True))

        root_specs = [Spec("mpileaks"), Spec("zmpi")]

        import spack.solver.asp

        with spack.config.override("concretizer:reuse", True):
            solver = spack.solver.asp.Solver()
            setup = spack.solver.asp.SpackSolverSetup()
@@ -1742,15 +1677,13 @@ def test_coconcretize_reuse_and_virtuals(self):
            assert "zmpi" in spec

    @pytest.mark.regression("30864")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_misleading_error_message_on_version(self, mutable_database):
        # For this bug to be triggered we need a reusable dependency
        # that is not optimal in terms of optimization scores.
        # We pick an old version of "b"
        import spack.solver.asp

        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Original concretizer cannot reuse")

        reusable_specs = [Spec("non-existing-conditional-dep@1.0").concretized()]
        root_spec = Spec("non-existing-conditional-dep@2.0")

@@ -1763,13 +1696,11 @@ def test_misleading_error_message_on_version(self, mutable_database):
            solver.driver.solve(setup, [root_spec], reuse=reusable_specs)

    @pytest.mark.regression("31148")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_version_weight_and_provenance(self):
        """Test package preferences during coconcretization."""
        import spack.solver.asp

        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Original concretizer cannot reuse")

        reusable_specs = [Spec(spec_str).concretized() for spec_str in ("b@0.9", "b@1.0")]
        root_spec = Spec("a foobar=bar")

@@ -1799,12 +1730,10 @@ def test_version_weight_and_provenance(self):
        assert result_spec.satisfies("^b@1.0")

    @pytest.mark.regression("31169")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_not_reusing_incompatible_os_or_compiler(self):
        import spack.solver.asp

        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Original concretizer cannot reuse")

        root_spec = Spec("b")
        s = root_spec.concretized()
        wrong_compiler, wrong_os = s.copy(), s.copy()
@@ -1826,9 +1755,8 @@ def test_git_hash_assigned_version_is_preferred(self):
        assert hash in str(c)

    @pytest.mark.parametrize("git_ref", ("a" * 40, "0.2.15", "main"))
    @pytest.mark.only_clingo("Original concretizer cannot account for git hashes")
    def test_git_ref_version_is_equivalent_to_specified_version(self, git_ref):
        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Original concretizer cannot account for git hashes")
        s = Spec("develop-branch-version@git.%s=develop" % git_ref)
        c = s.concretized()
        assert git_ref in str(c)
@@ -1837,21 +1765,17 @@ def test_git_ref_version_is_equivalent_to_specified_version(self, git_ref):
        assert s.satisfies("@0.1:")

    @pytest.mark.parametrize("git_ref", ("a" * 40, "0.2.15", "fbranch"))
    @pytest.mark.only_clingo("Original concretizer cannot account for git hashes")
    def test_git_ref_version_succeeds_with_unknown_version(self, git_ref):
        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Original concretizer cannot account for git hashes")
        # main is not defined in the package.py for this file
        s = Spec("develop-branch-version@git.%s=main" % git_ref)
        s.concretize()
        assert s.satisfies("develop-branch-version@main")

    @pytest.mark.regression("31484")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_installed_externals_are_reused(self, mutable_database, repo_with_changing_recipe):
        """Test that external specs that are in the DB can be reused."""
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Use case not supported by the original concretizer")

        # Configuration to be added to packages.yaml
        external_conf = {
            "changing": {
                "buildable": False,
@@ -1879,12 +1803,9 @@ def test_installed_externals_are_reused(self, mutable_database, repo_with_changi
        assert external3.dag_hash() == external1.dag_hash()

    @pytest.mark.regression("31484")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_user_can_select_externals_with_require(self, mutable_database):
        """Test that users have means to select an external even in presence of reusable specs."""
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Use case not supported by the original concretizer")

        # Configuration to be added to packages.yaml
        external_conf = {
            "mpi": {"buildable": False},
            "multi-provider-mpi": {
@@ -1911,13 +1832,11 @@ def test_user_can_select_externals_with_require(self, mutable_database):
        assert mpi_spec.name == "multi-provider-mpi"

    @pytest.mark.regression("31484")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_installed_specs_disregard_conflicts(self, mutable_database, monkeypatch):
        """Test that installed specs do not trigger conflicts. This covers for the rare case
        where a conflict is added on a package after a spec matching the conflict was installed.
        """
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Use case not supported by the original concretizer")

        # Add a conflict to "mpich" that match an already installed "mpich~debug"
        pkg_cls = spack.repo.PATH.get_pkg_class("mpich")
        monkeypatch.setitem(pkg_cls.conflicts, "~debug", [(Spec(), None)])
@@ -1933,11 +1852,9 @@ def test_installed_specs_disregard_conflicts(self, mutable_database, monkeypatch
        assert s.satisfies("~debug")

    @pytest.mark.regression("32471")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_require_targets_are_allowed(self, mutable_database):
        """Test that users can set target constraints under the require attribute."""
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Use case not supported by the original concretizer")

        # Configuration to be added to packages.yaml
        external_conf = {"all": {"require": "target=%s" % spack.platforms.test.Test.front_end}}
        spack.config.set("packages", external_conf)
@@ -2125,7 +2042,7 @@ def test_compiler_match_constraints_when_selected(self):
        assert s.compiler.version == ver("=11.1.0"), s

    @pytest.mark.regression("36339")
    @pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows")
    @pytest.mark.not_on_windows("Not supported on Windows")
    def test_compiler_with_custom_non_numeric_version(self, mock_executable):
        """Test that, when a compiler has a completely made up version, we can use its
        'real version' to detect targets and don't raise during concretization.
@@ -2181,3 +2098,98 @@ def test_virtuals_are_annotated_on_edges(self, spec_str, default_mock_concretiza
        assert len(edges) == 1 and edges[0].virtuals == ("mpi",)
        edges = spec.edges_to_dependencies(name="callpath")
        assert len(edges) == 1 and edges[0].virtuals == ()


@pytest.fixture()
def duplicates_test_repository():
    builder_test_path = os.path.join(spack.paths.repos_path, "duplicates.test")
    with spack.repo.use_repositories(builder_test_path) as mock_repo:
        yield mock_repo


@pytest.mark.usefixtures("mutable_config", "duplicates_test_repository")
class TestConcretizeSeparately:
    @pytest.mark.parametrize("strategy", ["minimal", "full"])
    @pytest.mark.skipif(
        os.environ.get("SPACK_TEST_SOLVER") == "original",
        reason="Not supported by the original concretizer",
    )
    def test_two_gmake(self, strategy):
        """Tests that we can concretize a spec with nodes using the same build
        dependency pinned at different versions.

        o hdf5@1.0
        |\
        o | pinned-gmake@1.0
        o | gmake@3.0
         /
        o gmake@4.1

        """
        spack.config.CONFIG.set("concretizer:duplicates:strategy", strategy)
        s = Spec("hdf5").concretized()

        # Check that hdf5 depends on gmake@=4.1
        hdf5_gmake = s["hdf5"].dependencies(name="gmake", deptype="build")
        assert len(hdf5_gmake) == 1 and hdf5_gmake[0].satisfies("@=4.1")

        # Check that pinned-gmake depends on gmake@=3.0
        pinned_gmake = s["pinned-gmake"].dependencies(name="gmake", deptype="build")
        assert len(pinned_gmake) == 1 and pinned_gmake[0].satisfies("@=3.0")

    @pytest.mark.parametrize("strategy", ["minimal", "full"])
    @pytest.mark.skipif(
        os.environ.get("SPACK_TEST_SOLVER") == "original",
        reason="Not supported by the original concretizer",
    )
    def test_two_setuptools(self, strategy):
        """Tests that we can concretize separate build dependencies, when we are dealing
        with extensions.

        o py-shapely@1.25.0
        |\
        | |\
        | o | py-setuptools@60
        |/ /
        | o py-numpy@1.25.0
        |/|
        | |\
        | o | py-setuptools@59
        |/ /
        o | python@3.11.2
        o | gmake@3.0
         /
        o gmake@4.1

        """
        spack.config.CONFIG.set("concretizer:duplicates:strategy", strategy)
        s = Spec("py-shapely").concretized()
        # Requirements on py-shapely
        setuptools = s["py-shapely"].dependencies(name="py-setuptools", deptype="build")
        assert len(setuptools) == 1 and setuptools[0].satisfies("@=60")

        # Requirements on py-numpy
        setuptools = s["py-numpy"].dependencies(name="py-setuptools", deptype="build")
        assert len(setuptools) == 1 and setuptools[0].satisfies("@=59")
        gmake = s["py-numpy"].dependencies(name="gmake", deptype="build")
        assert len(gmake) == 1 and gmake[0].satisfies("@=4.1")

        # Requirements on python
        gmake = s["python"].dependencies(name="gmake", deptype="build")
        assert len(gmake) == 1 and gmake[0].satisfies("@=3.0")

    @pytest.mark.skipif(
        os.environ.get("SPACK_TEST_SOLVER") == "original",
        reason="Not supported by the original concretizer",
    )
    def test_solution_without_cycles(self):
        """Tests that when we concretize a spec with cycles, a fallback kicks in to recompute
        a solution without cycles.
        """
        s = Spec("cycle-a").concretized()
        assert s["cycle-a"].satisfies("+cycle")
        assert s["cycle-b"].satisfies("~cycle")

        s = Spec("cycle-b").concretized()
        assert s["cycle-a"].satisfies("~cycle")
        assert s["cycle-b"].satisfies("+cycle")

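The new `TestConcretizeSeparately` cases above toggle the duplicates strategy programmatically via `spack.config.CONFIG.set`. A minimal usage sketch of the same knob through the override pattern used elsewhere in these tests (package names come from the mock `duplicates.test` repository, so this only runs in that test environment):

```python
import spack.config
from spack.spec import Spec

# Concretize with each strategy: "minimal" duplicates only the build tools
# that need it, while "full" keeps build dependencies separate throughout.
for strategy in ("minimal", "full"):
    with spack.config.override("concretizer:duplicates:strategy", strategy):
        s = Spec("hdf5").concretized()
        print(strategy, s["hdf5"].dependencies(name="gmake", deptype="build"))
```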
@@ -117,13 +117,9 @@ def test_preferred_compilers(self, compiler_str, spec_str):
        # etc.
        assert spec.compiler.satisfies(CompilerSpec(compiler_str))

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_preferred_target(self, mutable_mock_repo):
        """Test preferred targets are applied correctly"""
        # FIXME: This test was a false negative, since the default and
        # FIXME: the preferred target were the same
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("Known bug in the original concretizer")

        spec = concretize("mpich")
        default = str(spec.target)
        preferred = str(spec.target.family)
@@ -151,9 +147,8 @@ def test_preferred_versions(self):
        spec = concretize("mpileaks")
        assert spec.version == Version("2.2")

    @pytest.mark.only_clingo("This behavior is not enforced for the old concretizer")
    def test_preferred_versions_mixed_version_types(self):
        if spack.config.get("config:concretizer") == "original":
            pytest.skip("This behavior is not enforced for the old concretizer")
        update_packages("mixedversions", "version", ["=2.0"])
        spec = concretize("mixedversions")
        assert spec.version == Version("2.0")
@@ -230,24 +225,20 @@ def test_preferred(self):
        spec.concretize()
        assert spec.version == Version("3.5.0")

    @pytest.mark.only_clingo("This behavior is not enforced for the old concretizer")
    def test_preferred_undefined_raises(self):
        """Preference should not specify an undefined version"""
        if spack.config.get("config:concretizer") == "original":
            pytest.xfail("This behavior is not enforced for the old concretizer")

        update_packages("python", "version", ["3.5.0.1"])
        spec = Spec("python")
        with pytest.raises(spack.config.ConfigError):
            spec.concretize()

    @pytest.mark.only_clingo("This behavior is not enforced for the old concretizer")
    def test_preferred_truncated(self):
        """Versions without "=" are treated as version ranges: if there is
        a satisfying version defined in the package.py, we should use that
        (don't define a new version).
        """
        if spack.config.get("config:concretizer") == "original":
            pytest.skip("This behavior is not enforced for the old concretizer")

        update_packages("python", "version", ["3.5"])
        spec = Spec("python")
        spec.concretize()

@@ -3,7 +3,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -14,11 +13,14 @@
|
||||
import spack.repo
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.version
|
||||
from spack.solver.asp import UnsatisfiableSpecError
|
||||
from spack.solver.asp import InternalConcretizerError, UnsatisfiableSpecError
|
||||
from spack.spec import Spec
|
||||
from spack.util.url import path_to_file_url
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Windows uses old concretizer")
|
||||
pytestmark = [
|
||||
pytest.mark.not_on_windows("Windows uses old concretizer"),
|
||||
pytest.mark.only_clingo("Original concretizer does not support configuration requirements"),
|
||||
]
|
||||
|
||||
|
||||
def update_packages_config(conf_str):
|
||||
@@ -134,9 +136,6 @@ def fake_installs(monkeypatch, tmpdir):
|
||||
|
||||
|
||||
def test_one_package_multiple_reqs(concretize_scope, test_repo):
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
conf_str = """\
|
||||
packages:
|
||||
y:
|
||||
@@ -153,9 +152,6 @@ def test_requirement_isnt_optional(concretize_scope, test_repo):
|
||||
"""If a user spec requests something that directly conflicts
|
||||
with a requirement, make sure we get an error.
|
||||
"""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
conf_str = """\
|
||||
packages:
|
||||
x:
|
||||
@@ -173,9 +169,6 @@ def test_require_undefined_version(concretize_scope, test_repo):
|
||||
(it is assumed this is a typo, and raising the error here
|
||||
avoids a likely error when Spack attempts to fetch the version).
|
||||
"""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
conf_str = """\
|
||||
packages:
|
||||
x:
|
||||
@@ -192,9 +185,6 @@ def test_require_truncated(concretize_scope, test_repo):
|
||||
of the defined versions (vs. allowing the requirement to
|
||||
define a new version).
|
||||
"""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
conf_str = """\
|
||||
packages:
|
||||
x:
|
||||
@@ -256,9 +246,6 @@ def test_git_user_supplied_reference_satisfaction(
|
||||
def test_requirement_adds_new_version(
|
||||
concretize_scope, test_repo, mock_git_version_info, monkeypatch
|
||||
):
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
repo_path, filename, commits = mock_git_version_info
|
||||
monkeypatch.setattr(
|
||||
spack.package_base.PackageBase, "git", path_to_file_url(repo_path), raising=False
|
||||
@@ -289,9 +276,6 @@ def test_requirement_adds_version_satisfies(
|
||||
depends_on condition and make sure it is triggered (i.e. the
|
||||
dependency is added).
|
||||
"""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration" " requirements")
|
||||
|
||||
repo_path, filename, commits = mock_git_version_info
|
||||
monkeypatch.setattr(
|
||||
spack.package_base.PackageBase, "git", path_to_file_url(repo_path), raising=False
|
||||
@@ -318,9 +302,6 @@ def test_requirement_adds_version_satisfies(
|
||||
def test_requirement_adds_git_hash_version(
|
||||
concretize_scope, test_repo, mock_git_version_info, monkeypatch
|
||||
):
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
repo_path, filename, commits = mock_git_version_info
|
||||
monkeypatch.setattr(
|
||||
spack.package_base.PackageBase, "git", path_to_file_url(repo_path), raising=False
|
||||
@@ -342,9 +323,6 @@ def test_requirement_adds_git_hash_version(
|
||||
def test_requirement_adds_multiple_new_versions(
|
||||
concretize_scope, test_repo, mock_git_version_info, monkeypatch
|
||||
):
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
repo_path, filename, commits = mock_git_version_info
|
||||
monkeypatch.setattr(
|
||||
spack.package_base.PackageBase, "git", path_to_file_url(repo_path), raising=False
|
||||
@@ -370,9 +348,6 @@ def test_preference_adds_new_version(
|
||||
"""Normally a preference cannot define a new version, but that constraint
|
||||
is ignored if the version is a Git hash-based version.
|
||||
"""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not enforce this constraint for preferences")
|
||||
|
||||
repo_path, filename, commits = mock_git_version_info
|
||||
monkeypatch.setattr(
|
||||
spack.package_base.PackageBase, "git", path_to_file_url(repo_path), raising=False
|
||||
@@ -398,9 +373,6 @@ def test_external_adds_new_version_that_is_preferred(concretize_scope, test_repo
|
||||
"""Test that we can use a version, not declared in package recipe, as the
|
||||
preferred version if that version appears in an external spec.
|
||||
"""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not enforce this constraint for preferences")
|
||||
|
||||
conf_str = """\
|
||||
packages:
|
||||
y:
|
||||
@@ -421,9 +393,6 @@ def test_requirement_is_successfully_applied(concretize_scope, test_repo):
|
||||
"""If a simple requirement can be satisfied, make sure the
|
||||
concretization succeeds and the requirement spec is applied.
|
||||
"""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
s1 = Spec("x").concretized()
|
||||
# Without any requirements/preferences, the later version is preferred
|
||||
assert s1.satisfies("@1.1")
|
||||
@@ -443,9 +412,6 @@ def test_multiple_packages_requirements_are_respected(concretize_scope, test_rep
|
||||
"""Apply requirements to two packages; make sure the concretization
|
||||
succeeds and both requirements are respected.
|
||||
"""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
conf_str = """\
|
||||
packages:
|
||||
x:
|
||||
@@ -463,9 +429,6 @@ def test_oneof(concretize_scope, test_repo):
|
||||
"""'one_of' allows forcing the concretizer to satisfy one of
|
||||
the specs in the group (but not all have to be satisfied).
|
||||
"""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
conf_str = """\
|
||||
packages:
|
||||
y:
|
||||
@@ -483,9 +446,6 @@ def test_one_package_multiple_oneof_groups(concretize_scope, test_repo):
|
||||
"""One package has two 'one_of' groups; check that both are
|
||||
applied.
|
||||
"""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
conf_str = """\
|
||||
packages:
|
||||
y:
|
||||
@@ -505,9 +465,6 @@ def test_one_package_multiple_oneof_groups(concretize_scope, test_repo):
|
||||
@pytest.mark.regression("34241")
|
||||
def test_require_cflags(concretize_scope, test_repo):
|
||||
"""Ensures that flags can be required from configuration."""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration" " requirements")
|
||||
|
||||
conf_str = """\
|
||||
packages:
|
||||
y:
|
||||
@@ -523,9 +480,6 @@ def test_requirements_for_package_that_is_not_needed(concretize_scope, test_repo
|
||||
a dependency of a concretized spec (in other words, none of
|
||||
the requirements are used for the requested spec).
|
||||
"""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
# Note that the exact contents aren't important since this isn't
|
||||
# intended to be used, but the important thing is that a number of
|
||||
# packages have requirements applied
|
||||
@@ -549,9 +503,6 @@ def test_oneof_ordering(concretize_scope, test_repo):
|
||||
This priority should override default priority (e.g. choosing
|
||||
later versions).
|
||||
"""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
conf_str = """\
|
||||
packages:
|
||||
y:
|
||||
@@ -568,9 +519,6 @@ def test_oneof_ordering(concretize_scope, test_repo):
|
||||
|
||||
|
||||
def test_reuse_oneof(concretize_scope, create_test_repo, mutable_database, fake_installs):
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
conf_str = """\
|
||||
packages:
|
||||
y:
|
||||
@@ -591,9 +539,6 @@ def test_reuse_oneof(concretize_scope, create_test_repo, mutable_database, fake_
|
||||
|
||||
def test_requirements_are_higher_priority_than_deprecation(concretize_scope, test_repo):
|
||||
"""Test that users can override a deprecated version with a requirement."""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
# @2.3 is a deprecated versions. Ensure that any_of picks both constraints,
|
||||
# since they are possible
|
||||
conf_str = """\
|
||||
@@ -612,9 +557,6 @@ def test_requirements_are_higher_priority_than_deprecation(concretize_scope, tes
|
||||
@pytest.mark.parametrize("spec_str,requirement_str", [("x", "%gcc"), ("x", "%clang")])
|
||||
def test_default_requirements_with_all(spec_str, requirement_str, concretize_scope, test_repo):
|
||||
"""Test that default requirements are applied to all packages."""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
|
||||
conf_str = """\
|
||||
packages:
|
||||
all:
|
||||
@@ -640,8 +582,6 @@ def test_default_and_package_specific_requirements(
|
||||
concretize_scope, requirements, expectations, test_repo
|
||||
):
|
||||
"""Test that specific package requirements override default package requirements."""
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
generic_req, specific_req = requirements
|
||||
generic_exp, specific_exp = expectations
|
||||
conf_str = """\
|
||||
@@ -663,8 +603,6 @@ def test_default_and_package_specific_requirements(
|
||||
|
||||
@pytest.mark.parametrize("mpi_requirement", ["mpich", "mpich2", "zmpi"])
|
||||
def test_requirements_on_virtual(mpi_requirement, concretize_scope, mock_packages):
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
conf_str = """\
|
||||
packages:
|
||||
mpi:
|
||||
@@ -686,8 +624,6 @@ def test_requirements_on_virtual(mpi_requirement, concretize_scope, mock_package
|
||||
def test_requirements_on_virtual_and_on_package(
|
||||
mpi_requirement, specific_requirement, concretize_scope, mock_packages
|
||||
):
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
conf_str = """\
|
||||
packages:
|
||||
mpi:
|
||||
@@ -706,8 +642,6 @@ def test_requirements_on_virtual_and_on_package(
|
||||
|
||||
|
||||
def test_incompatible_virtual_requirements_raise(concretize_scope, mock_packages):
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
conf_str = """\
|
||||
packages:
|
||||
mpi:
|
||||
@@ -716,13 +650,12 @@ def test_incompatible_virtual_requirements_raise(concretize_scope, mock_packages
|
||||
update_packages_config(conf_str)
|
||||
|
||||
spec = Spec("callpath ^zmpi")
|
||||
with pytest.raises(UnsatisfiableSpecError):
|
||||
# TODO (multiple nodes): recover a better error message later
|
||||
with pytest.raises((UnsatisfiableSpecError, InternalConcretizerError)):
|
||||
spec.concretize()
|
||||
|
||||
|
||||
def test_non_existing_variants_under_all(concretize_scope, mock_packages):
|
||||
if spack.config.get("config:concretizer") == "original":
|
||||
pytest.skip("Original concretizer does not support configuration requirements")
|
||||
conf_str = """\
|
||||
packages:
|
||||
all:
|
||||
@@ -805,9 +738,6 @@ def test_conditional_requirements_from_packages_yaml(
     """Test that conditional requirements are required when the condition is met,
     and optional when the condition is not met.
     """
-    if spack.config.get("config:concretizer") == "original":
-        pytest.skip("Original concretizer does not support configuration requirements")
-
     update_packages_config(packages_yaml)
     spec = Spec(spec_str).concretized()
     for match_str, expected in expected_satisfies:
@@ -883,9 +813,6 @@ def test_requirements_fail_with_custom_message(
     """Test that specs failing due to requirements not being satisfiable fail with a
     custom error message.
     """
-    if spack.config.get("config:concretizer") == "original":
-        pytest.skip("Original concretizer does not support configuration requirements")
-
     update_packages_config(packages_yaml)
     with pytest.raises(spack.error.SpackError, match=expected_message):
         Spec(spec_str).concretized()
@@ -899,9 +826,6 @@ def test_skip_requirement_when_default_requirement_condition_cannot_be_met(
     package. For those packages the requirement rule is not emitted, since it can be
     determined to be always false.
     """
-    if spack.config.get("config:concretizer") == "original":
-        pytest.skip("Original concretizer does not support configuration requirements")
-
     packages_yaml = """
 packages:
   all:
@@ -919,8 +843,6 @@ def test_skip_requirement_when_default_requirement_condition_cannot_be_met(


 def test_requires_directive(concretize_scope, mock_packages):
-    if spack.config.get("config:concretizer") == "original":
-        pytest.skip("Original concretizer does not support configuration requirements")
     compilers_yaml = pathlib.Path(concretize_scope) / "compilers.yaml"
     compilers_yaml.write_text(
         """
@@ -937,7 +859,7 @@ def test_requires_directive(concretize_scope, mock_packages):
       modules: []
 """
     )
-    spack.config.config.clear_caches()
+    spack.config.CONFIG.clear_caches()

     # This package requires either clang or gcc
     s = Spec("requires_clang_or_gcc").concretized()
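Note: across the hunks above, the inline `config:concretizer` guards are deleted; per the hunk counts, nothing replaces them in place, and the skip logic moves to a pytest marker handled centrally in `pytest_runtest_setup` (see the conftest hunk at the end of this diff). A hedged sketch of what a test opting out of the original concretizer could look like after this change; the decoration shown here is inferred from the marker name in the conftest hunk, not taken from this compare view:

import pytest

@pytest.mark.only_clingo("Original concretizer does not support configuration requirements")
def test_some_requirement():
    # Body elided (hypothetical test). Skipping now happens in pytest_runtest_setup,
    # driven by the SPACK_TEST_SOLVER environment variable, instead of an inline
    # pytest.skip guard in every test.
    assert True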
@@ -7,7 +7,6 @@
 import getpass
 import io
 import os
 import sys
 import tempfile
 from datetime import date

@@ -236,7 +235,7 @@ def test_write_key_to_disk(mock_low_high_config, compiler_specs):
     spack.config.set("compilers", b_comps["compilers"], scope="high")

     # Clear caches so we're forced to read from disk.
-    spack.config.config.clear_caches()
+    spack.config.CONFIG.clear_caches()

     # Same check again, to ensure consistency.
     check_compiler_config(a_comps["compilers"], *compiler_specs.a)
@@ -249,7 +248,7 @@ def test_write_to_same_priority_file(mock_low_high_config, compiler_specs):
     spack.config.set("compilers", b_comps["compilers"], scope="low")

     # Clear caches so we're forced to read from disk.
-    spack.config.config.clear_caches()
+    spack.config.CONFIG.clear_caches()

     # Same check again, to ensure consistency.
     check_compiler_config(a_comps["compilers"], *compiler_specs.a)
@@ -369,7 +368,7 @@ def test_substitute_config_variables(mock_low_high_config, monkeypatch):
     spack.config.set(
         "modules:default", {"roots": {"lmod": os.path.join("foo", "bar", "baz")}}, scope="low"
     )
-    spack.config.config.clear_caches()
+    spack.config.CONFIG.clear_caches()
     path = spack.config.get("modules:default:roots:lmod")
     assert spack_path.canonicalize_path(path) == os.path.normpath(
         os.path.join(mock_low_high_config.scopes["low"].path, os.path.join("foo", "bar", "baz"))
@@ -484,7 +483,7 @@ def test_parse_install_tree(config_settings, expected, mutable_config):
     assert projections == expected_proj


-@pytest.mark.skipif(sys.platform == "win32", reason="Padding unsupported on Windows")
+@pytest.mark.not_on_windows("Padding unsupported on Windows")
 @pytest.mark.parametrize(
     "config_settings,expected",
     [
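Note: here and in the hunks that follow, `@pytest.mark.skipif(sys.platform == "win32", ...)` decorations are replaced by a custom `not_on_windows` marker whose skip behavior is implemented once, in the conftest hunk below. Assuming that hook is active, the two forms behave the same; a minimal sketch under that assumption (both tests are hypothetical):

import sys
import pytest

@pytest.mark.skipif(sys.platform == "win32", reason="Padding unsupported on Windows")
def test_old_style():
    # Skipped by pytest's built-in skipif evaluation.
    assert True

@pytest.mark.not_on_windows("Padding unsupported on Windows")
def test_new_style():
    # Skipped on Windows by the pytest_runtest_setup hook added in conftest.py.
    assert True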
@@ -816,7 +815,7 @@ def test_bad_config_section(mock_low_high_config):
         spack.config.get("foobar")


-@pytest.mark.skipif(sys.platform == "win32", reason="chmod not supported on Windows")
+@pytest.mark.not_on_windows("chmod not supported on Windows")
 @pytest.mark.skipif(getuid() == 0, reason="user is root")
 def test_bad_command_line_scopes(tmpdir, config):
     cfg = spack.config.Configuration()
@@ -854,18 +853,18 @@ def test_add_command_line_scopes(tmpdir, mutable_config):

 def test_nested_override():
     """Ensure proper scope naming of nested overrides."""
-    base_name = spack.config.overrides_base_name
+    base_name = spack.config._OVERRIDES_BASE_NAME

     def _check_scopes(num_expected, debug_values):
         scope_names = [
-            s.name for s in spack.config.config.scopes.values() if s.name.startswith(base_name)
+            s.name for s in spack.config.CONFIG.scopes.values() if s.name.startswith(base_name)
         ]

         for i in range(num_expected):
             name = "{0}{1}".format(base_name, i)
             assert name in scope_names

-            data = spack.config.config.get_config("config", name)
+            data = spack.config.CONFIG.get_config("config", name)
             assert data["debug"] == debug_values[i]

     # Check results from single and nested override
@@ -878,23 +877,23 @@ def _check_scopes(num_expected, debug_values):

 def test_alternate_override(monkeypatch):
     """Ensure proper scope naming of override when conflict present."""
-    base_name = spack.config.overrides_base_name
+    base_name = spack.config._OVERRIDES_BASE_NAME

     def _matching_scopes(regexpr):
         return [spack.config.InternalConfigScope("{0}1".format(base_name))]

     # Check that the alternate naming works
-    monkeypatch.setattr(spack.config.config, "matching_scopes", _matching_scopes)
+    monkeypatch.setattr(spack.config.CONFIG, "matching_scopes", _matching_scopes)

     with spack.config.override("config:debug", False):
         name = "{0}2".format(base_name)

         scope_names = [
-            s.name for s in spack.config.config.scopes.values() if s.name.startswith(base_name)
+            s.name for s in spack.config.CONFIG.scopes.values() if s.name.startswith(base_name)
         ]
         assert name in scope_names

-        data = spack.config.config.get_config("config", name)
+        data = spack.config.CONFIG.get_config("config", name)
         assert data["debug"] is False
@@ -1395,7 +1394,7 @@ def test_config_file_dir_failure(tmpdir, mutable_empty_config):
         spack.config.read_config_file(tmpdir.strpath)


-@pytest.mark.skipif(sys.platform == "win32", reason="chmod not supported on Windows")
+@pytest.mark.not_on_windows("chmod not supported on Windows")
 def test_config_file_read_perms_failure(tmpdir, mutable_empty_config):
     """Test reading a configuration file without permissions to ensure
     ConfigFileError is raised."""
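Note: the config-test hunks above all track one mechanical rename: module-level singletons move to uppercase names (`spack.config.config` becomes `spack.config.CONFIG`, `overrides_base_name` becomes `_OVERRIDES_BASE_NAME`), following the usual convention for module-level constants. When such a rename needs a deprecation period, a module-level `__getattr__` (PEP 562) can keep the old name importable; this is a hypothetical sketch of that general pattern, not something this diff shows Spack doing:

# mymodule.py -- hypothetical module illustrating the rename pattern only.
import warnings

CONFIG = {"debug": False}  # the renamed module-level singleton

def __getattr__(name):
    # PEP 562: called for attributes not found by normal lookup; keeps the old
    # lowercase name working while steering callers to the new one.
    if name == "config":
        warnings.warn(
            "mymodule.config is deprecated; use mymodule.CONFIG",
            DeprecationWarning,
            stacklevel=2,
        )
        return CONFIG
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")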
@@ -480,10 +480,10 @@ def __str__(self):

 @pytest.fixture(autouse=True)
 def mock_fetch_cache(monkeypatch):
-    """Substitutes spack.paths.fetch_cache with a mock object that does nothing
+    """Substitutes spack.paths.FETCH_CACHE with a mock object that does nothing
     and raises on fetch.
     """
-    monkeypatch.setattr(spack.caches, "fetch_cache", MockCache())
+    monkeypatch.setattr(spack.caches, "FETCH_CACHE", MockCache())


 @pytest.fixture()
@@ -716,7 +716,7 @@ def configuration_dir(tmpdir_factory, linux_os):

 def _create_mock_configuration_scopes(configuration_dir):
     """Create the configuration scopes used in `config` and `mutable_config`."""
-    scopes = [spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)]
+    scopes = [spack.config.InternalConfigScope("_builtin", spack.config.CONFIG_DEFAULTS)]
     scopes += [
         spack.config.ConfigScope(name, str(configuration_dir.join(name)))
         for name in ["site", "system", "user"]
@@ -1935,7 +1935,25 @@ def shell_as(shell):
 @pytest.fixture()
 def nullify_globals(request, monkeypatch):
     ensure_configuration_fixture_run_before(request)
-    monkeypatch.setattr(spack.config, "config", None)
-    monkeypatch.setattr(spack.caches, "misc_cache", None)
+    monkeypatch.setattr(spack.config, "CONFIG", None)
+    monkeypatch.setattr(spack.caches, "MISC_CACHE", None)
     monkeypatch.setattr(spack.caches, "FETCH_CACHE", None)
     monkeypatch.setattr(spack.repo, "PATH", None)
     monkeypatch.setattr(spack.store, "STORE", None)
+
+
+def pytest_runtest_setup(item):
+    # Skip tests if they are marked only clingo and are run with the original concretizer
+    only_clingo_marker = item.get_closest_marker(name="only_clingo")
+    if only_clingo_marker and os.environ.get("SPACK_TEST_SOLVER") == "original":
+        pytest.skip(*only_clingo_marker.args)
+
+    # Skip tests if they are marked only original and are run with clingo
+    only_original_marker = item.get_closest_marker(name="only_original")
+    if only_original_marker and os.environ.get("SPACK_TEST_SOLVER", "clingo") == "clingo":
+        pytest.skip(*only_original_marker.args)
+
+    # Skip test marked "not_on_windows" if they're run on Windows
+    not_on_windows_marker = item.get_closest_marker(name="not_on_windows")
+    if not_on_windows_marker and sys.platform == "win32":
+        pytest.skip(*not_on_windows_marker.args)
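Note: the `pytest_runtest_setup` hook added above implements the marker-based skips used throughout this diff. For the three custom marks (`only_clingo`, `only_original`, `not_on_windows`) to be recognized without "unknown marker" warnings, they would typically also be registered via `pytest_configure` or an ini file; a hedged sketch with illustrative descriptions, since the actual registration is not shown in this compare view:

# conftest.py (sketch): register the custom marks so pytest does not warn
# about unknown markers; the description strings here are illustrative.
def pytest_configure(config):
    config.addinivalue_line("markers", "only_clingo(msg): run only with the clingo concretizer")
    config.addinivalue_line("markers", "only_original(msg): run only with the original concretizer")
    config.addinivalue_line("markers", "not_on_windows(msg): skip when running on Windows")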