Compare commits: develop-20 ... develop-20

189 commits, by SHA1:

36852fe348, 8914d26867, fdea5e7624, ca1e4d54b5, 656528bbbb, 4d42e9d1f3,
d058c1d649, 43854fc2ec, 6a2149df6e, af38d097ac, e67dca73d1, 2e6ed1e707,
53d2ffaf83, a95e061fed, e01b9b38ef, eac15badd3, 806b8aa966, 9e5ca525f7,
5ea4322f88, 4ca2d8bc19, e0059ef961, 7d9fad9576, 553277a84f, 00a3ebd0bb,
ffc9060e11, 31d5f56913, bfdebae831, aa83fa44e1, e56291dd45, 2f52545214,
5090023e3a, d355880110, 1a0434b808, c3eec8a36f, 25b8cf93d2, 34ff7605e6,
e026fd3613, 3f5f4cfe26, 74fe9ccef3, fd5a8b2075, 33793445cf, f4a144c8ac,
6c439ec022, 209409189a, ff900566e0, a954a0bb9f, c21e00f504, 9ae1317e79,
9f1a30d3b5, 1340995249, afebc11742, 34e9fc612c, 1d8ff7f742, 0e27f05611,
19aaa97ff2, 990309355f, 2cb66e6e44, cfaade098a, ed65532e27, 696d4a1b85,
8def75b414, 5389db821d, 0d5ae3a809, b61ad8d2a8, b35db020eb, ca1d15101e,
c9ec5fb9ac, 71abb8c7f0, 4dafae8d17, b2b00df5cc, 114e5d4767, fd70e7fb31,
77760c8ea4, 737a6dcc73, 3826fe3765, edb11941b2, 1bd58a8026, f8e0c8caed,
d0412c1578, ec500adb50, 30f5c74614, 713eb210ac, a022e45866, 82685a68d9,
b19691d503, 54ea860b37, fb598baa53, 02763e967a, 2846be315b, 4818b75814,
b613bf3855, 3347372a7b, c417a77a19, 90d0d0176c, 72b9f89504, a89f1b1bf4,
c6e26251a1, 190a1bf523, e381e166ec, 2f145b2684, 4c7748e954, 86485dea14,
00f8f5898a, f41d7a89f3, 4f07205c63, 08f9c7670e, b451791336, 47f176d635,
b6ae751657, 9bb5cffc73, 135b44ca59, d3aca68e8f, fb83f8ef31, f69c18a922,
b95a9d2e47, def4d19980, 1db91e0ccd, 34ebe7f53c, d07d5410f3, 1db73eb1f2,
2da34de519, d237430f47, 3f0adae9ef, 3b4d7bf119, b3087b32c6, ad9c90cb2e,
1b0e113a9d, 6df5738482, 927d831612, 3f3c75e56a, 9733bb3da8, 1de5117ef1,
cf8f44ae5a, 006e69265e, eaec3062a1, d5eb5106b0, 9f8edbf6bf, a4301badef,
4565811556, b94d54e4d9, a410b22098, c1a73878ea, ae553051c8, b94e22b284,
e25dcf73cd, b7cc4bd247, 22c95923e3, c050b99a06, 60f82685ae, 27ab53b68a,
907a80ca71, a53cc93016, 6ad0dc3722, 87d4bdaa02, 36394aab2f, 358947fc03,
477a3c0ef6, c6c5e11353, 29e2997bd5, 41bd6a75d5, 0976ad3184, fc1d9ba550,
61f0088a27, c202a045e6, 843e1e80f0, 643c028308, d823037c40, 4d945be955,
a4ac3f2767, 6e31676b29, 1fff0241f2, a2a52dfb21, f0ed159a1b, 9bf7fa0067,
fbaea0336e, 1673d3e322, c7cca3aa8d, da46b63a34, c882214273, 2bacab0402,
0681d9a157, 887847610e, 282a01ef76, 151c551781, abbd1abc1a, 49c505cc14,
237a56a305, 7e7e6c2797, e67c61aac0
```diff
@@ -51,65 +51,43 @@ setlocal enabledelayedexpansion
 :: subcommands will never start with '-'
 :: everything after the subcommand is an arg
+
+:: we cannot allow batch "for" loop to directly process CL args
+:: a number of batch reserved characters are commonly passed to
+:: spack and allowing batch's "for" method to process the raw inputs
+:: results in a large number of formatting issues
+:: instead, treat the entire CLI as one string
+:: and split by space manually
+:: capture cl args in variable named cl_args
+set cl_args=%*
+
 :process_cl_args
-rem Set first cl argument (denoted by %1) to be processed
-set t=%1
-rem shift moves all cl positional arguments left by one
-rem meaning %2 is now %1, this allows us to iterate over each
-rem argument
-shift
-rem assign next "first" cl argument to cl_args, will be null when
-rem there are now further arguments to process
-set cl_args=%1
-if "!t:~0,1!" == "-" (
-    if defined _sp_subcommand (
-        rem We already have a subcommand, processing args now
-        if not defined _sp_args (
-            set "_sp_args=!t!"
-            shift
-        ) else (
-            set "_sp_args=!_sp_args! !t!"
-            shift
-        )
-    ) else (
-        if not defined _sp_flags (
-            set "_sp_flags=!t!"
-        ) else (
-            set "_sp_flags=!_sp_flags! !t!"
-        )
-    )
-) else if not defined _sp_subcommand (
-    set "_sp_subcommand=!t!"
-) else (
-    if not defined _sp_args (
-        set "_sp_args=!t!"
-    ) else (
-        set "_sp_args=!_sp_args! !t!"
-    )
-)
-rem if this is not nil, we have more tokens to process
+rem tokens=1* returns the first processed token produced
+rem by tokenizing the input string cl_args on spaces into
+rem the named variable %%g
+rem While this make look like a for loop, it only
+rem executes a single time for each of the cl args
+rem the actual iterative loop is performed by the
+rem goto process_cl_args stanza
+rem we are simply leveraging the "for" method's string
+rem tokenization
+for /f "tokens=1*" %%g in ("%cl_args%") do (
+    set t=%%~g
+    rem remainder of string is composed into %%h
+    rem these are the cl args yet to be processed
+    rem assign cl_args var to only the args to be processed
+    rem effectively discarding the current arg %%g
+    rem this will be nul when we have no further tokens to process
+    set cl_args=%%h
+    rem process the first space delineated cl arg
+    rem of this iteration
+    if "!t:~0,1!" == "-" (
+        if defined _sp_subcommand (
+            rem We already have a subcommand, processing args now
+            if not defined _sp_args (
+                set "_sp_args=!t!"
+            ) else (
+                set "_sp_args=!_sp_args! !t!"
+            )
+        ) else (
+            if not defined _sp_flags (
+                set "_sp_flags=!t!"
+                shift
+            ) else (
+                set "_sp_flags=!_sp_flags! !t!"
+                shift
+            )
+        )
+    ) else if not defined _sp_subcommand (
+        set "_sp_subcommand=!t!"
+        shift
+    ) else (
+        if not defined _sp_args (
+            set "_sp_args=!t!"
+        ) else (
+            set "_sp_args=!_sp_args! !t!"
+        )
+    )
+)
+rem if this is not nu;ll, we have more tokens to process
 rem start above process again with remaining unprocessed cl args
 if defined cl_args goto :process_cl_args
```
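The loop above classifies each token into leading Spack flags, a single subcommand, and trailing arguments. The same classification in a short Python sketch (illustrative only, not Spack code):

```python
def split_cli(tokens):
    """Tokens before the first non-dash token are flags, the first
    non-dash token is the subcommand, everything after it is an arg."""
    flags, subcommand, args = [], None, []
    for tok in tokens:
        if subcommand is None:
            if tok.startswith("-"):
                flags.append(tok)
            else:
                subcommand = tok
        else:
            args.append(tok)
    return flags, subcommand, args


print(split_cli(["-d", "install", "--verbose", "zlib"]))
# (['-d'], 'install', ['--verbose', 'zlib'])
```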
```diff
@@ -36,3 +36,9 @@ concretizer:
   # on each root spec, allowing different versions and variants of the same package in
   # an environment.
   unify: true
+  # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
+  duplicates:
+    # "none": allows a single node for any package in the DAG.
+    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
+    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
+    strategy: none
```
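The new `duplicates:strategy` setting can be read like any other concretizer option. A minimal sketch using the module-level `spack.config.get` wrapper that appears later in this comparison (the default value is an assumption based on the comments above):

```python
import spack.config

# One of "none", "minimal", or "full" per the comments above.
strategy = spack.config.get("concretizer:duplicates:strategy", "none")
if strategy not in ("none", "minimal", "full"):
    raise ValueError(f"unknown duplicates strategy: {strategy}")
```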
```diff
@@ -60,7 +60,7 @@ packages:
     xxd: [xxd-standalone, vim]
     yacc: [bison, byacc]
     ziglang: [zig]
-    zlib-api: [zlib, zlib-ng+compat]
+    zlib-api: [zlib-ng+compat, zlib]
   permissions:
     read: world
     write: user
```
```diff
@@ -4773,17 +4773,17 @@ For example, running:
 
 results in spack checking that the installation created the following **file**:
 
-* ``self.prefix/bin/reframe``
+* ``self.prefix.bin.reframe``
 
 and the following **directories**:
 
-* ``self.prefix/bin``
-* ``self.prefix/config``
-* ``self.prefix/docs``
-* ``self.prefix/reframe``
-* ``self.prefix/tutorials``
-* ``self.prefix/unittests``
-* ``self.prefix/cscs-checks``
+* ``self.prefix.bin``
+* ``self.prefix.config``
+* ``self.prefix.docs``
+* ``self.prefix.reframe``
+* ``self.prefix.tutorials``
+* ``self.prefix.unittests``
+* ``self.prefix.cscs-checks``
 
 If **any** of these paths are missing, then Spack considers the installation
 to have failed.
```
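The documentation now spells paths as attribute accesses on `self.prefix` rather than `join_path` calls. A toy reimplementation of the idea behind such a prefix object (not Spack's actual implementation):

```python
import os


class Prefix(str):
    """A string path whose attribute access appends path components."""

    def __getattr__(self, name):
        return Prefix(os.path.join(self, name))


prefix = Prefix("/opt/software/reframe")
print(prefix.bin.reframe)  # /opt/software/reframe/bin/reframe
# Components containing dashes (e.g. "cscs-checks") are not valid attribute
# names, so join_path-style composition is still needed for those.
```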
```diff
@@ -4927,7 +4927,7 @@ installed executable. The check is implemented as follows:
    @on_package_attributes(run_tests=True)
    def check_list(self):
        with working_dir(self.stage.source_path):
-           reframe = Executable(join_path(self.prefix, "bin", "reframe"))
+           reframe = Executable(self.prefix.bin.reframe)
            reframe("-l")
 
 .. warning::
```
```diff
@@ -5147,8 +5147,8 @@ embedded test parts.
        for example in ["ex1", "ex2"]:
            with test_part(
                self,
-               "test_example_{0}".format(example),
-               purpose="run installed {0}".format(example),
+               f"test_example_{example}",
+               purpose=f"run installed {example}",
            ):
                exe = which(join_path(self.prefix.bin, example))
                exe()
```
```diff
@@ -5226,11 +5226,10 @@ Below illustrates using this feature to compile an example.
        ...
        cxx = which(os.environ["CXX"])
        cxx(
-           "-L{0}".format(self.prefix.lib),
-           "-I{0}".format(self.prefix.include),
-           "{0}.cpp".format(exe),
-           "-o",
-           exe
+           f"-L{self.prefix.lib}",
+           f"-I{self.prefix.include}",
+           f"{exe}.cpp",
+           "-o", exe
        )
        cxx_example = which(exe)
        cxx_example()
```
```diff
@@ -5254,7 +5253,7 @@ Saving build-time files
    will be important to maintain them so they work across listed or supported
    versions of the package.
 
-You can use the ``cache_extra_test_sources`` method to copy directories
+You can use the ``cache_extra_test_sources`` helper to copy directories
 and or files from the source build stage directory to the package's
 installation directory.
```
```diff
@@ -5262,10 +5261,15 @@ The signature for ``cache_extra_test_sources`` is:
 
 .. code-block:: python
 
-   def cache_extra_test_sources(self, srcs):
+   def cache_extra_test_sources(pkg, srcs):
 
-where ``srcs`` is a string *or* a list of strings corresponding to the
-paths of subdirectories and or files needed for stand-alone testing.
+where each argument has the following meaning:
+
+* ``pkg`` is an instance of the package for the spec under test.
+
+* ``srcs`` is a string *or* a list of strings corresponding to the
+  paths of subdirectories and or files needed for stand-alone testing.
+
 The paths must be relative to the staged source directory. Contents of
 subdirectories and files are copied to a special test cache subdirectory
 of the installation prefix. They are automatically copied to the appropriate
```
```diff
@@ -5286,21 +5290,18 @@ and using ``foo.c`` in a test method is illustrated below.
        srcs = ["tests",
                join_path("examples", "foo.c"),
                join_path("examples", "bar.c")]
-       self.cache_extra_test_sources(srcs)
+       cache_extra_test_sources(self, srcs)
 
    def test_foo(self):
        exe = "foo"
-       src_dir = join_path(
-           self.test_suite.current_test_cache_dir, "examples"
-       )
+       src_dir = self.test_suite.current_test_cache_dir.examples
        with working_dir(src_dir):
            cc = which(os.environ["CC"])
            cc(
-               "-L{0}".format(self.prefix.lib),
-               "-I{0}".format(self.prefix.include),
-               "{0}.c".format(exe),
-               "-o",
-               exe
+               f"-L{self.prefix.lib}",
+               f"-I{self.prefix.include}",
+               f"{exe}.c",
+               "-o", exe
            )
            foo = which(exe)
            foo()
```
```diff
@@ -5326,9 +5327,9 @@ the files using the ``self.test_suite.current_test_cache_dir`` property.
 In our example above, test methods can use the following paths to reference
 the copy of each entry listed in ``srcs``, respectively:
 
-* ``join_path(self.test_suite.current_test_cache_dir, "tests")``
-* ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
-* ``join_path(self.test_suite.current_test_cache_dir, "examples", "bar.c")``
+* ``self.test_suite.current_test_cache_dir.tests``
+* ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
+* ``join_path(self.test_suite.current_test_cache_dir.examples, "bar.c")``
 
 .. admonition:: Library packages should build stand-alone tests
```
```diff
@@ -5347,7 +5348,7 @@ the copy of each entry listed in ``srcs``, respectively:
    If one or more of the copied files needs to be modified to reference
    the installed software, it is recommended that those changes be made
    to the cached files **once** in the ``copy_test_sources`` method and
-   ***after** the call to ``self.cache_extra_test_sources()``. This will
+   ***after** the call to ``cache_extra_test_sources()``. This will
    reduce the amount of unnecessary work in the test method **and** avoid
    problems testing in shared instances and facility deployments.
```
```diff
@@ -5394,7 +5395,7 @@ property as shown below.
        """build and run custom-example"""
        data_dir = self.test_suite.current_test_data_dir
        exe = "custom-example"
-       src = datadir.join("{0}.cpp".format(exe))
+       src = datadir.join(f"{exe}.cpp")
        ...
        # TODO: Build custom-example using src and exe
        ...
```
```diff
@@ -5444,7 +5445,7 @@ added to the package's ``test`` subdirectory.
            db_filename, ".dump", output=str.split, error=str.split
        )
        for exp in expected:
-           assert re.search(exp, out), "Expected '{0}' in output".format(exp)
+           assert re.search(exp, out), f"Expected '{exp}' in output"
 
 If the file was instead copied from the ``tests`` subdirectory of the staged
 source code, the path would be obtained as shown below.
```
```diff
@@ -5494,9 +5495,12 @@ Invoking the method is the equivalent of:
 
 .. code-block:: python
 
+   errors = []
    for check in expected:
        if not re.search(check, actual):
-           raise RuntimeError("Expected '{0}' in output '{1}'".format(check, actual))
+           errors.append(f"Expected '{check}' in output '{actual}'")
+   if errors:
+       raise RuntimeError("\n ".join(errors))
 
 
 .. _accessing-files:
```
```diff
@@ -5536,7 +5540,7 @@ repository, and installation.
      - ``self.test_suite.test_dir_for_spec(self.spec)``
    * - Current Spec's Build-time Files
      - ``self.test_suite.current_test_cache_dir``
-     - ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
+     - ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
    * - Current Spec's Custom Test Files
      - ``self.test_suite.current_test_data_dir``
      - ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
```
```diff
@@ -6071,7 +6075,7 @@ in the extra attributes can implement this method like this:
    @classmethod
    def validate_detected_spec(cls, spec, extra_attributes):
        """Check that "compilers" is in the extra attributes."""
-       msg = ("the extra attribute "compilers" must be set for "
+       msg = ("the extra attribute 'compilers' must be set for "
               "the detected spec '{0}'".format(spec))
        assert "compilers" in extra_attributes, msg
```
```diff
@@ -1,7 +1,7 @@
-sphinx==6.2.1
+sphinx==7.2.3
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.5.0
-sphinx-rtd-theme==1.2.2
+sphinx-rtd-theme==1.3.0
 python-levenshtein==0.21.1
 docutils==0.18.1
 pygments==2.16.1
@@ -10,4 +10,4 @@ pytest==7.4.0
 isort==5.12.0
 black==23.7.0
 flake8==6.1.0
-mypy==1.5.0
+mypy==1.5.1
```
```diff
@@ -1754,9 +1754,14 @@ def find(root, files, recursive=True):
        files = [files]
 
    if recursive:
-       return _find_recursive(root, files)
+       tty.debug(f"Find (recursive): {root} {str(files)}")
+       result = _find_recursive(root, files)
    else:
-       return _find_non_recursive(root, files)
+       tty.debug(f"Find (not recursive): {root} {str(files)}")
+       result = _find_non_recursive(root, files)
+
+   tty.debug(f"Find complete: {root} {str(files)}")
+   return result
 
 
 @system_path_filter
```
```diff
@@ -780,7 +780,7 @@ def __enter__(self):
            raise RuntimeError("file argument must be set by __init__ ")
 
        # Open both write and reading on logfile
-       if type(self.logfile) == io.StringIO:
+       if isinstance(self.logfile, io.StringIO):
            self._ioflag = True
            # cannot have two streams on tempfile, so we must make our own
            sys.stdout = self.logfile
```
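`isinstance` is preferred over comparing `type(...)` because it also accepts subclasses, which is almost always the intended behavior. A quick illustration (the subclass is hypothetical):

```python
import io


class BufferedLog(io.StringIO):
    """A hypothetical StringIO subclass."""


buf = BufferedLog()
print(type(buf) == io.StringIO)      # False: exact-type check rejects subclasses
print(isinstance(buf, io.StringIO))  # True: subclasses are accepted
```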
```diff
@@ -9,7 +9,6 @@
 import io
 import itertools
 import json
-import multiprocessing.pool
 import os
 import re
 import shutil
```
```diff
@@ -876,32 +875,18 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
        db: A spack database used for adding specs and then writing the index.
        temp_dir (str): Location to write index.json and hash for pushing
-       concurrency (int): Number of parallel processes to use when fetching
 
    Return:
        None
    """
-   def _fetch_spec_from_mirror(spec_url):
-       spec_file_contents = read_method(spec_url)
-
-       if spec_file_contents:
-           # Need full spec.json name or this gets confused with index.json.
-           if spec_url.endswith(".json.sig"):
-               specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
-               return Spec.from_dict(specfile_json)
-           if spec_url.endswith(".json"):
-               return Spec.from_json(spec_file_contents)
-
-   tp = multiprocessing.pool.ThreadPool(processes=concurrency)
-   try:
-       fetched_specs = tp.map(
-           llnl.util.lang.star(_fetch_spec_from_mirror), [(f,) for f in file_list]
-       )
-   finally:
-       tp.terminate()
-       tp.join()
-
-   for fetched_spec in fetched_specs:
-       db.add(fetched_spec, None)
-       db.mark(fetched_spec, "in_buildcache", True)
+   for file in file_list:
+       contents = read_method(file)
+       # Need full spec.json name or this gets confused with index.json.
+       if file.endswith(".json.sig"):
+           specfile_json = Spec.extract_json_from_clearsig(contents)
+           fetched_spec = Spec.from_dict(specfile_json)
+       elif file.endswith(".json"):
+           fetched_spec = Spec.from_json(contents)
+       else:
+           continue
+
+       db.add(fetched_spec, None)
+       db.mark(fetched_spec, "in_buildcache", True)
```
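The removed code fanned the spec reads out over a thread pool; the replacement reads each spec file sequentially and registers it in the database as it goes. The dropped pattern in standalone form (the fetch function is a toy stand-in):

```python
import multiprocessing.pool


def fetch(url):
    # Stand-in for reading a spec file from a mirror.
    return f"contents of {url}"


urls = ["a.json", "b.json.sig", "c.txt"]
tp = multiprocessing.pool.ThreadPool(processes=4)
try:
    results = tp.map(fetch, urls)
finally:
    tp.terminate()
    tp.join()
print(results)
```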
```diff
@@ -2383,22 +2368,12 @@ def __init__(self, all_architectures):
 
        self.possible_specs = specs
 
-   def __call__(self, spec, **kwargs):
+   def __call__(self, spec: Spec, **kwargs):
        """
        Args:
-           spec (str): The spec being searched for in its string representation or hash.
+           spec: The spec being searched for
        """
-       matches = []
-       if spec.startswith("/"):
-           # Matching a DAG hash
-           query_hash = spec.replace("/", "")
-           for candidate_spec in self.possible_specs:
-               if candidate_spec.dag_hash().startswith(query_hash):
-                   matches.append(candidate_spec)
-       else:
-           # Matching a spec constraint
-           matches = [s for s in self.possible_specs if s.satisfies(spec)]
-       return matches
+       return [s for s in self.possible_specs if s.satisfies(spec)]
 
 
 class FetchIndexError(Exception):
```
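The deleted branch matched queries starting with "/" against DAG-hash prefixes; after the change a single `satisfies` check is used for all queries. The old prefix-matching behavior in isolation (toy spec objects, not Spack's `Spec`):

```python
class FakeSpec:
    def __init__(self, name, dag_hash):
        self.name = name
        self._hash = dag_hash

    def dag_hash(self):
        return self._hash


specs = [FakeSpec("zlib", "abc123"), FakeSpec("cmake", "abd999")]
query = "/ab"  # a leading "/" meant: match by DAG hash prefix
query_hash = query.replace("/", "")
matches = [s for s in specs if s.dag_hash().startswith(query_hash)]
print([s.name for s in matches])  # ['zlib', 'cmake']
```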
```diff
@@ -124,9 +124,9 @@ def _read_and_sanitize_configuration() -> Dict[str, Any]:
 def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
    tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
    config_scopes: MutableSequence["spack.config.ConfigScope"] = [
-       spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)
+       spack.config.InternalConfigScope("_builtin", spack.config.CONFIG_DEFAULTS)
    ]
-   configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
+   configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
    for name, path in configuration_paths:
        platform = spack.platforms.host().name
        platform_scope = spack.config.ConfigScope(
```
```diff
@@ -480,11 +480,18 @@ def _add_externals_if_missing() -> None:
        spack.repo.PATH.get_pkg_class("bison"),
        # GnuPG
        spack.repo.PATH.get_pkg_class("gawk"),
+       # develop deps
+       spack.repo.PATH.get_pkg_class("git"),
    ]
    if IS_WINDOWS:
        search_list.append(spack.repo.PATH.get_pkg_class("winbison"))
-   detected_packages = spack.detection.by_executable(search_list)
-   spack.detection.update_configuration(detected_packages, scope="bootstrap")
+   externals = spack.detection.by_executable(search_list)
+   # System git is typically deprecated, so mark as non-buildable to force it as external
+   non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
+   spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)
+   spack.detection.update_configuration(
+       non_buildable_externals, scope="bootstrap", buildable=False
+   )
 
 
 def clingo_root_spec() -> str:
```
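The change splits the detected externals into buildable and non-buildable groups before writing them to configuration. The dict-partition idiom it uses, shown with toy data:

```python
externals = {"git": ["git@2.39"], "gawk": ["gawk@5.1"], "bison": ["bison@3.8"]}

# Pop selected keys into their own dict, leaving the rest untouched.
non_buildable = {k: externals.pop(k) for k in ("git",) if k in externals}

print(sorted(externals))      # ['bison', 'gawk']
print(sorted(non_buildable))  # ['git']
```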
```diff
@@ -23,6 +23,7 @@
 
 from ._common import _root_spec
 from .config import root_path, spec_for_current_python, store_path
+from .core import _add_externals_if_missing
 
 
 class BootstrapEnvironment(spack.environment.Environment):
```
```diff
@@ -185,6 +186,7 @@ def pytest_root_spec() -> str:
 
 def ensure_environment_dependencies() -> None:
    """Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
+   _add_externals_if_missing()
    with BootstrapEnvironment() as env:
        env.update_installations()
        env.update_syspath_and_environ()
```
```diff
@@ -1027,7 +1027,7 @@ def get_cmake_prefix_path(pkg):
 
 
 def _setup_pkg_and_run(
-   serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
+   serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
 ):
    context = kwargs.get("context", "build")
 
@@ -1048,12 +1048,12 @@ def _setup_pkg_and_run(
            pkg, dirty=kwargs.get("dirty", False), context=context
        )
        return_value = function(pkg, kwargs)
-       child_pipe.send(return_value)
+       write_pipe.send(return_value)
 
    except StopPhase as e:
        # Do not create a full ChildError from this, it's not an error
        # it's a control statement.
-       child_pipe.send(e)
+       write_pipe.send(e)
    except BaseException:
        # catch ANYTHING that goes wrong in the child process
        exc_type, exc, tb = sys.exc_info()
@@ -1102,10 +1102,10 @@ def _setup_pkg_and_run(
            context,
            package_context,
        )
-       child_pipe.send(ce)
+       write_pipe.send(ce)
 
    finally:
-       child_pipe.close()
+       write_pipe.close()
        if input_multiprocess_fd is not None:
            input_multiprocess_fd.close()
 
@@ -1149,7 +1149,7 @@ def child_fun():
    For more information on `multiprocessing` child process creation
    mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
    """
-   parent_pipe, child_pipe = multiprocessing.Pipe()
+   read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    input_multiprocess_fd = None
    jobserver_fd1 = None
    jobserver_fd2 = None
@@ -1174,7 +1174,7 @@ def child_fun():
                serialized_pkg,
                function,
                kwargs,
-               child_pipe,
+               write_pipe,
                input_multiprocess_fd,
                jobserver_fd1,
                jobserver_fd2,
@@ -1183,6 +1183,12 @@ def child_fun():
 
        p.start()
 
+       # We close the writable end of the pipe now to be sure that p is the
+       # only process which owns a handle for it. This ensures that when p
+       # closes its handle for the writable end, read_pipe.recv() will
+       # promptly report the readable end as being ready.
+       write_pipe.close()
+
    except InstallError as e:
        e.pkg = pkg
        raise
@@ -1192,7 +1198,16 @@
        if input_multiprocess_fd is not None:
            input_multiprocess_fd.close()
 
-   child_result = parent_pipe.recv()
+   def exitcode_msg(p):
+       typ = "exit" if p.exitcode >= 0 else "signal"
+       return f"{typ} {abs(p.exitcode)}"
+
+   try:
+       child_result = read_pipe.recv()
+   except EOFError:
+       p.join()
+       raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
+
    p.join()
 
    # If returns a StopPhase, raise it
@@ -1212,6 +1227,10 @@
        child_result.print_context()
        raise child_result
 
+   # Fallback. Usually caught beforehand in EOFError above.
+   if p.exitcode != 0:
+       raise InstallError(f"The process failed unexpectedly ({exitcode_msg(p)})")
+
    return child_result
```
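The refactor replaces a duplex pipe with a one-way pipe and closes the writable end in the parent, so a crashed child surfaces as `EOFError` on `recv()` rather than a hang. The pattern in a self-contained sketch:

```python
import multiprocessing


def child(write_pipe):
    write_pipe.send("ok")
    write_pipe.close()


if __name__ == "__main__":
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    p = multiprocessing.Process(target=child, args=(write_pipe,))
    p.start()
    # Close the parent's copy of the writable end: the child now holds the
    # only handle, so read_pipe.recv() raises EOFError promptly if it dies.
    write_pipe.close()
    try:
        result = read_pipe.recv()
    except EOFError:
        p.join()
        raise RuntimeError(f"child stopped unexpectedly (exit {p.exitcode})")
    p.join()
    print(result)
```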
```diff
@@ -55,7 +55,8 @@ def flags_to_build_system_args(self, flags):
        setattr(self, "configure_flag_args", [])
        for flag, values in flags.items():
            if values:
-               values_str = "{0}={1}".format(flag.upper(), " ".join(values))
+               var_name = "LIBS" if flag == "ldlibs" else flag.upper()
+               values_str = "{0}={1}".format(var_name, " ".join(values))
                self.configure_flag_args.append(values_str)
        # Spack's fflags are meant for both F77 and FC, therefore we
        # additionaly set FCFLAGS if required.
```
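The fix routes Spack's `ldlibs` flag to the `LIBS` variable that configure scripts actually honor, instead of uppercasing it to `LDLIBS`, which configure does not recognize. The mapping in a condensed sketch:

```python
def configure_args_from_flags(flags):
    """Toy version of the logic above: one VAR=value argument per flag kind."""
    args = []
    for flag, values in flags.items():
        if values:
            var_name = "LIBS" if flag == "ldlibs" else flag.upper()
            args.append("{0}={1}".format(var_name, " ".join(values)))
    return args


print(configure_args_from_flags({"cflags": ["-O2"], "ldlibs": ["-lm", "-lz"]}))
# ['CFLAGS=-O2', 'LIBS=-lm -lz']
```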
```diff
@@ -20,9 +20,9 @@
 
 
 def misc_cache_location():
-   """The ``misc_cache`` is Spack's cache for small data.
+   """The ``MISC_CACHE`` is Spack's cache for small data.
 
-   Currently the ``misc_cache`` stores indexes for virtual dependency
+   Currently the ``MISC_CACHE`` stores indexes for virtual dependency
    providers and for which packages provide which tags.
    """
    path = spack.config.get("config:misc_cache", spack.paths.default_misc_cache_path)
@@ -35,7 +35,7 @@ def _misc_cache():
 
 
 #: Spack's cache for small data
-misc_cache: Union[
+MISC_CACHE: Union[
    spack.util.file_cache.FileCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_misc_cache)
 
@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):
 
 
 #: Spack's local cache for downloaded source archives
-fetch_cache: Union[
+FETCH_CACHE: Union[
    spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_fetch_cache)
```
```diff
@@ -342,9 +342,9 @@ def iter_groups(specs, indent, all_headers):
            print()
 
            header = "%s{%s} / %s{%s}" % (
-               spack.spec.architecture_color,
+               spack.spec.ARCHITECTURE_COLOR,
                architecture if architecture else "no arch",
-               spack.spec.compiler_color,
+               spack.spec.COMPILER_COLOR,
                f"{compiler.display_str}" if compiler else "no compiler",
            )
```
```diff
@@ -69,11 +69,10 @@
 
 def _add_scope_option(parser):
    scopes = spack.config.scopes()
-   scopes_metavar = spack.config.scopes_metavar
    parser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        help="configuration scope to read/modify",
    )
```
```diff
@@ -170,7 +169,7 @@ def _reset(args):
    if not ok_to_continue:
        raise RuntimeError("Aborting")
 
-   for scope in spack.config.config.file_scopes:
+   for scope in spack.config.CONFIG.file_scopes:
        # The default scope should stay untouched
        if scope.name == "defaults":
            continue
@@ -187,7 +186,7 @@ def _reset(args):
    if os.path.exists(bootstrap_yaml):
        shutil.move(bootstrap_yaml, backup_file)
 
-   spack.config.config.clear_caches()
+   spack.config.CONFIG.clear_caches()
 
 
 def _root(args):
```
```diff
@@ -20,6 +20,7 @@
 import spack.cmd.common.arguments as arguments
 import spack.config
 import spack.environment as ev
+import spack.error
 import spack.mirror
 import spack.relocate
 import spack.repo
@@ -78,6 +79,11 @@ def setup_parser(subparser: argparse.ArgumentParser):
        "Alternatively, one can decide to build a cache for only the package or only the "
        "dependencies",
    )
+   push.add_argument(
+       "--fail-fast",
+       action="store_true",
+       help="stop pushing on first failure (default is best effort)",
+   )
    arguments.add_common_arguments(push, ["specs"])
    push.set_defaults(func=push_fn)
 
@@ -149,12 +155,11 @@ def setup_parser(subparser: argparse.ArgumentParser):
 
    # used to construct scope arguments below
    scopes = spack.config.scopes()
-   scopes_metavar = spack.config.scopes_metavar
 
    check.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        help="configuration scope containing mirrors to check",
    )
@@ -297,6 +302,7 @@ def push_fn(args):
    tty.info(f"Selected {len(specs)} specs to push to {url}")
 
    skipped = []
+   failed = []
 
    # tty printing
    color = clr.get_color_when()
@@ -327,11 +333,17 @@ def push_fn(args):
        except bindist.NoOverwriteException:
            skipped.append(format_spec(spec))
 
+       # Catch any other exception unless the fail fast option is set
+       except Exception as e:
+           if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
+               raise
+           failed.append((format_spec(spec), e))
+
    if skipped:
        if len(specs) == 1:
            tty.info("The spec is already in the buildcache. Use --force to overwrite it.")
        elif len(skipped) == len(specs):
-           tty.info("All specs are already in the buildcache. Use --force to overwite them.")
+           tty.info("All specs are already in the buildcache. Use --force to overwrite them.")
        else:
            tty.info(
                "The following {} specs were skipped as they already exist in the buildcache:\n"
@@ -341,6 +353,17 @@
            )
        )
 
+   if failed:
+       if len(failed) == 1:
+           raise failed[0][1]
+
+       raise spack.error.SpackError(
+           f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
+           "\n".join(
+               elide_list([f"  {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
+           ),
+       )
+
 
 def install_fn(args):
    """install from a binary package"""
```
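With `--fail-fast`, the first push error is raised immediately; otherwise errors are collected and reported together at the end. The control flow in isolation (`push` here is a toy stand-in for the real upload):

```python
def push_all(specs, push, fail_fast=False):
    failed = []
    for spec in specs:
        try:
            push(spec)
        except Exception as e:
            if fail_fast:
                raise
            failed.append((spec, e))
    if failed:
        lines = [f"  {spec}: {e.__class__.__name__}: {e}" for spec, e in failed]
        raise RuntimeError("errors occurred while pushing:\n" + "\n".join(lines))


def flaky_push(spec):
    if spec == "bad":
        raise ValueError("upload rejected")


try:
    push_all(["good", "bad"], flaky_push)  # best effort: collects, then raises
except RuntimeError as e:
    print(e)
```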
```diff
@@ -118,7 +118,7 @@ def clean(parser, args):
 
    if args.downloads:
        tty.msg("Removing cached downloads")
-       spack.caches.fetch_cache.destroy()
+       spack.caches.FETCH_CACHE.destroy()
 
    if args.failures:
        tty.msg("Removing install failure marks")
@@ -126,7 +126,7 @@ def clean(parser, args):
 
    if args.misc_cache:
        tty.msg("Removing cached information on repositories")
-       spack.caches.misc_cache.destroy()
+       spack.caches.MISC_CACHE.destroy()
 
    if args.python_cache:
        tty.msg("Removing python cache files")
```
```diff
@@ -24,7 +24,6 @@ def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="compiler_command")
 
    scopes = spack.config.scopes()
-   scopes_metavar = spack.config.scopes_metavar
 
    # Find
    find_parser = sp.add_parser(
@@ -36,7 +35,7 @@ def setup_parser(subparser):
    find_parser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope("compilers"),
        help="configuration scope to modify",
    )
@@ -50,7 +49,7 @@ def setup_parser(subparser):
    remove_parser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        default=None,
        help="configuration scope to modify",
    )
@@ -60,7 +59,7 @@ def setup_parser(subparser):
    list_parser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_list_scope(),
        help="configuration scope to read from",
    )
@@ -71,7 +70,7 @@ def setup_parser(subparser):
    info_parser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_list_scope(),
        help="configuration scope to read from",
    )
@@ -93,7 +92,7 @@ def compiler_find(args):
    n = len(new_compilers)
    s = "s" if n > 1 else ""
 
-   config = spack.config.config
+   config = spack.config.CONFIG
    filename = config.get_config_filename(args.scope, "compilers")
    tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
    colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
@@ -186,7 +185,7 @@ def compiler_list(args):
        os_str = os
        if target:
            os_str += "-%s" % target
-       cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
+       cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
        tty.hline(colorize(cname), char="-")
        colify(reversed(sorted(c.spec.display_str for c in compilers)))
```
```diff
@@ -13,12 +13,11 @@
 
 def setup_parser(subparser):
    scopes = spack.config.scopes()
-   scopes_metavar = spack.config.scopes_metavar
 
    subparser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        help="configuration scope to read/modify",
    )
```
```diff
@@ -27,13 +27,12 @@
 
 def setup_parser(subparser):
    scopes = spack.config.scopes()
-   scopes_metavar = spack.config.scopes_metavar
 
    # User can only choose one
    subparser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        help="configuration scope to read/modify",
    )
 
@@ -45,7 +44,7 @@ def setup_parser(subparser):
        help="configuration section to print\n\noptions: %(choices)s",
        nargs="?",
        metavar="section",
-       choices=spack.config.section_schemas,
+       choices=spack.config.SECTION_SCHEMAS,
    )
 
    blame_parser = sp.add_parser(
@@ -55,7 +54,7 @@ def setup_parser(subparser):
        "section",
        help="configuration section to print\n\noptions: %(choices)s",
        metavar="section",
-       choices=spack.config.section_schemas,
+       choices=spack.config.SECTION_SCHEMAS,
    )
 
    edit_parser = sp.add_parser("edit", help="edit configuration file")
@@ -64,7 +63,7 @@ def setup_parser(subparser):
        help="configuration section to edit\n\noptions: %(choices)s",
        metavar="section",
        nargs="?",
-       choices=spack.config.section_schemas,
+       choices=spack.config.SECTION_SCHEMAS,
    )
    edit_parser.add_argument(
        "--print-file", action="store_true", help="print the file name that would be edited"
@@ -146,10 +145,10 @@ def config_get(args):
    scope, section = _get_scope_and_section(args)
 
    if section is not None:
-       spack.config.config.print_section(section)
+       spack.config.CONFIG.print_section(section)
 
    elif scope and scope.startswith("env:"):
-       config_file = spack.config.config.get_config_filename(scope, section)
+       config_file = spack.config.CONFIG.get_config_filename(scope, section)
        if os.path.exists(config_file):
            with open(config_file) as f:
                print(f.read())
@@ -162,7 +161,7 @@ def config_get(args):
 
 def config_blame(args):
    """Print out line-by-line blame of merged YAML."""
-   spack.config.config.print_section(args.section, blame=True)
+   spack.config.CONFIG.print_section(args.section, blame=True)
 
 
 def config_edit(args):
@@ -181,7 +180,7 @@ def config_edit(args):
    scope, section = _get_scope_and_section(args)
    if not scope and not section:
        tty.die("`spack config edit` requires a section argument or an active environment.")
-   config_file = spack.config.config.get_config_filename(scope, section)
+   config_file = spack.config.CONFIG.get_config_filename(scope, section)
 
    if args.print_file:
        print(config_file)
@@ -194,7 +193,7 @@ def config_list(args):
 
    Used primarily for shell tab completion scripts.
    """
-   print(" ".join(list(spack.config.section_schemas)))
+   print(" ".join(list(spack.config.SECTION_SCHEMAS)))
 
 
 def config_add(args):
@@ -251,19 +250,19 @@ def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):
 
 def config_update(args):
    # Read the configuration files
-   spack.config.config.get_config(args.section, scope=args.scope)
+   spack.config.CONFIG.get_config(args.section, scope=args.scope)
    updates: List[spack.config.ConfigScope] = list(
        filter(
            lambda s: not isinstance(
                s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
            ),
-           spack.config.config.format_updates[args.section],
+           spack.config.CONFIG.format_updates[args.section],
        )
    )
 
    cannot_overwrite, skip_system_scope = [], False
    for scope in updates:
-       cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
+       cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
        can_be_updated = _can_update_config_file(scope, cfg_file)
        if not can_be_updated:
            if scope.name == "system":
@@ -302,7 +301,7 @@ def config_update(args):
            " the latest schema format:\n\n"
        )
        for scope in updates:
-           cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
+           cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
            msg += "\t[scope={0}, file={1}]\n".format(scope.name, cfg_file)
        msg += (
            "\nIf the configuration files are updated, versions of Spack "
@@ -325,7 +324,7 @@ def config_update(args):
        # Make a backup copy and rewrite the file
        bkp_file = cfg_file + ".bkp"
        shutil.copy(cfg_file, bkp_file)
-       spack.config.config.update_config(args.section, data, scope=scope.name, force=True)
+       spack.config.CONFIG.update_config(args.section, data, scope=scope.name, force=True)
        tty.msg(f'File "{cfg_file}" update [backup={bkp_file}]')
 
 
@@ -337,13 +336,13 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):
 
 
 def config_revert(args):
-   scopes = [args.scope] if args.scope else [x.name for x in spack.config.config.file_scopes]
+   scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]
 
    # Search for backup files in the configuration scopes
    Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])
    to_be_restored, cannot_overwrite = [], []
    for scope in scopes:
-       cfg_file = spack.config.config.get_config_filename(scope, args.section)
+       cfg_file = spack.config.CONFIG.get_config_filename(scope, args.section)
        bkp_file = cfg_file + ".bkp"
 
        # If the backup files doesn't exist move to the next scope
@@ -457,7 +456,7 @@ def config_prefer_upstream(args):
    existing = spack.config.get("packages", scope=scope)
    new = spack.config.merge_yaml(existing, pkgs)
    spack.config.set("packages", new, scope)
-   config_file = spack.config.config.get_config_filename(scope, section)
+   config_file = spack.config.CONFIG.get_config_filename(scope, section)
 
    tty.msg("Updated config at {0}".format(config_file))
```
```diff
@@ -13,6 +13,7 @@
 import spack
 import spack.cmd
 import spack.cmd.common.arguments
+import spack.config
 import spack.cray_manifest as cray_manifest
 import spack.detection
 import spack.error
@@ -27,7 +28,6 @@ def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="external_command")
 
    scopes = spack.config.scopes()
-   scopes_metavar = spack.config.scopes_metavar
 
    find_parser = sp.add_parser("find", help="add external packages to packages.yaml")
    find_parser.add_argument(
@@ -47,7 +47,7 @@ def setup_parser(subparser):
    find_parser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope("packages"),
        help="configuration scope to modify",
    )
@@ -165,7 +165,7 @@ def external_find(args):
        detected_packages, scope=args.scope, buildable=not args.not_buildable
    )
    if new_entries:
-       path = spack.config.config.get_config_filename(args.scope, "packages")
+       path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
        msg = "The following specs have been detected on this system and added to {0}"
        tty.msg(msg.format(path))
        spack.cmd.display_specs(new_entries)
```
```diff
@@ -64,11 +64,11 @@ def section_title(s):
 
 
 def version(s):
-   return spack.spec.version_color + s + plain_format
+   return spack.spec.VERSION_COLOR + s + plain_format
 
 
 def variant(s):
-   return spack.spec.enabled_variant_color + s + plain_format
+   return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format
 
 
 class VariantFormatter:
```
```diff
@@ -90,7 +90,6 @@ def setup_parser(subparser):
 
    # used to construct scope arguments below
    scopes = spack.config.scopes()
-   scopes_metavar = spack.config.scopes_metavar
 
    # Add
    add_parser = sp.add_parser("add", help=mirror_add.__doc__)
@@ -99,7 +98,7 @@ def setup_parser(subparser):
    add_parser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
@@ -119,7 +118,7 @@ def setup_parser(subparser):
    remove_parser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
@@ -138,7 +137,7 @@ def setup_parser(subparser):
    set_url_parser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
@@ -167,7 +166,7 @@ def setup_parser(subparser):
    set_parser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
@@ -178,7 +177,7 @@ def setup_parser(subparser):
    list_parser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_list_scope(),
        help="configuration scope to read from",
    )
```
```diff
@@ -20,7 +20,6 @@
 def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="repo_command")
    scopes = spack.config.scopes()
-   scopes_metavar = spack.config.scopes_metavar
 
    # Create
    create_parser = sp.add_parser("create", help=repo_create.__doc__)
@@ -45,7 +44,7 @@ def setup_parser(subparser):
    list_parser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_list_scope(),
        help="configuration scope to read from",
    )
@@ -56,7 +55,7 @@ def setup_parser(subparser):
    add_parser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
@@ -69,7 +68,7 @@ def setup_parser(subparser):
    remove_parser.add_argument(
        "--scope",
        choices=scopes,
-       metavar=scopes_metavar,
+       metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
```
```diff
@@ -137,7 +137,7 @@ def solve(parser, args):
    # these are the same options as `spack spec`
    install_status_fn = spack.spec.Spec.install_status
 
-   fmt = spack.spec.display_format
+   fmt = spack.spec.DISPLAY_FORMAT
    if args.namespaces:
        fmt = "{namespace}." + fmt
```
```diff
@@ -77,7 +77,7 @@ def setup_parser(subparser):
 def spec(parser, args):
    install_status_fn = spack.spec.Spec.install_status
 
-   fmt = spack.spec.display_format
+   fmt = spack.spec.DISPLAY_FORMAT
    if args.namespaces:
        fmt = "{namespace}." + fmt
```
```diff
@@ -209,12 +209,11 @@ def unit_test(parser, args, unknown_args):
    # mock configuration used by unit tests
-   # Note: skip on windows here because for the moment,
-   # clingo is wholly unsupported from bootstrap
-   if sys.platform != "win32":
-       with spack.bootstrap.ensure_bootstrap_configuration():
-           spack.bootstrap.ensure_core_dependencies()
-           if pytest is None:
-               spack.bootstrap.ensure_environment_dependencies()
-               import pytest
+   with spack.bootstrap.ensure_bootstrap_configuration():
+       spack.bootstrap.ensure_core_dependencies()
+       if pytest is None:
+           spack.bootstrap.ensure_environment_dependencies()
+           import pytest
 
    if args.pytest_help:
        # make the pytest.main help output more accurate
```
```diff
@@ -135,7 +135,7 @@ def _init_compiler_config(*, scope):
 
 def compiler_config_files():
    config_files = list()
-   config = spack.config.config
+   config = spack.config.CONFIG
    for scope in config.file_scopes:
        name = scope.name
        compiler_config = config.get("compilers", scope=name)
@@ -169,7 +169,7 @@ def remove_compiler_from_config(compiler_spec, scope=None):
    """
    candidate_scopes = [scope]
    if scope is None:
-       candidate_scopes = spack.config.config.scopes.keys()
+       candidate_scopes = spack.config.CONFIG.scopes.keys()
 
    removal_happened = False
    for current_scope in candidate_scopes:
@@ -523,7 +523,7 @@ def compiler_for_spec(compiler_spec, arch_spec):
 
 @_auto_compiler_spec
 def get_compiler_duplicates(compiler_spec, arch_spec):
-   config = spack.config.config
+   config = spack.config.CONFIG
 
    scope_to_compilers = {}
    for scope in config.scopes:
```
```diff
@@ -28,6 +28,7 @@
 
 import spack.abi
 import spack.compilers
+import spack.config
 import spack.environment
 import spack.error
 import spack.platforms
@@ -37,7 +38,6 @@
 import spack.tengine
 import spack.util.path
 import spack.variant as vt
-from spack.config import config
 from spack.package_prefs import PackagePrefs, is_spec_buildable, spec_externals
 from spack.version import ClosedOpenRange, VersionList, ver
 
@@ -76,7 +76,7 @@ class Concretizer:
 
    def __init__(self, abstract_spec=None):
        if Concretizer.check_for_compiler_existence is None:
-           Concretizer.check_for_compiler_existence = not config.get(
+           Concretizer.check_for_compiler_existence = not spack.config.get(
                "config:install_missing_compilers", False
            )
        self.abstract_spec = abstract_spec
```
```diff
@@ -47,6 +47,8 @@
 import spack.platforms
 import spack.schema
+import spack.schema.bootstrap
+import spack.schema.cdash
 import spack.schema.ci
 import spack.schema.compilers
 import spack.schema.concretizer
 import spack.schema.config
@@ -64,7 +66,7 @@
 from spack.util.cpus import cpus_available
 
 #: Dict from section names -> schema for that section
-section_schemas = {
+SECTION_SCHEMAS = {
    "compilers": spack.schema.compilers.schema,
    "concretizer": spack.schema.concretizer.schema,
    "mirrors": spack.schema.mirrors.schema,
@@ -80,16 +82,16 @@
 
 # Same as above, but including keys for environments
 # this allows us to unify config reading between configs and environments
-all_schemas = copy.deepcopy(section_schemas)
-all_schemas.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
+_ALL_SCHEMAS = copy.deepcopy(SECTION_SCHEMAS)
+_ALL_SCHEMAS.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
 
 #: Path to the default configuration
-configuration_defaults_path = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
+CONFIGURATION_DEFAULTS_PATH = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
 
 #: Hard-coded default values for some key configuration options.
 #: This ensures that Spack will still work even if config.yaml in
 #: the defaults scope is removed.
-config_defaults = {
+CONFIG_DEFAULTS = {
    "config": {
        "debug": False,
        "connect_timeout": 10,
@@ -105,10 +107,10 @@
 
 #: metavar to use for commands that accept scopes
 #: this is shorter and more readable than listing all choices
-scopes_metavar = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
+SCOPES_METAVAR = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
 
 #: Base name for the (internal) overrides scope.
-overrides_base_name = "overrides-"
+_OVERRIDES_BASE_NAME = "overrides-"
 
 
 class ConfigScope:
```
```diff
@@ -134,7 +136,7 @@ def get_section_filename(self, section):
    def get_section(self, section):
        if section not in self.sections:
            path = self.get_section_filename(section)
-           schema = section_schemas[section]
+           schema = SECTION_SCHEMAS[section]
            data = read_config_file(path, schema)
            self.sections[section] = data
        return self.sections[section]
@@ -145,7 +147,7 @@ def _write_section(self, section):
 
        # We copy data here to avoid adding defaults at write time
        validate_data = copy.deepcopy(data)
-       validate(validate_data, section_schemas[section])
+       validate(validate_data, SECTION_SCHEMAS[section])
 
        try:
            mkdirp(self.path)
@@ -317,7 +319,7 @@ def __init__(self, name, data=None):
        data = InternalConfigScope._process_dict_keyname_overrides(data)
        for section in data:
            dsec = data[section]
-           validate({section: dsec}, section_schemas[section])
+           validate({section: dsec}, SECTION_SCHEMAS[section])
            self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)
 
    def get_section_filename(self, section):
@@ -333,7 +335,7 @@ def _write_section(self, section):
        """This only validates, as the data is already in memory."""
        data = self.get_section(section)
        if data is not None:
-           validate(data, section_schemas[section])
+           validate(data, SECTION_SCHEMAS[section])
            self.sections[section] = _mark_internal(data, self.name)
 
    def __repr__(self):
@@ -430,7 +432,7 @@ def file_scopes(self) -> List[ConfigScope]:
        return [
            s
            for s in self.scopes.values()
-           if (type(s) == ConfigScope or type(s) == SingleFileScope)
+           if (type(s) is ConfigScope or type(s) is SingleFileScope)
        ]
 
    def highest_precedence_scope(self) -> ConfigScope:
```
```diff
@@ -711,11 +713,11 @@ def override(path_or_scope, value=None):
    """
    if isinstance(path_or_scope, ConfigScope):
        overrides = path_or_scope
-       config.push_scope(path_or_scope)
+       CONFIG.push_scope(path_or_scope)
    else:
-       base_name = overrides_base_name
+       base_name = _OVERRIDES_BASE_NAME
        # Ensure the new override gets a unique scope name
-       current_overrides = [s.name for s in config.matching_scopes(r"^{0}".format(base_name))]
+       current_overrides = [s.name for s in CONFIG.matching_scopes(r"^{0}".format(base_name))]
        num_overrides = len(current_overrides)
        while True:
            scope_name = "{0}{1}".format(base_name, num_overrides)
@@ -725,19 +727,19 @@ def override(path_or_scope, value=None):
                break
 
        overrides = InternalConfigScope(scope_name)
-       config.push_scope(overrides)
-       config.set(path_or_scope, value, scope=scope_name)
+       CONFIG.push_scope(overrides)
+       CONFIG.set(path_or_scope, value, scope=scope_name)
 
    try:
-       yield config
+       yield CONFIG
    finally:
-       scope = config.remove_scope(overrides.name)
+       scope = CONFIG.remove_scope(overrides.name)
        assert scope is overrides
 
 
 #: configuration scopes added on the command line
 #: set by ``spack.main.main()``.
-command_line_scopes: List[str] = []
+COMMAND_LINE_SCOPES: List[str] = []
 
 
 def _add_platform_scope(cfg, scope_type, name, path):
@@ -781,14 +783,14 @@ def create():
    cfg = Configuration()
 
    # first do the builtin, hardcoded defaults
-   builtin = InternalConfigScope("_builtin", config_defaults)
+   builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
    cfg.push_scope(builtin)
 
    # Builtin paths to configuration files in Spack
    configuration_paths = [
        # Default configuration scope is the lowest-level scope. These are
        # versioned with Spack and can be overridden by systems, sites or users
-       configuration_defaults_path
+       CONFIGURATION_DEFAULTS_PATH
    ]
 
    disable_local_config = "SPACK_DISABLE_LOCAL_CONFIG" in os.environ
@@ -815,7 +817,7 @@ def create():
        _add_platform_scope(cfg, ConfigScope, name, path)
 
    # add command-line scopes
-   _add_command_line_scopes(cfg, command_line_scopes)
+   _add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)
 
    # we make a special scope for spack commands so that they can
    # override configuration options.
@@ -825,7 +827,7 @@ def create():
 
 
 #: This is the singleton configuration instance for Spack.
-config: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
+CONFIG: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
 
 
 def add_from_file(filename, scope=None):
```
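The `override` context manager above pushes a temporary scope and removes it on exit. Typical usage (a sketch; the config path is just an illustrative example):

```python
import spack.config

with spack.config.override("config:debug", True):
    # Inside the block the override scope takes precedence.
    assert spack.config.get("config:debug") is True
# On exit the override scope is popped and the previous value applies again.
```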
```diff
@@ -838,7 +840,7 @@ def add_from_file(filename, scope=None):
    # update all sections from config dict
    # We have to iterate on keys to keep overrides from the file
    for section in data.keys():
-       if section in section_schemas.keys():
+       if section in SECTION_SCHEMAS.keys():
            # Special handling for compiler scope difference
            # Has to be handled after we choose a section
            if scope is None:
@@ -849,7 +851,7 @@ def add_from_file(filename, scope=None):
            new = merge_yaml(existing, value)
 
            # We cannot call config.set directly (set is a type)
-           config.set(section, new, scope)
+           CONFIG.set(section, new, scope)
 
 
 def add(fullpath, scope=None):
@@ -897,12 +899,12 @@ def add(fullpath, scope=None):
 
    # merge value into existing
    new = merge_yaml(existing, value)
-   config.set(path, new, scope)
+   CONFIG.set(path, new, scope)
 
 
 def get(path, default=None, scope=None):
    """Module-level wrapper for ``Configuration.get()``."""
-   return config.get(path, default, scope)
+   return CONFIG.get(path, default, scope)
 
 
 def set(path, value, scope=None):
@@ -910,26 +912,26 @@ def set(path, value, scope=None):
 
    Accepts the path syntax described in ``get()``.
    """
-   return config.set(path, value, scope)
+   return CONFIG.set(path, value, scope)
 
 
 def add_default_platform_scope(platform):
    plat_name = os.path.join("defaults", platform)
-   plat_path = os.path.join(configuration_defaults_path[1], platform)
-   config.push_scope(ConfigScope(plat_name, plat_path))
+   plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
+   CONFIG.push_scope(ConfigScope(plat_name, plat_path))
 
 
 def scopes():
    """Convenience function to get list of configuration scopes."""
-   return config.scopes
+   return CONFIG.scopes
 
 
 def _validate_section_name(section):
    """Exit if the section is not a valid section."""
-   if section not in section_schemas:
+   if section not in SECTION_SCHEMAS:
        raise ConfigSectionError(
            "Invalid config section: '%s'. Options are: %s"
-           % (section, " ".join(section_schemas.keys()))
+           % (section, " ".join(SECTION_SCHEMAS.keys()))
        )
 
 
@@ -990,7 +992,7 @@ def read_config_file(filename, schema=None):
    if data:
        if not schema:
            key = next(iter(data))
-           schema = all_schemas[key]
+           schema = _ALL_SCHEMAS[key]
        validate(data, schema)
        return data
 
@@ -1089,7 +1091,7 @@ def get_valid_type(path):
        test_data = {component: test_data}
 
    try:
-       validate(test_data, section_schemas[section])
+       validate(test_data, SECTION_SCHEMAS[section])
    except (ConfigFormatError, AttributeError) as e:
        jsonschema_error = e.validation_error
        if jsonschema_error.validator == "type":
@@ -1278,9 +1280,9 @@ def default_modify_scope(section="config"):
    If this is not 'compilers', a general (non-platform) scope is used.
    """
    if section == "compilers":
-       return spack.config.config.highest_precedence_scope().name
+       return CONFIG.highest_precedence_scope().name
    else:
-       return spack.config.config.highest_precedence_non_platform_scope().name
+       return CONFIG.highest_precedence_non_platform_scope().name
 
 
 def default_list_scope():
@@ -1337,18 +1339,18 @@ def use_configuration(*scopes_or_paths):
```
|
||||
Returns:
|
||||
Configuration object associated with the scopes passed as arguments
|
||||
"""
|
||||
global config
|
||||
global CONFIG
|
||||
|
||||
# Normalize input and construct a Configuration object
|
||||
configuration = _config_from(scopes_or_paths)
|
||||
config.clear_caches(), configuration.clear_caches()
|
||||
CONFIG.clear_caches(), configuration.clear_caches()
|
||||
|
||||
saved_config, config = config, configuration
|
||||
saved_config, CONFIG = CONFIG, configuration
|
||||
|
||||
try:
|
||||
yield configuration
|
||||
finally:
|
||||
config = saved_config
|
||||
CONFIG = saved_config
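`use_configuration` swaps the global singleton for the duration of the block, a pattern the test suite relies on. A minimal usage sketch, with a hypothetical scope directory:

    import spack.config

    with spack.config.use_configuration("/tmp/extra-scope") as cfg:
        # inside the block, module-level get()/set() route to this configuration
        cfg.get("config")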


@llnl.util.lang.memoized

@@ -5,8 +5,8 @@
"""Writers for different kind of recipes and related
convenience functions.
"""
import collections
import copy
from collections import namedtuple
from typing import Optional

import spack.environment as ev
@@ -159,13 +159,13 @@ def depfile(self):
    @tengine.context_property
    def run(self):
        """Information related to the run image."""
        Run = collections.namedtuple("Run", ["image"])
        Run = namedtuple("Run", ["image"])
        return Run(image=self.final_image)

    @tengine.context_property
    def build(self):
        """Information related to the build image."""
        Build = collections.namedtuple("Build", ["image"])
        Build = namedtuple("Build", ["image"])
        return Build(image=self.build_image)

    @tengine.context_property
@@ -176,12 +176,13 @@ def strip(self):
    @tengine.context_property
    def paths(self):
        """Important paths in the image"""
        Paths = collections.namedtuple("Paths", ["environment", "store", "hidden_view", "view"])
        Paths = namedtuple("Paths", ["environment", "store", "view_parent", "view", "former_view"])
        return Paths(
            environment="/opt/spack-environment",
            store="/opt/software",
            hidden_view="/opt/._view",
            view="/opt/view",
            view_parent="/opt/views",
            view="/opt/views/view",
            former_view="/opt/view",  # /opt/view -> /opt/views/view for backward compatibility
        )
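The environment view now lives under a dedicated parent (`/opt/views/view`), with the old `/opt/view` location kept as `former_view` so existing images and scripts keep working; a generated recipe could, for instance, symlink the legacy path to the new one (`ln -s /opt/views/view /opt/view` — illustrative, not taken from this diff). Rebuilding the same namedtuple by hand:

    from collections import namedtuple

    Paths = namedtuple("Paths", ["environment", "store", "view_parent", "view", "former_view"])
    paths = Paths("/opt/spack-environment", "/opt/software", "/opt/views", "/opt/views/view", "/opt/view")
    assert paths.view.startswith(paths.view_parent)  # the view sits under its parent dir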

    @tengine.context_property
@@ -257,7 +258,7 @@ def _package_info_from(self, package_list):

        update, install, clean = commands_for(os_pkg_manager)

        Packages = collections.namedtuple("Packages", ["update", "install", "list", "clean"])
        Packages = namedtuple("Packages", ["update", "install", "list", "clean"])
        return Packages(update=update, install=install, list=package_list, clean=clean)

    def _os_pkg_manager(self):
@@ -273,7 +274,7 @@ def _os_pkg_manager(self):

    @tengine.context_property
    def extra_instructions(self):
        Extras = collections.namedtuple("Extra", ["build", "final"])
        Extras = namedtuple("Extra", ["build", "final"])
        extras = self.container_config.get("extra_instructions", {})
        build, final = extras.get("build", None), extras.get("final", None)
        return Extras(build=build, final=final)
@@ -295,7 +296,7 @@ def bootstrap(self):
        context = {"bootstrap": {"image": self.bootstrap_image, "spack_checkout": command}}
        bootstrap_recipe = env.get_template(template_path).render(**context)

        Bootstrap = collections.namedtuple("Bootstrap", ["image", "recipe"])
        Bootstrap = namedtuple("Bootstrap", ["image", "recipe"])
        return Bootstrap(image=self.bootstrap_image, recipe=bootstrap_recipe)

    @tengine.context_property
@@ -303,7 +304,7 @@ def render_phase(self):
        render_bootstrap = bool(self.bootstrap_image)
        render_build = not (self.last_phase == "bootstrap")
        render_final = self.last_phase in (None, "final")
        Render = collections.namedtuple("Render", ["bootstrap", "build", "final"])
        Render = namedtuple("Render", ["bootstrap", "build", "final"])
        return Render(bootstrap=render_bootstrap, build=render_build, final=render_final)

    def __call__(self):

@@ -760,7 +760,7 @@ def _execute_variant(pkg):
        when_spec = make_when_spec(when)
        when_specs = [when_spec]

        if not re.match(spack.spec.identifier_re, name):
        if not re.match(spack.spec.IDENTIFIER_RE, name):
            directive = "variant"
            msg = "Invalid variant name in {0}: '{1}'"
            raise DirectiveError(directive, msg.format(pkg.name, name))

@@ -11,6 +11,7 @@
import shutil
import sys
from contextlib import contextmanager
from pathlib import Path

import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -104,7 +105,7 @@ def relative_path_for_spec(self, spec):

        projection = spack.projections.get_projection(self.projections, spec)
        path = spec.format(projection)
        return path
        return str(Path(path))
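Wrapping the formatted projection in `pathlib.Path` normalizes separators for the host OS, which matters for Windows support. For example:

    from pathlib import Path

    # a projection like "{name}/{version}" always formats with forward slashes;
    # on POSIX str(Path(...)) is a no-op, on Windows it becomes "zlib\1.2.13"
    print(str(Path("zlib/1.2.13")))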

    def write_spec(self, spec, path):
        """Write a spec out to a file."""

@@ -1994,14 +1994,10 @@ def get_one_by_hash(self, dag_hash):

    def all_matching_specs(self, *specs: spack.spec.Spec) -> List[Spec]:
        """Returns all concretized specs in the environment satisfying any of the input specs"""
        # Look up abstract hashes ahead of time, to avoid O(n^2) traversal.
        specs = [s.lookup_hash() for s in specs]

        # Avoid double lookup by directly calling _satisfies.
        return [
            s
            for s in traverse.traverse_nodes(self.concrete_roots(), key=traverse.by_dag_hash)
            if any(s._satisfies(t) for t in specs)
            if any(s.satisfies(t) for t in specs)
        ]

    @spack.repo.autospec
@@ -2062,7 +2058,7 @@ def matching_spec(self, spec):
        # If multiple root specs match, it is assumed that the abstract
        # spec will most-succinctly summarize the difference between them
        # (and the user can enter one of these to disambiguate)
        fmt_str = "{hash:7} " + spack.spec.default_format
        fmt_str = "{hash:7} " + spack.spec.DEFAULT_FORMAT
        color = clr.get_color_when()
        match_strings = [
            f"Root spec {abstract.format(color=color)}\n  {concrete.format(fmt_str, color=color)}"
@@ -2370,7 +2366,7 @@ def display_specs(concretized_specs):
    def _tree_to_display(spec):
        return spec.tree(
            recurse_dependencies=True,
            format=spack.spec.display_format,
            format=spack.spec.DISPLAY_FORMAT,
            status_fn=spack.spec.Spec.install_status,
            hashlen=7,
            hashes=True,
@@ -2448,13 +2444,13 @@ def make_repo_path(root):
def prepare_config_scope(env):
    """Add env's scope to the global configuration search path."""
    for scope in env.config_scopes():
        spack.config.config.push_scope(scope)
        spack.config.CONFIG.push_scope(scope)


def deactivate_config_scope(env):
    """Remove any scopes from env from the global config path."""
    for scope in env.config_scopes():
        spack.config.config.remove_scope(scope.name)
        spack.config.CONFIG.remove_scope(scope.name)


def manifest_file(env_name_or_dir):

@@ -590,9 +590,9 @@ def print_status(self, *specs, **kwargs):
        print()

        header = "%s{%s} / %s{%s}" % (
            spack.spec.architecture_color,
            spack.spec.ARCHITECTURE_COLOR,
            architecture,
            spack.spec.compiler_color,
            spack.spec.COMPILER_COLOR,
            compiler,
        )
        tty.hline(colorize(header), char="-")

@@ -602,10 +602,10 @@ def setup_main_options(args):

        key = syaml.syaml_str("repos")
        key.override = True
        spack.config.config.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
        spack.config.CONFIG.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
            [(key, [spack.paths.mock_packages_path])]
        )
        spack.repo.PATH = spack.repo.create(spack.config.config)
        spack.repo.PATH = spack.repo.create(spack.config.CONFIG)

    # If the user asked for it, don't check ssl certs.
    if args.insecure:
@@ -930,7 +930,7 @@ def _main(argv=None):

    # make spack.config aware of any command line configuration scopes
    if args.config_scopes:
        spack.config.command_line_scopes = args.config_scopes
        spack.config.COMMAND_LINE_SCOPES = args.config_scopes

    # ensure options on spack command come before everything
    setup_main_options(args)

@@ -833,7 +833,7 @@ def ensure_modules_are_enabled_or_warn():
        return

    # Check if we have custom TCL module sections
    for scope in spack.config.config.file_scopes:
    for scope in spack.config.CONFIG.file_scopes:
        # Skip default configuration
        if scope.name.startswith("default"):
            continue

@@ -236,7 +236,7 @@ def install(self, prefix):

    # Create a multimethod with this name if there is not one already
    original_method = MultiMethodMeta._locals.get(method.__name__)
    if not type(original_method) == SpecMultiMethod:
    if not isinstance(original_method, SpecMultiMethod):
        original_method = SpecMultiMethod(original_method)

    if self.spec is not None:

@@ -288,9 +288,6 @@ def next_spec(
            )
            raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)

        if root_spec.concrete:
            raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))

        root_spec._add_dependency(dependency, deptypes=(), virtuals=())

    else:
@@ -306,13 +303,12 @@ def all_specs(self) -> List[spack.spec.Spec]:
class SpecNodeParser:
    """Parse a single spec node from a stream of tokens"""

    __slots__ = "ctx", "has_compiler", "has_version", "has_hash"
    __slots__ = "ctx", "has_compiler", "has_version"

    def __init__(self, ctx):
        self.ctx = ctx
        self.has_compiler = False
        self.has_version = False
        self.has_hash = False

    def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:
        """Parse a single spec node from a stream of tokens
@@ -343,7 +339,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:

        while True:
            if self.ctx.accept(TokenType.COMPILER):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                if self.has_compiler:
                    raise spack.spec.DuplicateCompilerSpecError(
                        f"{initial_spec} cannot have multiple compilers"
@@ -353,7 +348,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:
                initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
                self.has_compiler = True
            elif self.ctx.accept(TokenType.COMPILER_AND_VERSION):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                if self.has_compiler:
                    raise spack.spec.DuplicateCompilerSpecError(
                        f"{initial_spec} cannot have multiple compilers"
@@ -367,7 +361,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:
            elif self.ctx.accept(TokenType.VERSION) or self.ctx.accept(
                TokenType.VERSION_HASH_PAIR
            ):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                if self.has_version:
                    raise spack.spec.MultipleVersionError(
                        f"{initial_spec} cannot have multiple versions"
@@ -378,25 +371,21 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:
                initial_spec.attach_git_version_lookup()
                self.has_version = True
            elif self.ctx.accept(TokenType.BOOL_VARIANT):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                variant_value = self.ctx.current_token.value[0] == "+"
                initial_spec._add_flag(
                    self.ctx.current_token.value[1:].strip(), variant_value, propagate=False
                )
            elif self.ctx.accept(TokenType.PROPAGATED_BOOL_VARIANT):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                variant_value = self.ctx.current_token.value[0:2] == "++"
                initial_spec._add_flag(
                    self.ctx.current_token.value[2:].strip(), variant_value, propagate=True
                )
            elif self.ctx.accept(TokenType.KEY_VALUE_PAIR):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                name, value = self.ctx.current_token.value.split("=", maxsplit=1)
                name = name.strip("'\" ")
                value = value.strip("'\" ")
                initial_spec._add_flag(name, value, propagate=False)
            elif self.ctx.accept(TokenType.PROPAGATED_KEY_VALUE_PAIR):
                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                name, value = self.ctx.current_token.value.split("==", maxsplit=1)
                name = name.strip("'\" ")
                value = value.strip("'\" ")
@@ -411,12 +400,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:

        return initial_spec

    def hash_not_parsed_or_raise(self, spec, addition):
        if not self.has_hash:
            return

        raise spack.spec.RedundantSpecError(spec, addition)


class FileParser:
    """Parse a single spec from a JSON or YAML file"""

@@ -10,11 +10,12 @@
dependencies.
"""
import os
from pathlib import PurePath

import llnl.util.filesystem

#: This file lives in $prefix/lib/spack/spack/__file__
prefix = llnl.util.filesystem.ancestor(__file__, 4)
prefix = str(PurePath(llnl.util.filesystem.ancestor(__file__, 4)))

#: synonym for prefix
spack_root = prefix
@@ -88,7 +89,7 @@ def _get_user_cache_path():
    return os.path.expanduser(os.getenv("SPACK_USER_CACHE_PATH") or "~%s.spack" % os.sep)


user_cache_path = _get_user_cache_path()
user_cache_path = str(PurePath(_get_user_cache_path()))

#: junit, cdash, etc. reports about builds
reports_path = os.path.join(user_cache_path, "reports")

@@ -64,7 +64,7 @@ def use_platform(new_platform):
    host = _PickleableCallable(new_platform)

    # Clear configuration and compiler caches
    spack.config.config.clear_caches()
    spack.config.CONFIG.clear_caches()
    spack.compilers._cache_config_files = []

    yield new_platform
@@ -73,5 +73,5 @@ def use_platform(new_platform):
    host = original_host_fn

    # Clear configuration and compiler caches
    spack.config.config.clear_caches()
    spack.config.CONFIG.clear_caches()
    spack.compilers._cache_config_files = []

@@ -387,7 +387,7 @@ def _create_new_cache(self) -> Dict[str, os.stat_result]:

    # Warn about invalid names that look like packages.
    if not nm.valid_module_name(pkg_name):
        if not pkg_name.startswith("."):
        if not pkg_name.startswith(".") and pkg_name != "repo.yaml":
            tty.warn(
                'Skipping package at {0}. "{1}" is not '
                "a valid Spack module name.".format(pkg_dir, pkg_name)
@@ -647,7 +647,7 @@ class RepoPath:
    """

    def __init__(self, *repos, **kwargs):
        cache = kwargs.get("cache", spack.caches.misc_cache)
        cache = kwargs.get("cache", spack.caches.MISC_CACHE)
        self.repos = []
        self.by_namespace = nm.NamespaceTrie()

@@ -966,7 +966,7 @@ def check(condition, msg):

        # Indexes for this repository, computed lazily
        self._repo_index = None
        self._cache = cache or spack.caches.misc_cache
        self._cache = cache or spack.caches.MISC_CACHE

    def real_name(self, import_name):
        """Allow users to import Spack packages using Python identifiers.
@@ -1357,7 +1357,7 @@ def create_or_construct(path, namespace=None):

def _path(configuration=None):
    """Get the singleton RepoPath instance for Spack."""
    configuration = configuration or spack.config.config
    configuration = configuration or spack.config.CONFIG
    return create(configuration=configuration)


@@ -1404,14 +1404,14 @@ def use_repositories(*paths_and_repos, **kwargs):
    paths = [getattr(x, "root", x) for x in paths_and_repos]
    scope_name = "use-repo-{}".format(uuid.uuid4())
    repos_key = "repos:" if override else "repos"
    spack.config.config.push_scope(
    spack.config.CONFIG.push_scope(
        spack.config.InternalConfigScope(name=scope_name, data={repos_key: paths})
    )
    PATH, saved = create(configuration=spack.config.config), PATH
    PATH, saved = create(configuration=spack.config.CONFIG), PATH
    try:
        yield PATH
    finally:
        spack.config.config.remove_scope(scope_name=scope_name)
        spack.config.CONFIG.remove_scope(scope_name=scope_name)
        PATH = saved



@@ -28,6 +28,12 @@
            "unify": {
                "oneOf": [{"type": "boolean"}, {"type": "string", "enum": ["when_possible"]}]
            },
            "duplicates": {
                "type": "object",
                "properties": {
                    "strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
                },
            },
        },
    }
}
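The new `duplicates` entry lets the concretizer configuration select how aggressively the solver may duplicate nodes. The Python data structure this schema would accept, as the validator sees it (values illustrative):

    data = {"concretizer": {"duplicates": {"strategy": "minimal"}}}
    # "strategy" is constrained by the enum above to "none", "minimal" or "full"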

@@ -8,11 +8,12 @@
import enum
import itertools
import os
import pathlib
import pprint
import re
import types
import warnings
from typing import List
from typing import List, NamedTuple

import archspec.cpu

@@ -50,6 +51,8 @@
import spack.version as vn
import spack.version.git_ref_lookup

from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter

# these are from clingo.ast and bootstrapped later
ASTType = None
parse_files = None
@@ -77,9 +80,7 @@ def default_clingo_control():
    """Return a control object with the default settings used in Spack"""
    control = clingo.Control()
    control.configuration.configuration = "tweety"
    control.configuration.solve.models = 0
    control.configuration.solver.heuristic = "Domain"
    control.configuration.solve.parallel_mode = "1"
    control.configuration.solver.opt_strategy = "usc,one"
    return control

@@ -266,12 +267,14 @@ def _id(thing):

@llnl.util.lang.key_ordering
class AspFunction(AspObject):
    __slots__ = ["name", "args"]

    def __init__(self, name, args=None):
        self.name = name
        self.args = () if args is None else tuple(args)

    def _cmp_key(self):
        return (self.name, self.args)
        return self.name, self.args

    def __call__(self, *args):
        """Return a new instance of this function with added arguments.
@@ -302,6 +305,8 @@ def argify(arg):
            return clingo.String(str(arg))
        elif isinstance(arg, int):
            return clingo.Number(arg)
        elif isinstance(arg, AspFunction):
            return clingo.Function(arg.name, [argify(x) for x in arg.args], positive=positive)
        else:
            return clingo.String(str(arg))

@@ -322,6 +327,15 @@ def __getattr__(self, name):
fn = AspFunctionBuilder()


def _create_counter(specs, tests):
    strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
    if strategy == "full":
        return FullDuplicatesCounter(specs, tests=tests)
    if strategy == "minimal":
        return MinimalDuplicatesCounter(specs, tests=tests)
    return NoDuplicatesCounter(specs, tests=tests)


def all_compilers_in_config():
    return spack.compilers.all_compilers()

@@ -513,15 +527,17 @@ def _compute_specs_from_answer_set(self):
        best = min(self.answers)
        opt, _, answer = best
        for input_spec in self.abstract_specs:
            key = input_spec.name
            node = SpecBuilder.make_node(pkg=input_spec.name)
            if input_spec.virtual:
                providers = [spec.name for spec in answer.values() if spec.package.provides(key)]
                key = providers[0]
            candidate = answer.get(key)
                providers = [
                    spec.name for spec in answer.values() if spec.package.provides(input_spec.name)
                ]
                node = SpecBuilder.make_node(pkg=providers[0])
            candidate = answer.get(node)

            if candidate and candidate.satisfies(input_spec):
                self._concrete_specs.append(answer[key])
                self._concrete_specs_by_input[input_spec] = answer[key]
                self._concrete_specs.append(answer[node])
                self._concrete_specs_by_input[input_spec] = answer[node]
            else:
                self._unsolved_specs.append(input_spec)

@@ -572,16 +588,33 @@ def bootstrap_clingo():
        from clingo import parse_files


def stringify(sym):
    """Stringify symbols from clingo models.
class NodeArgument(NamedTuple):
    id: str
    pkg: str

    This will turn a ``clingo.Symbol`` into a string, or a sequence of ``clingo.Symbol``
    objects into a tuple of strings.

def intermediate_repr(sym):
    """Returns an intermediate representation of clingo models for Spack's spec builder.

    Currently, transforms symbols from clingo models either to strings or to NodeArgument objects.

    Returns:
        This will turn a ``clingo.Symbol`` into a string or NodeArgument, or a sequence of
        ``clingo.Symbol`` objects into a tuple of those objects.
    """
    # TODO: simplify this when we no longer have to support older clingo versions.
    if isinstance(sym, (list, tuple)):
        return tuple(stringify(a) for a in sym)
        return tuple(intermediate_repr(a) for a in sym)

    try:
        if sym.name == "node":
            return NodeArgument(
                id=intermediate_repr(sym.arguments[0]), pkg=intermediate_repr(sym.arguments[1])
            )
    except RuntimeError:
        # This happens when using clingo w/ CFFI and trying to access ".name" for symbols
        # that are not functions
        pass

    if clingo_cffi:
        # Clingo w/ CFFI will throw an exception on failure
@@ -596,10 +629,10 @@ def stringify(sym):
def extract_args(model, predicate_name):
    """Extract the arguments to predicates with the provided name from a model.

    Pull out all the predicates with name ``predicate_name`` from the model, and return
    their stringified arguments as tuples.
    Pull out all the predicates with name ``predicate_name`` from the model, and
    return their intermediate representation.
    """
    return [stringify(sym.arguments) for sym in model if sym.name == predicate_name]
    return [intermediate_repr(sym.arguments) for sym in model if sym.name == predicate_name]
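A sketch of what the new representation yields, assuming the usual clingo symbol API (`clingo.Function`, `clingo.Number`, `clingo.String`):

    import clingo

    sym = clingo.Function("node", [clingo.Number(0), clingo.String("zlib")])
    # the "node" function name is recognized and wrapped, so a model predicate
    # like attr("version", node(0, "zlib"), "1.2.13") comes back through
    # extract_args() as ("version", NodeArgument(id="0", pkg="zlib"), "1.2.13")
    assert intermediate_repr(sym) == NodeArgument(id="0", pkg="zlib")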


class ErrorHandler:
@@ -700,7 +733,9 @@ def fact(self, head):
        """
        symbol = head.symbol() if hasattr(head, "symbol") else head

        self.out.write("%s.\n" % str(symbol))
        # This is commented out to avoid evaluating str(symbol) when we have no stream
        if not isinstance(self.out, llnl.util.lang.Devnull):
            self.out.write(f"{str(symbol)}.\n")

        atom = self.backend.add_atom(symbol)

@@ -772,8 +807,13 @@ def visit(node):

        # Load the file itself
        self.control.load(os.path.join(parent_dir, "concretize.lp"))
        self.control.load(os.path.join(parent_dir, "heuristic.lp"))
        if spack.config.CONFIG.get("concretizer:duplicates:strategy", "none") != "none":
            self.control.load(os.path.join(parent_dir, "heuristic_separate.lp"))
        self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
        self.control.load(os.path.join(parent_dir, "display.lp"))
        if not setup.concretize_everything:
            self.control.load(os.path.join(parent_dir, "when_possible.lp"))
        timer.stop("load")

        # Grounding is the first step in the solve -- it turns our facts
@@ -801,6 +841,14 @@ def on_model(model):

        timer.start("solve")
        solve_result = self.control.solve(**solve_kwargs)

        if solve_result.satisfiable and self._model_has_cycles(models):
            tty.debug(f"cycles detected, falling back to slower algorithm [specs={specs}]")
            self.control.load(os.path.join(parent_dir, "cycle_detection.lp"))
            self.control.ground([("no_cycle", [])])
            models.clear()
            solve_result = self.control.solve(**solve_kwargs)

        timer.stop("solve")

        # once done, construct the solve result
@@ -836,7 +884,8 @@ def on_model(model):
            for sym in best_model:
                if sym.name not in ("attr", "error", "opt_criterion"):
                    tty.debug(
                        "UNKNOWN SYMBOL: %s(%s)" % (sym.name, ", ".join(stringify(sym.arguments)))
                        "UNKNOWN SYMBOL: %s(%s)"
                        % (sym.name, ", ".join(intermediate_repr(sym.arguments)))
                    )

        elif cores:
@@ -853,6 +902,26 @@ def on_model(model):

        return result, timer, self.control.statistics

    def _model_has_cycles(self, models):
        """Returns true if the best model has cycles in it"""
        cycle_detection = clingo.Control()
        parent_dir = pathlib.Path(__file__).parent
        lp_file = parent_dir / "cycle_detection.lp"

        min_cost, best_model = min(models)
        with cycle_detection.backend() as backend:
            for atom in best_model:
                if atom.name == "attr" and str(atom.arguments[0]) == '"depends_on"':
                    symbol = fn.depends_on(atom.arguments[1], atom.arguments[2])
                    atom_id = backend.add_atom(symbol.symbol())
                    backend.add_rule([atom_id], [], choice=False)

            cycle_detection.load(str(lp_file))
            cycle_detection.ground([("base", []), ("no_cycle", [])])
            cycle_result = cycle_detection.solve()

        return cycle_result.unsatisfiable
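The cycle check is deliberately a second, tiny ASP problem: only the `depends_on/2` atoms of the best model are re-grounded against `cycle_detection.lp`, whose `no_cycle` program part rejects cyclic graphs, so an unsatisfiable result means a cycle was found. A toy version of the same program-part mechanics:

    import clingo

    ctl = clingo.Control()
    ctl.add("base", [], "edge(a, b). edge(b, a).")
    ctl.add("no_cycle", [], ":- edge(X, Y), edge(Y, X).")  # toy 2-cycle constraint
    ctl.ground([("base", []), ("no_cycle", [])])
    print(ctl.solve().unsatisfiable)  # True: the toy graph is cyclic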


class SpackSolverSetup:
    """Class to set up and run a Spack concretization solve."""
@@ -883,6 +952,10 @@ def __init__(self, tests=False):

        # id for dummy variables
        self._condition_id_counter = itertools.count()
        self._trigger_id_counter = itertools.count()
        self._trigger_cache = collections.defaultdict(dict)
        self._effect_id_counter = itertools.count()
        self._effect_cache = collections.defaultdict(dict)

        # Caches to optimize the setup phase of the solver
        self.target_specs_cache = None
@@ -919,15 +992,18 @@ def key_fn(version):

        for weight, declared_version in enumerate(most_to_least_preferred):
            self.gen.fact(
                fn.version_declared(
                    pkg.name, declared_version.version, weight, str(declared_version.origin)
                fn.pkg_fact(
                    pkg.name,
                    fn.version_declared(
                        declared_version.version, weight, str(declared_version.origin)
                    ),
                )
            )

        # Declare deprecated versions for this package, if any
        deprecated = self.deprecated_versions[pkg.name]
        for v in sorted(deprecated):
            self.gen.fact(fn.deprecated_version(pkg.name, v))
            self.gen.fact(fn.pkg_fact(pkg.name, fn.deprecated_version(v)))
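This is the recurring refactor throughout the solver hunks below: per-package facts are wrapped in a `pkg_fact/2` term whose first argument is the package name, so the grounded program can group and index facts by package. With the `fn` builder shown earlier the reshaping looks like (values illustrative):

    # before: version_declared("zlib", "1.2.13", 0, "package_py").
    old = fn.version_declared("zlib", "1.2.13", 0, "package_py")
    # after:  pkg_fact("zlib", version_declared("1.2.13", 0, "package_py")).
    new = fn.pkg_fact("zlib", fn.version_declared("1.2.13", 0, "package_py"))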

    def spec_versions(self, spec):
        """Return list of clauses expressing spec's version constraints."""
@@ -960,7 +1036,10 @@ def conflict_rules(self, pkg):
        no_constraint_msg = "{0}: conflicts with '{1}'"
        for trigger, constraints in pkg.conflicts.items():
            trigger_msg = "conflict trigger %s" % str(trigger)
            trigger_id = self.condition(spack.spec.Spec(trigger), name=pkg.name, msg=trigger_msg)
            trigger_spec = spack.spec.Spec(trigger)
            trigger_id = self.condition(
                trigger_spec, name=trigger_spec.name or pkg.name, msg=trigger_msg
            )

            for constraint, conflict_msg in constraints:
                if conflict_msg is None:
@@ -970,7 +1049,9 @@ def conflict_rules(self, pkg):
                    conflict_msg = default_msg.format(pkg.name, trigger, constraint)
                constraint_msg = "conflict constraint %s" % str(constraint)
                constraint_id = self.condition(constraint, name=pkg.name, msg=constraint_msg)
                self.gen.fact(fn.conflict(pkg.name, trigger_id, constraint_id, conflict_msg))
                self.gen.fact(
                    fn.pkg_fact(pkg.name, fn.conflict(trigger_id, constraint_id, conflict_msg))
                )
            self.gen.newline()

    def compiler_facts(self):
@@ -1023,8 +1104,11 @@ def package_compiler_defaults(self, pkg):

        for i, compiler in enumerate(reversed(matches)):
            self.gen.fact(
                fn.node_compiler_preference(
                    pkg.name, compiler.spec.name, compiler.spec.version, -i * 100
                fn.pkg_fact(
                    pkg.name,
                    fn.node_compiler_preference(
                        compiler.spec.name, compiler.spec.version, -i * 100
                    ),
                )
            )

@@ -1114,12 +1198,65 @@ def pkg_rules(self, pkg, tests):
        self.gen.newline()

        # variants
        self.variant_rules(pkg)

        # conflicts
        self.conflict_rules(pkg)

        # default compilers for this package
        self.package_compiler_defaults(pkg)

        # virtuals
        self.package_provider_rules(pkg)

        # dependencies
        self.package_dependencies_rules(pkg)

        # virtual preferences
        self.virtual_preferences(
            pkg.name,
            lambda v, p, i: self.gen.fact(fn.pkg_fact(pkg.name, fn.provider_preference(v, p, i))),
        )

        self.package_requirement_rules(pkg)

        # trigger and effect tables
        self.trigger_rules()
        self.effect_rules()

    def trigger_rules(self):
        """Flushes all the trigger rules collected so far, and clears the cache."""
        self.gen.h2("Trigger conditions")
        for name in self._trigger_cache:
            cache = self._trigger_cache[name]
            for spec_str, (trigger_id, requirements) in cache.items():
                self.gen.fact(fn.pkg_fact(name, fn.trigger_id(trigger_id)))
                self.gen.fact(fn.pkg_fact(name, fn.trigger_msg(spec_str)))
                for predicate in requirements:
                    self.gen.fact(fn.condition_requirement(trigger_id, *predicate.args))
                self.gen.newline()
        self._trigger_cache.clear()

    def effect_rules(self):
        """Flushes all the effect rules collected so far, and clears the cache."""
        self.gen.h2("Imposed requirements")
        for name in self._effect_cache:
            cache = self._effect_cache[name]
            for spec_str, (effect_id, requirements) in cache.items():
                self.gen.fact(fn.pkg_fact(name, fn.effect_id(effect_id)))
                self.gen.fact(fn.pkg_fact(name, fn.effect_msg(spec_str)))
                for predicate in requirements:
                    self.gen.fact(fn.imposed_constraint(effect_id, *predicate.args))
                self.gen.newline()
        self._effect_cache.clear()

    def variant_rules(self, pkg):
        for name, entry in sorted(pkg.variants.items()):
            variant, when = entry

            if spack.spec.Spec() in when:
                # unconditional variant
                self.gen.fact(fn.variant(pkg.name, name))
                self.gen.fact(fn.pkg_fact(pkg.name, fn.variant(name)))
            else:
                # conditional variant
                for w in when:
@@ -1128,19 +1265,23 @@ def pkg_rules(self, pkg, tests):
                    msg += " when %s" % w

                    cond_id = self.condition(w, name=pkg.name, msg=msg)
                    self.gen.fact(fn.variant_condition(cond_id, pkg.name, name))
                    self.gen.fact(fn.pkg_fact(pkg.name, fn.conditional_variant(cond_id, name)))

            single_value = not variant.multi
            if single_value:
                self.gen.fact(fn.variant_single_value(pkg.name, name))
                self.gen.fact(fn.pkg_fact(pkg.name, fn.variant_single_value(name)))
                self.gen.fact(
                    fn.variant_default_value_from_package_py(pkg.name, name, variant.default)
                    fn.pkg_fact(
                        pkg.name, fn.variant_default_value_from_package_py(name, variant.default)
                    )
                )
            else:
                spec_variant = variant.make_default()
                defaults = spec_variant.value
                for val in sorted(defaults):
                    self.gen.fact(fn.variant_default_value_from_package_py(pkg.name, name, val))
                    self.gen.fact(
                        fn.pkg_fact(pkg.name, fn.variant_default_value_from_package_py(name, val))
                    )

            values = variant.values
            if values is None:
@@ -1151,7 +1292,9 @@ def pkg_rules(self, pkg, tests):
                for sid, s in enumerate(values.sets):
                    for value in s:
                        self.gen.fact(
                            fn.variant_value_from_disjoint_sets(pkg.name, name, value, sid)
                            fn.pkg_fact(
                                pkg.name, fn.variant_value_from_disjoint_sets(name, value, sid)
                            )
                        )
                    union.update(s)
                values = union
@@ -1178,7 +1321,9 @@ def pkg_rules(self, pkg, tests):
                        msg="empty (total) conflict constraint",
                    )
                    msg = "variant {0}={1} is conditionally disabled".format(name, value)
                    self.gen.fact(fn.conflict(pkg.name, trigger_id, constraint_id, msg))
                    self.gen.fact(
                        fn.pkg_fact(pkg.name, fn.conflict(trigger_id, constraint_id, msg))
                    )
                else:
                    imposed = spack.spec.Spec(value.when)
                    imposed.name = pkg.name
@@ -1189,32 +1334,13 @@ def pkg_rules(self, pkg, tests):
                        name=pkg.name,
                        msg="%s variant %s value %s when %s" % (pkg.name, name, value, when),
                    )
                self.gen.fact(fn.variant_possible_value(pkg.name, name, value))
                self.gen.fact(fn.pkg_fact(pkg.name, fn.variant_possible_value(name, value)))

            if variant.sticky:
                self.gen.fact(fn.variant_sticky(pkg.name, name))
                self.gen.fact(fn.pkg_fact(pkg.name, fn.variant_sticky(name)))

            self.gen.newline()

        # conflicts
        self.conflict_rules(pkg)

        # default compilers for this package
        self.package_compiler_defaults(pkg)

        # virtuals
        self.package_provider_rules(pkg)

        # dependencies
        self.package_dependencies_rules(pkg)

        # virtual preferences
        self.virtual_preferences(
            pkg.name, lambda v, p, i: self.gen.fact(fn.pkg_provider_preference(pkg.name, v, p, i))
        )

        self.package_requirement_rules(pkg)

    def condition(self, required_spec, imposed_spec=None, name=None, msg=None, node=False):
        """Generate facts for a dependency or virtual provider condition.

@@ -1232,21 +1358,41 @@ def condition(self, required_spec, imposed_spec=None, name=None, msg=None, node=False):
        """
        named_cond = required_spec.copy()
        named_cond.name = named_cond.name or name
        assert named_cond.name, "must provide name for anonymous condtions!"
        assert named_cond.name, "must provide name for anonymous conditions!"

        # Check if we can emit the requirements before updating the condition ID counter.
        # In this way, if a condition can't be emitted but the exception is handled in the caller,
        # we won't emit partial facts.
        requirements = self.spec_clauses(named_cond, body=True, required_from=name)

        condition_id = next(self._condition_id_counter)
        self.gen.fact(fn.condition(condition_id, msg))
        for pred in requirements:
            self.gen.fact(fn.condition_requirement(condition_id, *pred.args))
        self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition(condition_id)))
        self.gen.fact(fn.condition_reason(condition_id, msg))

        if imposed_spec:
            self.impose(condition_id, imposed_spec, node=node, name=name)
        cache = self._trigger_cache[named_cond.name]

        named_cond_key = str(named_cond)
        if named_cond_key not in cache:
            trigger_id = next(self._trigger_id_counter)
            requirements = self.spec_clauses(named_cond, body=True, required_from=name)
            cache[named_cond_key] = (trigger_id, requirements)
        trigger_id, requirements = cache[named_cond_key]
        self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_trigger(condition_id, trigger_id)))

        if not imposed_spec:
            return condition_id

        cache = self._effect_cache[named_cond.name]
        imposed_spec_key = str(imposed_spec)
        if imposed_spec_key not in cache:
            effect_id = next(self._effect_id_counter)
            requirements = self.spec_clauses(imposed_spec, body=False, required_from=name)
            if not node:
                requirements = list(
                    filter(lambda x: x.args[0] not in ("node", "virtual_node"), requirements)
                )
            cache[imposed_spec_key] = (effect_id, requirements)
        effect_id, requirements = cache[imposed_spec_key]
        self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_effect(condition_id, effect_id)))
        return condition_id

    def impose(self, condition_id, imposed_spec, node=True, name=None, body=False):
@@ -1259,13 +1405,19 @@ def impose(self, condition_id, imposed_spec, node=True, name=None, body=False):

    def package_provider_rules(self, pkg):
        for provider_name in sorted(set(s.name for s in pkg.provided.keys())):
            self.gen.fact(fn.possible_provider(pkg.name, provider_name))
            if provider_name not in self.possible_virtuals:
                continue
            self.gen.fact(fn.pkg_fact(pkg.name, fn.possible_provider(provider_name)))

        for provided, whens in pkg.provided.items():
            if provided.name not in self.possible_virtuals:
                continue
            for when in whens:
                msg = "%s provides %s when %s" % (pkg.name, provided, when)
                condition_id = self.condition(when, provided, pkg.name, msg)
                self.gen.fact(fn.provider_condition(condition_id, when.name, provided.name))
                self.gen.fact(
                    fn.pkg_fact(when.name, fn.provider_condition(condition_id, provided.name))
                )
            self.gen.newline()

    def package_dependencies_rules(self, pkg):
@@ -1289,9 +1441,13 @@ def package_dependencies_rules(self, pkg):
                msg = "%s depends on %s" % (pkg.name, dep.spec.name)
                if cond != spack.spec.Spec():
                    msg += " when %s" % cond
                else:
                    pass

                condition_id = self.condition(cond, dep.spec, pkg.name, msg)
                self.gen.fact(fn.dependency_condition(condition_id, pkg.name, dep.spec.name))
                self.gen.fact(
                    fn.pkg_fact(pkg.name, fn.dependency_condition(condition_id, dep.spec.name))
                )

                for t in sorted(deptypes):
                    # there is a declared dependency of type t
@@ -1328,7 +1484,7 @@ def provider_requirements(self):
            "Internal Error: possible_virtuals is not populated. Please report to the spack"
            " maintainers"
        )
        packages_yaml = spack.config.config.get("packages")
        packages_yaml = spack.config.CONFIG.get("packages")
        assert self.possible_virtuals is not None, msg
        for virtual_str in sorted(self.possible_virtuals):
            requirements = packages_yaml.get(virtual_str, {}).get("require", [])
@@ -1336,6 +1492,8 @@ def provider_requirements(self):
                virtual_str, requirements, kind=RequirementKind.VIRTUAL
            )
            self.emit_facts_from_requirement_rules(rules)
        self.trigger_rules()
        self.effect_rules()

    def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
        """Generate facts to enforce requirements.
@@ -1449,10 +1607,12 @@ def external_packages(self):
            for local_idx, spec in enumerate(external_specs):
                msg = "%s available as external when satisfying %s" % (spec.name, spec)
                condition_id = self.condition(spec, msg=msg)
                self.gen.fact(fn.possible_external(condition_id, pkg_name, local_idx))
                self.gen.fact(fn.pkg_fact(pkg_name, fn.possible_external(condition_id, local_idx)))
                self.possible_versions[spec.name].add(spec.version)
            self.gen.newline()

        self.trigger_rules()

    def preferred_variants(self, pkg_name):
        """Facts on concretization preferences, as read from packages.yaml"""
        preferences = spack.package_prefs.PackagePrefs
@@ -1495,7 +1655,9 @@ def target_preferences(self, pkg_name):
            if str(preferred.architecture.target) == best_default and i != 0:
                offset = 100
            self.gen.fact(
                fn.target_weight(pkg_name, str(preferred.architecture.target), i + offset)
                fn.pkg_fact(
                    pkg_name, fn.target_weight(str(preferred.architecture.target), i + offset)
                )
            )

    def spec_clauses(self, *args, **kwargs):
@@ -1667,37 +1829,11 @@ class Body:
                    # skip build dependencies of already-installed specs
                    if concrete_build_deps or dtype != "build":
                        clauses.append(fn.attr("depends_on", spec.name, dep.name, dtype))

                        # TODO: We have to look up info from package.py here, but we'd
                        # TODO: like to avoid this entirely. We should not need to look
                        # TODO: up potentially wrong info if we have virtual edge info.
                        try:
                            try:
                                pkg = dep.package

                            except spack.repo.UnknownNamespaceError:
                                # Try to look up the package of the same name and use its
                                # providers. This is as good as we can do without edge info.
                                pkg_class = spack.repo.PATH.get_pkg_class(dep.name)
                                spec = spack.spec.Spec(f"{dep.name}@{dep.version}")
                                pkg = pkg_class(spec)

                            virtuals = pkg.virtuals_provided

                        except spack.repo.UnknownPackageError:
                            # Skip virtual node constriants for renamed/deleted packages,
                            # so their binaries can still be installed.
                            # NOTE: with current specs (which lack edge attributes) this
                            # can allow concretizations with two providers, but it's unlikely.
                            continue

                        # Don't concretize with two providers of the same virtual.
                        # See above for exception for unknown packages.
                        # TODO: we will eventually record provider information on edges,
                        # TODO: which avoids the need for the package lookup above.
                        for virtual in virtuals:
                            clauses.append(fn.attr("virtual_node", virtual.name))
                            clauses.append(fn.provider(dep.name, virtual.name))
                        for virtual_name in dspec.virtuals:
                            clauses.append(
                                fn.attr("virtual_on_edge", spec.name, dep.name, virtual_name)
                            )
                            clauses.append(fn.attr("virtual_node", virtual_name))

                    # imposing hash constraints for all but pure build deps of
                    # already-installed concrete specs.
@@ -2041,11 +2177,11 @@ def define_version_constraints(self):
        # generate facts for each package constraint and the version
        # that satisfies it
        for v in sorted(v for v in self.possible_versions[pkg_name] if v.satisfies(versions)):
            self.gen.fact(fn.version_satisfies(pkg_name, versions, v))
            self.gen.fact(fn.pkg_fact(pkg_name, fn.version_satisfies(versions, v)))

        self.gen.newline()

    def define_virtual_constraints(self):
    def collect_virtual_constraints(self):
        """Define versions for constraints on virtuals.

        Must be called before define_version_constraints().
@@ -2131,7 +2267,7 @@ def define_variant_values(self):
        # spec_clauses(). We might want to order these facts by pkg and name
        # if we are debugging.
        for pkg, variant, value in self.variant_values_from_specs:
            self.gen.fact(fn.variant_possible_value(pkg, variant, value))
            self.gen.fact(fn.pkg_fact(pkg, fn.variant_possible_value(variant, value)))

    def _facts_from_concrete_spec(self, spec, possible):
        # tell the solver about any installed packages that could
@@ -2191,20 +2327,19 @@ def setup(self, driver, specs, reuse=None):

        # get list of all possible dependencies
        self.possible_virtuals = set(x.name for x in specs if x.virtual)
        possible = spack.package_base.possible_dependencies(
            *specs, virtuals=self.possible_virtuals, deptype=spack.dependency.all_deptypes
        )

        node_counter = _create_counter(specs, tests=self.tests)
        self.possible_virtuals = node_counter.possible_virtuals()
        self.pkgs = node_counter.possible_dependencies()

        # Fail if we already know an unreachable node is requested
        for spec in specs:
            missing_deps = [
                str(d) for d in spec.traverse() if d.name not in possible and not d.virtual
                str(d) for d in spec.traverse() if d.name not in self.pkgs and not d.virtual
            ]
            if missing_deps:
                raise spack.spec.InvalidDependencyError(spec.name, missing_deps)

        self.pkgs = set(possible)

        # driver is used by all the functions below to add facts and
        # rules to generate an ASP program.
        self.gen = driver
@@ -2228,13 +2363,16 @@ def setup(self, driver, specs, reuse=None):
        self.possible_compilers = self.generate_possible_compilers(specs)

        self.gen.h1("Concrete input spec definitions")
        self.define_concrete_input_specs(specs, possible)
        self.define_concrete_input_specs(specs, self.pkgs)

        if reuse:
            self.gen.h1("Reusable specs")
            self.gen.fact(fn.optimize_for_reuse())
            for reusable_spec in reuse:
                self._facts_from_concrete_spec(reusable_spec, possible)
                self._facts_from_concrete_spec(reusable_spec, self.pkgs)

        self.gen.h1("Generic statements on possible packages")
        node_counter.possible_packages_facts(self.gen, fn)

        self.gen.h1("Possible flags on nodes")
        for flag in spack.spec.FlagMap.valid_compiler_flags():
@@ -2255,7 +2393,7 @@ def setup(self, driver, specs, reuse=None):
        self.external_packages()

        # traverse all specs and packages to build dict of possible versions
        self.build_version_dict(possible)
        self.build_version_dict(self.pkgs)
        self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
        self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)
@@ -2270,9 +2408,12 @@ def setup(self, driver, specs, reuse=None):
            self.preferred_variants(pkg)
            self.target_preferences(pkg)

        self.gen.h1("Develop specs")
        # Inject dev_path from environment
        for ds in dev_specs:
            self.condition(spack.spec.Spec(ds.name), ds, msg="%s is a develop spec" % ds.name)
            self.trigger_rules()
            self.effect_rules()

        self.gen.h1("Spec Constraints")
        self.literal_specs(specs)
@@ -2280,10 +2421,8 @@ def setup(self, driver, specs, reuse=None):
        self.gen.h1("Variant Values defined in specs")
        self.define_variant_values()

        self.gen.h1("Virtual Constraints")
        self.define_virtual_constraints()

        self.gen.h1("Version Constraints")
        self.collect_virtual_constraints()
        self.define_version_constraints()

        self.gen.h1("Compiler Version Constraints")
@@ -2305,8 +2444,8 @@ def literal_specs(self, specs):
                fn.literal(idx, "variant_default_value_from_cli", *clause.args[1:])
            )

        if self.concretize_everything:
            self.gen.fact(fn.concretize_everything())
            if self.concretize_everything:
                self.gen.fact(fn.solve_literal(idx))

    def _get_versioned_specs_from_pkg_requirements(self):
        """If package requirements mention versions that are not mentioned
@@ -2401,6 +2540,16 @@ class SpecBuilder:
            )
        )

    @staticmethod
    def make_node(*, pkg: str) -> NodeArgument:
        """Given a package name, returns the string representation of the "min_dupe_id" node in
        the ASP encoding.

        Args:
            pkg: name of a package
        """
        return NodeArgument(id="0", pkg=pkg)
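`make_node` yields the canonical key for the root duplicate of a package. Because `NodeArgument` is a `NamedTuple`, it is hashable and compares by value, so it serves directly as a dictionary key in the builder below:

    node = SpecBuilder.make_node(pkg="zlib")
    assert node == NodeArgument(id="0", pkg="zlib")
    specs = {node: "the zlib Spec"}
    assert specs[NodeArgument("0", "zlib")] == "the zlib Spec"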

    def __init__(self, specs, hash_lookup=None):
        self._specs = {}
        self._result = None
@@ -2413,101 +2562,105 @@ def __init__(self, specs, hash_lookup=None):
        # from this dictionary during reconstruction
        self._hash_lookup = hash_lookup or {}

    def hash(self, pkg, h):
        if pkg not in self._specs:
            self._specs[pkg] = self._hash_lookup[h]
            self._hash_specs.append(pkg)
    def hash(self, node, h):
        if node not in self._specs:
            self._specs[node] = self._hash_lookup[h]
            self._hash_specs.append(node)

    def node(self, pkg):
        if pkg not in self._specs:
            self._specs[pkg] = spack.spec.Spec(pkg)
    def node(self, node):
        if node not in self._specs:
            self._specs[node] = spack.spec.Spec(node.pkg)

    def _arch(self, pkg):
        arch = self._specs[pkg].architecture
    def _arch(self, node):
        arch = self._specs[node].architecture
        if not arch:
            arch = spack.spec.ArchSpec()
            self._specs[pkg].architecture = arch
            self._specs[node].architecture = arch
        return arch

    def node_platform(self, pkg, platform):
        self._arch(pkg).platform = platform
    def node_platform(self, node, platform):
        self._arch(node).platform = platform

    def node_os(self, pkg, os):
        self._arch(pkg).os = os
    def node_os(self, node, os):
        self._arch(node).os = os

    def node_target(self, pkg, target):
        self._arch(pkg).target = target
    def node_target(self, node, target):
        self._arch(node).target = target

    def variant_value(self, pkg, name, value):
    def variant_value(self, node, name, value):
        # FIXME: is there a way not to special case 'dev_path' everywhere?
        if name == "dev_path":
            self._specs[pkg].variants.setdefault(
            self._specs[node].variants.setdefault(
                name, spack.variant.SingleValuedVariant(name, value)
            )
            return

        if name == "patches":
            self._specs[pkg].variants.setdefault(
            self._specs[node].variants.setdefault(
                name, spack.variant.MultiValuedVariant(name, value)
            )
            return

        self._specs[pkg].update_variant_validate(name, value)
        self._specs[node].update_variant_validate(name, value)

    def version(self, pkg, version):
        self._specs[pkg].versions = vn.VersionList([vn.Version(version)])
    def version(self, node, version):
        self._specs[node].versions = vn.VersionList([vn.Version(version)])

    def node_compiler_version(self, pkg, compiler, version):
        self._specs[pkg].compiler = spack.spec.CompilerSpec(compiler)
        self._specs[pkg].compiler.versions = vn.VersionList([vn.Version(version)])
    def node_compiler_version(self, node, compiler, version):
        self._specs[node].compiler = spack.spec.CompilerSpec(compiler)
        self._specs[node].compiler.versions = vn.VersionList([vn.Version(version)])

    def node_flag_compiler_default(self, pkg):
        self._flag_compiler_defaults.add(pkg)
    def node_flag_compiler_default(self, node):
        self._flag_compiler_defaults.add(node)

    def node_flag(self, pkg, flag_type, flag):
        self._specs[pkg].compiler_flags.add_flag(flag_type, flag, False)
    def node_flag(self, node, flag_type, flag):
        self._specs[node].compiler_flags.add_flag(flag_type, flag, False)

    def node_flag_source(self, pkg, flag_type, source):
        self._flag_sources[(pkg, flag_type)].add(source)
    def node_flag_source(self, node, flag_type, source):
        self._flag_sources[(node, flag_type)].add(source)

    def no_flags(self, pkg, flag_type):
        self._specs[pkg].compiler_flags[flag_type] = []
    def no_flags(self, node, flag_type):
        self._specs[node].compiler_flags[flag_type] = []

    def external_spec_selected(self, pkg, idx):
    def external_spec_selected(self, node, idx):
        """This means that the external spec and index idx
        has been selected for this package.
        """

        packages_yaml = spack.config.get("packages")
        packages_yaml = _normalize_packages_yaml(packages_yaml)
        spec_info = packages_yaml[pkg]["externals"][int(idx)]
        self._specs[pkg].external_path = spec_info.get("prefix", None)
        self._specs[pkg].external_modules = spack.spec.Spec._format_module_list(
        spec_info = packages_yaml[node.pkg]["externals"][int(idx)]
        self._specs[node].external_path = spec_info.get("prefix", None)
        self._specs[node].external_modules = spack.spec.Spec._format_module_list(
            spec_info.get("modules", None)
        )
        self._specs[pkg].extra_attributes = spec_info.get("extra_attributes", {})
        self._specs[node].extra_attributes = spec_info.get("extra_attributes", {})

        # If this is an extension, update the dependencies to include the extendee
        package = self._specs[pkg].package_class(self._specs[pkg])
        package = self._specs[node].package_class(self._specs[node])
        extendee_spec = package.extendee_spec

        if extendee_spec:
            package.update_external_dependencies(self._specs.get(extendee_spec.name, None))
            extendee_node = SpecBuilder.make_node(pkg=extendee_spec.name)
            package.update_external_dependencies(self._specs.get(extendee_node, None))

    def depends_on(self, pkg, dep, type):
        dependencies = self._specs[pkg].edges_to_dependencies(name=dep)
    def depends_on(self, parent_node, dependency_node, type):
        dependency_spec = self._specs[dependency_node]
        edges = self._specs[parent_node].edges_to_dependencies(name=dependency_spec.name)
        edges = [x for x in edges if id(x.spec) == id(dependency_spec)]

        # TODO: assertion to be removed when cross-compilation is handled correctly
        msg = "Current solver does not handle multiple dependency edges of the same name"
        assert len(dependencies) < 2, msg

        if not dependencies:
            self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,), virtuals=())
        if not edges:
            self._specs[parent_node].add_dependency_edge(
                self._specs[dependency_node], deptypes=(type,), virtuals=()
            )
        else:
            # TODO: This assumes that each solve unifies dependencies
            dependencies[0].update_deptypes(deptypes=(type,))
            edges[0].update_deptypes(deptypes=(type,))
|
||||
|
||||
def virtual_on_edge(self, pkg, provider, virtual):
|
||||
dependencies = self._specs[pkg].edges_to_dependencies(name=provider)
|
||||
assert len(dependencies) == 1
|
||||
def virtual_on_edge(self, parent_node, provider_node, virtual):
|
||||
dependencies = self._specs[parent_node].edges_to_dependencies(name=(provider_node.pkg))
|
||||
provider_spec = self._specs[provider_node]
|
||||
dependencies = [x for x in dependencies if id(x.spec) == id(provider_spec)]
|
||||
assert len(dependencies) == 1, f"{virtual}: {provider_node.pkg}"
|
||||
dependencies[0].update_virtuals((virtual,))
|
||||
|
||||
def reorder_flags(self):
|
||||
@@ -2537,19 +2690,23 @@ def reorder_flags(self):
|
||||
|
||||
# order is determined by the DAG. A spec's flags come after any of its ancestors
|
||||
# on the compile line
|
||||
source_key = (spec.name, flag_type)
|
||||
node = SpecBuilder.make_node(pkg=spec.name)
|
||||
source_key = (node, flag_type)
|
||||
if source_key in self._flag_sources:
|
||||
order = [s.name for s in spec.traverse(order="post", direction="parents")]
|
||||
order = [
|
||||
SpecBuilder.make_node(pkg=s.name)
|
||||
for s in spec.traverse(order="post", direction="parents")
|
||||
]
|
||||
sorted_sources = sorted(
|
||||
self._flag_sources[source_key], key=lambda s: order.index(s)
|
||||
)
|
||||
|
||||
# add flags from each source, lowest to highest precedence
|
||||
for name in sorted_sources:
|
||||
for node in sorted_sources:
|
||||
all_src_flags = list()
|
||||
per_pkg_sources = [self._specs[name]]
|
||||
if name in cmd_specs:
|
||||
per_pkg_sources.append(cmd_specs[name])
|
||||
per_pkg_sources = [self._specs[node]]
|
||||
if node.pkg in cmd_specs:
|
||||
per_pkg_sources.append(cmd_specs[node.pkg])
|
||||
for source in per_pkg_sources:
|
||||
all_src_flags.extend(source.compiler_flags.get(flag_type, []))
|
||||
extend_flag_list(from_sources, all_src_flags)
|
||||
@@ -2620,14 +2777,15 @@ def build_specs(self, function_tuples):
|
||||
# solving but don't construct anything. Do not ignore error
|
||||
# predicates on virtual packages.
|
||||
if name != "error":
|
||||
pkg = args[0]
|
||||
node = args[0]
|
||||
pkg = node.pkg
|
||||
if spack.repo.PATH.is_virtual(pkg):
|
||||
continue
|
||||
|
||||
# if we've already gotten a concrete spec for this pkg,
|
||||
# do not bother calling actions on it except for node_flag_source,
|
||||
# since node_flag_source is tracking information not in the spec itself
|
||||
spec = self._specs.get(pkg)
|
||||
spec = self._specs.get(args[0])
|
||||
if spec and spec.concrete:
|
||||
if name != "node_flag_source":
|
||||
continue
|
||||
@@ -2645,10 +2803,12 @@ def build_specs(self, function_tuples):
|
||||
# fix flags after all specs are constructed
|
||||
self.reorder_flags()
|
||||
|
||||
# cycle detection
|
||||
roots = [spec.root for spec in self._specs.values() if not spec.root.installed]
|
||||
|
||||
# inject patches -- note that we' can't use set() to unique the
|
||||
# roots here, because the specs aren't complete, and the hash
|
||||
# function will loop forever.
|
||||
roots = [spec.root for spec in self._specs.values() if not spec.root.installed]
|
||||
roots = dict((id(r), r) for r in roots)
|
||||
for root in roots.values():
|
||||
spack.spec.Spec.inject_patches_variant(root)
|
||||
@@ -2768,7 +2928,7 @@ def solve(self, specs, out=None, timers=False, stats=False, tests=False, setup_o
|
||||
Arguments:
|
||||
specs (list): List of ``Spec`` objects to solve for.
|
||||
out: Optionally write the generate ASP program to a file-like object.
|
||||
timers (bool): Print out coarse fimers for different solve phases.
|
||||
timers (bool): Print out coarse timers for different solve phases.
|
||||
stats (bool): Print out detailed stats from clingo.
|
||||
tests (bool or tuple): If True, concretize test dependencies for all packages.
|
||||
If a tuple of package names, concretize test dependencies for named
|
||||
@@ -2776,6 +2936,7 @@ def solve(self, specs, out=None, timers=False, stats=False, tests=False, setup_o
|
||||
setup_only (bool): if True, stop after setup and don't solve (default False).
|
||||
"""
|
||||
# Check upfront that the variants are admissible
|
||||
specs = [s.lookup_hash() for s in specs]
|
||||
reusable_specs = self._check_input_and_extract_concrete_specs(specs)
|
||||
reusable_specs.extend(self._reusable_specs(specs))
|
||||
setup = SpackSolverSetup(tests=tests)
|
||||
@@ -2799,6 +2960,7 @@ def solve_in_rounds(self, specs, out=None, timers=False, stats=False, tests=Fals
|
||||
stats (bool): print internal statistics if set to True
|
||||
tests (bool): add test dependencies to the solve
|
||||
"""
|
||||
specs = [s.lookup_hash() for s in specs]
|
||||
reusable_specs = self._check_input_and_extract_concrete_specs(specs)
|
||||
reusable_specs.extend(self._reusable_specs(specs))
|
||||
setup = SpackSolverSetup(tests=tests)
|
||||
|
||||
File diff suppressed because it is too large
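The hunks above all make the same move: SpecBuilder callbacks stop taking a bare package name (pkg) and start taking a node argument, so self._specs can hold more than one node per package. A minimal sketch of the idea in Python, assuming the node key behaves like a NamedTuple (the real node type and SpecBuilder.make_node are defined in the suppressed portion of the diff, so the shape shown here is an assumption):

    from typing import NamedTuple


    class NodeArgument(NamedTuple):
        # Assumed shape: hashable (usable as a dict key like SpecBuilder._specs)
        # and carrying the package name as .pkg.
        id: str
        pkg: str


    def make_node(*, pkg: str) -> NodeArgument:
        # With a single unification set per package, every node gets id "0",
        # mirroring the node(0, Package) terms in the heuristic files below.
        return NodeArgument(id="0", pkg=pkg)


    specs = {}
    node = make_node(pkg="zlib")
    specs[node] = "Spec('zlib')"  # keyed by the node, not by the bare name
    assert node.pkg == "zlib"     # callbacks can still recover the name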
lib/spack/spack/solver/counter.py (new file, 159 lines)
@@ -0,0 +1,159 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
from typing import List, Set, Tuple

import spack.dependency
import spack.package_base

PossibleDependencies = Set[str]


class Counter:
    """Computes the possible packages and the maximum number of duplicates
    allowed for each of them.

    Args:
        specs: abstract specs to concretize
        tests: if True, add test dependencies to the list of possible packages
    """

    def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
        self.specs = specs

        self.link_run_types: Tuple[str, ...] = ("link", "run", "test")
        self.all_types: Tuple[str, ...] = spack.dependency.all_deptypes
        if not tests:
            self.link_run_types = ("link", "run")
            self.all_types = ("link", "run", "build")

        self._possible_dependencies: PossibleDependencies = set()
        self._possible_virtuals: Set[str] = set(x.name for x in specs if x.virtual)

    def possible_dependencies(self) -> PossibleDependencies:
        """Returns the list of possible dependencies"""
        self.ensure_cache_values()
        return self._possible_dependencies

    def possible_virtuals(self) -> Set[str]:
        """Returns the list of possible virtuals"""
        self.ensure_cache_values()
        return self._possible_virtuals

    def ensure_cache_values(self) -> None:
        """Ensure the cache values have been computed"""
        if self._possible_dependencies:
            return
        self._compute_cache_values()

    def possible_packages_facts(self, gen: "spack.solver.asp.PyclingoDriver", fn) -> None:
        """Emit facts associated with the possible packages"""
        raise NotImplementedError("must be implemented by derived classes")

    def _compute_cache_values(self):
        raise NotImplementedError("must be implemented by derived classes")


class NoDuplicatesCounter(Counter):
    def _compute_cache_values(self):
        result = spack.package_base.possible_dependencies(
            *self.specs, virtuals=self._possible_virtuals, deptype=self.all_types
        )
        self._possible_dependencies = set(result)

    def possible_packages_facts(self, gen, fn):
        gen.h2("Maximum number of nodes (packages)")
        for package_name in sorted(self.possible_dependencies()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()
        gen.h2("Maximum number of nodes (virtual packages)")
        for package_name in sorted(self.possible_virtuals()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()
        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self.possible_dependencies()):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()


class MinimalDuplicatesCounter(NoDuplicatesCounter):
    def __init__(self, specs, tests):
        super().__init__(specs, tests)
        self._link_run: PossibleDependencies = set()
        self._direct_build: PossibleDependencies = set()
        self._total_build: PossibleDependencies = set()
        self._link_run_virtuals: Set[str] = set()

    def _compute_cache_values(self):
        self._link_run = set(
            spack.package_base.possible_dependencies(
                *self.specs, virtuals=self._possible_virtuals, deptype=self.link_run_types
            )
        )
        self._link_run_virtuals.update(self._possible_virtuals)
        for x in self._link_run:
            current = spack.repo.PATH.get_pkg_class(x).dependencies_of_type("build")
            self._direct_build.update(current)

        self._total_build = set(
            spack.package_base.possible_dependencies(
                *self._direct_build, virtuals=self._possible_virtuals, deptype=self.all_types
            )
        )
        self._possible_dependencies = set(self._link_run) | set(self._total_build)

    def possible_packages_facts(self, gen, fn):
        build_tools = set(spack.repo.PATH.packages_with_tags("build-tools"))
        gen.h2("Packages with at most a single node")
        for package_name in sorted(self.possible_dependencies() - build_tools):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()

        gen.h2("Packages with at multiple possible nodes (build-tools)")
        for package_name in sorted(self.possible_dependencies() & build_tools):
            gen.fact(fn.max_dupes(package_name, 2))
            gen.fact(fn.multiple_unification_sets(package_name))
        gen.newline()

        gen.h2("Maximum number of nodes (virtual packages)")
        for package_name in sorted(self.possible_virtuals()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()

        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self._link_run):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()


class FullDuplicatesCounter(MinimalDuplicatesCounter):
    def possible_packages_facts(self, gen, fn):
        build_tools = set(spack.repo.PATH.packages_with_tags("build-tools"))
        counter = collections.Counter(
            list(self._link_run) + list(self._total_build) + list(self._direct_build)
        )
        gen.h2("Maximum number of nodes")
        for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
            count = min(count, 2)
            gen.fact(fn.max_dupes(pkg, count))
        gen.newline()

        gen.h2("Build unification sets ")
        for name in sorted(self.possible_dependencies() & build_tools):
            gen.fact(fn.multiple_unification_sets(name))
        gen.newline()

        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self._link_run):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()

        counter = collections.Counter(
            list(self._link_run_virtuals) + list(self._possible_virtuals)
        )
        gen.h2("Maximum number of virtual nodes")
        for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
            gen.fact(fn.max_dupes(pkg, count))
        gen.newline()
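Counter's docstring above states the contract: compute the possible packages and the per-package duplicate limits, then emit them as solver facts. How a caller chooses among the three subclasses is not part of this excerpt; a hypothetical factory, with assumed strategy names, might look like:

    def create_counter(specs, tests: bool, strategy: str = "none"):
        # Assumed mapping from a duplicates strategy to a Counter subclass;
        # the actual selection logic lives elsewhere in the PR.
        if strategy == "full":
            return FullDuplicatesCounter(specs, tests=tests)
        if strategy == "minimal":
            return MinimalDuplicatesCounter(specs, tests=tests)
        return NoDuplicatesCounter(specs, tests=tests)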
lib/spack/spack/solver/cycle_detection.lp (new file, 21 lines)
@@ -0,0 +1,21 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Avoid cycles in the DAG
%
% Some combinations of conditional dependencies can result in cycles;
% this ensures that we solve around them. Note that these rules are quite
% demanding on both grounding and solving, since they need to compute and
% consider all possible paths between pair of nodes.
%=============================================================================


#program no_cycle.
path(Parent, Child) :- depends_on(Parent, Child).
path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
:- path(A, A).

#defined depends_on/2.
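For intuition: the two rules compute path/2 as the transitive closure of depends_on/2, and the constraint discards any model in which a node reaches itself. A rough Python analogue of that fixpoint (illustration only; clingo grounds the rules rather than iterating):

    from typing import Set, Tuple


    def has_cycle(depends_on: Set[Tuple[str, str]]) -> bool:
        # path starts as depends_on and is closed under composition,
        # mirroring: path(Parent, D) :- path(Parent, A), depends_on(A, D).
        path = set(depends_on)
        changed = True
        while changed:
            changed = False
            for parent, mid in list(path):
                for a, descendant in depends_on:
                    if mid == a and (parent, descendant) not in path:
                        path.add((parent, descendant))
                        changed = True
        # :- path(A, A).  -- a self-reaching node invalidates the model
        return any(parent == child for parent, child in path)


    assert has_cycle({("a", "b"), ("b", "a")})
    assert not has_cycle({("a", "b"), ("b", "c")})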
lib/spack/spack/solver/heuristic.lp (new file, 29 lines)
@@ -0,0 +1,29 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Heuristic to speed-up solves (node with ID 0)
%=============================================================================


%-----------------
% Domain heuristic
%-----------------
#heuristic attr("hash", node(0, Package), Hash) : literal(_, "root", Package). [45, init]
#heuristic attr("root", node(0, Package)) : literal(_, "root", Package). [45, true]
#heuristic attr("node", node(0, Package)) : literal(_, "root", Package). [45, true]
#heuristic attr("node", node(0, Package)) : literal(_, "node", Package). [45, true]

% Root node
#heuristic attr("version", node(0, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
#heuristic version_weight(node(0, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
#heuristic attr("variant_value", node(0, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("root", node(0, Package)). [35, true]
#heuristic attr("node_target", node(0, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("root", node(0, Package)). [35, true]
#heuristic node_target_weight(node(0, Package), 0) : attr("root", node(0, Package)). [35, true]
#heuristic node_compiler(node(0, Package), CompilerID) : default_compiler_preference(ID, 0), compiler_id(ID), attr("root", node(0, Package)). [35, true]

% Providers
#heuristic attr("node", node(0, Package)) : default_provider_preference(Virtual, Package, 0), possible_in_link_run(Package). [30, true]
lib/spack/spack/solver/heuristic_separate.lp (new file, 24 lines)
@@ -0,0 +1,24 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Heuristic to speed-up solves (node with ID > 0)
%=============================================================================

% node(ID, _)
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]

% node(ID, _), split build dependencies
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
@@ -3,9 +3,11 @@
 %
 % SPDX-License-Identifier: (Apache-2.0 OR MIT)

 %=============================================================================
 % OS compatibility rules for reusing solves.
+% os_compatible(RecentOS, OlderOS)
+% OlderOS binaries can be used on RecentOS
 %=============================================================================

 % macOS
 os_compatible("monterey", "bigsur").
lib/spack/spack/solver/when_possible.lp (new file, 27 lines)
@@ -0,0 +1,27 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Minimize the number of literals that are not solved
%
% This minimization is used for the "when_possible" concretization mode,
% otherwise we assume that all literals must be solved.
%=============================================================================

% Give clingo the choice to solve an input spec or not
{ solve_literal(ID) } :- literal(ID).
literal_not_solved(ID) :- not solve_literal(ID), literal(ID).

% Make a problem with "zero literals solved" unsat. This is to trigger
% looking for solutions to the ASP problem with "errors", which results
% in better reporting for users. See #30669 for details.
1 { solve_literal(ID) : literal(ID) }.

opt_criterion(300, "number of input specs not concretized").
#minimize{ 0@300: #true }.
#minimize { 1@300,ID : literal_not_solved(ID) }.

#heuristic literal_solved(ID) : literal(ID). [1, sign]
#heuristic literal_solved(ID) : literal(ID). [50, init]
@@ -112,50 +112,49 @@
     "UnsatisfiableDependencySpecError",
     "AmbiguousHashError",
     "InvalidHashError",
     "RedundantSpecError",
     "SpecDeprecatedError",
 ]

 #: Valid pattern for an identifier in Spack
-
-identifier_re = r"\w[\w-]*"
+IDENTIFIER_RE = r"\w[\w-]*"

-compiler_color = "@g"  #: color for highlighting compilers
-version_color = "@c"  #: color for highlighting versions
-architecture_color = "@m"  #: color for highlighting architectures
-enabled_variant_color = "@B"  #: color for highlighting enabled variants
-disabled_variant_color = "r"  #: color for highlighting disabled varaints
-dependency_color = "@."  #: color for highlighting dependencies
-hash_color = "@K"  #: color for highlighting package hashes
+COMPILER_COLOR = "@g"  #: color for highlighting compilers
+VERSION_COLOR = "@c"  #: color for highlighting versions
+ARCHITECTURE_COLOR = "@m"  #: color for highlighting architectures
+ENABLED_VARIANT_COLOR = "@B"  #: color for highlighting enabled variants
+DISABLED_VARIANT_COLOR = "r"  #: color for highlighting disabled varaints
+DEPENDENCY_COLOR = "@."  #: color for highlighting dependencies
+HASH_COLOR = "@K"  #: color for highlighting package hashes

 #: This map determines the coloring of specs when using color output.
 #: We make the fields different colors to enhance readability.
 #: See llnl.util.tty.color for descriptions of the color codes.
-color_formats = {
-    "%": compiler_color,
-    "@": version_color,
-    "=": architecture_color,
-    "+": enabled_variant_color,
-    "~": disabled_variant_color,
-    "^": dependency_color,
-    "#": hash_color,
+COLOR_FORMATS = {
+    "%": COMPILER_COLOR,
+    "@": VERSION_COLOR,
+    "=": ARCHITECTURE_COLOR,
+    "+": ENABLED_VARIANT_COLOR,
+    "~": DISABLED_VARIANT_COLOR,
+    "^": DEPENDENCY_COLOR,
+    "#": HASH_COLOR,
 }

 #: Regex used for splitting by spec field separators.
 #: These need to be escaped to avoid metacharacters in
-#: ``color_formats.keys()``.
-_separators = "[\\%s]" % "\\".join(color_formats.keys())
+#: ``COLOR_FORMATS.keys()``.
+_SEPARATORS = "[\\%s]" % "\\".join(COLOR_FORMATS.keys())

 #: Default format for Spec.format(). This format can be round-tripped, so that:
 #: Spec(Spec("string").format()) == Spec("string)"
-default_format = (
+DEFAULT_FORMAT = (
     "{name}{@versions}"
     "{%compiler.name}{@compiler.versions}{compiler_flags}"
     "{variants}{arch=architecture}{/abstract_hash}"
 )

 #: Display format, which eliminates extra `@=` in the output, for readability.
-display_format = (
+DISPLAY_FORMAT = (
     "{name}{@version}"
     "{%compiler.name}{@compiler.version}{compiler_flags}"
     "{variants}{arch=architecture}{/abstract_hash}"
@@ -187,7 +186,7 @@ class InstallStatus(enum.Enum):

 def colorize_spec(spec):
     """Returns a spec colorized according to the colors specified in
-    color_formats."""
+    COLOR_FORMATS."""

     class insert_color:
         def __init__(self):
@@ -200,9 +199,9 @@ def __call__(self, match):
                 return clr.cescape(sep)
             self.last = sep

-            return "%s%s" % (color_formats[sep], clr.cescape(sep))
+            return "%s%s" % (COLOR_FORMATS[sep], clr.cescape(sep))

-    return clr.colorize(re.sub(_separators, insert_color(), str(spec)) + "@.")
+    return clr.colorize(re.sub(_SEPARATORS, insert_color(), str(spec)) + "@.")


 @lang.lazy_lexicographic_ordering
@@ -985,16 +984,14 @@ def __iter__(self):
     def __len__(self):
         return len(self.edges)

-    def add(self, edge):
-        """Adds a new edge to this object.
-
-        Args:
-            edge (DependencySpec): edge to be added
-        """
+    def add(self, edge: DependencySpec):
         key = edge.spec.name if self.store_by_child else edge.parent.name
-        current_list = self.edges.setdefault(key, [])
-        current_list.append(edge)
-        current_list.sort(key=_sort_by_dep_types)
+        if key in self.edges:
+            lst = self.edges[key]
+            lst.append(edge)
+            lst.sort(key=_sort_by_dep_types)
+        else:
+            self.edges[key] = [edge]

     def __str__(self):
         return "{deps: %s}" % ", ".join(str(d) for d in sorted(self.values()))
@@ -1927,19 +1924,15 @@ def _lookup_hash(self):
         store, or finally, binary caches."""
         import spack.environment

-        matches = []
         active_env = spack.environment.active_environment()

-        if active_env:
-            env_matches = active_env.get_by_hash(self.abstract_hash) or []
-            matches = [m for m in env_matches if m._satisfies(self)]
-        if not matches:
-            db_matches = spack.store.STORE.db.get_by_hash(self.abstract_hash) or []
-            matches = [m for m in db_matches if m._satisfies(self)]
-        if not matches:
-            query = spack.binary_distribution.BinaryCacheQuery(True)
-            remote_matches = query("/" + self.abstract_hash) or []
-            matches = [m for m in remote_matches if m._satisfies(self)]
+        # First env, then store, then binary cache
+        matches = (
+            (active_env.all_matching_specs(self) if active_env else [])
+            or spack.store.STORE.db.query(self, installed=any)
+            or spack.binary_distribution.BinaryCacheQuery(True)(self)
+        )

         if not matches:
             raise InvalidHashError(self, self.abstract_hash)
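The rewritten _lookup_hash() collapses the three fallback lookups into one short-circuiting or-chain: the store is only queried when the active environment had no match, and the binary cache only as a last resort. The pattern in miniature:

    def lookup(env_matches, store_matches, cache_matches):
        # `or` short-circuits on the first non-empty list, so later
        # (more expensive) sources are never consulted unnecessarily.
        return env_matches or store_matches or cache_matches


    assert lookup([], ["store-spec"], ["cache-spec"]) == ["store-spec"]
    assert lookup(["env-spec"], [], []) == ["env-spec"]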
@@ -1960,19 +1953,17 @@ def lookup_hash(self):
         spec = self.copy(deps=False)
         # root spec is replaced
         if spec.abstract_hash:
-            new = self._lookup_hash()
-            spec._dup(new)
+            spec._dup(self._lookup_hash())
             return spec

         # Get dependencies that need to be replaced
         for node in self.traverse(root=False):
             if node.abstract_hash:
-                new = node._lookup_hash()
-                spec._add_dependency(new, deptypes=(), virtuals=())
+                spec._add_dependency(node._lookup_hash(), deptypes=(), virtuals=())

         # reattach nodes that were not otherwise satisfied by new dependencies
         for node in self.traverse(root=False):
-            if not any(n._satisfies(node) for n in spec.traverse()):
+            if not any(n.satisfies(node) for n in spec.traverse()):
                 spec._add_dependency(node.copy(), deptypes=(), virtuals=())

         return spec
@@ -1985,9 +1976,7 @@ def replace_hash(self):
         if not any(node for node in self.traverse(order="post") if node.abstract_hash):
             return

-        spec_by_hash = self.lookup_hash()
-
-        self._dup(spec_by_hash)
+        self._dup(self.lookup_hash())

     def to_node_dict(self, hash=ht.dag_hash):
         """Create a dictionary representing the state of this Spec.
@@ -2983,9 +2972,12 @@ def _new_concretize(self, tests=False):
             providers = [spec.name for spec in answer.values() if spec.package.provides(name)]
             name = providers[0]

-        assert name in answer
+        node = spack.solver.asp.SpecBuilder.make_node(pkg=name)
+        assert (
+            node in answer
+        ), f"cannot find {name} in the list of specs {','.join([n.pkg for n in answer.keys()])}"

-        concretized = answer[name]
+        concretized = answer[node]
         self._dup(concretized)

     def concretize(self, tests=False):
@@ -3519,7 +3511,8 @@ def update_variant_validate(self, variant_name, values):
         for value in values:
             if self.variants.get(variant_name):
                 msg = (
-                    "Cannot append a value to a single-valued " "variant with an already set value"
+                    f"cannot append the new value '{value}' to the single-valued "
+                    f"variant '{self.variants[variant_name]}'"
                 )
                 assert pkg_variant.multi, msg
             self.variants[variant_name].append(value)
@@ -3719,15 +3712,19 @@ def intersects(self, other: "Spec", deps: bool = True) -> bool:
         """
         other = self._autospec(other)

-        lhs = self.lookup_hash() or self
-        rhs = other.lookup_hash() or other
-
-        return lhs._intersects(rhs, deps)
-
-    def _intersects(self, other: "Spec", deps: bool = True) -> bool:
         if other.concrete and self.concrete:
             return self.dag_hash() == other.dag_hash()

+        self_hash = self.dag_hash() if self.concrete else self.abstract_hash
+        other_hash = other.dag_hash() if other.concrete else other.abstract_hash
+
+        if (
+            self_hash
+            and other_hash
+            and not (self_hash.startswith(other_hash) or other_hash.startswith(self_hash))
+        ):
+            return False
+
         # If the names are different, we need to consider virtuals
         if self.name != other.name and self.name and other.name:
             if self.virtual and other.virtual:
@@ -3787,19 +3784,8 @@ def _intersects(self, other: "Spec", deps: bool = True) -> bool:
         # If we need to descend into dependencies, do it, otherwise we're done.
         if deps:
             return self._intersects_dependencies(other)
-        else:
-            return True

-    def satisfies(self, other, deps=True):
-        """
-        This checks constraints on common dependencies against each other.
-        """
-        other = self._autospec(other)
-
-        lhs = self.lookup_hash() or self
-        rhs = other.lookup_hash() or other
-
-        return lhs._satisfies(rhs, deps=deps)
+        return True

     def _intersects_dependencies(self, other):
         if not other._dependencies or not self._dependencies:
@@ -3836,7 +3822,7 @@ def _intersects_dependencies(self, other):

         return True

-    def _satisfies(self, other: "Spec", deps: bool = True) -> bool:
+    def satisfies(self, other: "Spec", deps: bool = True) -> bool:
         """Return True if all concrete specs matching self also match other, otherwise False.

         Args:
@@ -3851,6 +3837,13 @@ def _satisfies(self, other: "Spec", deps: bool = True) -> bool:
             # objects.
             return self.concrete and self.dag_hash() == other.dag_hash()

+        # If the right-hand side has an abstract hash, make sure it's a prefix of the
+        # left-hand side's (abstract) hash.
+        if other.abstract_hash:
+            compare_hash = self.dag_hash() if self.concrete else self.abstract_hash
+            if not compare_hash or not compare_hash.startswith(other.abstract_hash):
+                return False
+
         # If the names are different, we need to consider virtuals
         if self.name != other.name and self.name and other.name:
             # A concrete provider can satisfy a virtual dependency.
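Note the asymmetry between the two predicates' hash handling: intersects accepts either hash being a prefix of the other, while satisfies requires the right-hand side's abstract hash to be a prefix of the left-hand side's hash. Both checks in miniature (simplified helper names, not Spack API):

    from typing import Optional


    def hashes_intersect(lhs: Optional[str], rhs: Optional[str]) -> bool:
        # Symmetric: compatible as long as one hash is a prefix of the
        # other (or either side carries no hash constraint yet).
        if not lhs or not rhs:
            return True
        return lhs.startswith(rhs) or rhs.startswith(lhs)


    def hash_satisfies(lhs: Optional[str], rhs_abstract: str) -> bool:
        # Asymmetric: the right-hand side's abstract hash must prefix ours.
        return bool(lhs) and lhs.startswith(rhs_abstract)


    assert hashes_intersect("abc123", "abc")
    assert hashes_intersect("abc", "abc123")
    assert hash_satisfies("abc123", "abc")
    assert not hash_satisfies("abc", "abc123")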
@@ -4227,9 +4220,7 @@ def eq_node(self, other):
     def _cmp_iter(self):
         """Lazily yield components of self for comparison."""

-        cmp_spec = self.lookup_hash() or self
-
-        for item in cmp_spec._cmp_node():
+        for item in self._cmp_node():
             yield item

         # This needs to be in _cmp_iter so that no specs with different process hashes
@@ -4240,10 +4231,10 @@ def _cmp_iter(self):
         # TODO: they exist for speed. We should benchmark whether it's really worth
         # TODO: having two types of hashing now that we use `json` instead of `yaml` for
         # TODO: spec hashing.
-        yield cmp_spec.process_hash() if cmp_spec.concrete else None
+        yield self.process_hash() if self.concrete else None

         def deps():
-            for dep in sorted(itertools.chain.from_iterable(cmp_spec._dependencies.values())):
+            for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
                 yield dep.spec.name
                 yield tuple(sorted(dep.deptypes))
                 yield hash(dep.spec)
@@ -4253,7 +4244,7 @@ def deps():
     def colorized(self):
         return colorize_spec(self)

-    def format(self, format_string=default_format, **kwargs):
+    def format(self, format_string=DEFAULT_FORMAT, **kwargs):
         r"""Prints out particular pieces of a spec, depending on what is
         in the format string.
@@ -4332,7 +4323,7 @@ def format(self, format_string=default_format, **kwargs):
         def write(s, c=None):
             f = clr.cescape(s)
             if c is not None:
-                f = color_formats[c] + f + "@."
+                f = COLOR_FORMATS[c] + f + "@."
             clr.cwrite(f, stream=out, color=color)

         def write_attribute(spec, attribute, color):
@@ -4531,7 +4522,7 @@ def tree(self, **kwargs):
         status_fn = kwargs.pop("status_fn", False)
         cover = kwargs.pop("cover", "nodes")
         indent = kwargs.pop("indent", 0)
-        fmt = kwargs.pop("format", default_format)
+        fmt = kwargs.pop("format", DEFAULT_FORMAT)
         prefix = kwargs.pop("prefix", None)
         show_types = kwargs.pop("show_types", False)
         deptypes = kwargs.pop("deptypes", "all")
@@ -5339,14 +5330,6 @@ class NoSuchSpecFileError(SpecFilenameError):
     """Raised when a spec file doesn't exist."""


-class RedundantSpecError(spack.error.SpecError):
-    def __init__(self, spec, addition):
-        super().__init__(
-            "Attempting to add %s to spec %s which is already concrete."
-            " This is likely the result of adding to a spec specified by hash." % (addition, spec)
-        )
-
-
 class SpecFormatStringError(spack.error.SpecError):
     """Called for errors in Spec format strings."""
@@ -197,7 +197,9 @@ def _expand_matrix_constraints(matrix_config):
     for combo in itertools.product(*expanded_rows):
         # Construct a combined spec to test against excludes
         flat_combo = [constraint for constraint_list in combo for constraint in constraint_list]
-        flat_combo = [Spec(x) for x in flat_combo]
+
+        # Resolve abstract hashes so we can exclude by their concrete properties
+        flat_combo = [Spec(x).lookup_hash() for x in flat_combo]

         test_spec = flat_combo[0].copy()
         for constraint in flat_combo[1:]:
@@ -484,7 +484,7 @@ def fetch(self, mirror_only=False, err_msg=None):

         if self.default_fetcher.cachable:
             for rel_path in reversed(list(self.mirror_paths)):
-                cache_fetcher = spack.caches.fetch_cache.fetcher(
+                cache_fetcher = spack.caches.FETCH_CACHE.fetcher(
                     rel_path, digest, expand=expand, extension=extension
                 )
                 fetchers.insert(0, cache_fetcher)
@@ -577,7 +577,7 @@ def check(self):
         self.fetcher.check()

     def cache_local(self):
-        spack.caches.fetch_cache.store(self.fetcher, self.mirror_paths.storage_path)
+        spack.caches.FETCH_CACHE.store(self.fetcher, self.mirror_paths.storage_path)

     def cache_mirror(self, mirror, stats):
         """Perform a fetch if the resource is not already cached
@@ -212,7 +212,7 @@ def create(configuration: ConfigurationType) -> Store:
     Args:
         configuration: configuration to create a store.
     """
-    configuration = configuration or spack.config.config
+    configuration = configuration or spack.config.CONFIG
    config_dict = configuration.get("config")
    root, unpadded_root, projections = parse_install_tree(config_dict)
    hash_length = configuration.get("config:install_hash_length")
@@ -234,7 +234,7 @@ def create(configuration: ConfigurationType) -> Store:


 def _create_global() -> Store:
-    result = create(configuration=spack.config.config)
+    result = create(configuration=spack.config.CONFIG)
     return result


@@ -372,10 +372,10 @@ def use_store(

     # Swap the store with the one just constructed and return it
     ensure_singleton_created()
-    spack.config.config.push_scope(
+    spack.config.CONFIG.push_scope(
         spack.config.InternalConfigScope(name=scope_name, data={"config": {"install_tree": data}})
     )
-    temporary_store = create(configuration=spack.config.config)
+    temporary_store = create(configuration=spack.config.CONFIG)
     original_store, STORE = STORE, temporary_store

     try:
@@ -383,7 +383,7 @@ def use_store(
     finally:
         # Restore the original store
         STORE = original_store
-        spack.config.config.remove_scope(scope_name=scope_name)
+        spack.config.CONFIG.remove_scope(scope_name=scope_name)


 class MatchError(spack.error.SpackError):
@@ -94,14 +94,14 @@ class TestState:

     def __init__(self):
         if _SERIALIZE:
-            self.config = spack.config.config
+            self.config = spack.config.CONFIG
             self.platform = spack.platforms.host
             self.test_patches = store_patches()
             self.store = spack.store.STORE

     def restore(self):
         if _SERIALIZE:
-            spack.config.config = self.config
+            spack.config.CONFIG = self.config
             spack.repo.PATH = spack.repo.create(self.config)
             spack.platforms.host = self.platform
             spack.store.STORE = self.store
@@ -199,15 +199,11 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
     ],
 )
 @pytest.mark.usefixtures("mock_packages", "config")
+@pytest.mark.only_clingo("Fixing the parser broke this test for the original concretizer.")
 def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
     # Monkeypatch so that all concretization is done as if the machine is core2
     monkeypatch.setattr(spack.platforms.test.Test, "default", "core2")

-    # use foobar=bar to make the problem simpler for the old concretizer
-    # the new concretizer should not need that help
-    if spack.config.get("config:concretizer") == "original":
-        pytest.skip("Fixing the parser broke this test for the original concretizer.")
-
     spec_str = "a %%gcc@10 foobar=bar target=%s ^b target=%s" % (
         root_target_range,
         dep_target_range,
@@ -37,7 +37,7 @@
 from spack.paths import test_path
 from spack.spec import Spec

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")

 mirror_cmd = spack.main.SpackCommand("mirror")
 install_cmd = spack.main.SpackCommand("install")
|
||||
@@ -51,7 +51,7 @@
|
||||
def cache_directory(tmpdir):
|
||||
fetch_cache_dir = tmpdir.ensure("fetch_cache", dir=True)
|
||||
fsc = spack.fetch_strategy.FsCache(str(fetch_cache_dir))
|
||||
spack.config.caches, old_cache_path = fsc, spack.caches.fetch_cache
|
||||
spack.config.caches, old_cache_path = fsc, spack.caches.FETCH_CACHE
|
||||
|
||||
yield spack.config.caches
|
||||
|
||||
@@ -115,8 +115,8 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl
|
||||
]
|
||||
)
|
||||
|
||||
spack.config.config, old_config = cfg, spack.config.config
|
||||
spack.config.config.set("repos", [spack.paths.mock_packages_path])
|
||||
spack.config.CONFIG, old_config = cfg, spack.config.CONFIG
|
||||
spack.config.CONFIG.set("repos", [spack.paths.mock_packages_path])
|
||||
njobs = spack.config.get("config:build_jobs")
|
||||
if not njobs:
|
||||
spack.config.set("config:build_jobs", 4, scope="user")
|
||||
@@ -138,9 +138,9 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl
|
||||
if not timeout:
|
||||
spack.config.set("config:connect_timeout", 10, scope="user")
|
||||
|
||||
yield spack.config.config
|
||||
yield spack.config.CONFIG
|
||||
|
||||
spack.config.config = old_config
|
||||
spack.config.CONFIG = old_config
|
||||
mutable_dir.remove()
|
||||
|
||||
|
||||
|
||||
@@ -26,11 +26,11 @@ def test_store_is_restored_correctly_after_bootstrap(mutable_config, tmpdir):
|
||||
user_path = str(tmpdir.join("store"))
|
||||
with spack.store.use_store(user_path):
|
||||
assert spack.store.STORE.root == user_path
|
||||
assert spack.config.config.get("config:install_tree:root") == user_path
|
||||
assert spack.config.CONFIG.get("config:install_tree:root") == user_path
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
assert spack.store.STORE.root == spack.bootstrap.config.store_path()
|
||||
assert spack.store.STORE.root == user_path
|
||||
assert spack.config.config.get("config:install_tree:root") == user_path
|
||||
assert spack.config.CONFIG.get("config:install_tree:root") == user_path
|
||||
|
||||
|
||||
@pytest.mark.regression("38963")
|
||||
@@ -40,11 +40,11 @@ def test_store_padding_length_is_zero_during_bootstrapping(mutable_config, tmpdi
|
||||
"""
|
||||
user_path = str(tmpdir.join("store"))
|
||||
with spack.store.use_store(user_path, extra_data={"padded_length": 512}):
|
||||
assert spack.config.config.get("config:install_tree:padded_length") == 512
|
||||
assert spack.config.CONFIG.get("config:install_tree:padded_length") == 512
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
assert spack.store.STORE.root == spack.bootstrap.config.store_path()
|
||||
assert spack.config.config.get("config:install_tree:padded_length") == 0
|
||||
assert spack.config.config.get("config:install_tree:padded_length") == 512
|
||||
assert spack.config.CONFIG.get("config:install_tree:padded_length") == 0
|
||||
assert spack.config.CONFIG.get("config:install_tree:padded_length") == 512
|
||||
|
||||
|
||||
@pytest.mark.regression("38963")
|
||||
@@ -54,15 +54,15 @@ def test_install_tree_customization_is_respected(mutable_config, tmp_path):
|
||||
"""
|
||||
spack.store.reinitialize()
|
||||
store_dir = tmp_path / "store"
|
||||
spack.config.config.set("config:install_tree:root", str(store_dir))
|
||||
spack.config.CONFIG.set("config:install_tree:root", str(store_dir))
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
assert spack.store.STORE.root == spack.bootstrap.config.store_path()
|
||||
assert (
|
||||
spack.config.config.get("config:install_tree:root")
|
||||
spack.config.CONFIG.get("config:install_tree:root")
|
||||
== spack.bootstrap.config.store_path()
|
||||
)
|
||||
assert spack.config.config.get("config:install_tree:padded_length") == 0
|
||||
assert spack.config.config.get("config:install_tree:root") == str(store_dir)
|
||||
assert spack.config.CONFIG.get("config:install_tree:padded_length") == 0
|
||||
assert spack.config.CONFIG.get("config:install_tree:root") == str(store_dir)
|
||||
assert spack.store.STORE.root == str(store_dir)
|
||||
|
||||
|
||||
@@ -185,12 +185,12 @@ def test_bootstrap_custom_store_in_environment(mutable_config, tmpdir):
|
||||
|
||||
def test_nested_use_of_context_manager(mutable_config):
|
||||
"""Test nested use of the context manager"""
|
||||
user_config = spack.config.config
|
||||
user_config = spack.config.CONFIG
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
assert spack.config.config != user_config
|
||||
assert spack.config.CONFIG != user_config
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
assert spack.config.config != user_config
|
||||
assert spack.config.config == user_config
|
||||
assert spack.config.CONFIG != user_config
|
||||
assert spack.config.CONFIG == user_config
|
||||
|
||||
|
||||
@pytest.mark.parametrize("expected_missing", [False, True])
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -16,7 +15,7 @@
|
||||
|
||||
install = spack.main.SpackCommand("install")
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
|
||||
pytestmark = pytest.mark.not_on_windows("does not run on windows")
|
||||
|
||||
|
||||
def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir):
|
||||
|
||||
@@ -6,7 +6,6 @@
|
||||
import os
|
||||
import platform
|
||||
import posixpath
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -119,7 +118,7 @@ def __call__(self, *args, **kwargs):
|
||||
return mock_module_cmd
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Static to Shared not supported on Win (yet)")
|
||||
@pytest.mark.not_on_windows("Static to Shared not supported on Win (yet)")
|
||||
def test_static_to_shared_library(build_environment):
|
||||
os.environ["SPACK_TEST_COMMAND"] = "dump-args"
|
||||
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
|
||||
import glob
|
||||
import os
|
||||
import sys
|
||||
|
||||
import py.path
|
||||
import pytest
|
||||
@@ -43,7 +42,7 @@ def _func(dir_str):
|
||||
return _func
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="make not available on Windows")
|
||||
@pytest.mark.not_on_windows("make not available on Windows")
|
||||
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
|
||||
class TestTargets:
|
||||
@pytest.mark.parametrize(
|
||||
@@ -92,7 +91,7 @@ def test_negative_ninja_check(self, input_dir, test_dir, concretize_and_setup):
|
||||
s.package._if_ninja_target_execute("check")
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="autotools not available on windows")
|
||||
@pytest.mark.not_on_windows("autotools not available on windows")
|
||||
@pytest.mark.usefixtures("config", "mock_packages")
|
||||
class TestAutotoolsPackage:
|
||||
def test_with_or_without(self, default_mock_concretization):
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -106,10 +105,7 @@ def test_old_style_compatibility_with_super(spec_str, method_name, expected):
|
||||
assert value == expected
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32",
|
||||
reason="log_ouput cannot currently be used outside of subprocess on Windows",
|
||||
)
|
||||
@pytest.mark.not_on_windows("log_ouput cannot currently be used outside of subprocess on Windows")
|
||||
@pytest.mark.regression("33928")
|
||||
@pytest.mark.usefixtures("builder_test_repository", "config", "working_env")
|
||||
@pytest.mark.disable_clean_stage_check
|
||||
@@ -153,7 +149,7 @@ def test_monkey_patching_test_log_file():
|
||||
|
||||
# Windows context manager's __exit__ fails with ValueError ("I/O operation
|
||||
# on closed file").
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Does not run on windows")
|
||||
@pytest.mark.not_on_windows("Does not run on windows")
|
||||
def test_install_time_test_callback(tmpdir, config, mock_packages, mock_stage):
|
||||
"""Confirm able to run stand-alone test as a post-install callback."""
|
||||
s = spack.spec.Spec("py-test-callback").concretized()
|
||||
|
||||
@@ -8,7 +8,6 @@
|
||||
arguments correctly.
|
||||
"""
|
||||
import os
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -145,7 +144,7 @@
|
||||
+ test_args_without_paths
|
||||
)
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
|
||||
pytestmark = pytest.mark.not_on_windows("does not run on windows")
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
import itertools
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -35,7 +34,7 @@ def test_urlencode_string():
|
||||
assert ci._url_encode_string("Spack Test Project") == "Spack+Test+Project"
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
|
||||
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
|
||||
def test_import_signing_key(mock_gnupghome):
|
||||
signing_key_dir = spack_paths.mock_gpg_keys_path
|
||||
signing_key_path = os.path.join(signing_key_dir, "package-signing-key")
|
||||
@@ -427,18 +426,14 @@ def test_affected_specs_on_first_concretization(mutable_mock_env_path, mock_pack
|
||||
assert len(mpileaks_specs) == 2, e.all_specs()
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
|
||||
)
|
||||
@pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
|
||||
def test_ci_process_command(repro_dir):
|
||||
result = ci.process_command("help", commands=[], repro_dir=str(repro_dir))
|
||||
help_sh = repro_dir / "help.sh"
|
||||
assert help_sh.exists() and not result
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
|
||||
)
|
||||
@pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
|
||||
def test_ci_process_command_fail(repro_dir, monkeypatch):
|
||||
msg = "subprocess wait exception"
|
||||
|
||||
@@ -489,9 +484,7 @@ def test_ci_run_standalone_tests_missing_requirements(
|
||||
assert "Reproduction directory is required" in err
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
|
||||
)
|
||||
@pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
|
||||
def test_ci_run_standalone_tests_not_installed_junit(
|
||||
tmp_path, repro_dir, working_env, default_mock_concretization, mock_test_stage, capfd
|
||||
):
|
||||
@@ -509,9 +502,7 @@ def test_ci_run_standalone_tests_not_installed_junit(
|
||||
assert os.path.getsize(log_file) > 0
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
|
||||
)
|
||||
@pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
|
||||
def test_ci_run_standalone_tests_not_installed_cdash(
|
||||
tmp_path, repro_dir, working_env, default_mock_concretization, mock_test_stage, capfd
|
||||
):
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
from llnl.util.filesystem import working_dir
|
||||
@@ -35,7 +33,7 @@ def test_blame_by_percent(mock_packages):
|
||||
assert "EMAIL" in out
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
|
||||
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
|
||||
def test_blame_file(mock_packages):
|
||||
"""Sanity check the blame command to make sure it works."""
|
||||
with working_dir(spack.paths.prefix):
|
||||
@@ -68,7 +66,7 @@ def test_blame_json(mock_packages):
|
||||
assert key in loaded["authors"][0]
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="git hangs")
|
||||
@pytest.mark.not_on_windows("git hangs")
|
||||
def test_blame_by_git(mock_packages, capfd):
|
||||
"""Sanity check the blame command to make sure it works."""
|
||||
with capfd.disabled():
|
||||
|
||||
@@ -50,7 +50,7 @@ def test_reset_in_file_scopes(mutable_config, scopes):
|
||||
bootstrap_yaml_files = []
|
||||
for s in scopes:
|
||||
_bootstrap("disable", "--scope={0}".format(s))
|
||||
scope_path = spack.config.config.scopes[s].path
|
||||
scope_path = spack.config.CONFIG.scopes[s].path
|
||||
bootstrap_yaml = os.path.join(scope_path, "bootstrap.yaml")
|
||||
assert os.path.exists(bootstrap_yaml)
|
||||
bootstrap_yaml_files.append(bootstrap_yaml)
|
||||
@@ -80,7 +80,7 @@ def test_reset_in_environment(mutable_mock_env_path, mutable_config):
|
||||
def test_reset_in_file_scopes_overwrites_backup_files(mutable_config):
|
||||
# Create a bootstrap.yaml with some config
|
||||
_bootstrap("disable", "--scope=site")
|
||||
scope_path = spack.config.config.scopes["site"].path
|
||||
scope_path = spack.config.CONFIG.scopes["site"].path
|
||||
bootstrap_yaml = os.path.join(scope_path, "bootstrap.yaml")
|
||||
assert os.path.exists(bootstrap_yaml)
|
||||
|
||||
@@ -174,7 +174,7 @@ def test_remove_and_add_a_source(mutable_config):
|
||||
|
||||
|
||||
@pytest.mark.maybeslow
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
|
||||
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
|
||||
def test_bootstrap_mirror_metadata(mutable_config, linux_os, monkeypatch, tmpdir):
|
||||
"""Test that `spack bootstrap mirror` creates a folder that can be ingested by
|
||||
`spack bootstrap add`. Here we don't download data, since that would be an
|
||||
|
||||
@@ -54,7 +54,7 @@ def test_pickle(tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
build_env("--pickle", _out_file, "zlib")
|
||||
environment = pickle.load(open(_out_file, "rb"))
|
||||
assert type(environment) == dict
|
||||
assert isinstance(environment, dict)
|
||||
assert "PATH" in environment
|
||||
|
||||
|
||||
|
||||
@@ -6,7 +6,6 @@
|
||||
import errno
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -26,7 +25,7 @@
|
||||
mirror = spack.main.SpackCommand("mirror")
|
||||
uninstall = spack.main.SpackCommand("uninstall")
|
||||
|
||||
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
|
||||
pytestmark = pytest.mark.not_on_windows("does not run on windows")
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -36,7 +35,7 @@ def test_checksum_args(arguments, expected):
|
||||
assert check == expected
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
|
||||
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
|
||||
@pytest.mark.parametrize(
|
||||
"arguments,expected",
|
||||
[
|
||||
@@ -57,7 +56,7 @@ def test_checksum(arguments, expected, mock_packages, mock_clone_repo, mock_stag
|
||||
assert "version(" in output
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
|
||||
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
|
||||
def test_checksum_interactive(mock_packages, mock_fetch, mock_stage, monkeypatch):
|
||||
# TODO: mock_fetch doesn't actually work with stage, working around with ignoring
|
||||
# fail_on_error for now
|
||||
|
||||
@@ -7,7 +7,6 @@
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
import jsonschema
|
||||
import pytest
|
||||
@@ -41,10 +40,7 @@
|
||||
uninstall_cmd = spack.main.SpackCommand("uninstall")
|
||||
buildcache_cmd = spack.main.SpackCommand("buildcache")
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows"),
|
||||
pytest.mark.maybeslow,
|
||||
]
|
||||
pytestmark = [pytest.mark.not_on_windows("does not run on windows"), pytest.mark.maybeslow]
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
|
||||
@@ -33,8 +33,8 @@ def __call__(self, *args, **kwargs):
|
||||
|
||||
monkeypatch.setattr(spack.package_base.PackageBase, "do_clean", Counter("package"))
|
||||
monkeypatch.setattr(spack.stage, "purge", Counter("stages"))
|
||||
monkeypatch.setattr(spack.caches.fetch_cache, "destroy", Counter("downloads"), raising=False)
|
||||
monkeypatch.setattr(spack.caches.misc_cache, "destroy", Counter("caches"))
|
||||
monkeypatch.setattr(spack.caches.FETCH_CACHE, "destroy", Counter("downloads"), raising=False)
|
||||
monkeypatch.setattr(spack.caches.MISC_CACHE, "destroy", Counter("caches"))
|
||||
monkeypatch.setattr(spack.store.STORE.failure_tracker, "clear_all", Counter("failures"))
|
||||
monkeypatch.setattr(spack.cmd.clean, "remove_python_cache", Counter("python_cache"))
|
||||
|
||||
|
||||
@@ -7,7 +7,6 @@
 import os
 import shutil
 import subprocess
-import sys

 import pytest

@@ -254,9 +253,7 @@ def test_update_completion_arg(shell, tmpdir, monkeypatch):


 # Note: this test is never expected to be supported on Windows
-@pytest.mark.skipif(
-    sys.platform == "win32", reason="shell completion script generator fails on windows"
-)
+@pytest.mark.not_on_windows("Shell completion script generator fails on windows")
 @pytest.mark.parametrize("shell", ["bash", "fish"])
 def test_updated_completion_scripts(shell, tmpdir):
     """Make sure our shell tab completion scripts remain up-to-date."""
@@ -64,7 +64,7 @@ def compilers_dir(mock_executable):
     return clang_path.parent


-@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
+@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
 @pytest.mark.regression("11678,13138")
 def test_compiler_find_without_paths(no_compilers_yaml, working_env, mock_executable):
     """Tests that 'spack compiler find' looks into PATH by default, if no specific path

@@ -127,7 +127,7 @@ def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
     assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()


-@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
+@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
 def test_compiler_add(mutable_config, mock_packages, mock_executable):
     """Tests that we can add a compiler to configuration."""
     expected_version = "4.5.3"

@@ -157,7 +157,7 @@ def test_compiler_add(mutable_config, mock_packages, mock_executable):
     assert new_compiler.version == spack.version.Version(expected_version)


-@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
+@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
 @pytest.mark.regression("17590")
 def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_dir):
     """Ensure that we'll mix compilers with different suffixes when necessary."""

@@ -189,7 +189,7 @@ def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, compilers_
     }


-@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
+@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
 @pytest.mark.regression("17590")
 def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compilers_dir):
     """Ensure that we'll pick 'clang' over 'clang-gpu' when there is a choice."""

@@ -210,7 +210,7 @@ def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compiler
     assert clang["paths"]["cxx"] == str(compilers_dir / "clang++")


-@pytest.mark.skipif(sys.platform == "win32", reason="Cannot execute bash script on Windows")
+@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
 def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir):
     """Ensure that we look for compilers in the same order as PATH, when there are duplicates"""
     new_dir = compilers_dir / "first_in_path"
@@ -24,7 +24,7 @@

 def _create_config(scope=None, data={}, section="packages"):
     scope = scope or spack.config.default_modify_scope()
-    cfg_file = spack.config.config.get_config_filename(scope, section)
+    cfg_file = spack.config.CONFIG.get_config_filename(scope, section)
     with open(cfg_file, "w") as f:
         syaml.dump(data, stream=f)
     return cfg_file

@@ -80,8 +80,8 @@ def test_config_edit(mutable_config, working_env):
     """Ensure `spack config edit` edits the right paths."""

     dms = spack.config.default_modify_scope("compilers")
-    dms_path = spack.config.config.scopes[dms].path
-    user_path = spack.config.config.scopes["user"].path
+    dms_path = spack.config.CONFIG.scopes[dms].path
+    user_path = spack.config.CONFIG.scopes["user"].path

     comp_path = os.path.join(dms_path, "compilers.yaml")
     repos_path = os.path.join(user_path, "repos.yaml")

@@ -544,7 +544,7 @@ def test_config_update_not_needed(mutable_config):
 def test_config_update_can_handle_comments(mutable_config):
     # Create an outdated config file with comments
     scope = spack.config.default_modify_scope()
-    cfg_file = spack.config.config.get_config_filename(scope, "config")
+    cfg_file = spack.config.CONFIG.get_config_filename(scope, "config")
     with open(cfg_file, mode="w") as f:
         f.write(
             """

@@ -574,7 +574,7 @@ def test_config_update_can_handle_comments(mutable_config):
 @pytest.mark.regression("18050")
 def test_config_update_works_for_empty_paths(mutable_config):
     scope = spack.config.default_modify_scope()
-    cfg_file = spack.config.config.get_config_filename(scope, "config")
+    cfg_file = spack.config.CONFIG.get_config_filename(scope, "config")
     with open(cfg_file, mode="w") as f:
         f.write(
             """

@@ -627,7 +627,7 @@ def test_config_prefer_upstream(

     output = config("prefer-upstream")
     scope = spack.config.default_modify_scope("packages")
-    cfg_file = spack.config.config.get_config_filename(scope, "packages")
+    cfg_file = spack.config.CONFIG.get_config_filename(scope, "packages")
     packages = syaml.load(open(cfg_file))["packages"]

     # Make sure only the non-default variants are set.
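The `spack.config.config` → `spack.config.CONFIG` rename in these hunks, like `fetch_cache` → `FETCH_CACHE` earlier, applies the UPPER_CASE spelling PEP 8 reserves for module-level constants, so reads of shared global singletons stand out at the call site. Schematically, with a hypothetical module rather than Spack's own code:

    class Configuration:
        """Stand-in for a process-wide configuration object."""

    # Before: looks like any ordinary variable at the call site.
    # config = Configuration()

    # After: UPPER_CASE signals a module-level singleton.
    CONFIG = Configuration()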
@@ -6,7 +6,6 @@
 import os
 import os.path
 import platform
-import sys

 import pytest

@@ -17,7 +16,7 @@

 debug = SpackCommand("debug")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 @pytest.mark.db

@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import sys
-
 import pytest

 import spack.store

@@ -16,7 +14,7 @@
 deprecate = SpackCommand("deprecate")
 find = SpackCommand("find")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 def test_deprecate(mock_packages, mock_archive, mock_fetch, install_mockery):

@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os
-import sys

 import pytest

@@ -18,7 +17,7 @@
 install = SpackCommand("install")
 env = SpackCommand("env")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 def test_dev_build_basics(tmpdir, mock_packages, install_mockery):

@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
 import shutil
-import sys

 import pytest

@@ -17,7 +16,7 @@
 develop = SpackCommand("develop")
 env = SpackCommand("env")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 @pytest.mark.usefixtures("mutable_mock_env_path", "mock_packages", "mock_fetch", "config")

@@ -3,8 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import sys
-
 import pytest

 import spack.cmd.diff

@@ -45,7 +43,7 @@ def test_diff_cmd(install_mockery, mock_fetch, mock_archive, mock_packages):
     assert ["hash", "mpileaks %s" % specB.dag_hash()] in c["b_not_a"]


-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.not_on_windows("Not supported on Windows (yet)")
 def test_load_first(install_mockery, mock_fetch, mock_archive, mock_packages):
     """Test with and without the --first option"""
     install_cmd("mpileaks")

@@ -8,7 +8,6 @@
 import os
 import pathlib
 import shutil
-import sys
 from argparse import Namespace

 import pytest
@@ -41,7 +40,7 @@
 pytestmark = [
     pytest.mark.usefixtures("mutable_mock_env_path", "config", "mutable_mock_repo"),
     pytest.mark.maybeslow,
-    pytest.mark.skipif(sys.platform == "win32", reason="Envs unsupported on Window"),
+    pytest.mark.not_on_windows("Envs unsupported on Window"),
 ]

 env = SpackCommand("env")

@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import sys

 import pytest

@@ -23,7 +22,7 @@ def python_database(mock_packages, mutable_database):
     yield


-@pytest.mark.skipif(sys.platform == "win32", reason="All Fetchers Failed")
+@pytest.mark.not_on_windows("All Fetchers Failed")
 @pytest.mark.db
 def test_extensions(mock_packages, python_database, config, capsys):
     ext2 = Spec("py-extension2").concretized()

@@ -212,7 +212,7 @@ def test_find_external_empty_default_manifest_dir(
     external("find")


-@pytest.mark.skipif(sys.platform == "win32", reason="Can't chmod on Windows")
+@pytest.mark.not_on_windows("Can't chmod on Windows")
 @pytest.mark.skipif(getuid() == 0, reason="user is root")
 def test_find_external_manifest_with_bad_permissions(
     mutable_config,

@@ -399,7 +399,7 @@ def test_use_tags_for_detection(command_args, mock_executable, mutable_config, m


 @pytest.mark.regression("38733")
-@pytest.mark.skipif(sys.platform == "win32", reason="the test uses bash scripts")
+@pytest.mark.not_on_windows("the test uses bash scripts")
 def test_failures_in_scanning_do_not_result_in_an_error(
     mock_executable, monkeypatch, mutable_config
 ):

@@ -332,7 +332,7 @@ def test_find_command_basic_usage(database):
     assert "mpileaks" in output


-@pytest.mark.skipif(sys.platform == "win32", reason="envirnment is not yet supported on windows")
+@pytest.mark.not_on_windows("envirnment is not yet supported on windows")
 @pytest.mark.regression("9875")
 def test_find_prefix_in_env(
     mutable_mock_env_path, install_mockery, mock_fetch, mock_packages, mock_archive, config
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import sys

 import pytest

@@ -13,7 +12,7 @@

 gc = spack.main.SpackCommand("gc")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 @pytest.mark.db

@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os
-import sys

 import pytest

@@ -22,7 +21,7 @@
 bootstrap = SpackCommand("bootstrap")
 mirror = SpackCommand("mirror")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 # test gpg command detection

@@ -7,8 +7,8 @@
 import filecmp
 import itertools
 import os
+import pathlib
 import re
-import sys
 import time

 import pytest

@@ -38,8 +38,6 @@
 buildcache = SpackCommand("buildcache")
 find = SpackCommand("find")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-

 @pytest.fixture()
 def noop_install(monkeypatch):
@@ -204,7 +202,7 @@ def test_show_log_on_error(
     assert isinstance(install.error, spack.build_environment.ChildError)
     assert install.error.pkg.name == "build-error"

-    assert "==> Installing build-error" in out
+    assert "Installing build-error" in out
     assert "See build log for details:" in out

@@ -263,9 +261,9 @@ def test_install_commit(mock_git_version_info, install_mockery, mock_packages, m

     """
     repo_path, filename, commits = mock_git_version_info
-    monkeypatch.setattr(
-        spack.package_base.PackageBase, "git", "file://%s" % repo_path, raising=False
-    )
+    file_url = pathlib.Path(repo_path).as_uri()
+
+    monkeypatch.setattr(spack.package_base.PackageBase, "git", file_url, raising=False)

     # Use the earliest commit in the respository
     spec = Spec(f"git-test-commit@{commits[-1]}").concretized()
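The second hunk swaps string interpolation for `pathlib.Path.as_uri()` (paired with the `import pathlib` added earlier in this file), which produces a well-formed `file:` URL on every platform; naive `"file://%s"` interpolation breaks on Windows paths. A quick comparison using illustrative paths:

    import pathlib

    # POSIX: the two spellings happen to agree.
    posix = pathlib.PurePosixPath("/tmp/repo")
    assert "file://%s" % posix == "file:///tmp/repo"
    assert posix.as_uri() == "file:///tmp/repo"

    # Windows: interpolation keeps backslashes and drops the third slash.
    win = pathlib.PureWindowsPath(r"C:\repos\pkg")
    assert "file://%s" % win == "file://C:\\repos\\pkg"
    assert win.as_uri() == "file:///C:/repos/pkg"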
@@ -548,6 +546,7 @@ def test_cdash_report_concretization_error(
     assert any(x in content for x in expected_messages)


+@pytest.mark.not_on_windows("Windows log_output logs phase header out of order")
 @pytest.mark.disable_clean_stage_check
 def test_cdash_upload_build_error(tmpdir, mock_fetch, install_mockery, capfd):
     # capfd interferes with Spack's capturing

@@ -747,6 +746,7 @@ def test_install_deps_then_package(tmpdir, mock_fetch, install_mockery):
     assert os.path.exists(root.prefix)


+@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
 @pytest.mark.regression("12002")
 def test_install_only_dependencies_in_env(
     tmpdir, mock_fetch, install_mockery, mutable_mock_env_path

@@ -896,7 +896,7 @@ def test_install_help_does_not_show_cdash_options(capsys):
     assert "CDash URL" not in captured.out


-def test_install_help_cdash(capsys):
+def test_install_help_cdash():
     """Make sure `spack install --help-cdash` describes CDash arguments"""
     install_cmd = SpackCommand("install")
     out = install_cmd("--help-cdash")

@@ -913,6 +913,7 @@ def test_cdash_auth_token(tmpdir, mock_fetch, install_mockery, capfd):
     assert "Using CDash auth token from environment" in out


+@pytest.mark.not_on_windows("Windows log_output logs phase header out of order")
 @pytest.mark.disable_clean_stage_check
 def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd):
     # capfd interferes with Spack's capturing of e.g., Build.xml output

@@ -938,6 +939,7 @@ def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd):
     assert "foo: No such file or directory" in content


+@pytest.mark.not_on_windows("ArchSpec gives test platform debian rather than windows")
 def test_compiler_bootstrap(
     install_mockery_mutable_config,
     mock_packages,

@@ -954,6 +956,7 @@ def test_compiler_bootstrap(
     install("a%gcc@=12.0")


+@pytest.mark.not_on_windows("Binary mirrors not supported on windows")
 def test_compiler_bootstrap_from_binary_mirror(
     install_mockery_mutable_config,
     mock_packages,

@@ -994,6 +997,7 @@ def test_compiler_bootstrap_from_binary_mirror(
     install("--no-cache", "--only", "package", "b%gcc@10.2.0")


+@pytest.mark.not_on_windows("ArchSpec gives test platform debian rather than windows")
 @pytest.mark.regression("16221")
 def test_compiler_bootstrap_already_installed(
     install_mockery_mutable_config,

@@ -1037,6 +1041,7 @@ def test_install_fails_no_args_suggests_env_activation(tmpdir):
     assert "using the `spack.yaml` in this directory" in output


+@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
 def test_install_env_with_tests_all(
     tmpdir, mock_packages, mock_fetch, install_mockery, mutable_mock_env_path
 ):

@@ -1048,6 +1053,7 @@ def test_install_env_with_tests_all(
     assert os.path.exists(test_dep.prefix)


+@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
 def test_install_env_with_tests_root(
     tmpdir, mock_packages, mock_fetch, install_mockery, mutable_mock_env_path
 ):

@@ -1059,6 +1065,7 @@ def test_install_env_with_tests_root(
     assert not os.path.exists(test_dep.prefix)


+@pytest.mark.not_on_windows("Environment views not supported on windows. Revisit after #34701")
 def test_install_empty_env(
     tmpdir, mock_packages, mock_fetch, install_mockery, mutable_mock_env_path
 ):

@@ -1072,6 +1079,7 @@ def test_install_empty_env(
     assert "no specs to install" in out


+@pytest.mark.not_on_windows("Windows logger I/O operation on closed file when install fails")
 @pytest.mark.disable_clean_stage_check
 @pytest.mark.parametrize(
     "name,method",

@@ -1095,6 +1103,7 @@ def test_installation_fail_tests(install_mockery, mock_fetch, name, method):
     assert "See test log for details" in output


+@pytest.mark.not_on_windows("Buildcache not supported on windows")
 def test_install_use_buildcache(
     capsys,
     mock_packages,

@@ -1172,6 +1181,7 @@ def install_use_buildcache(opt):
     install_use_buildcache(opt)


+@pytest.mark.not_on_windows("Windows logger I/O operation on closed file when install fails")
 @pytest.mark.regression("34006")
 @pytest.mark.disable_clean_stage_check
 def test_padded_install_runtests_root(install_mockery_mutable_config, mock_fetch):
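One small cleanup rides along in this file: `test_install_help_cdash` stops requesting the `capsys` fixture it never read. pytest resolves every test parameter as a fixture, so an unused parameter still pays for the fixture's setup and teardown:

    def test_with_capture(capsys):
        # capsys is set up whether or not the test reads it; it only earns
        # its keep when readouterr() is called.
        print("hello")
        out, _err = capsys.readouterr()
        assert out == "hello\n"

    def test_without_capture():
        # No fixture parameters: nothing extra is wired up.
        assert 1 + 1 == 2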
@@ -5,7 +5,6 @@

 import os.path
 import re
-import sys

 import pytest

@@ -17,7 +16,7 @@

 license = SpackCommand("license")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 def test_list_files():

@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
 import re
-import sys

 import pytest

@@ -17,7 +16,7 @@
 install = SpackCommand("install")
 location = SpackCommand("location")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 def test_manpath_trailing_colon(
@@ -5,7 +5,6 @@

 import os
 import shutil
-import sys

 import pytest

@@ -19,7 +18,7 @@
 # Everything here uses (or can use) the mock config and database.
 pytestmark = [
     pytest.mark.usefixtures("config", "database"),
-    pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows"),
+    pytest.mark.not_on_windows("does not run on windows"),
 ]
 # location prints out "locations of packages and spack directories"
 location = SpackCommand("location")

@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os
-import sys

 import pytest

@@ -23,7 +22,7 @@
 buildcache = SpackCommand("buildcache")
 uninstall = SpackCommand("uninstall")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 @pytest.mark.disable_clean_stage_check

@@ -5,7 +5,6 @@

 import os.path
 import re
-import sys

 import pytest

@@ -16,7 +15,7 @@

 module = spack.main.SpackCommand("module")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 #: make sure module files are generated for all the tests here

@@ -5,7 +5,6 @@

 import re
 import shutil
-import sys

 import pytest

@@ -133,7 +132,7 @@ def test_pkg_add(git, mock_pkg_git_repo):
     pkg("add", "does-not-exist")


-@pytest.mark.skipif(sys.platform == "win32", reason="stdout format conflict")
+@pytest.mark.not_on_windows("stdout format conflict")
 def test_pkg_list(mock_pkg_git_repo, mock_pkg_names):
     out = split(pkg("list", "HEAD^^"))
     assert sorted(mock_pkg_names) == sorted(out)

@@ -149,7 +148,7 @@ def test_pkg_list(mock_pkg_git_repo, mock_pkg_names):
     assert sorted(mock_pkg_names) == sorted(out)


-@pytest.mark.skipif(sys.platform == "win32", reason="stdout format conflict")
+@pytest.mark.not_on_windows("stdout format conflict")
 def test_pkg_diff(mock_pkg_git_repo, mock_pkg_names):
     out = split(pkg("diff", "HEAD^^", "HEAD^"))
     assert out == ["HEAD^:", "pkg-a", "pkg-b", "pkg-c"]

@@ -161,7 +160,7 @@ def test_pkg_diff(mock_pkg_git_repo, mock_pkg_names):
     assert out == ["HEAD^:", "pkg-c", "HEAD:", "pkg-d"]


-@pytest.mark.skipif(sys.platform == "win32", reason="stdout format conflict")
+@pytest.mark.not_on_windows("stdout format conflict")
 def test_pkg_added(mock_pkg_git_repo):
     out = split(pkg("added", "HEAD^^", "HEAD^"))
     assert ["pkg-a", "pkg-b", "pkg-c"] == out

@@ -176,7 +175,7 @@ def test_pkg_added(mock_pkg_git_repo):
     assert out == []


-@pytest.mark.skipif(sys.platform == "win32", reason="stdout format conflict")
+@pytest.mark.not_on_windows("stdout format conflict")
 def test_pkg_removed(mock_pkg_git_repo):
     out = split(pkg("removed", "HEAD^^", "HEAD^"))
     assert out == []

@@ -188,7 +187,7 @@ def test_pkg_removed(mock_pkg_git_repo):
     assert out == ["pkg-c"]


-@pytest.mark.skipif(sys.platform == "win32", reason="stdout format conflict")
+@pytest.mark.not_on_windows("stdout format conflict")
 def test_pkg_changed(mock_pkg_git_repo):
     out = split(pkg("changed", "HEAD^^", "HEAD^"))
     assert out == []
@@ -4,17 +4,13 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)


-import sys
-
 import pytest

 from spack.main import SpackCommand

 providers = SpackCommand("providers")

-pytestmark = pytest.mark.skipif(
-    sys.platform == "win32", reason="Providers not currently supported on Windows"
-)
+pytestmark = pytest.mark.not_on_windows("Providers not currently supported on Windows")


 @pytest.mark.parametrize(

@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import sys

 import pytest

@@ -14,7 +13,7 @@
 deprecate = SpackCommand("deprecate")
 reindex = SpackCommand("reindex")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 def test_reindex_basic(mock_packages, mock_archive, mock_fetch, install_mockery):
@@ -32,6 +32,7 @@ def test_spec():
     assert "mpich@3.0.4" in output


+@pytest.mark.only_clingo("Known failure of the original concretizer")
 def test_spec_concretizer_args(mutable_config, mutable_database):
     """End-to-end test of CLI concretizer prefs.

@@ -39,9 +40,6 @@ def test_spec_concretizer_args(mutable_config, mutable_database):
     options to `solver.py`, and that config options are not
     lost along the way.
     """
-    if spack.config.get("config:concretizer") == "original":
-        pytest.xfail("Known failure of the original concretizer")
-
     # remove two non-preferred mpileaks installations
     # so that reuse will pick up the zmpi one
     uninstall = SpackCommand("uninstall")
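Here the runtime `pytest.xfail` guard becomes an `only_clingo` marker, moving the concretizer check out of the test body so it is applied uniformly and reported with a consistent reason. The marker's handler is not shown in this excerpt; a conftest.py sketch under that assumption, reusing the `spack.config.get("config:concretizer")` probe from the removed lines:

    # conftest.py -- hypothetical handler for the only_clingo marker.
    import pytest

    import spack.config


    def pytest_runtest_setup(item):
        marker = item.get_closest_marker(name="only_clingo")
        if marker and spack.config.get("config:concretizer") == "original":
            pytest.skip(*marker.args)

Note the semantics shift slightly: the old code xfailed at runtime, while a marker-based guard like this sketch would skip before the test body runs.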
@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os
-import sys

 import pytest

@@ -23,7 +22,7 @@
 pytestmark = pytest.mark.usefixtures("install_mockery", "mock_packages")


-@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
+@pytest.mark.not_on_windows("not implemented on windows")
 @pytest.mark.disable_clean_stage_check
 def test_stage_spec(monkeypatch):
     """Verify that staging specs works."""

@@ -52,7 +51,7 @@ def fake_stage(pkg, mirror_only=False):
     return expected_path


-@pytest.mark.skipif(sys.platform == "win32", reason="PermissionError")
+@pytest.mark.not_on_windows("PermissionError")
 def test_stage_path(check_stage_path):
     """Verify that --path only works with single specs."""
     stage("--path={0}".format(check_stage_path), "trivial-install-test-package")

@@ -64,7 +63,7 @@ def test_stage_path_errors_multiple_specs(check_stage_path):
     stage(f"--path={check_stage_path}", "trivial-install-test-package", "mpileaks")


-@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
+@pytest.mark.not_on_windows("not implemented on windows")
 @pytest.mark.disable_clean_stage_check
 def test_stage_with_env_outside_env(mutable_mock_env_path, monkeypatch):
     """Verify that stage concretizes specs not in environment instead of erroring."""

@@ -83,7 +82,7 @@ def fake_stage(pkg, mirror_only=False):
     stage("trivial-install-test-package")


-@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
+@pytest.mark.not_on_windows("not implemented on windows")
 @pytest.mark.disable_clean_stage_check
 def test_stage_with_env_inside_env(mutable_mock_env_path, monkeypatch):
     """Verify that stage filters specs in environment instead of reconcretizing."""

@@ -102,7 +101,7 @@ def fake_stage(pkg, mirror_only=False):
     stage("mpileaks")


-@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
+@pytest.mark.not_on_windows("not implemented on windows")
 @pytest.mark.disable_clean_stage_check
 def test_stage_full_env(mutable_mock_env_path, monkeypatch):
     """Verify that stage filters specs in environment."""

@@ -5,7 +5,6 @@

 import argparse
 import os
-import sys

 import pytest

@@ -22,7 +21,7 @@
 install = SpackCommand("install")
 spack_test = SpackCommand("test")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 def test_test_package_not_installed(

@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import sys

 import pytest

@@ -15,7 +14,7 @@
 env = SpackCommand("env")
 concretize = SpackCommand("concretize")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 def test_undevelop(tmpdir, config, mock_packages, mutable_mock_env_path):
Some files were not shown because too many files have changed in this diff.