Compare commits
196 commits
develop-20 ... develop-20
| SHA1 |
|---|
| 47f176d635 |
| b6ae751657 |
| 9bb5cffc73 |
| 135b44ca59 |
| d3aca68e8f |
| fb83f8ef31 |
| f69c18a922 |
| b95a9d2e47 |
| def4d19980 |
| 1db91e0ccd |
| 34ebe7f53c |
| d07d5410f3 |
| 1db73eb1f2 |
| 2da34de519 |
| d237430f47 |
| 3f0adae9ef |
| 3b4d7bf119 |
| b3087b32c6 |
| ad9c90cb2e |
| 1b0e113a9d |
| 6df5738482 |
| 927d831612 |
| 3f3c75e56a |
| 9733bb3da8 |
| 1de5117ef1 |
| cf8f44ae5a |
| 006e69265e |
| eaec3062a1 |
| d5eb5106b0 |
| 9f8edbf6bf |
| a4301badef |
| 4565811556 |
| b94d54e4d9 |
| a410b22098 |
| c1a73878ea |
| ae553051c8 |
| b94e22b284 |
| e25dcf73cd |
| b7cc4bd247 |
| 22c95923e3 |
| c050b99a06 |
| 60f82685ae |
| 27ab53b68a |
| 907a80ca71 |
| a53cc93016 |
| 6ad0dc3722 |
| 87d4bdaa02 |
| 36394aab2f |
| 358947fc03 |
| 477a3c0ef6 |
| c6c5e11353 |
| 29e2997bd5 |
| 41bd6a75d5 |
| 0976ad3184 |
| fc1d9ba550 |
| 61f0088a27 |
| c202a045e6 |
| 843e1e80f0 |
| 643c028308 |
| d823037c40 |
| 4d945be955 |
| a4ac3f2767 |
| 6e31676b29 |
| 1fff0241f2 |
| a2a52dfb21 |
| f0ed159a1b |
| 9bf7fa0067 |
| fbaea0336e |
| 1673d3e322 |
| c7cca3aa8d |
| da46b63a34 |
| c882214273 |
| 2bacab0402 |
| 0681d9a157 |
| 887847610e |
| 282a01ef76 |
| 151c551781 |
| abbd1abc1a |
| 49c505cc14 |
| 237a56a305 |
| 7e7e6c2797 |
| e67c61aac0 |
| 1b1ed1b1fa |
| ec0e51316b |
| 533821e46f |
| 6c5d125cb0 |
| 668fb1201f |
| f7918fd8ab |
| fc1996e0fa |
| ed3aaafd73 |
| 63bb2c9bad |
| a67455707a |
| 09ca71dbe0 |
| ea082539e4 |
| 143146f4f3 |
| ee6ae402aa |
| 0b26b26821 |
| c764f9b1ab |
| db19d83ea7 |
| 24256be6d6 |
| 633723236e |
| 381f31e69e |
| 9438cac219 |
| 85cf66f650 |
| f3c080e546 |
| 37634f8b08 |
| 2ae8bbce9e |
| b8bfaf65bf |
| 7968cb7fa2 |
| ebc2efdfd2 |
| ff07fd5ccb |
| 3f83ef6566 |
| 554ce7f063 |
| 23963779f4 |
| 45c5af10c3 |
| 532a37e7ba |
| aeb9a92845 |
| a3c7ad7669 |
| b99288dcae |
| 01b7cc5106 |
| f5888d8127 |
| 77c838ca93 |
| 11e538d962 |
| 7d444038ee |
| c24471834b |
| b1e33ae37b |
| c36617f9da |
| deadb64206 |
| 9eaa88e467 |
| bd58801415 |
| 548a9de671 |
| 8e7c53a8ba |
| 5e630174a1 |
| 175a65dfba |
| 39d4c402d5 |
| e51748ee8f |
| f9457fa80b |
| 4cc2ca3e2e |
| 3843001004 |
| e24bb5dd1c |
| f6013114eb |
| bdca875eb3 |
| af8c392de2 |
| 9aa3b4619b |
| 3d733da70a |
| cda99b792c |
| 9834bad82e |
| 3453259c98 |
| ee243b84eb |
| 5080e2cb45 |
| f42ef7aea7 |
| 41793673d9 |
| 8b6a6982ee |
| ee74ca6391 |
| 7165e70186 |
| 97d632a161 |
| 571919992d |
| 99112ad2ad |
| 75c70c395d |
| 960bdfe612 |
| 97892bda18 |
| cead6ef98d |
| 5d70c0f100 |
| 361632fc4b |
| 6576655137 |
| feb26efecd |
| 4752d1cde3 |
| a07afa6e1a |
| 7327d2913a |
| 8f8a1f7f52 |
| eb8d836e76 |
| bad8495e16 |
| 43de7f4881 |
| 84585ac575 |
| 86f9d3865b |
| 834e7b2b0a |
| c14f23ddaa |
| 49f3681a12 |
| 19e1d10cdf |
| 7caf2a512d |
| 1f6e3cc8cb |
| 169c4245e0 |
| ee1982010f |
| dd396c4a76 |
| 235802013d |
| e1f07e98ae |
| 60aee6f535 |
| 2510dc9e6e |
| 5607dd259b |
| 7bd5d1fd3c |
| f6104cc3cb |
| b54d286b4a |
| ea9c488897 |
| ba1d295023 |
| 27f04b3544 |
| 8cd9497522 |
.github/workflows/unit_tests.yaml (1 change)
```diff
@@ -165,6 +165,7 @@ jobs:
     - name: Install Python packages
       run: |
         pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
+        pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
```
```diff
@@ -36,3 +36,9 @@ concretizer:
   # on each root spec, allowing different versions and variants of the same package in
   # an environment.
   unify: true
+  # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
+  duplicates:
+    # "none": allows a single node for any package in the DAG.
+    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
+    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
+    strategy: none
```
```diff
@@ -60,6 +60,7 @@ packages:
     xxd: [xxd-standalone, vim]
     yacc: [bison, byacc]
     ziglang: [zig]
+    zlib-api: [zlib, zlib-ng+compat]
   permissions:
     read: world
     write: user
```
```diff
@@ -32,9 +32,14 @@ can't be found. You can readily check if any prerequisite for using Spack is mis
 
 Spack will take care of bootstrapping any missing dependency marked as [B]. Dependencies marked as [-] are instead required to be found on the system.
 
+   % echo $?
+   1
+
 In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
 are missing and it's giving detailed information on why they are needed and whether
-they can be bootstrapped. Running a command that concretize a spec, like:
+they can be bootstrapped. The return code of this command summarizes the results, if any
+dependencies are missing the return code is ``1``, otherwise ``0``. Running a command that
+concretizes a spec, like:
 
 .. code-block:: console
 
```
```diff
@@ -44,7 +49,7 @@ they can be bootstrapped. Running a command that concretize a spec, like:
    ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
    [ ... ]
 
-triggers the bootstrapping of clingo from pre-built binaries as expected.
+automatically triggers the bootstrapping of clingo from pre-built binaries as expected.
 
 Users can also bootstrap all the dependencies needed by Spack in a single command, which
 might be useful to setup containers or other similar environments:
```
```diff
@@ -104,11 +104,13 @@ Clone `spack-configs <https://github.com/spack/spack-configs>`_ repo and activat
 
 `Intel oneAPI CPU environment <https://github.com/spack/spack-configs/blob/main/INTEL/CPU/spack.yaml>`_ contains applications tested and validated by Intel, this list is constantly extended. And currently it supports:
 
 - `Devito <https://www.devitoproject.org/>`_
 - `GROMACS <https://www.gromacs.org/>`_
 - `HPCG <https://www.hpcg-benchmark.org/>`_
 - `HPL <https://netlib.org/benchmark/hpl/>`_
 - `LAMMPS <https://www.lammps.org/#gsc.tab=0>`_
 - `OpenFOAM <https://www.openfoam.com/>`_
 - `Quantum Espresso <https://www.quantum-espresso.org/>`_
 - `STREAM <https://www.cs.virginia.edu/stream/>`_
 - `WRF <https://github.com/wrf-model/WRF>`_
```
```diff
@@ -2243,7 +2243,7 @@ looks like this:
        url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"
 
        version("1.0.1h", md5="8d6d684a9430d5cc98a62a5d8fbda8cf")
-       depends_on("zlib")
+       depends_on("zlib-api")
 
        parallel = False
```
```diff
@@ -1,13 +1,13 @@
 sphinx==6.2.1
 sphinxcontrib-programoutput==0.17
-sphinx_design==0.4.1
+sphinx_design==0.5.0
 sphinx-rtd-theme==1.2.2
 python-levenshtein==0.21.1
 docutils==0.18.1
-pygments==2.15.1
-urllib3==2.0.3
+pygments==2.16.1
+urllib3==2.0.4
 pytest==7.4.0
 isort==5.12.0
-black==23.1.0
-flake8==6.0.0
-mypy==1.4.1
+black==23.7.0
+flake8==6.1.0
+mypy==1.5.0
```
```diff
@@ -143,7 +143,7 @@ def get_fh(self, path: str) -> IO:
     def release_by_stat(self, stat):
         key = (stat.st_dev, stat.st_ino, os.getpid())
         open_file = self._descriptors.get(key)
-        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_inode
+        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_ino
 
         open_file.refs -= 1
         if not open_file.refs:
```
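The hunk above fixes a latent bug in the assertion's error message: `os.stat_result` exposes the inode number as `st_ino`; there is no `st_inode` attribute, so the old message would itself have raised `AttributeError` before the assertion could report anything. A minimal standalone sketch (no Spack imports) of the difference:

```python
import os

# os.stat_result has st_ino, not st_inode.
stat = os.stat(__file__)
key = (stat.st_dev, stat.st_ino, os.getpid())  # same key shape as the patched code
print("inode key:", key)

# Accessing the misspelled attribute raises AttributeError, which is why the
# old assertion message could never actually be printed:
try:
    stat.st_inode
except AttributeError as e:
    print("as expected:", e)
```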
```diff
@@ -780,7 +780,7 @@ def __enter__(self):
             raise RuntimeError("file argument must be set by __init__ ")
 
         # Open both write and reading on logfile
-        if type(self.logfile) == io.StringIO:
+        if isinstance(self.logfile, io.StringIO):
            self._ioflag = True
             # cannot have two streams on tempfile, so we must make our own
             sys.stdout = self.logfile
```
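Several hunks in this compare replace `type(x) == SomeClass` with `isinstance(x, SomeClass)`. Besides being the idiom linters expect, `isinstance` also accepts subclasses, which direct type comparison silently rejects. A minimal illustration:

```python
import io

class PrefixedStringIO(io.StringIO):
    """A hypothetical StringIO subclass, purely for illustration."""

buf = PrefixedStringIO()

# Direct type comparison fails for subclasses...
print(type(buf) == io.StringIO)      # False
# ...while isinstance matches the whole class hierarchy.
print(isinstance(buf, io.StringIO))  # True
```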
```diff
@@ -286,7 +286,7 @@ def _check_build_test_callbacks(pkgs, error_cls):
     """Ensure stand-alone test method is not included in build-time callbacks"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)
 
         # TODO (post-34236): "test*"->"test_*" once remove deprecated methods
@@ -312,7 +312,7 @@ def _check_patch_urls(pkgs, error_cls):
 
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         for condition, patches in pkg_cls.patches.items():
             for patch in patches:
                 if not isinstance(patch, spack.patch.UrlPatch):
@@ -342,7 +342,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
     errors = []
     for pkg_name in pkgs:
         name_definitions = collections.defaultdict(list)
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
 
         for cls_item in inspect.getmro(pkg_cls):
             for name in RESERVED_NAMES:
@@ -383,7 +383,7 @@ def _ensure_packages_are_pickeleable(pkgs, error_cls):
     """Ensure that package objects are pickleable"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         pkg = pkg_cls(spack.spec.Spec(pkg_name))
         try:
             pickle.dumps(pkg)
@@ -424,7 +424,7 @@ def _ensure_all_versions_can_produce_a_fetcher(pkgs, error_cls):
     """Ensure all versions in a package can produce a fetcher"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         pkg = pkg_cls(spack.spec.Spec(pkg_name))
         try:
             spack.fetch_strategy.check_pkg_attributes(pkg)
@@ -449,7 +449,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
     ]
     for pkg_name in pkgs:
         details = []
-        filename = spack.repo.path.filename_for_package_name(pkg_name)
+        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
         with open(filename, "r") as package_file:
             for i, line in enumerate(package_file):
                 pattern = next((r for r in fixme_regexes if r.search(line)), None)
@@ -461,7 +461,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
             error_msg = "Package '{}' contains boilerplate that need to be removed"
             errors.append(error_cls(error_msg.format(pkg_name), details))
 
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         if not pkg_cls.__doc__:
             error_msg = "Package '{}' miss a docstring"
             errors.append(error_cls(error_msg.format(pkg_name), []))
@@ -474,7 +474,7 @@ def _ensure_all_packages_use_sha256_checksums(pkgs, error_cls):
     """Ensure no packages use md5 checksums"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         if pkg_cls.manual_download:
             continue
 
@@ -511,7 +511,7 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
     """Ensure that methods modifying the build environment are ported to builder classes."""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         buildsystem_variant, _ = pkg_cls.variants["build_system"]
         buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
         builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]
@@ -538,7 +538,7 @@ def _linting_package_file(pkgs, error_cls):
     """Check for correctness of links"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
 
         # Does the homepage have http, and if so, does https work?
         if pkg_cls.homepage.startswith("http://"):
@@ -562,7 +562,7 @@ def _unknown_variants_in_directives(pkgs, error_cls):
     """Report unknown or wrong variants in directives for this package"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
 
         # Check "conflicts" directive
         for conflict, triggers in pkg_cls.conflicts.items():
@@ -628,15 +628,15 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
     """Report unknown dependencies and wrong variants for dependencies"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
-        filename = spack.repo.path.filename_for_package_name(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
         for dependency_name, dependency_data in pkg_cls.dependencies.items():
             # No need to analyze virtual packages
-            if spack.repo.path.is_virtual(dependency_name):
+            if spack.repo.PATH.is_virtual(dependency_name):
                 continue
 
             try:
-                dependency_pkg_cls = spack.repo.path.get_pkg_class(dependency_name)
+                dependency_pkg_cls = spack.repo.PATH.get_pkg_class(dependency_name)
             except spack.repo.UnknownPackageError:
                 # This dependency is completely missing, so report
                 # and continue the analysis
@@ -675,7 +675,7 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
     """Ensures that variant defaults are present and parsable from cli"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         for variant_name, entry in pkg_cls.variants.items():
             variant, _ = entry
             default_is_parsable = (
```
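The hunks above (and many below) are mechanical renames: module-level singletons such as `spack.repo.path` and `spack.config.config` become upper-case `PATH` and `CONFIG`, following the PEP 8 convention that module-level constants are spelled in all caps. A minimal sketch of the pattern; the names here are illustrative stand-ins, not Spack's actual implementation:

```python
# repo.py -- a module-level singleton named as a PEP 8 constant.

class RepoPath:
    """Hypothetical stand-in for a repository index, for illustration only."""

    def get_pkg_class(self, name: str) -> str:
        return f"<package class for {name}>"

# Upper case signals "module-level singleton, do not rebind" to readers and
# linters, even though Python does not enforce immutability.
PATH = RepoPath()

if __name__ == "__main__":
    print(PATH.get_pkg_class("zlib"))
```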
```diff
@@ -709,18 +709,33 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
     return errors
 
 
+@package_directives
+def _ensure_variants_have_descriptions(pkgs, error_cls):
+    """Ensures that all variants have a description."""
+    errors = []
+    for pkg_name in pkgs:
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        for variant_name, entry in pkg_cls.variants.items():
+            variant, _ = entry
+            if not variant.description:
+                error_msg = "Variant '{}' in package '{}' is missing a description"
+                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
+
+    return errors
+
+
 @package_directives
 def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
     """Report if version constraints used in directives are not satisfiable"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
-        filename = spack.repo.path.filename_for_package_name(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
         dependencies_to_check = []
         for dependency_name, dependency_data in pkg_cls.dependencies.items():
             # Skip virtual dependencies for the time being, check on
             # their versions can be added later
-            if spack.repo.path.is_virtual(dependency_name):
+            if spack.repo.PATH.is_virtual(dependency_name):
                 continue
 
             dependencies_to_check.extend([edge.spec for edge in dependency_data.values()])
```
```diff
@@ -729,7 +744,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
     for s in dependencies_to_check:
         dependency_pkg_cls = None
         try:
-            dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
+            dependency_pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
             # Some packages have hacks that might cause failures on some platform
             # Allow to explicitly set conditions to skip version checks in that case
             skip_conditions = getattr(dependency_pkg_cls, "skip_version_audit", [])
@@ -772,7 +787,7 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
     except variant_exceptions as e:
         summary = pkg.name + ': wrong variant in "{0}" directive'
         summary = summary.format(directive)
-        filename = spack.repo.path.filename_for_package_name(pkg.name)
+        filename = spack.repo.PATH.filename_for_package_name(pkg.name)
 
         error_msg = str(e).strip()
         if isinstance(e, KeyError):
```
```diff
@@ -124,9 +124,9 @@ def _read_and_sanitize_configuration() -> Dict[str, Any]:
 def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
     tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
     config_scopes: MutableSequence["spack.config.ConfigScope"] = [
-        spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)
+        spack.config.InternalConfigScope("_builtin", spack.config.CONFIG_DEFAULTS)
     ]
-    configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
+    configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
     for name, path in configuration_paths:
         platform = spack.platforms.host().name
         platform_scope = spack.config.ConfigScope(
```
```diff
@@ -476,15 +476,22 @@ def ensure_executables_in_path_or_raise(
 def _add_externals_if_missing() -> None:
     search_list = [
         # clingo
-        spack.repo.path.get_pkg_class("cmake"),
-        spack.repo.path.get_pkg_class("bison"),
+        spack.repo.PATH.get_pkg_class("cmake"),
+        spack.repo.PATH.get_pkg_class("bison"),
         # GnuPG
-        spack.repo.path.get_pkg_class("gawk"),
+        spack.repo.PATH.get_pkg_class("gawk"),
+        # develop deps
+        spack.repo.PATH.get_pkg_class("git"),
     ]
     if IS_WINDOWS:
-        search_list.append(spack.repo.path.get_pkg_class("winbison"))
-    detected_packages = spack.detection.by_executable(search_list)
-    spack.detection.update_configuration(detected_packages, scope="bootstrap")
+        search_list.append(spack.repo.PATH.get_pkg_class("winbison"))
+    externals = spack.detection.by_executable(search_list)
+    # System git is typically deprecated, so mark as non-buildable to force it as external
+    non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
+    spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)
+    spack.detection.update_configuration(
+        non_buildable_externals, scope="bootstrap", buildable=False
+    )
 
 
 def clingo_root_spec() -> str:
```
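The rewritten `_add_externals_if_missing` splits the detected externals into two groups with a dict comprehension that *pops* selected keys out of the source mapping, partitioning it in place rather than copying. The idiom in isolation, with dummy data:

```python
# Partition a dict by popping selected keys into a second dict.
externals = {"git": "/usr/bin/git", "cmake": "/usr/bin/cmake", "gawk": "/usr/bin/gawk"}

# Keys listed here are moved out of `externals` if present; missing keys are skipped.
non_buildable = {k: externals.pop(k) for k in ("git",) if k in externals}

print(non_buildable)  # {'git': '/usr/bin/git'}
print(externals)      # {'cmake': '/usr/bin/cmake', 'gawk': '/usr/bin/gawk'}
```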
```diff
@@ -23,6 +23,7 @@
 
 from ._common import _root_spec
 from .config import root_path, spec_for_current_python, store_path
+from .core import _add_externals_if_missing
 
 
 class BootstrapEnvironment(spack.environment.Environment):
@@ -185,6 +186,7 @@ def pytest_root_spec() -> str:
 
 def ensure_environment_dependencies() -> None:
     """Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
+    _add_externals_if_missing()
     with BootstrapEnvironment() as env:
         env.update_installations()
         env.update_syspath_and_environ()
```
```diff
@@ -1256,9 +1256,8 @@ def make_stack(tb, stack=None):
             func = getattr(obj, tb.tb_frame.f_code.co_name, "")
             if func:
                 typename, *_ = func.__qualname__.partition(".")
-
-            if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
-                break
+                if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
+                    break
         else:
             return None
 
```
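The `make_stack` hunk hinges on Python's `for ... else` construct: the `else` branch runs only when the loop finishes without hitting `break`, so moving the `break` changes which iterations can fall through to `return None`. A compact reminder of the semantics:

```python
def find_first_even(numbers):
    for n in numbers:
        if n % 2 == 0:
            print("found", n)
            break
    else:
        # Runs only if the loop was never broken out of.
        print("no even number")
        return None
    return n

find_first_even([1, 3, 4])  # found 4
find_first_even([1, 3, 5])  # no even number
```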
```diff
@@ -55,7 +55,8 @@ def flags_to_build_system_args(self, flags):
         setattr(self, "configure_flag_args", [])
         for flag, values in flags.items():
             if values:
-                values_str = "{0}={1}".format(flag.upper(), " ".join(values))
+                var_name = "LIBS" if flag == "ldlibs" else flag.upper()
+                values_str = "{0}={1}".format(var_name, " ".join(values))
                 self.configure_flag_args.append(values_str)
         # Spack's fflags are meant for both F77 and FC, therefore we
         # additionaly set FCFLAGS if required.
```
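The autotools change maps Spack's `ldlibs` flag bucket to the `LIBS` configure variable instead of the literal uppercase `LDLIBS`, since autoconf-generated configure scripts consume extra libraries through `LIBS`. A minimal sketch of the mapping logic in isolation:

```python
def configure_flag_args(flags: dict) -> list:
    """Build NAME=value configure arguments from flag buckets.

    Mirrors the patched logic: every bucket maps to its upper-cased name,
    except 'ldlibs', which autoconf expects as LIBS.
    """
    args = []
    for flag, values in flags.items():
        if values:
            var_name = "LIBS" if flag == "ldlibs" else flag.upper()
            args.append("{0}={1}".format(var_name, " ".join(values)))
    return args

print(configure_flag_args({"cflags": ["-O2"], "ldlibs": ["-lz", "-lm"]}))
# ['CFLAGS=-O2', 'LIBS=-lz -lm']
```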
```diff
@@ -248,7 +248,8 @@ def std_cmake_args(self):
     @staticmethod
     def std_args(pkg, generator=None):
         """Computes the standard cmake arguments for a generic package"""
-        generator = generator or "Unix Makefiles"
+        default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
+        generator = generator or default_generator
         valid_primary_generators = ["Unix Makefiles", "Ninja"]
         primary_generator = _extract_primary_generator(generator)
         if primary_generator not in valid_primary_generators:
```
```diff
@@ -209,5 +209,5 @@ def install(self, pkg, spec, prefix):
     def check(self):
         """Search Meson-generated files for the target ``test`` and run it if found."""
         with fs.working_dir(self.build_directory):
-            self._if_ninja_target_execute("test")
-            self._if_ninja_target_execute("check")
+            self.pkg._if_ninja_target_execute("test")
+            self.pkg._if_ninja_target_execute("check")
```
```diff
@@ -30,7 +30,7 @@
 
 
 class PythonExtension(spack.package_base.PackageBase):
-    maintainers("adamjstewart", "pradyunsg")
+    maintainers("adamjstewart")
 
     @property
     def import_modules(self):
@@ -201,7 +201,7 @@ def update_external_dependencies(self, extendee_spec=None):
         else:
             python = self.get_external_python_for_prefix()
             if not python.concrete:
-                repo = spack.repo.path.repo_for_pkg(python)
+                repo = spack.repo.PATH.repo_for_pkg(python)
                 python.namespace = repo.namespace
 
                 # Ensure architecture information is present
@@ -301,7 +301,7 @@ def get_external_python_for_prefix(self):
             return python_externals_configured[0]
 
         python_externals_detection = spack.detection.by_executable(
-            [spack.repo.path.get_pkg_class("python")], path_hints=[self.spec.external_path]
+            [spack.repo.PATH.get_pkg_class("python")], path_hints=[self.spec.external_path]
         )
 
         python_externals_detected = [
```
```diff
@@ -140,8 +140,6 @@ class ROCmPackage(PackageBase):
     depends_on("hsa-rocr-dev", when="+rocm")
     depends_on("hip +rocm", when="+rocm")
 
-    conflicts("^blt@:0.3.6", when="+rocm")
-
     # need amd gpu type for rocm builds
     conflicts("amdgpu_target=none", when="+rocm")
```
```diff
@@ -20,9 +20,9 @@
 
 
 def misc_cache_location():
-    """The ``misc_cache`` is Spack's cache for small data.
+    """The ``MISC_CACHE`` is Spack's cache for small data.
 
-    Currently the ``misc_cache`` stores indexes for virtual dependency
+    Currently the ``MISC_CACHE`` stores indexes for virtual dependency
     providers and for which packages provide which tags.
     """
     path = spack.config.get("config:misc_cache", spack.paths.default_misc_cache_path)
@@ -35,7 +35,7 @@ def _misc_cache():
 
 
 #: Spack's cache for small data
-misc_cache: Union[
+MISC_CACHE: Union[
     spack.util.file_cache.FileCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_misc_cache)
@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):
 
 
 #: Spack's local cache for downloaded source archives
-fetch_cache: Union[
+FETCH_CACHE: Union[
     spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_fetch_cache)
```
```diff
@@ -535,7 +535,7 @@ def __job_name(name, suffix=""):
     """Compute the name of a named job with appropriate suffix.
     Valid suffixes are either '-remove' or empty string or None
     """
-    assert type(name) == str
+    assert isinstance(name, str)
 
     jname = name
     if suffix:
@@ -885,7 +885,7 @@ def generate_gitlab_ci_yaml(
     cli_scopes = [
         os.path.relpath(s.path, concrete_env_dir)
         for s in cfg.scopes().values()
-        if type(s) == cfg.ImmutableConfigScope
+        if isinstance(s, cfg.ImmutableConfigScope)
         and s.path not in env_includes
         and os.path.exists(s.path)
     ]
@@ -1504,7 +1504,7 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
         return
 
     try:
-        pkg_cls = spack.repo.path.get_pkg_class(job_spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
         job_pkg = pkg_cls(job_spec)
         tty.debug("job package: {0}".format(job_pkg))
     except AssertionError:
```
```diff
@@ -291,7 +291,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
     if len(matching_specs) <= 1:
         return
 
-    format_string = "{name}{@version}{%compiler}{arch=architecture}"
+    format_string = "{name}{@version}{%compiler.name}{@compiler.version}{arch=architecture}"
     args = ["%s matches multiple packages." % spec, "Matching packages:"]
     args += [
         colorize("  @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
```
```diff
@@ -47,7 +47,7 @@ def configs(parser, args):
 
 
 def packages(parser, args):
-    pkgs = args.name or spack.repo.path.all_package_names()
+    pkgs = args.name or spack.repo.PATH.all_package_names()
     reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
     _process_reports(reports)
 
@@ -57,7 +57,7 @@ def packages_https(parser, args):
     if not args.check_all and not args.name:
         tty.die("Please specify one or more packages to audit, or --all.")
 
-    pkgs = args.name or spack.repo.path.all_package_names()
+    pkgs = args.name or spack.repo.PATH.all_package_names()
     reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
     _process_reports(reports)
@@ -126,7 +126,7 @@ def blame(parser, args):
         blame_file = path
 
     if not blame_file:
-        pkg_cls = spack.repo.path.get_pkg_class(args.package_or_file)
+        pkg_cls = spack.repo.PATH.get_pkg_class(args.package_or_file)
         blame_file = pkg_cls.module.__file__.rstrip("c")  # .pyc -> .py
 
     # get git blame for the package
```
```diff
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os.path
 import shutil
+import sys
 import tempfile
 
 import llnl.util.filesystem
@@ -68,11 +69,10 @@
 def _add_scope_option(parser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
     parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )
@@ -169,7 +169,7 @@ def _reset(args):
     if not ok_to_continue:
         raise RuntimeError("Aborting")
 
-    for scope in spack.config.config.file_scopes:
+    for scope in spack.config.CONFIG.file_scopes:
         # The default scope should stay untouched
         if scope.name == "defaults":
             continue
@@ -186,7 +186,7 @@ def _reset(args):
     if os.path.exists(bootstrap_yaml):
         shutil.move(bootstrap_yaml, backup_file)
 
-    spack.config.config.clear_caches()
+    spack.config.CONFIG.clear_caches()
 
 
 def _root(args):
@@ -326,6 +326,7 @@ def _status(args):
     if missing:
         print(llnl.util.tty.color.colorize(legend))
         print()
+        sys.exit(1)
 
 
 def _add(args):
```
```diff
@@ -149,12 +149,11 @@ def setup_parser(subparser: argparse.ArgumentParser):
 
     # used to construct scope arguments below
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
 
     check.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope containing mirrors to check",
     )
```
```diff
@@ -83,7 +83,7 @@ def checksum(parser, args):
         tty.die("`spack checksum` accepts package names, not URLs.")
 
     # Get the package we're going to generate checksums for
-    pkg_cls = spack.repo.path.get_pkg_class(args.package)
+    pkg_cls = spack.repo.PATH.get_pkg_class(args.package)
     pkg = pkg_cls(spack.spec.Spec(args.package))
 
     # Build a list of versions to checksum
@@ -210,7 +210,7 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str):
 
     """
    # Get filename and path for package
-    filename = spack.repo.path.filename_for_package_name(pkg.name)
+    filename = spack.repo.PATH.filename_for_package_name(pkg.name)
     num_versions_added = 0
 
     version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
```
```diff
@@ -289,6 +289,10 @@ def ci_rebuild(args):
     rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
     require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")
 
+    # If signing key was provided via "SPACK_SIGNING_KEY", then try to import it.
+    if signing_key:
+        spack_ci.import_signing_key(signing_key)
+
     # Fail early if signing is required but we don't have a signing key
     sign_binaries = require_signing is not None and require_signing.lower() == "true"
     if sign_binaries and not spack_ci.can_sign_binaries():
```
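`ci_rebuild` now imports the signing key before the early "can we sign?" check, and it gates signing on `SPACK_REQUIRE_SIGNING` being literally "true" (case-insensitively). Parsing booleans out of environment variables this way is a common pattern; a minimal sketch:

```python
import os

def env_flag(name: str) -> bool:
    """True only when the variable is set to the string 'true' (any case)."""
    value = os.environ.get(name)
    return value is not None and value.lower() == "true"

os.environ["SPACK_REQUIRE_SIGNING"] = "True"
print(env_flag("SPACK_REQUIRE_SIGNING"))  # True
print(env_flag("UNSET_VARIABLE"))         # False
```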
```diff
@@ -418,11 +422,6 @@ def ci_rebuild(args):
         dst_file = os.path.join(repro_dir, file_name)
         shutil.copyfile(src_file, dst_file)
 
-    # If signing key was provided via "SPACK_SIGNING_KEY", then try to
-    # import it.
-    if signing_key:
-        spack_ci.import_signing_key(signing_key)
-
     # Write this job's spec json into the reproduction directory, and it will
     # also be used in the generated "spack install" command to install the spec
     tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
@@ -679,7 +678,7 @@ def ci_rebuild(args):
             input_spec=job_spec,
             buildcache_mirror_url=buildcache_mirror_url,
             pipeline_mirror_url=pipeline_mirror_url,
-            sign_binaries=sign_binaries,
+            sign_binaries=spack_ci.can_sign_binaries(),
         ):
             msg = tty.msg if result.success else tty.warn
             msg(
```
```diff
@@ -17,6 +17,7 @@
 import spack.config
 import spack.repo
 import spack.stage
+import spack.store
 import spack.util.path
 from spack.paths import lib_path, var_path
@@ -117,15 +118,15 @@ def clean(parser, args):
 
     if args.downloads:
         tty.msg("Removing cached downloads")
-        spack.caches.fetch_cache.destroy()
+        spack.caches.FETCH_CACHE.destroy()
 
     if args.failures:
         tty.msg("Removing install failure marks")
-        spack.installer.clear_failures()
+        spack.store.STORE.failure_tracker.clear_all()
 
     if args.misc_cache:
         tty.msg("Removing cached information on repositories")
-        spack.caches.misc_cache.destroy()
+        spack.caches.MISC_CACHE.destroy()
 
     if args.python_cache:
         tty.msg("Removing python cache files")
```
```diff
@@ -36,13 +36,13 @@
     "bash": {
         "aliases": True,
         "format": "bash",
-        "header": os.path.join(spack.paths.share_path, "bash", "spack-completion.in"),
+        "header": os.path.join(spack.paths.share_path, "bash", "spack-completion.bash"),
         "update": os.path.join(spack.paths.share_path, "spack-completion.bash"),
     },
     "fish": {
         "aliases": True,
         "format": "fish",
-        "header": os.path.join(spack.paths.share_path, "fish", "spack-completion.in"),
+        "header": os.path.join(spack.paths.share_path, "fish", "spack-completion.fish"),
         "update": os.path.join(spack.paths.share_path, "spack-completion.fish"),
     },
 }
```
```diff
@@ -24,7 +24,6 @@ def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="compiler_command")
 
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
 
     # Find
     find_parser = sp.add_parser(
@@ -36,7 +35,7 @@ def setup_parser(subparser):
     find_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope("compilers"),
         help="configuration scope to modify",
     )
@@ -50,7 +49,7 @@ def setup_parser(subparser):
     remove_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=None,
         help="configuration scope to modify",
     )
@@ -60,7 +59,7 @@ def setup_parser(subparser):
     list_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )
@@ -71,7 +70,7 @@ def setup_parser(subparser):
     info_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )
@@ -93,7 +92,7 @@ def compiler_find(args):
     n = len(new_compilers)
     s = "s" if n > 1 else ""
 
-    config = spack.config.config
+    config = spack.config.CONFIG
     filename = config.get_config_filename(args.scope, "compilers")
     tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
     colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
```
```diff
@@ -13,12 +13,11 @@
 
 def setup_parser(subparser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
 
     subparser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )
```
```diff
@@ -27,13 +27,12 @@
 
 def setup_parser(subparser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
 
     # User can only choose one
     subparser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )
```
```diff
@@ -45,7 +44,7 @@ def setup_parser(subparser):
         help="configuration section to print\n\noptions: %(choices)s",
         nargs="?",
         metavar="section",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )
 
     blame_parser = sp.add_parser(
@@ -55,7 +54,7 @@ def setup_parser(subparser):
         "section",
         help="configuration section to print\n\noptions: %(choices)s",
         metavar="section",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )
 
     edit_parser = sp.add_parser("edit", help="edit configuration file")
@@ -64,7 +63,7 @@ def setup_parser(subparser):
         help="configuration section to edit\n\noptions: %(choices)s",
         metavar="section",
         nargs="?",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )
     edit_parser.add_argument(
         "--print-file", action="store_true", help="print the file name that would be edited"
@@ -146,10 +145,10 @@ def config_get(args):
     scope, section = _get_scope_and_section(args)
 
     if section is not None:
-        spack.config.config.print_section(section)
+        spack.config.CONFIG.print_section(section)
 
     elif scope and scope.startswith("env:"):
-        config_file = spack.config.config.get_config_filename(scope, section)
+        config_file = spack.config.CONFIG.get_config_filename(scope, section)
         if os.path.exists(config_file):
             with open(config_file) as f:
                 print(f.read())
@@ -162,7 +161,7 @@ def config_get(args):
 
 def config_blame(args):
     """Print out line-by-line blame of merged YAML."""
-    spack.config.config.print_section(args.section, blame=True)
+    spack.config.CONFIG.print_section(args.section, blame=True)
 
 
 def config_edit(args):
@@ -181,7 +180,7 @@ def config_edit(args):
     scope, section = _get_scope_and_section(args)
     if not scope and not section:
         tty.die("`spack config edit` requires a section argument or an active environment.")
-    config_file = spack.config.config.get_config_filename(scope, section)
+    config_file = spack.config.CONFIG.get_config_filename(scope, section)
 
     if args.print_file:
         print(config_file)
@@ -194,7 +193,7 @@ def config_list(args):
 
     Used primarily for shell tab completion scripts.
     """
-    print(" ".join(list(spack.config.section_schemas)))
+    print(" ".join(list(spack.config.SECTION_SCHEMAS)))
 
 
 def config_add(args):
```
```diff
@@ -251,19 +250,19 @@ def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):
 
 def config_update(args):
     # Read the configuration files
-    spack.config.config.get_config(args.section, scope=args.scope)
+    spack.config.CONFIG.get_config(args.section, scope=args.scope)
     updates: List[spack.config.ConfigScope] = list(
         filter(
             lambda s: not isinstance(
                 s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
             ),
-            spack.config.config.format_updates[args.section],
+            spack.config.CONFIG.format_updates[args.section],
         )
     )
 
     cannot_overwrite, skip_system_scope = [], False
     for scope in updates:
-        cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
         can_be_updated = _can_update_config_file(scope, cfg_file)
         if not can_be_updated:
             if scope.name == "system":
@@ -302,7 +301,7 @@ def config_update(args):
             " the latest schema format:\n\n"
         )
         for scope in updates:
-            cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
+            cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
             msg += "\t[scope={0}, file={1}]\n".format(scope.name, cfg_file)
         msg += (
             "\nIf the configuration files are updated, versions of Spack "
@@ -325,7 +324,7 @@ def config_update(args):
         # Make a backup copy and rewrite the file
         bkp_file = cfg_file + ".bkp"
         shutil.copy(cfg_file, bkp_file)
-        spack.config.config.update_config(args.section, data, scope=scope.name, force=True)
+        spack.config.CONFIG.update_config(args.section, data, scope=scope.name, force=True)
         tty.msg(f'File "{cfg_file}" update [backup={bkp_file}]')
```
```diff
@@ -337,13 +336,13 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):
 
 
 def config_revert(args):
-    scopes = [args.scope] if args.scope else [x.name for x in spack.config.config.file_scopes]
+    scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]
 
     # Search for backup files in the configuration scopes
     Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])
     to_be_restored, cannot_overwrite = [], []
     for scope in scopes:
-        cfg_file = spack.config.config.get_config_filename(scope, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope, args.section)
         bkp_file = cfg_file + ".bkp"
 
         # If the backup files doesn't exist move to the next scope
@@ -457,7 +456,7 @@ def config_prefer_upstream(args):
     existing = spack.config.get("packages", scope=scope)
     new = spack.config.merge_yaml(existing, pkgs)
     spack.config.set("packages", new, scope)
-    config_file = spack.config.config.get_config_filename(scope, section)
+    config_file = spack.config.CONFIG.get_config_filename(scope, section)
 
     tty.msg("Updated config at {0}".format(config_file))
```
```diff
@@ -915,11 +915,11 @@ def get_repository(args, name):
         )
     else:
         if spec.namespace:
-            repo = spack.repo.path.get_repo(spec.namespace, None)
+            repo = spack.repo.PATH.get_repo(spec.namespace, None)
             if not repo:
                 tty.die("Unknown namespace: '{0}'".format(spec.namespace))
         else:
-            repo = spack.repo.path.first_repo()
+            repo = spack.repo.PATH.first_repo()
 
     # Set the namespace on the spec if it's not there already
     if not spec.namespace:
```
```diff
@@ -47,14 +47,14 @@ def inverted_dependencies():
     actual dependents.
     """
     dag = {}
-    for pkg_cls in spack.repo.path.all_package_classes():
+    for pkg_cls in spack.repo.PATH.all_package_classes():
         dag.setdefault(pkg_cls.name, set())
         for dep in pkg_cls.dependencies:
             deps = [dep]
 
             # expand virtuals if necessary
-            if spack.repo.path.is_virtual(dep):
-                deps += [s.name for s in spack.repo.path.providers_for(dep)]
+            if spack.repo.PATH.is_virtual(dep):
+                deps += [s.name for s in spack.repo.PATH.providers_for(dep)]
 
             for d in deps:
                 dag.setdefault(d, set()).add(pkg_cls.name)
```
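`inverted_dependencies` builds a reverse adjacency map (package to its dependents) using `dict.setdefault`, which both registers leaf nodes and appends in one pass. A self-contained sketch of the same inversion over toy data:

```python
# Invert "package -> dependencies" into "package -> dependents".
dependencies = {
    "app": {"libfoo", "zlib"},
    "libfoo": {"zlib"},
    "zlib": set(),
}

dependents = {}
for pkg, deps in dependencies.items():
    dependents.setdefault(pkg, set())  # make sure every package has an entry
    for dep in deps:
        dependents.setdefault(dep, set()).add(pkg)

print(dependents)
# {'app': set(), 'libfoo': {'app'}, 'zlib': {'app', 'libfoo'}}
```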
```diff
@@ -98,7 +98,7 @@ def dev_build(self, args):
         tty.die("spack dev-build only takes one spec.")
 
     spec = specs[0]
-    if not spack.repo.path.exists(spec.name):
+    if not spack.repo.PATH.exists(spec.name):
         tty.die(
             "No package for '{0}' was found.".format(spec.name),
             "  Use `spack create` to create a new package",
```
```diff
@@ -31,9 +31,9 @@ def edit_package(name, repo_path, namespace):
     if repo_path:
         repo = spack.repo.Repo(repo_path)
     elif namespace:
-        repo = spack.repo.path.get_repo(namespace)
+        repo = spack.repo.PATH.get_repo(namespace)
     else:
-        repo = spack.repo.path
+        repo = spack.repo.PATH
     path = repo.filename_for_package_name(name)
 
     spec = Spec(name)
```
```diff
@@ -58,7 +58,7 @@ def extensions(parser, args):
 
     extendable_pkgs = []
     for name in spack.repo.all_package_names():
-        pkg_cls = spack.repo.path.get_pkg_class(name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(name)
         if pkg_cls.extendable:
             extendable_pkgs.append(name)
 
@@ -81,7 +81,7 @@ def extensions(parser, args):
 
     if args.show in ("packages", "all"):
         # List package names of extensions
-        extensions = spack.repo.path.extensions_for(spec)
+        extensions = spack.repo.PATH.extensions_for(spec)
         if not extensions:
             tty.msg("%s has no extensions." % spec.cshort_spec)
         else:
```
```diff
@@ -13,6 +13,7 @@
 import spack
 import spack.cmd
+import spack.cmd.common.arguments
 import spack.config
 import spack.cray_manifest as cray_manifest
 import spack.detection
 import spack.error
@@ -27,7 +28,6 @@ def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="external_command")
 
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
 
     find_parser = sp.add_parser("find", help="add external packages to packages.yaml")
     find_parser.add_argument(
@@ -47,7 +47,7 @@ def setup_parser(subparser):
     find_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope("packages"),
         help="configuration scope to modify",
     )
@@ -133,9 +133,9 @@ def external_find(args):
 
     # Add the packages that have been required explicitly
     if args.packages:
-        pkg_cls_to_check = [spack.repo.path.get_pkg_class(pkg) for pkg in args.packages]
+        pkg_cls_to_check = [spack.repo.PATH.get_pkg_class(pkg) for pkg in args.packages]
         if args.tags:
-            allowed = set(spack.repo.path.packages_with_tags(*args.tags))
+            allowed = set(spack.repo.PATH.packages_with_tags(*args.tags))
             pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]
 
     if args.tags and not pkg_cls_to_check:
@@ -144,15 +144,15 @@ def external_find(args):
         # Since tags are cached it's much faster to construct what we need
         # to search directly, rather than filtering after the fact
         pkg_cls_to_check = [
-            spack.repo.path.get_pkg_class(pkg_name)
+            spack.repo.PATH.get_pkg_class(pkg_name)
             for tag in args.tags
-            for pkg_name in spack.repo.path.packages_with_tags(tag)
+            for pkg_name in spack.repo.PATH.packages_with_tags(tag)
         ]
         pkg_cls_to_check = list(set(pkg_cls_to_check))
 
     # If the list of packages is empty, search for every possible package
     if not args.tags and not pkg_cls_to_check:
-        pkg_cls_to_check = list(spack.repo.path.all_package_classes())
+        pkg_cls_to_check = list(spack.repo.PATH.all_package_classes())
 
     # If the user specified any packages to exclude from external find, add them here
     if args.exclude:
@@ -165,7 +165,7 @@ def external_find(args):
         detected_packages, scope=args.scope, buildable=not args.not_buildable
     )
     if new_entries:
-        path = spack.config.config.get_config_filename(args.scope, "packages")
+        path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
         msg = "The following specs have been detected on this system and added to {0}"
         tty.msg(msg.format(path))
         spack.cmd.display_specs(new_entries)
@@ -239,7 +239,7 @@ def _collect_and_consume_cray_manifest_files(
 
 def external_list(args):
     # Trigger a read of all packages, might take a long time.
-    list(spack.repo.path.all_package_classes())
+    list(spack.repo.PATH.all_package_classes())
     # Print all the detectable packages
     tty.msg("Detectable packages per repository")
     for namespace, pkgs in sorted(spack.package_base.detectable_packages.items()):
```
```diff
@@ -268,7 +268,7 @@ def find(parser, args):
 
     # If tags have been specified on the command line, filter by tags
     if args.tags:
-        packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
+        packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
         results = [x for x in results if x.name in packages_with_tags]
 
     if args.loaded:
```
```diff
@@ -349,7 +349,7 @@ def print_virtuals(pkg):
 
 def info(parser, args):
     spec = spack.spec.Spec(args.package)
-    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
     pkg = pkg_cls(spec)
 
     # Output core package information
```
```diff
@@ -107,7 +107,7 @@ def match(p, f):
         if f.match(p):
             return True
 
-        pkg_cls = spack.repo.path.get_pkg_class(p)
+        pkg_cls = spack.repo.PATH.get_pkg_class(p)
         if pkg_cls.__doc__:
             return f.match(pkg_cls.__doc__)
         return False
@@ -159,7 +159,7 @@ def get_dependencies(pkg):
 @formatter
 def version_json(pkg_names, out):
     """Print all packages with their latest versions."""
-    pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
+    pkg_classes = [spack.repo.PATH.get_pkg_class(name) for name in pkg_names]
 
     out.write("[\n")
@@ -201,7 +201,7 @@ def html(pkg_names, out):
     """
 
     # Read in all packages
-    pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
+    pkg_classes = [spack.repo.PATH.get_pkg_class(name) for name in pkg_names]
 
     # Start at 2 because the title of the page from Sphinx is id1.
     span_id = 2
@@ -313,13 +313,13 @@ def list(parser, args):
 
     # If tags have been specified on the command line, filter by tags
     if args.tags:
-        packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
+        packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
         sorted_packages = [p for p in sorted_packages if p in packages_with_tags]
 
     if args.update:
         # change output stream if user asked for update
         if os.path.exists(args.update):
-            if os.path.getmtime(args.update) > spack.repo.path.last_mtime():
+            if os.path.getmtime(args.update) > spack.repo.PATH.last_mtime():
                 tty.msg("File is up to date: %s" % args.update)
                 return
```
```diff
@@ -109,7 +109,7 @@ def location(parser, args):
         return
 
     if args.packages:
-        print(spack.repo.path.first_repo().root)
+        print(spack.repo.PATH.first_repo().root)
         return
 
     if args.stages:
@@ -135,7 +135,7 @@ def location(parser, args):
 
     # Package dir just needs the spec name
     if args.package_dir:
-        print(spack.repo.path.dirname_for_package_name(spec.name))
+        print(spack.repo.PATH.dirname_for_package_name(spec.name))
         return
 
     # Either concretize or filter from already concretized environment
```
```diff
@@ -54,11 +54,11 @@ def setup_parser(subparser):
 
 def packages_to_maintainers(package_names=None):
     if not package_names:
-        package_names = spack.repo.path.all_package_names()
+        package_names = spack.repo.PATH.all_package_names()
 
     pkg_to_users = defaultdict(lambda: set())
     for name in package_names:
-        cls = spack.repo.path.get_pkg_class(name)
+        cls = spack.repo.PATH.get_pkg_class(name)
         for user in cls.maintainers:
             pkg_to_users[name].add(user)
 
@@ -67,8 +67,8 @@ def packages_to_maintainers(package_names=None):
 
 def maintainers_to_packages(users=None):
     user_to_pkgs = defaultdict(lambda: [])
-    for name in spack.repo.path.all_package_names():
-        cls = spack.repo.path.get_pkg_class(name)
+    for name in spack.repo.PATH.all_package_names():
+        cls = spack.repo.PATH.get_pkg_class(name)
         for user in cls.maintainers:
             lower_users = [u.lower() for u in users]
             if not users or user.lower() in lower_users:
@@ -80,8 +80,8 @@ def maintainers_to_packages(users=None):
 def maintained_packages():
     maintained = []
     unmaintained = []
-    for name in spack.repo.path.all_package_names():
-        cls = spack.repo.path.get_pkg_class(name)
+    for name in spack.repo.PATH.all_package_names():
+        cls = spack.repo.PATH.get_pkg_class(name)
     if cls.maintainers:
             maintained.append(name)
         else:
```
```diff
@@ -90,7 +90,6 @@ def setup_parser(subparser):
 
     # used to construct scope arguments below
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
 
     # Add
     add_parser = sp.add_parser("add", help=mirror_add.__doc__)
@@ -99,7 +98,7 @@ def setup_parser(subparser):
     add_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -119,7 +118,7 @@ def setup_parser(subparser):
     remove_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -138,7 +137,7 @@ def setup_parser(subparser):
     set_url_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -167,7 +166,7 @@ def setup_parser(subparser):
     set_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -178,7 +177,7 @@ def setup_parser(subparser):
     list_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )
@@ -444,7 +443,7 @@ def mirror_create(args):
     )
 
     # When no directory is provided, the source dir is used
-    path = args.directory or spack.caches.fetch_cache_location()
+    path = args.directory or spack.caches.FETCH_CACHE_location()
 
     if args.all and not ev.active_environment():
         create_mirror_for_all_specs(
@@ -474,7 +473,7 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
         path, skip_unstable_versions=skip_unstable_versions
     )
     for candidate in mirror_specs:
-        pkg_cls = spack.repo.path.get_pkg_class(candidate.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(candidate.name)
         pkg_obj = pkg_cls(spack.spec.Spec(candidate))
         mirror_stats.next_spec(pkg_obj.spec)
         spack.mirror.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
```
@@ -309,7 +309,7 @@ def refresh(module_type, specs, args):

    # Skip unknown packages.
    writers = [
-        cls(spec, args.module_set_name) for spec in specs if spack.repo.path.exists(spec.name)
+        cls(spec, args.module_set_name) for spec in specs if spack.repo.PATH.exists(spec.name)
    ]

    # Filter excluded packages early
@@ -321,12 +321,13 @@ def refresh(module_type, specs, args):
        file2writer[item.layout.filename].append(item)

    if len(file2writer) != len(writers):
+        spec_fmt_str = "{name}@={version}%{compiler}/{hash:7} {variants} arch={arch}"
        message = "Name clashes detected in module files:\n"
        for filename, writer_list in file2writer.items():
            if len(writer_list) > 1:
                message += "\nfile: {0}\n".format(filename)
                for x in writer_list:
-                    message += "spec: {0}\n".format(x.spec.format())
+                    message += "spec: {0}\n".format(x.spec.format(spec_fmt_str))
        tty.error(message)
        tty.error("Operation aborted")
        raise SystemExit(1)
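The clash check above works by bucketing writers by target filename and comparing the bucket count to the writer count. A standalone sketch of the same detection, with made-up filenames and spec labels:

```python
from collections import defaultdict

# Hypothetical writers: (module_filename, spec_label) pairs standing in for
# Spack's module writers and their specs.
writers = [
    ("modules/gcc/zlib", "zlib@1.2.13 %gcc@11"),
    ("modules/gcc/zlib", "zlib@1.2.13 %gcc@12"),  # same filename: a clash
    ("modules/gcc/bzip2", "bzip2@1.0.8 %gcc@11"),
]

file2writer = defaultdict(list)
for filename, spec in writers:
    file2writer[filename].append(spec)

# Fewer buckets than writers means at least two writers share a filename.
if len(file2writer) != len(writers):
    for filename, specs in file2writer.items():
        if len(specs) > 1:
            print(f"file: {filename}")
            for s in specs:
                print(f"  spec: {s}")
```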
@@ -376,7 +377,7 @@ def refresh(module_type, specs, args):
def modules_cmd(parser, args, module_type, callbacks=callbacks):
    # Qualifiers to be used when querying the db for specs
    constraint_qualifiers = {
-        "refresh": {"installed": True, "known": lambda x: not spack.repo.path.exists(x)}
+        "refresh": {"installed": True, "known": lambda x: not spack.repo.PATH.exists(x)}
    }
    query_args = constraint_qualifiers.get(args.subparser_name, {})

@@ -143,7 +143,7 @@ def pkg_source(args):
        tty.die("spack pkg source requires exactly one spec")

    spec = specs[0]
-    filename = spack.repo.path.filename_for_package_name(spec.name)
+    filename = spack.repo.PATH.filename_for_package_name(spec.name)

    # regular source dump -- just get the package and print its contents
    if args.canonical:
@@ -184,7 +184,7 @@ def pkg_grep(args, unknown_args):
    grouper = lambda e: e[0] // 500

    # set up iterator and save the first group to ensure we don't end up with a group of size 1
-    groups = itertools.groupby(enumerate(spack.repo.path.all_package_paths()), grouper)
+    groups = itertools.groupby(enumerate(spack.repo.PATH.all_package_paths()), grouper)
    if not groups:
        return 0  # no packages to search
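`grouper` above chunks the enumerated package paths into batches of 500 (one grep invocation each) by integer-dividing the index. A self-contained sketch with a chunk size of 3:

```python
import itertools

# Batch a stream into fixed-size chunks by grouping on index // chunk_size,
# mirroring the grouper shown above (chunk size 500 there, 3 here for brevity).
paths = [f"pkg-{i}/package.py" for i in range(8)]  # hypothetical package paths
grouper = lambda e: e[0] // 3  # e is an (index, item) pair from enumerate()

for key, group in itertools.groupby(enumerate(paths), grouper):
    batch = [item for _, item in group]
    print(key, batch)
# 0 ['pkg-0/package.py', 'pkg-1/package.py', 'pkg-2/package.py']
# 1 ['pkg-3/package.py', ...] and so on
```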
@@ -24,7 +24,7 @@ def setup_parser(subparser):


def providers(parser, args):
-    valid_virtuals = sorted(spack.repo.path.provider_index.providers.keys())
+    valid_virtuals = sorted(spack.repo.PATH.provider_index.providers.keys())

    buffer = io.StringIO()
    isatty = sys.stdout.isatty()
@@ -53,5 +53,5 @@ def providers(parser, args):
    for spec in specs:
        if sys.stdout.isatty():
            print("{0}:".format(spec))
-        spack.cmd.display_specs(sorted(spack.repo.path.providers_for(spec)))
+        spack.cmd.display_specs(sorted(spack.repo.PATH.providers_for(spec)))
        print("")

@@ -20,7 +20,6 @@
def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="repo_command")
    scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

    # Create
    create_parser = sp.add_parser("create", help=repo_create.__doc__)
@@ -45,7 +44,7 @@ def setup_parser(subparser):
    list_parser.add_argument(
        "--scope",
        choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_list_scope(),
        help="configuration scope to read from",
    )
@@ -56,7 +55,7 @@ def setup_parser(subparser):
    add_parser.add_argument(
        "--scope",
        choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
@@ -69,7 +68,7 @@ def setup_parser(subparser):
    remove_parser.add_argument(
        "--scope",
        choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )

@@ -29,7 +29,7 @@ def setup_parser(subparser):

def _show_patch(sha256):
    """Show a record from the patch index."""
-    patches = spack.repo.path.patch_index.index
+    patches = spack.repo.PATH.patch_index.index
    data = patches.get(sha256)

    if not data:
@@ -47,7 +47,7 @@ def _show_patch(sha256):
        owner = rec["owner"]

        if "relative_path" in rec:
-            pkg_dir = spack.repo.path.get_pkg_class(owner).package_dir
+            pkg_dir = spack.repo.PATH.get_pkg_class(owner).package_dir
            path = os.path.join(pkg_dir, rec["relative_path"])
            print(" path: %s" % path)
        else:
@@ -60,7 +60,7 @@ def _show_patch(sha256):

def resource_list(args):
    """list all resources known to spack (currently just patches)"""
-    patches = spack.repo.path.patch_index.index
+    patches = spack.repo.PATH.patch_index.index
    for sha256 in patches:
        if args.only_hashes:
            print(sha256)

@@ -68,7 +68,7 @@ def tags(parser, args):
        return

    # unique list of available tags
-    available_tags = sorted(spack.repo.path.tag_index.keys())
+    available_tags = sorted(spack.repo.PATH.tag_index.keys())
    if not available_tags:
        tty.msg("No tagged packages")
        return

@@ -228,7 +228,7 @@ def create_reporter(args, specs_to_test, test_suite):

def test_list(args):
    """list installed packages with available tests"""
-    tagged = set(spack.repo.path.packages_with_tags(*args.tag)) if args.tag else set()
+    tagged = set(spack.repo.PATH.packages_with_tags(*args.tag)) if args.tag else set()

    def has_test_and_tags(pkg_class):
        tests = spack.install_test.test_functions(pkg_class)
@@ -237,7 +237,7 @@ def has_test_and_tags(pkg_class):
    if args.list_all:
        report_packages = [
            pkg_class.name
-            for pkg_class in spack.repo.path.all_package_classes()
+            for pkg_class in spack.repo.PATH.all_package_classes()
            if has_test_and_tags(pkg_class)
        ]

@@ -209,12 +209,11 @@ def unit_test(parser, args, unknown_args):
    # mock configuration used by unit tests
-    # Note: skip on windows here because for the moment,
-    # clingo is wholly unsupported from bootstrap
-    if sys.platform != "win32":
-        with spack.bootstrap.ensure_bootstrap_configuration():
-            spack.bootstrap.ensure_core_dependencies()
-            if pytest is None:
-                spack.bootstrap.ensure_environment_dependencies()
-                import pytest
+    with spack.bootstrap.ensure_bootstrap_configuration():
+        spack.bootstrap.ensure_core_dependencies()
+        if pytest is None:
+            spack.bootstrap.ensure_environment_dependencies()
+            import pytest
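The rewrite drops the Windows guard, so bootstrapping now always runs. The retry-the-import idiom it keeps (import once at module scope, re-import after bootstrapping if it failed) can be sketched standalone; `ensure_environment_dependencies` is a stub here, not Spack's bootstrapper:

```python
import importlib

def ensure_environment_dependencies():
    print("bootstrapping test dependencies...")  # stub: a real version installs them

# Import once up front; remember the failure instead of crashing.
try:
    import pytest
except ImportError:
    pytest = None

# Retry only if the first import failed, after provisioning dependencies.
if pytest is None:
    ensure_environment_dependencies()
    try:
        pytest = importlib.import_module("pytest")
    except ImportError:
        print("pytest still unavailable; the real code would have installed it")
```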

    if args.pytest_help:
        # make the pytest.main help output more accurate

@@ -155,7 +155,7 @@ def url_list(args):
    urls = set()

    # Gather set of URLs from all packages
-    for pkg_cls in spack.repo.path.all_package_classes():
+    for pkg_cls in spack.repo.PATH.all_package_classes():
        url = getattr(pkg_cls, "url", None)
        urls = url_list_parsing(args, urls, url, pkg_cls)

@@ -192,7 +192,7 @@ def url_summary(args):
    tty.msg("Generating a summary of URL parsing in Spack...")

    # Loop through all packages
-    for pkg_cls in spack.repo.path.all_package_classes():
+    for pkg_cls in spack.repo.PATH.all_package_classes():
        urls = set()
        pkg = pkg_cls(spack.spec.Spec(pkg_cls.name))

@@ -336,7 +336,7 @@ def add(self, pkg_name, fetcher):
    version_stats = UrlStats()
    resource_stats = UrlStats()

-    for pkg_cls in spack.repo.path.all_package_classes():
+    for pkg_cls in spack.repo.PATH.all_package_classes():
        npkgs += 1

        for v in pkg_cls.versions:

@@ -45,7 +45,7 @@ def setup_parser(subparser):

def versions(parser, args):
    spec = spack.spec.Spec(args.package)
-    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
    pkg = pkg_cls(spec)

    safe_versions = pkg.versions

@@ -135,7 +135,7 @@ def _init_compiler_config(*, scope):

def compiler_config_files():
    config_files = list()
-    config = spack.config.config
+    config = spack.config.CONFIG
    for scope in config.file_scopes:
        name = scope.name
        compiler_config = config.get("compilers", scope=name)
@@ -169,7 +169,7 @@ def remove_compiler_from_config(compiler_spec, scope=None):
    """
    candidate_scopes = [scope]
    if scope is None:
-        candidate_scopes = spack.config.config.scopes.keys()
+        candidate_scopes = spack.config.CONFIG.scopes.keys()

    removal_happened = False
    for current_scope in candidate_scopes:
@@ -523,7 +523,7 @@ def compiler_for_spec(compiler_spec, arch_spec):

@_auto_compiler_spec
def get_compiler_duplicates(compiler_spec, arch_spec):
-    config = spack.config.config
+    config = spack.config.CONFIG

    scope_to_compilers = {}
    for scope in config.scopes:

@@ -28,6 +28,7 @@

import spack.abi
import spack.compilers
+import spack.config
import spack.environment
import spack.error
import spack.platforms
@@ -37,7 +38,6 @@
import spack.tengine
import spack.util.path
import spack.variant as vt
-from spack.config import config
from spack.package_prefs import PackagePrefs, is_spec_buildable, spec_externals
from spack.version import ClosedOpenRange, VersionList, ver

@@ -76,7 +76,7 @@ class Concretizer:

    def __init__(self, abstract_spec=None):
        if Concretizer.check_for_compiler_existence is None:
-            Concretizer.check_for_compiler_existence = not config.get(
+            Concretizer.check_for_compiler_existence = not spack.config.get(
                "config:install_missing_compilers", False
            )
        self.abstract_spec = abstract_spec
@@ -113,7 +113,7 @@ def _valid_virtuals_and_externals(self, spec):
        pref_key = lambda spec: 0  # no-op pref key

        if spec.virtual:
-            candidates = spack.repo.path.providers_for(spec)
+            candidates = spack.repo.PATH.providers_for(spec)
            if not candidates:
                raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)

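Replacing `from spack.config import config` with `import spack.config` plus attribute access is not just cosmetic: a from-import snapshots the object, so a later rebinding of the module global (which `use_configuration` below does) would be invisible to the importer. A small demo of the difference, with a throwaway module standing in for `spack.config`:

```python
import types

# `settings` plays the role of spack.config here.
settings = types.ModuleType("settings")
settings.CONFIG = {"debug": False}

grabbed = settings.CONFIG          # like `from spack.config import config`
settings.CONFIG = {"debug": True}  # module rebinds its global (singleton swap)

print(grabbed["debug"])            # False -- stale snapshot of the old object
print(settings.CONFIG["debug"])    # True  -- attribute lookup sees the rebind
```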
@@ -47,6 +47,8 @@
import spack.platforms
import spack.schema
import spack.schema.bootstrap
+import spack.schema.cdash
+import spack.schema.ci
import spack.schema.compilers
import spack.schema.concretizer
import spack.schema.config
@@ -64,7 +66,7 @@
from spack.util.cpus import cpus_available

#: Dict from section names -> schema for that section
-section_schemas = {
+SECTION_SCHEMAS = {
    "compilers": spack.schema.compilers.schema,
    "concretizer": spack.schema.concretizer.schema,
    "mirrors": spack.schema.mirrors.schema,
@@ -80,16 +82,16 @@

# Same as above, but including keys for environments
# this allows us to unify config reading between configs and environments
-all_schemas = copy.deepcopy(section_schemas)
-all_schemas.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
+_ALL_SCHEMAS = copy.deepcopy(SECTION_SCHEMAS)
+_ALL_SCHEMAS.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})

#: Path to the default configuration
-configuration_defaults_path = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
+CONFIGURATION_DEFAULTS_PATH = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))

#: Hard-coded default values for some key configuration options.
#: This ensures that Spack will still work even if config.yaml in
#: the defaults scope is removed.
-config_defaults = {
+CONFIG_DEFAULTS = {
    "config": {
        "debug": False,
        "connect_timeout": 10,
@@ -105,10 +107,10 @@

#: metavar to use for commands that accept scopes
#: this is shorter and more readable than listing all choices
-scopes_metavar = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
+SCOPES_METAVAR = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"

#: Base name for the (internal) overrides scope.
-overrides_base_name = "overrides-"
+_OVERRIDES_BASE_NAME = "overrides-"


class ConfigScope:
@@ -134,7 +136,7 @@ def get_section_filename(self, section):
    def get_section(self, section):
        if section not in self.sections:
            path = self.get_section_filename(section)
-            schema = section_schemas[section]
+            schema = SECTION_SCHEMAS[section]
            data = read_config_file(path, schema)
            self.sections[section] = data
        return self.sections[section]
@@ -145,7 +147,7 @@ def _write_section(self, section):

        # We copy data here to avoid adding defaults at write time
        validate_data = copy.deepcopy(data)
-        validate(validate_data, section_schemas[section])
+        validate(validate_data, SECTION_SCHEMAS[section])

        try:
            mkdirp(self.path)
@@ -317,7 +319,7 @@ def __init__(self, name, data=None):
            data = InternalConfigScope._process_dict_keyname_overrides(data)
            for section in data:
                dsec = data[section]
-                validate({section: dsec}, section_schemas[section])
+                validate({section: dsec}, SECTION_SCHEMAS[section])
                self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)

    def get_section_filename(self, section):
@@ -333,7 +335,7 @@ def _write_section(self, section):
        """This only validates, as the data is already in memory."""
        data = self.get_section(section)
        if data is not None:
-            validate(data, section_schemas[section])
+            validate(data, SECTION_SCHEMAS[section])
            self.sections[section] = _mark_internal(data, self.name)

    def __repr__(self):
@@ -430,7 +432,7 @@ def file_scopes(self) -> List[ConfigScope]:
        return [
            s
            for s in self.scopes.values()
-            if (type(s) == ConfigScope or type(s) == SingleFileScope)
+            if (type(s) is ConfigScope or type(s) is SingleFileScope)
        ]

    def highest_precedence_scope(self) -> ConfigScope:
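The `type(s) ==` to `type(s) is` change is the flake8 E721 fix: both demand the exact class (unlike `isinstance`, subclasses are excluded, which matters here since subclasses of `ConfigScope` are deliberately filtered out), and `is` expresses that identity test directly. A small demonstration:

```python
class ConfigScope: ...
class InternalConfigScope(ConfigScope): ...

scopes = [ConfigScope(), InternalConfigScope()]

# Exact-type test: identity comparison against the class object.
exact = [s for s in scopes if type(s) is ConfigScope]
print(len(exact))  # 1 -- the subclass is excluded

# isinstance would also accept the subclass:
print(sum(isinstance(s, ConfigScope) for s in scopes))  # 2
```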
@@ -711,11 +713,11 @@ def override(path_or_scope, value=None):
    """
    if isinstance(path_or_scope, ConfigScope):
        overrides = path_or_scope
-        config.push_scope(path_or_scope)
+        CONFIG.push_scope(path_or_scope)
    else:
-        base_name = overrides_base_name
+        base_name = _OVERRIDES_BASE_NAME
        # Ensure the new override gets a unique scope name
-        current_overrides = [s.name for s in config.matching_scopes(r"^{0}".format(base_name))]
+        current_overrides = [s.name for s in CONFIG.matching_scopes(r"^{0}".format(base_name))]
        num_overrides = len(current_overrides)
        while True:
            scope_name = "{0}{1}".format(base_name, num_overrides)
@@ -725,19 +727,19 @@ def override(path_or_scope, value=None):
            break

        overrides = InternalConfigScope(scope_name)
-        config.push_scope(overrides)
-        config.set(path_or_scope, value, scope=scope_name)
+        CONFIG.push_scope(overrides)
+        CONFIG.set(path_or_scope, value, scope=scope_name)

    try:
-        yield config
+        yield CONFIG
    finally:
-        scope = config.remove_scope(overrides.name)
+        scope = CONFIG.remove_scope(overrides.name)
        assert scope is overrides
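`override` is a generator context manager: push a temporary highest-precedence scope, yield the configuration, and always pop the scope again. A runnable miniature of the same push/yield/pop pattern (not Spack's implementation):

```python
import contextlib

class MiniConfig:
    def __init__(self):
        self.scopes = [("defaults", {"debug": False})]

    def get(self, key):
        for _, data in reversed(self.scopes):  # highest precedence last
            if key in data:
                return data[key]

    @contextlib.contextmanager
    def override(self, key, value):
        self.scopes.append(("override", {key: value}))
        try:
            yield self
        finally:
            self.scopes.pop()  # popped even if the body raises

cfg = MiniConfig()
print(cfg.get("debug"))        # False
with cfg.override("debug", True):
    print(cfg.get("debug"))    # True
print(cfg.get("debug"))        # False again
```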

#: configuration scopes added on the command line
#: set by ``spack.main.main()``.
-command_line_scopes: List[str] = []
+COMMAND_LINE_SCOPES: List[str] = []


def _add_platform_scope(cfg, scope_type, name, path):
@@ -781,14 +783,14 @@ def create():
    cfg = Configuration()

    # first do the builtin, hardcoded defaults
-    builtin = InternalConfigScope("_builtin", config_defaults)
+    builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
    cfg.push_scope(builtin)

    # Builtin paths to configuration files in Spack
    configuration_paths = [
        # Default configuration scope is the lowest-level scope. These are
        # versioned with Spack and can be overridden by systems, sites or users
-        configuration_defaults_path
+        CONFIGURATION_DEFAULTS_PATH
    ]

    disable_local_config = "SPACK_DISABLE_LOCAL_CONFIG" in os.environ
@@ -815,7 +817,7 @@ def create():
        _add_platform_scope(cfg, ConfigScope, name, path)

    # add command-line scopes
-    _add_command_line_scopes(cfg, command_line_scopes)
+    _add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)

    # we make a special scope for spack commands so that they can
    # override configuration options.
@@ -825,7 +827,7 @@ def create():


#: This is the singleton configuration instance for Spack.
-config: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
+CONFIG: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
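`CONFIG` is a lazy singleton: `llnl.util.lang.Singleton(create)` defers building the `Configuration` until first attribute access. A minimal sketch of such a proxy, modeled on the usage above rather than on the actual llnl implementation:

```python
class Singleton:
    """Minimal lazy-singleton proxy sketch (not llnl.util.lang.Singleton)."""

    def __init__(self, factory):
        self._factory = factory
        self._instance = None

    @property
    def instance(self):
        if self._instance is None:
            self._instance = self._factory()  # built on first use
        return self._instance

    def __getattr__(self, name):
        # Delegate everything else to the lazily-built instance.
        return getattr(self.instance, name)


def create():
    print("building configuration...")
    return {"config": {"debug": False}}


CONFIG = Singleton(create)
# Nothing is built yet; the first attribute access triggers the factory.
print(CONFIG.keys())  # prints "building configuration..." then dict_keys(...)
```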

def add_from_file(filename, scope=None):
@@ -838,7 +840,7 @@ def add_from_file(filename, scope=None):
    # update all sections from config dict
    # We have to iterate on keys to keep overrides from the file
    for section in data.keys():
-        if section in section_schemas.keys():
+        if section in SECTION_SCHEMAS.keys():
            # Special handling for compiler scope difference
            # Has to be handled after we choose a section
            if scope is None:
@@ -849,7 +851,7 @@ def add_from_file(filename, scope=None):
            new = merge_yaml(existing, value)

            # We cannot call config.set directly (set is a type)
-            config.set(section, new, scope)
+            CONFIG.set(section, new, scope)


def add(fullpath, scope=None):
@@ -897,12 +899,12 @@ def add(fullpath, scope=None):

    # merge value into existing
    new = merge_yaml(existing, value)
-    config.set(path, new, scope)
+    CONFIG.set(path, new, scope)


def get(path, default=None, scope=None):
    """Module-level wrapper for ``Configuration.get()``."""
-    return config.get(path, default, scope)
+    return CONFIG.get(path, default, scope)


def set(path, value, scope=None):
@@ -910,26 +912,26 @@ def set(path, value, scope=None):

    Accepts the path syntax described in ``get()``.
    """
-    return config.set(path, value, scope)
+    return CONFIG.set(path, value, scope)


def add_default_platform_scope(platform):
    plat_name = os.path.join("defaults", platform)
-    plat_path = os.path.join(configuration_defaults_path[1], platform)
-    config.push_scope(ConfigScope(plat_name, plat_path))
+    plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
+    CONFIG.push_scope(ConfigScope(plat_name, plat_path))


def scopes():
    """Convenience function to get list of configuration scopes."""
-    return config.scopes
+    return CONFIG.scopes


def _validate_section_name(section):
    """Exit if the section is not a valid section."""
-    if section not in section_schemas:
+    if section not in SECTION_SCHEMAS:
        raise ConfigSectionError(
            "Invalid config section: '%s'. Options are: %s"
-            % (section, " ".join(section_schemas.keys()))
+            % (section, " ".join(SECTION_SCHEMAS.keys()))
        )

@@ -990,7 +992,7 @@ def read_config_file(filename, schema=None):
    if data:
        if not schema:
            key = next(iter(data))
-            schema = all_schemas[key]
+            schema = _ALL_SCHEMAS[key]
        validate(data, schema)
    return data

@@ -1089,7 +1091,7 @@ def get_valid_type(path):
        test_data = {component: test_data}

    try:
-        validate(test_data, section_schemas[section])
+        validate(test_data, SECTION_SCHEMAS[section])
    except (ConfigFormatError, AttributeError) as e:
        jsonschema_error = e.validation_error
        if jsonschema_error.validator == "type":
@@ -1278,9 +1280,9 @@ def default_modify_scope(section="config"):
    If this is not 'compilers', a general (non-platform) scope is used.
    """
    if section == "compilers":
-        return spack.config.config.highest_precedence_scope().name
+        return CONFIG.highest_precedence_scope().name
    else:
-        return spack.config.config.highest_precedence_non_platform_scope().name
+        return CONFIG.highest_precedence_non_platform_scope().name


def default_list_scope():
@@ -1337,18 +1339,18 @@ def use_configuration(*scopes_or_paths):
    Returns:
        Configuration object associated with the scopes passed as arguments
    """
-    global config
+    global CONFIG

    # Normalize input and construct a Configuration object
    configuration = _config_from(scopes_or_paths)
-    config.clear_caches(), configuration.clear_caches()
+    CONFIG.clear_caches(), configuration.clear_caches()

-    saved_config, config = config, configuration
+    saved_config, CONFIG = CONFIG, configuration

    try:
        yield configuration
    finally:
-        config = saved_config
+        CONFIG = saved_config


@llnl.util.lang.memoized

@@ -5,8 +5,8 @@
"""Writers for different kind of recipes and related
convenience functions.
"""
-import collections
import copy
+from collections import namedtuple
from typing import Optional

import spack.environment as ev
@@ -159,13 +159,13 @@ def depfile(self):
    @tengine.context_property
    def run(self):
        """Information related to the run image."""
-        Run = collections.namedtuple("Run", ["image"])
+        Run = namedtuple("Run", ["image"])
        return Run(image=self.final_image)

    @tengine.context_property
    def build(self):
        """Information related to the build image."""
-        Build = collections.namedtuple("Build", ["image"])
+        Build = namedtuple("Build", ["image"])
        return Build(image=self.build_image)

    @tengine.context_property
@@ -176,12 +176,13 @@ def strip(self):

    @tengine.context_property
    def paths(self):
        """Important paths in the image"""
-        Paths = collections.namedtuple("Paths", ["environment", "store", "hidden_view", "view"])
+        Paths = namedtuple("Paths", ["environment", "store", "view_parent", "view", "former_view"])
        return Paths(
            environment="/opt/spack-environment",
            store="/opt/software",
-            hidden_view="/opt/._view",
-            view="/opt/view",
+            view_parent="/opt/views",
+            view="/opt/views/view",
+            former_view="/opt/view",  # /opt/view -> /opt/views/view for backward compatibility
        )

    @tengine.context_property
@@ -257,7 +258,7 @@ def _package_info_from(self, package_list):

        update, install, clean = commands_for(os_pkg_manager)

-        Packages = collections.namedtuple("Packages", ["update", "install", "list", "clean"])
+        Packages = namedtuple("Packages", ["update", "install", "list", "clean"])
        return Packages(update=update, install=install, list=package_list, clean=clean)

    def _os_pkg_manager(self):
@@ -273,7 +274,7 @@ def _os_pkg_manager(self):

    @tengine.context_property
    def extra_instructions(self):
-        Extras = collections.namedtuple("Extra", ["build", "final"])
+        Extras = namedtuple("Extra", ["build", "final"])
        extras = self.container_config.get("extra_instructions", {})
        build, final = extras.get("build", None), extras.get("final", None)
        return Extras(build=build, final=final)
@@ -295,7 +296,7 @@ def bootstrap(self):
        context = {"bootstrap": {"image": self.bootstrap_image, "spack_checkout": command}}
        bootstrap_recipe = env.get_template(template_path).render(**context)

-        Bootstrap = collections.namedtuple("Bootstrap", ["image", "recipe"])
+        Bootstrap = namedtuple("Bootstrap", ["image", "recipe"])
        return Bootstrap(image=self.bootstrap_image, recipe=bootstrap_recipe)

    @tengine.context_property
@@ -303,7 +304,7 @@ def render_phase(self):
        render_bootstrap = bool(self.bootstrap_image)
        render_build = not (self.last_phase == "bootstrap")
        render_final = self.last_phase in (None, "final")
-        Render = collections.namedtuple("Render", ["bootstrap", "build", "final"])
+        Render = namedtuple("Render", ["bootstrap", "build", "final"])
        return Render(bootstrap=render_bootstrap, build=render_build, final=render_final)

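The new `Paths` move the view to `/opt/views/view` and keep `/opt/view` only as a compatibility alias; per the inline comment, the old path is expected to be a symlink to the new one. A sketch of that layout under a temporary directory (POSIX symlink semantics assumed):

```python
import os
import tempfile

# The real view now lives under <root>/views/view; the old <root>/view path
# becomes a symlink to it. Paths are relative to a temp dir, not a real image.
root = tempfile.mkdtemp()
view = os.path.join(root, "views", "view")
former_view = os.path.join(root, "view")

os.makedirs(view)
os.symlink(view, former_view)  # may require privileges on Windows

print(os.path.realpath(former_view) == os.path.realpath(view))  # True
```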
    def __call__(self):

@@ -90,7 +90,7 @@ def spec_from_entry(entry):
        name=entry["name"], version=entry["version"], compiler=compiler_str, arch=arch_str
    )

-    pkg_cls = spack.repo.path.get_pkg_class(entry["name"])
+    pkg_cls = spack.repo.PATH.get_pkg_class(entry["name"])

    if "parameters" in entry:
        variant_strs = list()

@@ -21,10 +21,11 @@
import contextlib
import datetime
import os
+import pathlib
import socket
import sys
import time
-from typing import Dict, List, NamedTuple, Set, Type, Union
+from typing import Any, Callable, Dict, Generator, List, NamedTuple, Set, Type, Union

try:
    import uuid
@@ -141,22 +142,23 @@ class InstallStatuses:
    def canonicalize(cls, query_arg):
        if query_arg is True:
            return [cls.INSTALLED]
-        elif query_arg is False:
+        if query_arg is False:
            return [cls.MISSING]
-        elif query_arg is any:
+        if query_arg is any:
            return [cls.INSTALLED, cls.DEPRECATED, cls.MISSING]
-        elif isinstance(query_arg, InstallStatus):
+        if isinstance(query_arg, InstallStatus):
            return [query_arg]
-        else:
-            try:  # Try block catches if it is not an iterable at all
-                if any(type(x) != InstallStatus for x in query_arg):
-                    raise TypeError
-            except TypeError:
-                raise TypeError(
-                    "installation query must be `any`, boolean, "
-                    "InstallStatus, or iterable of InstallStatus"
-                )
-            return query_arg
+        try:
+            statuses = list(query_arg)
+            if all(isinstance(x, InstallStatus) for x in statuses):
+                return statuses
+        except TypeError:
+            pass
+
+        raise TypeError(
+            "installation query must be `any`, boolean, "
+            "InstallStatus, or iterable of InstallStatus"
+        )
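The rewritten `canonicalize` replaces the elif-chain and nested raise with guard clauses and a single failure path. A standalone re-sketch with a local enum, showing which inputs are accepted:

```python
import enum

class InstallStatus(enum.Enum):
    INSTALLED = "installed"
    DEPRECATED = "deprecated"
    MISSING = "missing"

def canonicalize(query_arg):
    # Standalone sketch of the control flow above (cls.* folded into the enum).
    if query_arg is True:
        return [InstallStatus.INSTALLED]
    if query_arg is False:
        return [InstallStatus.MISSING]
    if query_arg is any:
        return [InstallStatus.INSTALLED, InstallStatus.DEPRECATED, InstallStatus.MISSING]
    if isinstance(query_arg, InstallStatus):
        return [query_arg]
    try:
        statuses = list(query_arg)  # TypeError here if not iterable at all
        if all(isinstance(x, InstallStatus) for x in statuses):
            return statuses
    except TypeError:
        pass
    raise TypeError("installation query must be `any`, boolean, "
                    "InstallStatus, or iterable of InstallStatus")

print(canonicalize(True))                     # [InstallStatus.INSTALLED]
print(canonicalize(any))                      # all three statuses
print(canonicalize([InstallStatus.MISSING]))  # validated and returned as a list
```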
class InstallRecord:
@@ -306,15 +308,16 @@ def __reduce__(self):

    """

-#: Data class to configure locks in Database objects
-#:
-#: Args:
-#:     enable (bool): whether to enable locks or not.
-#:     database_timeout (int or None): timeout for the database lock
-#:     package_timeout (int or None): timeout for the package lock
class LockConfiguration(NamedTuple):
+    """Data class to configure locks in Database objects
+
+    Args:
+        enable: whether to enable locks or not.
+        database_timeout: timeout for the database lock
+        package_timeout: timeout for the package lock
+    """
+
    enable: bool
    database_timeout: Optional[int]
    package_timeout: Optional[int]
@@ -348,13 +351,230 @@ def lock_configuration(configuration):
    )


+def prefix_lock_path(root_dir: Union[str, pathlib.Path]) -> pathlib.Path:
+    """Returns the path of the prefix lock file, given the root directory.
+
+    Args:
+        root_dir: root directory containing the database directory
+    """
+    return pathlib.Path(root_dir) / _DB_DIRNAME / "prefix_lock"
+
+
+def failures_lock_path(root_dir: Union[str, pathlib.Path]) -> pathlib.Path:
+    """Returns the path of the failures lock file, given the root directory.
+
+    Args:
+        root_dir: root directory containing the database directory
+    """
+    return pathlib.Path(root_dir) / _DB_DIRNAME / "prefix_failures"
+
+
+class SpecLocker:
+    """Manages acquiring and releasing read or write locks on concrete specs."""
+
+    def __init__(self, lock_path: Union[str, pathlib.Path], default_timeout: Optional[float]):
+        self.lock_path = pathlib.Path(lock_path)
+        self.default_timeout = default_timeout
+
+        # Maps (spec.dag_hash(), spec.name) to the corresponding lock object
+        self.locks: Dict[Tuple[str, str], lk.Lock] = {}
+
+    def lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
+        """Returns a lock on a concrete spec.
+
+        The lock is a byte range lock on the nth byte of a file.
+
+        The lock file is ``self.lock_path``.
+
+        n is the sys.maxsize-bit prefix of the DAG hash. This makes the likelihood of
+        collision very low AND it gives us readers-writer lock semantics with just a
+        single lockfile, so no cleanup required.
+        """
+        assert spec.concrete, "cannot lock a non-concrete spec"
+        timeout = timeout or self.default_timeout
+        key = self._lock_key(spec)
+
+        if key not in self.locks:
+            self.locks[key] = self.raw_lock(spec, timeout=timeout)
+        else:
+            self.locks[key].default_timeout = timeout
+
+        return self.locks[key]
+
+    def raw_lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
+        """Returns a raw lock for a Spec, but doesn't keep track of it."""
+        return lk.Lock(
+            str(self.lock_path),
+            start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
+            length=1,
+            default_timeout=timeout,
+            desc=spec.name,
+        )
+
+    def has_lock(self, spec: "spack.spec.Spec") -> bool:
+        """Returns True if the spec is already managed by this spec locker"""
+        return self._lock_key(spec) in self.locks
+
+    def _lock_key(self, spec: "spack.spec.Spec") -> Tuple[str, str]:
+        return (spec.dag_hash(), spec.name)
+
+    @contextlib.contextmanager
+    def write_lock(self, spec: "spack.spec.Spec") -> Generator["SpecLocker", None, None]:
+        lock = self.lock(spec)
+        lock.acquire_write()
+
+        try:
+            yield self
+        except lk.LockError:
+            # This addresses the case where a nested lock attempt fails inside
+            # of this context manager
+            raise
+        except (Exception, KeyboardInterrupt):
+            lock.release_write()
+            raise
+        else:
+            lock.release_write()
+
+    def clear(self, spec: "spack.spec.Spec") -> Tuple[bool, Optional[lk.Lock]]:
+        key = self._lock_key(spec)
+        lock = self.locks.pop(key, None)
+        return bool(lock), lock
+
+    def clear_all(self, clear_fn: Optional[Callable[[lk.Lock], Any]] = None) -> None:
+        if clear_fn is not None:
+            for lock in self.locks.values():
+                clear_fn(lock)
+        self.locks.clear()
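The byte-range trick used by `SpecLocker.raw_lock`: every spec hashes to its own byte offset inside one shared lock file, so a single file yields per-spec readers-writer locks with no cleanup. A sketch of computing such an offset from a hex digest (Spack's `dag_hash_bit_prefix` may differ in detail):

```python
import hashlib
import sys

def hash_bit_prefix(digest_hex: str, bits: int) -> int:
    """First `bits` bits of a hex digest, as an integer byte offset."""
    as_int = int(digest_hex, 16)
    total_bits = len(digest_hex) * 4
    return as_int >> (total_bits - bits)

# Stand-in for spec.dag_hash(): any stable per-spec digest works for the demo.
dag_hash = hashlib.sha256(b"zlib@1.2.13/abcdef").hexdigest()

# bit_length(sys.maxsize) is 63 on a 64-bit CPython.
offset = hash_bit_prefix(dag_hash, sys.maxsize.bit_length())
print(offset)  # byte where this spec's 1-byte lock range would start
```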
+class FailureTracker:
+    """Tracks installation failures.
+
+    Prefix failure marking takes the form of a byte range lock on the nth
+    byte of a file for coordinating between concurrent parallel build
+    processes and a persistent file, named with the full hash and
+    containing the spec, in a subdirectory of the database to enable
+    persistence across overlapping but separate related build processes.
+
+    The failure lock file lives alongside the install DB.
+
+    ``n`` is the sys.maxsize-bit prefix of the associated DAG hash to make
+    the likelihood of collision very low with no cleanup required.
+    """
+
+    def __init__(self, root_dir: Union[str, pathlib.Path], default_timeout: Optional[float]):
+        #: Ensure a persistent location for dealing with parallel installation
+        #: failures (e.g., across near-concurrent processes).
+        self.dir = pathlib.Path(root_dir) / _DB_DIRNAME / "failures"
+        self.dir.mkdir(parents=True, exist_ok=True)
+
+        self.locker = SpecLocker(failures_lock_path(root_dir), default_timeout=default_timeout)
+
+    def clear(self, spec: "spack.spec.Spec", force: bool = False) -> None:
+        """Removes any persistent and cached failure tracking for the spec.
+
+        see `mark()`.
+
+        Args:
+            spec: the spec whose failure indicators are being removed
+            force: True if the failure information should be cleared when a failure lock
+                exists for the file, or False if the failure should not be cleared (e.g.,
+                it may be associated with a concurrent build)
+        """
+        locked = self.lock_taken(spec)
+        if locked and not force:
+            tty.msg(f"Retaining failure marking for {spec.name} due to lock")
+            return
+
+        if locked:
+            tty.warn(f"Removing failure marking despite lock for {spec.name}")
+
+        succeeded, lock = self.locker.clear(spec)
+        if succeeded and lock is not None:
+            lock.release_write()
+
+        if self.persistent_mark(spec):
+            path = self._path(spec)
+            tty.debug(f"Removing failure marking for {spec.name}")
+            try:
+                path.unlink()
+            except OSError as err:
+                tty.warn(
+                    f"Unable to remove failure marking for {spec.name} ({str(path)}): {str(err)}"
+                )
+
+    def clear_all(self) -> None:
+        """Force remove install failure tracking files."""
+        tty.debug("Releasing prefix failure locks")
+        self.locker.clear_all(
+            clear_fn=lambda x: x.release_write() if x.is_write_locked() else True
+        )
+
+        tty.debug("Removing prefix failure tracking files")
+        try:
+            for fail_mark in os.listdir(str(self.dir)):
+                try:
+                    (self.dir / fail_mark).unlink()
+                except OSError as exc:
+                    tty.warn(f"Unable to remove failure marking file {fail_mark}: {str(exc)}")
+        except OSError as exc:
+            tty.warn(f"Unable to remove failure marking files: {str(exc)}")
+
+    def mark(self, spec: "spack.spec.Spec") -> lk.Lock:
+        """Marks a spec as failing to install.
+
+        Args:
+            spec: spec that failed to install
+        """
+        # Dump the spec to the failure file for (manual) debugging purposes
+        path = self._path(spec)
+        path.write_text(spec.to_json())
+
+        # Also ensure a failure lock is taken to prevent cleanup removal
+        # of failure status information during a concurrent parallel build.
+        if not self.locker.has_lock(spec):
+            try:
+                mark = self.locker.lock(spec)
+                mark.acquire_write()
+            except lk.LockTimeoutError:
+                # Unlikely that another process failed to install at the same
+                # time but log it anyway.
+                tty.debug(f"PID {os.getpid()} failed to mark install failure for {spec.name}")
+                tty.warn(f"Unable to mark {spec.name} as failed.")
+
+        return self.locker.lock(spec)
+
+    def has_failed(self, spec: "spack.spec.Spec") -> bool:
+        """Return True if the spec is marked as failed."""
+        # The failure was detected in this process.
+        if self.locker.has_lock(spec):
+            return True
+
+        # The failure was detected by a concurrent process (e.g., an srun),
+        # which is expected to be holding a write lock if that is the case.
+        if self.lock_taken(spec):
+            return True
+
+        # Determine if the spec may have been marked as failed by a separate
+        # spack build process running concurrently.
+        return self.persistent_mark(spec)
+
+    def lock_taken(self, spec: "spack.spec.Spec") -> bool:
+        """Return True if another process has a failure lock on the spec."""
+        check = self.locker.raw_lock(spec)
+        return check.is_write_locked()
+
+    def persistent_mark(self, spec: "spack.spec.Spec") -> bool:
+        """Determine if the spec has a persistent failure marking."""
+        return self._path(spec).exists()
+
+    def _path(self, spec: "spack.spec.Spec") -> pathlib.Path:
+        """Return the path to the spec's failure file, which may not exist."""
+        assert spec.concrete, "concrete spec required for failure path"
+        return self.dir / f"{spec.name}-{spec.dag_hash()}"
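`FailureTracker` combines two mechanisms: a write lock (liveness, released when the process dies) and a persistent marker file (survives the process). The marker half is easy to sketch runnably; the lock half needs `llnl.util.lock` and is omitted here:

```python
import json
import pathlib
import tempfile

class MiniFailureTracker:
    """Minimal runnable sketch of the persistent-marker half of the class
    above: one JSON file per failed spec, named <name>-<dag_hash>."""

    def __init__(self, root_dir):
        self.dir = pathlib.Path(root_dir) / "failures"
        self.dir.mkdir(parents=True, exist_ok=True)

    def _path(self, name, dag_hash):
        return self.dir / f"{name}-{dag_hash}"

    def mark(self, name, dag_hash, payload):
        self._path(name, dag_hash).write_text(json.dumps(payload))

    def persistent_mark(self, name, dag_hash):
        return self._path(name, dag_hash).exists()

    def clear(self, name, dag_hash):
        p = self._path(name, dag_hash)
        if p.exists():
            p.unlink()

tracker = MiniFailureTracker(tempfile.mkdtemp())
tracker.mark("zlib", "abc123", {"spec": "zlib@1.2.13"})
print(tracker.persistent_mark("zlib", "abc123"))  # True
tracker.clear("zlib", "abc123")
print(tracker.persistent_mark("zlib", "abc123"))  # False
```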
class Database:
    #: Per-process lock objects for each install prefix
    _prefix_locks: Dict[str, lk.Lock] = {}

    #: Per-process failure (lock) objects for each install prefix
    _prefix_failures: Dict[str, lk.Lock] = {}

    #: Fields written for each install record
    record_fields: Tuple[str, ...] = DEFAULT_INSTALL_RECORD_FIELDS

@@ -392,24 +612,10 @@ def __init__(
        self._verifier_path = os.path.join(self.database_directory, "index_verifier")
        self._lock_path = os.path.join(self.database_directory, "lock")

-        # This is for other classes to use to lock prefix directories.
-        self.prefix_lock_path = os.path.join(self.database_directory, "prefix_lock")

-        # Ensure a persistent location for dealing with parallel installation
-        # failures (e.g., across near-concurrent processes).
-        self._failure_dir = os.path.join(self.database_directory, "failures")

-        # Support special locks for handling parallel installation failures
-        # of a spec.
-        self.prefix_fail_path = os.path.join(self.database_directory, "prefix_failures")

        # Create needed directories and files
        if not is_upstream and not os.path.exists(self.database_directory):
            fs.mkdirp(self.database_directory)

-        if not is_upstream and not os.path.exists(self._failure_dir):
-            fs.mkdirp(self._failure_dir)

        self.is_upstream = is_upstream
        self.last_seen_verifier = ""
        # Failed write transactions (interrupted by exceptions) will alert
@@ -423,15 +629,7 @@ def __init__(

        # initialize rest of state.
        self.db_lock_timeout = lock_cfg.database_timeout
-        self.package_lock_timeout = lock_cfg.package_timeout

        tty.debug("DATABASE LOCK TIMEOUT: {0}s".format(str(self.db_lock_timeout)))
-        timeout_format_str = (
-            "{0}s".format(str(self.package_lock_timeout))
-            if self.package_lock_timeout
-            else "No timeout"
-        )
-        tty.debug("PACKAGE LOCK TIMEOUT: {0}".format(str(timeout_format_str)))

        self.lock: Union[ForbiddenLock, lk.Lock]
        if self.is_upstream:
@@ -471,212 +669,6 @@ def read_transaction(self):
        """Get a read lock context manager for use in a `with` block."""
        return self._read_transaction_impl(self.lock, acquire=self._read)

-    def _failed_spec_path(self, spec):
-        """Return the path to the spec's failure file, which may not exist."""
-        if not spec.concrete:
-            raise ValueError("Concrete spec required for failure path for {0}".format(spec.name))
-
-        return os.path.join(self._failure_dir, "{0}-{1}".format(spec.name, spec.dag_hash()))
-
-    def clear_all_failures(self) -> None:
-        """Force remove install failure tracking files."""
-        tty.debug("Releasing prefix failure locks")
-        for pkg_id in list(self._prefix_failures.keys()):
-            lock = self._prefix_failures.pop(pkg_id, None)
-            if lock:
-                lock.release_write()
-
-        # Remove all failure markings (aka files)
-        tty.debug("Removing prefix failure tracking files")
-        for fail_mark in os.listdir(self._failure_dir):
-            try:
-                os.remove(os.path.join(self._failure_dir, fail_mark))
-            except OSError as exc:
-                tty.warn(
-                    "Unable to remove failure marking file {0}: {1}".format(fail_mark, str(exc))
-                )
-
-    def clear_failure(self, spec: "spack.spec.Spec", force: bool = False) -> None:
-        """
-        Remove any persistent and cached failure tracking for the spec.
-
-        see `mark_failed()`.
-
-        Args:
-            spec: the spec whose failure indicators are being removed
-            force: True if the failure information should be cleared when a prefix failure
-                lock exists for the file, or False if the failure should not be cleared (e.g.,
-                it may be associated with a concurrent build)
-        """
-        failure_locked = self.prefix_failure_locked(spec)
-        if failure_locked and not force:
-            tty.msg("Retaining failure marking for {0} due to lock".format(spec.name))
-            return
-
-        if failure_locked:
-            tty.warn("Removing failure marking despite lock for {0}".format(spec.name))
-
-        lock = self._prefix_failures.pop(spec.prefix, None)
-        if lock:
-            lock.release_write()
-
-        if self.prefix_failure_marked(spec):
-            try:
-                path = self._failed_spec_path(spec)
-                tty.debug("Removing failure marking for {0}".format(spec.name))
-                os.remove(path)
-            except OSError as err:
-                tty.warn(
-                    "Unable to remove failure marking for {0} ({1}): {2}".format(
-                        spec.name, path, str(err)
-                    )
-                )
-
-    def mark_failed(self, spec: "spack.spec.Spec") -> lk.Lock:
-        """
-        Mark a spec as failing to install.
-
-        Prefix failure marking takes the form of a byte range lock on the nth
-        byte of a file for coordinating between concurrent parallel build
-        processes and a persistent file, named with the full hash and
-        containing the spec, in a subdirectory of the database to enable
-        persistence across overlapping but separate related build processes.
-
-        The failure lock file, ``spack.store.STORE.db.prefix_failures``, lives
-        alongside the install DB. ``n`` is the sys.maxsize-bit prefix of the
-        associated DAG hash to make the likelihood of collision very low with
-        no cleanup required.
-        """
-        # Dump the spec to the failure file for (manual) debugging purposes
-        path = self._failed_spec_path(spec)
-        with open(path, "w") as f:
-            spec.to_json(f)
-
-        # Also ensure a failure lock is taken to prevent cleanup removal
-        # of failure status information during a concurrent parallel build.
-        err = "Unable to mark {0.name} as failed."
-
-        prefix = spec.prefix
-        if prefix not in self._prefix_failures:
-            mark = lk.Lock(
-                self.prefix_fail_path,
-                start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
-                length=1,
-                default_timeout=self.package_lock_timeout,
-                desc=spec.name,
-            )
-
-            try:
-                mark.acquire_write()
-            except lk.LockTimeoutError:
-                # Unlikely that another process failed to install at the same
-                # time but log it anyway.
-                tty.debug(
-                    "PID {0} failed to mark install failure for {1}".format(os.getpid(), spec.name)
-                )
-                tty.warn(err.format(spec))
-
-            # Whether we or another process marked it as a failure, track it
-            # as such locally.
-            self._prefix_failures[prefix] = mark
-
-        return self._prefix_failures[prefix]
-
-    def prefix_failed(self, spec: "spack.spec.Spec") -> bool:
-        """Return True if the prefix (installation) is marked as failed."""
-        # The failure was detected in this process.
-        if spec.prefix in self._prefix_failures:
-            return True
-
-        # The failure was detected by a concurrent process (e.g., an srun),
-        # which is expected to be holding a write lock if that is the case.
-        if self.prefix_failure_locked(spec):
-            return True
-
-        # Determine if the spec may have been marked as failed by a separate
-        # spack build process running concurrently.
-        return self.prefix_failure_marked(spec)
-
-    def prefix_failure_locked(self, spec: "spack.spec.Spec") -> bool:
-        """Return True if a process has a failure lock on the spec."""
-        check = lk.Lock(
-            self.prefix_fail_path,
-            start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
-            length=1,
-            default_timeout=self.package_lock_timeout,
-            desc=spec.name,
-        )
-
-        return check.is_write_locked()
-
-    def prefix_failure_marked(self, spec: "spack.spec.Spec") -> bool:
-        """Determine if the spec has a persistent failure marking."""
-        return os.path.exists(self._failed_spec_path(spec))
-
-    def prefix_lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
-        """Get a lock on a particular spec's installation directory.
-
-        NOTE: The installation directory **does not** need to exist.
-
-        Prefix lock is a byte range lock on the nth byte of a file.
-
-        The lock file is ``spack.store.STORE.db.prefix_lock`` -- the DB
-        tells us what to call it and it lives alongside the install DB.
-
-        n is the sys.maxsize-bit prefix of the DAG hash. This makes
-        likelihood of collision is very low AND it gives us
-        readers-writer lock semantics with just a single lockfile, so no
-        cleanup required.
-        """
-        timeout = timeout or self.package_lock_timeout
-        prefix = spec.prefix
-        if prefix not in self._prefix_locks:
-            self._prefix_locks[prefix] = lk.Lock(
-                self.prefix_lock_path,
-                start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
-                length=1,
-                default_timeout=timeout,
-                desc=spec.name,
-            )
-        elif timeout != self._prefix_locks[prefix].default_timeout:
-            self._prefix_locks[prefix].default_timeout = timeout
-
-        return self._prefix_locks[prefix]
-
-    @contextlib.contextmanager
-    def prefix_read_lock(self, spec):
-        prefix_lock = self.prefix_lock(spec)
-        prefix_lock.acquire_read()
-
-        try:
-            yield self
-        except lk.LockError:
-            # This addresses the case where a nested lock attempt fails inside
-            # of this context manager
-            raise
-        except (Exception, KeyboardInterrupt):
-            prefix_lock.release_read()
-            raise
-        else:
-            prefix_lock.release_read()
-
-    @contextlib.contextmanager
-    def prefix_write_lock(self, spec):
-        prefix_lock = self.prefix_lock(spec)
-        prefix_lock.acquire_write()
-
-        try:
-            yield self
-        except lk.LockError:
-            # This addresses the case where a nested lock attempt fails inside
-            # of this context manager
-            raise
-        except (Exception, KeyboardInterrupt):
-            prefix_lock.release_write()
-            raise
-        else:
-            prefix_lock.release_write()
-
    def _write_to_file(self, stream):
        """Write out the database in JSON format to the stream passed
        as argument.

@@ -33,7 +33,7 @@ class OpenMpi(Package):
import functools
import os.path
import re
-from typing import List, Optional, Set, Union
+from typing import Any, Callable, List, Optional, Set, Tuple, Union

import llnl.util.lang
import llnl.util.tty.color
@@ -520,7 +520,8 @@ def _execute_conflicts(pkg):

        # Save in a list the conflicts and the associated custom messages
        when_spec_list = pkg.conflicts.setdefault(conflict_spec, [])
-        when_spec_list.append((when_spec, msg))
+        msg_with_name = f"{pkg.name}: {msg}" if msg is not None else msg
+        when_spec_list.append((when_spec, msg_with_name))

    return _execute_conflicts

@@ -663,39 +664,35 @@ def _execute_patch(pkg_or_dep):

@directive("variants")
def variant(
-    name,
-    default=None,
-    description="",
-    values=None,
-    multi=None,
-    validator=None,
-    when=None,
-    sticky=False,
+    name: str,
+    default: Optional[Any] = None,
+    description: str = "",
+    values: Optional[Union[collections.abc.Sequence, Callable[[Any], bool]]] = None,
+    multi: Optional[bool] = None,
+    validator: Optional[Callable[[str, str, Tuple[Any, ...]], None]] = None,
+    when: Optional[Union[str, bool]] = None,
+    sticky: bool = False,
):
-    """Define a variant for the package. Packager can specify a default
-    value as well as a text description.
+    """Define a variant for the package.
+
+    Packager can specify a default value as well as a text description.

    Args:
-        name (str): name of the variant
-        default (str or bool): default value for the variant, if not
-            specified otherwise the default will be False for a boolean
-            variant and 'nothing' for a multi-valued variant
-        description (str): description of the purpose of the variant
-        values (tuple or typing.Callable): either a tuple of strings containing the
-            allowed values, or a callable accepting one value and returning
-            True if it is valid
-        multi (bool): if False only one value per spec is allowed for
-            this variant
-        validator (typing.Callable): optional group validator to enforce additional
-            logic. It receives the package name, the variant name and a tuple
-            of values and should raise an instance of SpackError if the group
-            doesn't meet the additional constraints
-        when (spack.spec.Spec, bool): optional condition on which the
-            variant applies
-        sticky (bool): the variant should not be changed by the concretizer to
-            find a valid concrete spec.
+        name: Name of the variant
+        default: Default value for the variant, if not specified otherwise the default will be
+            False for a boolean variant and 'nothing' for a multi-valued variant
+        description: Description of the purpose of the variant
+        values: Either a tuple of strings containing the allowed values, or a callable accepting
+            one value and returning True if it is valid
+        multi: If False only one value per spec is allowed for this variant
+        validator: Optional group validator to enforce additional logic. It receives the package
+            name, the variant name and a tuple of values and should raise an instance of
+            SpackError if the group doesn't meet the additional constraints
+        when: Optional condition on which the variant applies
+        sticky: The variant should not be changed by the concretizer to find a valid concrete spec

    Raises:
-        DirectiveError: if arguments passed to the directive are invalid
+        DirectiveError: If arguments passed to the directive are invalid
    """

    def format_error(msg, pkg):
@@ -900,7 +897,8 @@ def _execute_requires(pkg):

        # Save in a list the requirements and the associated custom messages
        when_spec_list = pkg.requirements.setdefault(tuple(requirement_specs), [])
-        when_spec_list.append((when_spec, policy, msg))
+        msg_with_name = f"{pkg.name}: {msg}" if msg is not None else msg
+        when_spec_list.append((when_spec, policy, msg_with_name))

    return _execute_requires
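The typed signature above belongs to Spack's `variant` directive. For orientation, a hypothetical `package.py` exercising those parameters (the package name, URL, and checksum are invented, and this only runs inside a Spack repository):

```python
# Hypothetical package illustrating the variant() parameters typed above;
# the directives come from the standard package.py prelude.
from spack.package import *  # noqa: F401,F403


class Examplelib(Package):
    """Made-up package used only to illustrate the directive."""

    homepage = "https://example.com"
    url = "https://example.com/examplelib-1.0.tar.gz"

    version("1.0", sha256="0" * 64)  # placeholder checksum

    # Boolean variant: default would be False unless stated otherwise.
    variant("shared", default=True, description="Build shared libraries")

    # Restricted values + multi=False: exactly one of the listed strings.
    variant(
        "build_system",
        default="cmake",
        values=("cmake", "autotools"),
        multi=False,
        description="Build system to use",
    )

    # Conditional variant: only exists when the spec satisfies `when`.
    variant("cuda", default=False, when="@1.0:", description="Enable CUDA kernels")
```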
@@ -11,6 +11,7 @@
import shutil
import sys
from contextlib import contextmanager
+from pathlib import Path

import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -104,7 +105,7 @@ def relative_path_for_spec(self, spec):

        projection = spack.projections.get_projection(self.projections, spec)
        path = spec.format(projection)
-        return path
+        return str(Path(path))
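`str(Path(path))` normalizes the projection's forward slashes to the host's separator, which is the point of this change for Windows support. The `Pure*` path classes make the behavior observable from any platform:

```python
from pathlib import PurePosixPath, PureWindowsPath

# str(Path(p)) normalizes separators for the *current* platform; the Pure*
# variants demonstrate both behaviors regardless of where this runs.
p = "linux-ubuntu22.04-x86_64/gcc-12.1.0/zlib-1.2.13"
print(str(PurePosixPath(p)))    # unchanged, slash-separated
print(str(PureWindowsPath(p)))  # backslash-separated, as on Windows
```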
    def write_spec(self, spec, path):
        """Write a spec out to a file."""

@@ -203,7 +203,7 @@ def activate(env, use_env_repo=False):
    env.store_token = spack.store.reinitialize()

    if use_env_repo:
-        spack.repo.path.put_first(env.repo)
+        spack.repo.PATH.put_first(env.repo)

    tty.debug("Using environment '%s'" % env.name)

@@ -227,7 +227,7 @@ def deactivate():

    # use _repo so we only remove if a repo was actually constructed
    if _active_environment._repo:
-        spack.repo.path.remove(_active_environment._repo)
+        spack.repo.PATH.remove(_active_environment._repo)

    tty.debug("Deactivated environment '%s'" % _active_environment.name)

@@ -1084,8 +1084,8 @@ def add(self, user_spec, list_name=user_speclist_name):
        if list_name == user_speclist_name:
            if spec.anonymous:
                raise SpackEnvironmentError("cannot add anonymous specs to an environment")
-            elif not spack.repo.path.exists(spec.name) and not spec.abstract_hash:
-                virtuals = spack.repo.path.provider_index.providers.keys()
+            elif not spack.repo.PATH.exists(spec.name) and not spec.abstract_hash:
+                virtuals = spack.repo.PATH.provider_index.providers.keys()
                if spec.name not in virtuals:
                    msg = "no such package: %s" % spec.name
                    raise SpackEnvironmentError(msg)
@@ -1262,7 +1262,7 @@ def develop(self, spec: Spec, path: str, clone: bool = False) -> bool:
        # better if we can create the `source_path` directly into its final
        # destination.
        abspath = spack.util.path.canonicalize_path(path, default_wd=self.path)
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
        # We construct a package class ourselves, rather than asking for
        # Spec.package, since Spec only allows this when it is concrete
        package = pkg_cls(spec)
@@ -1490,7 +1490,7 @@ def _concretize_separately(self, tests=False):
        # for a write lock. We do this indirectly by retrieving the
        # provider index, which should in turn trigger the update of
        # all the indexes if there's any need for that.
-        _ = spack.repo.path.provider_index
+        _ = spack.repo.PATH.provider_index

        # Ensure we have compilers in compilers.yaml to avoid that
        # processes try to write the config file in parallel
@@ -2280,7 +2280,7 @@ def _add_to_environment_repository(self, spec_node: Spec) -> None:
        repository = spack.repo.create_or_construct(repository_dir, spec_node.namespace)
        pkg_dir = repository.dirname_for_package_name(spec_node.name)
        fs.mkdirp(pkg_dir)
-        spack.repo.path.dump_provenance(spec_node, pkg_dir)
+        spack.repo.PATH.dump_provenance(spec_node, pkg_dir)

    def manifest_uptodate_or_warn(self):
        """Emits a warning if the manifest file is not up-to-date."""
@@ -2448,13 +2448,13 @@ def make_repo_path(root):
def prepare_config_scope(env):
    """Add env's scope to the global configuration search path."""
    for scope in env.config_scopes():
-        spack.config.config.push_scope(scope)
+        spack.config.CONFIG.push_scope(scope)


def deactivate_config_scope(env):
    """Remove any scopes from env from the global config path."""
    for scope in env.config_scopes():
-        spack.config.config.remove_scope(scope.name)
+        spack.config.CONFIG.remove_scope(scope.name)


def manifest_file(env_name_or_dir):

@@ -535,7 +535,7 @@ def edge_entry(self, edge):

def _static_edges(specs, deptype):
    for spec in specs:
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
        possible = pkg_cls.possible_dependencies(expand_virtuals=True, deptype=deptype)

        for parent_name, dependencies in possible.items():

@@ -49,7 +49,7 @@ def __call__(self, spec):


def _content_hash_override(spec):
-    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
    pkg = pkg_cls(spec)
    return pkg.content_hash()

@@ -1147,12 +1147,12 @@ def write_test_result(self, spec, result):
    def write_reproducibility_data(self):
        for spec in self.specs:
            repo_cache_path = self.stage.repo.join(spec.name)
-            spack.repo.path.dump_provenance(spec, repo_cache_path)
+            spack.repo.PATH.dump_provenance(spec, repo_cache_path)
            for vspec in spec.package.virtuals_provided:
                repo_cache_path = self.stage.repo.join(vspec.name)
                if not os.path.exists(repo_cache_path):
                    try:
-                        spack.repo.path.dump_provenance(vspec, repo_cache_path)
+                        spack.repo.PATH.dump_provenance(vspec, repo_cache_path)
                    except spack.repo.UnknownPackageError:
                        pass  # not all virtuals have package files

@@ -519,13 +519,6 @@ def _try_install_from_binary_cache(
    )


-def clear_failures() -> None:
-    """
-    Remove all failure tracking markers for the Spack instance.
-    """
-    spack.store.STORE.db.clear_all_failures()


def combine_phase_logs(phase_log_files: List[str], log_path: str) -> None:
    """
    Read set or list of logs and combine them into one file.
@@ -597,7 +590,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
        # Get the location of the package in the dest repo.
        dest_pkg_dir = repo.dirname_for_package_name(node.name)
        if node is spec:
-            spack.repo.path.dump_provenance(node, dest_pkg_dir)
+            spack.repo.PATH.dump_provenance(node, dest_pkg_dir)
        elif source_pkg_dir:
            fs.install_tree(source_pkg_dir, dest_pkg_dir)
@@ -1126,15 +1119,13 @@ class PackageInstaller:
|
||||
instance.
|
||||
"""
|
||||
|
||||
def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]] = []):
|
||||
def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]] = []) -> None:
|
||||
"""Initialize the installer.
|
||||
|
||||
Args:
|
||||
installs (list): list of tuples, where each
|
||||
tuple consists of a package (PackageBase) and its associated
|
||||
install arguments (dict)
|
||||
Return:
|
||||
PackageInstaller: instance
|
||||
"""
|
||||
# List of build requests
|
||||
self.build_requests = [BuildRequest(pkg, install_args) for pkg, install_args in installs]
|
||||
@@ -1287,7 +1278,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
|
||||
dep_id = package_id(dep_pkg)
|
||||
|
||||
# Check for failure since a prefix lock is not required
|
||||
if spack.store.STORE.db.prefix_failed(dep):
|
||||
if spack.store.STORE.failure_tracker.has_failed(dep):
|
||||
action = "'spack install' the dependency"
|
||||
msg = "{0} is marked as an install failure: {1}".format(dep_id, action)
|
||||
raise InstallError(err.format(request.pkg_id, msg), pkg=dep_pkg)
|
||||
@@ -1502,7 +1493,7 @@ def _ensure_locked(
|
||||
if lock is None:
|
||||
tty.debug(msg.format("Acquiring", desc, pkg_id, pretty_seconds(timeout or 0)))
|
||||
op = "acquire"
|
||||
lock = spack.store.STORE.db.prefix_lock(pkg.spec, timeout)
|
||||
lock = spack.store.STORE.prefix_locker.lock(pkg.spec, timeout)
|
||||
if timeout != lock.default_timeout:
|
||||
tty.warn(
|
||||
"Expected prefix lock timeout {0}, not {1}".format(
|
||||
@@ -1627,12 +1618,12 @@ def _add_tasks(self, request: BuildRequest, all_deps):
|
||||
# Clear any persistent failure markings _unless_ they are
|
||||
# associated with another process in this parallel build
|
||||
# of the spec.
|
||||
spack.store.STORE.db.clear_failure(dep, force=False)
|
||||
spack.store.STORE.failure_tracker.clear(dep, force=False)
|
||||
|
||||
install_package = request.install_args.get("install_package")
|
||||
if install_package and request.pkg_id not in self.build_tasks:
|
||||
# Be sure to clear any previous failure
|
||||
spack.store.STORE.db.clear_failure(request.spec, force=True)
|
||||
spack.store.STORE.failure_tracker.clear(request.spec, force=True)
|
||||
|
||||
# If not installing dependencies, then determine their
|
||||
# installation status before proceeding
|
||||
@@ -1888,7 +1879,7 @@ def _update_failed(
|
||||
err = "" if exc is None else ": {0}".format(str(exc))
|
||||
tty.debug("Flagging {0} as failed{1}".format(pkg_id, err))
|
||||
if mark:
|
||||
self.failed[pkg_id] = spack.store.STORE.db.mark_failed(task.pkg.spec)
|
||||
self.failed[pkg_id] = spack.store.STORE.failure_tracker.mark(task.pkg.spec)
|
||||
else:
|
||||
self.failed[pkg_id] = None
|
||||
task.status = STATUS_FAILED
|
||||
@@ -2074,7 +2065,7 @@ def install(self) -> None:
|
||||
|
||||
# Flag a failed spec. Do not need an (install) prefix lock since
|
||||
# assume using a separate (failed) prefix lock file.
|
||||
if pkg_id in self.failed or spack.store.STORE.db.prefix_failed(spec):
|
||||
if pkg_id in self.failed or spack.store.STORE.failure_tracker.has_failed(spec):
|
||||
term_status.clear()
|
||||
tty.warn("{0} failed to install".format(pkg_id))
|
||||
self._update_failed(task)
|
||||
|
||||
@@ -602,10 +602,10 @@ def setup_main_options(args):
|
||||
|
||||
key = syaml.syaml_str("repos")
|
||||
key.override = True
|
||||
spack.config.config.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
|
||||
spack.config.CONFIG.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
|
||||
[(key, [spack.paths.mock_packages_path])]
|
||||
)
|
||||
spack.repo.path = spack.repo.create(spack.config.config)
|
||||
spack.repo.PATH = spack.repo.create(spack.config.CONFIG)
|
||||
|
||||
# If the user asked for it, don't check ssl certs.
|
||||
if args.insecure:
|
||||
@@ -930,7 +930,7 @@ def _main(argv=None):
|
||||
|
||||
# make spack.config aware of any command line configuration scopes
|
||||
if args.config_scopes:
|
||||
spack.config.command_line_scopes = args.config_scopes
|
||||
spack.config.COMMAND_LINE_SCOPES = args.config_scopes
|
||||
|
||||
# ensure options on spack command come before everything
|
||||
setup_main_options(args)
|
||||
|
||||
@@ -442,7 +442,7 @@ def mirror_archive_paths(fetcher, per_package_ref, spec=None):
|
||||
storage path of the resource associated with the specified ``fetcher``."""
|
||||
ext = None
|
||||
if spec:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
versions = pkg_cls.versions.get(spec.version, {})
|
||||
ext = versions.get("extension", None)
|
||||
# If the spec does not explicitly specify an extension (the default case),
|
||||
@@ -474,7 +474,7 @@ def get_all_versions(specs):
|
||||
"""
|
||||
version_specs = []
|
||||
for spec in specs:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
# Skip any package that has no known versions.
|
||||
if not pkg_cls.versions:
|
||||
tty.msg("No safe (checksummed) versions for package %s" % pkg_cls.name)
|
||||
|
||||
@@ -833,7 +833,7 @@ def ensure_modules_are_enabled_or_warn():
|
||||
return
|
||||
|
||||
# Check if we have custom TCL module sections
|
||||
for scope in spack.config.config.file_scopes:
|
||||
for scope in spack.config.CONFIG.file_scopes:
|
||||
# Skip default configuration
|
||||
if scope.name.startswith("default"):
|
||||
continue
|
||||
|
||||
@@ -143,7 +143,7 @@ def hierarchy_tokens(self):
|
||||
|
||||
# Check if all the tokens in the hierarchy are virtual specs.
|
||||
# If not warn the user and raise an error.
|
||||
not_virtual = [t for t in tokens if t != "compiler" and not spack.repo.path.is_virtual(t)]
|
||||
not_virtual = [t for t in tokens if t != "compiler" and not spack.repo.PATH.is_virtual(t)]
|
||||
if not_virtual:
|
||||
msg = "Non-virtual specs in 'hierarchy' list for lmod: {0}\n"
|
||||
msg += "Please check the 'modules.yaml' configuration files"
|
||||
|
||||
@@ -236,7 +236,7 @@ def install(self, prefix):
|
||||
|
||||
# Create a multimethod with this name if there is not one already
|
||||
original_method = MultiMethodMeta._locals.get(method.__name__)
|
||||
if not type(original_method) == SpecMultiMethod:
|
||||
if not isinstance(original_method, SpecMultiMethod):
|
||||
original_method = SpecMultiMethod(original_method)
|
||||
|
||||
if self.spec is not None:
|
||||
|
||||
@@ -665,7 +665,7 @@ def __init__(self, spec):
|
||||
self.win_rpath = fsys.WindowsSimulatedRPath(self)
|
||||
|
||||
if self.is_extension:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(self.extendee_spec.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(self.extendee_spec.name)
|
||||
pkg_cls(self.extendee_spec)._check_extendable()
|
||||
|
||||
super().__init__()
|
||||
@@ -728,11 +728,11 @@ def possible_dependencies(
|
||||
continue
|
||||
|
||||
# expand virtuals if enabled, otherwise just stop at virtuals
|
||||
if spack.repo.path.is_virtual(name):
|
||||
if spack.repo.PATH.is_virtual(name):
|
||||
if virtuals is not None:
|
||||
virtuals.add(name)
|
||||
if expand_virtuals:
|
||||
providers = spack.repo.path.providers_for(name)
|
||||
providers = spack.repo.PATH.providers_for(name)
|
||||
dep_names = [spec.name for spec in providers]
|
||||
else:
|
||||
visited.setdefault(cls.name, set()).add(name)
|
||||
@@ -756,7 +756,7 @@ def possible_dependencies(
|
||||
continue
|
||||
|
||||
try:
|
||||
dep_cls = spack.repo.path.get_pkg_class(dep_name)
|
||||
dep_cls = spack.repo.PATH.get_pkg_class(dep_name)
|
||||
except spack.repo.UnknownPackageError:
|
||||
# log unknown packages
|
||||
missing.setdefault(cls.name, set()).add(dep_name)
|
||||
@@ -2209,7 +2209,7 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
|
||||
pkg = None
|
||||
|
||||
# Pre-uninstall hook runs first.
|
||||
with spack.store.STORE.db.prefix_write_lock(spec):
|
||||
with spack.store.STORE.prefix_locker.write_lock(spec):
|
||||
if pkg is not None:
|
||||
try:
|
||||
spack.hooks.pre_uninstall(spec)
|
||||
@@ -2459,8 +2459,8 @@ def possible_dependencies(*pkg_or_spec, **kwargs):
|
||||
if not isinstance(pos, spack.spec.Spec):
|
||||
pos = spack.spec.Spec(pos)
|
||||
|
||||
if spack.repo.path.is_virtual(pos.name):
|
||||
packages.extend(p.package_class for p in spack.repo.path.providers_for(pos.name))
|
||||
if spack.repo.PATH.is_virtual(pos.name):
|
||||
packages.extend(p.package_class for p in spack.repo.PATH.providers_for(pos.name))
|
||||
continue
|
||||
else:
|
||||
packages.append(pos.package_class)
|
||||
|
||||
@@ -147,7 +147,7 @@ def preferred_variants(cls, pkg_name):
|
||||
variants = " ".join(variants)
|
||||
|
||||
# Only return variants that are actually supported by the package
|
||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
spec = spack.spec.Spec("%s %s" % (pkg_name, variants))
|
||||
return dict(
|
||||
(name, variant) for name, variant in spec.variants.items() if name in pkg_cls.variants
|
||||
@@ -162,7 +162,7 @@ def spec_externals(spec):
|
||||
from spack.util.module_cmd import path_from_modules # noqa: F401
|
||||
|
||||
def _package(maybe_abstract_spec):
|
||||
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
return pkg_cls(maybe_abstract_spec)
|
||||
|
||||
allpkgs = spack.config.get("packages")
|
||||
@@ -199,7 +199,7 @@ def is_spec_buildable(spec):
|
||||
so_far = all_buildable # the default "so far"
|
||||
|
||||
def _package(s):
|
||||
pkg_cls = spack.repo.path.get_pkg_class(s.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
|
||||
return pkg_cls(s)
|
||||
|
||||
# check whether any providers for this package override the default
|
||||
|
||||
@@ -238,7 +238,7 @@ def to_dict(self):
|
||||
|
||||
def from_dict(dictionary, repository=None):
|
||||
"""Create a patch from json dictionary."""
|
||||
repository = repository or spack.repo.path
|
||||
repository = repository or spack.repo.PATH
|
||||
owner = dictionary.get("owner")
|
||||
if "owner" not in dictionary:
|
||||
raise ValueError("Invalid patch dictionary: %s" % dictionary)
|
||||
|
||||
@@ -10,11 +10,12 @@
|
||||
dependencies.
|
||||
"""
|
||||
import os
|
||||
from pathlib import PurePath
|
||||
|
||||
import llnl.util.filesystem
|
||||
|
||||
#: This file lives in $prefix/lib/spack/spack/__file__
|
||||
prefix = llnl.util.filesystem.ancestor(__file__, 4)
|
||||
prefix = str(PurePath(llnl.util.filesystem.ancestor(__file__, 4)))
|
||||
|
||||
#: synonym for prefix
|
||||
spack_root = prefix
|
||||
@@ -88,7 +89,7 @@ def _get_user_cache_path():
|
||||
return os.path.expanduser(os.getenv("SPACK_USER_CACHE_PATH") or "~%s.spack" % os.sep)
|
||||
|
||||
|
||||
user_cache_path = _get_user_cache_path()
|
||||
user_cache_path = str(PurePath(_get_user_cache_path()))
|
||||
|
||||
#: junit, cdash, etc. reports about builds
|
||||
reports_path = os.path.join(user_cache_path, "reports")
|
||||
|
||||
@@ -64,7 +64,7 @@ def use_platform(new_platform):
|
||||
host = _PickleableCallable(new_platform)
|
||||
|
||||
# Clear configuration and compiler caches
|
||||
spack.config.config.clear_caches()
|
||||
spack.config.CONFIG.clear_caches()
|
||||
spack.compilers._cache_config_files = []
|
||||
|
||||
yield new_platform
|
||||
@@ -73,5 +73,5 @@ def use_platform(new_platform):
|
||||
host = original_host_fn
|
||||
|
||||
# Clear configuration and compiler caches
|
||||
spack.config.config.clear_caches()
|
||||
spack.config.CONFIG.clear_caches()
|
||||
spack.compilers._cache_config_files = []
|
||||
|
||||
@@ -149,7 +149,7 @@ def compute_loader(self, fullname):
|
||||
|
||||
# If it's a module in some repo, or if it is the repo's
|
||||
# namespace, let the repo handle it.
|
||||
for repo in path.repos:
|
||||
for repo in PATH.repos:
|
||||
# We are using the namespace of the repo and the repo contains the package
|
||||
if namespace == repo.full_namespace:
|
||||
# With 2 nested conditionals we can call "repo.real_name" only once
|
||||
@@ -163,7 +163,7 @@ def compute_loader(self, fullname):
|
||||
|
||||
# No repo provides the namespace, but it is a valid prefix of
|
||||
# something in the RepoPath.
|
||||
if path.by_namespace.is_prefix(fullname):
|
||||
if PATH.by_namespace.is_prefix(fullname):
|
||||
return SpackNamespaceLoader()
|
||||
|
||||
return None
|
||||
@@ -184,9 +184,9 @@ def compute_loader(self, fullname):
|
||||
def packages_path():
|
||||
"""Get the test repo if it is active, otherwise the builtin repo."""
|
||||
try:
|
||||
return spack.repo.path.get_repo("builtin.mock").packages_path
|
||||
return spack.repo.PATH.get_repo("builtin.mock").packages_path
|
||||
except spack.repo.UnknownNamespaceError:
|
||||
return spack.repo.path.get_repo("builtin").packages_path
|
||||
return spack.repo.PATH.get_repo("builtin").packages_path
|
||||
|
||||
|
||||
class GitExe:
|
||||
@@ -282,7 +282,7 @@ def add_package_to_git_stage(packages):
|
||||
git = GitExe()
|
||||
|
||||
for pkg_name in packages:
|
||||
filename = spack.repo.path.filename_for_package_name(pkg_name)
|
||||
filename = spack.repo.PATH.filename_for_package_name(pkg_name)
|
||||
if not os.path.isfile(filename):
|
||||
tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)
|
||||
|
||||
@@ -647,7 +647,7 @@ class RepoPath:
|
||||
"""
|
||||
|
||||
def __init__(self, *repos, **kwargs):
|
||||
cache = kwargs.get("cache", spack.caches.misc_cache)
|
||||
cache = kwargs.get("cache", spack.caches.MISC_CACHE)
|
||||
self.repos = []
|
||||
self.by_namespace = nm.NamespaceTrie()
|
||||
|
||||
@@ -966,7 +966,7 @@ def check(condition, msg):
|
||||
|
||||
# Indexes for this repository, computed lazily
|
||||
self._repo_index = None
|
||||
self._cache = cache or spack.caches.misc_cache
|
||||
self._cache = cache or spack.caches.MISC_CACHE
|
||||
|
||||
def real_name(self, import_name):
|
||||
"""Allow users to import Spack packages using Python identifiers.
|
||||
@@ -1357,7 +1357,7 @@ def create_or_construct(path, namespace=None):
|
||||
|
||||
def _path(configuration=None):
|
||||
"""Get the singleton RepoPath instance for Spack."""
|
||||
configuration = configuration or spack.config.config
|
||||
configuration = configuration or spack.config.CONFIG
|
||||
return create(configuration=configuration)
|
||||
|
||||
|
||||
@@ -1374,7 +1374,7 @@ def create(configuration):
|
||||
|
||||
|
||||
#: Singleton repo path instance
|
||||
path: Union[RepoPath, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_path)
|
||||
PATH: Union[RepoPath, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_path)
|
||||
|
||||
# Add the finder to sys.meta_path
|
||||
REPOS_FINDER = ReposFinder()
|
||||
@@ -1383,7 +1383,7 @@ def create(configuration):
|
||||
|
||||
def all_package_names(include_virtuals=False):
|
||||
"""Convenience wrapper around ``spack.repo.all_package_names()``."""
|
||||
return path.all_package_names(include_virtuals)
|
||||
return PATH.all_package_names(include_virtuals)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
@@ -1398,21 +1398,21 @@ def use_repositories(*paths_and_repos, **kwargs):
|
||||
Returns:
|
||||
Corresponding RepoPath object
|
||||
"""
|
||||
global path
|
||||
global PATH
|
||||
# TODO (Python 2.7): remove this kwargs on deprecation of Python 2.7 support
|
||||
override = kwargs.get("override", True)
|
||||
paths = [getattr(x, "root", x) for x in paths_and_repos]
|
||||
scope_name = "use-repo-{}".format(uuid.uuid4())
|
||||
repos_key = "repos:" if override else "repos"
|
||||
spack.config.config.push_scope(
|
||||
spack.config.CONFIG.push_scope(
|
||||
spack.config.InternalConfigScope(name=scope_name, data={repos_key: paths})
|
||||
)
|
||||
path, saved = create(configuration=spack.config.config), path
|
||||
PATH, saved = create(configuration=spack.config.CONFIG), PATH
|
||||
try:
|
||||
yield path
|
||||
yield PATH
|
||||
finally:
|
||||
spack.config.config.remove_scope(scope_name=scope_name)
|
||||
path = saved
|
||||
spack.config.CONFIG.remove_scope(scope_name=scope_name)
|
||||
PATH = saved
|
||||
|
||||
|
||||
class MockRepositoryBuilder:
|
||||
|
||||
@@ -28,6 +28,12 @@
|
||||
"unify": {
|
||||
"oneOf": [{"type": "boolean"}, {"type": "string", "enum": ["when_possible"]}]
|
||||
},
|
||||
"duplicates": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,11 +8,12 @@
|
||||
import enum
|
||||
import itertools
|
||||
import os
|
||||
import pathlib
|
||||
import pprint
|
||||
import re
|
||||
import types
|
||||
import warnings
|
||||
from typing import List
|
||||
from typing import List, NamedTuple
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
@@ -50,6 +51,8 @@
|
||||
import spack.version as vn
|
||||
import spack.version.git_ref_lookup
|
||||
|
||||
from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter
|
||||
|
||||
# these are from clingo.ast and bootstrapped later
|
||||
ASTType = None
|
||||
parse_files = None
|
||||
@@ -77,9 +80,7 @@ def default_clingo_control():
|
||||
"""Return a control object with the default settings used in Spack"""
|
||||
control = clingo.Control()
|
||||
control.configuration.configuration = "tweety"
|
||||
control.configuration.solve.models = 0
|
||||
control.configuration.solver.heuristic = "Domain"
|
||||
control.configuration.solve.parallel_mode = "1"
|
||||
control.configuration.solver.opt_strategy = "usc,one"
|
||||
return control
|
||||
|
||||
@@ -237,7 +238,7 @@ def listify(args):
|
||||
|
||||
def packagize(pkg):
|
||||
if isinstance(pkg, str):
|
||||
return spack.repo.path.get_pkg_class(pkg)
|
||||
return spack.repo.PATH.get_pkg_class(pkg)
|
||||
else:
|
||||
return pkg
|
||||
|
||||
@@ -302,6 +303,8 @@ def argify(arg):
|
||||
return clingo.String(str(arg))
|
||||
elif isinstance(arg, int):
|
||||
return clingo.Number(arg)
|
||||
elif isinstance(arg, AspFunction):
|
||||
return clingo.Function(arg.name, [argify(x) for x in arg.args], positive=positive)
|
||||
else:
|
||||
return clingo.String(str(arg))
|
||||
|
||||
@@ -322,6 +325,15 @@ def __getattr__(self, name):
|
||||
fn = AspFunctionBuilder()
|
||||
|
||||
|
||||
def _create_counter(specs, tests):
|
||||
strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
|
||||
if strategy == "full":
|
||||
return FullDuplicatesCounter(specs, tests=tests)
|
||||
if strategy == "minimal":
|
||||
return MinimalDuplicatesCounter(specs, tests=tests)
|
||||
return NoDuplicatesCounter(specs, tests=tests)
|
||||
|
||||
|
||||
def all_compilers_in_config():
|
||||
return spack.compilers.all_compilers()
|
||||
|
||||
@@ -342,7 +354,7 @@ def extend_flag_list(flag_list, new_flags):
|
||||
|
||||
def check_packages_exist(specs):
|
||||
"""Ensure all packages mentioned in specs exist."""
|
||||
repo = spack.repo.path
|
||||
repo = spack.repo.PATH
|
||||
for spec in specs:
|
||||
for s in spec.traverse():
|
||||
try:
|
||||
@@ -513,15 +525,17 @@ def _compute_specs_from_answer_set(self):
|
||||
best = min(self.answers)
|
||||
opt, _, answer = best
|
||||
for input_spec in self.abstract_specs:
|
||||
key = input_spec.name
|
||||
node = SpecBuilder.make_node(pkg=input_spec.name)
|
||||
if input_spec.virtual:
|
||||
providers = [spec.name for spec in answer.values() if spec.package.provides(key)]
|
||||
key = providers[0]
|
||||
candidate = answer.get(key)
|
||||
providers = [
|
||||
spec.name for spec in answer.values() if spec.package.provides(input_spec.name)
|
||||
]
|
||||
node = SpecBuilder.make_node(pkg=providers[0])
|
||||
candidate = answer.get(node)
|
||||
|
||||
if candidate and candidate.satisfies(input_spec):
|
||||
self._concrete_specs.append(answer[key])
|
||||
self._concrete_specs_by_input[input_spec] = answer[key]
|
||||
self._concrete_specs.append(answer[node])
|
||||
self._concrete_specs_by_input[input_spec] = answer[node]
|
||||
else:
|
||||
self._unsolved_specs.append(input_spec)
|
||||
|
||||
@@ -529,7 +543,7 @@ def _compute_specs_from_answer_set(self):
|
||||
def _normalize_packages_yaml(packages_yaml):
|
||||
normalized_yaml = copy.copy(packages_yaml)
|
||||
for pkg_name in packages_yaml:
|
||||
is_virtual = spack.repo.path.is_virtual(pkg_name)
|
||||
is_virtual = spack.repo.PATH.is_virtual(pkg_name)
|
||||
if pkg_name == "all" or not is_virtual:
|
||||
continue
|
||||
|
||||
@@ -537,7 +551,7 @@ def _normalize_packages_yaml(packages_yaml):
|
||||
data = normalized_yaml.pop(pkg_name)
|
||||
is_buildable = data.get("buildable", True)
|
||||
if not is_buildable:
|
||||
for provider in spack.repo.path.providers_for(pkg_name):
|
||||
for provider in spack.repo.PATH.providers_for(pkg_name):
|
||||
entry = normalized_yaml.setdefault(provider.name, {})
|
||||
entry["buildable"] = False
|
||||
|
||||
@@ -572,16 +586,33 @@ def bootstrap_clingo():
|
||||
from clingo import parse_files
|
||||
|
||||
|
||||
def stringify(sym):
|
||||
"""Stringify symbols from clingo models.
|
||||
class NodeArgument(NamedTuple):
|
||||
id: str
|
||||
pkg: str
|
||||
|
||||
This will turn a ``clingo.Symbol`` into a string, or a sequence of ``clingo.Symbol``
|
||||
objects into a tuple of strings.
|
||||
|
||||
def intermediate_repr(sym):
|
||||
"""Returns an intermediate representation of clingo models for Spack's spec builder.
|
||||
|
||||
Currently, transforms symbols from clingo models either to strings or to NodeArgument objects.
|
||||
|
||||
Returns:
|
||||
This will turn a ``clingo.Symbol`` into a string or NodeArgument, or a sequence of
|
||||
``clingo.Symbol`` objects into a tuple of those objects.
|
||||
"""
|
||||
# TODO: simplify this when we no longer have to support older clingo versions.
|
||||
if isinstance(sym, (list, tuple)):
|
||||
return tuple(stringify(a) for a in sym)
|
||||
return tuple(intermediate_repr(a) for a in sym)
|
||||
|
||||
try:
|
||||
if sym.name == "node":
|
||||
return NodeArgument(
|
||||
id=intermediate_repr(sym.arguments[0]), pkg=intermediate_repr(sym.arguments[1])
|
||||
)
|
||||
except RuntimeError:
|
||||
# This happens when using clingo w/ CFFI and trying to access ".name" for symbols
|
||||
# that are not functions
|
||||
pass
|
||||
|
||||
if clingo_cffi:
|
||||
# Clingo w/ CFFI will throw an exception on failure
|
||||
@@ -596,10 +627,10 @@ def stringify(sym):
|
||||
def extract_args(model, predicate_name):
|
||||
"""Extract the arguments to predicates with the provided name from a model.
|
||||
|
||||
Pull out all the predicates with name ``predicate_name`` from the model, and return
|
||||
their stringified arguments as tuples.
|
||||
Pull out all the predicates with name ``predicate_name`` from the model, and
|
||||
return their intermediate representation.
|
||||
"""
|
||||
return [stringify(sym.arguments) for sym in model if sym.name == predicate_name]
|
||||
return [intermediate_repr(sym.arguments) for sym in model if sym.name == predicate_name]
|
||||
|
||||
|
||||
class ErrorHandler:
|
||||
@@ -772,8 +803,11 @@ def visit(node):
|
||||
|
||||
# Load the file itself
|
||||
self.control.load(os.path.join(parent_dir, "concretize.lp"))
|
||||
self.control.load(os.path.join(parent_dir, "heuristic.lp"))
|
||||
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
|
||||
self.control.load(os.path.join(parent_dir, "display.lp"))
|
||||
if not setup.concretize_everything:
|
||||
self.control.load(os.path.join(parent_dir, "when_possible.lp"))
|
||||
timer.stop("load")
|
||||
|
||||
# Grounding is the first step in the solve -- it turns our facts
|
||||
@@ -801,6 +835,14 @@ def on_model(model):
|
||||
|
||||
timer.start("solve")
|
||||
solve_result = self.control.solve(**solve_kwargs)
|
||||
|
||||
if solve_result.satisfiable and self._model_has_cycles(models):
|
||||
tty.debug(f"cycles detected, falling back to slower algorithm [specs={specs}]")
|
||||
self.control.load(os.path.join(parent_dir, "cycle_detection.lp"))
|
||||
self.control.ground([("no_cycle", [])])
|
||||
models.clear()
|
||||
solve_result = self.control.solve(**solve_kwargs)
|
||||
|
||||
timer.stop("solve")
|
||||
|
||||
# once done, construct the solve result
|
||||
@@ -836,7 +878,8 @@ def on_model(model):
|
||||
for sym in best_model:
|
||||
if sym.name not in ("attr", "error", "opt_criterion"):
|
||||
tty.debug(
|
||||
"UNKNOWN SYMBOL: %s(%s)" % (sym.name, ", ".join(stringify(sym.arguments)))
|
||||
"UNKNOWN SYMBOL: %s(%s)"
|
||||
% (sym.name, ", ".join(intermediate_repr(sym.arguments)))
|
||||
)
|
||||
|
||||
elif cores:
|
||||
@@ -853,6 +896,26 @@ def on_model(model):
|
||||
|
||||
return result, timer, self.control.statistics
|
||||
|
||||
def _model_has_cycles(self, models):
|
||||
"""Returns true if the best model has cycles in it"""
|
||||
cycle_detection = clingo.Control()
|
||||
parent_dir = pathlib.Path(__file__).parent
|
||||
lp_file = parent_dir / "cycle_detection.lp"
|
||||
|
||||
min_cost, best_model = min(models)
|
||||
with cycle_detection.backend() as backend:
|
||||
for atom in best_model:
|
||||
if atom.name == "attr" and str(atom.arguments[0]) == '"depends_on"':
|
||||
symbol = fn.depends_on(atom.arguments[1], atom.arguments[2])
|
||||
atom_id = backend.add_atom(symbol.symbol())
|
||||
backend.add_rule([atom_id], [], choice=False)
|
||||
|
||||
cycle_detection.load(str(lp_file))
|
||||
cycle_detection.ground([("base", []), ("no_cycle", [])])
|
||||
cycle_result = cycle_detection.solve()
|
||||
|
||||
return cycle_result.unsatisfiable
|
||||
|
||||
|
||||
class SpackSolverSetup:
|
||||
"""Class to set up and run a Spack concretization solve."""
|
||||
@@ -883,6 +946,10 @@ def __init__(self, tests=False):
|
||||
|
||||
# id for dummy variables
|
||||
self._condition_id_counter = itertools.count()
|
||||
self._trigger_id_counter = itertools.count()
|
||||
self._trigger_cache = collections.defaultdict(dict)
|
||||
self._effect_id_counter = itertools.count()
|
||||
self._effect_cache = collections.defaultdict(dict)
|
||||
|
||||
# Caches to optimize the setup phase of the solver
|
||||
self.target_specs_cache = None
|
||||
@@ -919,15 +986,18 @@ def key_fn(version):
|
||||
|
||||
for weight, declared_version in enumerate(most_to_least_preferred):
|
||||
self.gen.fact(
|
||||
fn.version_declared(
|
||||
pkg.name, declared_version.version, weight, str(declared_version.origin)
|
||||
fn.pkg_fact(
|
||||
pkg.name,
|
||||
fn.version_declared(
|
||||
declared_version.version, weight, str(declared_version.origin)
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
# Declare deprecated versions for this package, if any
|
||||
deprecated = self.deprecated_versions[pkg.name]
|
||||
for v in sorted(deprecated):
|
||||
self.gen.fact(fn.deprecated_version(pkg.name, v))
|
||||
self.gen.fact(fn.pkg_fact(pkg.name, fn.deprecated_version(v)))
|
||||
|
||||
def spec_versions(self, spec):
|
||||
"""Return list of clauses expressing spec's version constraints."""
|
||||
@@ -956,11 +1026,14 @@ def target_ranges(self, spec, single_target_fn):
|
||||
return [fn.attr("node_target_satisfies", spec.name, target)]
|
||||
|
||||
def conflict_rules(self, pkg):
|
||||
default_msg = "{0} '{1}' conflicts with '{2}'"
|
||||
no_constraint_msg = "{0} conflicts with '{1}'"
|
||||
default_msg = "{0}: '{1}' conflicts with '{2}'"
|
||||
no_constraint_msg = "{0}: conflicts with '{1}'"
|
||||
for trigger, constraints in pkg.conflicts.items():
|
||||
trigger_msg = "conflict trigger %s" % str(trigger)
|
||||
trigger_id = self.condition(spack.spec.Spec(trigger), name=pkg.name, msg=trigger_msg)
|
||||
trigger_spec = spack.spec.Spec(trigger)
|
||||
trigger_id = self.condition(
|
||||
trigger_spec, name=trigger_spec.name or pkg.name, msg=trigger_msg
|
||||
)
|
||||
|
||||
for constraint, conflict_msg in constraints:
|
||||
if conflict_msg is None:
|
||||
@@ -970,7 +1043,9 @@ def conflict_rules(self, pkg):
|
||||
conflict_msg = default_msg.format(pkg.name, trigger, constraint)
|
||||
constraint_msg = "conflict constraint %s" % str(constraint)
|
||||
constraint_id = self.condition(constraint, name=pkg.name, msg=constraint_msg)
|
||||
self.gen.fact(fn.conflict(pkg.name, trigger_id, constraint_id, conflict_msg))
|
||||
self.gen.fact(
|
||||
fn.pkg_fact(pkg.name, fn.conflict(trigger_id, constraint_id, conflict_msg))
|
||||
)
|
||||
self.gen.newline()
|
||||
|
||||
def compiler_facts(self):
|
||||
@@ -1023,8 +1098,11 @@ def package_compiler_defaults(self, pkg):
|
||||
|
||||
for i, compiler in enumerate(reversed(matches)):
|
||||
self.gen.fact(
|
||||
fn.node_compiler_preference(
|
||||
pkg.name, compiler.spec.name, compiler.spec.version, -i * 100
|
||||
fn.pkg_fact(
|
||||
pkg.name,
|
||||
fn.node_compiler_preference(
|
||||
compiler.spec.name, compiler.spec.version, -i * 100
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
@@ -1114,12 +1192,65 @@ def pkg_rules(self, pkg, tests):
|
||||
self.gen.newline()
|
||||
|
||||
# variants
|
||||
self.variant_rules(pkg)
|
||||
|
||||
# conflicts
|
||||
self.conflict_rules(pkg)
|
||||
|
||||
# default compilers for this package
|
||||
self.package_compiler_defaults(pkg)
|
||||
|
||||
# virtuals
|
||||
self.package_provider_rules(pkg)
|
||||
|
||||
# dependencies
|
||||
self.package_dependencies_rules(pkg)
|
||||
|
||||
# virtual preferences
|
||||
self.virtual_preferences(
|
||||
pkg.name,
|
||||
lambda v, p, i: self.gen.fact(fn.pkg_fact(pkg.name, fn.provider_preference(v, p, i))),
|
||||
)
|
||||
|
||||
self.package_requirement_rules(pkg)
|
||||
|
||||
# trigger and effect tables
|
||||
self.trigger_rules()
|
||||
self.effect_rules()
|
||||
|
||||
def trigger_rules(self):
|
||||
"""Flushes all the trigger rules collected so far, and clears the cache."""
|
||||
self.gen.h2("Trigger conditions")
|
||||
for name in self._trigger_cache:
|
||||
cache = self._trigger_cache[name]
|
||||
for spec_str, (trigger_id, requirements) in cache.items():
|
||||
self.gen.fact(fn.pkg_fact(name, fn.trigger_id(trigger_id)))
|
||||
self.gen.fact(fn.pkg_fact(name, fn.trigger_msg(spec_str)))
|
||||
for predicate in requirements:
|
||||
self.gen.fact(fn.condition_requirement(trigger_id, *predicate.args))
|
||||
self.gen.newline()
|
||||
self._trigger_cache.clear()
|
||||
|
||||
def effect_rules(self):
|
||||
"""Flushes all the effect rules collected so far, and clears the cache."""
|
||||
self.gen.h2("Imposed requirements")
|
||||
for name in self._effect_cache:
|
||||
cache = self._effect_cache[name]
|
||||
for spec_str, (effect_id, requirements) in cache.items():
|
||||
self.gen.fact(fn.pkg_fact(name, fn.effect_id(effect_id)))
|
||||
self.gen.fact(fn.pkg_fact(name, fn.effect_msg(spec_str)))
|
||||
for predicate in requirements:
|
||||
self.gen.fact(fn.imposed_constraint(effect_id, *predicate.args))
|
||||
self.gen.newline()
|
||||
self._effect_cache.clear()
|
||||
|
||||
def variant_rules(self, pkg):
|
||||
for name, entry in sorted(pkg.variants.items()):
|
||||
variant, when = entry
|
||||
|
||||
if spack.spec.Spec() in when:
|
||||
# unconditional variant
|
||||
self.gen.fact(fn.variant(pkg.name, name))
|
||||
self.gen.fact(fn.pkg_fact(pkg.name, fn.variant(name)))
|
||||
else:
|
||||
# conditional variant
|
||||
for w in when:
|
||||
@@ -1128,19 +1259,23 @@ def pkg_rules(self, pkg, tests):
|
||||
msg += " when %s" % w
|
||||
|
||||
cond_id = self.condition(w, name=pkg.name, msg=msg)
|
||||
self.gen.fact(fn.variant_condition(cond_id, pkg.name, name))
|
||||
self.gen.fact(fn.pkg_fact(pkg.name, fn.conditional_variant(cond_id, name)))
|
||||
|
||||
single_value = not variant.multi
|
||||
if single_value:
|
||||
self.gen.fact(fn.variant_single_value(pkg.name, name))
|
||||
self.gen.fact(fn.pkg_fact(pkg.name, fn.variant_single_value(name)))
|
||||
self.gen.fact(
|
||||
fn.variant_default_value_from_package_py(pkg.name, name, variant.default)
|
||||
fn.pkg_fact(
|
||||
pkg.name, fn.variant_default_value_from_package_py(name, variant.default)
|
||||
)
|
||||
)
|
||||
else:
|
||||
spec_variant = variant.make_default()
|
||||
defaults = spec_variant.value
|
||||
for val in sorted(defaults):
|
||||
self.gen.fact(fn.variant_default_value_from_package_py(pkg.name, name, val))
|
||||
self.gen.fact(
|
||||
fn.pkg_fact(pkg.name, fn.variant_default_value_from_package_py(name, val))
|
||||
)
|
||||
|
||||
values = variant.values
|
||||
if values is None:
|
||||
@@ -1151,7 +1286,9 @@ def pkg_rules(self, pkg, tests):
|
||||
for sid, s in enumerate(values.sets):
|
||||
for value in s:
|
||||
self.gen.fact(
|
||||
fn.variant_value_from_disjoint_sets(pkg.name, name, value, sid)
|
||||
fn.pkg_fact(
|
||||
pkg.name, fn.variant_value_from_disjoint_sets(name, value, sid)
|
||||
)
|
||||
)
|
||||
union.update(s)
|
||||
values = union
|
||||
@@ -1178,7 +1315,9 @@ def pkg_rules(self, pkg, tests):
|
||||
msg="empty (total) conflict constraint",
|
||||
)
|
||||
msg = "variant {0}={1} is conditionally disabled".format(name, value)
|
||||
self.gen.fact(fn.conflict(pkg.name, trigger_id, constraint_id, msg))
|
||||
self.gen.fact(
|
||||
fn.pkg_fact(pkg.name, fn.conflict(trigger_id, constraint_id, msg))
|
||||
)
|
||||
else:
|
||||
imposed = spack.spec.Spec(value.when)
|
||||
imposed.name = pkg.name
|
||||
@@ -1189,32 +1328,13 @@ def pkg_rules(self, pkg, tests):
|
||||
name=pkg.name,
|
||||
msg="%s variant %s value %s when %s" % (pkg.name, name, value, when),
|
||||
)
|
||||
self.gen.fact(fn.variant_possible_value(pkg.name, name, value))
|
||||
self.gen.fact(fn.pkg_fact(pkg.name, fn.variant_possible_value(name, value)))
|
||||
|
||||
if variant.sticky:
|
||||
self.gen.fact(fn.variant_sticky(pkg.name, name))
|
||||
self.gen.fact(fn.pkg_fact(pkg.name, fn.variant_sticky(name)))
|
||||
|
||||
self.gen.newline()
|
||||
|
||||
# conflicts
|
||||
self.conflict_rules(pkg)
|
||||
|
||||
# default compilers for this package
|
||||
self.package_compiler_defaults(pkg)
|
||||
|
||||
# virtuals
|
||||
self.package_provider_rules(pkg)
|
||||
|
||||
# dependencies
|
||||
self.package_dependencies_rules(pkg)
|
||||
|
||||
# virtual preferences
|
||||
self.virtual_preferences(
|
||||
pkg.name, lambda v, p, i: self.gen.fact(fn.pkg_provider_preference(pkg.name, v, p, i))
|
||||
)
|
||||
|
||||
self.package_requirement_rules(pkg)
|
||||
|
||||
def condition(self, required_spec, imposed_spec=None, name=None, msg=None, node=False):
|
||||
"""Generate facts for a dependency or virtual provider condition.
|
||||
|
||||
@@ -1232,21 +1352,38 @@ def condition(self, required_spec, imposed_spec=None, name=None, msg=None, node=
|
||||
"""
|
||||
named_cond = required_spec.copy()
|
||||
named_cond.name = named_cond.name or name
|
||||
assert named_cond.name, "must provide name for anonymous condtions!"
|
||||
assert named_cond.name, "must provide name for anonymous conditions!"
|
||||
|
||||
# Check if we can emit the requirements before updating the condition ID counter.
|
||||
# In this way, if a condition can't be emitted but the exception is handled in the caller,
|
||||
# we won't emit partial facts.
|
||||
requirements = self.spec_clauses(named_cond, body=True, required_from=name)
|
||||
|
||||
condition_id = next(self._condition_id_counter)
|
||||
self.gen.fact(fn.condition(condition_id, msg))
|
||||
for pred in requirements:
|
||||
self.gen.fact(fn.condition_requirement(condition_id, *pred.args))
|
||||
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition(condition_id)))
|
||||
self.gen.fact(fn.condition_reason(condition_id, msg))
|
||||
|
||||
if imposed_spec:
|
||||
self.impose(condition_id, imposed_spec, node=node, name=name)
|
||||
cache = self._trigger_cache[named_cond.name]
|
||||
if named_cond not in cache:
|
||||
trigger_id = next(self._trigger_id_counter)
|
||||
requirements = self.spec_clauses(named_cond, body=True, required_from=name)
|
||||
cache[named_cond] = (trigger_id, requirements)
|
||||
trigger_id, requirements = cache[named_cond]
|
||||
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_trigger(condition_id, trigger_id)))
|
||||
|
||||
if not imposed_spec:
|
||||
return condition_id
|
||||
|
||||
cache = self._effect_cache[named_cond.name]
|
||||
if imposed_spec not in cache:
|
||||
effect_id = next(self._effect_id_counter)
|
||||
requirements = self.spec_clauses(imposed_spec, body=False, required_from=name)
|
||||
if not node:
|
||||
requirements = list(
|
||||
filter(lambda x: x.args[0] not in ("node", "virtual_node"), requirements)
|
||||
)
|
||||
cache[imposed_spec] = (effect_id, requirements)
|
||||
effect_id, requirements = cache[imposed_spec]
|
||||
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_effect(condition_id, effect_id)))
|
||||
return condition_id
|
||||
|
||||
def impose(self, condition_id, imposed_spec, node=True, name=None, body=False):
|
||||
@@ -1259,13 +1396,19 @@ def impose(self, condition_id, imposed_spec, node=True, name=None, body=False):
|
||||
|
||||
def package_provider_rules(self, pkg):
|
||||
for provider_name in sorted(set(s.name for s in pkg.provided.keys())):
|
||||
self.gen.fact(fn.possible_provider(pkg.name, provider_name))
|
||||
if provider_name not in self.possible_virtuals:
|
||||
continue
|
||||
self.gen.fact(fn.pkg_fact(pkg.name, fn.possible_provider(provider_name)))
|
||||
|
||||
for provided, whens in pkg.provided.items():
|
||||
if provided.name not in self.possible_virtuals:
|
||||
continue
|
||||
for when in whens:
|
||||
msg = "%s provides %s when %s" % (pkg.name, provided, when)
|
||||
condition_id = self.condition(when, provided, pkg.name, msg)
|
||||
self.gen.fact(fn.provider_condition(condition_id, when.name, provided.name))
|
||||
self.gen.fact(
|
||||
fn.pkg_fact(when.name, fn.provider_condition(condition_id, provided.name))
|
||||
)
|
||||
self.gen.newline()
|
||||
|
||||
def package_dependencies_rules(self, pkg):
|
||||
@@ -1289,9 +1432,13 @@ def package_dependencies_rules(self, pkg):
|
||||
msg = "%s depends on %s" % (pkg.name, dep.spec.name)
|
||||
if cond != spack.spec.Spec():
|
||||
msg += " when %s" % cond
|
||||
else:
|
||||
pass
|
||||
|
||||
condition_id = self.condition(cond, dep.spec, pkg.name, msg)
|
||||
self.gen.fact(fn.dependency_condition(condition_id, pkg.name, dep.spec.name))
|
||||
self.gen.fact(
|
||||
fn.pkg_fact(pkg.name, fn.dependency_condition(condition_id, dep.spec.name))
|
||||
)
|
||||
|
||||
for t in sorted(deptypes):
|
||||
# there is a declared dependency of type t
|
||||
@@ -1328,7 +1475,7 @@ def provider_requirements(self):
|
||||
"Internal Error: possible_virtuals is not populated. Please report to the spack"
|
||||
" maintainers"
|
||||
)
|
||||
packages_yaml = spack.config.config.get("packages")
|
||||
packages_yaml = spack.config.CONFIG.get("packages")
|
||||
assert self.possible_virtuals is not None, msg
|
||||
for virtual_str in sorted(self.possible_virtuals):
|
||||
requirements = packages_yaml.get(virtual_str, {}).get("require", [])
|
||||
@@ -1336,6 +1483,8 @@ def provider_requirements(self):
|
||||
virtual_str, requirements, kind=RequirementKind.VIRTUAL
|
||||
)
|
||||
self.emit_facts_from_requirement_rules(rules)
|
||||
self.trigger_rules()
|
||||
self.effect_rules()
|
||||
|
||||
def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
|
||||
"""Generate facts to enforce requirements.
|
||||
@@ -1416,7 +1565,7 @@ def external_packages(self):
|
||||
continue
|
||||
|
||||
# This package does not appear in any repository
|
||||
if pkg_name not in spack.repo.path:
|
||||
if pkg_name not in spack.repo.PATH:
|
||||
continue
|
||||
|
||||
self.gen.h2("External package: {0}".format(pkg_name))
|
||||
@@ -1449,10 +1598,12 @@ def external_packages(self):
|
||||
for local_idx, spec in enumerate(external_specs):
|
||||
msg = "%s available as external when satisfying %s" % (spec.name, spec)
|
||||
condition_id = self.condition(spec, msg=msg)
|
||||
self.gen.fact(fn.possible_external(condition_id, pkg_name, local_idx))
|
||||
self.gen.fact(fn.pkg_fact(pkg_name, fn.possible_external(condition_id, local_idx)))
|
||||
self.possible_versions[spec.name].add(spec.version)
|
||||
self.gen.newline()
|
||||
|
||||
self.trigger_rules()
|
||||
|
||||
def preferred_variants(self, pkg_name):
|
||||
"""Facts on concretization preferences, as read from packages.yaml"""
|
||||
preferences = spack.package_prefs.PackagePrefs
|
||||
@@ -1495,7 +1646,9 @@ def target_preferences(self, pkg_name):
|
||||
if str(preferred.architecture.target) == best_default and i != 0:
|
||||
offset = 100
|
||||
self.gen.fact(
|
||||
fn.target_weight(pkg_name, str(preferred.architecture.target), i + offset)
|
||||
fn.pkg_fact(
|
||||
pkg_name, fn.target_weight(str(preferred.architecture.target), i + offset)
|
||||
)
|
||||
)
|
||||
|
||||
def spec_clauses(self, *args, **kwargs):
|
||||
@@ -1598,7 +1751,7 @@ class Body:
|
||||
if not spec.concrete:
|
||||
reserved_names = spack.directives.reserved_names
|
||||
if not spec.virtual and vname not in reserved_names:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
try:
|
||||
variant_def, _ = pkg_cls.variants[vname]
|
||||
except KeyError:
|
||||
@@ -1606,7 +1759,7 @@ class Body:
|
||||
raise RuntimeError(msg.format(vname, spec.name))
|
||||
else:
|
||||
variant_def.validate_or_raise(
|
||||
variant, spack.repo.path.get_pkg_class(spec.name)
|
||||
variant, spack.repo.PATH.get_pkg_class(spec.name)
|
||||
)
|
||||
|
||||
clauses.append(f.variant_value(spec.name, vname, value))
|
||||
@@ -1678,7 +1831,7 @@ class Body:
|
||||
except spack.repo.UnknownNamespaceError:
|
||||
# Try to look up the package of the same name and use its
|
||||
# providers. This is as good as we can do without edge info.
|
||||
pkg_class = spack.repo.path.get_pkg_class(dep.name)
|
||||
pkg_class = spack.repo.PATH.get_pkg_class(dep.name)
|
||||
spec = spack.spec.Spec(f"{dep.name}@{dep.version}")
|
||||
pkg = pkg_class(spec)
|
||||
|
||||
@@ -1724,7 +1877,7 @@ def build_version_dict(self, possible_pkgs):
|
||||
packages_yaml = spack.config.get("packages")
|
||||
packages_yaml = _normalize_packages_yaml(packages_yaml)
|
||||
for pkg_name in possible_pkgs:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
|
||||
# All the versions from the corresponding package.py file. Since concepts
|
||||
# like being a "develop" version or being preferred exist only at a
|
||||
@@ -1755,7 +1908,7 @@ def key_fn(item):
|
||||
# specs will be computed later
|
||||
version_preferences = packages_yaml.get(pkg_name, {}).get("version", [])
|
||||
version_defs = []
|
||||
pkg_class = spack.repo.path.get_pkg_class(pkg_name)
|
||||
pkg_class = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
for vstr in version_preferences:
|
||||
v = vn.ver(vstr)
|
||||
if isinstance(v, vn.GitVersion):
|
||||
@@ -2041,11 +2194,11 @@ def define_version_constraints(self):
|
||||
# generate facts for each package constraint and the version
|
||||
# that satisfies it
|
||||
for v in sorted(v for v in self.possible_versions[pkg_name] if v.satisfies(versions)):
|
||||
self.gen.fact(fn.version_satisfies(pkg_name, versions, v))
|
||||
self.gen.fact(fn.pkg_fact(pkg_name, fn.version_satisfies(versions, v)))
|
||||
|
||||
self.gen.newline()
|
||||
|
||||
def define_virtual_constraints(self):
|
||||
def collect_virtual_constraints(self):
|
||||
"""Define versions for constraints on virtuals.
|
||||
|
||||
Must be called before define_version_constraints().
|
||||
@@ -2053,7 +2206,7 @@ def define_virtual_constraints(self):
|
||||
# aggregate constraints into per-virtual sets
|
||||
constraint_map = collections.defaultdict(lambda: set())
|
||||
for pkg_name, versions in self.version_constraints:
|
||||
if not spack.repo.path.is_virtual(pkg_name):
|
||||
if not spack.repo.PATH.is_virtual(pkg_name):
|
||||
continue
|
||||
constraint_map[pkg_name].add(versions)
|
||||
|
||||
@@ -2131,7 +2284,7 @@ def define_variant_values(self):
|
||||
# spec_clauses(). We might want to order these facts by pkg and name
|
||||
# if we are debugging.
|
||||
for pkg, variant, value in self.variant_values_from_specs:
|
||||
self.gen.fact(fn.variant_possible_value(pkg, variant, value))
|
||||
self.gen.fact(fn.pkg_fact(pkg, fn.variant_possible_value(variant, value)))
|
||||
|
||||
def _facts_from_concrete_spec(self, spec, possible):
|
||||
# tell the solver about any installed packages that could
|
||||
@@ -2141,7 +2294,7 @@ def _facts_from_concrete_spec(self, spec, possible):
|
||||
self.reusable_and_possible[h] = spec
|
||||
try:
|
||||
# Only consider installed packages for repo we know
|
||||
spack.repo.path.get(spec)
|
||||
spack.repo.PATH.get(spec)
|
||||
except (spack.repo.UnknownNamespaceError, spack.repo.UnknownPackageError):
|
||||
return
|
||||
|
||||
@@ -2191,20 +2344,19 @@ def setup(self, driver, specs, reuse=None):
|
||||
|
||||
# get list of all possible dependencies
|
||||
self.possible_virtuals = set(x.name for x in specs if x.virtual)
|
||||
possible = spack.package_base.possible_dependencies(
|
||||
*specs, virtuals=self.possible_virtuals, deptype=spack.dependency.all_deptypes
|
||||
)
|
||||
|
||||
node_counter = _create_counter(specs, tests=self.tests)
|
||||
self.possible_virtuals = node_counter.possible_virtuals()
|
||||
self.pkgs = node_counter.possible_dependencies()
|
||||
|
||||
# Fail if we already know an unreachable node is requested
|
||||
for spec in specs:
|
||||
missing_deps = [
|
||||
str(d) for d in spec.traverse() if d.name not in possible and not d.virtual
|
||||
str(d) for d in spec.traverse() if d.name not in self.pkgs and not d.virtual
|
||||
]
|
||||
if missing_deps:
|
||||
raise spack.spec.InvalidDependencyError(spec.name, missing_deps)
|
||||
|
||||
self.pkgs = set(possible)
|
||||
|
||||
# driver is used by all the functions below to add facts and
|
||||
# rules to generate an ASP program.
|
||||
self.gen = driver
|
||||
@@ -2228,13 +2380,16 @@ def setup(self, driver, specs, reuse=None):
|
||||
self.possible_compilers = self.generate_possible_compilers(specs)
|
||||
|
||||
self.gen.h1("Concrete input spec definitions")
|
||||
self.define_concrete_input_specs(specs, possible)
|
||||
self.define_concrete_input_specs(specs, self.pkgs)
|
||||
|
||||
if reuse:
|
||||
self.gen.h1("Reusable specs")
|
||||
self.gen.fact(fn.optimize_for_reuse())
|
||||
for reusable_spec in reuse:
|
||||
self._facts_from_concrete_spec(reusable_spec, possible)
|
||||
self._facts_from_concrete_spec(reusable_spec, self.pkgs)
|
||||
|
||||
self.gen.h1("Generic statements on possible packages")
|
||||
node_counter.possible_packages_facts(self.gen, fn)
|
||||
|
||||
self.gen.h1("Possible flags on nodes")
|
||||
for flag in spack.spec.FlagMap.valid_compiler_flags():
|
||||
@@ -2255,7 +2410,7 @@ def setup(self, driver, specs, reuse=None):
|
||||
self.external_packages()
|
||||
|
||||
# traverse all specs and packages to build dict of possible versions
|
||||
self.build_version_dict(possible)
|
||||
self.build_version_dict(self.pkgs)
|
||||
self.add_concrete_versions_from_specs(specs, Provenance.SPEC)
|
||||
self.add_concrete_versions_from_specs(dev_specs, Provenance.DEV_SPEC)
|
||||
|
||||
@@ -2270,9 +2425,12 @@ def setup(self, driver, specs, reuse=None):
|
||||
self.preferred_variants(pkg)
|
||||
self.target_preferences(pkg)
|
||||
|
||||
self.gen.h1("Develop specs")
|
||||
# Inject dev_path from environment
|
||||
for ds in dev_specs:
|
||||
self.condition(spack.spec.Spec(ds.name), ds, msg="%s is a develop spec" % ds.name)
|
||||
self.trigger_rules()
|
||||
self.effect_rules()
|
||||
|
||||
self.gen.h1("Spec Constraints")
|
||||
self.literal_specs(specs)
|
||||
@@ -2280,10 +2438,8 @@ def setup(self, driver, specs, reuse=None):
|
||||
self.gen.h1("Variant Values defined in specs")
|
||||
self.define_variant_values()
|
||||
|
||||
self.gen.h1("Virtual Constraints")
|
||||
self.define_virtual_constraints()
|
||||
|
||||
self.gen.h1("Version Constraints")
|
||||
self.collect_virtual_constraints()
|
||||
self.define_version_constraints()
|
||||
|
||||
self.gen.h1("Compiler Version Constraints")
|
||||
@@ -2305,8 +2461,8 @@ def literal_specs(self, specs):
|
||||
fn.literal(idx, "variant_default_value_from_cli", *clause.args[1:])
|
||||
)
|
||||
|
||||
if self.concretize_everything:
|
||||
self.gen.fact(fn.concretize_everything())
|
||||
if self.concretize_everything:
|
||||
self.gen.fact(fn.solve_literal(idx))
|
||||
|
||||
def _get_versioned_specs_from_pkg_requirements(self):
|
||||
"""If package requirements mention versions that are not mentioned
|
||||
@@ -2366,7 +2522,7 @@ def _specs_from_requires(self, pkg_name, section):
|
||||
# Prefer spec's name if it exists, in case the spec is
|
||||
# requiring a specific implementation inside of a virtual section
|
||||
# e.g. packages:mpi:require:openmpi@4.0.1
|
||||
pkg_class = spack.repo.path.get_pkg_class(spec.name or pkg_name)
|
||||
pkg_class = spack.repo.PATH.get_pkg_class(spec.name or pkg_name)
|
||||
satisfying_versions = self._check_for_defined_matching_versions(
|
||||
pkg_class, spec.versions
|
||||
)
|
||||
@@ -2401,6 +2557,16 @@ class SpecBuilder:
|
||||
)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def make_node(*, pkg: str) -> NodeArgument:
|
||||
"""Given a package name, returns the string representation of the "min_dupe_id" node in
|
||||
the ASP encoding.
|
||||
|
||||
Args:
|
||||
pkg: name of a package
|
||||
"""
|
||||
return NodeArgument(id="0", pkg=pkg)
|
||||
|
||||
def __init__(self, specs, hash_lookup=None):
|
||||
self._specs = {}
|
||||
self._result = None
|
||||
@@ -2413,101 +2579,105 @@ def __init__(self, specs, hash_lookup=None):
|
||||
# from this dictionary during reconstruction
|
||||
self._hash_lookup = hash_lookup or {}
|
||||
|
||||
def hash(self, pkg, h):
|
||||
if pkg not in self._specs:
|
||||
self._specs[pkg] = self._hash_lookup[h]
|
||||
self._hash_specs.append(pkg)
|
||||
def hash(self, node, h):
|
||||
if node not in self._specs:
|
||||
self._specs[node] = self._hash_lookup[h]
|
||||
self._hash_specs.append(node)
|
||||
|
||||
def node(self, pkg):
|
||||
if pkg not in self._specs:
|
||||
self._specs[pkg] = spack.spec.Spec(pkg)
|
||||
def node(self, node):
|
||||
if node not in self._specs:
|
||||
self._specs[node] = spack.spec.Spec(node.pkg)
|
||||
|
||||
def _arch(self, pkg):
|
||||
arch = self._specs[pkg].architecture
|
||||
def _arch(self, node):
|
||||
arch = self._specs[node].architecture
|
||||
if not arch:
|
||||
arch = spack.spec.ArchSpec()
|
||||
self._specs[pkg].architecture = arch
|
||||
self._specs[node].architecture = arch
|
||||
return arch
|
||||
|
||||
def node_platform(self, pkg, platform):
|
||||
self._arch(pkg).platform = platform
|
||||
def node_platform(self, node, platform):
|
||||
self._arch(node).platform = platform
|
||||
|
||||
def node_os(self, pkg, os):
|
||||
self._arch(pkg).os = os
|
||||
def node_os(self, node, os):
|
||||
self._arch(node).os = os
|
||||
|
||||
def node_target(self, pkg, target):
|
||||
self._arch(pkg).target = target
|
||||
def node_target(self, node, target):
|
||||
self._arch(node).target = target
|
||||
|
||||
def variant_value(self, pkg, name, value):
|
||||
def variant_value(self, node, name, value):
|
||||
# FIXME: is there a way not to special case 'dev_path' everywhere?
|
||||
if name == "dev_path":
|
||||
self._specs[pkg].variants.setdefault(
|
||||
self._specs[node].variants.setdefault(
|
||||
name, spack.variant.SingleValuedVariant(name, value)
|
||||
)
|
||||
return
|
||||
|
||||
if name == "patches":
|
||||
self._specs[pkg].variants.setdefault(
|
||||
self._specs[node].variants.setdefault(
|
||||
name, spack.variant.MultiValuedVariant(name, value)
|
||||
)
|
||||
return
|
||||
|
||||
self._specs[pkg].update_variant_validate(name, value)
|
||||
self._specs[node].update_variant_validate(name, value)
|
||||
|
||||
def version(self, pkg, version):
|
||||
self._specs[pkg].versions = vn.VersionList([vn.Version(version)])
|
||||
def version(self, node, version):
|
||||
self._specs[node].versions = vn.VersionList([vn.Version(version)])
|
||||
|
||||
def node_compiler_version(self, pkg, compiler, version):
|
||||
self._specs[pkg].compiler = spack.spec.CompilerSpec(compiler)
|
||||
self._specs[pkg].compiler.versions = vn.VersionList([vn.Version(version)])
|
||||
def node_compiler_version(self, node, compiler, version):
|
||||
self._specs[node].compiler = spack.spec.CompilerSpec(compiler)
|
||||
self._specs[node].compiler.versions = vn.VersionList([vn.Version(version)])
|
||||
|
||||
def node_flag_compiler_default(self, pkg):
|
||||
self._flag_compiler_defaults.add(pkg)
|
||||
def node_flag_compiler_default(self, node):
|
||||
self._flag_compiler_defaults.add(node)
|
||||
|
||||
def node_flag(self, pkg, flag_type, flag):
|
||||
-        self._specs[pkg].compiler_flags.add_flag(flag_type, flag, False)
+    def node_flag(self, node, flag_type, flag):
+        self._specs[node].compiler_flags.add_flag(flag_type, flag, False)

-    def node_flag_source(self, pkg, flag_type, source):
-        self._flag_sources[(pkg, flag_type)].add(source)
+    def node_flag_source(self, node, flag_type, source):
+        self._flag_sources[(node, flag_type)].add(source)

-    def no_flags(self, pkg, flag_type):
-        self._specs[pkg].compiler_flags[flag_type] = []
+    def no_flags(self, node, flag_type):
+        self._specs[node].compiler_flags[flag_type] = []

-    def external_spec_selected(self, pkg, idx):
+    def external_spec_selected(self, node, idx):
         """This means that the external spec and index idx
         has been selected for this package.
         """
         packages_yaml = spack.config.get("packages")
         packages_yaml = _normalize_packages_yaml(packages_yaml)
-        spec_info = packages_yaml[pkg]["externals"][int(idx)]
-        self._specs[pkg].external_path = spec_info.get("prefix", None)
-        self._specs[pkg].external_modules = spack.spec.Spec._format_module_list(
+        spec_info = packages_yaml[node.pkg]["externals"][int(idx)]
+        self._specs[node].external_path = spec_info.get("prefix", None)
+        self._specs[node].external_modules = spack.spec.Spec._format_module_list(
             spec_info.get("modules", None)
         )
-        self._specs[pkg].extra_attributes = spec_info.get("extra_attributes", {})
+        self._specs[node].extra_attributes = spec_info.get("extra_attributes", {})

         # If this is an extension, update the dependencies to include the extendee
-        package = self._specs[pkg].package_class(self._specs[pkg])
+        package = self._specs[node].package_class(self._specs[node])
         extendee_spec = package.extendee_spec

         if extendee_spec:
-            package.update_external_dependencies(self._specs.get(extendee_spec.name, None))
+            extendee_node = SpecBuilder.make_node(pkg=extendee_spec.name)
+            package.update_external_dependencies(self._specs.get(extendee_node, None))

-    def depends_on(self, pkg, dep, type):
-        dependencies = self._specs[pkg].edges_to_dependencies(name=dep)
+    def depends_on(self, parent_node, dependency_node, type):
+        dependency_spec = self._specs[dependency_node]
+        edges = self._specs[parent_node].edges_to_dependencies(name=dependency_spec.name)
+        edges = [x for x in edges if id(x.spec) == id(dependency_spec)]

-        # TODO: assertion to be removed when cross-compilation is handled correctly
-        msg = "Current solver does not handle multiple dependency edges of the same name"
-        assert len(dependencies) < 2, msg

-        if not dependencies:
-            self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,), virtuals=())
+        if not edges:
+            self._specs[parent_node].add_dependency_edge(
+                self._specs[dependency_node], deptypes=(type,), virtuals=()
+            )
         else:
-            # TODO: This assumes that each solve unifies dependencies
-            dependencies[0].update_deptypes(deptypes=(type,))
+            edges[0].update_deptypes(deptypes=(type,))

-    def virtual_on_edge(self, pkg, provider, virtual):
-        dependencies = self._specs[pkg].edges_to_dependencies(name=provider)
-        assert len(dependencies) == 1
+    def virtual_on_edge(self, parent_node, provider_node, virtual):
+        dependencies = self._specs[parent_node].edges_to_dependencies(name=(provider_node.pkg))
+        provider_spec = self._specs[provider_node]
+        dependencies = [x for x in dependencies if id(x.spec) == id(provider_spec)]
+        assert len(dependencies) == 1, f"{virtual}: {provider_node.pkg}"
         dependencies[0].update_virtuals((virtual,))

     def reorder_flags(self):
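A note on the node arguments threaded through the methods above: once duplicate nodes are allowed, a package name no longer identifies a spec uniquely, so self._specs is keyed by node objects and edges are filtered by the identity of the dependency spec. A minimal sketch of the idea, assuming a namedtuple-shaped node (the real SpecBuilder.make_node is defined in a part of the asp.py diff not shown here):

import collections

# Hypothetical stand-in for the node keys used in SpecBuilder._specs: two nodes
# with the same package name but different ids are distinct dictionary keys.
Node = collections.namedtuple("Node", ["id", "pkg"])

specs = {
    Node(0, "cmake"): "cmake@3.27 from the link/run unification set",
    Node(1, "cmake"): "cmake@3.24 as a duplicated build tool",
}
assert Node(0, "cmake") != Node(1, "cmake")  # same name, different nodes
assert len({node.pkg for node in specs}) == 1  # but only one package name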
@@ -2537,19 +2707,23 @@ def reorder_flags(self):

             # order is determined by the DAG. A spec's flags come after any of its ancestors
             # on the compile line
-            source_key = (spec.name, flag_type)
+            node = SpecBuilder.make_node(pkg=spec.name)
+            source_key = (node, flag_type)
             if source_key in self._flag_sources:
-                order = [s.name for s in spec.traverse(order="post", direction="parents")]
+                order = [
+                    SpecBuilder.make_node(pkg=s.name)
+                    for s in spec.traverse(order="post", direction="parents")
+                ]
                 sorted_sources = sorted(
                     self._flag_sources[source_key], key=lambda s: order.index(s)
                 )

                 # add flags from each source, lowest to highest precedence
-                for name in sorted_sources:
+                for node in sorted_sources:
                     all_src_flags = list()
-                    per_pkg_sources = [self._specs[name]]
-                    if name in cmd_specs:
-                        per_pkg_sources.append(cmd_specs[name])
+                    per_pkg_sources = [self._specs[node]]
+                    if node.pkg in cmd_specs:
+                        per_pkg_sources.append(cmd_specs[node.pkg])
                     for source in per_pkg_sources:
                         all_src_flags.extend(source.compiler_flags.get(flag_type, []))
                     extend_flag_list(from_sources, all_src_flags)
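The reorder_flags hunk above only changes the key type used for flag sources from plain names to nodes; the ordering contract itself is unchanged. A toy illustration with hypothetical names:

# Post-order traversal from a spec lists ancestors later, so sorting sources
# by their index in that list yields lowest-to-highest precedence:
order = ["leaf", "middle", "root"]  # hypothetical post-order of parents
sources = {"root", "leaf"}
sorted_sources = sorted(sources, key=lambda s: order.index(s))
assert sorted_sources == ["leaf", "root"]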
@@ -2620,14 +2794,15 @@ def build_specs(self, function_tuples):
             # solving but don't construct anything. Do not ignore error
             # predicates on virtual packages.
             if name != "error":
-                pkg = args[0]
-                if spack.repo.path.is_virtual(pkg):
+                node = args[0]
+                pkg = node.pkg
+                if spack.repo.PATH.is_virtual(pkg):
                     continue

             # if we've already gotten a concrete spec for this pkg,
             # do not bother calling actions on it except for node_flag_source,
             # since node_flag_source is tracking information not in the spec itself
-            spec = self._specs.get(pkg)
+            spec = self._specs.get(args[0])
             if spec and spec.concrete:
                 if name != "node_flag_source":
                     continue
@@ -2639,16 +2814,18 @@ def build_specs(self, function_tuples):
         for spec in self._specs.values():
             if spec.namespace:
                 continue
-            repo = spack.repo.path.repo_for_pkg(spec)
+            repo = spack.repo.PATH.repo_for_pkg(spec)
             spec.namespace = repo.namespace

         # fix flags after all specs are constructed
         self.reorder_flags()

+        # cycle detection
+        roots = [spec.root for spec in self._specs.values() if not spec.root.installed]
+
         # inject patches -- note that we can't use set() to unique the
         # roots here, because the specs aren't complete, and the hash
         # function will loop forever.
         roots = [spec.root for spec in self._specs.values() if not spec.root.installed]
         roots = dict((id(r), r) for r in roots)
         for root in roots.values():
             spack.spec.Spec.inject_patches_variant(root)
@@ -2768,7 +2945,7 @@ def solve(self, specs, out=None, timers=False, stats=False, tests=False, setup_o
         Arguments:
             specs (list): List of ``Spec`` objects to solve for.
             out: Optionally write the generated ASP program to a file-like object.
-            timers (bool): Print out coarse fimers for different solve phases.
+            timers (bool): Print out coarse timers for different solve phases.
             stats (bool): Print out detailed stats from clingo.
             tests (bool or tuple): If True, concretize test dependencies for all packages.
                 If a tuple of package names, concretize test dependencies for named
File diff suppressed because it is too large

lib/spack/spack/solver/counter.py (new file, 159 lines)
@@ -0,0 +1,159 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
from typing import List, Set, Tuple

import spack.dependency
import spack.package_base

PossibleDependencies = Set[str]


class Counter:
    """Computes the possible packages and the maximum number of duplicates
    allowed for each of them.

    Args:
        specs: abstract specs to concretize
        tests: if True, add test dependencies to the list of possible packages
    """

    def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
        self.specs = specs

        self.link_run_types: Tuple[str, ...] = ("link", "run", "test")
        self.all_types: Tuple[str, ...] = spack.dependency.all_deptypes
        if not tests:
            self.link_run_types = ("link", "run")
            self.all_types = ("link", "run", "build")

        self._possible_dependencies: PossibleDependencies = set()
        self._possible_virtuals: Set[str] = set(x.name for x in specs if x.virtual)

    def possible_dependencies(self) -> PossibleDependencies:
        """Returns the list of possible dependencies"""
        self.ensure_cache_values()
        return self._possible_dependencies

    def possible_virtuals(self) -> Set[str]:
        """Returns the list of possible virtuals"""
        self.ensure_cache_values()
        return self._possible_virtuals

    def ensure_cache_values(self) -> None:
        """Ensure the cache values have been computed"""
        if self._possible_dependencies:
            return
        self._compute_cache_values()

    def possible_packages_facts(self, gen: "spack.solver.asp.PyclingoDriver", fn) -> None:
        """Emit facts associated with the possible packages"""
        raise NotImplementedError("must be implemented by derived classes")

    def _compute_cache_values(self):
        raise NotImplementedError("must be implemented by derived classes")


class NoDuplicatesCounter(Counter):
    def _compute_cache_values(self):
        result = spack.package_base.possible_dependencies(
            *self.specs, virtuals=self._possible_virtuals, deptype=self.all_types
        )
        self._possible_dependencies = set(result)

    def possible_packages_facts(self, gen, fn):
        gen.h2("Maximum number of nodes (packages)")
        for package_name in sorted(self.possible_dependencies()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()
        gen.h2("Maximum number of nodes (virtual packages)")
        for package_name in sorted(self.possible_virtuals()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()
        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self.possible_dependencies()):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()


class MinimalDuplicatesCounter(NoDuplicatesCounter):
    def __init__(self, specs, tests):
        super().__init__(specs, tests)
        self._link_run: PossibleDependencies = set()
        self._direct_build: PossibleDependencies = set()
        self._total_build: PossibleDependencies = set()
        self._link_run_virtuals: Set[str] = set()

    def _compute_cache_values(self):
        self._link_run = set(
            spack.package_base.possible_dependencies(
                *self.specs, virtuals=self._possible_virtuals, deptype=self.link_run_types
            )
        )
        self._link_run_virtuals.update(self._possible_virtuals)
        for x in self._link_run:
            current = spack.repo.PATH.get_pkg_class(x).dependencies_of_type("build")
            self._direct_build.update(current)

        self._total_build = set(
            spack.package_base.possible_dependencies(
                *self._direct_build, virtuals=self._possible_virtuals, deptype=self.all_types
            )
        )
        self._possible_dependencies = set(self._link_run) | set(self._total_build)

    def possible_packages_facts(self, gen, fn):
        build_tools = set(spack.repo.PATH.packages_with_tags("build-tools"))
        gen.h2("Packages with at most a single node")
        for package_name in sorted(self.possible_dependencies() - build_tools):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()

        gen.h2("Packages with at multiple possible nodes (build-tools)")
        for package_name in sorted(self.possible_dependencies() & build_tools):
            gen.fact(fn.max_dupes(package_name, 2))
            gen.fact(fn.multiple_unification_sets(package_name))
        gen.newline()

        gen.h2("Maximum number of nodes (virtual packages)")
        for package_name in sorted(self.possible_virtuals()):
            gen.fact(fn.max_dupes(package_name, 1))
        gen.newline()

        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self._link_run):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()


class FullDuplicatesCounter(MinimalDuplicatesCounter):
    def possible_packages_facts(self, gen, fn):
        build_tools = set(spack.repo.PATH.packages_with_tags("build-tools"))
        counter = collections.Counter(
            list(self._link_run) + list(self._total_build) + list(self._direct_build)
        )
        gen.h2("Maximum number of nodes")
        for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
            count = min(count, 2)
            gen.fact(fn.max_dupes(pkg, count))
        gen.newline()

        gen.h2("Build unification sets ")
        for name in sorted(self.possible_dependencies() & build_tools):
            gen.fact(fn.multiple_unification_sets(name))
        gen.newline()

        gen.h2("Possible package in link-run subDAG")
        for name in sorted(self._link_run):
            gen.fact(fn.possible_in_link_run(name))
        gen.newline()

        counter = collections.Counter(
            list(self._link_run_virtuals) + list(self._possible_virtuals)
        )
        gen.h2("Maximum number of virtual nodes")
        for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
            gen.fact(fn.max_dupes(pkg, count))
        gen.newline()
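The three counters above trade duplicate-node freedom against problem size: one node per package, extra nodes only for build tools, or per-package counts capped at two. A sketch of how a caller might choose among them (the factory and strategy names below are hypothetical; the asp.py diff that actually wires these classes up is suppressed above):

from spack.solver.counter import (
    FullDuplicatesCounter,
    MinimalDuplicatesCounter,
    NoDuplicatesCounter,
)


def make_counter(specs, tests, strategy="none"):
    # Hypothetical strategy names; pick the cheapest counter that still
    # allows the duplicates the solve needs.
    if strategy == "full":
        return FullDuplicatesCounter(specs, tests=tests)
    if strategy == "minimal":
        return MinimalDuplicatesCounter(specs, tests=tests)
    return NoDuplicatesCounter(specs, tests=tests)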
lib/spack/spack/solver/cycle_detection.lp (new file, 21 lines)
@@ -0,0 +1,21 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Avoid cycles in the DAG
%
% Some combinations of conditional dependencies can result in cycles;
% this ensures that we solve around them. Note that these rules are quite
% demanding on both grounding and solving, since they need to compute and
% consider all possible paths between pairs of nodes.
%=============================================================================

#program no_cycle.
path(Parent, Child) :- depends_on(Parent, Child).
path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
:- path(A, A).

#defined depends_on/2.
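For readers less familiar with ASP: the two path rules are a transitive closure over depends_on facts, and the integrity constraint rejects any model in which a node can reach itself. A rough Python equivalent of the same check (hypothetical helper, not part of the diff):

def has_cycle(depends_on):
    """depends_on maps each parent to the set of its direct children."""
    # path(P, C) :- depends_on(P, C).
    path = {parent: set(children) for parent, children in depends_on.items()}
    changed = True
    while changed:  # path(P, D) :- path(P, A), depends_on(A, D).
        changed = False
        for reachable in path.values():
            new = set().union(*(path.get(mid, set()) for mid in reachable))
            if not new.issubset(reachable):
                reachable |= new
                changed = True
    # :- path(A, A). (any model with a self-reaching node is rejected)
    return any(node in reachable for node, reachable in path.items())


assert has_cycle({"a": {"b"}, "b": {"c"}, "c": {"a"}})
assert not has_cycle({"a": {"b"}, "b": {"c"}, "c": set()})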
lib/spack/spack/solver/heuristic.lp (new file, 44 lines)
@@ -0,0 +1,44 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Heuristic to speed-up solves
%=============================================================================


%-----------------
% Domain heuristic
%-----------------
#heuristic attr("hash", node(0, Package), Hash) : literal(_, "root", Package). [45, init]
#heuristic attr("root", node(0, Package)) : literal(_, "root", Package). [45, true]
#heuristic attr("node", node(0, Package)) : literal(_, "root", Package). [45, true]
#heuristic attr("node", node(0, Package)) : literal(_, "node", Package). [45, true]

% Root node
#heuristic attr("version", node(0, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
#heuristic version_weight(node(0, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
#heuristic attr("variant_value", node(0, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("root", node(0, Package)). [35, true]
#heuristic attr("node_target", node(0, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("root", node(0, Package)). [35, true]
#heuristic node_target_weight(node(0, Package), 0) : attr("root", node(0, Package)). [35, true]
#heuristic node_compiler(node(0, Package), CompilerID) : default_compiler_preference(ID, 0), compiler_id(ID), attr("root", node(0, Package)). [35, true]

% Providers
#heuristic attr("node", node(0, Package)) : default_provider_preference(Virtual, Package, 0), possible_in_link_run(Package). [30, true]

% node(ID, _)
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)). [25-5*ID, true]
#heuristic version_weight(node(ID, Package), ID) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)). [25-5*ID, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)). [25-5*ID, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, ID)), attr("node", node(ID, Package)). [25-5*ID, true]
#heuristic node_target_weight(node(ID, Package), ID) : attr("node", node(ID, Package)). [25-5*ID, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(ID, ID), compiler_id(ID), attr("node", node(ID, Package)). [25-5*ID, true]

% node(ID, _), split build dependencies
#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
#heuristic version_weight(node(ID, Package), ID) : pkg_fact(Package, version_declared(Version, ID)), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, ID)), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
#heuristic node_target_weight(node(ID, Package), ID) : attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
#heuristic node_compiler(node(ID, Package), CompilerID) : default_compiler_preference(ID, ID), compiler_id(ID), attr("node", node(ID, Package)), multiple_unification_sets(Package). [25, true]
@@ -3,9 +3,11 @@
 %
 % SPDX-License-Identifier: (Apache-2.0 OR MIT)

 %=============================================================================
 % OS compatibility rules for reusing solves.
+% os_compatible(RecentOS, OlderOS)
+% OlderOS binaries can be used on RecentOS
 %=============================================================================

 % macOS
 os_compatible("monterey", "bigsur").
lib/spack/spack/solver/when_possible.lp (new file, 27 lines)
@@ -0,0 +1,27 @@
% Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)

%=============================================================================
% Minimize the number of literals that are not solved
%
% This minimization is used for the "when_possible" concretization mode,
% otherwise we assume that all literals must be solved.
%=============================================================================

% Give clingo the choice to solve an input spec or not
{ solve_literal(ID) } :- literal(ID).
literal_not_solved(ID) :- not solve_literal(ID), literal(ID).

% Make a problem with "zero literals solved" unsat. This is to trigger
% looking for solutions to the ASP problem with "errors", which results
% in better reporting for users. See #30669 for details.
1 { solve_literal(ID) : literal(ID) }.

opt_criterion(300, "number of input specs not concretized").
#minimize{ 0@300: #true }.
#minimize { 1@300,ID : literal_not_solved(ID) }.

#heuristic literal_solved(ID) : literal(ID). [1, sign]
#heuristic literal_solved(ID) : literal(ID). [50, init]
@@ -1299,7 +1299,7 @@ def __init__(self, spec, name, query_parameters):
         original_spec = getattr(spec, "wrapped_obj", spec)
         self.wrapped_obj = original_spec
         self.token = original_spec, name, query_parameters
-        is_virtual = spack.repo.path.is_virtual(name)
+        is_virtual = spack.repo.PATH.is_virtual(name)
         self.last_query = QueryState(
             name=name, extra_parameters=query_parameters, isvirtual=is_virtual
         )

@@ -1733,7 +1733,7 @@ def package(self):
                 self.name
             )
         if not self._package:
-            self._package = spack.repo.path.get(self)
+            self._package = spack.repo.PATH.get(self)
         return self._package

     @property
@@ -1741,11 +1741,11 @@ def package_class(self):
         """Internal package call gets only the class object for a package.
         Use this to just get package metadata.
         """
-        return spack.repo.path.get_pkg_class(self.fullname)
+        return spack.repo.PATH.get_pkg_class(self.fullname)

     @property
     def virtual(self):
-        return spack.repo.path.is_virtual(self.name)
+        return spack.repo.PATH.is_virtual(self.name)

     @property
     def concrete(self):

@@ -2272,7 +2272,7 @@ def override(init_spec, change_spec):
     # TODO: this doesn't account for the case where the changed spec
     # (and the user spec) have dependencies
     new_spec = init_spec.copy()
-    package_cls = spack.repo.path.get_pkg_class(new_spec.name)
+    package_cls = spack.repo.PATH.get_pkg_class(new_spec.name)
     if change_spec.versions and not change_spec.versions == vn.any_version:
         new_spec.versions = change_spec.versions
     for variant, value in change_spec.variants.items():

@@ -2546,7 +2546,7 @@ def validate_detection(self):
         assert isinstance(self.extra_attributes, collections.abc.Mapping), msg

         # Validate the spec calling a package specific method
-        pkg_cls = spack.repo.path.get_pkg_class(self.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(self.name)
         validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
         validate_fn(self, self.extra_attributes)

@@ -2645,7 +2645,7 @@ def _expand_virtual_packages(self, concretizer):
         """
         # Make an index of stuff this spec already provides
         self_index = spack.provider_index.ProviderIndex(
-            repository=spack.repo.path, specs=self.traverse(), restrict=True
+            repository=spack.repo.PATH, specs=self.traverse(), restrict=True
         )
         changed = False
         done = False

@@ -2785,7 +2785,7 @@ def _old_concretize(self, tests=False, deprecation_warning=True):
         visited_user_specs = set()
         for dep in self.traverse():
             visited_user_specs.add(dep.name)
-            pkg_cls = spack.repo.path.get_pkg_class(dep.name)
+            pkg_cls = spack.repo.PATH.get_pkg_class(dep.name)
             visited_user_specs.update(x.name for x in pkg_cls(dep).provided)

         extra = set(user_spec_deps.keys()).difference(visited_user_specs)

@@ -2868,7 +2868,7 @@ def inject_patches_variant(root):
             # we can do it as late as possible to allow as much
             # compatibility across repositories as possible.
             if s.namespace is None:
-                s.namespace = spack.repo.path.repo_for_pkg(s.name).namespace
+                s.namespace = spack.repo.PATH.repo_for_pkg(s.name).namespace

             if s.concrete:
                 continue

@@ -2926,7 +2926,7 @@ def ensure_external_path_if_external(external_spec):

         # Get the path from the module the package can override the default
         # (this is mostly needed for Cray)
-        pkg_cls = spack.repo.path.get_pkg_class(external_spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(external_spec.name)
         package = pkg_cls(external_spec)
         external_spec.external_path = getattr(
             package, "external_prefix", md.path_from_modules(external_spec.external_modules)

@@ -2983,9 +2983,12 @@ def _new_concretize(self, tests=False):
             providers = [spec.name for spec in answer.values() if spec.package.provides(name)]
             name = providers[0]

-        assert name in answer
+        node = spack.solver.asp.SpecBuilder.make_node(pkg=name)
+        assert (
+            node in answer
+        ), f"cannot find {name} in the list of specs {','.join([n.pkg for n in answer.keys()])}"

-        concretized = answer[name]
+        concretized = answer[node]
         self._dup(concretized)

     def concretize(self, tests=False):

@@ -3200,7 +3203,7 @@ def _find_provider(self, vdep, provider_index):
         Raise an exception if there is a conflicting virtual
         dependency already in this spec.
         """
-        assert spack.repo.path.is_virtual_safe(vdep.name), vdep
+        assert spack.repo.PATH.is_virtual_safe(vdep.name), vdep

         # note that this defensively copies.
         providers = provider_index.providers_for(vdep)

@@ -3266,7 +3269,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
             # If it's a virtual dependency, try to find an existing
             # provider in the spec, and merge that.
             virtuals = ()
-            if spack.repo.path.is_virtual_safe(dep.name):
+            if spack.repo.PATH.is_virtual_safe(dep.name):
                 virtuals = (dep.name,)
                 visited.add(dep.name)
                 provider = self._find_provider(dep, provider_index)
@@ -3274,11 +3277,11 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
                 dep = provider
         else:
             index = spack.provider_index.ProviderIndex(
-                repository=spack.repo.path, specs=[dep], restrict=True
+                repository=spack.repo.PATH, specs=[dep], restrict=True
             )
             items = list(spec_deps.items())
             for name, vspec in items:
-                if not spack.repo.path.is_virtual_safe(vspec.name):
+                if not spack.repo.PATH.is_virtual_safe(vspec.name):
                     continue

                 if index.providers_for(vspec):

@@ -3428,7 +3431,7 @@ def normalize(self, force=False, tests=False, user_spec_deps=None):
         # Initialize index of virtual dependency providers if
         # concretize didn't pass us one already
         provider_index = spack.provider_index.ProviderIndex(
-            repository=spack.repo.path, specs=[s for s in all_spec_deps.values()], restrict=True
+            repository=spack.repo.PATH, specs=[s for s in all_spec_deps.values()], restrict=True
         )

         # traverse the package DAG and fill out dependencies according

@@ -3459,7 +3462,7 @@ def validate_or_raise(self):
         for spec in self.traverse():
             # raise an UnknownPackageError if the spec's package isn't real.
             if (not spec.virtual) and spec.name:
-                spack.repo.path.get_pkg_class(spec.fullname)
+                spack.repo.PATH.get_pkg_class(spec.fullname)

             # validate compiler in addition to the package name.
             if spec.compiler:

@@ -3519,7 +3522,8 @@ def update_variant_validate(self, variant_name, values):
         for value in values:
             if self.variants.get(variant_name):
                 msg = (
-                    "Cannot append a value to a single-valued " "variant with an already set value"
+                    f"cannot append the new value '{value}' to the single-valued "
+                    f"variant '{self.variants[variant_name]}'"
                 )
                 assert pkg_variant.multi, msg
                 self.variants[variant_name].append(value)
@@ -3527,7 +3531,7 @@ def update_variant_validate(self, variant_name, values):
             variant = pkg_variant.make_variant(value)
             self.variants[variant_name] = variant

-        pkg_cls = spack.repo.path.get_pkg_class(self.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(self.name)
         pkg_variant.validate_or_raise(self.variants[variant_name], pkg_cls)

     def constrain(self, other, deps=True):

@@ -3732,8 +3736,8 @@ def _intersects(self, other: "Spec", deps: bool = True) -> bool:
         if self.name != other.name and self.name and other.name:
             if self.virtual and other.virtual:
                 # Two virtual specs intersect only if there are providers for both
-                lhs = spack.repo.path.providers_for(str(self))
-                rhs = spack.repo.path.providers_for(str(other))
+                lhs = spack.repo.PATH.providers_for(str(self))
+                rhs = spack.repo.PATH.providers_for(str(other))
                 intersection = [s for s in lhs if any(s.intersects(z) for z in rhs)]
                 return bool(intersection)

@@ -3742,7 +3746,7 @@ def _intersects(self, other: "Spec", deps: bool = True) -> bool:
             virtual_spec, non_virtual_spec = (self, other) if self.virtual else (other, self)
             try:
                 # Here we might get an abstract spec
-                pkg_cls = spack.repo.path.get_pkg_class(non_virtual_spec.fullname)
+                pkg_cls = spack.repo.PATH.get_pkg_class(non_virtual_spec.fullname)
                 pkg = pkg_cls(non_virtual_spec)
             except spack.repo.UnknownEntityError:
                 # If we can't get package info on this spec, don't treat

@@ -3813,10 +3817,10 @@ def _intersects_dependencies(self, other):

         # For virtual dependencies, we need to dig a little deeper.
         self_index = spack.provider_index.ProviderIndex(
-            repository=spack.repo.path, specs=self.traverse(), restrict=True
+            repository=spack.repo.PATH, specs=self.traverse(), restrict=True
         )
         other_index = spack.provider_index.ProviderIndex(
-            repository=spack.repo.path, specs=other.traverse(), restrict=True
+            repository=spack.repo.PATH, specs=other.traverse(), restrict=True
         )

         # This handles cases where there are already providers for both vpkgs

@@ -3857,7 +3861,7 @@ def _satisfies(self, other: "Spec", deps: bool = True) -> bool:
         if not self.virtual and other.virtual:
             try:
                 # Here we might get an abstract spec
-                pkg_cls = spack.repo.path.get_pkg_class(self.fullname)
+                pkg_cls = spack.repo.PATH.get_pkg_class(self.fullname)
                 pkg = pkg_cls(self)
             except spack.repo.UnknownEntityError:
                 # If we can't get package info on this spec, don't treat

@@ -3939,8 +3943,8 @@ def patches(self):
         # translate patch sha256sums to patch objects by consulting the index
         if self._patches_assigned():
             for sha256 in self.variants["patches"]._patches_in_order_of_appearance:
-                index = spack.repo.path.patch_index
-                pkg_cls = spack.repo.path.get_pkg_class(self.name)
+                index = spack.repo.PATH.patch_index
+                pkg_cls = spack.repo.PATH.get_pkg_class(self.name)
                 patch = index.patch_for_package(sha256, pkg_cls)
                 self._patches.append(patch)
@@ -326,7 +326,7 @@ def __init__(
         self.keep = keep

         # File lock for the stage directory. We use one file for all
-        # stage locks. See spack.database.Database.prefix_lock for
+        # stage locks. See spack.database.Database.prefix_locker.lock for
         # details on this approach.
         self._lock = None
         if lock:

@@ -484,7 +484,7 @@ def fetch(self, mirror_only=False, err_msg=None):

         if self.default_fetcher.cachable:
             for rel_path in reversed(list(self.mirror_paths)):
-                cache_fetcher = spack.caches.fetch_cache.fetcher(
+                cache_fetcher = spack.caches.FETCH_CACHE.fetcher(
                     rel_path, digest, expand=expand, extension=extension
                 )
                 fetchers.insert(0, cache_fetcher)

@@ -577,7 +577,7 @@ def check(self):
         self.fetcher.check()

     def cache_local(self):
-        spack.caches.fetch_cache.store(self.fetcher, self.mirror_paths.storage_path)
+        spack.caches.FETCH_CACHE.store(self.fetcher, self.mirror_paths.storage_path)

     def cache_mirror(self, mirror, stats):
         """Perform a fetch if the resource is not already cached
@@ -25,13 +25,14 @@
 from typing import Any, Callable, Dict, Generator, List, Optional, Union

 import llnl.util.lang
-import llnl.util.tty as tty
+from llnl.util import tty

 import spack.config
 import spack.database
 import spack.directory_layout
 import spack.error
 import spack.paths
 import spack.spec
 import spack.util.path

 #: default installation root, relative to the Spack install path

@@ -134,18 +135,21 @@ def parse_install_tree(config_dict):
 class Store:
     """A store is a path full of installed Spack packages.

-    Stores consist of packages installed according to a
-    ``DirectoryLayout``, along with an index, or _database_ of their
-    contents. The directory layout controls what paths look like and how
-    Spack ensures that each unique spec gets its own unique directory (or
-    not, though we don't recommend that). The database is a single file
-    that caches metadata for the entire Spack installation. It prevents
-    us from having to spider the install tree to figure out what's there.
+    Stores consist of packages installed according to a ``DirectoryLayout``, along with a database
+    of their contents.
+
+    The directory layout controls what paths look like and how Spack ensures that each unique spec
+    gets its own unique directory (or not, though we don't recommend that).
+
+    The database is a single file that caches metadata for the entire Spack installation. It
+    prevents us from having to spider the install tree to figure out what's there.
+
+    The store is also able to lock installation prefixes, and to mark installation failures.

     Args:
         root: path to the root of the install tree
-        unpadded_root: path to the root of the install tree without padding.
-            The sbang script has to be installed here to work with padded roots
+        unpadded_root: path to the root of the install tree without padding. The sbang script has
+            to be installed here to work with padded roots
         projections: expression according to guidelines that describes how to construct a path to
            a package prefix in this store
         hash_length: length of the hashes used in the directory layout. Spec hash suffixes will be

@@ -170,6 +174,19 @@ def __init__(
         self.upstreams = upstreams
         self.lock_cfg = lock_cfg
         self.db = spack.database.Database(root, upstream_dbs=upstreams, lock_cfg=lock_cfg)
+
+        timeout_format_str = (
+            f"{str(lock_cfg.package_timeout)}s" if lock_cfg.package_timeout else "No timeout"
+        )
+        tty.debug("PACKAGE LOCK TIMEOUT: {0}".format(str(timeout_format_str)))
+
+        self.prefix_locker = spack.database.SpecLocker(
+            spack.database.prefix_lock_path(root), default_timeout=lock_cfg.package_timeout
+        )
+        self.failure_tracker = spack.database.FailureTracker(
+            self.root, default_timeout=lock_cfg.package_timeout
+        )
+
         self.layout = spack.directory_layout.DirectoryLayout(
             root, projections=projections, hash_length=hash_length
         )
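The new prefix_locker and failure_tracker attributes move prefix locking and failure tracking onto the Store itself. Constructing a Store by hand is rare; the usual entry point is the factory shown in the next hunk, e.g.:

import spack.config
import spack.store

# Mirrors _create_global() below: build a Store from the global configuration.
store = spack.store.create(configuration=spack.config.CONFIG)
print(store.root)  # root of the install tree parsed from config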
@@ -195,7 +212,7 @@ def create(configuration: ConfigurationType) -> Store:
     Args:
         configuration: configuration to create a store.
     """
-    configuration = configuration or spack.config.config
+    configuration = configuration or spack.config.CONFIG
     config_dict = configuration.get("config")
     root, unpadded_root, projections = parse_install_tree(config_dict)
     hash_length = configuration.get("config:install_hash_length")

@@ -217,7 +234,7 @@ def create(configuration: ConfigurationType) -> Store:


 def _create_global() -> Store:
-    result = create(configuration=spack.config.config)
+    result = create(configuration=spack.config.CONFIG)
     return result


@@ -355,10 +372,10 @@ def use_store(

     # Swap the store with the one just constructed and return it
     ensure_singleton_created()
-    spack.config.config.push_scope(
+    spack.config.CONFIG.push_scope(
         spack.config.InternalConfigScope(name=scope_name, data={"config": {"install_tree": data}})
     )
-    temporary_store = create(configuration=spack.config.config)
+    temporary_store = create(configuration=spack.config.CONFIG)
     original_store, STORE = STORE, temporary_store

     try:
@@ -366,7 +383,7 @@ def use_store(
     finally:
         # Restore the original store
         STORE = original_store
-        spack.config.config.remove_scope(scope_name=scope_name)
+        spack.config.CONFIG.remove_scope(scope_name=scope_name)


 class MatchError(spack.error.SpackError):
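use_store keeps its shape; only the configuration global it pushes scopes onto was renamed. A minimal usage sketch (path hypothetical), matching the bootstrap tests further down:

import spack.store

user_path = "/tmp/spack-test-store"  # hypothetical location
with spack.store.use_store(user_path):
    # Inside the block, the global store points at the temporary root ...
    assert spack.store.STORE.root == user_path
# ... and the original store and config scope are restored on exit.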
@@ -94,15 +94,15 @@ class TestState:

     def __init__(self):
         if _SERIALIZE:
-            self.config = spack.config.config
+            self.config = spack.config.CONFIG
             self.platform = spack.platforms.host
             self.test_patches = store_patches()
             self.store = spack.store.STORE

     def restore(self):
         if _SERIALIZE:
-            spack.config.config = self.config
-            spack.repo.path = spack.repo.create(self.config)
+            spack.config.CONFIG = self.config
+            spack.repo.PATH = spack.repo.create(self.config)
             spack.platforms.host = self.platform
             spack.store.STORE = self.store
             self.test_patches.restore()
@@ -32,10 +32,10 @@ def packages_with_tags(tags, installed, skip_empty):
     """
     tag_pkgs = collections.defaultdict(lambda: list)
     spec_names = _get_installed_package_names() if installed else []
-    keys = spack.repo.path.tag_index if tags is None else tags
+    keys = spack.repo.PATH.tag_index if tags is None else tags
     for tag in keys:
         packages = [
-            name for name in spack.repo.path.tag_index[tag] if not installed or name in spec_names
+            name for name in spack.repo.PATH.tag_index[tag] if not installed or name in spec_names
         ]
         if packages or not skip_empty:
             tag_pkgs[tag] = packages
@@ -199,15 +199,11 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
     ],
 )
 @pytest.mark.usefixtures("mock_packages", "config")
+@pytest.mark.only_clingo("Fixing the parser broke this test for the original concretizer.")
 def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
     # Monkeypatch so that all concretization is done as if the machine is core2
     monkeypatch.setattr(spack.platforms.test.Test, "default", "core2")

-    # use foobar=bar to make the problem simpler for the old concretizer
-    # the new concretizer should not need that help
-    if spack.config.get("config:concretizer") == "original":
-        pytest.skip("Fixing the parser broke this test for the original concretizer.")
-
     spec_str = "a %%gcc@10 foobar=bar target=%s ^b target=%s" % (
         root_target_range,
         dep_target_range,
@@ -37,7 +37,7 @@
 from spack.paths import test_path
 from spack.spec import Spec

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")

 mirror_cmd = spack.main.SpackCommand("mirror")
 install_cmd = spack.main.SpackCommand("install")
@@ -51,7 +51,7 @@
 def cache_directory(tmpdir):
     fetch_cache_dir = tmpdir.ensure("fetch_cache", dir=True)
     fsc = spack.fetch_strategy.FsCache(str(fetch_cache_dir))
-    spack.config.caches, old_cache_path = fsc, spack.caches.fetch_cache
+    spack.config.caches, old_cache_path = fsc, spack.caches.FETCH_CACHE

     yield spack.config.caches

@@ -115,8 +115,8 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl
         ]
     )

-    spack.config.config, old_config = cfg, spack.config.config
-    spack.config.config.set("repos", [spack.paths.mock_packages_path])
+    spack.config.CONFIG, old_config = cfg, spack.config.CONFIG
+    spack.config.CONFIG.set("repos", [spack.paths.mock_packages_path])
     njobs = spack.config.get("config:build_jobs")
     if not njobs:
         spack.config.set("config:build_jobs", 4, scope="user")
@@ -138,9 +138,9 @@ def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutabl
     if not timeout:
         spack.config.set("config:connect_timeout", 10, scope="user")

-    yield spack.config.config
+    yield spack.config.CONFIG

-    spack.config.config = old_config
+    spack.config.CONFIG = old_config
     mutable_dir.remove()
@@ -26,11 +26,11 @@ def test_store_is_restored_correctly_after_bootstrap(mutable_config, tmpdir):
     user_path = str(tmpdir.join("store"))
     with spack.store.use_store(user_path):
         assert spack.store.STORE.root == user_path
-        assert spack.config.config.get("config:install_tree:root") == user_path
+        assert spack.config.CONFIG.get("config:install_tree:root") == user_path
         with spack.bootstrap.ensure_bootstrap_configuration():
             assert spack.store.STORE.root == spack.bootstrap.config.store_path()
         assert spack.store.STORE.root == user_path
-        assert spack.config.config.get("config:install_tree:root") == user_path
+        assert spack.config.CONFIG.get("config:install_tree:root") == user_path


 @pytest.mark.regression("38963")
@@ -40,11 +40,11 @@ def test_store_padding_length_is_zero_during_bootstrapping(mutable_config, tmpdi
     """
     user_path = str(tmpdir.join("store"))
     with spack.store.use_store(user_path, extra_data={"padded_length": 512}):
-        assert spack.config.config.get("config:install_tree:padded_length") == 512
+        assert spack.config.CONFIG.get("config:install_tree:padded_length") == 512
         with spack.bootstrap.ensure_bootstrap_configuration():
             assert spack.store.STORE.root == spack.bootstrap.config.store_path()
-            assert spack.config.config.get("config:install_tree:padded_length") == 0
-        assert spack.config.config.get("config:install_tree:padded_length") == 512
+            assert spack.config.CONFIG.get("config:install_tree:padded_length") == 0
+        assert spack.config.CONFIG.get("config:install_tree:padded_length") == 512


 @pytest.mark.regression("38963")
@@ -54,15 +54,15 @@ def test_install_tree_customization_is_respected(mutable_config, tmp_path):
     """
     spack.store.reinitialize()
     store_dir = tmp_path / "store"
-    spack.config.config.set("config:install_tree:root", str(store_dir))
+    spack.config.CONFIG.set("config:install_tree:root", str(store_dir))
     with spack.bootstrap.ensure_bootstrap_configuration():
         assert spack.store.STORE.root == spack.bootstrap.config.store_path()
         assert (
-            spack.config.config.get("config:install_tree:root")
+            spack.config.CONFIG.get("config:install_tree:root")
             == spack.bootstrap.config.store_path()
         )
-        assert spack.config.config.get("config:install_tree:padded_length") == 0
-    assert spack.config.config.get("config:install_tree:root") == str(store_dir)
+        assert spack.config.CONFIG.get("config:install_tree:padded_length") == 0
+    assert spack.config.CONFIG.get("config:install_tree:root") == str(store_dir)
     assert spack.store.STORE.root == str(store_dir)


@@ -185,12 +185,12 @@ def test_bootstrap_custom_store_in_environment(mutable_config, tmpdir):

 def test_nested_use_of_context_manager(mutable_config):
     """Test nested use of the context manager"""
-    user_config = spack.config.config
+    user_config = spack.config.CONFIG
     with spack.bootstrap.ensure_bootstrap_configuration():
-        assert spack.config.config != user_config
+        assert spack.config.CONFIG != user_config
         with spack.bootstrap.ensure_bootstrap_configuration():
-            assert spack.config.config != user_config
-    assert spack.config.config == user_config
+            assert spack.config.CONFIG != user_config
+    assert spack.config.CONFIG == user_config


 @pytest.mark.parametrize("expected_missing", [False, True])
@@ -5,7 +5,6 @@

 import os
 import os.path
-import sys

 import pytest

@@ -16,7 +15,7 @@

 install = spack.main.SpackCommand("install")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+pytestmark = pytest.mark.not_on_windows("does not run on windows")


 def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir):

@@ -6,7 +6,6 @@
 import os
 import platform
 import posixpath
-import sys

 import pytest

@@ -119,7 +118,7 @@ def __call__(self, *args, **kwargs):
     return mock_module_cmd


-@pytest.mark.skipif(sys.platform == "win32", reason="Static to Shared not supported on Win (yet)")
+@pytest.mark.not_on_windows("Static to Shared not supported on Win (yet)")
 def test_static_to_shared_library(build_environment):
     os.environ["SPACK_TEST_COMMAND"] = "dump-args"


@@ -5,7 +5,6 @@

 import glob
 import os
-import sys

 import py.path
 import pytest
@@ -23,8 +22,6 @@

 DATA_PATH = os.path.join(spack.paths.test_path, "data")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-

 @pytest.fixture()
 def concretize_and_setup(default_mock_concretization):
@@ -45,6 +42,7 @@ def _func(dir_str):
     return _func


+@pytest.mark.not_on_windows("make not available on Windows")
 @pytest.mark.usefixtures("config", "mock_packages", "working_env")
 class TestTargets:
     @pytest.mark.parametrize(
@@ -93,6 +91,7 @@ def test_negative_ninja_check(self, input_dir, test_dir, concretize_and_setup):
             s.package._if_ninja_target_execute("check")


+@pytest.mark.not_on_windows("autotools not available on windows")
 @pytest.mark.usefixtures("config", "mock_packages")
 class TestAutotoolsPackage:
     def test_with_or_without(self, default_mock_concretization):

@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os.path
-import sys

 import pytest

@@ -106,10 +105,7 @@ def test_old_style_compatibility_with_super(spec_str, method_name, expected):
     assert value == expected


-@pytest.mark.skipif(
-    sys.platform == "win32",
-    reason="log_ouput cannot currently be used outside of subprocess on Windows",
-)
+@pytest.mark.not_on_windows("log_ouput cannot currently be used outside of subprocess on Windows")
 @pytest.mark.regression("33928")
 @pytest.mark.usefixtures("builder_test_repository", "config", "working_env")
 @pytest.mark.disable_clean_stage_check
@@ -153,7 +149,7 @@ def test_monkey_patching_test_log_file():

 # Windows context manager's __exit__ fails with ValueError ("I/O operation
 # on closed file").
-@pytest.mark.skipif(sys.platform == "win32", reason="Does not run on windows")
+@pytest.mark.not_on_windows("Does not run on windows")
 def test_install_time_test_callback(tmpdir, config, mock_packages, mock_stage):
     """Confirm able to run stand-alone test as a post-install callback."""
     s = spack.spec.Spec("py-test-callback").concretized()

@@ -15,7 +15,7 @@ def test_build_request_errors(install_mockery):
         inst.BuildRequest("abc", {})

     spec = spack.spec.Spec("trivial-install-test-package")
-    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
     with pytest.raises(ValueError, match="must have a concrete spec"):
         inst.BuildRequest(pkg_cls(spec), {})
Some files were not shown because too many files have changed in this diff.