Compare commits
develop-20 ... develop-20
194 commits
SHA1:

1b1ed1b1fa ec0e51316b 533821e46f 6c5d125cb0 668fb1201f f7918fd8ab
fc1996e0fa ed3aaafd73 63bb2c9bad a67455707a 09ca71dbe0 ea082539e4
143146f4f3 ee6ae402aa 0b26b26821 c764f9b1ab db19d83ea7 24256be6d6
633723236e 381f31e69e 9438cac219 85cf66f650 f3c080e546 37634f8b08
2ae8bbce9e b8bfaf65bf 7968cb7fa2 ebc2efdfd2 ff07fd5ccb 3f83ef6566
554ce7f063 23963779f4 45c5af10c3 532a37e7ba aeb9a92845 a3c7ad7669
b99288dcae 01b7cc5106 f5888d8127 77c838ca93 11e538d962 7d444038ee
c24471834b b1e33ae37b c36617f9da deadb64206 9eaa88e467 bd58801415
548a9de671 8e7c53a8ba 5e630174a1 175a65dfba 39d4c402d5 e51748ee8f
f9457fa80b 4cc2ca3e2e 3843001004 e24bb5dd1c f6013114eb bdca875eb3
af8c392de2 9aa3b4619b 3d733da70a cda99b792c 9834bad82e 3453259c98
ee243b84eb 5080e2cb45 f42ef7aea7 41793673d9 8b6a6982ee ee74ca6391
7165e70186 97d632a161 571919992d 99112ad2ad 75c70c395d 960bdfe612
97892bda18 cead6ef98d 5d70c0f100 361632fc4b 6576655137 feb26efecd
4752d1cde3 a07afa6e1a 7327d2913a 8f8a1f7f52 eb8d836e76 bad8495e16
43de7f4881 84585ac575 86f9d3865b 834e7b2b0a c14f23ddaa 49f3681a12
19e1d10cdf 7caf2a512d 1f6e3cc8cb 169c4245e0 ee1982010f dd396c4a76
235802013d e1f07e98ae 60aee6f535 2510dc9e6e 5607dd259b 7bd5d1fd3c
f6104cc3cb b54d286b4a ea9c488897 ba1d295023 27f04b3544 8cd9497522
ef544a3b6d 4eed832653 5996aaa4e3 4957607005 78bca131fb 045c5cea53
ea256145d9 0b2098850c d2df0a29ce 92e9daec9b d65437114a 9a6e98e729
6515c16432 2826ab36f0 c035512930 cfadba47d3 95391dfe94 382ba99631
f8e25c79bf 93b54b79d3 ff30efcebc 54514682d4 92a75717f0 b9be8e883e
da838a7d10 85e5fb9ab7 c0c300d773 6e933ac7df be679759be eace479b1e
2069a42ba3 41d2161b5b ba936574fc c0d0603baa edbf12cfa8 11b3dac705
a7a5a994dc 8a9a24ce1e f54974d66e 0b4631a774 e7fa6d99bf 03c0d74139
a14f4b5a02 3be565f49e df2938dfcf 5d8482598b f079e7fc34 3369acc050
26f4fc0f34 8c0551c1c0 59866cdb11 8d2a32f66d b28ae67369 b8590fbd05
9343b9524f bb0cec1530 d4f41b51f4 347acf3cc6 65224ad6bc 784d56ce05
b30523fdd8 b46e098696 20a7622602 d25f1059dd 9394fa403e 679c6a606d
27f378601e 832ddbdf6d 0286455e1d 4baf489460 56c7921430 c2288af55c
39cd2f3754 a941ab4acb 048cc711d6 63a5cf78ac bef03b9588 2859f0a7e1
c1b084d754 a8301709a8
.github/workflows/unit_tests.yaml (vendored), 1 change
@@ -165,6 +165,7 @@ jobs:
      - name: Install Python packages
        run: |
          pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
          pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
@@ -49,6 +49,7 @@ packages:
    pbs: [openpbs, torque]
    pil: [py-pillow]
    pkgconfig: [pkgconf, pkg-config]
    qmake: [qt-base, qt]
    rpc: [libtirpc]
    scalapack: [netlib-scalapack, amdscalapack]
    sycl: [hipsycl]
@@ -59,6 +60,7 @@
    xxd: [xxd-standalone, vim]
    yacc: [bison, byacc]
    ziglang: [zig]
    zlib-api: [zlib, zlib-ng+compat]
  permissions:
    read: world
    write: user
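The provider lists above map virtual packages (such as ``qmake`` and ``zlib-api``) to the concrete packages that may satisfy them. As a rough illustration of the two sides of that mapping, here is a minimal sketch in Spack's package DSL; the directives are real Spack API, but the class bodies are abbreviated stand-ins rather than the actual recipes:

```python
from spack.package import *


class ZlibNg(Package):
    """Abbreviated sketch of a provider of the zlib-api virtual."""

    variant("compat", default=True, description="Enable zlib API compatibility layer")
    # The virtual is only satisfied when the compatibility layer is built.
    provides("zlib-api", when="+compat")


class SomeConsumer(Package):
    """Abbreviated sketch of a consumer."""

    # Any configured provider (zlib, or zlib-ng built with +compat) can satisfy this.
    depends_on("zlib-api")
```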
@@ -32,9 +32,14 @@ can't be found. You can readily check if any prerequisite for using Spack is mis

Spack will take care of bootstrapping any missing dependency marked as [B]. Dependencies marked as [-] are instead required to be found on the system.

   % echo $?
   1

In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
are missing and it's giving detailed information on why they are needed and whether
they can be bootstrapped. Running a command that concretize a spec, like:
they can be bootstrapped. The return code of this command summarizes the results, if any
dependencies are missing the return code is ``1``, otherwise ``0``. Running a command that
concretizes a spec, like:

.. code-block:: console

@@ -44,7 +49,7 @@ they can be bootstrapped. Running a command that concretize a spec, like:
   ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
   [ ... ]

triggers the bootstrapping of clingo from pre-built binaries as expected.
automatically triggers the bootstrapping of clingo from pre-built binaries as expected.

Users can also bootstrap all the dependencies needed by Spack in a single command, which
might be useful to setup containers or other similar environments:
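Because the bootstrap status command now encodes missing dependencies in its exit status (``1`` if anything is missing, ``0`` otherwise), scripts can gate on it directly. A minimal sketch, assuming the ``spack bootstrap status`` invocation described in the documentation above:

```python
import subprocess
import sys

# Exit code 1 means at least one bootstrap dependency is missing, 0 means all were found.
result = subprocess.run(["spack", "bootstrap", "status"], capture_output=True, text=True)
if result.returncode != 0:
    print("Missing bootstrap dependencies:\n" + result.stdout, file=sys.stderr)
    sys.exit(1)
```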
@@ -104,11 +104,13 @@ Clone `spack-configs <https://github.com/spack/spack-configs>`_ repo and activat

`Intel oneAPI CPU environment <https://github.com/spack/spack-configs/blob/main/INTEL/CPU/spack.yaml>`_ contains applications tested and validated by Intel; this list is constantly extended. It currently supports:

- `Devito <https://www.devitoproject.org/>`_
- `GROMACS <https://www.gromacs.org/>`_
- `HPCG <https://www.hpcg-benchmark.org/>`_
- `HPL <https://netlib.org/benchmark/hpl/>`_
- `LAMMPS <https://www.lammps.org/#gsc.tab=0>`_
- `OpenFOAM <https://www.openfoam.com/>`_
- `Quantum Espresso <https://www.quantum-espresso.org/>`_
- `STREAM <https://www.cs.virginia.edu/stream/>`_
- `WRF <https://github.com/wrf-model/WRF>`_
@@ -32,7 +32,7 @@ By default, these phases run:

.. code-block:: console

   $ python configure.py --bindir ... --destdir ...
   $ sip-build --verbose --target-dir ...
   $ make
   $ make install
@@ -41,30 +41,30 @@ By default, these phases run:
Important files
^^^^^^^^^^^^^^^

Each SIP package comes with a custom ``configure.py`` build script,
written in Python. This script contains instructions to build the project.
Each SIP package comes with a custom configuration file written in Python.
For newer packages, this is called ``project.py``, while in older packages,
it may be called ``configure.py``. This script contains instructions to build
the project.

^^^^^^^^^^^^^^^^^^^^^^^^^
Build system dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^

``SIPPackage`` requires several dependencies. Python is needed to run
the ``configure.py`` build script, and to run the resulting Python
libraries. Qt is needed to provide the ``qmake`` command. SIP is also
needed to build the package. All of these dependencies are automatically
added via the base class
``SIPPackage`` requires several dependencies. Python and SIP are needed at build-time
to run the aforementioned configure script. Python is also needed at run-time to
actually use the installed Python library. And as we are building Python bindings
for C/C++ libraries, Python is also needed as a link dependency. All of these
dependencies are automatically added via the base class.

.. code-block:: python

   extends('python')
   extends("python", type=("build", "link", "run"))
   depends_on("py-sip", type="build")

   depends_on('qt', type='build')

   depends_on('py-sip', type='build')

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to ``configure.py``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to ``sip-build``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Each phase comes with a ``<phase_args>`` function that can be used to pass
arguments to that particular phase. For example, if you need to pass
@@ -73,10 +73,10 @@ arguments to the configure phase, you can use:

.. code-block:: python

   def configure_args(self):
       return ['--no-python-dbus']
       return ["--no-python-dbus"]

A list of valid options can be found by running ``python configure.py --help``.
A list of valid options can be found by running ``sip-build --help``.

^^^^^^^
Testing
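For reference, a recipe built on the updated ``SIPPackage`` base class only needs its usual metadata plus, optionally, ``configure_args``; the Python and SIP dependencies discussed above come from the base class. The package, URL, checksum, and flag below are hypothetical placeholders, not an existing Spack package:

```python
from spack.package import *


class PyHypotheticalWidgets(SIPPackage):
    """Hypothetical Python bindings built with sip-build."""

    homepage = "https://example.org/widgets"
    url = "https://example.org/widgets-1.0.tar.gz"

    version("1.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")

    def configure_args(self):
        # Extra flags appended after the default `sip-build --verbose --target-dir ...`;
        # the flag mirrors the documentation example and is only a placeholder.
        return ["--no-python-dbus"]
```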
@@ -2243,7 +2243,7 @@ looks like this:
    url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"

    version("1.0.1h", md5="8d6d684a9430d5cc98a62a5d8fbda8cf")
    depends_on("zlib")
    depends_on("zlib-api")

    parallel = False
@@ -1,13 +1,13 @@
sphinx==6.2.1
sphinxcontrib-programoutput==0.17
sphinx_design==0.4.1
sphinx_design==0.5.0
sphinx-rtd-theme==1.2.2
python-levenshtein==0.21.1
docutils==0.18.1
pygments==2.15.1
urllib3==2.0.3
pygments==2.16.1
urllib3==2.0.4
pytest==7.4.0
isort==5.12.0
black==23.1.0
flake8==6.0.0
mypy==1.4.1
black==23.7.0
flake8==6.1.0
mypy==1.5.0
@@ -217,13 +217,7 @@ file would live in the ``build_cache`` directory of a binary mirror::
       "binary_cache_checksum": {
           "hash_algorithm": "sha256",
           "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
       },

       "buildinfo": {
           "relative_prefix":
           "linux-ubuntu18.04-haswell/gcc-7.5.0/zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow",
           "relative_rpaths": false
       }
     }
   }

   -----BEGIN PGP SIGNATURE-----
@@ -143,7 +143,7 @@ def get_fh(self, path: str) -> IO:
    def release_by_stat(self, stat):
        key = (stat.st_dev, stat.st_ino, os.getpid())
        open_file = self._descriptors.get(key)
        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_inode
        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_ino

        open_file.refs -= 1
        if not open_file.refs:
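The fix above corrects the error message to use the real ``st_ino`` attribute; the cache itself keys open file handles by device and inode so that the same file reached through different paths maps to one entry. A standalone sketch of the idea, with illustrative names rather than Spack's own:

```python
import os


class DescriptorCache:
    """Illustrative cache of open files keyed by (device, inode, pid)."""

    def __init__(self):
        self._descriptors = {}

    def key_for(self, path: str):
        st = os.stat(path)
        # st_dev and st_ino together identify a file on POSIX systems;
        # the pid keeps per-process entries separate.
        return (st.st_dev, st.st_ino, os.getpid())

    def get(self, path: str):
        return self._descriptors.get(self.key_for(path))
```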
@@ -12,6 +12,7 @@
import traceback
from datetime import datetime
from sys import platform as _platform
from typing import NoReturn

if _platform != "win32":
    import fcntl
@@ -244,7 +245,7 @@ def warn(message, *args, **kwargs):
    info("Warning: " + str(message), *args, **kwargs)


def die(message, *args, **kwargs):
def die(message, *args, **kwargs) -> NoReturn:
    kwargs.setdefault("countback", 4)
    error(message, *args, **kwargs)
    sys.exit(1)
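Annotating ``die()`` with ``typing.NoReturn`` tells type checkers that control never continues past a call to it, so code after the call is treated as unreachable and values can be narrowed accordingly. A minimal sketch of the pattern, independent of Spack's own helpers:

```python
import sys
from typing import NoReturn, Optional


def die(message: str) -> NoReturn:
    print("Error: " + message, file=sys.stderr)
    sys.exit(1)


def require(value: Optional[str]) -> str:
    if value is None:
        die("value is required")
    # A checker such as mypy knows die() never returns, so value is str here.
    return value
```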
@@ -286,7 +286,7 @@ def _check_build_test_callbacks(pkgs, error_cls):
    """Ensure stand-alone test method is not included in build-time callbacks"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)

        # TODO (post-34236): "test*"->"test_*" once remove deprecated methods
@@ -312,7 +312,7 @@ def _check_patch_urls(pkgs, error_cls):
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        for condition, patches in pkg_cls.patches.items():
            for patch in patches:
                if not isinstance(patch, spack.patch.UrlPatch):
@@ -342,7 +342,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
    errors = []
    for pkg_name in pkgs:
        name_definitions = collections.defaultdict(list)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

        for cls_item in inspect.getmro(pkg_cls):
            for name in RESERVED_NAMES:
@@ -383,7 +383,7 @@ def _ensure_packages_are_pickeleable(pkgs, error_cls):
    """Ensure that package objects are pickleable"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg = pkg_cls(spack.spec.Spec(pkg_name))
        try:
            pickle.dumps(pkg)
@@ -424,7 +424,7 @@ def _ensure_all_versions_can_produce_a_fetcher(pkgs, error_cls):
    """Ensure all versions in a package can produce a fetcher"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg = pkg_cls(spack.spec.Spec(pkg_name))
        try:
            spack.fetch_strategy.check_pkg_attributes(pkg)
@@ -449,7 +449,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
    ]
    for pkg_name in pkgs:
        details = []
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        with open(filename, "r") as package_file:
            for i, line in enumerate(package_file):
                pattern = next((r for r in fixme_regexes if r.search(line)), None)
@@ -461,7 +461,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
            error_msg = "Package '{}' contains boilerplate that need to be removed"
            errors.append(error_cls(error_msg.format(pkg_name), details))

        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        if not pkg_cls.__doc__:
            error_msg = "Package '{}' miss a docstring"
            errors.append(error_cls(error_msg.format(pkg_name), []))
@@ -474,7 +474,7 @@ def _ensure_all_packages_use_sha256_checksums(pkgs, error_cls):
    """Ensure no packages use md5 checksums"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        if pkg_cls.manual_download:
            continue

@@ -511,7 +511,7 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
    """Ensure that methods modifying the build environment are ported to builder classes."""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        buildsystem_variant, _ = pkg_cls.variants["build_system"]
        buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
        builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]
@@ -538,7 +538,7 @@ def _linting_package_file(pkgs, error_cls):
    """Check for correctness of links"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

        # Does the homepage have http, and if so, does https work?
        if pkg_cls.homepage.startswith("http://"):
@@ -562,7 +562,7 @@ def _unknown_variants_in_directives(pkgs, error_cls):
    """Report unknown or wrong variants in directives for this package"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

        # Check "conflicts" directive
        for conflict, triggers in pkg_cls.conflicts.items():
@@ -628,15 +628,15 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
    """Report unknown dependencies and wrong variants for dependencies"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        for dependency_name, dependency_data in pkg_cls.dependencies.items():
            # No need to analyze virtual packages
            if spack.repo.path.is_virtual(dependency_name):
            if spack.repo.PATH.is_virtual(dependency_name):
                continue

            try:
                dependency_pkg_cls = spack.repo.path.get_pkg_class(dependency_name)
                dependency_pkg_cls = spack.repo.PATH.get_pkg_class(dependency_name)
            except spack.repo.UnknownPackageError:
                # This dependency is completely missing, so report
                # and continue the analysis
@@ -675,7 +675,7 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
    """Ensures that variant defaults are present and parsable from cli"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        for variant_name, entry in pkg_cls.variants.items():
            variant, _ = entry
            default_is_parsable = (
@@ -709,18 +709,33 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
    return errors


@package_directives
def _ensure_variants_have_descriptions(pkgs, error_cls):
    """Ensures that all variants have a description."""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        for variant_name, entry in pkg_cls.variants.items():
            variant, _ = entry
            if not variant.description:
                error_msg = "Variant '{}' in package '{}' is missing a description"
                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))

    return errors


@package_directives
def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
    """Report if version constraints used in directives are not satisfiable"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        dependencies_to_check = []
        for dependency_name, dependency_data in pkg_cls.dependencies.items():
            # Skip virtual dependencies for the time being, check on
            # their versions can be added later
            if spack.repo.path.is_virtual(dependency_name):
            if spack.repo.PATH.is_virtual(dependency_name):
                continue

            dependencies_to_check.extend([edge.spec for edge in dependency_data.values()])
@@ -729,7 +744,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
    for s in dependencies_to_check:
        dependency_pkg_cls = None
        try:
            dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
            dependency_pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
            # Some packages have hacks that might cause failures on some platform
            # Allow to explicitly set conditions to skip version checks in that case
            skip_conditions = getattr(dependency_pkg_cls, "skip_version_audit", [])
@@ -772,7 +787,7 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
    except variant_exceptions as e:
        summary = pkg.name + ': wrong variant in "{0}" directive'
        summary = summary.format(directive)
        filename = spack.repo.path.filename_for_package_name(pkg.name)
        filename = spack.repo.PATH.filename_for_package_name(pkg.name)

        error_msg = str(e).strip()
        if isinstance(e, KeyError):
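The new ``_ensure_variants_have_descriptions`` audit flags variants declared without a description. On the package side, satisfying it just means passing ``description=`` to the ``variant`` directive; a short hypothetical example (the package itself is invented, the directives are real Spack API):

```python
from spack.package import *


class HypotheticalSolver(CMakePackage):
    """Hypothetical package showing variant declarations the audit accepts."""

    # Passes the audit: the variant carries a human-readable description.
    variant("mpi", default=True, description="Build with MPI support")

    # Would be reported by `spack audit packages`: no description given.
    # variant("cuda", default=False)
```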
@@ -52,6 +52,7 @@
import spack.util.url as url_util
import spack.util.web as web_util
from spack.caches import misc_cache_location
from spack.package_prefs import get_package_dir_permissions, get_package_group
from spack.relocate_text import utf8_paths_to_single_binary_regex
from spack.spec import Spec
from spack.stage import Stage
@@ -1312,15 +1313,7 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
    else:
        raise ValueError("{0} not a valid spec file type".format(spec_file))
    spec_dict["buildcache_layout_version"] = 1
    bchecksum = {}
    bchecksum["hash_algorithm"] = "sha256"
    bchecksum["hash"] = checksum
    spec_dict["binary_cache_checksum"] = bchecksum
    # Add original install prefix relative to layout root to spec.json.
    # This will be used to determine is the directory layout has changed.
    buildinfo = {}
    buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.STORE.layout.root)
    spec_dict["buildinfo"] = buildinfo
    spec_dict["binary_cache_checksum"] = {"hash_algorithm": "sha256", "hash": checksum}

    with open(specfile_path, "w") as outfile:
        # Note: when using gpg clear sign, we need to avoid long lines (19995 chars).
@@ -1799,6 +1792,27 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
    return tarfile_path


def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
    """Strip the top-level directory `prefix` from the member names in a tarfile."""
    # Including trailing /, otherwise we end up with absolute paths.
    regex = re.compile(re.escape(prefix) + "/*")

    # Remove the top-level directory from the member (link)names.
    # Note: when a tarfile is created, relative in-prefix symlinks are
    # expanded to matching member names of tarfile entries. So, we have
    # to ensure that those are updated too.
    # Absolute symlinks are copied verbatim -- relocation should take care of
    # them.
    for m in tar.getmembers():
        result = regex.match(m.name)
        assert result is not None
        m.name = m.name[result.end() :]
        if m.linkname:
            result = regex.match(m.linkname)
            if result:
                m.linkname = m.linkname[result.end() :]


def extract_tarball(spec, download_result, unsigned=False, force=False):
    """
    extract binary tarball for given package into install area
@@ -1809,6 +1823,14 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
    else:
        raise NoOverwriteException(str(spec.prefix))

    # Create the install prefix
    fsys.mkdirp(
        spec.prefix,
        mode=get_package_dir_permissions(spec),
        group=get_package_group(spec),
        default_perms="parents",
    )

    specfile_path = download_result["specfile_stage"].save_filename

    with open(specfile_path, "r") as inputfile:
@@ -1862,42 +1884,23 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
        tarfile_path, size, contents, "sha256", expected, local_checksum
    )

    new_relative_prefix = str(os.path.relpath(spec.prefix, spack.store.STORE.layout.root))
    # if the original relative prefix is in the spec file use it
    buildinfo = spec_dict.get("buildinfo", {})
    old_relative_prefix = buildinfo.get("relative_prefix", new_relative_prefix)
    rel = buildinfo.get("relative_rpaths")
    info = "old relative prefix %s\nnew relative prefix %s\nrelative rpaths %s"
    tty.debug(info % (old_relative_prefix, new_relative_prefix, rel), level=2)

    # Extract the tarball into the store root, presumably on the same filesystem.
    # The directory created is the base directory name of the old prefix.
    # Moving the old prefix name to the new prefix location should preserve
    # hard links and symbolic links.
    extract_tmp = os.path.join(spack.store.STORE.layout.root, ".tmp")
    mkdirp(extract_tmp)
    extracted_dir = os.path.join(extract_tmp, old_relative_prefix.split(os.path.sep)[-1])

    with closing(tarfile.open(tarfile_path, "r")) as tar:
        try:
            tar.extractall(path=extract_tmp)
        except Exception as e:
            _delete_staged_downloads(download_result)
            shutil.rmtree(extracted_dir)
            raise e
    try:
        shutil.move(extracted_dir, spec.prefix)
    except Exception as e:
        with closing(tarfile.open(tarfile_path, "r")) as tar:
            # Remove install prefix from tarfil to extract directly into spec.prefix
            _tar_strip_component(tar, prefix=_ensure_common_prefix(tar))
            tar.extractall(path=spec.prefix)
    except Exception:
        shutil.rmtree(spec.prefix, ignore_errors=True)
        _delete_staged_downloads(download_result)
        shutil.rmtree(extracted_dir)
        raise e
        raise

    os.remove(tarfile_path)
    os.remove(specfile_path)

    try:
        relocate_package(spec)
    except Exception as e:
        shutil.rmtree(spec.prefix)
        shutil.rmtree(spec.prefix, ignore_errors=True)
        raise e
    else:
        manifest_file = os.path.join(
@@ -1910,12 +1913,29 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
            tty.warn("No manifest file in tarball for spec %s" % spec_id)
    finally:
        if tmpdir:
            shutil.rmtree(tmpdir)
            shutil.rmtree(tmpdir, ignore_errors=True)
        if os.path.exists(filename):
            os.remove(filename)
        _delete_staged_downloads(download_result)


def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
    # Get the shortest length directory.
    common_prefix = min((e.name for e in tar.getmembers() if e.isdir()), key=len, default=None)

    if common_prefix is None:
        raise ValueError("Tarball does not contain a common prefix")

    # Validate that each file starts with the prefix
    for member in tar.getmembers():
        if not member.name.startswith(common_prefix):
            raise ValueError(
                f"Tarball contains file {member.name} outside of prefix {common_prefix}"
            )

    return common_prefix


def install_root_node(spec, unsigned=False, force=False, sha256=None):
    """Install the root node of a concrete spec from a buildcache.
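The new extraction path avoids the temporary ``.tmp`` directory and the ``shutil.move`` by rewriting member names so the tarball's single top-level directory lands directly in ``spec.prefix``. A self-contained sketch of the same technique outside of Spack, with hypothetical function and argument names:

```python
import re
import tarfile


def extract_stripping_top_dir(tarball: str, destination: str, prefix: str) -> None:
    """Extract `tarball` into `destination`, dropping the leading `prefix/` component."""
    pattern = re.compile(re.escape(prefix) + "/*")
    with tarfile.open(tarball, "r") as tar:
        for member in tar.getmembers():
            match = pattern.match(member.name)
            if match is None:
                raise ValueError(f"{member.name} is outside of prefix {prefix}")
            member.name = member.name[match.end():]
            # Relative symlink targets inside the prefix were also recorded with it.
            if member.linkname:
                link_match = pattern.match(member.linkname)
                if link_match:
                    member.linkname = member.linkname[link_match.end():]
        tar.extractall(path=destination)
```

Because hard links and relative symlinks keep pointing at rewritten member names, the extracted tree is equivalent to moving the old top-level directory into place, without the extra copy.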
@@ -476,13 +476,13 @@ def ensure_executables_in_path_or_raise(
def _add_externals_if_missing() -> None:
    search_list = [
        # clingo
        spack.repo.path.get_pkg_class("cmake"),
        spack.repo.path.get_pkg_class("bison"),
        spack.repo.PATH.get_pkg_class("cmake"),
        spack.repo.PATH.get_pkg_class("bison"),
        # GnuPG
        spack.repo.path.get_pkg_class("gawk"),
        spack.repo.PATH.get_pkg_class("gawk"),
    ]
    if IS_WINDOWS:
        search_list.append(spack.repo.path.get_pkg_class("winbison"))
        search_list.append(spack.repo.PATH.get_pkg_class("winbison"))
    detected_packages = spack.detection.by_executable(search_list)
    spack.detection.update_configuration(detected_packages, scope="bootstrap")
@@ -1256,9 +1256,8 @@ def make_stack(tb, stack=None):
func = getattr(obj, tb.tb_frame.f_code.co_name, "")
if func:
typename, *_ = func.__qualname__.partition(".")

if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
break
if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
break
else:
return None
@@ -162,17 +162,6 @@ def initconfig_compiler_entries(self):
            libs_string = libs_format_string.format(lang)
            entries.append(cmake_cache_string(libs_string, libs_flags))

        # Set the generator in the cached config
        if self.spec.satisfies("generator=make"):
            entries.append(cmake_cache_string("CMAKE_GENERATOR", "Unix Makefiles"))
        if self.spec.satisfies("generator=ninja"):
            entries.append(cmake_cache_string("CMAKE_GENERATOR", "Ninja"))
            entries.append(
                cmake_cache_string(
                    "CMAKE_MAKE_PROGRAM", "{0}/ninja".format(spec["ninja"].prefix.bin)
                )
            )

        return entries

    def initconfig_mpi_entries(self):
@@ -248,7 +248,8 @@ def std_cmake_args(self):
    @staticmethod
    def std_args(pkg, generator=None):
        """Computes the standard cmake arguments for a generic package"""
        generator = generator or "Unix Makefiles"
        default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
        generator = generator or default_generator
        valid_primary_generators = ["Unix Makefiles", "Ninja"]
        primary_generator = _extract_primary_generator(generator)
        if primary_generator not in valid_primary_generators:
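With this change the default CMake generator becomes platform dependent: Ninja on Windows, Unix Makefiles elsewhere, with an explicit ``generator`` argument still taking precedence. A minimal sketch of the selection logic in isolation (function and validation details are illustrative, not Spack's exact helpers):

```python
import sys
from typing import Optional


def default_cmake_generator(requested: Optional[str] = None) -> str:
    # An explicit request wins; otherwise pick a sensible per-platform default.
    default = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
    generator = requested or default
    # Extra generators look like "CodeBlocks - Ninja"; the primary one is the last part.
    primary = generator.split(" - ")[-1]
    if primary not in ("Unix Makefiles", "Ninja"):
        raise ValueError(f"unsupported CMake generator: {generator}")
    return generator
```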
@@ -209,5 +209,5 @@ def install(self, pkg, spec, prefix):
    def check(self):
        """Search Meson-generated files for the target ``test`` and run it if found."""
        with fs.working_dir(self.build_directory):
            self._if_ninja_target_execute("test")
            self._if_ninja_target_execute("check")
            self.pkg._if_ninja_target_execute("test")
            self.pkg._if_ninja_target_execute("check")
@@ -30,7 +30,7 @@


class PythonExtension(spack.package_base.PackageBase):
    maintainers("adamjstewart", "pradyunsg")
    maintainers("adamjstewart")

    @property
    def import_modules(self):
@@ -201,7 +201,7 @@ def update_external_dependencies(self, extendee_spec=None):
        else:
            python = self.get_external_python_for_prefix()
            if not python.concrete:
                repo = spack.repo.path.repo_for_pkg(python)
                repo = spack.repo.PATH.repo_for_pkg(python)
                python.namespace = repo.namespace

                # Ensure architecture information is present
@@ -301,7 +301,7 @@ def get_external_python_for_prefix(self):
            return python_externals_configured[0]

        python_externals_detection = spack.detection.by_executable(
            [spack.repo.path.get_pkg_class("python")], path_hints=[self.spec.external_path]
            [spack.repo.PATH.get_pkg_class("python")], path_hints=[self.spec.external_path]
        )

        python_externals_detected = [
@@ -28,7 +28,7 @@ class QMakePackage(spack.package_base.PackageBase):

    build_system("qmake")

    depends_on("qt", type="build", when="build_system=qmake")
    depends_on("qmake", type="build", when="build_system=qmake")


@spack.builder.builder("qmake")
@@ -140,8 +140,6 @@ class ROCmPackage(PackageBase):
    depends_on("hsa-rocr-dev", when="+rocm")
    depends_on("hip +rocm", when="+rocm")

    conflicts("^blt@:0.3.6", when="+rocm")

    # need amd gpu type for rocm builds
    conflicts("amdgpu_target=none", when="+rocm")
@@ -7,13 +7,14 @@
import re

import llnl.util.tty as tty
from llnl.util.filesystem import find, join_path, working_dir
from llnl.util.filesystem import find, working_dir

import spack.builder
import spack.install_test
import spack.package_base
from spack.directives import build_system, depends_on, extends
from spack.multimethod import when
from spack.util.executable import Executable

from ._checks import BaseBuilder, execute_install_time_tests

@@ -39,9 +40,8 @@ class SIPPackage(spack.package_base.PackageBase):
    build_system("sip")

    with when("build_system=sip"):
        extends("python")
        depends_on("qt")
        depends_on("py-sip")
        extends("python", type=("build", "link", "run"))
        depends_on("py-sip", type="build")

    @property
    def import_modules(self):
@@ -113,13 +113,13 @@ class SIPBuilder(BaseBuilder):
    * install

    The configure phase already adds a set of default flags. To see more
    options, run ``python configure.py --help``.
    options, run ``sip-build --help``.
    """

    phases = ("configure", "build", "install")

    #: Names associated with package methods in the old build-system format
    legacy_methods = ("configure_file", "configure_args", "build_args", "install_args")
    legacy_methods = ("configure_args", "build_args", "install_args")

    #: Names associated with package attributes in the old build-system format
    legacy_attributes = (
@@ -130,34 +130,17 @@ class SIPBuilder(BaseBuilder):
        "build_directory",
    )

    def configure_file(self):
        """Returns the name of the configure file to use."""
        return "configure.py"
    build_directory = "build"

    def configure(self, pkg, spec, prefix):
        """Configure the package."""
        configure = self.configure_file()

        args = self.configure_args()
        # https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
        args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
        args.extend(self.configure_args())

        args.extend(
            [
                "--verbose",
                "--confirm-license",
                "--qmake",
                spec["qt"].prefix.bin.qmake,
                "--sip",
                spec["py-sip"].prefix.bin.sip,
                "--sip-incdir",
                join_path(spec["py-sip"].prefix, spec["python"].package.include),
                "--bindir",
                prefix.bin,
                "--destdir",
                inspect.getmodule(self.pkg).python_platlib,
            ]
        )

        self.pkg.python(configure, *args)
        sip_build = Executable(spec["py-sip"].prefix.bin.join("sip-build"))
        sip_build(*args)

    def configure_args(self):
        """Arguments to pass to configure."""
@@ -167,7 +150,8 @@ def build(self, pkg, spec, prefix):
        """Build the package."""
        args = self.build_args()

        inspect.getmodule(self.pkg).make(*args)
        with working_dir(self.build_directory):
            inspect.getmodule(self.pkg).make(*args)

    def build_args(self):
        """Arguments to pass to build."""
@@ -177,21 +161,11 @@ def install(self, pkg, spec, prefix):
        """Install the package."""
        args = self.install_args()

        inspect.getmodule(self.pkg).make("install", parallel=False, *args)
        with working_dir(self.build_directory):
            inspect.getmodule(self.pkg).make("install", *args)

    def install_args(self):
        """Arguments to pass to install."""
        return []

    spack.builder.run_after("install")(execute_install_time_tests)

    @spack.builder.run_after("install")
    def extend_path_setup(self):
        # See github issue #14121 and PR #15297
        module = self.pkg.spec["py-sip"].variants["module"].value
        if module != "sip":
            module = module.split(".")[0]
            with working_dir(inspect.getmodule(self.pkg).python_platlib):
                with open(os.path.join(module, "__init__.py"), "a") as f:
                    f.write("from pkgutil import extend_path\n")
                    f.write("__path__ = extend_path(__path__, __name__)\n")
@@ -535,7 +535,7 @@ def __job_name(name, suffix=""):
    """Compute the name of a named job with appropriate suffix.
    Valid suffixes are either '-remove' or empty string or None
    """
    assert type(name) == str
    assert isinstance(name, str)

    jname = name
    if suffix:
@@ -885,7 +885,7 @@ def generate_gitlab_ci_yaml(
    cli_scopes = [
        os.path.relpath(s.path, concrete_env_dir)
        for s in cfg.scopes().values()
        if type(s) == cfg.ImmutableConfigScope
        if isinstance(s, cfg.ImmutableConfigScope)
        and s.path not in env_includes
        and os.path.exists(s.path)
    ]
@@ -1504,7 +1504,7 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
        return

    try:
        pkg_cls = spack.repo.path.get_pkg_class(job_spec.name)
        pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
        job_pkg = pkg_cls(job_spec)
        tty.debug("job package: {0}".format(job_pkg))
    except AssertionError:
@@ -1690,7 +1690,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
    return True


def reproduce_ci_job(url, work_dir):
def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
    """Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
    attempt to setup an environment in which the failure can be reproduced
    locally. This entails the following:
@@ -1706,6 +1706,11 @@ def reproduce_ci_job(url, work_dir):
    work_dir = os.path.realpath(work_dir)
    download_and_extract_artifacts(url, work_dir)

    gpg_path = None
    if gpg_url:
        gpg_path = web_util.fetch_url_text(gpg_url, dest_dir=os.path.join(work_dir, "_pgp"))
        rel_gpg_path = gpg_path.replace(work_dir, "").lstrip(os.path.sep)

    lock_file = fs.find(work_dir, "spack.lock")[0]
    repro_lock_dir = os.path.dirname(lock_file)
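Both hunks above replace exact type comparisons with ``isinstance``, which also accepts subclasses and is the form linters generally prefer. The behavioural difference in miniature, using local stand-in classes rather than Spack's own config scopes:

```python
class ConfigScope:
    pass


class ImmutableConfigScope(ConfigScope):
    pass


scope = ImmutableConfigScope()

# Exact type check: only matches the concrete class.
assert (type(scope) == ImmutableConfigScope) is True
assert (type(scope) == ConfigScope) is False

# isinstance also accepts instances of subclasses of the requested class.
assert isinstance(scope, ConfigScope)
assert isinstance(scope, ImmutableConfigScope)
```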
@@ -1798,60 +1803,63 @@ def reproduce_ci_job(url, work_dir):
|
||||
# more faithful reproducer if everything appears to run in the same
|
||||
# absolute path used during the CI build.
|
||||
mount_as_dir = "/work"
|
||||
mounted_workdir = "/reproducer"
|
||||
if repro_details:
|
||||
mount_as_dir = repro_details["ci_project_dir"]
|
||||
mounted_repro_dir = os.path.join(mount_as_dir, rel_repro_dir)
|
||||
mounted_env_dir = os.path.join(mount_as_dir, relative_concrete_env_dir)
|
||||
if gpg_path:
|
||||
mounted_gpg_path = os.path.join(mounted_workdir, rel_gpg_path)
|
||||
|
||||
# We will also try to clone spack from your local checkout and
|
||||
# reproduce the state present during the CI build, and put that into
|
||||
# the bind-mounted reproducer directory.
|
||||
# We will also try to clone spack from your local checkout and
|
||||
# reproduce the state present during the CI build, and put that into
|
||||
# the bind-mounted reproducer directory.
|
||||
|
||||
# Regular expressions for parsing that HEAD commit. If the pipeline
|
||||
# was on the gitlab spack mirror, it will have been a merge commit made by
|
||||
# gitub and pushed by the sync script. If the pipeline was run on some
|
||||
# environment repo, then the tested spack commit will likely have been
|
||||
# a regular commit.
|
||||
commit_1 = None
|
||||
commit_2 = None
|
||||
commit_regex = re.compile(r"commit\s+([^\s]+)")
|
||||
merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")
|
||||
# Regular expressions for parsing that HEAD commit. If the pipeline
|
||||
# was on the gitlab spack mirror, it will have been a merge commit made by
|
||||
# gitub and pushed by the sync script. If the pipeline was run on some
|
||||
# environment repo, then the tested spack commit will likely have been
|
||||
# a regular commit.
|
||||
commit_1 = None
|
||||
commit_2 = None
|
||||
commit_regex = re.compile(r"commit\s+([^\s]+)")
|
||||
merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")
|
||||
|
||||
# Try the more specific merge commit regex first
|
||||
m = merge_commit_regex.search(spack_info)
|
||||
# Try the more specific merge commit regex first
|
||||
m = merge_commit_regex.search(spack_info)
|
||||
if m:
|
||||
# This was a merge commit and we captured the parents
|
||||
commit_1 = m.group(1)
|
||||
commit_2 = m.group(2)
|
||||
else:
|
||||
# Not a merge commit, just get the commit sha
|
||||
m = commit_regex.search(spack_info)
|
||||
if m:
|
||||
# This was a merge commit and we captured the parents
|
||||
commit_1 = m.group(1)
|
||||
commit_2 = m.group(2)
|
||||
|
||||
setup_result = False
|
||||
if commit_1:
|
||||
if commit_2:
|
||||
setup_result = setup_spack_repro_version(work_dir, commit_2, merge_commit=commit_1)
|
||||
else:
|
||||
# Not a merge commit, just get the commit sha
|
||||
m = commit_regex.search(spack_info)
|
||||
if m:
|
||||
commit_1 = m.group(1)
|
||||
setup_result = setup_spack_repro_version(work_dir, commit_1)
|
||||
|
||||
setup_result = False
|
||||
if commit_1:
|
||||
if commit_2:
|
||||
setup_result = setup_spack_repro_version(work_dir, commit_2, merge_commit=commit_1)
|
||||
else:
|
||||
setup_result = setup_spack_repro_version(work_dir, commit_1)
|
||||
|
||||
if not setup_result:
|
||||
setup_msg = """
|
||||
This can happen if the spack you are using to run this command is not a git
|
||||
repo, or if it is a git repo, but it does not have the commits needed to
|
||||
recreate the tested merge commit. If you are trying to reproduce a spack
|
||||
PR pipeline job failure, try fetching the latest develop commits from
|
||||
mainline spack and make sure you have the most recent commit of the PR
|
||||
branch in your local spack repo. Then run this command again.
|
||||
Alternatively, you can also manually clone spack if you know the version
|
||||
you want to test.
|
||||
"""
|
||||
tty.error(
|
||||
"Failed to automatically setup the tested version of spack "
|
||||
"in your local reproduction directory."
|
||||
)
|
||||
print(setup_msg)
|
||||
if not setup_result:
|
||||
setup_msg = """
|
||||
This can happen if the spack you are using to run this command is not a git
|
||||
repo, or if it is a git repo, but it does not have the commits needed to
|
||||
recreate the tested merge commit. If you are trying to reproduce a spack
|
||||
PR pipeline job failure, try fetching the latest develop commits from
|
||||
mainline spack and make sure you have the most recent commit of the PR
|
||||
branch in your local spack repo. Then run this command again.
|
||||
Alternatively, you can also manually clone spack if you know the version
|
||||
you want to test.
|
||||
"""
|
||||
tty.error(
|
||||
"Failed to automatically setup the tested version of spack "
|
||||
"in your local reproduction directory."
|
||||
)
|
||||
print(setup_msg)
|
||||
|
||||
# In cases where CI build was run on a shell runner, it might be useful
|
||||
# to see what tags were applied to the job so the user knows what shell
|
||||
@@ -1862,45 +1870,92 @@ def reproduce_ci_job(url, work_dir):
|
||||
job_tags = job_yaml["tags"]
|
||||
tty.msg("Job ran with the following tags: {0}".format(job_tags))
|
||||
|
||||
inst_list = []
|
||||
entrypoint_script = [
|
||||
["git", "config", "--global", "--add", "safe.directory", mount_as_dir],
|
||||
[".", os.path.join(mount_as_dir if job_image else work_dir, "share/spack/setup-env.sh")],
|
||||
["spack", "gpg", "trust", mounted_gpg_path if job_image else gpg_path] if gpg_path else [],
|
||||
["spack", "env", "activate", mounted_env_dir if job_image else repro_dir],
|
||||
[os.path.join(mounted_repro_dir, "install.sh") if job_image else install_script],
|
||||
]
|
||||
|
||||
inst_list = []
|
||||
# Finally, print out some instructions to reproduce the build
|
||||
if job_image:
|
||||
inst_list.append("\nRun the following command:\n\n")
|
||||
inst_list.append(
|
||||
" $ docker run --rm --name spack_reproducer -v {0}:{1}:Z -ti {2}\n".format(
|
||||
work_dir, mount_as_dir, job_image
|
||||
)
|
||||
# Allow interactive
|
||||
entrypoint_script.extend(
|
||||
[
|
||||
[
|
||||
"echo",
|
||||
"Re-run install script using:\n\t{0}".format(
|
||||
os.path.join(mounted_repro_dir, "install.sh")
|
||||
if job_image
|
||||
else install_script
|
||||
),
|
||||
],
|
||||
# Allow interactive
|
||||
["exec", "$@"],
|
||||
]
|
||||
)
|
||||
inst_list.append("\nOnce inside the container:\n\n")
|
||||
process_command(
|
||||
"entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False
|
||||
)
|
||||
|
||||
docker_command = [
|
||||
[
|
||||
runtime,
|
||||
"run",
|
||||
"-i",
|
||||
"-t",
|
||||
"--rm",
|
||||
"--name",
|
||||
"spack_reproducer",
|
||||
"-v",
|
||||
":".join([work_dir, mounted_workdir, "Z"]),
|
||||
"-v",
|
||||
":".join(
|
||||
[
|
||||
os.path.join(work_dir, "jobs_scratch_dir"),
|
||||
os.path.join(mount_as_dir, "jobs_scratch_dir"),
|
||||
"Z",
|
||||
]
|
||||
),
|
||||
"-v",
|
||||
":".join([os.path.join(work_dir, "spack"), mount_as_dir, "Z"]),
|
||||
"--entrypoint",
|
||||
os.path.join(mounted_workdir, "entrypoint.sh"),
|
||||
job_image,
|
||||
"bash",
|
||||
]
|
||||
]
|
||||
autostart = autostart and setup_result
|
||||
process_command("start", docker_command, work_dir, run=autostart)
|
||||
|
||||
if not autostart:
|
||||
inst_list.append("\nTo run the docker reproducer:\n\n")
|
||||
inst_list.extend(
|
||||
[
|
||||
" - Start the docker container install",
|
||||
" $ {0}/start.sh".format(work_dir),
|
||||
]
|
||||
)
|
||||
else:
|
||||
process_command("reproducer", entrypoint_script, work_dir, run=False)
|
||||
|
||||
inst_list.append("\nOnce on the tagged runner:\n\n")
|
||||
inst_list.extent(
|
||||
[" - Run the reproducer script", " $ {0}/reproducer.sh".format(work_dir)]
|
||||
)
|
||||
|
||||
if not setup_result:
|
||||
inst_list.append(" - Clone spack and acquire tested commit\n")
|
||||
inst_list.append("{0}".format(spack_info))
|
||||
spack_root = "<spack-clone-path>"
|
||||
else:
|
||||
spack_root = "{0}/spack".format(mount_as_dir)
|
||||
inst_list.append("\n - Clone spack and acquire tested commit")
|
||||
inst_list.append("\n {0}\n".format(spack_info))
|
||||
inst_list.append("\n")
|
||||
inst_list.append("\n Path to clone spack: {0}/spack\n\n".format(work_dir))
|
||||
|
||||
inst_list.append(" - Activate the environment\n\n")
|
||||
inst_list.append(" $ source {0}/share/spack/setup-env.sh\n".format(spack_root))
|
||||
inst_list.append(
|
||||
" $ spack env activate --without-view {0}\n\n".format(
|
||||
mounted_env_dir if job_image else repro_dir
|
||||
)
|
||||
)
|
||||
inst_list.append(" - Run the install script\n\n")
|
||||
inst_list.append(
|
||||
" $ {0}\n".format(
|
||||
os.path.join(mounted_repro_dir, "install.sh") if job_image else install_script
|
||||
)
|
||||
)
|
||||
|
||||
print("".join(inst_list))
|
||||
tty.msg("".join(inst_list))
|
||||
|
||||
|
||||
def process_command(name, commands, repro_dir):
|
||||
def process_command(name, commands, repro_dir, run=True, exit_on_failure=True):
|
||||
"""
|
||||
Create a script for and run the command. Copy the script to the
|
||||
reproducibility directory.
|
||||
@@ -1910,6 +1965,7 @@ def process_command(name, commands, repro_dir):
|
||||
commands (list): list of arguments for single command or list of lists of
|
||||
arguments for multiple commands. No shell escape is performed.
|
||||
repro_dir (str): Job reproducibility directory
|
||||
run (bool): Run the script and return the exit code if True
|
||||
|
||||
Returns: the exit code from processing the command
|
||||
"""
|
||||
@@ -1928,7 +1984,8 @@ def process_command(name, commands, repro_dir):
|
||||
with open(script, "w") as fd:
|
||||
fd.write("#!/bin/sh\n\n")
|
||||
fd.write("\n# spack {0} command\n".format(name))
|
||||
fd.write("set -e\n")
|
||||
if exit_on_failure:
|
||||
fd.write("set -e\n")
|
||||
if os.environ.get("SPACK_VERBOSE_SCRIPT"):
|
||||
fd.write("set -x\n")
|
||||
fd.write(full_command)
|
||||
@@ -1939,19 +1996,27 @@ def process_command(name, commands, repro_dir):
|
||||
|
||||
copy_path = os.path.join(repro_dir, script)
|
||||
shutil.copyfile(script, copy_path)
|
||||
st = os.stat(copy_path)
|
||||
os.chmod(copy_path, st.st_mode | stat.S_IEXEC)
|
||||
|
||||
# Run the generated install.sh shell script as if it were being run in
|
||||
# a login shell.
|
||||
try:
|
||||
cmd_process = subprocess.Popen(["/bin/sh", "./{0}".format(script)])
|
||||
cmd_process.wait()
|
||||
exit_code = cmd_process.returncode
|
||||
except (ValueError, subprocess.CalledProcessError, OSError) as err:
|
||||
tty.error("Encountered error running {0} script".format(name))
|
||||
tty.error(err)
|
||||
exit_code = 1
|
||||
exit_code = None
|
||||
if run:
|
||||
try:
|
||||
cmd_process = subprocess.Popen(["/bin/sh", "./{0}".format(script)])
|
||||
cmd_process.wait()
|
||||
exit_code = cmd_process.returncode
|
||||
except (ValueError, subprocess.CalledProcessError, OSError) as err:
|
||||
tty.error("Encountered error running {0} script".format(name))
|
||||
tty.error(err)
|
||||
exit_code = 1
|
||||
|
||||
tty.debug("spack {0} exited {1}".format(name, exit_code))
|
||||
else:
|
||||
# Delete the script, it is copied to the destination dir
|
||||
os.remove(script)
|
||||
|
||||
tty.debug("spack {0} exited {1}".format(name, exit_code))
|
||||
return exit_code
|
||||
|
||||
|
||||
|
@@ -291,7 +291,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
    if len(matching_specs) <= 1:
        return

    format_string = "{name}{@version}{%compiler}{arch=architecture}"
    format_string = "{name}{@version}{%compiler.name}{@compiler.version}{arch=architecture}"
    args = ["%s matches multiple packages." % spec, "Matching packages:"]
    args += [
        colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
@@ -383,7 +383,7 @@ def display_specs(specs, args=None, **kwargs):
        deps (bool): Display dependencies with specs
        long (bool): Display short hashes with specs
        very_long (bool): Display full hashes with specs (supersedes ``long``)
        namespace (bool): Print namespaces along with names
        namespaces (bool): Print namespaces along with names
        show_flags (bool): Show compiler flags with specs
        variants (bool): Show variants with specs
        indent (int): indent each line this much
@@ -407,7 +407,7 @@ def get_arg(name, default=None):
    paths = get_arg("paths", False)
    deps = get_arg("deps", False)
    hashes = get_arg("long", False)
    namespace = get_arg("namespace", False)
    namespaces = get_arg("namespaces", False)
    flags = get_arg("show_flags", False)
    full_compiler = get_arg("show_full_compiler", False)
    variants = get_arg("variants", False)
@@ -428,7 +428,7 @@ def get_arg(name, default=None):

    format_string = get_arg("format", None)
    if format_string is None:
        nfmt = "{fullname}" if namespace else "{name}"
        nfmt = "{fullname}" if namespaces else "{name}"
        ffmt = ""
        if full_compiler or flags:
            ffmt += "{%compiler.name}"
@@ -584,14 +584,14 @@ def require_active_env(cmd_name):

    if env:
        return env
    else:
        tty.die(
            "`spack %s` requires an environment" % cmd_name,
            "activate an environment first:",
            " spack env activate ENV",
            "or use:",
            " spack -e ENV %s ..." % cmd_name,
        )

    tty.die(
        "`spack %s` requires an environment" % cmd_name,
        "activate an environment first:",
        " spack env activate ENV",
        "or use:",
        " spack -e ENV %s ..." % cmd_name,
    )


def find_environment(args):
@@ -47,7 +47,7 @@ def configs(parser, args):


def packages(parser, args):
    pkgs = args.name or spack.repo.path.all_package_names()
    pkgs = args.name or spack.repo.PATH.all_package_names()
    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
    _process_reports(reports)

@@ -57,7 +57,7 @@ def packages_https(parser, args):
    if not args.check_all and not args.name:
        tty.die("Please specify one or more packages to audit, or --all.")

    pkgs = args.name or spack.repo.path.all_package_names()
    pkgs = args.name or spack.repo.PATH.all_package_names()
    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
    _process_reports(reports)
@@ -126,7 +126,7 @@ def blame(parser, args):
        blame_file = path

    if not blame_file:
        pkg_cls = spack.repo.path.get_pkg_class(args.package_or_file)
        pkg_cls = spack.repo.PATH.get_pkg_class(args.package_or_file)
        blame_file = pkg_cls.module.__file__.rstrip("c")  # .pyc -> .py

    # get git blame for the package
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
import shutil
import sys
import tempfile

import llnl.util.filesystem
@@ -326,6 +327,7 @@ def _status(args):
    if missing:
        print(llnl.util.tty.color.colorize(legend))
        print()
        sys.exit(1)


def _add(args):
@@ -2,12 +2,14 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import argparse
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
from typing import List
|
||||
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.tty.color as clr
|
||||
@@ -18,7 +20,6 @@
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.hash_types as ht
|
||||
import spack.mirror
|
||||
import spack.relocate
|
||||
import spack.repo
|
||||
@@ -28,7 +29,6 @@
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web as web_util
|
||||
from spack.cmd import display_specs
|
||||
from spack.error import SpecError
|
||||
from spack.spec import Spec, save_dependency_specfiles
|
||||
from spack.stage import Stage
|
||||
from spack.util.string import plural
|
||||
@@ -38,8 +38,8 @@
|
||||
level = "long"
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
setup_parser.parser = subparser
|
||||
def setup_parser(subparser: argparse.ArgumentParser):
|
||||
setattr(setup_parser, "parser", subparser)
|
||||
subparsers = subparser.add_subparsers(help="buildcache sub-commands")
|
||||
|
||||
push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
|
||||
@@ -105,7 +105,7 @@ def setup_parser(subparser):
|
||||
install.set_defaults(func=install_fn)
|
||||
|
||||
listcache = subparsers.add_parser("list", help=list_fn.__doc__)
|
||||
arguments.add_common_arguments(listcache, ["long", "very_long"])
|
||||
arguments.add_common_arguments(listcache, ["long", "very_long", "namespaces"])
|
||||
listcache.add_argument(
|
||||
"-v",
|
||||
"--variants",
|
||||
@@ -158,14 +158,12 @@ def setup_parser(subparser):
|
||||
default=spack.config.default_modify_scope(),
|
||||
help="configuration scope containing mirrors to check",
|
||||
)
|
||||
|
||||
check.add_argument(
|
||||
"-s", "--spec", default=None, help="check single spec instead of release specs file"
|
||||
check_spec_or_specfile = check.add_mutually_exclusive_group(required=True)
|
||||
check_spec_or_specfile.add_argument(
|
||||
"-s", "--spec", help="check single spec instead of release specs file"
|
||||
)
|
||||
|
||||
check.add_argument(
|
||||
check_spec_or_specfile.add_argument(
|
||||
"--spec-file",
|
||||
default=None,
|
||||
help="check single spec from json or yaml file instead of release specs file",
|
||||
)
|
||||
|
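The change above replaces separate optional --spec / --spec-file flags with a required mutually exclusive group. A minimal standalone sketch of that argparse pattern (generic names, not lifted from the Spack sources) shows why the manual "either spec or spec-file" checks later in the command functions become unnecessary:

import argparse

parser = argparse.ArgumentParser(prog="check")
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-s", "--spec", help="check a single spec")
group.add_argument("--spec-file", help="check a single spec read from a json or yaml file")

# argparse now rejects both "no flag at all" and "both flags at once"
# before the command function ever runs.
args = parser.parse_args(["--spec", "zlib"])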
||||
@@ -173,16 +171,19 @@ def setup_parser(subparser):
|
||||
|
||||
# Download tarball and specfile
|
||||
download = subparsers.add_parser("download", help=download_fn.__doc__)
|
||||
download.add_argument(
|
||||
"-s", "--spec", default=None, help="download built tarball for spec from mirror"
|
||||
download_spec_or_specfile = download.add_mutually_exclusive_group(required=True)
|
||||
download_spec_or_specfile.add_argument(
|
||||
"-s", "--spec", help="download built tarball for spec from mirror"
|
||||
)
|
||||
download_spec_or_specfile.add_argument(
|
||||
"--spec-file", help="download built tarball for spec (from json or yaml file) from mirror"
|
||||
)
|
||||
download.add_argument(
|
||||
"--spec-file",
|
||||
"-p",
|
||||
"--path",
|
||||
required=True,
|
||||
default=None,
|
||||
help="download built tarball for spec (from json or yaml file) from mirror",
|
||||
)
|
||||
download.add_argument(
|
||||
"-p", "--path", default=None, help="path to directory where tarball should be downloaded"
|
||||
help="path to directory where tarball should be downloaded",
|
||||
)
|
||||
download.set_defaults(func=download_fn)
|
||||
|
||||
@@ -190,32 +191,32 @@ def setup_parser(subparser):
|
||||
getbuildcachename = subparsers.add_parser(
|
||||
"get-buildcache-name", help=get_buildcache_name_fn.__doc__
|
||||
)
|
||||
getbuildcachename.add_argument(
|
||||
"-s", "--spec", default=None, help="spec string for which buildcache name is desired"
|
||||
getbuildcachename_spec_or_specfile = getbuildcachename.add_mutually_exclusive_group(
|
||||
required=True
|
||||
)
|
||||
getbuildcachename.add_argument(
|
||||
"--spec-file",
|
||||
default=None,
|
||||
help="path to spec json or yaml file for which buildcache name is desired",
|
||||
getbuildcachename_spec_or_specfile.add_argument(
|
||||
"-s", "--spec", help="spec string for which buildcache name is desired"
|
||||
)
|
||||
getbuildcachename_spec_or_specfile.add_argument(
|
||||
"--spec-file", help="path to spec json or yaml file for which buildcache name is desired"
|
||||
)
|
||||
getbuildcachename.set_defaults(func=get_buildcache_name_fn)
|
||||
|
||||
# Given the root spec, save the yaml of the dependent spec to a file
|
||||
savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
|
||||
savespecfile.add_argument("--root-spec", default=None, help="root spec of dependent spec")
|
||||
savespecfile.add_argument(
|
||||
"--root-specfile",
|
||||
default=None,
|
||||
help="path to json or yaml file containing root spec of dependent spec",
|
||||
savespecfile_spec_or_specfile = savespecfile.add_mutually_exclusive_group(required=True)
|
||||
savespecfile_spec_or_specfile.add_argument("--root-spec", help="root spec of dependent spec")
|
||||
savespecfile_spec_or_specfile.add_argument(
|
||||
"--root-specfile", help="path to json or yaml file containing root spec of dependent spec"
|
||||
)
|
||||
savespecfile.add_argument(
|
||||
"-s",
|
||||
"--specs",
|
||||
default=None,
|
||||
required=True,
|
||||
help="list of dependent specs for which saved yaml is desired",
|
||||
)
|
||||
savespecfile.add_argument(
|
||||
"--specfile-dir", default=None, help="path to directory where spec yamls should be saved"
|
||||
"--specfile-dir", required=True, help="path to directory where spec yamls should be saved"
|
||||
)
|
||||
savespecfile.set_defaults(func=save_specfile_fn)
|
||||
|
||||
@@ -257,54 +258,24 @@ def setup_parser(subparser):
|
||||
update_index.set_defaults(func=update_index_fn)
|
||||
|
||||
|
||||
def _matching_specs(specs, spec_file):
|
||||
"""Return a list of matching specs read from either a spec file (JSON or YAML),
|
||||
a query over the store or a query over the active environment.
|
||||
"""
|
||||
env = ev.active_environment()
|
||||
hashes = env.all_hashes() if env else None
|
||||
if spec_file:
|
||||
return spack.store.specfile_matches(spec_file, hashes=hashes)
|
||||
|
||||
if specs:
|
||||
constraints = spack.cmd.parse_specs(specs)
|
||||
return spack.store.find(constraints, hashes=hashes)
|
||||
|
||||
if env:
|
||||
return [concrete for _, concrete in env.concretized_specs()]
|
||||
|
||||
tty.die(
|
||||
"build cache file creation requires at least one"
|
||||
" installed package spec, an active environment,"
|
||||
" or else a path to a json or yaml file containing a spec"
|
||||
" to install"
|
||||
)
|
||||
|
||||
|
||||
def _concrete_spec_from_args(args):
|
||||
spec_str, specfile_path = args.spec, args.spec_file
|
||||
|
||||
if not spec_str and not specfile_path:
|
||||
tty.error("must provide either spec string or path to YAML or JSON specfile")
|
||||
sys.exit(1)
|
||||
|
||||
if spec_str:
|
||||
try:
|
||||
constraints = spack.cmd.parse_specs(spec_str)
|
||||
spec = spack.store.find(constraints)[0]
|
||||
spec.concretize()
|
||||
except SpecError as spec_error:
|
||||
tty.error("Unable to concretize spec {0}".format(spec_str))
|
||||
tty.debug(spec_error)
|
||||
sys.exit(1)
|
||||
|
||||
return spec
|
||||
|
||||
return Spec.from_specfile(specfile_path)
|
||||
def _matching_specs(specs: List[Spec]) -> List[Spec]:
|
||||
"""Disambiguate specs and return a list of matching specs"""
|
||||
return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]
|
||||
|
||||
|
||||
def push_fn(args):
|
||||
"""create a binary package and push it to a mirror"""
|
||||
if args.spec_file:
|
||||
tty.warn(
|
||||
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
|
||||
"Use positional arguments instead."
|
||||
)
|
||||
|
||||
if args.specs or args.spec_file:
|
||||
specs = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
|
||||
else:
|
||||
specs = spack.cmd.require_active_env("buildcache push").all_specs()
|
||||
|
||||
mirror = arguments.mirror_name_or_url(args.mirror)
|
||||
|
||||
if args.allow_root:
|
||||
@@ -315,7 +286,7 @@ def push_fn(args):
|
||||
url = mirror.push_url
|
||||
|
||||
specs = bindist.specs_to_be_packaged(
|
||||
_matching_specs(args.specs, args.spec_file),
|
||||
specs,
|
||||
root="package" in args.things_to_install,
|
||||
dependencies="dependencies" in args.things_to_install,
|
||||
)
|
||||
@@ -423,16 +394,21 @@ def preview_fn(args):
|
||||
def check_fn(args):
|
||||
"""check specs against remote binary mirror(s) to see if any need to be rebuilt
|
||||
|
||||
either a single spec from --spec, or else the full set of release specs. this command uses the
|
||||
process exit code to indicate its result, specifically, if the exit code is non-zero, then at
|
||||
least one of the indicated specs needs to be rebuilt
|
||||
this command uses the process exit code to indicate its result, specifically, if the
|
||||
exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
|
||||
"""
|
||||
if args.spec or args.spec_file:
|
||||
specs = [_concrete_spec_from_args(args)]
|
||||
if args.spec_file:
|
||||
tty.warn(
|
||||
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
|
||||
"Use --spec instead."
|
||||
)
|
||||
|
||||
specs = spack.cmd.parse_specs(args.spec or args.spec_file)
|
||||
|
||||
if specs:
|
||||
specs = _matching_specs(specs, specs)
|
||||
else:
|
||||
env = spack.cmd.require_active_env(cmd_name="buildcache")
|
||||
env.concretize()
|
||||
specs = env.all_specs()
|
||||
specs = spack.cmd.require_active_env("buildcache check").all_specs()
|
||||
|
||||
if not specs:
|
||||
tty.msg("No specs provided, exiting.")
|
||||
@@ -462,26 +438,28 @@ def download_fn(args):
|
||||
code indicates that the command failed to download at least one of the required buildcache
|
||||
components
|
||||
"""
|
||||
if not args.spec and not args.spec_file:
|
||||
tty.msg("No specs provided, exiting.")
|
||||
return
|
||||
if args.spec_file:
|
||||
tty.warn(
|
||||
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
|
||||
"Use --spec instead."
|
||||
)
|
||||
|
||||
if not args.path:
|
||||
tty.msg("No download path provided, exiting")
|
||||
return
|
||||
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
|
||||
|
||||
spec = _concrete_spec_from_args(args)
|
||||
result = bindist.download_single_spec(spec, args.path)
|
||||
if len(specs) != 1:
|
||||
tty.die("a single spec argument is required to download from a buildcache")
|
||||
|
||||
if not result:
|
||||
if not bindist.download_single_spec(specs[0], args.path):
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def get_buildcache_name_fn(args):
|
||||
"""get name (prefix) of buildcache entries for this spec"""
|
||||
spec = _concrete_spec_from_args(args)
|
||||
buildcache_name = bindist.tarball_name(spec, "")
|
||||
print("{0}".format(buildcache_name))
|
||||
tty.warn("This command is deprecated and will be removed in Spack 0.22.")
|
||||
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
|
||||
if len(specs) != 1:
|
||||
tty.die("a single spec argument is required to get buildcache name")
|
||||
print(bindist.tarball_name(specs[0], ""))
|
||||
|
||||
|
||||
def save_specfile_fn(args):
|
||||
@@ -491,29 +469,24 @@ def save_specfile_fn(args):
|
||||
successful. if any errors or exceptions are encountered, or if expected command-line arguments
|
||||
are not provided, then the exit code will be non-zero
|
||||
"""
|
||||
if not args.root_spec and not args.root_specfile:
|
||||
tty.msg("No root spec provided, exiting.")
|
||||
sys.exit(1)
|
||||
|
||||
if not args.specs:
|
||||
tty.msg("No dependent specs provided, exiting.")
|
||||
sys.exit(1)
|
||||
|
||||
if not args.specfile_dir:
|
||||
tty.msg("No yaml directory provided, exiting.")
|
||||
sys.exit(1)
|
||||
|
||||
if args.root_specfile:
|
||||
with open(args.root_specfile) as fd:
|
||||
root_spec_as_json = fd.read()
|
||||
spec_format = "yaml" if args.root_specfile.endswith("yaml") else "json"
|
||||
else:
|
||||
root_spec = Spec(args.root_spec)
|
||||
root_spec.concretize()
|
||||
root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
|
||||
spec_format = "json"
|
||||
tty.warn(
|
||||
"The flag `--root-specfile` is deprecated and will be removed in Spack 0.22. "
|
||||
"Use --root-spec instead."
|
||||
)
|
||||
|
||||
specs = spack.cmd.parse_specs(args.root_spec or args.root_specfile)
|
||||
|
||||
if len(specs) != 1:
|
||||
tty.die("a single spec argument is required to save specfile")
|
||||
|
||||
root = specs[0]
|
||||
|
||||
if not root.concrete:
|
||||
root.concretize()
|
||||
|
||||
save_dependency_specfiles(
|
||||
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format
|
||||
root, args.specfile_dir, dependencies=spack.cmd.parse_specs(args.specs)
|
||||
)
|
||||
|
||||
|
||||
|
@@ -4,18 +4,21 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import argparse
|
||||
import re
|
||||
import sys
|
||||
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.lang
|
||||
from llnl.util import tty
|
||||
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.stage
|
||||
import spack.util.crypto
|
||||
from spack.package_base import deprecated_version, preferred_version
|
||||
from spack.cmd.common import arguments
|
||||
from spack.package_base import PackageBase, deprecated_version, preferred_version
|
||||
from spack.util.editor import editor
|
||||
from spack.util.format import get_version_lines
|
||||
from spack.util.naming import valid_fully_qualified_module_name
|
||||
from spack.version import Version
|
||||
|
||||
@@ -31,35 +34,38 @@ def setup_parser(subparser):
|
||||
default=False,
|
||||
help="don't clean up staging area when command completes",
|
||||
)
|
||||
sp = subparser.add_mutually_exclusive_group()
|
||||
sp.add_argument(
|
||||
subparser.add_argument(
|
||||
"-b",
|
||||
"--batch",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="don't ask which versions to checksum",
|
||||
)
|
||||
sp.add_argument(
|
||||
subparser.add_argument(
|
||||
"-l",
|
||||
"--latest",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="checksum the latest available version only",
|
||||
help="checksum the latest available version",
|
||||
)
|
||||
sp.add_argument(
|
||||
subparser.add_argument(
|
||||
"-p",
|
||||
"--preferred",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="checksum the preferred version only",
|
||||
help="checksum the known Spack preferred version",
|
||||
)
|
||||
subparser.add_argument(
|
||||
modes_parser = subparser.add_mutually_exclusive_group()
|
||||
modes_parser.add_argument(
|
||||
"-a",
|
||||
"--add-to-package",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="add new versions to package",
|
||||
)
|
||||
modes_parser.add_argument(
|
||||
"--verify", action="store_true", default=False, help="verify known package checksums"
|
||||
)
|
||||
arguments.add_common_arguments(subparser, ["package"])
|
||||
subparser.add_argument(
|
||||
"versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
|
||||
@@ -77,89 +83,174 @@ def checksum(parser, args):
|
||||
tty.die("`spack checksum` accepts package names, not URLs.")
|
||||
|
||||
# Get the package we're going to generate checksums for
|
||||
pkg_cls = spack.repo.path.get_pkg_class(args.package)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(args.package)
|
||||
pkg = pkg_cls(spack.spec.Spec(args.package))
|
||||
|
||||
# Build a list of versions to checksum
|
||||
versions = [Version(v) for v in args.versions]
|
||||
|
||||
# Define placeholder for remote versions.
|
||||
# This'll help reduce redundant work if we need to check for the existence
|
||||
# of remote versions more than once.
|
||||
remote_versions = None
|
||||
|
||||
# Add latest version if requested
|
||||
if args.latest:
|
||||
remote_versions = pkg.fetch_remote_versions()
|
||||
if len(remote_versions) > 0:
|
||||
latest_version = sorted(remote_versions.keys(), reverse=True)[0]
|
||||
versions.append(latest_version)
|
||||
|
||||
# Add preferred version if requested
|
||||
if args.preferred:
|
||||
versions.append(preferred_version(pkg))
|
||||
|
||||
# Store a dict of the form version -> URL
|
||||
url_dict = {}
|
||||
if not args.versions and args.preferred:
|
||||
versions = [preferred_version(pkg)]
|
||||
else:
|
||||
versions = [Version(v) for v in args.versions]
|
||||
|
||||
if versions:
|
||||
remote_versions = None
|
||||
for version in versions:
|
||||
if deprecated_version(pkg, version):
|
||||
tty.warn("Version {0} is deprecated".format(version))
|
||||
for version in versions:
|
||||
if deprecated_version(pkg, version):
|
||||
tty.warn(f"Version {version} is deprecated")
|
||||
|
||||
url = pkg.find_valid_url_for_version(version)
|
||||
if url is not None:
|
||||
url_dict[version] = url
|
||||
continue
|
||||
# if we get here, it's because no valid url was provided by the package
|
||||
# do expensive fallback to try to recover
|
||||
if remote_versions is None:
|
||||
remote_versions = pkg.fetch_remote_versions()
|
||||
if version in remote_versions:
|
||||
url_dict[version] = remote_versions[version]
|
||||
else:
|
||||
url_dict = pkg.fetch_remote_versions()
|
||||
url = pkg.find_valid_url_for_version(version)
|
||||
if url is not None:
|
||||
url_dict[version] = url
|
||||
continue
|
||||
# if we get here, it's because no valid url was provided by the package
|
||||
# do expensive fallback to try to recover
|
||||
if remote_versions is None:
|
||||
remote_versions = pkg.fetch_remote_versions()
|
||||
if version in remote_versions:
|
||||
url_dict[version] = remote_versions[version]
|
||||
|
||||
if len(versions) <= 0:
|
||||
if remote_versions is None:
|
||||
remote_versions = pkg.fetch_remote_versions()
|
||||
url_dict = remote_versions
|
||||
|
||||
if not url_dict:
|
||||
tty.die("Could not find any remote versions for {0}".format(pkg.name))
|
||||
tty.die(f"Could not find any remote versions for {pkg.name}")
|
||||
|
||||
version_lines = spack.stage.get_checksums_for_versions(
|
||||
# print an empty line to create a new output section block
|
||||
print()
|
||||
|
||||
version_hashes = spack.stage.get_checksums_for_versions(
|
||||
url_dict,
|
||||
pkg.name,
|
||||
keep_stage=args.keep_stage,
|
||||
batch=(args.batch or len(args.versions) > 0 or len(url_dict) == 1),
|
||||
latest=args.latest,
|
||||
batch=(args.batch or len(versions) > 0 or len(url_dict) == 1),
|
||||
fetch_options=pkg.fetch_options,
|
||||
)
|
||||
|
||||
if args.verify:
|
||||
print_checksum_status(pkg, version_hashes)
|
||||
sys.exit(0)
|
||||
|
||||
# convert dict into package.py version statements
|
||||
version_lines = get_version_lines(version_hashes, url_dict)
|
||||
print()
|
||||
print(version_lines)
|
||||
print()
|
||||
|
||||
if args.add_to_package:
|
||||
filename = spack.repo.path.filename_for_package_name(pkg.name)
|
||||
# Make sure we also have a newline after the last version
|
||||
versions = [v + "\n" for v in version_lines.splitlines()]
|
||||
versions.append("\n")
|
||||
# We need to insert the versions in reversed order
|
||||
versions.reverse()
|
||||
versions.append(" # FIXME: Added by `spack checksum`\n")
|
||||
version_line = None
|
||||
add_versions_to_package(pkg, version_lines)
|
||||
|
||||
with open(filename, "r") as f:
|
||||
lines = f.readlines()
|
||||
for i in range(len(lines)):
|
||||
# Black is drunk, so this is what it looks like for now
|
||||
# See https://github.com/psf/black/issues/2156 for more information
|
||||
if lines[i].startswith(" # FIXME: Added by `spack checksum`") or lines[
|
||||
i
|
||||
].startswith(" version("):
|
||||
version_line = i
|
||||
break
|
||||
|
||||
if version_line is not None:
|
||||
for v in versions:
|
||||
lines.insert(version_line, v)
|
||||
def print_checksum_status(pkg: PackageBase, version_hashes: dict):
|
||||
"""
|
||||
Verify checksums present in version_hashes against those present
|
||||
in the package's instructions.
|
||||
|
||||
with open(filename, "w") as f:
|
||||
f.writelines(lines)
|
||||
Args:
|
||||
pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
|
||||
version_hashes (dict): A dictionary of the form: version -> checksum.
|
||||
|
||||
msg = "opening editor to verify"
|
||||
"""
|
||||
results = []
|
||||
num_verified = 0
|
||||
failed = False
|
||||
|
||||
if not sys.stdout.isatty():
|
||||
msg = "please verify"
|
||||
max_len = max(len(str(v)) for v in version_hashes)
|
||||
num_total = len(version_hashes)
|
||||
|
||||
tty.info(
|
||||
"Added {0} new versions to {1}, "
|
||||
"{2}.".format(len(versions) - 2, args.package, msg)
|
||||
)
|
||||
for version, sha in version_hashes.items():
|
||||
if version not in pkg.versions:
|
||||
msg = "No previous checksum"
|
||||
status = "-"
|
||||
|
||||
elif sha == pkg.versions[version]["sha256"]:
|
||||
msg = "Correct"
|
||||
status = "="
|
||||
num_verified += 1
|
||||
|
||||
if sys.stdout.isatty():
|
||||
editor(filename)
|
||||
else:
|
||||
tty.warn("Could not add new versions to {0}.".format(args.package))
|
||||
msg = sha
|
||||
status = "x"
|
||||
failed = True
|
||||
|
||||
results.append("{0:{1}} {2} {3}".format(str(version), max_len, f"[{status}]", msg))
|
||||
|
||||
# Display table of checksum results.
|
||||
tty.msg(f"Verified {num_verified} of {num_total}", "", *llnl.util.lang.elide_list(results), "")
|
||||
|
||||
# Terminate at the end of function to prevent additional output.
|
||||
if failed:
|
||||
print()
|
||||
tty.die("Invalid checksums found.")
|
||||
|
||||
|
||||
def add_versions_to_package(pkg: PackageBase, version_lines: str):
|
||||
"""
|
||||
Add checksummed versions to a package's instructions and open a user's
|
||||
editor so they may double check the work of the function.
|
||||
|
||||
Args:
|
||||
pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
|
||||
version_lines (str): A string of rendered version lines.
|
||||
|
||||
"""
|
||||
# Get filename and path for package
|
||||
filename = spack.repo.PATH.filename_for_package_name(pkg.name)
|
||||
num_versions_added = 0
|
||||
|
||||
version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
|
||||
version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')
|
||||
|
||||
# Split rendered version lines into tuple of (version, version_line)
|
||||
# We reverse sort here to make sure the versions match the version_lines
|
||||
new_versions = []
|
||||
for ver_line in version_lines.split("\n"):
|
||||
match = version_re.match(ver_line)
|
||||
if match:
|
||||
new_versions.append((Version(match.group(1)), ver_line))
|
||||
|
||||
with open(filename, "r+") as f:
|
||||
contents = f.read()
|
||||
split_contents = version_statement_re.split(contents)
|
||||
|
||||
for i, subsection in enumerate(split_contents):
|
||||
# If there are no more versions to add we should exit
|
||||
if len(new_versions) <= 0:
|
||||
break
|
||||
|
||||
# Check if the section contains a version
|
||||
contents_version = version_re.match(subsection)
|
||||
if contents_version is not None:
|
||||
parsed_version = Version(contents_version.group(1))
|
||||
|
||||
if parsed_version < new_versions[0][0]:
|
||||
split_contents[i:i] = [new_versions.pop(0)[1], " # FIX ME", "\n"]
|
||||
num_versions_added += 1
|
||||
|
||||
elif parsed_version == new_versions[0][0]:
|
||||
new_versions.pop(0)
|
||||
|
||||
# Seek back to the start of the file so we can rewrite the file contents.
|
||||
f.seek(0)
|
||||
f.writelines("".join(split_contents))
|
||||
|
||||
tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
|
||||
tty.msg(f"Open {filename} to review the additions.")
|
||||
|
||||
if sys.stdout.isatty():
|
||||
editor(filename)
|
||||
|
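To make the insertion logic in add_versions_to_package above concrete, here is a tiny self-contained sketch of what the regex split does (toy input; the real function operates on the package.py file on disk, and the regex is copied from the hunk above):

import re

version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
contents = '    version("1.1", sha256="aaa")\n    version("1.0", sha256="bbb")\n'
# split() with a capturing group keeps each version() statement as its own
# list item, so newly checksummed version() lines can be spliced in just
# before the first older entry.
print(version_statement_re.split(contents))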
@@ -156,11 +156,27 @@ def setup_parser(subparser):
|
||||
help=spack.cmd.first_line(ci_reproduce.__doc__),
|
||||
)
|
||||
reproduce.add_argument("job_url", help="URL of job artifacts bundle")
|
||||
reproduce.add_argument(
|
||||
"--runtime",
|
||||
help="Container runtime to use.",
|
||||
default="docker",
|
||||
choices=["docker", "podman"],
|
||||
)
|
||||
reproduce.add_argument(
|
||||
"--working-dir",
|
||||
help="where to unpack artifacts",
|
||||
default=os.path.join(os.getcwd(), "ci_reproduction"),
|
||||
)
|
||||
reproduce.add_argument(
|
||||
"-s", "--autostart", help="Run docker reproducer automatically", action="store_true"
|
||||
)
|
||||
gpg_group = reproduce.add_mutually_exclusive_group(required=False)
|
||||
gpg_group.add_argument(
|
||||
"--gpg-file", help="Path to public GPG key for validating binary cache installs"
|
||||
)
|
||||
gpg_group.add_argument(
|
||||
"--gpg-url", help="URL to public GPG key for validating binary cache installs"
|
||||
)
|
||||
|
||||
reproduce.set_defaults(func=ci_reproduce)
|
||||
|
||||
@@ -273,6 +289,10 @@ def ci_rebuild(args):
|
||||
rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
|
||||
require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")
|
||||
|
||||
# If signing key was provided via "SPACK_SIGNING_KEY", then try to import it.
|
||||
if signing_key:
|
||||
spack_ci.import_signing_key(signing_key)
|
||||
|
||||
# Fail early if signing is required but we don't have a signing key
|
||||
sign_binaries = require_signing is not None and require_signing.lower() == "true"
|
||||
if sign_binaries and not spack_ci.can_sign_binaries():
|
||||
@@ -402,11 +422,6 @@ def ci_rebuild(args):
|
||||
dst_file = os.path.join(repro_dir, file_name)
|
||||
shutil.copyfile(src_file, dst_file)
|
||||
|
||||
# If signing key was provided via "SPACK_SIGNING_KEY", then try to
|
||||
# import it.
|
||||
if signing_key:
|
||||
spack_ci.import_signing_key(signing_key)
|
||||
|
||||
# Write this job's spec json into the reproduction directory, and it will
|
||||
# also be used in the generated "spack install" command to install the spec
|
||||
tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
|
||||
@@ -663,7 +678,7 @@ def ci_rebuild(args):
|
||||
input_spec=job_spec,
|
||||
buildcache_mirror_url=buildcache_mirror_url,
|
||||
pipeline_mirror_url=pipeline_mirror_url,
|
||||
sign_binaries=sign_binaries,
|
||||
sign_binaries=spack_ci.can_sign_binaries(),
|
||||
):
|
||||
msg = tty.msg if result.success else tty.warn
|
||||
msg(
|
||||
@@ -707,7 +722,7 @@ def ci_rebuild(args):
|
||||
|
||||
\033[34mTo reproduce this build locally, run:
|
||||
|
||||
spack ci reproduce-build {0} [--working-dir <dir>]
|
||||
spack ci reproduce-build {0} [--working-dir <dir>] [--autostart]
|
||||
|
||||
If this project does not have public pipelines, you will need to first:
|
||||
|
||||
@@ -733,8 +748,18 @@ def ci_reproduce(args):
|
||||
"""
|
||||
job_url = args.job_url
|
||||
work_dir = args.working_dir
|
||||
autostart = args.autostart
|
||||
runtime = args.runtime
|
||||
|
||||
return spack_ci.reproduce_ci_job(job_url, work_dir)
|
||||
# Allow passing GPG key for reproducing protected CI jobs
|
||||
if args.gpg_file:
|
||||
gpg_key_url = url_util.path_to_file_url(args.gpg_file)
|
||||
elif args.gpg_url:
|
||||
gpg_key_url = args.gpg_url
|
||||
else:
|
||||
gpg_key_url = None
|
||||
|
||||
return spack_ci.reproduce_ci_job(job_url, work_dir, autostart, gpg_key_url, runtime)
|
||||
|
||||
|
||||
def ci(parser, args):
|
||||
|
@@ -17,6 +17,7 @@
|
||||
import spack.config
|
||||
import spack.repo
|
||||
import spack.stage
|
||||
import spack.store
|
||||
import spack.util.path
|
||||
from spack.paths import lib_path, var_path
|
||||
|
||||
@@ -114,18 +115,14 @@ def clean(parser, args):
|
||||
if args.stage:
|
||||
tty.msg("Removing all temporary build stages")
|
||||
spack.stage.purge()
|
||||
# Temp directory where buildcaches are extracted
|
||||
extract_tmp = os.path.join(spack.store.STORE.layout.root, ".tmp")
|
||||
if os.path.exists(extract_tmp):
|
||||
tty.debug("Removing {0}".format(extract_tmp))
|
||||
shutil.rmtree(extract_tmp)
|
||||
|
||||
if args.downloads:
|
||||
tty.msg("Removing cached downloads")
|
||||
spack.caches.fetch_cache.destroy()
|
||||
|
||||
if args.failures:
|
||||
tty.msg("Removing install failure marks")
|
||||
spack.installer.clear_failures()
|
||||
spack.store.STORE.failure_tracker.clear_all()
|
||||
|
||||
if args.misc_cache:
|
||||
tty.msg("Removing cached information on repositories")
|
||||
|
@@ -36,13 +36,13 @@
|
||||
"bash": {
|
||||
"aliases": True,
|
||||
"format": "bash",
|
||||
"header": os.path.join(spack.paths.share_path, "bash", "spack-completion.in"),
|
||||
"header": os.path.join(spack.paths.share_path, "bash", "spack-completion.bash"),
|
||||
"update": os.path.join(spack.paths.share_path, "spack-completion.bash"),
|
||||
},
|
||||
"fish": {
|
||||
"aliases": True,
|
||||
"format": "fish",
|
||||
"header": os.path.join(spack.paths.share_path, "fish", "spack-completion.in"),
|
||||
"header": os.path.join(spack.paths.share_path, "fish", "spack-completion.fish"),
|
||||
"update": os.path.join(spack.paths.share_path, "spack-completion.fish"),
|
||||
},
|
||||
}
|
||||
|
@@ -331,6 +331,17 @@ def tags():
)


@arg
def namespaces():
return Args(
"-N",
"--namespaces",
action="store_true",
default=False,
help="show fully qualified package names",
)


@arg
def jobs():
return Args(
@@ -17,6 +17,7 @@
|
||||
from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
|
||||
from spack.util.editor import editor
|
||||
from spack.util.executable import ProcessError, which
|
||||
from spack.util.format import get_version_lines
|
||||
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
|
||||
|
||||
description = "create a new package file"
|
||||
@@ -832,13 +833,15 @@ def get_versions(args, name):
|
||||
version = parse_version(args.url)
|
||||
url_dict = {version: args.url}
|
||||
|
||||
versions = spack.stage.get_checksums_for_versions(
|
||||
version_hashes = spack.stage.get_checksums_for_versions(
|
||||
url_dict,
|
||||
name,
|
||||
first_stage_function=guesser,
|
||||
keep_stage=args.keep_stage,
|
||||
batch=(args.batch or len(url_dict) == 1),
|
||||
)
|
||||
|
||||
versions = get_version_lines(version_hashes, url_dict)
|
||||
else:
|
||||
versions = unhashed_versions
|
||||
|
||||
@@ -912,11 +915,11 @@ def get_repository(args, name):
|
||||
)
|
||||
else:
|
||||
if spec.namespace:
|
||||
repo = spack.repo.path.get_repo(spec.namespace, None)
|
||||
repo = spack.repo.PATH.get_repo(spec.namespace, None)
|
||||
if not repo:
|
||||
tty.die("Unknown namespace: '{0}'".format(spec.namespace))
|
||||
else:
|
||||
repo = spack.repo.path.first_repo()
|
||||
repo = spack.repo.PATH.first_repo()
|
||||
|
||||
# Set the namespace on the spec if it's not there already
|
||||
if not spec.namespace:
|
||||
|
@@ -47,14 +47,14 @@ def inverted_dependencies():
|
||||
actual dependents.
|
||||
"""
|
||||
dag = {}
|
||||
for pkg_cls in spack.repo.path.all_package_classes():
|
||||
for pkg_cls in spack.repo.PATH.all_package_classes():
|
||||
dag.setdefault(pkg_cls.name, set())
|
||||
for dep in pkg_cls.dependencies:
|
||||
deps = [dep]
|
||||
|
||||
# expand virtuals if necessary
|
||||
if spack.repo.path.is_virtual(dep):
|
||||
deps += [s.name for s in spack.repo.path.providers_for(dep)]
|
||||
if spack.repo.PATH.is_virtual(dep):
|
||||
deps += [s.name for s in spack.repo.PATH.providers_for(dep)]
|
||||
|
||||
for d in deps:
|
||||
dag.setdefault(d, set()).add(pkg_cls.name)
|
||||
|
@@ -98,7 +98,7 @@ def dev_build(self, args):
|
||||
tty.die("spack dev-build only takes one spec.")
|
||||
|
||||
spec = specs[0]
|
||||
if not spack.repo.path.exists(spec.name):
|
||||
if not spack.repo.PATH.exists(spec.name):
|
||||
tty.die(
|
||||
"No package for '{0}' was found.".format(spec.name),
|
||||
" Use `spack create` to create a new package",
|
||||
|
@@ -31,9 +31,9 @@ def edit_package(name, repo_path, namespace):
if repo_path:
repo = spack.repo.Repo(repo_path)
elif namespace:
repo = spack.repo.path.get_repo(namespace)
repo = spack.repo.PATH.get_repo(namespace)
else:
repo = spack.repo.path
repo = spack.repo.PATH
path = repo.filename_for_package_name(name)

spec = Spec(name)
@@ -58,7 +58,7 @@ def extensions(parser, args):
|
||||
|
||||
extendable_pkgs = []
|
||||
for name in spack.repo.all_package_names():
|
||||
pkg_cls = spack.repo.path.get_pkg_class(name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(name)
|
||||
if pkg_cls.extendable:
|
||||
extendable_pkgs.append(name)
|
||||
|
||||
@@ -81,7 +81,7 @@ def extensions(parser, args):
|
||||
|
||||
if args.show in ("packages", "all"):
|
||||
# List package names of extensions
|
||||
extensions = spack.repo.path.extensions_for(spec)
|
||||
extensions = spack.repo.PATH.extensions_for(spec)
|
||||
if not extensions:
|
||||
tty.msg("%s has no extensions." % spec.cshort_spec)
|
||||
else:
|
||||
|
@@ -133,9 +133,9 @@ def external_find(args):
|
||||
|
||||
# Add the packages that have been required explicitly
|
||||
if args.packages:
|
||||
pkg_cls_to_check = [spack.repo.path.get_pkg_class(pkg) for pkg in args.packages]
|
||||
pkg_cls_to_check = [spack.repo.PATH.get_pkg_class(pkg) for pkg in args.packages]
|
||||
if args.tags:
|
||||
allowed = set(spack.repo.path.packages_with_tags(*args.tags))
|
||||
allowed = set(spack.repo.PATH.packages_with_tags(*args.tags))
|
||||
pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]
|
||||
|
||||
if args.tags and not pkg_cls_to_check:
|
||||
@@ -144,15 +144,15 @@ def external_find(args):
|
||||
# Since tags are cached it's much faster to construct what we need
|
||||
# to search directly, rather than filtering after the fact
|
||||
pkg_cls_to_check = [
|
||||
spack.repo.path.get_pkg_class(pkg_name)
|
||||
spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
for tag in args.tags
|
||||
for pkg_name in spack.repo.path.packages_with_tags(tag)
|
||||
for pkg_name in spack.repo.PATH.packages_with_tags(tag)
|
||||
]
|
||||
pkg_cls_to_check = list(set(pkg_cls_to_check))
|
||||
|
||||
# If the list of packages is empty, search for every possible package
|
||||
if not args.tags and not pkg_cls_to_check:
|
||||
pkg_cls_to_check = list(spack.repo.path.all_package_classes())
|
||||
pkg_cls_to_check = list(spack.repo.PATH.all_package_classes())
|
||||
|
||||
# If the user specified any packages to exclude from external find, add them here
|
||||
if args.exclude:
|
||||
@@ -239,7 +239,7 @@ def _collect_and_consume_cray_manifest_files(
|
||||
|
||||
def external_list(args):
|
||||
# Trigger a read of all packages, might take a long time.
|
||||
list(spack.repo.path.all_package_classes())
|
||||
list(spack.repo.PATH.all_package_classes())
|
||||
# Print all the detectable packages
|
||||
tty.msg("Detectable packages per repository")
|
||||
for namespace, pkgs in sorted(spack.package_base.detectable_packages.items()):
|
||||
|
@@ -67,7 +67,7 @@ def setup_parser(subparser):
|
||||
help="do not group specs by arch/compiler",
|
||||
)
|
||||
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long", "tags"])
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long", "tags", "namespaces"])
|
||||
|
||||
subparser.add_argument(
|
||||
"-c",
|
||||
@@ -140,9 +140,6 @@ def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
"--only-deprecated", action="store_true", help="show only deprecated packages"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-N", "--namespace", action="store_true", help="show fully qualified package names"
|
||||
)
|
||||
|
||||
subparser.add_argument("--start-date", help="earliest date of installation [YYYY-MM-DD]")
|
||||
subparser.add_argument("--end-date", help="latest date of installation [YYYY-MM-DD]")
|
||||
@@ -230,7 +227,7 @@ def display_env(env, args, decorator, results):
|
||||
env.user_specs,
|
||||
root_args,
|
||||
decorator=lambda s, f: color.colorize("@*{%s}" % f),
|
||||
namespace=True,
|
||||
namespaces=True,
|
||||
show_flags=True,
|
||||
show_full_compiler=True,
|
||||
variants=True,
|
||||
@@ -271,7 +268,7 @@ def find(parser, args):
|
||||
|
||||
# If tags have been specified on the command line, filter by tags
|
||||
if args.tags:
|
||||
packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
|
||||
packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
|
||||
results = [x for x in results if x.name in packages_with_tags]
|
||||
|
||||
if args.loaded:
|
||||
|
@@ -349,7 +349,7 @@ def print_virtuals(pkg):
|
||||
|
||||
def info(parser, args):
|
||||
spec = spack.spec.Spec(args.package)
|
||||
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
pkg = pkg_cls(spec)
|
||||
|
||||
# Output core package information
|
||||
|
@@ -107,7 +107,7 @@ def match(p, f):
|
||||
if f.match(p):
|
||||
return True
|
||||
|
||||
pkg_cls = spack.repo.path.get_pkg_class(p)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(p)
|
||||
if pkg_cls.__doc__:
|
||||
return f.match(pkg_cls.__doc__)
|
||||
return False
|
||||
@@ -159,7 +159,7 @@ def get_dependencies(pkg):
|
||||
@formatter
|
||||
def version_json(pkg_names, out):
|
||||
"""Print all packages with their latest versions."""
|
||||
pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
|
||||
pkg_classes = [spack.repo.PATH.get_pkg_class(name) for name in pkg_names]
|
||||
|
||||
out.write("[\n")
|
||||
|
||||
@@ -201,7 +201,7 @@ def html(pkg_names, out):
|
||||
"""
|
||||
|
||||
# Read in all packages
|
||||
pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
|
||||
pkg_classes = [spack.repo.PATH.get_pkg_class(name) for name in pkg_names]
|
||||
|
||||
# Start at 2 because the title of the page from Sphinx is id1.
|
||||
span_id = 2
|
||||
@@ -313,13 +313,13 @@ def list(parser, args):
|
||||
|
||||
# If tags have been specified on the command line, filter by tags
|
||||
if args.tags:
|
||||
packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
|
||||
packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
|
||||
sorted_packages = [p for p in sorted_packages if p in packages_with_tags]
|
||||
|
||||
if args.update:
|
||||
# change output stream if user asked for update
|
||||
if os.path.exists(args.update):
|
||||
if os.path.getmtime(args.update) > spack.repo.path.last_mtime():
|
||||
if os.path.getmtime(args.update) > spack.repo.PATH.last_mtime():
|
||||
tty.msg("File is up to date: %s" % args.update)
|
||||
return
|
||||
|
||||
|
@@ -109,7 +109,7 @@ def location(parser, args):
|
||||
return
|
||||
|
||||
if args.packages:
|
||||
print(spack.repo.path.first_repo().root)
|
||||
print(spack.repo.PATH.first_repo().root)
|
||||
return
|
||||
|
||||
if args.stages:
|
||||
@@ -135,7 +135,7 @@ def location(parser, args):
|
||||
|
||||
# Package dir just needs the spec name
|
||||
if args.package_dir:
|
||||
print(spack.repo.path.dirname_for_package_name(spec.name))
|
||||
print(spack.repo.PATH.dirname_for_package_name(spec.name))
|
||||
return
|
||||
|
||||
# Either concretize or filter from already concretized environment
|
||||
|
@@ -54,11 +54,11 @@ def setup_parser(subparser):
|
||||
|
||||
def packages_to_maintainers(package_names=None):
|
||||
if not package_names:
|
||||
package_names = spack.repo.path.all_package_names()
|
||||
package_names = spack.repo.PATH.all_package_names()
|
||||
|
||||
pkg_to_users = defaultdict(lambda: set())
|
||||
for name in package_names:
|
||||
cls = spack.repo.path.get_pkg_class(name)
|
||||
cls = spack.repo.PATH.get_pkg_class(name)
|
||||
for user in cls.maintainers:
|
||||
pkg_to_users[name].add(user)
|
||||
|
||||
@@ -67,8 +67,8 @@ def packages_to_maintainers(package_names=None):
|
||||
|
||||
def maintainers_to_packages(users=None):
|
||||
user_to_pkgs = defaultdict(lambda: [])
|
||||
for name in spack.repo.path.all_package_names():
|
||||
cls = spack.repo.path.get_pkg_class(name)
|
||||
for name in spack.repo.PATH.all_package_names():
|
||||
cls = spack.repo.PATH.get_pkg_class(name)
|
||||
for user in cls.maintainers:
|
||||
lower_users = [u.lower() for u in users]
|
||||
if not users or user.lower() in lower_users:
|
||||
@@ -80,8 +80,8 @@ def maintainers_to_packages(users=None):
|
||||
def maintained_packages():
|
||||
maintained = []
|
||||
unmaintained = []
|
||||
for name in spack.repo.path.all_package_names():
|
||||
cls = spack.repo.path.get_pkg_class(name)
|
||||
for name in spack.repo.PATH.all_package_names():
|
||||
cls = spack.repo.PATH.get_pkg_class(name)
|
||||
if cls.maintainers:
|
||||
maintained.append(name)
|
||||
else:
|
||||
|
@@ -474,7 +474,7 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
|
||||
path, skip_unstable_versions=skip_unstable_versions
|
||||
)
|
||||
for candidate in mirror_specs:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(candidate.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(candidate.name)
|
||||
pkg_obj = pkg_cls(spack.spec.Spec(candidate))
|
||||
mirror_stats.next_spec(pkg_obj.spec)
|
||||
spack.mirror.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
|
||||
|
@@ -309,7 +309,7 @@ def refresh(module_type, specs, args):
|
||||
|
||||
# Skip unknown packages.
|
||||
writers = [
|
||||
cls(spec, args.module_set_name) for spec in specs if spack.repo.path.exists(spec.name)
|
||||
cls(spec, args.module_set_name) for spec in specs if spack.repo.PATH.exists(spec.name)
|
||||
]
|
||||
|
||||
# Filter excluded packages early
|
||||
@@ -321,12 +321,13 @@ def refresh(module_type, specs, args):
|
||||
file2writer[item.layout.filename].append(item)
|
||||
|
||||
if len(file2writer) != len(writers):
|
||||
spec_fmt_str = "{name}@={version}%{compiler}/{hash:7} {variants} arch={arch}"
|
||||
message = "Name clashes detected in module files:\n"
|
||||
for filename, writer_list in file2writer.items():
|
||||
if len(writer_list) > 1:
|
||||
message += "\nfile: {0}\n".format(filename)
|
||||
for x in writer_list:
|
||||
message += "spec: {0}\n".format(x.spec.format())
|
||||
message += "spec: {0}\n".format(x.spec.format(spec_fmt_str))
|
||||
tty.error(message)
|
||||
tty.error("Operation aborted")
|
||||
raise SystemExit(1)
|
||||
@@ -376,7 +377,7 @@ def refresh(module_type, specs, args):
|
||||
def modules_cmd(parser, args, module_type, callbacks=callbacks):
|
||||
# Qualifiers to be used when querying the db for specs
|
||||
constraint_qualifiers = {
|
||||
"refresh": {"installed": True, "known": lambda x: not spack.repo.path.exists(x)}
|
||||
"refresh": {"installed": True, "known": lambda x: not spack.repo.PATH.exists(x)}
|
||||
}
|
||||
query_args = constraint_qualifiers.get(args.subparser_name, {})
|
||||
|
||||
|
@@ -143,7 +143,7 @@ def pkg_source(args):
|
||||
tty.die("spack pkg source requires exactly one spec")
|
||||
|
||||
spec = specs[0]
|
||||
filename = spack.repo.path.filename_for_package_name(spec.name)
|
||||
filename = spack.repo.PATH.filename_for_package_name(spec.name)
|
||||
|
||||
# regular source dump -- just get the package and print its contents
|
||||
if args.canonical:
|
||||
@@ -184,7 +184,7 @@ def pkg_grep(args, unknown_args):
|
||||
grouper = lambda e: e[0] // 500
|
||||
|
||||
# set up iterator and save the first group to ensure we don't end up with a group of size 1
|
||||
groups = itertools.groupby(enumerate(spack.repo.path.all_package_paths()), grouper)
|
||||
groups = itertools.groupby(enumerate(spack.repo.PATH.all_package_paths()), grouper)
|
||||
if not groups:
|
||||
return 0 # no packages to search
|
||||
|
||||
|
@@ -24,7 +24,7 @@ def setup_parser(subparser):


def providers(parser, args):
valid_virtuals = sorted(spack.repo.path.provider_index.providers.keys())
valid_virtuals = sorted(spack.repo.PATH.provider_index.providers.keys())

buffer = io.StringIO()
isatty = sys.stdout.isatty()

@@ -53,5 +53,5 @@ def providers(parser, args):
for spec in specs:
if sys.stdout.isatty():
print("{0}:".format(spec))
spack.cmd.display_specs(sorted(spack.repo.path.providers_for(spec)))
spack.cmd.display_specs(sorted(spack.repo.PATH.providers_for(spec)))
print("")
@@ -29,7 +29,7 @@ def setup_parser(subparser):
|
||||
|
||||
def _show_patch(sha256):
|
||||
"""Show a record from the patch index."""
|
||||
patches = spack.repo.path.patch_index.index
|
||||
patches = spack.repo.PATH.patch_index.index
|
||||
data = patches.get(sha256)
|
||||
|
||||
if not data:
|
||||
@@ -47,7 +47,7 @@ def _show_patch(sha256):
|
||||
owner = rec["owner"]
|
||||
|
||||
if "relative_path" in rec:
|
||||
pkg_dir = spack.repo.path.get_pkg_class(owner).package_dir
|
||||
pkg_dir = spack.repo.PATH.get_pkg_class(owner).package_dir
|
||||
path = os.path.join(pkg_dir, rec["relative_path"])
|
||||
print(" path: %s" % path)
|
||||
else:
|
||||
@@ -60,7 +60,7 @@ def _show_patch(sha256):
|
||||
|
||||
def resource_list(args):
|
||||
"""list all resources known to spack (currently just patches)"""
|
||||
patches = spack.repo.path.patch_index.index
|
||||
patches = spack.repo.PATH.patch_index.index
|
||||
for sha256 in patches:
|
||||
if args.only_hashes:
|
||||
print(sha256)
|
||||
|
@@ -42,7 +42,7 @@ def setup_parser(subparser):
|
||||
)
|
||||
|
||||
# Below are arguments w.r.t. spec display (like spack spec)
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long"])
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long", "namespaces"])
|
||||
|
||||
install_status_group = subparser.add_mutually_exclusive_group()
|
||||
arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
|
||||
@@ -73,13 +73,6 @@ def setup_parser(subparser):
|
||||
choices=["nodes", "edges", "paths"],
|
||||
help="how extensively to traverse the DAG (default: nodes)",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-N",
|
||||
"--namespaces",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="show fully qualified package names",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-t", "--types", action="store_true", default=False, help="show dependency types"
|
||||
)
|
||||
|
@@ -29,7 +29,7 @@ def setup_parser(subparser):
|
||||
for further documentation regarding the spec syntax, see:
|
||||
spack help --spec
|
||||
"""
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long"])
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long", "namespaces"])
|
||||
|
||||
install_status_group = subparser.add_mutually_exclusive_group()
|
||||
arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
|
||||
@@ -67,13 +67,6 @@ def setup_parser(subparser):
|
||||
choices=["nodes", "edges", "paths"],
|
||||
help="how extensively to traverse the DAG (default: nodes)",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-N",
|
||||
"--namespaces",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="show fully qualified package names",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-t", "--types", action="store_true", default=False, help="show dependency types"
|
||||
)
|
||||
|
@@ -68,7 +68,7 @@ def tags(parser, args):
|
||||
return
|
||||
|
||||
# unique list of available tags
|
||||
available_tags = sorted(spack.repo.path.tag_index.keys())
|
||||
available_tags = sorted(spack.repo.PATH.tag_index.keys())
|
||||
if not available_tags:
|
||||
tty.msg("No tagged packages")
|
||||
return
|
||||
|
@@ -228,7 +228,7 @@ def create_reporter(args, specs_to_test, test_suite):
|
||||
|
||||
def test_list(args):
|
||||
"""list installed packages with available tests"""
|
||||
tagged = set(spack.repo.path.packages_with_tags(*args.tag)) if args.tag else set()
|
||||
tagged = set(spack.repo.PATH.packages_with_tags(*args.tag)) if args.tag else set()
|
||||
|
||||
def has_test_and_tags(pkg_class):
|
||||
tests = spack.install_test.test_functions(pkg_class)
|
||||
@@ -237,7 +237,7 @@ def has_test_and_tags(pkg_class):
|
||||
if args.list_all:
|
||||
report_packages = [
|
||||
pkg_class.name
|
||||
for pkg_class in spack.repo.path.all_package_classes()
|
||||
for pkg_class in spack.repo.PATH.all_package_classes()
|
||||
if has_test_and_tags(pkg_class)
|
||||
]
|
||||
|
||||
|
@@ -155,7 +155,7 @@ def url_list(args):
|
||||
urls = set()
|
||||
|
||||
# Gather set of URLs from all packages
|
||||
for pkg_cls in spack.repo.path.all_package_classes():
|
||||
for pkg_cls in spack.repo.PATH.all_package_classes():
|
||||
url = getattr(pkg_cls, "url", None)
|
||||
urls = url_list_parsing(args, urls, url, pkg_cls)
|
||||
|
||||
@@ -192,7 +192,7 @@ def url_summary(args):
|
||||
tty.msg("Generating a summary of URL parsing in Spack...")
|
||||
|
||||
# Loop through all packages
|
||||
for pkg_cls in spack.repo.path.all_package_classes():
|
||||
for pkg_cls in spack.repo.PATH.all_package_classes():
|
||||
urls = set()
|
||||
pkg = pkg_cls(spack.spec.Spec(pkg_cls.name))
|
||||
|
||||
@@ -336,7 +336,7 @@ def add(self, pkg_name, fetcher):
|
||||
version_stats = UrlStats()
|
||||
resource_stats = UrlStats()
|
||||
|
||||
for pkg_cls in spack.repo.path.all_package_classes():
|
||||
for pkg_cls in spack.repo.PATH.all_package_classes():
|
||||
npkgs += 1
|
||||
|
||||
for v in pkg_cls.versions:
|
||||
|
@@ -45,7 +45,7 @@ def setup_parser(subparser):
|
||||
|
||||
def versions(parser, args):
|
||||
spec = spack.spec.Spec(args.package)
|
||||
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
pkg = pkg_cls(spec)
|
||||
|
||||
safe_versions = pkg.versions
|
||||
|
@@ -2,13 +2,9 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os.path
|
||||
import re
|
||||
import shutil
|
||||
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.symlink import symlink
|
||||
|
||||
import spack.compiler
|
||||
import spack.compilers.clang
|
||||
@@ -119,108 +115,3 @@ def c23_flag(self):
|
||||
self, "the C23 standard", "c23_flag", "< 11.0.3"
|
||||
)
|
||||
return "-std=c2x"
|
||||
|
||||
def setup_custom_environment(self, pkg, env):
|
||||
"""Set the DEVELOPER_DIR environment for the Xcode toolchain.
|
||||
|
||||
On macOS, not all buildsystems support querying CC and CXX for the
|
||||
compilers to use and instead query the Xcode toolchain for what
|
||||
compiler to run. This side-steps the spack wrappers. In order to inject
|
||||
spack into this setup, we need to copy (a subset of) Xcode.app and
|
||||
replace the compiler executables with symlinks to the spack wrapper.
|
||||
Currently, the stage is used to store the Xcode.app copies. We then set
|
||||
the 'DEVELOPER_DIR' environment variables to cause the xcrun and
|
||||
related tools to use this Xcode.app.
|
||||
"""
|
||||
super().setup_custom_environment(pkg, env)
|
||||
|
||||
if not pkg.use_xcode:
|
||||
# if we do it for all packages, we get into big troubles with MPI:
|
||||
# filter_compilers(self) will use mockup XCode compilers on macOS
|
||||
# with Clang. Those point to Spack's compiler wrappers and
|
||||
# consequently render MPI non-functional outside of Spack.
|
||||
return
|
||||
|
||||
# Use special XCode versions of compiler wrappers when using XCode
|
||||
# Overwrites build_environment's setting of SPACK_CC and SPACK_CXX
|
||||
xcrun = spack.util.executable.Executable("xcrun")
|
||||
xcode_clang = xcrun("-f", "clang", output=str).strip()
|
||||
xcode_clangpp = xcrun("-f", "clang++", output=str).strip()
|
||||
env.set("SPACK_CC", xcode_clang, force=True)
|
||||
env.set("SPACK_CXX", xcode_clangpp, force=True)
|
||||
|
||||
xcode_select = spack.util.executable.Executable("xcode-select")
|
||||
|
||||
# Get the path of the active developer directory
|
||||
real_root = xcode_select("--print-path", output=str).strip()
|
||||
|
||||
# The path name can be used to determine whether the full Xcode suite
|
||||
# or just the command-line tools are installed
|
||||
if real_root.endswith("Developer"):
|
||||
# The full Xcode suite is installed
|
||||
pass
|
||||
else:
|
||||
if real_root.endswith("CommandLineTools"):
|
||||
# Only the command-line tools are installed
|
||||
msg = "It appears that you have the Xcode command-line tools "
|
||||
msg += "but not the full Xcode suite installed.\n"
|
||||
|
||||
else:
|
||||
# Xcode is not installed
|
||||
msg = "It appears that you do not have Xcode installed.\n"
|
||||
|
||||
msg += "In order to use Spack to build the requested application, "
|
||||
msg += "you need the full Xcode suite. It can be installed "
|
||||
msg += "through the App Store. Make sure you launch the "
|
||||
msg += "application and accept the license agreement.\n"
|
||||
|
||||
raise OSError(msg)
|
||||
|
||||
real_root = os.path.dirname(os.path.dirname(real_root))
|
||||
developer_root = os.path.join(
|
||||
spack.stage.get_stage_root(), "xcode-select", self.name, str(self.version)
|
||||
)
|
||||
xcode_link = os.path.join(developer_root, "Xcode.app")
|
||||
|
||||
if not os.path.exists(developer_root):
|
||||
tty.warn(
|
||||
"Copying Xcode from %s to %s in order to add spack "
|
||||
"wrappers to it. Please do not interrupt." % (real_root, developer_root)
|
||||
)
|
||||
|
||||
# We need to make a new Xcode.app instance, but with symlinks to
|
||||
# the spack wrappers for the compilers it ships. This is necessary
|
||||
# because some projects insist on just asking xcrun and related
|
||||
# tools where the compiler runs. These tools are very hard to trick
|
||||
# as they do realpath and end up ignoring the symlinks in a
|
||||
# "softer" tree of nothing but symlinks in the right places.
|
||||
shutil.copytree(
|
||||
real_root,
|
||||
developer_root,
|
||||
symlinks=True,
|
||||
ignore=shutil.ignore_patterns(
|
||||
"AppleTV*.platform",
|
||||
"Watch*.platform",
|
||||
"iPhone*.platform",
|
||||
"Documentation",
|
||||
"swift*",
|
||||
),
|
||||
)
|
||||
|
||||
real_dirs = ["Toolchains/XcodeDefault.xctoolchain/usr/bin", "usr/bin"]
|
||||
|
||||
bins = ["c++", "c89", "c99", "cc", "clang", "clang++", "cpp"]
|
||||
|
||||
for real_dir in real_dirs:
|
||||
dev_dir = os.path.join(developer_root, "Contents", "Developer", real_dir)
|
||||
for fname in os.listdir(dev_dir):
|
||||
if fname in bins:
|
||||
os.unlink(os.path.join(dev_dir, fname))
|
||||
symlink(
|
||||
os.path.join(spack.paths.build_env_path, "cc"),
|
||||
os.path.join(dev_dir, fname),
|
||||
)
|
||||
|
||||
symlink(developer_root, xcode_link)
|
||||
|
||||
env.set("DEVELOPER_DIR", xcode_link)
|
||||
|
@@ -113,7 +113,7 @@ def _valid_virtuals_and_externals(self, spec):
|
||||
pref_key = lambda spec: 0 # no-op pref key
|
||||
|
||||
if spec.virtual:
|
||||
candidates = spack.repo.path.providers_for(spec)
|
||||
candidates = spack.repo.PATH.providers_for(spec)
|
||||
if not candidates:
|
||||
raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)
|
||||
|
||||
|
@@ -90,7 +90,7 @@ def spec_from_entry(entry):
|
||||
name=entry["name"], version=entry["version"], compiler=compiler_str, arch=arch_str
|
||||
)
|
||||
|
||||
pkg_cls = spack.repo.path.get_pkg_class(entry["name"])
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(entry["name"])
|
||||
|
||||
if "parameters" in entry:
|
||||
variant_strs = list()
|
||||
|
@@ -21,10 +21,11 @@
|
||||
import contextlib
|
||||
import datetime
|
||||
import os
|
||||
import pathlib
|
||||
import socket
|
||||
import sys
|
||||
import time
|
||||
from typing import Dict, List, NamedTuple, Set, Type, Union
|
||||
from typing import Any, Callable, Dict, Generator, List, NamedTuple, Set, Type, Union
|
||||
|
||||
try:
|
||||
import uuid
|
||||
@@ -141,22 +142,23 @@ class InstallStatuses:
|
||||
def canonicalize(cls, query_arg):
|
||||
if query_arg is True:
|
||||
return [cls.INSTALLED]
|
||||
elif query_arg is False:
|
||||
if query_arg is False:
|
||||
return [cls.MISSING]
|
||||
elif query_arg is any:
|
||||
if query_arg is any:
|
||||
return [cls.INSTALLED, cls.DEPRECATED, cls.MISSING]
|
||||
elif isinstance(query_arg, InstallStatus):
|
||||
if isinstance(query_arg, InstallStatus):
|
||||
return [query_arg]
|
||||
else:
|
||||
try: # Try block catches if it is not an iterable at all
|
||||
if any(type(x) != InstallStatus for x in query_arg):
|
||||
raise TypeError
|
||||
except TypeError:
|
||||
raise TypeError(
|
||||
"installation query must be `any`, boolean, "
|
||||
"InstallStatus, or iterable of InstallStatus"
|
||||
)
|
||||
return query_arg
|
||||
try:
|
||||
statuses = list(query_arg)
|
||||
if all(isinstance(x, InstallStatus) for x in statuses):
|
||||
return statuses
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
raise TypeError(
|
||||
"installation query must be `any`, boolean, "
|
||||
"InstallStatus, or iterable of InstallStatus"
|
||||
)
|
||||
|
||||
|
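Reading the rewritten branches of canonicalize above, the intended mapping is roughly the following (an illustrative summary, not copied from the Spack test suite):

# InstallStatuses.canonicalize(True)   -> [InstallStatuses.INSTALLED]
# InstallStatuses.canonicalize(False)  -> [InstallStatuses.MISSING]
# InstallStatuses.canonicalize(any)    -> [INSTALLED, DEPRECATED, MISSING]
# InstallStatuses.canonicalize(status) -> [status], for a single InstallStatus
# InstallStatuses.canonicalize(iterable_of_statuses) -> list of those statuses
# anything else raises TypeError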
||||
class InstallRecord:
|
||||
@@ -306,15 +308,16 @@ def __reduce__(self):
|
||||
|
||||
"""
|
||||
|
||||
#: Data class to configure locks in Database objects
|
||||
#:
|
||||
#: Args:
|
||||
#: enable (bool): whether to enable locks or not.
|
||||
#: database_timeout (int or None): timeout for the database lock
|
||||
#: package_timeout (int or None): timeout for the package lock
|
||||
|
||||
|
||||
class LockConfiguration(NamedTuple):
|
||||
"""Data class to configure locks in Database objects
|
||||
|
||||
Args:
|
||||
enable: whether to enable locks or not.
|
||||
database_timeout: timeout for the database lock
|
||||
package_timeout: timeout for the package lock
|
||||
"""
|
||||
|
||||
enable: bool
|
||||
database_timeout: Optional[int]
|
||||
package_timeout: Optional[int]
|
||||
@@ -348,13 +351,230 @@ def lock_configuration(configuration):
     )


+def prefix_lock_path(root_dir: Union[str, pathlib.Path]) -> pathlib.Path:
+    """Returns the path of the prefix lock file, given the root directory.
+
+    Args:
+        root_dir: root directory containing the database directory
+    """
+    return pathlib.Path(root_dir) / _DB_DIRNAME / "prefix_lock"
+
+
+def failures_lock_path(root_dir: Union[str, pathlib.Path]) -> pathlib.Path:
+    """Returns the path of the failures lock file, given the root directory.
+
+    Args:
+        root_dir: root directory containing the database directory
+    """
+    return pathlib.Path(root_dir) / _DB_DIRNAME / "prefix_failures"
+
+
+class SpecLocker:
+    """Manages acquiring and releasing read or write locks on concrete specs."""
+
+    def __init__(self, lock_path: Union[str, pathlib.Path], default_timeout: Optional[float]):
+        self.lock_path = pathlib.Path(lock_path)
+        self.default_timeout = default_timeout
+
+        # Maps (spec.dag_hash(), spec.name) to the corresponding lock object
+        self.locks: Dict[Tuple[str, str], lk.Lock] = {}
+
+    def lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
+        """Returns a lock on a concrete spec.
+
+        The lock is a byte range lock on the nth byte of a file.
+
+        The lock file is ``self.lock_path``.
+
+        n is the sys.maxsize-bit prefix of the DAG hash. This makes the likelihood of
+        collision very low AND it gives us readers-writer lock semantics with just a
+        single lockfile, so no cleanup is required.
+        """
+        assert spec.concrete, "cannot lock a non-concrete spec"
+        timeout = timeout or self.default_timeout
+        key = self._lock_key(spec)
+
+        if key not in self.locks:
+            self.locks[key] = self.raw_lock(spec, timeout=timeout)
+        else:
+            self.locks[key].default_timeout = timeout
+
+        return self.locks[key]
+
+    def raw_lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
+        """Returns a raw lock for a Spec, but doesn't keep track of it."""
+        return lk.Lock(
+            str(self.lock_path),
+            start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
+            length=1,
+            default_timeout=timeout,
+            desc=spec.name,
+        )
+
+    def has_lock(self, spec: "spack.spec.Spec") -> bool:
+        """Returns True if the spec is already managed by this spec locker"""
+        return self._lock_key(spec) in self.locks
+
+    def _lock_key(self, spec: "spack.spec.Spec") -> Tuple[str, str]:
+        return (spec.dag_hash(), spec.name)
+
+    @contextlib.contextmanager
+    def write_lock(self, spec: "spack.spec.Spec") -> Generator["SpecLocker", None, None]:
+        lock = self.lock(spec)
+        lock.acquire_write()
+
+        try:
+            yield self
+        except lk.LockError:
+            # This addresses the case where a nested lock attempt fails inside
+            # of this context manager
+            raise
+        except (Exception, KeyboardInterrupt):
+            lock.release_write()
+            raise
+        else:
+            lock.release_write()
+
+    def clear(self, spec: "spack.spec.Spec") -> Tuple[bool, Optional[lk.Lock]]:
+        key = self._lock_key(spec)
+        lock = self.locks.pop(key, None)
+        return bool(lock), lock
+
+    def clear_all(self, clear_fn: Optional[Callable[[lk.Lock], Any]] = None) -> None:
+        if clear_fn is not None:
+            for lock in self.locks.values():
+                clear_fn(lock)
+        self.locks.clear()


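A hedged usage sketch of SpecLocker: lock() caches one byte-range lock per (dag hash, name) key, and write_lock() wraps acquire/release around a block. The lock path, the spec object, and the work done under the lock are placeholders, not code from this diff:

    locker = SpecLocker("/path/to/db_dir/prefix_lock", default_timeout=None)

    # Repeated requests for the same concrete spec reuse the cached lock object
    lock = locker.lock(spec, timeout=60)
    assert locker.has_lock(spec)

    # Context-manager form: releases the write lock on success and on failure,
    # but re-raises immediately if a nested lock attempt fails inside the block
    with locker.write_lock(spec):
        do_install(spec.prefix)  # hypothetical work protected by the lock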
class FailureTracker:
|
||||
"""Tracks installation failures.
|
||||
|
||||
Prefix failure marking takes the form of a byte range lock on the nth
|
||||
byte of a file for coordinating between concurrent parallel build
|
||||
processes and a persistent file, named with the full hash and
|
||||
containing the spec, in a subdirectory of the database to enable
|
||||
persistence across overlapping but separate related build processes.
|
||||
|
||||
The failure lock file lives alongside the install DB.
|
||||
|
||||
``n`` is the sys.maxsize-bit prefix of the associated DAG hash to make
|
||||
the likelihood of collision very low with no cleanup required.
|
||||
"""
|
||||
|
||||
def __init__(self, root_dir: Union[str, pathlib.Path], default_timeout: Optional[float]):
|
||||
#: Ensure a persistent location for dealing with parallel installation
|
||||
#: failures (e.g., across near-concurrent processes).
|
||||
self.dir = pathlib.Path(root_dir) / _DB_DIRNAME / "failures"
|
||||
self.dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
self.locker = SpecLocker(failures_lock_path(root_dir), default_timeout=default_timeout)
|
||||
|
||||
def clear(self, spec: "spack.spec.Spec", force: bool = False) -> None:
|
||||
"""Removes any persistent and cached failure tracking for the spec.
|
||||
|
||||
see `mark()`.
|
||||
|
||||
Args:
|
||||
spec: the spec whose failure indicators are being removed
|
||||
force: True if the failure information should be cleared when a failure lock
|
||||
exists for the file, or False if the failure should not be cleared (e.g.,
|
||||
it may be associated with a concurrent build)
|
||||
"""
|
||||
locked = self.lock_taken(spec)
|
||||
if locked and not force:
|
||||
tty.msg(f"Retaining failure marking for {spec.name} due to lock")
|
||||
return
|
||||
|
||||
if locked:
|
||||
tty.warn(f"Removing failure marking despite lock for {spec.name}")
|
||||
|
||||
succeeded, lock = self.locker.clear(spec)
|
||||
if succeeded and lock is not None:
|
||||
lock.release_write()
|
||||
|
||||
if self.persistent_mark(spec):
|
||||
path = self._path(spec)
|
||||
tty.debug(f"Removing failure marking for {spec.name}")
|
||||
try:
|
||||
path.unlink()
|
||||
except OSError as err:
|
||||
tty.warn(
|
||||
f"Unable to remove failure marking for {spec.name} ({str(path)}): {str(err)}"
|
||||
)
|
||||
|
||||
def clear_all(self) -> None:
|
||||
"""Force remove install failure tracking files."""
|
||||
tty.debug("Releasing prefix failure locks")
|
||||
self.locker.clear_all(
|
||||
clear_fn=lambda x: x.release_write() if x.is_write_locked() else True
|
||||
)
|
||||
|
||||
tty.debug("Removing prefix failure tracking files")
|
||||
try:
|
||||
for fail_mark in os.listdir(str(self.dir)):
|
||||
try:
|
||||
(self.dir / fail_mark).unlink()
|
||||
except OSError as exc:
|
||||
tty.warn(f"Unable to remove failure marking file {fail_mark}: {str(exc)}")
|
||||
except OSError as exc:
|
||||
tty.warn(f"Unable to remove failure marking files: {str(exc)}")
|
||||
|
||||
def mark(self, spec: "spack.spec.Spec") -> lk.Lock:
|
||||
"""Marks a spec as failing to install.
|
||||
|
||||
Args:
|
||||
spec: spec that failed to install
|
||||
"""
|
||||
# Dump the spec to the failure file for (manual) debugging purposes
|
||||
path = self._path(spec)
|
||||
path.write_text(spec.to_json())
|
||||
|
||||
# Also ensure a failure lock is taken to prevent cleanup removal
|
||||
# of failure status information during a concurrent parallel build.
|
||||
if not self.locker.has_lock(spec):
|
||||
try:
|
||||
mark = self.locker.lock(spec)
|
||||
mark.acquire_write()
|
||||
except lk.LockTimeoutError:
|
||||
# Unlikely that another process failed to install at the same
|
||||
# time but log it anyway.
|
||||
tty.debug(f"PID {os.getpid()} failed to mark install failure for {spec.name}")
|
||||
tty.warn(f"Unable to mark {spec.name} as failed.")
|
||||
|
||||
return self.locker.lock(spec)
|
||||
|
||||
def has_failed(self, spec: "spack.spec.Spec") -> bool:
|
||||
"""Return True if the spec is marked as failed."""
|
||||
# The failure was detected in this process.
|
||||
if self.locker.has_lock(spec):
|
||||
return True
|
||||
|
||||
# The failure was detected by a concurrent process (e.g., an srun),
|
||||
# which is expected to be holding a write lock if that is the case.
|
||||
if self.lock_taken(spec):
|
||||
return True
|
||||
|
||||
# Determine if the spec may have been marked as failed by a separate
|
||||
# spack build process running concurrently.
|
||||
return self.persistent_mark(spec)
|
||||
|
||||
def lock_taken(self, spec: "spack.spec.Spec") -> bool:
|
||||
"""Return True if another process has a failure lock on the spec."""
|
||||
check = self.locker.raw_lock(spec)
|
||||
return check.is_write_locked()
|
||||
|
||||
def persistent_mark(self, spec: "spack.spec.Spec") -> bool:
|
||||
"""Determine if the spec has a persistent failure marking."""
|
||||
return self._path(spec).exists()
|
||||
|
||||
def _path(self, spec: "spack.spec.Spec") -> pathlib.Path:
|
||||
"""Return the path to the spec's failure file, which may not exist."""
|
||||
assert spec.concrete, "concrete spec required for failure path"
|
||||
return self.dir / f"{spec.name}-{spec.dag_hash()}"
|
||||
|
||||
|
||||
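FailureTracker pairs a persistent marker file (named with the spec name and DAG hash under the failures directory) with a byte-range failure lock, so the lifecycle a build process would follow looks roughly like this (root directory, spec, and the build call are placeholders):

    failures = FailureTracker("/path/to/install_tree_root", default_timeout=3)

    if failures.has_failed(spec):
        raise RuntimeError(f"{spec.name} is already marked as failed")

    try:
        build(spec)  # hypothetical build step
    except Exception:
        failures.mark(spec)    # dumps the spec JSON and takes the failure lock
        raise
    else:
        failures.clear(spec)   # retained if another process still holds the lock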
class Database:
|
||||
#: Per-process lock objects for each install prefix
|
||||
_prefix_locks: Dict[str, lk.Lock] = {}
|
||||
|
||||
#: Per-process failure (lock) objects for each install prefix
|
||||
_prefix_failures: Dict[str, lk.Lock] = {}
|
||||
|
||||
#: Fields written for each install record
|
||||
record_fields: Tuple[str, ...] = DEFAULT_INSTALL_RECORD_FIELDS
|
||||
|
||||
@@ -392,24 +612,10 @@ def __init__(
|
||||
self._verifier_path = os.path.join(self.database_directory, "index_verifier")
|
||||
self._lock_path = os.path.join(self.database_directory, "lock")
|
||||
|
||||
# This is for other classes to use to lock prefix directories.
|
||||
self.prefix_lock_path = os.path.join(self.database_directory, "prefix_lock")
|
||||
|
||||
# Ensure a persistent location for dealing with parallel installation
|
||||
# failures (e.g., across near-concurrent processes).
|
||||
self._failure_dir = os.path.join(self.database_directory, "failures")
|
||||
|
||||
# Support special locks for handling parallel installation failures
|
||||
# of a spec.
|
||||
self.prefix_fail_path = os.path.join(self.database_directory, "prefix_failures")
|
||||
|
||||
# Create needed directories and files
|
||||
if not is_upstream and not os.path.exists(self.database_directory):
|
||||
fs.mkdirp(self.database_directory)
|
||||
|
||||
if not is_upstream and not os.path.exists(self._failure_dir):
|
||||
fs.mkdirp(self._failure_dir)
|
||||
|
||||
self.is_upstream = is_upstream
|
||||
self.last_seen_verifier = ""
|
||||
# Failed write transactions (interrupted by exceptions) will alert
|
||||
@@ -423,15 +629,7 @@ def __init__(
|
||||
|
||||
# initialize rest of state.
|
||||
self.db_lock_timeout = lock_cfg.database_timeout
|
||||
self.package_lock_timeout = lock_cfg.package_timeout
|
||||
|
||||
tty.debug("DATABASE LOCK TIMEOUT: {0}s".format(str(self.db_lock_timeout)))
|
||||
timeout_format_str = (
|
||||
"{0}s".format(str(self.package_lock_timeout))
|
||||
if self.package_lock_timeout
|
||||
else "No timeout"
|
||||
)
|
||||
tty.debug("PACKAGE LOCK TIMEOUT: {0}".format(str(timeout_format_str)))
|
||||
|
||||
self.lock: Union[ForbiddenLock, lk.Lock]
|
||||
if self.is_upstream:
|
||||
@@ -471,212 +669,6 @@ def read_transaction(self):
|
||||
"""Get a read lock context manager for use in a `with` block."""
|
||||
return self._read_transaction_impl(self.lock, acquire=self._read)
|
||||
|
||||
def _failed_spec_path(self, spec):
|
||||
"""Return the path to the spec's failure file, which may not exist."""
|
||||
if not spec.concrete:
|
||||
raise ValueError("Concrete spec required for failure path for {0}".format(spec.name))
|
||||
|
||||
return os.path.join(self._failure_dir, "{0}-{1}".format(spec.name, spec.dag_hash()))
|
||||
|
||||
def clear_all_failures(self) -> None:
|
||||
"""Force remove install failure tracking files."""
|
||||
tty.debug("Releasing prefix failure locks")
|
||||
for pkg_id in list(self._prefix_failures.keys()):
|
||||
lock = self._prefix_failures.pop(pkg_id, None)
|
||||
if lock:
|
||||
lock.release_write()
|
||||
|
||||
# Remove all failure markings (aka files)
|
||||
tty.debug("Removing prefix failure tracking files")
|
||||
for fail_mark in os.listdir(self._failure_dir):
|
||||
try:
|
||||
os.remove(os.path.join(self._failure_dir, fail_mark))
|
||||
except OSError as exc:
|
||||
tty.warn(
|
||||
"Unable to remove failure marking file {0}: {1}".format(fail_mark, str(exc))
|
||||
)
|
||||
|
||||
def clear_failure(self, spec: "spack.spec.Spec", force: bool = False) -> None:
|
||||
"""
|
||||
Remove any persistent and cached failure tracking for the spec.
|
||||
|
||||
see `mark_failed()`.
|
||||
|
||||
Args:
|
||||
spec: the spec whose failure indicators are being removed
|
||||
force: True if the failure information should be cleared when a prefix failure
|
||||
lock exists for the file, or False if the failure should not be cleared (e.g.,
|
||||
it may be associated with a concurrent build)
|
||||
"""
|
||||
failure_locked = self.prefix_failure_locked(spec)
|
||||
if failure_locked and not force:
|
||||
tty.msg("Retaining failure marking for {0} due to lock".format(spec.name))
|
||||
return
|
||||
|
||||
if failure_locked:
|
||||
tty.warn("Removing failure marking despite lock for {0}".format(spec.name))
|
||||
|
||||
lock = self._prefix_failures.pop(spec.prefix, None)
|
||||
if lock:
|
||||
lock.release_write()
|
||||
|
||||
if self.prefix_failure_marked(spec):
|
||||
try:
|
||||
path = self._failed_spec_path(spec)
|
||||
tty.debug("Removing failure marking for {0}".format(spec.name))
|
||||
os.remove(path)
|
||||
except OSError as err:
|
||||
tty.warn(
|
||||
"Unable to remove failure marking for {0} ({1}): {2}".format(
|
||||
spec.name, path, str(err)
|
||||
)
|
||||
)
|
||||
|
||||
def mark_failed(self, spec: "spack.spec.Spec") -> lk.Lock:
|
||||
"""
|
||||
Mark a spec as failing to install.
|
||||
|
||||
Prefix failure marking takes the form of a byte range lock on the nth
|
||||
byte of a file for coordinating between concurrent parallel build
|
||||
processes and a persistent file, named with the full hash and
|
||||
containing the spec, in a subdirectory of the database to enable
|
||||
persistence across overlapping but separate related build processes.
|
||||
|
||||
The failure lock file, ``spack.store.STORE.db.prefix_failures``, lives
|
||||
alongside the install DB. ``n`` is the sys.maxsize-bit prefix of the
|
||||
associated DAG hash to make the likelihood of collision very low with
|
||||
no cleanup required.
|
||||
"""
|
||||
# Dump the spec to the failure file for (manual) debugging purposes
|
||||
path = self._failed_spec_path(spec)
|
||||
with open(path, "w") as f:
|
||||
spec.to_json(f)
|
||||
|
||||
# Also ensure a failure lock is taken to prevent cleanup removal
|
||||
# of failure status information during a concurrent parallel build.
|
||||
err = "Unable to mark {0.name} as failed."
|
||||
|
||||
prefix = spec.prefix
|
||||
if prefix not in self._prefix_failures:
|
||||
mark = lk.Lock(
|
||||
self.prefix_fail_path,
|
||||
start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
|
||||
length=1,
|
||||
default_timeout=self.package_lock_timeout,
|
||||
desc=spec.name,
|
||||
)
|
||||
|
||||
try:
|
||||
mark.acquire_write()
|
||||
except lk.LockTimeoutError:
|
||||
# Unlikely that another process failed to install at the same
|
||||
# time but log it anyway.
|
||||
tty.debug(
|
||||
"PID {0} failed to mark install failure for {1}".format(os.getpid(), spec.name)
|
||||
)
|
||||
tty.warn(err.format(spec))
|
||||
|
||||
# Whether we or another process marked it as a failure, track it
|
||||
# as such locally.
|
||||
self._prefix_failures[prefix] = mark
|
||||
|
||||
return self._prefix_failures[prefix]
|
||||
|
||||
def prefix_failed(self, spec: "spack.spec.Spec") -> bool:
|
||||
"""Return True if the prefix (installation) is marked as failed."""
|
||||
# The failure was detected in this process.
|
||||
if spec.prefix in self._prefix_failures:
|
||||
return True
|
||||
|
||||
# The failure was detected by a concurrent process (e.g., an srun),
|
||||
# which is expected to be holding a write lock if that is the case.
|
||||
if self.prefix_failure_locked(spec):
|
||||
return True
|
||||
|
||||
# Determine if the spec may have been marked as failed by a separate
|
||||
# spack build process running concurrently.
|
||||
return self.prefix_failure_marked(spec)
|
||||
|
||||
def prefix_failure_locked(self, spec: "spack.spec.Spec") -> bool:
|
||||
"""Return True if a process has a failure lock on the spec."""
|
||||
check = lk.Lock(
|
||||
self.prefix_fail_path,
|
||||
start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
|
||||
length=1,
|
||||
default_timeout=self.package_lock_timeout,
|
||||
desc=spec.name,
|
||||
)
|
||||
|
||||
return check.is_write_locked()
|
||||
|
||||
def prefix_failure_marked(self, spec: "spack.spec.Spec") -> bool:
|
||||
"""Determine if the spec has a persistent failure marking."""
|
||||
return os.path.exists(self._failed_spec_path(spec))
|
||||
|
||||
def prefix_lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
|
||||
"""Get a lock on a particular spec's installation directory.
|
||||
|
||||
NOTE: The installation directory **does not** need to exist.
|
||||
|
||||
Prefix lock is a byte range lock on the nth byte of a file.
|
||||
|
||||
The lock file is ``spack.store.STORE.db.prefix_lock`` -- the DB
|
||||
tells us what to call it and it lives alongside the install DB.
|
||||
|
||||
n is the sys.maxsize-bit prefix of the DAG hash. This makes
|
||||
likelihood of collision is very low AND it gives us
|
||||
readers-writer lock semantics with just a single lockfile, so no
|
||||
cleanup required.
|
||||
"""
|
||||
timeout = timeout or self.package_lock_timeout
|
||||
prefix = spec.prefix
|
||||
if prefix not in self._prefix_locks:
|
||||
self._prefix_locks[prefix] = lk.Lock(
|
||||
self.prefix_lock_path,
|
||||
start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
|
||||
length=1,
|
||||
default_timeout=timeout,
|
||||
desc=spec.name,
|
||||
)
|
||||
elif timeout != self._prefix_locks[prefix].default_timeout:
|
||||
self._prefix_locks[prefix].default_timeout = timeout
|
||||
|
||||
return self._prefix_locks[prefix]
|
||||
|
||||
@contextlib.contextmanager
|
||||
def prefix_read_lock(self, spec):
|
||||
prefix_lock = self.prefix_lock(spec)
|
||||
prefix_lock.acquire_read()
|
||||
|
||||
try:
|
||||
yield self
|
||||
except lk.LockError:
|
||||
# This addresses the case where a nested lock attempt fails inside
|
||||
# of this context manager
|
||||
raise
|
||||
except (Exception, KeyboardInterrupt):
|
||||
prefix_lock.release_read()
|
||||
raise
|
||||
else:
|
||||
prefix_lock.release_read()
|
||||
|
||||
@contextlib.contextmanager
|
||||
def prefix_write_lock(self, spec):
|
||||
prefix_lock = self.prefix_lock(spec)
|
||||
prefix_lock.acquire_write()
|
||||
|
||||
try:
|
||||
yield self
|
||||
except lk.LockError:
|
||||
# This addresses the case where a nested lock attempt fails inside
|
||||
# of this context manager
|
||||
raise
|
||||
except (Exception, KeyboardInterrupt):
|
||||
prefix_lock.release_write()
|
||||
raise
|
||||
else:
|
||||
prefix_lock.release_write()
|
||||
|
||||
def _write_to_file(self, stream):
|
||||
"""Write out the database in JSON format to the stream passed
|
||||
as argument.
|
||||
|
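For reviewers tracking call sites: every removed Database method above has a one-to-one replacement on the store's new helper objects, as the installer and package_base hunks later in this diff show:

    # Old Database API                                  New home
    # spack.store.STORE.db.prefix_failed(spec)       -> spack.store.STORE.failure_tracker.has_failed(spec)
    # spack.store.STORE.db.clear_failure(spec, ...)  -> spack.store.STORE.failure_tracker.clear(spec, ...)
    # spack.store.STORE.db.mark_failed(spec)         -> spack.store.STORE.failure_tracker.mark(spec)
    # spack.store.STORE.db.prefix_lock(spec, t)      -> spack.store.STORE.prefix_locker.lock(spec, t)
    # spack.store.STORE.db.prefix_write_lock(spec)   -> spack.store.STORE.prefix_locker.write_lock(spec)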
@@ -33,7 +33,7 @@ class OpenMpi(Package):
 import functools
 import os.path
 import re
-from typing import List, Optional, Set, Union
+from typing import Any, Callable, List, Optional, Set, Tuple, Union

 import llnl.util.lang
 import llnl.util.tty.color
@@ -520,7 +520,8 @@ def _execute_conflicts(pkg):

         # Save in a list the conflicts and the associated custom messages
         when_spec_list = pkg.conflicts.setdefault(conflict_spec, [])
-        when_spec_list.append((when_spec, msg))
+        msg_with_name = f"{pkg.name}: {msg}" if msg is not None else msg
+        when_spec_list.append((when_spec, msg_with_name))

     return _execute_conflicts
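This hunk, and the matching _execute_requires change further down, prefix custom messages with the owning package's name before storing them; a tiny illustration with invented values:

    pkg_name = "openmpi"
    msg = "requires a Fortran compiler"
    msg_with_name = f"{pkg_name}: {msg}" if msg is not None else msg
    assert msg_with_name == "openmpi: requires a Fortran compiler"

    msg = None
    assert (f"{pkg_name}: {msg}" if msg is not None else msg) is None  # None stays None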
@@ -663,39 +664,35 @@ def _execute_patch(pkg_or_dep):
|
||||
|
||||
@directive("variants")
|
||||
def variant(
|
||||
name,
|
||||
default=None,
|
||||
description="",
|
||||
values=None,
|
||||
multi=None,
|
||||
validator=None,
|
||||
when=None,
|
||||
sticky=False,
|
||||
name: str,
|
||||
default: Optional[Any] = None,
|
||||
description: str = "",
|
||||
values: Optional[Union[collections.abc.Sequence, Callable[[Any], bool]]] = None,
|
||||
multi: Optional[bool] = None,
|
||||
validator: Optional[Callable[[str, str, Tuple[Any, ...]], None]] = None,
|
||||
when: Optional[Union[str, bool]] = None,
|
||||
sticky: bool = False,
|
||||
):
|
||||
"""Define a variant for the package. Packager can specify a default
|
||||
value as well as a text description.
|
||||
"""Define a variant for the package.
|
||||
|
||||
Packager can specify a default value as well as a text description.
|
||||
|
||||
Args:
|
||||
name (str): name of the variant
|
||||
default (str or bool): default value for the variant, if not
|
||||
specified otherwise the default will be False for a boolean
|
||||
variant and 'nothing' for a multi-valued variant
|
||||
description (str): description of the purpose of the variant
|
||||
values (tuple or typing.Callable): either a tuple of strings containing the
|
||||
allowed values, or a callable accepting one value and returning
|
||||
True if it is valid
|
||||
multi (bool): if False only one value per spec is allowed for
|
||||
this variant
|
||||
validator (typing.Callable): optional group validator to enforce additional
|
||||
logic. It receives the package name, the variant name and a tuple
|
||||
of values and should raise an instance of SpackError if the group
|
||||
doesn't meet the additional constraints
|
||||
when (spack.spec.Spec, bool): optional condition on which the
|
||||
variant applies
|
||||
sticky (bool): the variant should not be changed by the concretizer to
|
||||
find a valid concrete spec.
|
||||
name: Name of the variant
|
||||
default: Default value for the variant, if not specified otherwise the default will be
|
||||
False for a boolean variant and 'nothing' for a multi-valued variant
|
||||
description: Description of the purpose of the variant
|
||||
values: Either a tuple of strings containing the allowed values, or a callable accepting
|
||||
one value and returning True if it is valid
|
||||
multi: If False only one value per spec is allowed for this variant
|
||||
validator: Optional group validator to enforce additional logic. It receives the package
|
||||
name, the variant name and a tuple of values and should raise an instance of SpackError
|
||||
if the group doesn't meet the additional constraints
|
||||
when: Optional condition on which the variant applies
|
||||
sticky: The variant should not be changed by the concretizer to find a valid concrete spec
|
||||
|
||||
Raises:
|
||||
DirectiveError: if arguments passed to the directive are invalid
|
||||
DirectiveError: If arguments passed to the directive are invalid
|
||||
"""
|
||||
|
||||
def format_error(msg, pkg):
|
||||
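The annotated signature does not change how package authors call the directive; a hedged example of the documented argument shapes, in a hypothetical package.py (names are invented, the preamble is the usual one assumed for package files):

    from spack.package import *  # assumed package.py preamble

    class Libexample(Package):
        """Hypothetical package showing the variant() arguments documented above."""

        # boolean variant with an explicit default
        variant("shared", default=True, description="Build shared libraries")

        # multi-valued variant with a tuple of allowed values
        variant(
            "schedulers",
            description="Batch schedulers to support",
            values=("slurm", "pbs", "none"),
            default="none",
            multi=True,
        )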
@@ -900,7 +897,8 @@ def _execute_requires(pkg):

         # Save in a list the requirements and the associated custom messages
         when_spec_list = pkg.requirements.setdefault(tuple(requirement_specs), [])
-        when_spec_list.append((when_spec, policy, msg))
+        msg_with_name = f"{pkg.name}: {msg}" if msg is not None else msg
+        when_spec_list.append((when_spec, policy, msg_with_name))

     return _execute_requires

@@ -16,7 +16,7 @@
 import urllib.parse
 import urllib.request
 import warnings
-from typing import Any, Dict, List, Optional, Set, Tuple, Union
+from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -203,7 +203,7 @@ def activate(env, use_env_repo=False):
|
||||
env.store_token = spack.store.reinitialize()
|
||||
|
||||
if use_env_repo:
|
||||
spack.repo.path.put_first(env.repo)
|
||||
spack.repo.PATH.put_first(env.repo)
|
||||
|
||||
tty.debug("Using environment '%s'" % env.name)
|
||||
|
||||
@@ -227,7 +227,7 @@ def deactivate():
|
||||
|
||||
# use _repo so we only remove if a repo was actually constructed
|
||||
if _active_environment._repo:
|
||||
spack.repo.path.remove(_active_environment._repo)
|
||||
spack.repo.PATH.remove(_active_environment._repo)
|
||||
|
||||
tty.debug("Deactivated environment '%s'" % _active_environment.name)
|
||||
|
||||
@@ -1084,8 +1084,8 @@ def add(self, user_spec, list_name=user_speclist_name):
|
||||
if list_name == user_speclist_name:
|
||||
if spec.anonymous:
|
||||
raise SpackEnvironmentError("cannot add anonymous specs to an environment")
|
||||
elif not spack.repo.path.exists(spec.name) and not spec.abstract_hash:
|
||||
virtuals = spack.repo.path.provider_index.providers.keys()
|
||||
elif not spack.repo.PATH.exists(spec.name) and not spec.abstract_hash:
|
||||
virtuals = spack.repo.PATH.provider_index.providers.keys()
|
||||
if spec.name not in virtuals:
|
||||
msg = "no such package: %s" % spec.name
|
||||
raise SpackEnvironmentError(msg)
|
||||
@@ -1262,7 +1262,7 @@ def develop(self, spec: Spec, path: str, clone: bool = False) -> bool:
|
||||
# better if we can create the `source_path` directly into its final
|
||||
# destination.
|
||||
abspath = spack.util.path.canonicalize_path(path, default_wd=self.path)
|
||||
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
# We construct a package class ourselves, rather than asking for
|
||||
# Spec.package, since Spec only allows this when it is concrete
|
||||
package = pkg_cls(spec)
|
||||
@@ -1490,7 +1490,7 @@ def _concretize_separately(self, tests=False):
|
||||
# for a write lock. We do this indirectly by retrieving the
|
||||
# provider index, which should in turn trigger the update of
|
||||
# all the indexes if there's any need for that.
|
||||
_ = spack.repo.path.provider_index
|
||||
_ = spack.repo.PATH.provider_index
|
||||
|
||||
# Ensure we have compilers in compilers.yaml to avoid that
|
||||
# processes try to write the config file in parallel
|
||||
@@ -1921,16 +1921,17 @@ def install_specs(self, specs=None, **install_args):
                     "Could not install log links for {0}: {1}".format(spec.name, str(e))
                 )

-    def all_specs(self):
-        """Return all specs, even those a user spec would shadow."""
-        roots = [self.specs_by_hash[h] for h in self.concretized_order]
-        specs = [s for s in traverse.traverse_nodes(roots, key=traverse.by_dag_hash)]
-        specs.sort()
-        return specs
+    def all_specs_generator(self) -> Iterable[Spec]:
+        """Returns a generator for all concrete specs"""
+        return traverse.traverse_nodes(self.concrete_roots(), key=traverse.by_dag_hash)
+
+    def all_specs(self) -> List[Spec]:
+        """Returns a list of all concrete specs"""
+        return list(self.all_specs_generator())

     def all_hashes(self):
         """Return hashes of all specs."""
-        return [s.dag_hash() for s in self.all_specs()]
+        return [s.dag_hash() for s in self.all_specs_generator()]

     def roots(self):
         """Specs explicitly requested by the user *in this environment*.
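The generator form lets callers stream over concrete specs without materializing a list first, which the updated all_hashes() takes advantage of; a hedged sketch (obtaining the environment this way is an assumption):

    env = spack.environment.active_environment()  # assumed accessor

    # Streaming: nothing is materialized while collecting hashes
    hashes = [s.dag_hash() for s in env.all_specs_generator()]

    # List form: when the complete collection is needed up front
    specs = env.all_specs()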
@@ -2279,7 +2280,7 @@ def _add_to_environment_repository(self, spec_node: Spec) -> None:
|
||||
repository = spack.repo.create_or_construct(repository_dir, spec_node.namespace)
|
||||
pkg_dir = repository.dirname_for_package_name(spec_node.name)
|
||||
fs.mkdirp(pkg_dir)
|
||||
spack.repo.path.dump_provenance(spec_node, pkg_dir)
|
||||
spack.repo.PATH.dump_provenance(spec_node, pkg_dir)
|
||||
|
||||
def manifest_uptodate_or_warn(self):
|
||||
"""Emits a warning if the manifest file is not up-to-date."""
|
||||
|
@@ -45,6 +45,7 @@
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web as web_util
|
||||
import spack.version
|
||||
import spack.version.git_ref_lookup
|
||||
from spack.util.compression import decompressor_for, extension_from_path
|
||||
from spack.util.executable import CommandNotFoundError, which
|
||||
from spack.util.string import comma_and, quote
|
||||
@@ -1540,7 +1541,7 @@ def for_package_version(pkg, version=None):
|
||||
f"Cannot fetch git version for {pkg.name}. Package has no 'git' attribute"
|
||||
)
|
||||
# Populate the version with comparisons to other commits
|
||||
version.attach_git_lookup_from_package(pkg.name)
|
||||
version.attach_lookup(spack.version.git_ref_lookup.GitRefLookup(pkg.name))
|
||||
|
||||
# For GitVersion, we have no way to determine whether a ref is a branch or tag
|
||||
# Fortunately, we handle branches and tags identically, except tags are
|
||||
|
@@ -535,7 +535,7 @@ def edge_entry(self, edge):
|
||||
|
||||
def _static_edges(specs, deptype):
|
||||
for spec in specs:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
possible = pkg_cls.possible_dependencies(expand_virtuals=True, deptype=deptype)
|
||||
|
||||
for parent_name, dependencies in possible.items():
|
||||
|
@@ -49,7 +49,7 @@ def __call__(self, spec):
|
||||
|
||||
|
||||
def _content_hash_override(spec):
|
||||
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
pkg = pkg_cls(spec)
|
||||
return pkg.content_hash()
|
||||
|
||||
|
@@ -1147,12 +1147,12 @@ def write_test_result(self, spec, result):
|
||||
def write_reproducibility_data(self):
|
||||
for spec in self.specs:
|
||||
repo_cache_path = self.stage.repo.join(spec.name)
|
||||
spack.repo.path.dump_provenance(spec, repo_cache_path)
|
||||
spack.repo.PATH.dump_provenance(spec, repo_cache_path)
|
||||
for vspec in spec.package.virtuals_provided:
|
||||
repo_cache_path = self.stage.repo.join(vspec.name)
|
||||
if not os.path.exists(repo_cache_path):
|
||||
try:
|
||||
spack.repo.path.dump_provenance(vspec, repo_cache_path)
|
||||
spack.repo.PATH.dump_provenance(vspec, repo_cache_path)
|
||||
except spack.repo.UnknownPackageError:
|
||||
pass # not all virtuals have package files
|
||||
|
||||
|
@@ -519,13 +519,6 @@ def _try_install_from_binary_cache(
|
||||
)
|
||||
|
||||
|
||||
def clear_failures() -> None:
|
||||
"""
|
||||
Remove all failure tracking markers for the Spack instance.
|
||||
"""
|
||||
spack.store.STORE.db.clear_all_failures()
|
||||
|
||||
|
||||
def combine_phase_logs(phase_log_files: List[str], log_path: str) -> None:
|
||||
"""
|
||||
Read set or list of logs and combine them into one file.
|
||||
@@ -597,7 +590,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
|
||||
# Get the location of the package in the dest repo.
|
||||
dest_pkg_dir = repo.dirname_for_package_name(node.name)
|
||||
if node is spec:
|
||||
spack.repo.path.dump_provenance(node, dest_pkg_dir)
|
||||
spack.repo.PATH.dump_provenance(node, dest_pkg_dir)
|
||||
elif source_pkg_dir:
|
||||
fs.install_tree(source_pkg_dir, dest_pkg_dir)
|
||||
|
||||
@@ -1126,15 +1119,13 @@ class PackageInstaller:
|
||||
instance.
|
||||
"""
|
||||
|
||||
def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]] = []):
|
||||
def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]] = []) -> None:
|
||||
"""Initialize the installer.
|
||||
|
||||
Args:
|
||||
installs (list): list of tuples, where each
|
||||
tuple consists of a package (PackageBase) and its associated
|
||||
install arguments (dict)
|
||||
Return:
|
||||
PackageInstaller: instance
|
||||
"""
|
||||
# List of build requests
|
||||
self.build_requests = [BuildRequest(pkg, install_args) for pkg, install_args in installs]
|
||||
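The constructor still takes (package, install-args) pairs; a hedged construction sketch ("zlib" and the install argument are only examples):

    spec = spack.spec.Spec("zlib").concretized()
    pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)

    installer = PackageInstaller([(pkg, {"verbose": True})])
    installer.install()  # processes the queued build requests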
@@ -1287,7 +1278,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
|
||||
dep_id = package_id(dep_pkg)
|
||||
|
||||
# Check for failure since a prefix lock is not required
|
||||
if spack.store.STORE.db.prefix_failed(dep):
|
||||
if spack.store.STORE.failure_tracker.has_failed(dep):
|
||||
action = "'spack install' the dependency"
|
||||
msg = "{0} is marked as an install failure: {1}".format(dep_id, action)
|
||||
raise InstallError(err.format(request.pkg_id, msg), pkg=dep_pkg)
|
||||
@@ -1502,7 +1493,7 @@ def _ensure_locked(
|
||||
if lock is None:
|
||||
tty.debug(msg.format("Acquiring", desc, pkg_id, pretty_seconds(timeout or 0)))
|
||||
op = "acquire"
|
||||
lock = spack.store.STORE.db.prefix_lock(pkg.spec, timeout)
|
||||
lock = spack.store.STORE.prefix_locker.lock(pkg.spec, timeout)
|
||||
if timeout != lock.default_timeout:
|
||||
tty.warn(
|
||||
"Expected prefix lock timeout {0}, not {1}".format(
|
||||
@@ -1627,12 +1618,12 @@ def _add_tasks(self, request: BuildRequest, all_deps):
|
||||
# Clear any persistent failure markings _unless_ they are
|
||||
# associated with another process in this parallel build
|
||||
# of the spec.
|
||||
spack.store.STORE.db.clear_failure(dep, force=False)
|
||||
spack.store.STORE.failure_tracker.clear(dep, force=False)
|
||||
|
||||
install_package = request.install_args.get("install_package")
|
||||
if install_package and request.pkg_id not in self.build_tasks:
|
||||
# Be sure to clear any previous failure
|
||||
spack.store.STORE.db.clear_failure(request.spec, force=True)
|
||||
spack.store.STORE.failure_tracker.clear(request.spec, force=True)
|
||||
|
||||
# If not installing dependencies, then determine their
|
||||
# installation status before proceeding
|
||||
@@ -1888,7 +1879,7 @@ def _update_failed(
|
||||
err = "" if exc is None else ": {0}".format(str(exc))
|
||||
tty.debug("Flagging {0} as failed{1}".format(pkg_id, err))
|
||||
if mark:
|
||||
self.failed[pkg_id] = spack.store.STORE.db.mark_failed(task.pkg.spec)
|
||||
self.failed[pkg_id] = spack.store.STORE.failure_tracker.mark(task.pkg.spec)
|
||||
else:
|
||||
self.failed[pkg_id] = None
|
||||
task.status = STATUS_FAILED
|
||||
@@ -2074,7 +2065,7 @@ def install(self) -> None:
|
||||
|
||||
# Flag a failed spec. Do not need an (install) prefix lock since
|
||||
# assume using a separate (failed) prefix lock file.
|
||||
if pkg_id in self.failed or spack.store.STORE.db.prefix_failed(spec):
|
||||
if pkg_id in self.failed or spack.store.STORE.failure_tracker.has_failed(spec):
|
||||
term_status.clear()
|
||||
tty.warn("{0} failed to install".format(pkg_id))
|
||||
self._update_failed(task)
|
||||
|
@@ -605,7 +605,7 @@ def setup_main_options(args):
|
||||
spack.config.config.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
|
||||
[(key, [spack.paths.mock_packages_path])]
|
||||
)
|
||||
spack.repo.path = spack.repo.create(spack.config.config)
|
||||
spack.repo.PATH = spack.repo.create(spack.config.config)
|
||||
|
||||
# If the user asked for it, don't check ssl certs.
|
||||
if args.insecure:
|
||||
|
@@ -442,7 +442,7 @@ def mirror_archive_paths(fetcher, per_package_ref, spec=None):
|
||||
storage path of the resource associated with the specified ``fetcher``."""
|
||||
ext = None
|
||||
if spec:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
versions = pkg_cls.versions.get(spec.version, {})
|
||||
ext = versions.get("extension", None)
|
||||
# If the spec does not explicitly specify an extension (the default case),
|
||||
@@ -474,7 +474,7 @@ def get_all_versions(specs):
|
||||
"""
|
||||
version_specs = []
|
||||
for spec in specs:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
# Skip any package that has no known versions.
|
||||
if not pkg_cls.versions:
|
||||
tty.msg("No safe (checksummed) versions for package %s" % pkg_cls.name)
|
||||
|
@@ -143,7 +143,7 @@ def hierarchy_tokens(self):
|
||||
|
||||
# Check if all the tokens in the hierarchy are virtual specs.
|
||||
# If not warn the user and raise an error.
|
||||
not_virtual = [t for t in tokens if t != "compiler" and not spack.repo.path.is_virtual(t)]
|
||||
not_virtual = [t for t in tokens if t != "compiler" and not spack.repo.PATH.is_virtual(t)]
|
||||
if not_virtual:
|
||||
msg = "Non-virtual specs in 'hierarchy' list for lmod: {0}\n"
|
||||
msg += "Please check the 'modules.yaml' configuration files"
|
||||
|
@@ -514,6 +514,10 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
|
||||
# These are default values for instance variables.
|
||||
#
|
||||
|
||||
# Declare versions dictionary as placeholder for values.
|
||||
# This allows analysis tools to correctly interpret the class attributes.
|
||||
versions: dict
|
||||
|
||||
#: By default, packages are not virtual
|
||||
#: Virtual packages override this attribute
|
||||
virtual = False
|
||||
@@ -528,10 +532,6 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
|
||||
#: By default do not run tests within package's install()
|
||||
run_tests = False
|
||||
|
||||
# FIXME: this is a bad object-oriented design, should be moved to Clang.
|
||||
#: By default do not setup mockup XCode on macOS with Clang
|
||||
use_xcode = False
|
||||
|
||||
#: Keep -Werror flags, matches config:flags:keep_werror to override config
|
||||
# NOTE: should be type Optional[Literal['all', 'specific', 'none']] in 3.8+
|
||||
keep_werror: Optional[str] = None
|
||||
@@ -665,7 +665,7 @@ def __init__(self, spec):
|
||||
self.win_rpath = fsys.WindowsSimulatedRPath(self)
|
||||
|
||||
if self.is_extension:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(self.extendee_spec.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(self.extendee_spec.name)
|
||||
pkg_cls(self.extendee_spec)._check_extendable()
|
||||
|
||||
super().__init__()
|
||||
@@ -728,11 +728,11 @@ def possible_dependencies(
|
||||
continue
|
||||
|
||||
# expand virtuals if enabled, otherwise just stop at virtuals
|
||||
if spack.repo.path.is_virtual(name):
|
||||
if spack.repo.PATH.is_virtual(name):
|
||||
if virtuals is not None:
|
||||
virtuals.add(name)
|
||||
if expand_virtuals:
|
||||
providers = spack.repo.path.providers_for(name)
|
||||
providers = spack.repo.PATH.providers_for(name)
|
||||
dep_names = [spec.name for spec in providers]
|
||||
else:
|
||||
visited.setdefault(cls.name, set()).add(name)
|
||||
@@ -756,7 +756,7 @@ def possible_dependencies(
|
||||
continue
|
||||
|
||||
try:
|
||||
dep_cls = spack.repo.path.get_pkg_class(dep_name)
|
||||
dep_cls = spack.repo.PATH.get_pkg_class(dep_name)
|
||||
except spack.repo.UnknownPackageError:
|
||||
# log unknown packages
|
||||
missing.setdefault(cls.name, set()).add(dep_name)
|
||||
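A hedged sketch of the traversal these hunks support: asking a package class for its possible dependencies with virtuals expanded into their providers ("mpileaks" is only an example name):

    pkg_cls = spack.repo.PATH.get_pkg_class("mpileaks")
    virtuals: set = set()
    possible = pkg_cls.possible_dependencies(expand_virtuals=True, virtuals=virtuals)

    # `possible` maps each reachable package name to the names it may depend on;
    # `virtuals` accumulates every virtual encountered along the way (e.g. "mpi")
    for parent_name, dependencies in possible.items():
        print(parent_name, sorted(dependencies))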
@@ -2209,7 +2209,7 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
|
||||
pkg = None
|
||||
|
||||
# Pre-uninstall hook runs first.
|
||||
with spack.store.STORE.db.prefix_write_lock(spec):
|
||||
with spack.store.STORE.prefix_locker.write_lock(spec):
|
||||
if pkg is not None:
|
||||
try:
|
||||
spack.hooks.pre_uninstall(spec)
|
||||
@@ -2459,8 +2459,8 @@ def possible_dependencies(*pkg_or_spec, **kwargs):
|
||||
if not isinstance(pos, spack.spec.Spec):
|
||||
pos = spack.spec.Spec(pos)
|
||||
|
||||
if spack.repo.path.is_virtual(pos.name):
|
||||
packages.extend(p.package_class for p in spack.repo.path.providers_for(pos.name))
|
||||
if spack.repo.PATH.is_virtual(pos.name):
|
||||
packages.extend(p.package_class for p in spack.repo.PATH.providers_for(pos.name))
|
||||
continue
|
||||
else:
|
||||
packages.append(pos.package_class)
|
||||
|
@@ -147,7 +147,7 @@ def preferred_variants(cls, pkg_name):
|
||||
variants = " ".join(variants)
|
||||
|
||||
# Only return variants that are actually supported by the package
|
||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
spec = spack.spec.Spec("%s %s" % (pkg_name, variants))
|
||||
return dict(
|
||||
(name, variant) for name, variant in spec.variants.items() if name in pkg_cls.variants
|
||||
@@ -162,7 +162,7 @@ def spec_externals(spec):
|
||||
from spack.util.module_cmd import path_from_modules # noqa: F401
|
||||
|
||||
def _package(maybe_abstract_spec):
|
||||
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
return pkg_cls(maybe_abstract_spec)
|
||||
|
||||
allpkgs = spack.config.get("packages")
|
||||
@@ -199,7 +199,7 @@ def is_spec_buildable(spec):
|
||||
so_far = all_buildable # the default "so far"
|
||||
|
||||
def _package(s):
|
||||
pkg_cls = spack.repo.path.get_pkg_class(s.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
|
||||
return pkg_cls(s)
|
||||
|
||||
# check whether any providers for this package override the default
|
||||
|
@@ -238,7 +238,7 @@ def to_dict(self):
|
||||
|
||||
def from_dict(dictionary, repository=None):
|
||||
"""Create a patch from json dictionary."""
|
||||
repository = repository or spack.repo.path
|
||||
repository = repository or spack.repo.PATH
|
||||
owner = dictionary.get("owner")
|
||||
if "owner" not in dictionary:
|
||||
raise ValueError("Invalid patch dictionary: %s" % dictionary)
|
||||
|
@@ -24,7 +24,7 @@
|
||||
import traceback
|
||||
import types
|
||||
import uuid
|
||||
from typing import Dict, Union
|
||||
from typing import Any, Dict, List, Union
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.lang
|
||||
@@ -149,7 +149,7 @@ def compute_loader(self, fullname):
|
||||
|
||||
# If it's a module in some repo, or if it is the repo's
|
||||
# namespace, let the repo handle it.
|
||||
for repo in path.repos:
|
||||
for repo in PATH.repos:
|
||||
# We are using the namespace of the repo and the repo contains the package
|
||||
if namespace == repo.full_namespace:
|
||||
# With 2 nested conditionals we can call "repo.real_name" only once
|
||||
@@ -163,7 +163,7 @@ def compute_loader(self, fullname):
|
||||
|
||||
# No repo provides the namespace, but it is a valid prefix of
|
||||
# something in the RepoPath.
|
||||
if path.by_namespace.is_prefix(fullname):
|
||||
if PATH.by_namespace.is_prefix(fullname):
|
||||
return SpackNamespaceLoader()
|
||||
|
||||
return None
|
||||
@@ -184,9 +184,9 @@ def compute_loader(self, fullname):
|
||||
def packages_path():
|
||||
"""Get the test repo if it is active, otherwise the builtin repo."""
|
||||
try:
|
||||
return spack.repo.path.get_repo("builtin.mock").packages_path
|
||||
return spack.repo.PATH.get_repo("builtin.mock").packages_path
|
||||
except spack.repo.UnknownNamespaceError:
|
||||
return spack.repo.path.get_repo("builtin").packages_path
|
||||
return spack.repo.PATH.get_repo("builtin").packages_path
|
||||
|
||||
|
||||
class GitExe:
|
||||
@@ -282,7 +282,7 @@ def add_package_to_git_stage(packages):
|
||||
git = GitExe()
|
||||
|
||||
for pkg_name in packages:
|
||||
filename = spack.repo.path.filename_for_package_name(pkg_name)
|
||||
filename = spack.repo.PATH.filename_for_package_name(pkg_name)
|
||||
if not os.path.isfile(filename):
|
||||
tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)
|
||||
|
||||
@@ -422,7 +422,7 @@ def _create_new_cache(self) -> Dict[str, os.stat_result]:
|
||||
def last_mtime(self):
|
||||
return max(sinfo.st_mtime for sinfo in self._packages_to_stats.values())
|
||||
|
||||
def modified_since(self, since):
|
||||
def modified_since(self, since: float) -> List[str]:
|
||||
return [name for name, sinfo in self._packages_to_stats.items() if sinfo.st_mtime > since]
|
||||
|
||||
def __getitem__(self, item):
|
||||
@@ -548,35 +548,34 @@ class RepoIndex:
|
||||
when they're needed.
|
||||
|
||||
``Indexers`` should be added to the ``RepoIndex`` using
|
||||
``add_index(name, indexer)``, and they should support the interface
|
||||
``add_indexer(name, indexer)``, and they should support the interface
|
||||
defined by ``Indexer``, so that the ``RepoIndex`` can read, generate,
|
||||
and update stored indices.
|
||||
|
||||
Generated indexes are accessed by name via ``__getitem__()``.
|
||||
Generated indexes are accessed by name via ``__getitem__()``."""
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, package_checker, namespace, cache):
|
||||
def __init__(
|
||||
self,
|
||||
package_checker: FastPackageChecker,
|
||||
namespace: str,
|
||||
cache: spack.util.file_cache.FileCache,
|
||||
):
|
||||
self.checker = package_checker
|
||||
self.packages_path = self.checker.packages_path
|
||||
if sys.platform == "win32":
|
||||
self.packages_path = spack.util.path.convert_to_posix_path(self.packages_path)
|
||||
self.namespace = namespace
|
||||
|
||||
self.indexers = {}
|
||||
self.indexes = {}
|
||||
self.indexers: Dict[str, Indexer] = {}
|
||||
self.indexes: Dict[str, Any] = {}
|
||||
self.cache = cache
|
||||
|
||||
def add_indexer(self, name, indexer):
|
||||
def add_indexer(self, name: str, indexer: Indexer):
|
||||
"""Add an indexer to the repo index.
|
||||
|
||||
Arguments:
|
||||
name (str): name of this indexer
|
||||
|
||||
indexer (object): an object that supports create(), read(),
|
||||
write(), and get_index() operations
|
||||
|
||||
"""
|
||||
name: name of this indexer
|
||||
indexer: object implementing the ``Indexer`` interface"""
|
||||
self.indexers[name] = indexer
|
||||
|
||||
def __getitem__(self, name):
|
||||
@@ -597,17 +596,15 @@ def _build_all_indexes(self):
|
||||
because the main bottleneck here is loading all the packages. It
|
||||
can take tens of seconds to regenerate sequentially, and we'd
|
||||
rather only pay that cost once rather than on several
|
||||
invocations.
|
||||
|
||||
"""
|
||||
invocations."""
|
||||
for name, indexer in self.indexers.items():
|
||||
self.indexes[name] = self._build_index(name, indexer)
|
||||
|
||||
def _build_index(self, name, indexer):
|
||||
def _build_index(self, name: str, indexer: Indexer):
|
||||
"""Determine which packages need an update, and update indexes."""
|
||||
|
||||
# Filename of the provider index cache (we assume they're all json)
|
||||
cache_filename = "{0}/{1}-index.json".format(name, self.namespace)
|
||||
cache_filename = f"{name}/{self.namespace}-index.json"
|
||||
|
||||
# Compute which packages needs to be updated in the cache
|
||||
index_mtime = self.cache.mtime(cache_filename)
|
||||
@@ -631,8 +628,7 @@ def _build_index(self, name, indexer):
|
||||
needs_update = self.checker.modified_since(new_index_mtime)
|
||||
|
||||
for pkg_name in needs_update:
|
||||
namespaced_name = "%s.%s" % (self.namespace, pkg_name)
|
||||
indexer.update(namespaced_name)
|
||||
indexer.update(f"{self.namespace}.{pkg_name}")
|
||||
|
||||
indexer.write(new)
|
||||
|
||||
@@ -1378,7 +1374,7 @@ def create(configuration):
|
||||
|
||||
|
||||
#: Singleton repo path instance
|
||||
path: Union[RepoPath, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_path)
|
||||
PATH: Union[RepoPath, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_path)
|
||||
|
||||
# Add the finder to sys.meta_path
|
||||
REPOS_FINDER = ReposFinder()
|
||||
@@ -1387,7 +1383,7 @@ def create(configuration):
|
||||
|
||||
def all_package_names(include_virtuals=False):
|
||||
"""Convenience wrapper around ``spack.repo.all_package_names()``."""
|
||||
return path.all_package_names(include_virtuals)
|
||||
return PATH.all_package_names(include_virtuals)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
@@ -1402,7 +1398,7 @@ def use_repositories(*paths_and_repos, **kwargs):
|
||||
Returns:
|
||||
Corresponding RepoPath object
|
||||
"""
|
||||
global path
|
||||
global PATH
|
||||
# TODO (Python 2.7): remove this kwargs on deprecation of Python 2.7 support
|
||||
override = kwargs.get("override", True)
|
||||
paths = [getattr(x, "root", x) for x in paths_and_repos]
|
||||
@@ -1411,12 +1407,12 @@ def use_repositories(*paths_and_repos, **kwargs):
|
||||
spack.config.config.push_scope(
|
||||
spack.config.InternalConfigScope(name=scope_name, data={repos_key: paths})
|
||||
)
|
||||
path, saved = create(configuration=spack.config.config), path
|
||||
PATH, saved = create(configuration=spack.config.config), PATH
|
||||
try:
|
||||
yield path
|
||||
yield PATH
|
||||
finally:
|
||||
spack.config.config.remove_scope(scope_name=scope_name)
|
||||
path = saved
|
||||
PATH = saved
|
||||
|
||||
|
||||
class MockRepositoryBuilder:
|
||||
|
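Downstream code now spells the singleton spack.repo.PATH, and use_repositories() still swaps it temporarily, yielding the active object; a hedged usage sketch (the repository path and package name are placeholders):

    import spack.repo

    with spack.repo.use_repositories("/path/to/custom/repo") as repo_path:
        # Inside the block the yielded object and the module-level singleton agree
        assert repo_path is spack.repo.PATH
        pkg_cls = spack.repo.PATH.get_pkg_class("zlib")

    # On exit the previous PATH singleton is restored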
@@ -6,7 +6,7 @@
|
||||
"""Schema for a buildcache spec.yaml file
|
||||
|
||||
.. literalinclude:: _spack_root/lib/spack/spack/schema/buildcache_spec.py
|
||||
:lines: 14-
|
||||
:lines: 13-
|
||||
"""
|
||||
import spack.schema.spec
|
||||
|
||||
@@ -16,15 +16,8 @@
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"buildinfo": {
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"required": ["relative_prefix"],
|
||||
"properties": {
|
||||
"relative_prefix": {"type": "string"},
|
||||
"relative_rpaths": {"type": "boolean"},
|
||||
},
|
||||
},
|
||||
# `buildinfo` is no longer needed as of Spack 0.21
|
||||
"buildinfo": {"type": "object"},
|
||||
"spec": {
|
||||
"type": "object",
|
||||
"additionalProperties": True,
|
||||
|
@@ -48,6 +48,7 @@
|
||||
import spack.util.timer
|
||||
import spack.variant
|
||||
import spack.version as vn
|
||||
import spack.version.git_ref_lookup
|
||||
|
||||
# these are from clingo.ast and bootstrapped later
|
||||
ASTType = None
|
||||
@@ -236,7 +237,7 @@ def listify(args):
|
||||
|
||||
def packagize(pkg):
|
||||
if isinstance(pkg, str):
|
||||
return spack.repo.path.get_pkg_class(pkg)
|
||||
return spack.repo.PATH.get_pkg_class(pkg)
|
||||
else:
|
||||
return pkg
|
||||
|
||||
@@ -341,7 +342,7 @@ def extend_flag_list(flag_list, new_flags):
|
||||
|
||||
def check_packages_exist(specs):
|
||||
"""Ensure all packages mentioned in specs exist."""
|
||||
repo = spack.repo.path
|
||||
repo = spack.repo.PATH
|
||||
for spec in specs:
|
||||
for s in spec.traverse():
|
||||
try:
|
||||
@@ -528,7 +529,7 @@ def _compute_specs_from_answer_set(self):
|
||||
def _normalize_packages_yaml(packages_yaml):
|
||||
normalized_yaml = copy.copy(packages_yaml)
|
||||
for pkg_name in packages_yaml:
|
||||
is_virtual = spack.repo.path.is_virtual(pkg_name)
|
||||
is_virtual = spack.repo.PATH.is_virtual(pkg_name)
|
||||
if pkg_name == "all" or not is_virtual:
|
||||
continue
|
||||
|
||||
@@ -536,7 +537,7 @@ def _normalize_packages_yaml(packages_yaml):
|
||||
data = normalized_yaml.pop(pkg_name)
|
||||
is_buildable = data.get("buildable", True)
|
||||
if not is_buildable:
|
||||
for provider in spack.repo.path.providers_for(pkg_name):
|
||||
for provider in spack.repo.PATH.providers_for(pkg_name):
|
||||
entry = normalized_yaml.setdefault(provider.name, {})
|
||||
entry["buildable"] = False
|
||||
|
||||
@@ -955,8 +956,8 @@ def target_ranges(self, spec, single_target_fn):
|
||||
return [fn.attr("node_target_satisfies", spec.name, target)]
|
||||
|
||||
def conflict_rules(self, pkg):
|
||||
default_msg = "{0} '{1}' conflicts with '{2}'"
|
||||
no_constraint_msg = "{0} conflicts with '{1}'"
|
||||
default_msg = "{0}: '{1}' conflicts with '{2}'"
|
||||
no_constraint_msg = "{0}: conflicts with '{1}'"
|
||||
for trigger, constraints in pkg.conflicts.items():
|
||||
trigger_msg = "conflict trigger %s" % str(trigger)
|
||||
trigger_id = self.condition(spack.spec.Spec(trigger), name=pkg.name, msg=trigger_msg)
|
||||
@@ -1415,7 +1416,7 @@ def external_packages(self):
|
||||
continue
|
||||
|
||||
# This package does not appear in any repository
|
||||
if pkg_name not in spack.repo.path:
|
||||
if pkg_name not in spack.repo.PATH:
|
||||
continue
|
||||
|
||||
self.gen.h2("External package: {0}".format(pkg_name))
|
||||
@@ -1597,7 +1598,7 @@ class Body:
|
||||
if not spec.concrete:
|
||||
reserved_names = spack.directives.reserved_names
|
||||
if not spec.virtual and vname not in reserved_names:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
try:
|
||||
variant_def, _ = pkg_cls.variants[vname]
|
||||
except KeyError:
|
||||
@@ -1605,7 +1606,7 @@ class Body:
|
||||
raise RuntimeError(msg.format(vname, spec.name))
|
||||
else:
|
||||
variant_def.validate_or_raise(
|
||||
variant, spack.repo.path.get_pkg_class(spec.name)
|
||||
variant, spack.repo.PATH.get_pkg_class(spec.name)
|
||||
)
|
||||
|
||||
clauses.append(f.variant_value(spec.name, vname, value))
|
||||
@@ -1677,7 +1678,7 @@ class Body:
|
||||
except spack.repo.UnknownNamespaceError:
|
||||
# Try to look up the package of the same name and use its
|
||||
# providers. This is as good as we can do without edge info.
|
||||
pkg_class = spack.repo.path.get_pkg_class(dep.name)
|
||||
pkg_class = spack.repo.PATH.get_pkg_class(dep.name)
|
||||
spec = spack.spec.Spec(f"{dep.name}@{dep.version}")
|
||||
pkg = pkg_class(spec)
|
||||
|
||||
@@ -1723,7 +1724,7 @@ def build_version_dict(self, possible_pkgs):
|
||||
packages_yaml = spack.config.get("packages")
|
||||
packages_yaml = _normalize_packages_yaml(packages_yaml)
|
||||
for pkg_name in possible_pkgs:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
|
||||
# All the versions from the corresponding package.py file. Since concepts
|
||||
# like being a "develop" version or being preferred exist only at a
|
||||
@@ -1754,7 +1755,7 @@ def key_fn(item):
|
||||
# specs will be computed later
|
||||
version_preferences = packages_yaml.get(pkg_name, {}).get("version", [])
|
||||
version_defs = []
|
||||
pkg_class = spack.repo.path.get_pkg_class(pkg_name)
|
||||
pkg_class = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
for vstr in version_preferences:
|
||||
v = vn.ver(vstr)
|
||||
if isinstance(v, vn.GitVersion):
|
||||
@@ -2052,7 +2053,7 @@ def define_virtual_constraints(self):
|
||||
# aggregate constraints into per-virtual sets
|
||||
constraint_map = collections.defaultdict(lambda: set())
|
||||
for pkg_name, versions in self.version_constraints:
|
||||
if not spack.repo.path.is_virtual(pkg_name):
|
||||
if not spack.repo.PATH.is_virtual(pkg_name):
|
||||
continue
|
||||
constraint_map[pkg_name].add(versions)
|
||||
|
||||
@@ -2140,7 +2141,7 @@ def _facts_from_concrete_spec(self, spec, possible):
|
||||
self.reusable_and_possible[h] = spec
|
||||
try:
|
||||
# Only consider installed packages for repo we know
|
||||
spack.repo.path.get(spec)
|
||||
spack.repo.PATH.get(spec)
|
||||
except (spack.repo.UnknownNamespaceError, spack.repo.UnknownPackageError):
|
||||
return
|
||||
|
||||
@@ -2365,7 +2366,7 @@ def _specs_from_requires(self, pkg_name, section):
|
||||
# Prefer spec's name if it exists, in case the spec is
|
||||
# requiring a specific implementation inside of a virtual section
|
||||
# e.g. packages:mpi:require:openmpi@4.0.1
|
||||
pkg_class = spack.repo.path.get_pkg_class(spec.name or pkg_name)
|
||||
pkg_class = spack.repo.PATH.get_pkg_class(spec.name or pkg_name)
|
||||
satisfying_versions = self._check_for_defined_matching_versions(
|
||||
pkg_class, spec.versions
|
||||
)
|
||||
@@ -2620,7 +2621,7 @@ def build_specs(self, function_tuples):
|
||||
# predicates on virtual packages.
|
||||
if name != "error":
|
||||
pkg = args[0]
|
||||
if spack.repo.path.is_virtual(pkg):
|
||||
if spack.repo.PATH.is_virtual(pkg):
|
||||
continue
|
||||
|
||||
# if we've already gotten a concrete spec for this pkg,
|
||||
@@ -2638,7 +2639,7 @@ def build_specs(self, function_tuples):
|
||||
for spec in self._specs.values():
|
||||
if spec.namespace:
|
||||
continue
|
||||
repo = spack.repo.path.repo_for_pkg(spec)
|
||||
repo = spack.repo.PATH.repo_for_pkg(spec)
|
||||
spec.namespace = repo.namespace
|
||||
|
||||
# fix flags after all specs are constructed
|
||||
@@ -2672,7 +2673,9 @@ def build_specs(self, function_tuples):
|
||||
for root in self._specs.values():
|
||||
for spec in root.traverse():
|
||||
if isinstance(spec.version, vn.GitVersion):
|
||||
spec.version.attach_git_lookup_from_package(spec.fullname)
|
||||
spec.version.attach_lookup(
|
||||
spack.version.git_ref_lookup.GitRefLookup(spec.fullname)
|
||||
)
|
||||
|
||||
return self._specs
|
||||
|
||||
|
@@ -56,7 +56,7 @@
import os
import re
import warnings
-from typing import Tuple, Union
+from typing import List, Tuple, Union

import llnl.util.filesystem as fs
import llnl.util.lang as lang

@@ -88,6 +88,7 @@
import spack.util.string
import spack.variant as vt
import spack.version as vn
+import spack.version.git_ref_lookup

__all__ = [
    "CompilerSpec",

@@ -1298,7 +1299,7 @@ def __init__(self, spec, name, query_parameters):
        original_spec = getattr(spec, "wrapped_obj", spec)
        self.wrapped_obj = original_spec
        self.token = original_spec, name, query_parameters
-       is_virtual = spack.repo.path.is_virtual(name)
+       is_virtual = spack.repo.PATH.is_virtual(name)
        self.last_query = QueryState(
            name=name, extra_parameters=query_parameters, isvirtual=is_virtual
        )

@@ -1732,7 +1733,7 @@ def package(self):
            self.name
        )
        if not self._package:
-           self._package = spack.repo.path.get(self)
+           self._package = spack.repo.PATH.get(self)
        return self._package

    @property

@@ -1740,11 +1741,11 @@ def package_class(self):
        """Internal package call gets only the class object for a package.
        Use this to just get package metadata.
        """
-       return spack.repo.path.get_pkg_class(self.fullname)
+       return spack.repo.PATH.get_pkg_class(self.fullname)

    @property
    def virtual(self):
-       return spack.repo.path.is_virtual(self.name)
+       return spack.repo.PATH.is_virtual(self.name)

    @property
    def concrete(self):

@@ -2271,7 +2272,7 @@ def override(init_spec, change_spec):
    # TODO: this doesn't account for the case where the changed spec
    # (and the user spec) have dependencies
    new_spec = init_spec.copy()
-   package_cls = spack.repo.path.get_pkg_class(new_spec.name)
+   package_cls = spack.repo.PATH.get_pkg_class(new_spec.name)
    if change_spec.versions and not change_spec.versions == vn.any_version:
        new_spec.versions = change_spec.versions
    for variant, value in change_spec.variants.items():

@@ -2545,7 +2546,7 @@ def validate_detection(self):
        assert isinstance(self.extra_attributes, collections.abc.Mapping), msg

        # Validate the spec calling a package specific method
-       pkg_cls = spack.repo.path.get_pkg_class(self.name)
+       pkg_cls = spack.repo.PATH.get_pkg_class(self.name)
        validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
        validate_fn(self, self.extra_attributes)

@@ -2644,7 +2645,7 @@ def _expand_virtual_packages(self, concretizer):
        """
        # Make an index of stuff this spec already provides
        self_index = spack.provider_index.ProviderIndex(
-           repository=spack.repo.path, specs=self.traverse(), restrict=True
+           repository=spack.repo.PATH, specs=self.traverse(), restrict=True
        )
        changed = False
        done = False

@@ -2784,7 +2785,7 @@ def _old_concretize(self, tests=False, deprecation_warning=True):
        visited_user_specs = set()
        for dep in self.traverse():
            visited_user_specs.add(dep.name)
-           pkg_cls = spack.repo.path.get_pkg_class(dep.name)
+           pkg_cls = spack.repo.PATH.get_pkg_class(dep.name)
            visited_user_specs.update(x.name for x in pkg_cls(dep).provided)

        extra = set(user_spec_deps.keys()).difference(visited_user_specs)

@@ -2867,7 +2868,7 @@ def inject_patches_variant(root):
            # we can do it as late as possible to allow as much
            # compatibility across repositories as possible.
            if s.namespace is None:
-               s.namespace = spack.repo.path.repo_for_pkg(s.name).namespace
+               s.namespace = spack.repo.PATH.repo_for_pkg(s.name).namespace

            if s.concrete:
                continue

@@ -2925,7 +2926,7 @@ def ensure_external_path_if_external(external_spec):

    # Get the path from the module the package can override the default
    # (this is mostly needed for Cray)
-   pkg_cls = spack.repo.path.get_pkg_class(external_spec.name)
+   pkg_cls = spack.repo.PATH.get_pkg_class(external_spec.name)
    package = pkg_cls(external_spec)
    external_spec.external_path = getattr(
        package, "external_prefix", md.path_from_modules(external_spec.external_modules)

@@ -3199,7 +3200,7 @@ def _find_provider(self, vdep, provider_index):
        Raise an exception if there is a conflicting virtual
        dependency already in this spec.
        """
-       assert spack.repo.path.is_virtual_safe(vdep.name), vdep
+       assert spack.repo.PATH.is_virtual_safe(vdep.name), vdep

        # note that this defensively copies.
        providers = provider_index.providers_for(vdep)

@@ -3265,7 +3266,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
        # If it's a virtual dependency, try to find an existing
        # provider in the spec, and merge that.
        virtuals = ()
-       if spack.repo.path.is_virtual_safe(dep.name):
+       if spack.repo.PATH.is_virtual_safe(dep.name):
            virtuals = (dep.name,)
            visited.add(dep.name)
            provider = self._find_provider(dep, provider_index)

@@ -3273,11 +3274,11 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
            dep = provider
        else:
            index = spack.provider_index.ProviderIndex(
-               repository=spack.repo.path, specs=[dep], restrict=True
+               repository=spack.repo.PATH, specs=[dep], restrict=True
            )
            items = list(spec_deps.items())
            for name, vspec in items:
-               if not spack.repo.path.is_virtual_safe(vspec.name):
+               if not spack.repo.PATH.is_virtual_safe(vspec.name):
                    continue

                if index.providers_for(vspec):

@@ -3427,7 +3428,7 @@ def normalize(self, force=False, tests=False, user_spec_deps=None):
        # Initialize index of virtual dependency providers if
        # concretize didn't pass us one already
        provider_index = spack.provider_index.ProviderIndex(
-           repository=spack.repo.path, specs=[s for s in all_spec_deps.values()], restrict=True
+           repository=spack.repo.PATH, specs=[s for s in all_spec_deps.values()], restrict=True
        )

        # traverse the package DAG and fill out dependencies according

@@ -3458,7 +3459,7 @@ def validate_or_raise(self):
        for spec in self.traverse():
            # raise an UnknownPackageError if the spec's package isn't real.
            if (not spec.virtual) and spec.name:
-               spack.repo.path.get_pkg_class(spec.fullname)
+               spack.repo.PATH.get_pkg_class(spec.fullname)

            # validate compiler in addition to the package name.
            if spec.compiler:

@@ -3526,7 +3527,7 @@ def update_variant_validate(self, variant_name, values):
            variant = pkg_variant.make_variant(value)
            self.variants[variant_name] = variant

-       pkg_cls = spack.repo.path.get_pkg_class(self.name)
+       pkg_cls = spack.repo.PATH.get_pkg_class(self.name)
        pkg_variant.validate_or_raise(self.variants[variant_name], pkg_cls)

    def constrain(self, other, deps=True):

@@ -3731,8 +3732,8 @@ def _intersects(self, other: "Spec", deps: bool = True) -> bool:
        if self.name != other.name and self.name and other.name:
            if self.virtual and other.virtual:
                # Two virtual specs intersect only if there are providers for both
-               lhs = spack.repo.path.providers_for(str(self))
-               rhs = spack.repo.path.providers_for(str(other))
+               lhs = spack.repo.PATH.providers_for(str(self))
+               rhs = spack.repo.PATH.providers_for(str(other))
                intersection = [s for s in lhs if any(s.intersects(z) for z in rhs)]
                return bool(intersection)

@@ -3741,7 +3742,7 @@ def _intersects(self, other: "Spec", deps: bool = True) -> bool:
            virtual_spec, non_virtual_spec = (self, other) if self.virtual else (other, self)
            try:
                # Here we might get an abstract spec
-               pkg_cls = spack.repo.path.get_pkg_class(non_virtual_spec.fullname)
+               pkg_cls = spack.repo.PATH.get_pkg_class(non_virtual_spec.fullname)
                pkg = pkg_cls(non_virtual_spec)
            except spack.repo.UnknownEntityError:
                # If we can't get package info on this spec, don't treat

@@ -3812,10 +3813,10 @@ def _intersects_dependencies(self, other):

        # For virtual dependencies, we need to dig a little deeper.
        self_index = spack.provider_index.ProviderIndex(
-           repository=spack.repo.path, specs=self.traverse(), restrict=True
+           repository=spack.repo.PATH, specs=self.traverse(), restrict=True
        )
        other_index = spack.provider_index.ProviderIndex(
-           repository=spack.repo.path, specs=other.traverse(), restrict=True
+           repository=spack.repo.PATH, specs=other.traverse(), restrict=True
        )

        # This handles cases where there are already providers for both vpkgs

@@ -3856,7 +3857,7 @@ def _satisfies(self, other: "Spec", deps: bool = True) -> bool:
        if not self.virtual and other.virtual:
            try:
                # Here we might get an abstract spec
-               pkg_cls = spack.repo.path.get_pkg_class(self.fullname)
+               pkg_cls = spack.repo.PATH.get_pkg_class(self.fullname)
                pkg = pkg_cls(self)
            except spack.repo.UnknownEntityError:
                # If we can't get package info on this spec, don't treat

@@ -3938,8 +3939,8 @@ def patches(self):
        # translate patch sha256sums to patch objects by consulting the index
        if self._patches_assigned():
            for sha256 in self.variants["patches"]._patches_in_order_of_appearance:
-               index = spack.repo.path.patch_index
-               pkg_cls = spack.repo.path.get_pkg_class(self.name)
+               index = spack.repo.PATH.patch_index
+               pkg_cls = spack.repo.PATH.get_pkg_class(self.name)
                patch = index.patch_for_package(sha256, pkg_cls)
                self._patches.append(patch)

@@ -4801,7 +4802,7 @@ def attach_git_version_lookup(self):
            return
        for v in self.versions:
            if isinstance(v, vn.GitVersion) and v._ref_version is None:
-               v.attach_git_lookup_from_package(self.fullname)
+               v.attach_lookup(spack.version.git_ref_lookup.GitRefLookup(self.fullname))


def parse_with_version_concrete(string: str, compiler: bool = False):

@@ -5146,9 +5147,7 @@ def __missing__(self, key):
        return value


-def save_dependency_specfiles(
-   root_spec_info, output_directory, dependencies=None, spec_format="json"
-):
+def save_dependency_specfiles(root: Spec, output_directory: str, dependencies: List[Spec]):
    """Given a root spec (represented as a yaml object), index it with a subset
    of its dependencies, and write each dependency to a separate yaml file
    in the output directory. By default, all dependencies will be written

@@ -5157,26 +5156,15 @@ def save_dependency_specfiles(
    incoming spec is not json, that can be specified with the spec_format
    parameter. This can be used to convert from yaml specfiles to the
    json format."""
-   if spec_format == "json":
-       root_spec = Spec.from_json(root_spec_info)
-   elif spec_format == "yaml":
-       root_spec = Spec.from_yaml(root_spec_info)
-   else:
-       raise SpecParseError("Unrecognized spec format {0}.".format(spec_format))
-
-   dep_list = dependencies
-   if not dep_list:
-       dep_list = [dep.name for dep in root_spec.traverse()]
+   for spec in root.traverse():
+       if not any(spec.satisfies(dep) for dep in dependencies):
+           continue

-   for dep_name in dep_list:
-       if dep_name not in root_spec:
-           msg = "Dependency {0} does not exist in root spec {1}".format(dep_name, root_spec.name)
-           raise SpecDependencyNotFoundError(msg)
-       dep_spec = root_spec[dep_name]
-       json_path = os.path.join(output_directory, "{0}.json".format(dep_name))
+       json_path = os.path.join(output_directory, f"{spec.name}.json")

        with open(json_path, "w") as fd:
-           fd.write(dep_spec.to_json(hash=ht.dag_hash))
+           fd.write(spec.to_json(hash=ht.dag_hash))


class SpecParseError(spack.error.SpecError):

@@ -5397,11 +5385,6 @@ def __init__(self, spec, matches):
        super().__init__(message, long_message)


-class SpecDependencyNotFoundError(spack.error.SpecError):
-   """Raised when a failure is encountered writing the dependencies of
-   a spec."""
-
-
class SpecDeprecatedError(spack.error.SpecError):
    """Raised when a spec concretizes to a deprecated spec or dependency."""
@@ -326,7 +326,7 @@ def __init__(
        self.keep = keep

        # File lock for the stage directory. We use one file for all
-       # stage locks. See spack.database.Database.prefix_lock for
+       # stage locks. See spack.database.Database.prefix_locker.lock for
        # details on this approach.
        self._lock = None
        if lock:

@@ -884,23 +884,19 @@ def get_checksums_for_versions(url_dict, name, **kwargs):
        keep_stage (bool): whether to keep staging area when command completes
        batch (bool): whether to ask user how many versions to fetch (false)
            or fetch all versions (true)
        latest (bool): whether to take the latest version (true) or all (false)
        fetch_options (dict): Options used for the fetcher (such as timeout
            or cookies)

    Returns:
-       (str): A multi-line string containing versions and corresponding hashes
+       (dict): A dictionary of the form: version -> checksum

    """
    batch = kwargs.get("batch", False)
    fetch_options = kwargs.get("fetch_options", None)
    first_stage_function = kwargs.get("first_stage_function", None)
    keep_stage = kwargs.get("keep_stage", False)
    latest = kwargs.get("latest", False)

    sorted_versions = sorted(url_dict.keys(), reverse=True)
    if latest:
        sorted_versions = sorted_versions[:1]

    # Find length of longest string in the list for padding
    max_len = max(len(str(v)) for v in sorted_versions)

@@ -915,7 +911,7 @@ def get_checksums_for_versions(url_dict, name, **kwargs):
    )
    print()

    if batch or latest:
    if batch:
        archives_to_fetch = len(sorted_versions)
    else:
        archives_to_fetch = tty.get_number(

@@ -929,14 +925,10 @@ def get_checksums_for_versions(url_dict, name, **kwargs):
    urls = [url_dict[v] for v in versions]

    tty.debug("Downloading...")
-   version_hashes = []
+   version_hashes = {}
    i = 0
    errors = []
    for url, version in zip(urls, versions):
        # Wheels should not be expanded during staging
        expand_arg = ""
        if url.endswith(".whl") or ".whl#" in url:
            expand_arg = ", expand=False"
        try:
            if fetch_options:
                url_or_fs = fs.URLFetchStrategy(url, fetch_options=fetch_options)

@@ -951,8 +943,8 @@ def get_checksums_for_versions(url_dict, name, **kwargs):
                first_stage_function(stage, url)

            # Checksum the archive and add it to the list
-           version_hashes.append(
-               (version, spack.util.crypto.checksum(hashlib.sha256, stage.archive_file))
+           version_hashes[version] = spack.util.crypto.checksum(
+               hashlib.sha256, stage.archive_file
            )
            i += 1
        except FailedDownloadError:

@@ -966,17 +958,12 @@ def get_checksums_for_versions(url_dict, name, **kwargs):
    if not version_hashes:
        tty.die("Could not fetch any versions for {0}".format(name))

-   # Generate the version directives to put in a package.py
-   version_lines = "\n".join(
-       [' version("{0}", sha256="{1}"{2})'.format(v, h, expand_arg) for v, h in version_hashes]
-   )
-
    num_hash = len(version_hashes)
    tty.debug(
        "Checksummed {0} version{1} of {2}:".format(num_hash, "" if num_hash == 1 else "s", name)
    )

-   return version_lines
+   return version_hashes


class StageError(spack.error.SpackError):
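Since `get_checksums_for_versions` now returns a `version -> checksum` dictionary instead of pre-rendered text, formatting the `version()` directives becomes the caller's job. A hedged sketch of what that formatting could look like (the helper name below is hypothetical and not part of this diff):

```python
def render_version_directives(version_hashes, expand_arg=""):
    """Turn a {version: sha256} mapping into version() lines for a package.py."""
    return "\n".join(
        '    version("{0}", sha256="{1}"{2})'.format(v, h, expand_arg)
        for v, h in version_hashes.items()
    )
```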
@@ -25,13 +25,14 @@
from typing import Any, Callable, Dict, Generator, List, Optional, Union

import llnl.util.lang
-import llnl.util.tty as tty
+from llnl.util import tty

import spack.config
import spack.database
import spack.directory_layout
import spack.error
import spack.paths
import spack.spec
import spack.util.path

#: default installation root, relative to the Spack install path

@@ -134,18 +135,21 @@ def parse_install_tree(config_dict):
class Store:
    """A store is a path full of installed Spack packages.

-   Stores consist of packages installed according to a
-   ``DirectoryLayout``, along with an index, or _database_ of their
-   contents. The directory layout controls what paths look like and how
-   Spack ensures that each unique spec gets its own unique directory (or
-   not, though we don't recommend that). The database is a single file
-   that caches metadata for the entire Spack installation. It prevents
-   us from having to spider the install tree to figure out what's there.
+   Stores consist of packages installed according to a ``DirectoryLayout``, along with a database
+   of their contents.
+
+   The directory layout controls what paths look like and how Spack ensures that each unique spec
+   gets its own unique directory (or not, though we don't recommend that).
+
+   The database is a single file that caches metadata for the entire Spack installation. It
+   prevents us from having to spider the install tree to figure out what's there.
+
+   The store is also able to lock installation prefixes, and to mark installation failures.

    Args:
        root: path to the root of the install tree
-       unpadded_root: path to the root of the install tree without padding.
-           The sbang script has to be installed here to work with padded roots
+       unpadded_root: path to the root of the install tree without padding. The sbang script has
+           to be installed here to work with padded roots
        projections: expression according to guidelines that describes how to construct a path to
            a package prefix in this store
        hash_length: length of the hashes used in the directory layout. Spec hash suffixes will be

@@ -170,6 +174,19 @@ def __init__(
        self.upstreams = upstreams
        self.lock_cfg = lock_cfg
        self.db = spack.database.Database(root, upstream_dbs=upstreams, lock_cfg=lock_cfg)
+
+       timeout_format_str = (
+           f"{str(lock_cfg.package_timeout)}s" if lock_cfg.package_timeout else "No timeout"
+       )
+       tty.debug("PACKAGE LOCK TIMEOUT: {0}".format(str(timeout_format_str)))
+
+       self.prefix_locker = spack.database.SpecLocker(
+           spack.database.prefix_lock_path(root), default_timeout=lock_cfg.package_timeout
+       )
+       self.failure_tracker = spack.database.FailureTracker(
+           self.root, default_timeout=lock_cfg.package_timeout
+       )

        self.layout = spack.directory_layout.DirectoryLayout(
            root, projections=projections, hash_length=hash_length
        )
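The reworked `Store` now owns a `SpecLocker` for prefix locks and a `FailureTracker` for recording failed installs; the test hunks further down read failures through the tracker instead of `Database._prefix_failures`. A hedged usage sketch based only on the calls visible in those hunks:

```python
import spack.store

store = spack.store.STORE

# package names with a recorded install failure (as in the install test below)
failed = [pkg_name for _, pkg_name in store.failure_tracker.locker.locks.keys()]

# forget all recorded failures (as in the conftest fixture below)
store.failure_tracker.clear_all()
```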
@@ -102,7 +102,7 @@ def __init__(self):
    def restore(self):
        if _SERIALIZE:
            spack.config.config = self.config
-           spack.repo.path = spack.repo.create(self.config)
+           spack.repo.PATH = spack.repo.create(self.config)
            spack.platforms.host = self.platform
            spack.store.STORE = self.store
            self.test_patches.restore()
@@ -32,10 +32,10 @@ def packages_with_tags(tags, installed, skip_empty):
    """
    tag_pkgs = collections.defaultdict(lambda: list)
    spec_names = _get_installed_package_names() if installed else []
-   keys = spack.repo.path.tag_index if tags is None else tags
+   keys = spack.repo.PATH.tag_index if tags is None else tags
    for tag in keys:
        packages = [
-           name for name in spack.repo.path.tag_index[tag] if not installed or name in spec_names
+           name for name in spack.repo.PATH.tag_index[tag] if not installed or name in spec_names
        ]
        if packages or not skip_empty:
            tag_pkgs[tag] = packages
@@ -12,6 +12,7 @@
import urllib.error
import urllib.request
import urllib.response
+from pathlib import PurePath

import py
import pytest

@@ -177,6 +178,33 @@ def install_dir_non_default_layout(tmpdir):
    spack.store.STORE.layout = real_layout


+@pytest.fixture(scope="function")
+def dummy_prefix(tmpdir):
+   """Dummy prefix used for testing tarball creation, validation, extraction"""
+   p = tmpdir.mkdir("prefix")
+   assert os.path.isabs(p)
+
+   p.mkdir("bin")
+   p.mkdir("share")
+   p.mkdir(".spack")
+
+   app = p.join("bin", "app")
+   relative_app_link = p.join("bin", "relative_app_link")
+   absolute_app_link = p.join("bin", "absolute_app_link")
+   data = p.join("share", "file")
+
+   with open(app, "w") as f:
+       f.write("hello world")
+
+   with open(data, "w") as f:
+       f.write("hello world")
+
+   os.symlink("app", relative_app_link)
+   os.symlink(app, absolute_app_link)
+
+   return str(p)
+
+
args = ["file"]
if sys.platform == "darwin":
    args.extend(["/usr/bin/clang++", "install_name_tool"])

@@ -966,3 +994,71 @@ def test_tarball_normalized_permissions(tmpdir):

    # not-executable-by-user files should be 0o644
    assert path_to_member["pkg/share/file"].mode == 0o644


+def test_tarball_common_prefix(dummy_prefix, tmpdir):
+   """Tests whether Spack can figure out the package directory
+   from the tarball contents, and strip them when extracting."""
+
+   # When creating a tarball, Python (and tar) use relative paths,
+   # Absolute paths become relative to `/`, so drop the leading `/`.
+   assert os.path.isabs(dummy_prefix)
+   expected_prefix = PurePath(dummy_prefix).as_posix().lstrip("/")
+
+   with tmpdir.as_cwd():
+       # Create a tarball (using absolute path for prefix dir)
+       with tarfile.open("example.tar", mode="w") as tar:
+           tar.add(name=dummy_prefix)
+
+       # Open, verify common prefix, and extract it.
+       with tarfile.open("example.tar", mode="r") as tar:
+           common_prefix = bindist._ensure_common_prefix(tar)
+           assert common_prefix == expected_prefix
+
+           # Strip the prefix from the tar entries
+           bindist._tar_strip_component(tar, common_prefix)
+
+           # Extract into prefix2
+           tar.extractall(path="prefix2")
+
+       # Verify files are all there at the correct level.
+       assert set(os.listdir("prefix2")) == {"bin", "share", ".spack"}
+       assert set(os.listdir(os.path.join("prefix2", "bin"))) == {
+           "app",
+           "relative_app_link",
+           "absolute_app_link",
+       }
+       assert set(os.listdir(os.path.join("prefix2", "share"))) == {"file"}
+
+       # Relative symlink should still be correct
+       assert os.readlink(os.path.join("prefix2", "bin", "relative_app_link")) == "app"
+
+       # Absolute symlink should remain absolute -- this is for relocation to fix up.
+       assert os.readlink(os.path.join("prefix2", "bin", "absolute_app_link")) == os.path.join(
+           dummy_prefix, "bin", "app"
+       )
+
+
+def test_tarfile_without_common_directory_prefix_fails(tmpdir):
+   """A tarfile that only contains files without a common package directory
+   should fail to extract, as we won't know where to put the files."""
+   with tmpdir.as_cwd():
+       # Create a broken tarball with just a file, no directories.
+       with tarfile.open("empty.tar", mode="w") as tar:
+           tar.addfile(tarfile.TarInfo(name="example/file"), fileobj=io.BytesIO(b"hello"))
+
+       with pytest.raises(ValueError, match="Tarball does not contain a common prefix"):
+           bindist._ensure_common_prefix(tarfile.open("empty.tar", mode="r"))
+
+
+def test_tarfile_with_files_outside_common_prefix(tmpdir, dummy_prefix):
+   """If a file is outside of the common prefix, we should fail."""
+   with tmpdir.as_cwd():
+       with tarfile.open("broken.tar", mode="w") as tar:
+           tar.add(name=dummy_prefix)
+           tar.addfile(tarfile.TarInfo(name="/etc/config_file"), fileobj=io.BytesIO(b"hello"))
+
+       with pytest.raises(
+           ValueError, match="Tarball contains file /etc/config_file outside of prefix"
+       ):
+           bindist._ensure_common_prefix(tarfile.open("broken.tar", mode="r"))
@@ -23,8 +23,6 @@

DATA_PATH = os.path.join(spack.paths.test_path, "data")

-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-

@pytest.fixture()
def concretize_and_setup(default_mock_concretization):

@@ -45,6 +43,7 @@ def _func(dir_str):
    return _func


+@pytest.mark.skipif(sys.platform == "win32", reason="make not available on Windows")
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
class TestTargets:
    @pytest.mark.parametrize(

@@ -93,6 +92,7 @@ def test_negative_ninja_check(self, input_dir, test_dir, concretize_and_setup):
        s.package._if_ninja_target_execute("check")


+@pytest.mark.skipif(sys.platform == "win32", reason="autotools not available on windows")
@pytest.mark.usefixtures("config", "mock_packages")
class TestAutotoolsPackage:
    def test_with_or_without(self, default_mock_concretization):
@@ -15,7 +15,7 @@ def test_build_request_errors(install_mockery):
        inst.BuildRequest("abc", {})

    spec = spack.spec.Spec("trivial-install-test-package")
-   pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+   pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
    with pytest.raises(ValueError, match="must have a concrete spec"):
        inst.BuildRequest(pkg_cls(spec), {})
@@ -15,7 +15,7 @@ def test_build_task_errors(install_mockery):
        inst.BuildTask("abc", None, False, 0, 0, 0, [])

    spec = spack.spec.Spec("trivial-install-test-package")
-   pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+   pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
    with pytest.raises(ValueError, match="must have a concrete spec"):
        inst.BuildTask(pkg_cls(spec), None, False, 0, 0, 0, [])
@@ -12,6 +12,7 @@

import spack.cmd.checksum
+import spack.repo
import spack.spec
from spack.main import SpackCommand

spack_checksum = SpackCommand("checksum")

@@ -20,17 +21,18 @@
@pytest.mark.parametrize(
    "arguments,expected",
    [
-       (["--batch", "patch"], (True, False, False, False)),
-       (["--latest", "patch"], (False, True, False, False)),
-       (["--preferred", "patch"], (False, False, True, False)),
-       (["--add-to-package", "patch"], (False, False, False, True)),
+       (["--batch", "patch"], (True, False, False, False, False)),
+       (["--latest", "patch"], (False, True, False, False, False)),
+       (["--preferred", "patch"], (False, False, True, False, False)),
+       (["--add-to-package", "patch"], (False, False, False, True, False)),
+       (["--verify", "patch"], (False, False, False, False, True)),
    ],
)
def test_checksum_args(arguments, expected):
    parser = argparse.ArgumentParser()
    spack.cmd.checksum.setup_parser(parser)
    args = parser.parse_args(arguments)
-   check = args.batch, args.latest, args.preferred, args.add_to_package
+   check = args.batch, args.latest, args.preferred, args.add_to_package, args.verify
    assert check == expected


@@ -41,13 +43,18 @@ def test_checksum_args(arguments, expected):
        (["--batch", "preferred-test"], "version of preferred-test"),
        (["--latest", "preferred-test"], "Found 1 version"),
        (["--preferred", "preferred-test"], "Found 1 version"),
-       (["--add-to-package", "preferred-test"], "Added 1 new versions to"),
+       (["--add-to-package", "preferred-test"], "Added 0 new versions to"),
+       (["--verify", "preferred-test"], "Verified 1 of 1"),
+       (["--verify", "zlib", "1.2.13"], "1.2.13 [-] No previous checksum"),
    ],
)
def test_checksum(arguments, expected, mock_packages, mock_clone_repo, mock_stage):
    output = spack_checksum(*arguments)
    assert expected in output
-   assert "version(" in output
+
+   # --verify doesn't print versions strings like other flags
+   if "--verify" not in arguments:
+       assert "version(" in output


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")

@@ -65,15 +72,14 @@ def _get_number(*args, **kwargs):


def test_checksum_versions(mock_packages, mock_clone_repo, mock_fetch, mock_stage):
-   pkg_cls = spack.repo.path.get_pkg_class("preferred-test")
-   versions = [str(v) for v in pkg_cls.versions if not v.isdevelop()]
-   output = spack_checksum("preferred-test", versions[0])
-   assert "Found 1 version" in output
+   pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
+   versions = [str(v) for v in pkg_cls.versions]
+   output = spack_checksum("zlib", *versions)
+   assert "Found 3 versions" in output
    assert "version(" in output
-   output = spack_checksum("--add-to-package", "preferred-test", versions[0])
-   assert "Found 1 version" in output
-   assert "version(" in output
-   assert "Added 1 new versions to" in output
+   output = spack_checksum("--add-to-package", "zlib", *versions)
+   assert "Found 3 versions" in output
+   assert "Added 0 new versions to" in output


def test_checksum_missing_version(mock_packages, mock_clone_repo, mock_fetch, mock_stage):

@@ -91,4 +97,30 @@ def test_checksum_deprecated_version(mock_packages, mock_clone_repo, mock_fetch,
        "--add-to-package", "deprecated-versions", "1.1.0", fail_on_error=False
    )
    assert "Version 1.1.0 is deprecated" in output
-   assert "Added 1 new versions to" not in output
+   assert "Added 0 new versions to" not in output
+
+
+def test_checksum_at(mock_packages):
+   pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
+   versions = [str(v) for v in pkg_cls.versions]
+   output = spack_checksum(f"zlib@{versions[0]}")
+   assert "Found 1 version" in output
+
+
+def test_checksum_url(mock_packages):
+   pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
+   output = spack_checksum(f"{pkg_cls.url}", fail_on_error=False)
+   assert "accepts package names" in output
+
+
+def test_checksum_verification_fails(install_mockery, capsys):
+   spec = spack.spec.Spec("zlib").concretized()
+   pkg = spec.package
+   versions = list(pkg.versions.keys())
+   version_hashes = {versions[0]: "abadhash", spack.version.Version("0.1"): "123456789"}
+   with pytest.raises(SystemExit):
+       spack.cmd.checksum.print_checksum_status(pkg, version_hashes)
+   out = str(capsys.readouterr())
+   assert out.count("Correct") == 0
+   assert "No previous checksum" in out
+   assert "Invalid checksum" in out
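The new `--verify` path hands `spack.cmd.checksum.print_checksum_status` a plain `{Version: sha256}` mapping, as the last test above shows. A small sketch of the expected input shape (the hash values are placeholders taken from that test):

```python
import spack.version

version_hashes = {
    spack.version.Version("1.2.13"): "abadhash",  # known version, wrong hash -> "Invalid checksum"
    spack.version.Version("0.1"): "123456789",    # version not in package.py -> "No previous checksum"
}
# print_checksum_status(pkg, version_hashes) is expected to exit non-zero when any entry fails
```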
@@ -2029,10 +2029,10 @@ def fake_download_and_extract_artifacts(url, work_dir):
        working_dir.strpath,
        output=str,
    )
-   expect_out = "docker run --rm --name spack_reproducer -v {0}:{0}:Z -ti {1}".format(
-       os.path.realpath(working_dir.strpath), image_name
-   )

+   # Make sure the script was generated
+   assert os.path.exists(os.path.join(os.path.realpath(working_dir.strpath), "start.sh"))
+   # Make sure we tell the user where it is when not in interactive mode
+   expect_out = "$ {0}/start.sh".format(os.path.realpath(working_dir.strpath))
    assert expect_out in rep_out
@@ -10,9 +10,11 @@
import llnl.util.filesystem as fs

import spack.caches
+import spack.cmd.clean
import spack.main
import spack.package_base
import spack.stage
+import spack.store

clean = spack.main.SpackCommand("clean")

@@ -33,7 +35,7 @@ def __call__(self, *args, **kwargs):
    monkeypatch.setattr(spack.stage, "purge", Counter("stages"))
    monkeypatch.setattr(spack.caches.fetch_cache, "destroy", Counter("downloads"), raising=False)
    monkeypatch.setattr(spack.caches.misc_cache, "destroy", Counter("caches"))
-   monkeypatch.setattr(spack.installer, "clear_failures", Counter("failures"))
+   monkeypatch.setattr(spack.store.STORE.failure_tracker, "clear_all", Counter("failures"))
    monkeypatch.setattr(spack.cmd.clean, "remove_python_cache", Counter("python_cache"))

    yield counts
@@ -219,7 +219,8 @@ def test_fish_completion():
def test_update_completion_arg(shell, tmpdir, monkeypatch):
    """Test the update completion flag."""

-   mock_infile = tmpdir.join("spack-completion.in")
+   tmpdir.join(shell).mkdir()
+   mock_infile = tmpdir.join(shell).join(f"spack-completion.{shell}")
    mock_outfile = tmpdir.join(f"spack-completion.{shell}")

    mock_args = {

@@ -267,7 +268,7 @@ def test_updated_completion_scripts(shell, tmpdir):
        "and adding the changed files to your pull request."
    )

-   header = os.path.join(spack.paths.share_path, shell, "spack-completion.in")
+   header = os.path.join(spack.paths.share_path, shell, f"spack-completion.{shell}")
    script = "spack-completion.{0}".format(shell)
    old_script = os.path.join(spack.paths.share_path, script)
    new_script = str(tmpdir.join(script))
@@ -266,7 +266,7 @@ def test_dev_build_multiple(
    # root and dependency if they wanted a dev build for both.
    leaf_dir = tmpdir.mkdir("leaf")
    leaf_spec = spack.spec.Spec("dev-build-test-install@=1.0.0")  # non-existing version
-   leaf_pkg_cls = spack.repo.path.get_pkg_class(leaf_spec.name)
+   leaf_pkg_cls = spack.repo.PATH.get_pkg_class(leaf_spec.name)
    with leaf_dir.as_cwd():
        with open(leaf_pkg_cls.filename, "w") as f:
            f.write(leaf_pkg_cls.original_string)

@@ -275,7 +275,7 @@ def test_dev_build_multiple(
    # don't concretize outside environment -- dev info will be wrong
    root_dir = tmpdir.mkdir("root")
    root_spec = spack.spec.Spec("dev-build-test-dependent@0.0.0")
-   root_pkg_cls = spack.repo.path.get_pkg_class(root_spec.name)
+   root_pkg_cls = spack.repo.PATH.get_pkg_class(root_spec.name)
    with root_dir.as_cwd():
        with open(root_pkg_cls.filename, "w") as f:
            f.write(root_pkg_cls.original_string)

@@ -329,7 +329,7 @@ def test_dev_build_env_dependency(
    dep_spec = spack.spec.Spec("dev-build-test-install")

    with build_dir.as_cwd():
-       dep_pkg_cls = spack.repo.path.get_pkg_class(dep_spec.name)
+       dep_pkg_cls = spack.repo.PATH.get_pkg_class(dep_spec.name)
        with open(dep_pkg_cls.filename, "w") as f:
            f.write(dep_pkg_cls.original_string)
@@ -277,7 +277,7 @@ def test_env_modifications_error_on_activate(install_mockery, mock_fetch, monkey
    def setup_error(pkg, env):
        raise RuntimeError("cmake-client had issues!")

-   pkg = spack.repo.path.get_pkg_class("cmake-client")
+   pkg = spack.repo.PATH.get_pkg_class("cmake-client")
    monkeypatch.setattr(pkg, "setup_run_environment", setup_error)

    spack.environment.shell.activate(e)
@@ -43,7 +43,7 @@ def define_plat_exe(exe):


def test_find_external_single_package(mock_executable, executables_found, _platform_executables):
-   pkgs_to_check = [spack.repo.path.get_pkg_class("cmake")]
+   pkgs_to_check = [spack.repo.PATH.get_pkg_class("cmake")]
    cmake_path = mock_executable("cmake", output="echo cmake version 1.foo")
    executables_found({str(cmake_path): define_plat_exe("cmake")})

@@ -58,7 +58,7 @@ def test_find_external_single_package(mock_executable, executables_found, _platf
def test_find_external_two_instances_same_package(
    mock_executable, executables_found, _platform_executables
):
-   pkgs_to_check = [spack.repo.path.get_pkg_class("cmake")]
+   pkgs_to_check = [spack.repo.PATH.get_pkg_class("cmake")]

    # Each of these cmake instances is created in a different prefix
    # In Windows, quoted strings are echo'd with quotes includes

@@ -347,7 +347,7 @@ def test_overriding_prefix(mock_executable, mutable_config, monkeypatch, _platfo
    def _determine_variants(cls, exes, version_str):
        return "languages=c", {"prefix": "/opt/gcc/bin", "compilers": {"c": exes[0]}}

-   gcc_cls = spack.repo.path.get_pkg_class("gcc")
+   gcc_cls = spack.repo.PATH.get_pkg_class("gcc")
    monkeypatch.setattr(gcc_cls, "determine_variants", _determine_variants)

    # Find the external spec
@@ -117,13 +117,13 @@ def test_tag2_tag3(parser, specs):
    assert len(specs) == 0


+@pytest.mark.parametrize(
+   "args,with_namespace", [([], False), (["--namespace"], True), (["--namespaces"], True)]
+)
@pytest.mark.db
-def test_namespaces_shown_correctly(database):
-   out = find()
-   assert "builtin.mock.zmpi" not in out
-
-   out = find("--namespace")
-   assert "builtin.mock.zmpi" in out
+def test_namespaces_shown_correctly(args, with_namespace, database):
+   """Test that --namespace(s) works. Old syntax is --namespace"""
+   assert ("builtin.mock.zmpi" in find(*args)) == with_namespace


@pytest.mark.db
@@ -23,6 +23,7 @@
import spack.environment as ev
import spack.hash_types as ht
import spack.package_base
+import spack.store
import spack.util.executable
from spack.error import SpackError
from spack.main import SpackCommand

@@ -705,9 +706,11 @@ def test_cache_only_fails(tmpdir, mock_fetch, install_mockery, capfd):
    assert "was not installed" in out

    # Check that failure prefix locks are still cached
-   failure_lock_prefixes = ",".join(spack.store.STORE.db._prefix_failures.keys())
-   assert "libelf" in failure_lock_prefixes
-   assert "libdwarf" in failure_lock_prefixes
+   failed_packages = [
+       pkg_name for dag_hash, pkg_name in spack.store.STORE.failure_tracker.locker.locks.keys()
+   ]
+   assert "libelf" in failed_packages
+   assert "libdwarf" in failed_packages


def test_install_only_dependencies(tmpdir, mock_fetch, install_mockery):
@@ -54,12 +54,10 @@ def test_all(mock_packages, capfd):
        "user3",
        "py-extension1:",
        "adamjstewart,",
-       "pradyunsg,",
        "user1,",
        "user2",
        "py-extension2:",
-       "adamjstewart,",
-       "pradyunsg",
+       "adamjstewart",
    ]

    with capfd.disabled():

@@ -74,9 +72,6 @@ def test_all_by_user(mock_packages, capfd):
        "adamjstewart:",
        "py-extension1,",
        "py-extension2",
-       "pradyunsg:",
-       "py-extension1,",
-       "py-extension2",
        "user0:",
        "maintainers-3",
        "user1:",
@@ -82,7 +82,7 @@ def mock_pkg_git_repo(git, tmpdir_factory):

@pytest.fixture(scope="module")
def mock_pkg_names():
-   repo = spack.repo.path.get_repo("builtin.mock")
+   repo = spack.repo.PATH.get_repo("builtin.mock")

    # Be sure to include virtual packages since packages with stand-alone
    # tests may inherit additional tests from the virtuals they provide,

@@ -105,11 +105,11 @@ def split(output):


def test_packages_path():
-   assert spack.repo.packages_path() == spack.repo.path.get_repo("builtin").packages_path
+   assert spack.repo.packages_path() == spack.repo.PATH.get_repo("builtin").packages_path


def test_mock_packages_path(mock_packages):
-   assert spack.repo.packages_path() == spack.repo.path.get_repo("builtin.mock").packages_path
+   assert spack.repo.packages_path() == spack.repo.PATH.get_repo("builtin.mock").packages_path


def test_pkg_add(git, mock_pkg_git_repo):

@@ -126,8 +126,8 @@ def test_pkg_add(git, mock_pkg_git_repo):
    finally:
        shutil.rmtree("pkg-e")
        # Removing a package mid-run disrupts Spack's caching
-       if spack.repo.path.repos[0]._fast_package_checker:
-           spack.repo.path.repos[0]._fast_package_checker.invalidate()
+       if spack.repo.PATH.repos[0]._fast_package_checker:
+           spack.repo.PATH.repos[0]._fast_package_checker.invalidate()

    with pytest.raises(spack.main.SpackCommandError):
        pkg("add", "does-not-exist")

@@ -248,7 +248,7 @@ def test_pkg_source_requires_one_arg(mock_packages):
def test_pkg_source(mock_packages):
    fake_source = pkg("source", "fake")

-   fake_file = spack.repo.path.filename_for_package_name("fake")
+   fake_file = spack.repo.PATH.filename_for_package_name("fake")
    with open(fake_file) as f:
        contents = f.read()
    assert fake_source == contents

@@ -303,7 +303,7 @@ def test_pkg_grep(mock_packages, capfd):
    pkg("grep", "-l", "splice", output=str)
    output, _ = capfd.readouterr()
    assert output.strip() == "\n".join(
-       spack.repo.path.get_pkg_class(name).module.__file__
+       spack.repo.PATH.get_pkg_class(name).module.__file__
        for name in ["splice-a", "splice-h", "splice-t", "splice-vh", "splice-z"]
    )
@@ -120,7 +120,7 @@ def test_changed_files_all_files():
    assert len(files) > 6000

    # a builtin package
-   zlib = spack.repo.path.get_pkg_class("zlib")
+   zlib = spack.repo.PATH.get_pkg_class("zlib")
    zlib_file = zlib.module.__file__
    if zlib_file.endswith("pyc"):
        zlib_file = zlib_file[:-1]
@@ -41,7 +41,7 @@ def test_tags_no_tags(monkeypatch):
    class tag_path:
        tag_index = dict()

-   monkeypatch.setattr(spack.repo, "path", tag_path)
+   monkeypatch.setattr(spack.repo, "PATH", tag_path)
    out = tags()
    assert "No tagged" in out
@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Test basic behavior of compilers in Spack"""
import os
-import shutil
import sys
from copy import copy

@@ -832,119 +831,6 @@ def test_compiler_flags_use_real_version(working_env, monkeypatch, tmpdir):
    assert flag == "-std=c++0x"


-@pytest.mark.skipif(sys.platform == "win32", reason="Apple Clang and XCode unsupported on Windows")
-def test_apple_clang_setup_environment(mock_executable, monkeypatch):
-   """Test a code path that is taken only if the package uses
-   Xcode on MacOS.
-   """
-
-   class MockPackage:
-       use_xcode = False
-
-   apple_clang_cls = spack.compilers.class_for_compiler_name("apple-clang")
-   compiler = apple_clang_cls(
-       spack.spec.CompilerSpec("apple-clang@=11.0.0"),
-       "catalina",
-       "x86_64",
-       ["/usr/bin/clang", "/usr/bin/clang++", None, None],
-   )
-   env = spack.util.environment.EnvironmentModifications()
-   # Check a package that doesn't use xcode and ensure we don't add changes
-   # to the environment
-   pkg = MockPackage()
-   compiler.setup_custom_environment(pkg, env)
-   assert not env
-
-   # Prepare mock executables to fake the Xcode environment
-   xcrun = mock_executable(
-       "xcrun",
-       """
-if [[ "$2" == "clang" ]] ; then
-  echo "/Library/Developer/CommandLineTools/usr/bin/clang"
-fi
-if [[ "$2" == "clang++" ]] ; then
-  echo "/Library/Developer/CommandLineTools/usr/bin/clang++"
-fi
-""",
-   )
-   mock_executable(
-       "xcode-select",
-       """
-echo "/Library/Developer"
-""",
-   )
-   bin_dir = os.path.dirname(xcrun)
-   monkeypatch.setenv("PATH", bin_dir, prepend=os.pathsep)
-
-   def noop(*args, **kwargs):
-       pass
-
-   real_listdir = os.listdir
-
-   def _listdir(path):
-       if not os.path.exists(path):
-           return []
-       return real_listdir(path)
-
-   # Set a few operations to noop
-   monkeypatch.setattr(shutil, "copytree", noop)
-   monkeypatch.setattr(os, "unlink", noop)
-   monkeypatch.setattr(os, "symlink", noop)
-   monkeypatch.setattr(os, "listdir", _listdir)
-
-   # Qt is so far the only package that uses this code path, change
-   # introduced in https://github.com/spack/spack/pull/1832
-   pkg.use_xcode = True
-   compiler.setup_custom_environment(pkg, env)
-   assert len(env) == 3
-   assert env.env_modifications[0].name == "SPACK_CC"
-   assert env.env_modifications[1].name == "SPACK_CXX"
-   assert env.env_modifications[2].name == "DEVELOPER_DIR"
-
-
-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
-@pytest.mark.parametrize("xcode_select_output", ["", "/Library/Developer/CommandLineTools"])
-def test_xcode_not_available(xcode_select_output, mock_executable, monkeypatch):
-   # Prepare mock executables to fake the Xcode environment
-   xcrun = mock_executable(
-       "xcrun",
-       """
-if [[ "$2" == "clang" ]] ; then
-  echo "/Library/Developer/CommandLineTools/usr/bin/clang"
-fi
-if [[ "$2" == "clang++" ]] ; then
-  echo "/Library/Developer/CommandLineTools/usr/bin/clang++"
-fi
-""",
-   )
-   mock_executable(
-       "xcode-select",
-       """
-echo "{0}"
-""".format(
-           xcode_select_output
-       ),
-   )
-   bin_dir = os.path.dirname(xcrun)
-   monkeypatch.setenv("PATH", bin_dir, prepend=os.pathsep)
-   # Prepare compiler
-   apple_clang_cls = spack.compilers.class_for_compiler_name("apple-clang")
-   compiler = apple_clang_cls(
-       spack.spec.CompilerSpec("apple-clang@11.0.0"),
-       "catalina",
-       "x86_64",
-       ["/usr/bin/clang", "/usr/bin/clang++", None, None],
-   )
-   env = spack.util.environment.EnvironmentModifications()
-
-   class MockPackage:
-       use_xcode = True
-
-   pkg = MockPackage()
-   with pytest.raises(OSError):
-       compiler.setup_custom_environment(pkg, env)
-
-
@pytest.mark.enable_compiler_verification
def test_compiler_executable_verification_raises(tmpdir):
    compiler = MockCompiler()
@@ -43,7 +43,7 @@ def check_spec(abstract, concrete):
        cflag = concrete.compiler_flags[flag]
        assert set(aflag) <= set(cflag)

-   for name in spack.repo.path.get_pkg_class(abstract.name).variants:
+   for name in spack.repo.PATH.get_pkg_class(abstract.name).variants:
        assert name in concrete.variants

    for flag in concrete.compiler_flags.valid_compiler_flags():

@@ -292,7 +292,7 @@ def test_concretize_with_provides_when(self):
        """Make sure insufficient versions of MPI are not in providers list when
        we ask for some advanced version.
        """
-       repo = spack.repo.path
+       repo = spack.repo.PATH
        assert not any(s.intersects("mpich2@:1.0") for s in repo.providers_for("mpi@2.1"))
        assert not any(s.intersects("mpich2@:1.1") for s in repo.providers_for("mpi@2.2"))
        assert not any(s.intersects("mpich@:1") for s in repo.providers_for("mpi@2"))

@@ -301,7 +301,7 @@ def test_concretize_with_provides_when(self):

    def test_provides_handles_multiple_providers_of_same_version(self):
        """ """
-       providers = spack.repo.path.providers_for("mpi@3.0")
+       providers = spack.repo.PATH.providers_for("mpi@3.0")

        # Note that providers are repo-specific, so we don't misinterpret
        # providers, but vdeps are not namespace-specific, so we can

@@ -1446,7 +1446,7 @@ def test_non_default_provider_of_multiple_virtuals(self):
        assert s["lapack"].name == "low-priority-provider"

        for virtual_pkg in ("mpi", "lapack"):
-           for pkg in spack.repo.path.providers_for(virtual_pkg):
+           for pkg in spack.repo.PATH.providers_for(virtual_pkg):
                if pkg.name == "low-priority-provider":
                    continue
                assert pkg not in s

@@ -1919,7 +1919,7 @@ def test_installed_specs_disregard_conflicts(self, mutable_database, monkeypatch
            pytest.xfail("Use case not supported by the original concretizer")

        # Add a conflict to "mpich" that match an already installed "mpich~debug"
-       pkg_cls = spack.repo.path.get_pkg_class("mpich")
+       pkg_cls = spack.repo.PATH.get_pkg_class("mpich")
        monkeypatch.setitem(pkg_cls.conflicts, "~debug", [(Spec(), None)])

        # If we concretize with --fresh the conflict is taken into account
@@ -1158,13 +1158,13 @@ def test_license_dir_config(mutable_config, mock_packages):
    expected_dir = spack.paths.default_license_dir
    assert spack.config.get("config:license_dir") == expected_dir
    assert spack.package_base.PackageBase.global_license_dir == expected_dir
-   assert spack.repo.path.get_pkg_class("a").global_license_dir == expected_dir
+   assert spack.repo.PATH.get_pkg_class("a").global_license_dir == expected_dir

    rel_path = os.path.join(os.path.sep, "foo", "bar", "baz")
    spack.config.set("config:license_dir", rel_path)
    assert spack.config.get("config:license_dir") == rel_path
    assert spack.package_base.PackageBase.global_license_dir == rel_path
-   assert spack.repo.path.get_pkg_class("a").global_license_dir == rel_path
+   assert spack.repo.PATH.get_pkg_class("a").global_license_dir == rel_path


@pytest.mark.regression("22547")
@@ -773,7 +773,7 @@ def concretize_scope(mutable_config, tmpdir):
    yield str(tmpdir.join("concretize"))

    mutable_config.pop_scope()
-   spack.repo.path._provider_index = None
+   spack.repo.PATH._provider_index = None


@pytest.fixture

@@ -950,21 +950,14 @@ def disable_compiler_execution(monkeypatch, request):


@pytest.fixture(scope="function")
-def install_mockery(temporary_store, mutable_config, mock_packages):
+def install_mockery(temporary_store: spack.store.Store, mutable_config, mock_packages):
    """Hooks a fake install directory, DB, and stage directory into Spack."""
    # We use a fake package, so temporarily disable checksumming
    with spack.config.override("config:checksum", False):
        yield

-   # Also wipe out any cached prefix failure locks (associated with
-   # the session-scoped mock archive).
-   for pkg_id in list(temporary_store.db._prefix_failures.keys()):
-       lock = spack.store.STORE.db._prefix_failures.pop(pkg_id, None)
-       if lock:
-           try:
-               lock.release_write()
-           except Exception:
-               pass
+   # Wipe out any cached prefix failure locks (associated with the session-scoped mock archive)
+   temporary_store.failure_tracker.clear_all()


@pytest.fixture(scope="function")

@@ -1944,5 +1937,5 @@ def nullify_globals(request, monkeypatch):
    ensure_configuration_fixture_run_before(request)
    monkeypatch.setattr(spack.config, "config", None)
    monkeypatch.setattr(spack.caches, "misc_cache", None)
-   monkeypatch.setattr(spack.repo, "path", None)
+   monkeypatch.setattr(spack.repo, "PATH", None)
    monkeypatch.setattr(spack.store, "STORE", None)
Some files were not shown because too many files have changed in this diff.