Compare commits

239 commits: features/c ... hs/fix/tar
@@ -2,6 +2,6 @@ black==24.10.0
 clingo==5.7.1
 flake8==7.1.1
 isort==5.13.2
-mypy==1.8.0
+mypy==1.11.2
 types-six==1.17.0.20241205
 vermin==1.6.0
.github/workflows/valid-style.yml (vendored, 6 changes)
@@ -20,7 +20,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
-          python-version: '3.11'
+          python-version: '3.13'
           cache: 'pip'
       - name: Install Python Packages
         run: |
@@ -39,7 +39,7 @@ jobs:
           fetch-depth: 0
       - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
-          python-version: '3.11'
+          python-version: '3.13'
           cache: 'pip'
       - name: Install Python packages
         run: |
@@ -58,7 +58,7 @@ jobs:
     secrets: inherit
     with:
       with_coverage: ${{ inputs.with_coverage }}
-      python_version: '3.11'
+      python_version: '3.13'
   # Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
   bootstrap-dev-rhel8:
     runs-on: ubuntu-latest
@@ -25,7 +25,6 @@ exit 1
 # The code above runs this file with our preferred python interpreter.
 
 import os
-import os.path
 import sys
 
 min_python3 = (3, 6)
@@ -36,7 +36,7 @@ packages:
     go-or-gccgo-bootstrap: [go-bootstrap, gcc]
     iconv: [libiconv]
     ipp: [intel-oneapi-ipp]
-    java: [openjdk, jdk, ibm-java]
+    java: [openjdk, jdk]
     jpeg: [libjpeg-turbo, libjpeg]
     lapack: [openblas, amdlibflame]
     libc: [glibc, musl]
@@ -73,15 +73,27 @@ packages:
     permissions:
       read: world
       write: user
+  cray-fftw:
+    buildable: false
+  cray-libsci:
+    buildable: false
   cray-mpich:
     buildable: false
   cray-mvapich2:
     buildable: false
+  cray-pmi:
+    buildable: false
   egl:
     buildable: false
+  essl:
+    buildable: false
   fujitsu-mpi:
     buildable: false
+  fujitsu-ssl2:
+    buildable: false
   hpcx-mpi:
     buildable: false
+  mpt:
+    buildable: false
   spectrum-mpi:
     buildable: false
@@ -170,7 +170,7 @@ bootstrapping.
 To register the mirror on the platform where it's supposed to be used run the following command(s):
    % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
    % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries
+   % spack buildcache update-index /opt/bootstrap/bootstrap_cache
 
 This command needs to be run on a machine with internet access and the resulting folder
 has to be moved over to the air-gapped system. Once the local sources are added using the
@@ -56,13 +56,13 @@ If you look at the ``perl`` package, you'll see:
 
 .. code-block:: python
 
-   phases = ["configure", "build", "install"]
+   phases = ("configure", "build", "install")
 
 Similarly, ``cmake`` defines:
 
 .. code-block:: python
 
-   phases = ["bootstrap", "build", "install"]
+   phases = ("bootstrap", "build", "install")
 
 If we look at the ``cmake`` example, this tells Spack's ``PackageBase``
 class to run the ``bootstrap``, ``build``, and ``install`` functions
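For orientation, a minimal sketch of what a package declaring a custom ``phases`` tuple can look like is shown below. The class name and the phase bodies are illustrative assumptions, and the ``(spec, prefix)`` signature is the conventional one rather than something taken from this changeset; exact details may differ across Spack versions.

```python
# Illustrative sketch only: a package declaring custom phases as a tuple.
# Spack runs a method named after each phase, in order.
from spack.package import Package


class Example(Package):
    """Hypothetical package with a bootstrap/build/install phase sequence."""

    phases = ("bootstrap", "build", "install")

    def bootstrap(self, spec, prefix):
        pass  # e.g. run a bundled bootstrap script

    def build(self, spec, prefix):
        pass  # e.g. invoke the generated build system

    def install(self, spec, prefix):
        pass  # e.g. copy build results into the install prefix
```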
@@ -543,10 +543,10 @@ With either interpreter you can run a single command:
 
 .. code-block:: console
 
-   $ spack python -c 'from spack.spec import Spec; Spec("python").concretized()'
+   $ spack python -c 'from spack.concretize import concretize_one; concretize_one("python")'
    ...
 
-   $ spack python -i ipython -c 'from spack.spec import Spec; Spec("python").concretized()'
+   $ spack python -i ipython -c 'from spack.concretize import concretize_one; concretize_one("python")'
    Out[1]: ...
 
 or a file:
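As a usage note, the same updated API can be exercised from a file run with ``spack python``. The sketch below assumes only the ``concretize_one`` call shown above plus common ``Spec`` attributes; the script itself is hypothetical.

```python
# Sketch of a script to run with `spack python <file>`: concretize a spec
# with the new API and print a few of its attributes.
from spack.concretize import concretize_one

spec = concretize_one("python")
print(spec.name, spec.version)
print(spec.dag_hash())
```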
@@ -456,14 +456,13 @@ For instance, the following config options,
   tcl:
     all:
       suffixes:
-        ^python@3: 'python{^python.version}'
+        ^python@3: 'python{^python.version.up_to_2}'
         ^openblas: 'openblas'
 
-will add a ``python-3.12.1`` version string to any packages compiled with
-Python matching the spec, ``python@3``. This is useful to know which
-version of Python a set of Python extensions is associated with. Likewise, the
-``openblas`` string is attached to any program that has openblas in the spec,
-most likely via the ``+blas`` variant specification.
+will add a ``python3.12`` to module names of packages compiled with Python 3.12, and similarly for
+all specs depending on ``python@3``. This is useful to know which version of Python a set of Python
+extensions is associated with. Likewise, the ``openblas`` string is attached to any program that
+has openblas in the spec, most likely via the ``+blas`` variant specification.
 
 The most heavyweight solution to module naming is to change the entire
 naming convention for module files. This uses the projections format
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """URL primitives that just require Python standard library."""
 import itertools
-import os.path
+import os
 import re
 from typing import Optional, Set, Tuple
 from urllib.parse import urlsplit, urlunsplit
@@ -75,7 +75,6 @@
     "install_tree",
     "is_exe",
     "join_path",
-    "last_modification_time_recursive",
    "library_extensions",
     "mkdirp",
     "partition_path",
@@ -1470,15 +1469,36 @@ def set_executable(path):
 
 
 @system_path_filter
-def last_modification_time_recursive(path):
-    path = os.path.abspath(path)
-    times = [os.stat(path).st_mtime]
-    times.extend(
-        os.lstat(os.path.join(root, name)).st_mtime
-        for root, dirs, files in os.walk(path)
-        for name in dirs + files
-    )
-    return max(times)
+def recursive_mtime_greater_than(path: str, time: float) -> bool:
+    """Returns true if any file or dir recursively under `path` has mtime greater than `time`."""
+    # use bfs order to increase likelihood of early return
+    queue: Deque[str] = collections.deque([path])
+
+    if os.stat(path).st_mtime > time:
+        return True
+
+    while queue:
+        current = queue.popleft()
+
+        try:
+            entries = os.scandir(current)
+        except OSError:
+            continue
+
+        with entries:
+            for entry in entries:
+                try:
+                    st = entry.stat(follow_symlinks=False)
+                except OSError:
+                    continue
+
+                if st.st_mtime > time:
+                    return True
+
+                if entry.is_dir(follow_symlinks=False):
+                    queue.append(entry.path)
+
+    return False
 
 
 @system_path_filter
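A possible call-site migration for the helper introduced above. The ``index_is_stale`` wrapper and its arguments are hypothetical, and the import path assumes the function stays alongside the other ``llnl.util.filesystem`` helpers visible in this hunk.

```python
# Hypothetical call site: instead of computing the maximum mtime of a whole
# tree and comparing it, ask whether anything is newer than a reference time
# so the directory walk can stop early.
import os

from llnl.util.filesystem import recursive_mtime_greater_than


def index_is_stale(index_file: str, source_dir: str) -> bool:
    """True if anything under source_dir changed after index_file was written."""
    if not os.path.exists(index_file):
        return True
    return recursive_mtime_greater_than(source_dir, os.stat(index_file).st_mtime)
```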
@@ -1740,8 +1760,7 @@ def find(
 
 
 def _log_file_access_issue(e: OSError, path: str) -> None:
-    errno_name = errno.errorcode.get(e.errno, "UNKNOWN")
-    tty.debug(f"find must skip {path}: {errno_name} {e}")
+    tty.debug(f"find must skip {path}: {e}")
 
 
 def _file_id(s: os.stat_result) -> Tuple[int, int]:
@@ -1356,14 +1356,8 @@ def _test_detection_by_executable(pkgs, debug_log, error_cls):
 
     def _compare_extra_attribute(_expected, _detected, *, _spec):
         result = []
-        # Check items are of the same type
-        if not isinstance(_detected, type(_expected)):
-            _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
-            _details = [f"{_detected} was detected instead"]
-            return [error_cls(summary=_summary, details=_details)]
-
         # If they are string expected is a regex
-        if isinstance(_expected, str):
+        if isinstance(_expected, str) and isinstance(_detected, str):
             try:
                 _regex = re.compile(_expected)
             except re.error:
@@ -1379,7 +1373,7 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
                 _details = [f"{_detected} does not match the regex"]
                 return [error_cls(summary=_summary, details=_details)]
 
-        if isinstance(_expected, dict):
+        elif isinstance(_expected, dict) and isinstance(_detected, dict):
             _not_detected = set(_expected.keys()) - set(_detected.keys())
             if _not_detected:
                 _summary = f"{pkg_name}: cannot detect some attributes for spec {_spec}"
@@ -1394,6 +1388,10 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
                 result.extend(
                     _compare_extra_attribute(_expected[_key], _detected[_key], _spec=_spec)
                 )
+        else:
+            _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
+            _details = [f"{_detected} was detected instead"]
+            return [error_cls(summary=_summary, details=_details)]
 
         return result
 
@@ -5,6 +5,7 @@
 import codecs
 import collections
 import concurrent.futures
+import contextlib
 import copy
 import hashlib
 import io
@@ -23,7 +24,7 @@
 import urllib.request
 import warnings
 from contextlib import closing
-from typing import IO, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
+from typing import IO, Callable, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
 
 import llnl.util.filesystem as fsys
 import llnl.util.lang
@@ -91,6 +92,9 @@
 CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2
 
 
+INDEX_HASH_FILE = "index.json.hash"
+
+
 class BuildCacheDatabase(spack_db.Database):
     """A database for binary buildcaches.
 
@@ -502,7 +506,7 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
         scheme = urllib.parse.urlparse(mirror_url).scheme
 
         if scheme != "oci" and not web_util.url_exists(
-            url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json")
+            url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
         ):
             return False
 
@@ -591,32 +595,18 @@ def file_matches(f: IO[bytes], regex: llnl.util.lang.PatternBytes) -> bool:
     f.seek(0)
 
 
-def deps_to_relocate(spec):
-    """Return the transitive link and direct run dependencies of the spec.
-
-    This is a special traversal for dependencies we need to consider when relocating a package.
-
-    Package binaries, scripts, and other files may refer to the prefixes of dependencies, so
-    we need to rewrite those locations when dependencies are in a different place at install time
-    than they were at build time.
-
-    This traversal covers transitive link dependencies and direct run dependencies because:
-
-    1. Spack adds RPATHs for transitive link dependencies so that packages can find needed
-       dependency libraries.
-    2. Packages may call any of their *direct* run dependencies (and may bake their paths into
-       binaries or scripts), so we also need to search for run dependency prefixes when relocating.
-
-    This returns a deduplicated list of transitive link dependencies and direct run dependencies.
-    """
-    deps = [
+def specs_to_relocate(spec: spack.spec.Spec) -> List[spack.spec.Spec]:
+    """Return the set of specs that may be referenced in the install prefix of the provided spec.
+
+    We currently include non-external transitive link and direct run dependencies."""
+    specs = [
         s
         for s in itertools.chain(
-            spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
+            spec.traverse(root=True, deptype="link", order="breadth", key=traverse.by_dag_hash),
+            spec.dependencies(deptype="run"),
         )
         if not s.external
     ]
-    return llnl.util.lang.dedupe(deps, key=lambda s: s.dag_hash())
+    return list(llnl.util.lang.dedupe(specs, key=lambda s: s.dag_hash()))
 
 
 def get_buildinfo_dict(spec):
@@ -630,7 +620,7 @@ def get_buildinfo_dict(spec):
         # "relocate_binaries": [],
         # "relocate_links": [],
         "hardlinks_deduped": True,
-        "hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in deps_to_relocate(spec)},
+        "hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in specs_to_relocate(spec)},
     }
 
 
@@ -683,19 +673,24 @@ def sign_specfile(key: str, specfile_path: str) -> str:
 
 
 def _read_specs_and_push_index(
-    file_list, read_method, cache_prefix, db: BuildCacheDatabase, temp_dir, concurrency
+    file_list: List[str],
+    read_method: Callable,
+    cache_prefix: str,
+    db: BuildCacheDatabase,
+    temp_dir: str,
+    concurrency: int,
 ):
     """Read all the specs listed in the provided list, using thread given thread parallelism,
     generate the index, and push it to the mirror.
 
     Args:
-        file_list (list(str)): List of urls or file paths pointing at spec files to read
+        file_list: List of urls or file paths pointing at spec files to read
         read_method: A function taking a single argument, either a url or a file path,
             and which reads the spec file at that location, and returns the spec.
-        cache_prefix (str): prefix of the build cache on s3 where index should be pushed.
+        cache_prefix: prefix of the build cache on s3 where index should be pushed.
         db: A spack database used for adding specs and then writing the index.
-        temp_dir (str): Location to write index.json and hash for pushing
-        concurrency (int): Number of parallel processes to use when fetching
+        temp_dir: Location to write index.json and hash for pushing
+        concurrency: Number of parallel processes to use when fetching
     """
     for file in file_list:
         contents = read_method(file)
@@ -713,7 +708,7 @@ def _read_specs_and_push_index(
 
     # Now generate the index, compute its hash, and push the two files to
     # the mirror.
-    index_json_path = os.path.join(temp_dir, "index.json")
+    index_json_path = os.path.join(temp_dir, spack_db.INDEX_JSON_FILE)
     with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)
 
@@ -723,14 +718,14 @@ def _read_specs_and_push_index(
     index_hash = compute_hash(index_string)
 
     # Write the hash out to a local file
-    index_hash_path = os.path.join(temp_dir, "index.json.hash")
+    index_hash_path = os.path.join(temp_dir, INDEX_HASH_FILE)
     with open(index_hash_path, "w", encoding="utf-8") as f:
         f.write(index_hash)
 
     # Push the index itself
     web_util.push_to_url(
         index_json_path,
-        url_util.join(cache_prefix, "index.json"),
+        url_util.join(cache_prefix, spack_db.INDEX_JSON_FILE),
         keep_original=False,
         extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
     )
@@ -738,7 +733,7 @@ def _read_specs_and_push_index(
     # Push the hash
     web_util.push_to_url(
         index_hash_path,
-        url_util.join(cache_prefix, "index.json.hash"),
+        url_util.join(cache_prefix, INDEX_HASH_FILE),
         keep_original=False,
         extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
     )
@@ -807,7 +802,7 @@ def url_read_method(url):
         try:
             _, _, spec_file = web_util.read_from_url(url)
             contents = codecs.getreader("utf-8")(spec_file).read()
-        except web_util.SpackWebError as e:
+        except (web_util.SpackWebError, OSError) as e:
             tty.error(f"Error reading specfile: {url}: {e}")
         return contents
 
@@ -875,9 +870,12 @@ def _url_generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
     tty.debug(f"Retrieving spec descriptor files from {url} to build index")
 
     db = BuildCacheDatabase(tmpdir)
+    db._write()
 
     try:
-        _read_specs_and_push_index(file_list, read_fn, url, db, db.database_directory, concurrency)
+        _read_specs_and_push_index(
+            file_list, read_fn, url, db, str(db.database_directory), concurrency
+        )
     except Exception as e:
         raise GenerateIndexError(f"Encountered problem pushing package index to {url}: {e}") from e
 
@@ -1112,7 +1110,7 @@ def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> E
 
 
 def prefixes_to_relocate(spec):
-    prefixes = [s.prefix for s in deps_to_relocate(spec)]
+    prefixes = [s.prefix for s in specs_to_relocate(spec)]
     prefixes.append(spack.hooks.sbang.sbang_install_path())
     prefixes.append(str(spack.store.STORE.layout.root))
     return prefixes
@@ -1791,7 +1789,7 @@ def _oci_update_index(
         db.mark(spec, "in_buildcache", True)
 
     # Create the index.json file
-    index_json_path = os.path.join(tmpdir, "index.json")
+    index_json_path = os.path.join(tmpdir, spack_db.INDEX_JSON_FILE)
     with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)
 
@@ -2012,7 +2010,7 @@ def fetch_url_to_mirror(url):
 
             # Download the config = spec.json and the relevant tarball
             try:
-                manifest = json.loads(response.read())
+                manifest = json.load(response)
                 spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
                 tarball_digest = spack.oci.image.Digest.from_string(
                     manifest["layers"][-1]["digest"]
@@ -2139,10 +2137,9 @@ def fetch_url_to_mirror(url):
 
 
 def dedupe_hardlinks_if_necessary(root, buildinfo):
-    """Updates a buildinfo dict for old archives that did
-    not dedupe hardlinks. De-duping hardlinks is necessary
-    when relocating files in parallel and in-place. This
-    means we must preserve inodes when relocating."""
+    """Updates a buildinfo dict for old archives that did not dedupe hardlinks. De-duping hardlinks
+    is necessary when relocating files in parallel and in-place. This means we must preserve inodes
+    when relocating."""
 
     # New archives don't need this.
     if buildinfo.get("hardlinks_deduped", False):
@@ -2171,65 +2168,48 @@ def dedupe_hardlinks_if_necessary(root, buildinfo):
         buildinfo[key] = new_list
 
 
-def relocate_package(spec):
-    """
-    Relocate the given package
-    """
-    workdir = str(spec.prefix)
-    buildinfo = read_buildinfo_file(workdir)
-    new_layout_root = str(spack.store.STORE.layout.root)
-    new_prefix = str(spec.prefix)
-    new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root))
-    new_spack_prefix = str(spack.paths.prefix)
-
-    old_sbang_install_path = None
-    if "sbang_install_path" in buildinfo:
-        old_sbang_install_path = str(buildinfo["sbang_install_path"])
+def relocate_package(spec: spack.spec.Spec) -> None:
+    """Relocate binaries and text files in the given spec prefix, based on its buildinfo file."""
+    spec_prefix = str(spec.prefix)
+    buildinfo = read_buildinfo_file(spec_prefix)
     old_layout_root = str(buildinfo["buildpath"])
-    old_spack_prefix = str(buildinfo.get("spackprefix"))
-    old_rel_prefix = buildinfo.get("relative_prefix")
-    old_prefix = os.path.join(old_layout_root, old_rel_prefix)
-    rel = buildinfo.get("relative_rpaths", False)
 
-    # In the past prefix_to_hash was the default and externals were not dropped, so prefixes
-    # were not unique.
+    # Warn about old style tarballs created with the --rel flag (removed in Spack v0.20)
+    if buildinfo.get("relative_rpaths", False):
+        tty.warn(
+            f"Tarball for {spec} uses relative rpaths, which can cause library loading issues."
+        )
+
+    # In Spack 0.19 and older prefix_to_hash was the default and externals were not dropped, so
+    # prefixes were not unique.
     if "hash_to_prefix" in buildinfo:
         hash_to_old_prefix = buildinfo["hash_to_prefix"]
     elif "prefix_to_hash" in buildinfo:
-        hash_to_old_prefix = dict((v, k) for (k, v) in buildinfo["prefix_to_hash"].items())
+        hash_to_old_prefix = {v: k for (k, v) in buildinfo["prefix_to_hash"].items()}
     else:
-        hash_to_old_prefix = dict()
-
-    if old_rel_prefix != new_rel_prefix and not hash_to_old_prefix:
-        msg = "Package tarball was created from an install "
-        msg += "prefix with a different directory layout and an older "
-        msg += "buildcache create implementation. It cannot be relocated."
-        raise NewLayoutException(msg)
-
-    # Spurious replacements (e.g. sbang) will cause issues with binaries
-    # For example, the new sbang can be longer than the old one.
-    # Hence 2 dictionaries are maintained here.
-    prefix_to_prefix_text = collections.OrderedDict()
-    prefix_to_prefix_bin = collections.OrderedDict()
-
-    if old_sbang_install_path:
-        install_path = spack.hooks.sbang.sbang_install_path()
-        prefix_to_prefix_text[old_sbang_install_path] = install_path
-
-    # First match specific prefix paths. Possibly the *local* install prefix
-    # of some dependency is in an upstream, so we cannot assume the original
-    # spack store root can be mapped uniformly to the new spack store root.
-    #
-    # If the spec is spliced, we need to handle the simultaneous mapping
-    # from the old install_tree to the new install_tree and from the build_spec
-    # to the spliced spec.
-    # Because foo.build_spec is foo for any non-spliced spec, we can simplify
-    # by checking for spliced-in nodes by checking for nodes not in the build_spec
-    # without any explicit check for whether the spec is spliced.
-    # An analog in this algorithm is any spec that shares a name or provides the same virtuals
-    # in the context of the relevant root spec. This ensures that the analog for a spec s
-    # is the spec that s replaced when we spliced.
-    relocation_specs = deps_to_relocate(spec)
+        raise NewLayoutException(
+            "Package tarball was created from an install prefix with a different directory layout "
+            "and an older buildcache create implementation. It cannot be relocated."
+        )
+
+    prefix_to_prefix: Dict[str, str] = {}
+
+    if "sbang_install_path" in buildinfo:
+        old_sbang_install_path = str(buildinfo["sbang_install_path"])
+        prefix_to_prefix[old_sbang_install_path] = spack.hooks.sbang.sbang_install_path()
+
+    # First match specific prefix paths. Possibly the *local* install prefix of some dependency is
+    # in an upstream, so we cannot assume the original spack store root can be mapped uniformly to
+    # the new spack store root.
+
+    # If the spec is spliced, we need to handle the simultaneous mapping from the old install_tree
+    # to the new install_tree and from the build_spec to the spliced spec. Because foo.build_spec
+    # is foo for any non-spliced spec, we can simplify by checking for spliced-in nodes by checking
+    # for nodes not in the build_spec without any explicit check for whether the spec is spliced.
+    # An analog in this algorithm is any spec that shares a name or provides the same virtuals in
+    # the context of the relevant root spec. This ensures that the analog for a spec s is the spec
+    # that s replaced when we spliced.
+    relocation_specs = specs_to_relocate(spec)
     build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
     for s in relocation_specs:
         analog = s
@@ -2248,98 +2228,66 @@ def relocate_package(spec):
         lookup_dag_hash = analog.dag_hash()
         if lookup_dag_hash in hash_to_old_prefix:
             old_dep_prefix = hash_to_old_prefix[lookup_dag_hash]
-            prefix_to_prefix_bin[old_dep_prefix] = str(s.prefix)
-            prefix_to_prefix_text[old_dep_prefix] = str(s.prefix)
+            prefix_to_prefix[old_dep_prefix] = str(s.prefix)
 
     # Only then add the generic fallback of install prefix -> install prefix.
-    prefix_to_prefix_text[old_prefix] = new_prefix
-    prefix_to_prefix_bin[old_prefix] = new_prefix
-    prefix_to_prefix_text[old_layout_root] = new_layout_root
-    prefix_to_prefix_bin[old_layout_root] = new_layout_root
-
-    # This is vestigial code for the *old* location of sbang. Previously,
-    # sbang was a bash script, and it lived in the spack prefix. It is
-    # now a POSIX script that lives in the install prefix. Old packages
-    # will have the old sbang location in their shebangs.
-    orig_sbang = "#!/bin/bash {0}/bin/sbang".format(old_spack_prefix)
-    new_sbang = spack.hooks.sbang.sbang_shebang_line()
-    prefix_to_prefix_text[orig_sbang] = new_sbang
-
-    tty.debug("Relocating package from", "%s to %s." % (old_layout_root, new_layout_root))
-
-    # Old archives maybe have hardlinks repeated.
-    dedupe_hardlinks_if_necessary(workdir, buildinfo)
-
-    def is_backup_file(file):
-        return file.endswith("~")
-
-    # Text files containing the prefix text
-    text_names = list()
-    for filename in buildinfo["relocate_textfiles"]:
-        text_name = os.path.join(workdir, filename)
-        # Don't add backup files generated by filter_file during install step.
-        if not is_backup_file(text_name):
-            text_names.append(text_name)
-
-    # If we are not installing back to the same install tree do the relocation
-    if old_prefix != new_prefix:
-        files_to_relocate = [
-            os.path.join(workdir, filename) for filename in buildinfo.get("relocate_binaries")
-        ]
-        # If the buildcache was not created with relativized rpaths
-        # do the relocation of path in binaries
-        platform = spack.platforms.by_name(spec.platform)
-        if "macho" in platform.binary_formats:
-            relocate.relocate_macho_binaries(
-                files_to_relocate,
-                old_layout_root,
-                new_layout_root,
-                prefix_to_prefix_bin,
-                rel,
-                old_prefix,
-                new_prefix,
-            )
-        elif "elf" in platform.binary_formats and not rel:
-            # The new ELF dynamic section relocation logic only handles absolute to
-            # absolute relocation.
-            relocate.new_relocate_elf_binaries(files_to_relocate, prefix_to_prefix_bin)
-        elif "elf" in platform.binary_formats and rel:
-            relocate.relocate_elf_binaries(
-                files_to_relocate,
-                old_layout_root,
-                new_layout_root,
-                prefix_to_prefix_bin,
-                rel,
-                old_prefix,
-                new_prefix,
-            )
-
-        # Relocate links to the new install prefix
-        links = [os.path.join(workdir, f) for f in buildinfo.get("relocate_links", [])]
-        relocate.relocate_links(links, prefix_to_prefix_bin)
-
-        # For all buildcaches
-        # relocate the install prefixes in text files including dependencies
-        relocate.relocate_text(text_names, prefix_to_prefix_text)
-
-        # relocate the install prefixes in binary files including dependencies
-        changed_files = relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
-
-        # Add ad-hoc signatures to patched macho files when on macOS.
-        if "macho" in platform.binary_formats and sys.platform == "darwin":
-            codesign = which("codesign")
-            if not codesign:
-                return
-            for binary in changed_files:
-                # preserve the original inode by running codesign on a copy
-                with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
-                    codesign("-fs-", tmp_binary)
-
-    # If we are installing back to the same location
-    # relocate the sbang location if the spack directory changed
-    else:
-        if old_spack_prefix != new_spack_prefix:
-            relocate.relocate_text(text_names, prefix_to_prefix_text)
+    prefix_to_prefix[old_layout_root] = str(spack.store.STORE.layout.root)
+
+    # Delete identity mappings from prefix_to_prefix
+    prefix_to_prefix = {k: v for k, v in prefix_to_prefix.items() if k != v}
+
+    # If there's nothing to relocate, we're done.
+    if not prefix_to_prefix:
+        return
+
+    for old, new in prefix_to_prefix.items():
+        tty.debug(f"Relocating: {old} => {new}.")
+
+    # Old archives may have hardlinks repeated.
+    dedupe_hardlinks_if_necessary(spec_prefix, buildinfo)
+
+    # Text files containing the prefix text
+    textfiles = [os.path.join(spec_prefix, f) for f in buildinfo["relocate_textfiles"]]
+    binaries = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_binaries")]
+    links = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_links", [])]
+
+    platform = spack.platforms.by_name(spec.platform)
+    if "macho" in platform.binary_formats:
+        relocate.relocate_macho_binaries(binaries, prefix_to_prefix)
+    elif "elf" in platform.binary_formats:
+        relocate.relocate_elf_binaries(binaries, prefix_to_prefix)
+
+    relocate.relocate_links(links, prefix_to_prefix)
+    relocate.relocate_text(textfiles, prefix_to_prefix)
+    changed_files = relocate.relocate_text_bin(binaries, prefix_to_prefix)
+
+    # Add ad-hoc signatures to patched macho files when on macOS.
+    if "macho" in platform.binary_formats and sys.platform == "darwin":
+        codesign = which("codesign")
+        if not codesign:
+            return
+        for binary in changed_files:
+            # preserve the original inode by running codesign on a copy
+            with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
+                codesign("-fs-", tmp_binary)
+
+    install_manifest = os.path.join(
+        spec.prefix,
+        spack.store.STORE.layout.metadata_dir,
+        spack.store.STORE.layout.manifest_file_name,
+    )
+    if not os.path.exists(install_manifest):
+        spec_id = spec.format("{name}/{hash:7}")
+        tty.warn("No manifest file in tarball for spec %s" % spec_id)
+
+    # overwrite old metadata with new
+    if spec.spliced:
+        # rewrite spec on disk
+        spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
+
+    # de-cache the install manifest
+    with contextlib.suppress(FileNotFoundError):
+        os.unlink(install_manifest)
 
 
 def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
@@ -2507,15 +2455,6 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
     except Exception as e:
         shutil.rmtree(spec.prefix, ignore_errors=True)
         raise e
-    else:
-        manifest_file = os.path.join(
-            spec.prefix,
-            spack.store.STORE.layout.metadata_dir,
-            spack.store.STORE.layout.manifest_file_name,
-        )
-        if not os.path.exists(manifest_file):
-            spec_id = spec.format("{name}/{hash:7}")
-            tty.warn("No manifest file in tarball for spec %s" % spec_id)
     finally:
         if tmpdir:
             shutil.rmtree(tmpdir, ignore_errors=True)
@@ -2620,10 +2559,6 @@ def install_root_node(
     tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
     extract_tarball(spec, download_result, force)
     spec.package.windows_establish_runtime_linkage()
-    if spec.spliced:  # overwrite old metadata with new
-        spack.store.STORE.layout.write_spec(
-            spec, spack.store.STORE.layout.spec_file_path(spec)
-        )
     spack.hooks.post_install(spec, False)
     spack.store.STORE.db.add(spec, allow_missing=allow_missing)
 
@@ -2661,11 +2596,14 @@ def try_direct_fetch(spec, mirrors=None):
         )
         try:
             _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
+            specfile_contents = codecs.getreader("utf-8")(fs).read()
             specfile_is_signed = True
-        except web_util.SpackWebError as e1:
+        except (web_util.SpackWebError, OSError) as e1:
             try:
                 _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
-            except web_util.SpackWebError as e2:
+                specfile_contents = codecs.getreader("utf-8")(fs).read()
+                specfile_is_signed = False
+            except (web_util.SpackWebError, OSError) as e2:
                 tty.debug(
                     f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
                     e1,
@@ -2675,7 +2613,6 @@ def try_direct_fetch(spec, mirrors=None):
                     f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
                 )
                 continue
-        specfile_contents = codecs.getreader("utf-8")(fs).read()
 
         # read the spec from the build cache file. All specs in build caches
         # are concrete (as they are built) so we need to mark this spec
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
_, _, json_file = web_util.read_from_url(keys_index)
|
_, _, json_file = web_util.read_from_url(keys_index)
|
||||||
json_index = sjson.load(codecs.getreader("utf-8")(json_file))
|
json_index = sjson.load(json_file)
|
||||||
except web_util.SpackWebError as url_err:
|
except (web_util.SpackWebError, OSError, ValueError) as url_err:
|
||||||
|
# TODO: avoid repeated request
|
||||||
if web_util.url_exists(keys_index):
|
if web_util.url_exists(keys_index):
|
||||||
tty.error(
|
tty.error(
|
||||||
f"Unable to find public keys in {url_util.format(fetch_url)},"
|
f"Unable to find public keys in {url_util.format(fetch_url)},"
|
||||||
@@ -3017,14 +2955,14 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen):
 
     def get_remote_hash(self):
         # Failure to fetch index.json.hash is not fatal
-        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
+        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, INDEX_HASH_FILE)
         try:
             response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
-        except (TimeoutError, urllib.error.URLError):
+            remote_hash = response.read(64)
+        except OSError:
            return None
 
         # Validate the hash
-        remote_hash = response.read(64)
         if not re.match(rb"[a-f\d]{64}$", remote_hash):
             return None
         return remote_hash.decode("utf-8")
@@ -3038,17 +2976,17 @@ def conditional_fetch(self) -> FetchIndexResult:
             return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
 
         # Otherwise, download index.json
-        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
+        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
 
         try:
             response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
-        except (TimeoutError, urllib.error.URLError) as e:
-            raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
+        except OSError as e:
+            raise FetchIndexError(f"Could not fetch index from {url_index}", e) from e
 
         try:
             result = codecs.getreader("utf-8")(response).read()
-        except ValueError as e:
-            raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e
+        except (ValueError, OSError) as e:
+            raise FetchIndexError(f"Remote index {url_index} is invalid") from e
 
         computed_hash = compute_hash(result)
 
|
|||||||
|
|
||||||
def conditional_fetch(self) -> FetchIndexResult:
|
def conditional_fetch(self) -> FetchIndexResult:
|
||||||
# Just do a conditional fetch immediately
|
# Just do a conditional fetch immediately
|
||||||
url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
|
url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
|
||||||
headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}
|
headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -3092,12 +3030,12 @@ def conditional_fetch(self) -> FetchIndexResult:
|
|||||||
# Not modified; that means fresh.
|
# Not modified; that means fresh.
|
||||||
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
|
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
|
||||||
raise FetchIndexError(f"Could not fetch index {url}", e) from e
|
raise FetchIndexError(f"Could not fetch index {url}", e) from e
|
||||||
except (TimeoutError, urllib.error.URLError) as e:
|
except OSError as e: # URLError, socket.timeout, etc.
|
||||||
raise FetchIndexError(f"Could not fetch index {url}", e) from e
|
raise FetchIndexError(f"Could not fetch index {url}", e) from e
|
||||||
|
|
||||||
try:
|
try:
|
||||||
result = codecs.getreader("utf-8")(response).read()
|
result = codecs.getreader("utf-8")(response).read()
|
||||||
except ValueError as e:
|
except (ValueError, OSError) as e:
|
||||||
raise FetchIndexError(f"Remote index {url} is invalid", e) from e
|
raise FetchIndexError(f"Remote index {url} is invalid", e) from e
|
||||||
|
|
||||||
headers = response.headers
|
headers = response.headers
|
||||||
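The repeated narrowing to ``except OSError`` above relies on the standard exception hierarchy; the following snippet (plain Python, independent of Spack) shows why the old ``(TimeoutError, urllib.error.URLError)`` tuples are already covered by ``OSError``.

```python
# URLError and TimeoutError are both OSError subclasses in Python 3, and
# socket.timeout is an OSError subclass as well (an alias of TimeoutError on
# newer interpreters), so a single `except OSError` covers all of them.
import socket
import urllib.error

assert issubclass(urllib.error.URLError, OSError)
assert issubclass(TimeoutError, OSError)
assert issubclass(socket.timeout, OSError)
```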
@@ -3129,11 +3067,11 @@ def conditional_fetch(self) -> FetchIndexResult:
                     headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                 )
             )
-        except (TimeoutError, urllib.error.URLError) as e:
+        except OSError as e:
             raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e
 
         try:
-            manifest = json.loads(response.read())
+            manifest = json.load(response)
         except Exception as e:
             raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
 
@@ -3148,14 +3086,16 @@ def conditional_fetch(self) -> FetchIndexResult:
             return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
 
         # Otherwise fetch the blob / index.json
-        response = self.urlopen(
-            urllib.request.Request(
-                url=self.ref.blob_url(index_digest),
-                headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
+        try:
+            response = self.urlopen(
+                urllib.request.Request(
+                    url=self.ref.blob_url(index_digest),
+                    headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
+                )
             )
-        )
-
-        result = codecs.getreader("utf-8")(response).read()
+            result = codecs.getreader("utf-8")(response).read()
+        except (OSError, ValueError) as e:
+            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
 
         # Make sure the blob we download has the advertised hash
         if compute_hash(result) != index_digest.digest:
@@ -5,12 +5,14 @@
 import fnmatch
 import glob
 import importlib
-import os.path
+import os
 import re
 import sys
 import sysconfig
 import warnings
-from typing import Dict, Optional, Sequence, Union
+from typing import Optional, Sequence, Union
+
+from typing_extensions import TypedDict
 
 import archspec.cpu
 
@@ -18,13 +20,17 @@
 from llnl.util import tty
 
 import spack.platforms
+import spack.spec
 import spack.store
 import spack.util.environment
 import spack.util.executable
 
 from .config import spec_for_current_python
 
-QueryInfo = Dict[str, "spack.spec.Spec"]
+
+class QueryInfo(TypedDict, total=False):
+    spec: spack.spec.Spec
+    command: spack.util.executable.Executable
 
 
 def _python_import(module: str) -> bool:
|
|||||||
):
|
):
|
||||||
spack.util.environment.path_put_first("PATH", [bin_dir])
|
spack.util.environment.path_put_first("PATH", [bin_dir])
|
||||||
if query_info is not None:
|
if query_info is not None:
|
||||||
query_info["command"] = spack.util.executable.which(*executables, path=bin_dir)
|
query_info["command"] = spack.util.executable.which(
|
||||||
|
*executables, path=bin_dir, required=True
|
||||||
|
)
|
||||||
query_info["spec"] = concrete_spec
|
query_info["spec"] = concrete_spec
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
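`QueryInfo` changes from a plain `Dict[str, "spack.spec.Spec"]` alias to a `TypedDict` with `total=False`, so type checkers know the dictionary may carry a `spec` and a `command` key and that either may be absent. A small self-contained sketch of the same pattern (the `str` value types here are illustrative stand-ins, not the Spack classes):

```python
from typing import Optional
from typing_extensions import TypedDict


class QueryInfo(TypedDict, total=False):
    # total=False makes every key optional: an empty dict is a valid QueryInfo.
    spec: str
    command: str


def search(name: str, query_info: Optional[QueryInfo] = None) -> bool:
    if query_info is not None:
        # Type checkers verify the key names and value types here.
        query_info["spec"] = f"{name}@1.0"
        query_info["command"] = f"/usr/bin/{name}"
    return True


info: QueryInfo = {}
if search("cmake", query_info=info):
    print(info.get("command"))
```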
@@ -4,7 +4,7 @@
 """Manage configuration swapping for bootstrapping purposes"""
 
 import contextlib
-import os.path
+import os
 import sys
 from typing import Any, Dict, Generator, MutableSequence, Sequence
 
@@ -25,7 +25,6 @@
 import functools
 import json
 import os
-import os.path
 import sys
 import uuid
 from typing import Any, Callable, Dict, List, Optional, Tuple
@@ -34,8 +33,10 @@
 from llnl.util.lang import GroupedExceptionHandler
 
 import spack.binary_distribution
+import spack.concretize
 import spack.config
 import spack.detection
+import spack.error
 import spack.mirrors.mirror
 import spack.platforms
 import spack.spec
@@ -44,10 +45,17 @@
 import spack.util.executable
 import spack.util.path
 import spack.util.spack_yaml
+import spack.util.url
 import spack.version
 from spack.installer import PackageInstaller
 
-from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
+from ._common import (
+    QueryInfo,
+    _executables_in_store,
+    _python_import,
+    _root_spec,
+    _try_import_from_store,
+)
 from .clingo import ClingoBootstrapConcretizer
 from .config import spack_python_interpreter, spec_for_current_python
 
@@ -89,8 +97,12 @@ def __init__(self, conf: ConfigDictionary) -> None:
        self.name = conf["name"]
        self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

-        # Promote (relative) paths to file urls
-        self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url
+        # Check for relative paths, and turn them into absolute paths
+        # root is the metadata_dir
+        maybe_url = conf["info"]["url"]
+        if spack.util.url.is_path_instead_of_url(maybe_url) and not os.path.isabs(maybe_url):
+            maybe_url = os.path.join(self.metadata_dir, maybe_url)
+        self.url = spack.mirrors.mirror.Mirror(maybe_url).fetch_url

    @property
    def mirror_scope(self) -> spack.config.InternalConfigScope:
@@ -134,7 +146,7 @@ class BuildcacheBootstrapper(Bootstrapper):

    def __init__(self, conf) -> None:
        super().__init__(conf)
-        self.last_search: Optional[ConfigDictionary] = None
+        self.last_search: Optional[QueryInfo] = None
        self.config_scope_name = f"bootstrap_buildcache-{uuid.uuid4()}"

    @staticmethod
@@ -211,14 +223,14 @@ def _install_and_test(
                for _, pkg_hash, pkg_sha256 in item["binaries"]:
                    self._install_by_hash(pkg_hash, pkg_sha256, bincache_platform)

-                info: ConfigDictionary = {}
+                info: QueryInfo = {}
                if test_fn(query_spec=abstract_spec, query_info=info):
                    self.last_search = info
                    return True
        return False

    def try_import(self, module: str, abstract_spec_str: str) -> bool:
-        info: ConfigDictionary
+        info: QueryInfo
        test_fn, info = functools.partial(_try_import_from_store, module), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            return True
@@ -231,7 +243,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
        return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)

    def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
-        info: ConfigDictionary
+        info: QueryInfo
        test_fn, info = functools.partial(_executables_in_store, executables), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            self.last_search = info
@@ -249,11 +261,11 @@ class SourceBootstrapper(Bootstrapper):

    def __init__(self, conf) -> None:
        super().__init__(conf)
-        self.last_search: Optional[ConfigDictionary] = None
+        self.last_search: Optional[QueryInfo] = None
        self.config_scope_name = f"bootstrap_source-{uuid.uuid4()}"

    def try_import(self, module: str, abstract_spec_str: str) -> bool:
-        info: ConfigDictionary = {}
+        info: QueryInfo = {}
        if _try_import_from_store(module, abstract_spec_str, query_info=info):
            self.last_search = info
            return True
@@ -270,10 +282,10 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
            bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
            concrete_spec = bootstrapper.concretize()
        else:
-            concrete_spec = spack.spec.Spec(
+            abstract_spec = spack.spec.Spec(
                abstract_spec_str + " ^" + spec_for_current_python()
            )
-            concrete_spec.concretize()
+            concrete_spec = spack.concretize.concretize_one(abstract_spec)

        msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
        tty.debug(msg.format(module, abstract_spec_str))
@@ -288,7 +300,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
        return False

    def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
-        info: ConfigDictionary = {}
+        info: QueryInfo = {}
        if _executables_in_store(executables, abstract_spec_str, query_info=info):
            self.last_search = info
            return True
@@ -299,7 +311,7 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bo
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

-        concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
+        concrete_spec = spack.concretize.concretize_one(abstract_spec_str)
        msg = "[BOOTSTRAP] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        with spack.config.override(self.mirror_scope):
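In the `__init__` hunk above, the raw `conf["info"]["url"]` is no longer handed straight to `Mirror`; a relative path is first anchored at the metadata directory. A rough standalone sketch of that normalization, with a hypothetical `looks_like_path` helper standing in for `spack.util.url.is_path_instead_of_url`:

```python
import os
from urllib.parse import urlparse


def looks_like_path(value: str) -> bool:
    # Hypothetical stand-in: treat anything without a URL scheme as a filesystem path.
    return urlparse(value).scheme == ""


def resolve_mirror_url(metadata_dir: str, configured: str) -> str:
    # Relative paths in the bootstrap config are interpreted relative to the
    # metadata directory, so a config can ship next to its mirror.
    if looks_like_path(configured) and not os.path.isabs(configured):
        configured = os.path.join(metadata_dir, configured)
    return configured


print(resolve_mirror_url("/opt/bootstrap/metadata", "../binary-mirror"))
print(resolve_mirror_url("/opt/bootstrap/metadata", "https://mirror.example.com"))
```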
@@ -316,11 +328,9 @@ def create_bootstrapper(conf: ConfigDictionary):
    return _bootstrap_methods[btype](conf)


-def source_is_enabled_or_raise(conf: ConfigDictionary):
-    """Raise ValueError if the source is not enabled for bootstrapping"""
-    trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
-    if not trusted.get(name, False):
-        raise ValueError("source is not trusted")
+def source_is_enabled(conf: ConfigDictionary) -> bool:
+    """Returns true if the source is enabled for bootstrapping"""
+    return spack.config.get("bootstrap:trusted").get(conf["name"], False)


 def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):
@@ -350,24 +360,23 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
+        if not source_is_enabled(current_config):
+            continue
        with exception_handler.forward(current_config["name"], Exception):
-            source_is_enabled_or_raise(current_config)
-            current_bootstrapper = create_bootstrapper(current_config)
-            if current_bootstrapper.try_import(module, abstract_spec):
+            if create_bootstrapper(current_config).try_import(module, abstract_spec):
                return

-    assert exception_handler, (
-        f"expected at least one exception to have been raised at this point: "
-        f"while bootstrapping {module}"
-    )
    msg = f'cannot bootstrap the "{module}" Python module '
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '
-    if tty.is_debug():
+    if not exception_handler:
+        msg += ": no bootstrapping sources are enabled"
+    elif spack.error.debug or spack.error.SHOW_BACKTRACE:
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
-        msg += "\nRun `spack --debug ...` for more detailed errors"
+        msg += "\nRun `spack --backtrace ...` for more detailed errors"
    raise ImportError(msg)


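With `source_is_enabled_or_raise` replaced by a boolean `source_is_enabled`, disabled sources are now skipped before the exception handler is entered, and a run where every source is disabled produces the dedicated "no bootstrapping sources are enabled" message instead of tripping an assertion. A compact sketch of this control flow, using assumed stand-in helpers rather than the Spack APIs:

```python
from typing import Callable, Dict, List

Source = Dict[str, str]


def bootstrap(module: str, sources: List[Source], enabled: Dict[str, bool],
              try_import: Callable[[Source, str], bool]) -> None:
    errors: List[str] = []
    for source in sources:
        # Disabled sources are filtered up front instead of raising inside the loop.
        if not enabled.get(source["name"], False):
            continue
        try:
            if try_import(source, module):
                return
        except Exception as exc:  # collected and reported at the end
            errors.append(f"{source['name']}: {exc}")

    msg = f'cannot bootstrap the "{module}" Python module'
    if not errors:
        msg += ": no bootstrapping sources are enabled"
    raise ImportError(msg + "".join(f"\n  {e}" for e in errors))
```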
@@ -405,8 +414,9 @@ def ensure_executables_in_path_or_raise(
    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
+        if not source_is_enabled(current_config):
+            continue
        with exception_handler.forward(current_config["name"], Exception):
-            source_is_enabled_or_raise(current_config)
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_search_path(executables, abstract_spec):
                # Additional environment variables needed
@@ -414,6 +424,7 @@ def ensure_executables_in_path_or_raise(
                    current_bootstrapper.last_search["spec"],
                    current_bootstrapper.last_search["command"],
                )
+                assert cmd is not None, "expected an Executable"
                cmd.add_default_envmod(
                    spack.user_environment.environment_modifications_for_specs(
                        concrete_spec, set_package_py_globals=False
@@ -421,18 +432,17 @@ def ensure_executables_in_path_or_raise(
                )
                return cmd

-    assert exception_handler, (
-        f"expected at least one exception to have been raised at this point: "
-        f"while bootstrapping {executables_str}"
-    )
    msg = f"cannot bootstrap any of the {executables_str} executables "
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '
-    if tty.is_debug():
+    if not exception_handler:
+        msg += ": no bootstrapping sources are enabled"
+    elif spack.error.debug or spack.error.SHOW_BACKTRACE:
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
-        msg += "\nRun `spack --debug ...` for more detailed errors"
+        msg += "\nRun `spack --backtrace ...` for more detailed errors"
    raise RuntimeError(msg)


@@ -63,7 +63,6 @@ def _missing(name: str, purpose: str, system_only: bool = True) -> str:

def _core_requirements() -> List[RequiredResponseType]:
    _core_system_exes = {
-        "make": _missing("make", "required to build software from sources"),
        "patch": _missing("patch", "required to patch source code before building"),
        "tar": _missing("tar", "required to manage code archives"),
        "gzip": _missing("gzip", "required to compress/decompress code archives"),
@@ -44,7 +44,19 @@
 from enum import Flag, auto
 from itertools import chain
 from multiprocessing.connection import Connection
-from typing import Callable, Dict, List, Optional, Set, Tuple
+from typing import (
+    Callable,
+    Dict,
+    List,
+    Optional,
+    Sequence,
+    Set,
+    TextIO,
+    Tuple,
+    Type,
+    Union,
+    overload,
+)
 
 import archspec.cpu
 
@@ -146,48 +158,128 @@ def get_effective_jobs(jobs, parallel=True, supports_jobserver=False):


class MakeExecutable(Executable):
-    """Special callable executable object for make so the user can specify
-    parallelism options on a per-invocation basis. Specifying
-    'parallel' to the call will override whatever the package's
-    global setting is, so you can either default to true or false and
-    override particular calls. Specifying 'jobs_env' to a particular
-    call will name an environment variable which will be set to the
-    parallelism level (without affecting the normal invocation with
-    -j).
+    """Special callable executable object for make so the user can specify parallelism options
+    on a per-invocation basis.
    """

-    def __init__(self, name, jobs, **kwargs):
-        supports_jobserver = kwargs.pop("supports_jobserver", True)
-        super().__init__(name, **kwargs)
+    def __init__(self, name: str, *, jobs: int, supports_jobserver: bool = True) -> None:
+        super().__init__(name)
        self.supports_jobserver = supports_jobserver
        self.jobs = jobs

-    def __call__(self, *args, **kwargs):
-        """parallel, and jobs_env from kwargs are swallowed and used here;
-        remaining arguments are passed through to the superclass.
-        """
-        parallel = kwargs.pop("parallel", True)
-        jobs_env = kwargs.pop("jobs_env", None)
-        jobs_env_supports_jobserver = kwargs.pop("jobs_env_supports_jobserver", False)
+    @overload
+    def __call__(
+        self,
+        *args: str,
+        parallel: bool = ...,
+        jobs_env: Optional[str] = ...,
+        jobs_env_supports_jobserver: bool = ...,
+        fail_on_error: bool = ...,
+        ignore_errors: Union[int, Sequence[int]] = ...,
+        ignore_quotes: Optional[bool] = ...,
+        timeout: Optional[int] = ...,
+        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        input: Optional[TextIO] = ...,
+        output: Union[Optional[TextIO], str] = ...,
+        error: Union[Optional[TextIO], str] = ...,
+        _dump_env: Optional[Dict[str, str]] = ...,
+    ) -> None: ...
+
+    @overload
+    def __call__(
+        self,
+        *args: str,
+        parallel: bool = ...,
+        jobs_env: Optional[str] = ...,
+        jobs_env_supports_jobserver: bool = ...,
+        fail_on_error: bool = ...,
+        ignore_errors: Union[int, Sequence[int]] = ...,
+        ignore_quotes: Optional[bool] = ...,
+        timeout: Optional[int] = ...,
+        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        input: Optional[TextIO] = ...,
+        output: Union[Type[str], Callable] = ...,
+        error: Union[Optional[TextIO], str, Type[str], Callable] = ...,
+        _dump_env: Optional[Dict[str, str]] = ...,
+    ) -> str: ...
+
+    @overload
+    def __call__(
+        self,
+        *args: str,
+        parallel: bool = ...,
+        jobs_env: Optional[str] = ...,
+        jobs_env_supports_jobserver: bool = ...,
+        fail_on_error: bool = ...,
+        ignore_errors: Union[int, Sequence[int]] = ...,
+        ignore_quotes: Optional[bool] = ...,
+        timeout: Optional[int] = ...,
+        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
+        input: Optional[TextIO] = ...,
+        output: Union[Optional[TextIO], str, Type[str], Callable] = ...,
+        error: Union[Type[str], Callable] = ...,
+        _dump_env: Optional[Dict[str, str]] = ...,
+    ) -> str: ...
+
+    def __call__(
+        self,
+        *args: str,
+        parallel: bool = True,
+        jobs_env: Optional[str] = None,
+        jobs_env_supports_jobserver: bool = False,
+        **kwargs,
+    ) -> Optional[str]:
+        """Runs this "make" executable in a subprocess.
+
+        Args:
+            parallel: if False, parallelism is disabled
+            jobs_env: environment variable that will be set to the current level of parallelism
+            jobs_env_supports_jobserver: whether the jobs env supports a job server
+
+        For all the other **kwargs, refer to the base class.
+        """
        jobs = get_effective_jobs(
            self.jobs, parallel=parallel, supports_jobserver=self.supports_jobserver
        )
        if jobs is not None:
-            args = ("-j{0}".format(jobs),) + args
+            args = (f"-j{jobs}",) + args

        if jobs_env:
-            # Caller wants us to set an environment variable to
-            # control the parallelism.
+            # Caller wants us to set an environment variable to control the parallelism
            jobs_env_jobs = get_effective_jobs(
                self.jobs, parallel=parallel, supports_jobserver=jobs_env_supports_jobserver
            )
            if jobs_env_jobs is not None:
-                kwargs["extra_env"] = {jobs_env: str(jobs_env_jobs)}
+                extra_env = kwargs.setdefault("extra_env", {})
+                extra_env.update({jobs_env: str(jobs_env_jobs)})

        return super().__call__(*args, **kwargs)


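The rewritten `__call__` keeps a permissive `**kwargs` implementation but adds `typing.overload` stubs, so callers that capture output (e.g. `output=str`) are typed as receiving a `str`, while plain invocations are typed as returning `None`. A trimmed illustration of the same trick (these are not the Spack signatures):

```python
from typing import Optional, Type, overload


class Runner:
    @overload
    def __call__(self, *args: str) -> None: ...

    @overload
    def __call__(self, *args: str, output: Type[str]) -> str: ...

    def __call__(self, *args: str, output: Optional[Type[str]] = None) -> Optional[str]:
        # A real implementation would run a subprocess; this one just fakes it.
        text = " ".join(args)
        if output is str:
            return text  # second overload: caller asked for captured output
        print(text)
        return None      # first overload: output went to stdout


make = Runner()
make("-j4", "all")                        # typed as returning None
captured = make("--version", output=str)  # typed as returning str
```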
+class UndeclaredDependencyError(spack.error.SpackError):
+    """Raised if a dependency is invoking an executable through a module global, without
+    declaring a dependency on it.
+    """
+
+
+class DeprecatedExecutable:
+    def __init__(self, pkg: str, exe: str, exe_pkg: str) -> None:
+        self.pkg = pkg
+        self.exe = exe
+        self.exe_pkg = exe_pkg
+
+    def __call__(self, *args, **kwargs):
+        raise UndeclaredDependencyError(
+            f"{self.pkg} is using {self.exe} without declaring a dependency on {self.exe_pkg}"
+        )
+
+    def add_default_env(self, key: str, value: str):
+        self.__call__()
+
+
def clean_environment():
    # Stuff in here sanitizes the build environment to eliminate
    # anything the user has set that may interfere. We apply it immediately
@@ -209,11 +301,13 @@ def clean_environment():
    env.unset("CPLUS_INCLUDE_PATH")
    env.unset("OBJC_INCLUDE_PATH")

+    # prevent configure scripts from sourcing variables from config site file (AC_SITE_LOAD).
+    env.set("CONFIG_SITE", os.devnull)
    env.unset("CMAKE_PREFIX_PATH")

    env.unset("PYTHONPATH")
    env.unset("R_HOME")
    env.unset("R_ENVIRON")

    env.unset("LUA_PATH")
    env.unset("LUA_CPATH")

@@ -621,10 +715,9 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
    module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
    module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

-    # TODO: make these build deps that can be installed if not found.
-    module.make = MakeExecutable("make", jobs)
-    module.gmake = MakeExecutable("gmake", jobs)
-    module.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
+    module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
+    module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
+    module.ninja = DeprecatedExecutable(pkg.name, "ninja", "ninja")
    # TODO: johnwparent: add package or builder support to define these build tools
    # for now there is no entrypoint for builders to define these on their
    # own
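`module.make`, `module.gmake`, and `module.ninja` are no longer pre-built `MakeExecutable` objects; they become `DeprecatedExecutable` placeholders that raise as soon as a package invokes them without declaring the corresponding build dependency. A sketch of the resulting failure mode, reusing the class shape from the hunk above but with a plain `RuntimeError` base instead of `spack.error.SpackError`:

```python
class UndeclaredDependencyError(RuntimeError):
    """Raised when a package uses a tool it never declared a dependency on."""


class DeprecatedExecutable:
    def __init__(self, pkg: str, exe: str, exe_pkg: str) -> None:
        self.pkg = pkg
        self.exe = exe
        self.exe_pkg = exe_pkg

    def __call__(self, *args, **kwargs):
        raise UndeclaredDependencyError(
            f"{self.pkg} is using {self.exe} without declaring a dependency on {self.exe_pkg}"
        )


# What a package's build() now sees if it forgot `depends_on("gmake", type="build")`:
make = DeprecatedExecutable("my-package", "make", "gmake")
try:
    make("-j8")
except UndeclaredDependencyError as e:
    print(e)
```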
@@ -6,7 +6,9 @@
 import llnl.util.filesystem as fs
 
 import spack.directives
+import spack.spec
 import spack.util.executable
+import spack.util.prefix
 
 from .autotools import AutotoolsBuilder, AutotoolsPackage
 
@@ -17,19 +19,18 @@ class AspellBuilder(AutotoolsBuilder):
    to the Aspell extensions.
    """

-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self,
+        pkg: "AspellDictPackage",  # type: ignore[override]
+        spec: spack.spec.Spec,
+        prefix: spack.util.prefix.Prefix,
+    ):
        aspell = spec["aspell"].prefix.bin.aspell
        prezip = spec["aspell"].prefix.bin.prezip
        destdir = prefix

-        sh = spack.util.executable.which("sh")
-        sh(
-            "./configure",
-            "--vars",
-            "ASPELL={0}".format(aspell),
-            "PREZIP={0}".format(prezip),
-            "DESTDIR={0}".format(destdir),
-        )
+        sh = spack.util.executable.Executable("/bin/sh")
+        sh("./configure", "--vars", f"ASPELL={aspell}", f"PREZIP={prezip}", f"DESTDIR={destdir}")


# Aspell dictionaries install their bits into their prefix.lib
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path
 import stat
 import subprocess
 from typing import Callable, List, Optional, Set, Tuple, Union
@@ -356,6 +355,13 @@ def _do_patch_libtool_configure(self) -> None:
        )
        # Support Libtool 2.4.2 and older:
        x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')
+        # Configure scripts generated with libtool < 2.5.4 have a faulty test for the
+        # -single_module linker flag. A deprecation warning makes it think the default is
+        # -multi_module, triggering it to use problematic linker flags (such as ld -r). The
+        # linker default is `-single_module` from (ancient) macOS 10.4, so override by setting
+        # `lt_cv_apple_cc_single_mod=yes`. See the fix in libtool commit
+        # 82f7f52123e4e7e50721049f7fa6f9b870e09c9d.
+        x.filter("lt_cv_apple_cc_single_mod=no", "lt_cv_apple_cc_single_mod=yes", string=True)

    @spack.phase_callbacks.run_after("configure")
    def _do_patch_libtool(self) -> None:
@@ -527,7 +533,7 @@ def build_directory(self) -> str:
        return build_dir

    @spack.phase_callbacks.run_before("autoreconf")
-    def delete_configure_to_force_update(self) -> None:
+    def _delete_configure_to_force_update(self) -> None:
        if self.force_autoreconf:
            fs.force_remove(self.configure_abs_path)

@@ -540,7 +546,7 @@ def autoreconf_search_path_args(self) -> List[str]:
        return _autoreconf_search_path_args(self.spec)

    @spack.phase_callbacks.run_after("autoreconf")
-    def set_configure_or_die(self) -> None:
+    def _set_configure_or_die(self) -> None:
        """Ensure the presence of a "configure" script, or raise. If the "configure"
        is found, a module level attribute is set.

@@ -564,10 +570,7 @@ def configure_args(self) -> List[str]:
        return []

    def autoreconf(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Not needed usually, configure should be already there"""

@@ -596,10 +599,7 @@ def autoreconf(
        self.pkg.module.autoreconf(*autoreconf_args)

    def configure(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run "configure", with the arguments specified by the builder and an
        appropriately set prefix.
@@ -612,10 +612,7 @@ def configure(
        pkg.module.configure(*options)

    def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run "make" on the build targets specified by the builder."""
        # See https://autotools.io/automake/silent.html
@@ -625,10 +622,7 @@ def build(
        pkg.module.make(*params)

    def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run "make" on the install targets specified by the builder."""
        with fs.working_dir(self.build_directory):
@@ -825,7 +819,7 @@ def installcheck(self) -> None:
        self.pkg._if_make_target_execute("installcheck")

    @spack.phase_callbacks.run_after("install")
-    def remove_libtool_archives(self) -> None:
+    def _remove_libtool_archives(self) -> None:
        """Remove all .la files in prefix sub-folders if the package sets
        ``install_libtool_archives`` to be False.
        """
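Across the builder modules in this range, phase methods move from untyped `def build(self, pkg, spec, prefix)` (or a generic `spack.package_base.PackageBase` annotation) to package-specific annotations such as `pkg: AutotoolsPackage`, plus `spack.spec.Spec` and `spack.util.prefix.Prefix`. The shape of the resulting signature, sketched with placeholder classes in place of the Spack types:

```python
class Spec: ...              # placeholder for spack.spec.Spec
class Prefix(str): ...       # placeholder for spack.util.prefix.Prefix
class AutotoolsPackage: ...  # placeholder for the concrete package class


class AutotoolsBuilder:
    build_targets = ["all"]

    def build(
        self, pkg: AutotoolsPackage, spec: Spec, prefix: Prefix
    ) -> None:
        # Annotating pkg with the concrete package type lets type checkers see
        # the build-system specific attributes on pkg instead of a generic base.
        print(f"make {' '.join(self.build_targets)} into {prefix}")


AutotoolsBuilder().build(AutotoolsPackage(), Spec(), Prefix("/opt/prefix"))
```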
@@ -10,6 +10,8 @@
 import llnl.util.tty as tty
 
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 
 from .cmake import CMakeBuilder, CMakePackage
 
@@ -293,6 +295,13 @@ def initconfig_hardware_entries(self):
            entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
            entries.append(cmake_cache_string("GPU_TARGETS", arch_str))

+            if spec.satisfies("%gcc"):
+                entries.append(
+                    cmake_cache_string(
+                        "CMAKE_HIP_FLAGS", f"--gcc-toolchain={self.pkg.compiler.prefix}"
+                    )
+                )
+
        return entries

    def std_initconfig_entries(self):
@@ -323,7 +332,9 @@ def initconfig_package_entries(self):
        """This method is to be overwritten by the package"""
        return []

-    def initconfig(self, pkg, spec, prefix):
+    def initconfig(
+        self, pkg: "CachedCMakePackage", spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        cache_entries = (
            self.std_initconfig_entries()
            + self.initconfig_compiler_entries()
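`initconfig_hardware_entries` now emits a `CMAKE_HIP_FLAGS` cache entry pointing HIP's compiler at the active GCC installation when the spec is built with `%gcc`. A sketch of producing that cache string (this `cmake_cache_string` is a simplified stand-in for the real helper):

```python
def cmake_cache_string(name: str, value: str) -> str:
    # Simplified form of a CMake initial-cache assignment.
    return f'set({name} "{value}" CACHE STRING "")'


def hip_entries(compiler_name: str, compiler_prefix: str) -> list:
    entries = []
    if compiler_name == "gcc":
        # Make the HIP toolchain use the same GCC installation Spack is compiling with.
        entries.append(
            cmake_cache_string("CMAKE_HIP_FLAGS", f"--gcc-toolchain={compiler_prefix}")
        )
    return entries


print("\n".join(hip_entries("gcc", "/opt/gcc-12.3.0")))
```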
@@ -7,6 +7,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when
 
@@ -81,12 +83,16 @@ def check_args(self):
    def setup_build_environment(self, env):
        env.set("CARGO_HOME", self.stage.path)

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        """Runs ``cargo install`` in the source directory"""
        with fs.working_dir(self.build_directory):
            pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        """Copy build files into package prefix."""
        with fs.working_dir(self.build_directory):
            fs.install_tree("out", prefix)
@@ -454,10 +454,7 @@ def cmake_args(self) -> List[str]:
        return []

    def cmake(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Runs ``cmake`` in the build directory"""

@@ -474,10 +471,7 @@ def cmake(
        pkg.module.cmake(*options)

    def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Make the build targets"""
        with fs.working_dir(self.build_directory):
@@ -488,10 +482,7 @@ def build(
        pkg.module.ninja(*self.build_targets)

    def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Make the install targets"""
        with fs.working_dir(self.build_directory):
@@ -7,6 +7,8 @@
 import spack.directives
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 
 from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests
 
@@ -48,3 +50,8 @@ class GenericBuilder(BuilderWithDefaults):

    # unconditionally perform any post-install phase tests
    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+
+    def install(
+        self, pkg: Package, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
+        raise NotImplementedError
@@ -7,7 +7,9 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
-from spack.directives import build_system, extends
+import spack.spec
+import spack.util.prefix
+from spack.directives import build_system, depends_on
 from spack.multimethod import when
 
 from ._checks import BuilderWithDefaults, execute_install_time_tests
@@ -26,9 +28,7 @@ class GoPackage(spack.package_base.PackageBase):
    build_system("go")

    with when("build_system=go"):
-        # TODO: this seems like it should be depends_on, see
-        # setup_dependent_build_environment in go for why I kept it like this
-        extends("go@1.14:", type="build")
+        depends_on("go", type="build")


@spack.builder.builder("go")
@@ -71,6 +71,7 @@ class GoBuilder(BuilderWithDefaults):
    def setup_build_environment(self, env):
        env.set("GO111MODULE", "on")
        env.set("GOTOOLCHAIN", "local")
+        env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))

    @property
    def build_directory(self):
@@ -81,19 +82,31 @@ def build_directory(self):
    def build_args(self):
        """Arguments for ``go build``."""
        # Pass ldflags -s = --strip-all and -w = --no-warnings by default
-        return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
+        return [
+            "-p",
+            str(self.pkg.module.make_jobs),
+            "-modcacherw",
+            "-ldflags",
+            "-s -w",
+            "-o",
+            f"{self.pkg.name}",
+        ]

    @property
    def check_args(self):
        """Argument for ``go test`` during check phase"""
        return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        """Runs ``go build`` in the source directory"""
        with fs.working_dir(self.build_directory):
            pkg.module.go("build", *self.build_args)

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        """Install built binaries into prefix bin."""
        with fs.working_dir(self.build_directory):
            fs.mkdirp(prefix.bin)
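`GoBuilder.build_args` now threads the job count into `go build -p`, bounding how many packages the Go tool builds in parallel, alongside the existing stripping flags. A rough illustration of assembling those arguments (the `make_jobs` value here is a stand-in for the module-level jobs setting):

```python
from typing import List


def go_build_args(pkg_name: str, make_jobs: int) -> List[str]:
    # -p limits parallel package builds; -ldflags "-s -w" strips the binary.
    return [
        "-p", str(make_jobs),
        "-modcacherw",
        "-ldflags", "-s -w",
        "-o", pkg_name,
    ]


print(["go", "build", *go_build_args("mytool", 8)])
```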
@@ -7,7 +7,9 @@
 
 import spack.builder
 import spack.package_base
+import spack.spec
 import spack.util.executable
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
 
@@ -55,7 +57,9 @@ class LuaBuilder(spack.builder.Builder):
    #: Names associated with package attributes in the old build-system format
    legacy_attributes = ()

-    def unpack(self, pkg, spec, prefix):
+    def unpack(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        if os.path.splitext(pkg.stage.archive_file)[1] == ".rock":
            directory = pkg.luarocks("unpack", pkg.stage.archive_file, output=str)
            dirlines = directory.split("\n")
@@ -66,15 +70,16 @@ def unpack(self, pkg, spec, prefix):
    def _generate_tree_line(name, prefix):
        return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)

-    def generate_luarocks_config(self, pkg, spec, prefix):
+    def generate_luarocks_config(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        spec = self.pkg.spec
        table_entries = []
        for d in spec.traverse(deptype=("build", "run")):
            if d.package.extends(self.pkg.extendee_spec):
                table_entries.append(self._generate_tree_line(d.name, d.prefix))

-        path = self._luarocks_config_path()
-        with open(path, "w", encoding="utf-8") as config:
+        with open(self._luarocks_config_path(), "w", encoding="utf-8") as config:
            config.write(
                """
                deps_mode="all"
@@ -85,23 +90,26 @@ def generate_luarocks_config(self, pkg, spec, prefix):
                    "\n".join(table_entries)
                )
            )
-        return path

-    def preprocess(self, pkg, spec, prefix):
+    def preprocess(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        """Override this to preprocess source before building with luarocks"""
        pass

    def luarocks_args(self):
        return []

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        rock = "."
        specs = find(".", "*.rockspec", recursive=False)
        if specs:
            rock = specs[0]
        rocks_args = self.luarocks_args()
        rocks_args.append(rock)
-        self.pkg.luarocks("--tree=" + prefix, "make", *rocks_args)
+        pkg.luarocks("--tree=" + prefix, "make", *rocks_args)

    def _luarocks_config_path(self):
        return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
@@ -98,29 +98,20 @@ def build_directory(self) -> str:
        return self.pkg.stage.source_path

    def edit(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Edit the Makefile before calling make. The default is a no-op."""
        pass

    def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run "make" on the build targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.make(*self.build_targets)

    def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run "make" on the install targets specified by the builder."""
        with fs.working_dir(self.build_directory):
@@ -5,6 +5,8 @@
 
 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on
 from spack.multimethod import when
 from spack.util.executable import which
@@ -58,16 +60,20 @@ def build_args(self):
        """List of args to pass to build phase."""
        return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        """Compile code and package into a JAR file."""
        with fs.working_dir(self.build_directory):
-            mvn = which("mvn")
+            mvn = which("mvn", required=True)
            if self.pkg.run_tests:
                mvn("verify", *self.build_args())
            else:
                mvn("package", "-DskipTests", *self.build_args())

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        """Copy to installation prefix."""
        with fs.working_dir(self.build_directory):
            fs.install_tree(".", prefix)
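The Maven builder now calls `which("mvn", required=True)`, so a missing executable fails immediately with a clear error instead of returning `None` and blowing up later on a `NoneType` call. A small stand-in sketch of that behavior (this `which` is a simplified illustration, not the Spack implementation):

```python
import shutil
from typing import Optional


class CommandNotFoundError(RuntimeError):
    pass


def which(name: str, required: bool = False) -> Optional[str]:
    path = shutil.which(name)
    if path is None and required:
        # Fail fast with a clear message instead of handing back None.
        raise CommandNotFoundError(f"'{name}' is required but was not found in PATH")
    return path


mvn = which("mvn", required=False)  # may be None
print("mvn found at:", mvn)
# which("definitely-not-a-tool", required=True)  # would raise CommandNotFoundError
```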
@@ -188,10 +188,7 @@ def meson_args(self) -> List[str]:
        return []

    def meson(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run ``meson`` in the build directory"""
        options = []
@@ -204,10 +201,7 @@ def meson(
        pkg.module.meson(*options)

    def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Make the build targets"""
        options = ["-v"]
@@ -216,10 +210,7 @@ def build(
        pkg.module.ninja(*options)

    def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
+        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Make the install targets"""
        with fs.working_dir(self.build_directory):
@@ -7,6 +7,8 @@
 
 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, conflicts
 
 from ._checks import BuilderWithDefaults
@@ -99,7 +101,9 @@ def msbuild_install_args(self):
        as `msbuild_args` by default."""
        return self.msbuild_args()

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        """Run "msbuild" on the build targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.msbuild(
@@ -108,7 +112,9 @@ def build(self, pkg, spec, prefix):
                self.define_targets(*self.build_targets),
            )

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        """Run "msbuild" on the install targets specified by the builder.
        This is INSTALL by default"""
        with fs.working_dir(self.build_directory):
@@ -7,6 +7,8 @@
 
 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, conflicts
 
 from ._checks import BuilderWithDefaults
@@ -123,7 +125,9 @@ def nmake_install_args(self):
        Individual packages should override to specify NMake args to command line"""
        return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        """Run "nmake" on the build targets specified by the builder."""
        opts = self.std_nmake_args
        opts += self.nmake_args()
@@ -132,7 +136,9 @@ def build(self, pkg, spec, prefix):
        with fs.working_dir(self.build_directory):
            pkg.module.nmake(*opts, *self.build_targets, ignore_quotes=self.ignore_quotes)

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        """Run "nmake" on the install targets specified by the builder.
        This is INSTALL by default"""
        opts = self.std_nmake_args
@@ -3,6 +3,8 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, extends
 from spack.multimethod import when
 
@@ -42,7 +44,9 @@ class OctaveBuilder(BuilderWithDefaults):
    #: Names associated with package attributes in the old build-system format
    legacy_attributes = ()

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: OctavePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
        """Install the package from the archive file"""
        pkg.module.octave(
            "--quiet",
|
@@ -10,6 +10,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.install_test import SkipTest, test_part
 from spack.multimethod import when
@@ -149,7 +151,9 @@ def configure_args(self):
         """
         return []

-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run Makefile.PL or Build.PL with arguments consisting of
         an appropriate installation base directory followed by the
         list returned by :py:meth:`~.PerlBuilder.configure_args`.
@@ -173,7 +177,9 @@ def fix_shebang(self):
             repl = "#!/usr/bin/env perl"
             filter_file(pattern, repl, "Build", backup=False)

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Builds a Perl package."""
         self.build_executable()

@@ -184,6 +190,8 @@ def check(self):
         """Runs built-in tests of a Perl package."""
         self.build_executable("test")

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Installs a Perl package."""
         self.build_executable("install")
@@ -28,6 +28,7 @@
 import spack.repo
 import spack.spec
 import spack.store
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.error import NoHeadersError, NoLibrariesError
 from spack.install_test import test_part
@@ -6,6 +6,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on

 from ._checks import BuilderWithDefaults, execute_build_time_tests
@@ -27,6 +29,7 @@ class QMakePackage(spack.package_base.PackageBase):
     build_system("qmake")

     depends_on("qmake", type="build", when="build_system=qmake")
+    depends_on("gmake", type="build")


 @spack.builder.builder("qmake")
@@ -61,17 +64,23 @@ def qmake_args(self):
         """List of arguments passed to qmake."""
         return []

-    def qmake(self, pkg, spec, prefix):
+    def qmake(
+        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Run ``qmake`` to configure the project and generate a Makefile."""
         with working_dir(self.build_directory):
             pkg.module.qmake(*self.qmake_args())

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Make the build targets"""
         with working_dir(self.build_directory):
             pkg.module.make()

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Make the install targets"""
         with working_dir(self.build_directory):
             pkg.module.make("install")
@@ -94,7 +94,7 @@ def list_url(cls):
         if cls.cran:
             return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"

-    @property
-    def git(self):
-        if self.bioc:
-            return f"https://git.bioconductor.org/packages/{self.bioc}"
+    @lang.classproperty
+    def git(cls):
+        if cls.bioc:
+            return f"https://git.bioconductor.org/packages/{cls.bioc}"
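Switching `git` from `@property` to `@lang.classproperty` makes the repository URL available on the class itself, which matters because fetch-related attributes are often consulted before a package instance exists. A minimal sketch of the difference (the `Demo` class and its `bioc` value are hypothetical; only `lang.classproperty` is the utility used in the diff):

```python
from llnl.util import lang


class Demo:
    bioc = "biocgenerics"  # made-up value for illustration

    @lang.classproperty
    def git(cls):
        # Evaluated against the class, so no instance is required.
        return f"https://git.bioconductor.org/packages/{cls.bioc}"


# Works directly on the class; a plain @property would need Demo().git instead.
print(Demo.git)
```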
@@ -9,6 +9,8 @@
 import llnl.util.tty as tty

 import spack.builder
+import spack.spec
+import spack.util.prefix
 from spack.build_environment import SPACK_NO_PARALLEL_MAKE
 from spack.config import determine_number_of_jobs
 from spack.directives import build_system, extends, maintainers
@@ -74,18 +76,22 @@ def build_directory(self):
             ret = os.path.join(ret, self.subdirectory)
         return ret

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: RacketPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install everything from build directory."""
         raco = Executable("raco")
         with fs.working_dir(self.build_directory):
-            parallel = self.pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
+            parallel = pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
+            name = pkg.racket_name
+            assert name is not None, "Racket package name is not set"
             args = [
                 "pkg",
                 "install",
                 "-t",
                 "dir",
                 "-n",
-                self.pkg.racket_name,
+                name,
                 "--deps",
                 "fail",
                 "--ignore-implies",
@@ -101,8 +107,7 @@ def install(self, pkg, spec, prefix):
             except ProcessError:
                 args.insert(-2, "--skip-installed")
                 raco(*args)
-                msg = (
-                    "Racket package {0} was already installed, uninstalling via "
+                tty.warn(
+                    f"Racket package {name} was already installed, uninstalling via "
                     "Spack may make someone unhappy!"
                 )
-                tty.warn(msg.format(self.pkg.racket_name))
@@ -140,7 +140,7 @@ class ROCmPackage(PackageBase):
         when="+rocm",
     )

-    depends_on("llvm-amdgpu", when="+rocm")
+    depends_on("llvm-amdgpu", type="build", when="+rocm")
     depends_on("hsa-rocr-dev", when="+rocm")
     depends_on("hip +rocm", when="+rocm")

@@ -5,6 +5,8 @@

 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, extends, maintainers

 from ._checks import BuilderWithDefaults
@@ -42,7 +44,9 @@ class RubyBuilder(BuilderWithDefaults):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Build a Ruby gem."""

         # ruby-rake provides both rake.gemspec and Rakefile, but only
@@ -58,7 +62,9 @@ def build(self, pkg, spec, prefix):
             # Some Ruby packages only ship `*.gem` files, so nothing to build
             pass

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install a Ruby gem.

         The ruby package sets ``GEM_HOME`` to tell gem where to install to."""
@@ -4,6 +4,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on

 from ._checks import BuilderWithDefaults, execute_build_time_tests
@@ -59,7 +61,9 @@ def build_args(self, spec, prefix):
         """Arguments to pass to build."""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Build the package."""
         pkg.module.scons(*self.build_args(spec, prefix))

@@ -67,7 +71,9 @@ def install_args(self, spec, prefix):
         """Arguments to pass to install."""
         return []

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install the package."""
         pkg.module.scons("install", *self.install_args(spec, prefix))

@@ -11,6 +11,8 @@
 import spack.install_test
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
 from spack.util.executable import Executable
@@ -41,6 +43,7 @@ class SIPPackage(spack.package_base.PackageBase):
     with when("build_system=sip"):
         extends("python", type=("build", "link", "run"))
         depends_on("py-sip", type="build")
+        depends_on("gmake", type="build")

     @property
     def import_modules(self):
@@ -130,7 +133,9 @@ class SIPBuilder(BuilderWithDefaults):

     build_directory = "build"

-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Configure the package."""

         # https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
@@ -148,7 +153,9 @@ def configure_args(self):
         """Arguments to pass to configure."""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Build the package."""
         args = self.build_args()

@@ -159,7 +166,9 @@ def build_args(self):
         """Arguments to pass to build."""
         return []

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install the package."""
         args = self.install_args()

@@ -6,6 +6,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on

 from ._checks import BuilderWithDefaults, execute_build_time_tests, execute_install_time_tests
@@ -97,7 +99,9 @@ def waf(self, *args, **kwargs):
         with working_dir(self.build_directory):
             self.python("waf", "-j{0}".format(jobs), *args, **kwargs)

-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Configures the project."""
         args = ["--prefix={0}".format(self.pkg.prefix)]
         args += self.configure_args()
@@ -108,7 +112,9 @@ def configure_args(self):
         """Arguments to pass to configure."""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Executes the build."""
         args = self.build_args()

@@ -118,7 +124,9 @@ def build_args(self):
         """Arguments to pass to build."""
         return []

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Installs the targets on the system."""
         args = self.install_args()

@@ -14,7 +14,6 @@
 import zipfile
 from collections import namedtuple
 from typing import Callable, Dict, List, Set
-from urllib.error import HTTPError, URLError
 from urllib.request import HTTPHandler, Request, build_opener

 import llnl.util.filesystem as fs
@@ -472,12 +471,9 @@ def generate_pipeline(env: ev.Environment, args) -> None:
     # Use all unpruned specs to populate the build group for this set
     cdash_config = cfg.get("cdash")
     if options.cdash_handler and options.cdash_handler.auth_token:
-        try:
-            options.cdash_handler.populate_buildgroup(
-                [options.cdash_handler.build_name(s) for s in pipeline_specs]
-            )
-        except (SpackError, HTTPError, URLError, TimeoutError) as err:
-            tty.warn(f"Problem populating buildgroup: {err}")
+        options.cdash_handler.populate_buildgroup(
+            [options.cdash_handler.build_name(s) for s in pipeline_specs]
+        )
     elif cdash_config:
         # warn only if there was actually a CDash configuration.
         tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -1,23 +1,21 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import codecs
 import copy
 import json
 import os
 import re
-import ssl
 import sys
 import time
 from collections import deque
 from enum import Enum
 from typing import Dict, Generator, List, Optional, Set, Tuple
 from urllib.parse import quote, urlencode, urlparse
-from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
+from urllib.request import Request

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.lang import Singleton, memoized
+from llnl.util.lang import memoized

 import spack.binary_distribution as bindist
 import spack.config as cfg
@@ -35,32 +33,11 @@
 from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
 from spack.reporters.cdash import build_stamp as cdash_build_stamp


-def _urlopen():
-    error_handler = web_util.SpackHTTPDefaultErrorHandler()
-
-    # One opener with HTTPS ssl enabled
-    with_ssl = build_opener(
-        HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
-    )
-
-    # One opener with HTTPS ssl disabled
-    without_ssl = build_opener(
-        HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
-    )
-
-    # And dynamically dispatch based on the config:verify_ssl.
-    def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
-        opener = with_ssl if verify_ssl else without_ssl
-        timeout = timeout or cfg.get("config:connect_timeout", 1)
-        return opener.open(fullurl, data, timeout)
-
-    return dispatch_open
-
-
 IS_WINDOWS = sys.platform == "win32"
 SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
-_dyn_mapping_urlopener = Singleton(_urlopen)
+
+# this exists purely for testing purposes
+_urlopen = web_util.urlopen


 def copy_files_to_artifacts(src, artifacts_dir):
@@ -279,26 +256,25 @@ def copy_test_results(self, source, dest):
         reports = fs.join_path(source, "*_Test*.xml")
         copy_files_to_artifacts(reports, dest)

-    def create_buildgroup(self, opener, headers, url, group_name, group_type):
+    def create_buildgroup(self, headers, url, group_name, group_type):
         data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}

         enc_data = json.dumps(data).encode("utf-8")

         request = Request(url, data=enc_data, headers=headers)

-        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
-        response_code = response.getcode()
-
-        if response_code not in [200, 201]:
-            msg = f"Creating buildgroup failed (response code = {response_code})"
-            tty.warn(msg)
+        try:
+            response_text = _urlopen(request, timeout=SPACK_CDASH_TIMEOUT).read()
+        except OSError as e:
+            tty.warn(f"Failed to create CDash buildgroup: {e}")
             return None

-        response_text = response.read()
-        response_json = json.loads(response_text)
-        build_group_id = response_json["id"]
-
-        return build_group_id
+        try:
+            response_json = json.loads(response_text)
+            return response_json["id"]
+        except (json.JSONDecodeError, KeyError) as e:
+            tty.warn(f"Failed to parse CDash response: {e}")
+            return None

     def populate_buildgroup(self, job_names):
         url = f"{self.url}/api/v1/buildgroup.php"
@@ -308,16 +284,11 @@ def populate_buildgroup(self, job_names):
             "Content-Type": "application/json",
         }

-        opener = build_opener(HTTPHandler)
-
-        parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
-        group_id = self.create_buildgroup(
-            opener, headers, url, f"Latest {self.build_group}", "Latest"
-        )
+        parent_group_id = self.create_buildgroup(headers, url, self.build_group, "Daily")
+        group_id = self.create_buildgroup(headers, url, f"Latest {self.build_group}", "Latest")

         if not parent_group_id or not group_id:
-            msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
-            tty.warn(msg)
+            tty.warn(f"Failed to create or retrieve buildgroups for {self.build_group}")
             return

         data = {
@@ -329,15 +300,12 @@ def populate_buildgroup(self, job_names):

         enc_data = json.dumps(data).encode("utf-8")

-        request = Request(url, data=enc_data, headers=headers)
-        request.get_method = lambda: "PUT"
+        request = Request(url, data=enc_data, headers=headers, method="PUT")

-        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
-        response_code = response.getcode()
-
-        if response_code != 200:
-            msg = f"Error response code ({response_code}) in populate_buildgroup"
-            tty.warn(msg)
+        try:
+            _urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
+        except OSError as e:
+            tty.warn(f"Failed to populate CDash buildgroup: {e}")

     def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):
         """Explicitly report skipping testing of a spec (e.g., it's CI
@@ -735,9 +703,6 @@ def _apply_section(dest, src):
             for value in header.values():
                 value = os.path.expandvars(value)

-            verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
-            timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))
-
             required = mapping.get("require", [])
             allowed = mapping.get("allow", [])
             ignored = mapping.get("ignore", [])
@@ -771,19 +736,15 @@ def job_query(job):
                 endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
             )
             try:
-                response = _dyn_mapping_urlopener(
-                    request, verify_ssl=verify_ssl, timeout=timeout
-                )
+                response = _urlopen(request)
+                config = json.load(response)
             except Exception as e:
                 # For now just ignore any errors from dynamic mapping and continue
                 # This is still experimental, and failures should not stop CI
                 # from running normally
-                tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
-                tty.warn(f"{e}")
+                tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}: {e}")
                 continue

-            config = json.load(codecs.getreader("utf-8")(response))
-
             # Strip ignore keys
             if ignored:
                 for key in ignored:
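The refactor above funnels every CDash request through `spack.util.web.urlopen`, aliased at module level as `_urlopen` so tests can monkeypatch it, instead of hand-built `urllib` openers that duplicated the SSL-verification logic. A rough sketch of the resulting calling pattern (the `put_json` helper and its arguments are hypothetical; only `web_util.urlopen`, `Request`, and the `timeout` keyword come from the diff):

```python
import json
from urllib.request import Request

import llnl.util.tty as tty

import spack.util.web as web_util

_urlopen = web_util.urlopen  # module-level alias, easy to monkeypatch in tests


def put_json(url, payload, headers, timeout=10):
    """Send a PUT with a JSON body, warning instead of raising on network errors."""
    request = Request(
        url, data=json.dumps(payload).encode("utf-8"), headers=headers, method="PUT"
    )
    try:
        return _urlopen(request, timeout=timeout)
    except OSError as e:  # urlopen failures are OSError subclasses (URLError, etc.)
        tty.warn(f"Request to {url} failed: {e}")
        return None
```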
@@ -202,7 +202,7 @@ def _concretize_spec_pairs(
     # Special case for concretizing a single spec
     if len(to_concretize) == 1:
         abstract, concrete = to_concretize[0]
-        return [concrete or abstract.concretized(tests=tests)]
+        return [concrete or spack.concretize.concretize_one(abstract, tests=tests)]

     # Special case if every spec is either concrete or has an abstract hash
     if all(
@@ -254,9 +254,9 @@ def matching_spec_from_env(spec):
     """
     env = ev.active_environment()
     if env:
-        return env.matching_spec(spec) or spec.concretized()
+        return env.matching_spec(spec) or spack.concretize.concretize_one(spec)
     else:
-        return spec.concretized()
+        return spack.concretize.concretize_one(spec)


 def matching_specs_from_env(specs):
@@ -297,7 +297,7 @@ def disambiguate_spec(

 def disambiguate_spec_from_hashes(
     spec: spack.spec.Spec,
-    hashes: List[str],
+    hashes: Optional[List[str]],
     local: bool = False,
     installed: Union[bool, InstallRecordStatus] = True,
     first: bool = False,
@@ -1,7 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os.path
+import os
 import shutil
 import sys
 import tempfile
@@ -14,9 +14,9 @@
 import spack.bootstrap
 import spack.bootstrap.config
 import spack.bootstrap.core
+import spack.concretize
 import spack.config
 import spack.mirrors.utils
-import spack.spec
 import spack.stage
 import spack.util.path
 import spack.util.spack_yaml
@@ -397,7 +397,7 @@ def _mirror(args):
         llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
         # Suppress tty from the call below for terser messages
         llnl.util.tty.set_msg_enabled(False)
-        spec = spack.spec.Spec(spec_str).concretized()
+        spec = spack.concretize.concretize_one(spec_str)
         for node in spec.traverse():
             spack.mirrors.utils.create(mirror_dir, [node])
         llnl.util.tty.set_msg_enabled(True)
@@ -436,6 +436,7 @@ def write_metadata(subdir, metadata):
         shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
         shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory)
         instructions += cmd.format("local-binaries", rel_directory)
+        instructions += " % spack buildcache update-index <final-path>/bootstrap_cache\n"
         print(instructions)

@@ -16,6 +16,7 @@

 import spack.binary_distribution as bindist
 import spack.cmd
+import spack.concretize
 import spack.config
 import spack.deptypes as dt
 import spack.environment as ev
@@ -554,8 +555,7 @@ def check_fn(args: argparse.Namespace):
         tty.msg("No specs provided, exiting.")
         return

-    for spec in specs:
-        spec.concretize()
+    specs = [spack.concretize.concretize_one(s) for s in specs]

     # Next see if there are any configured binary mirrors
     configured_mirrors = spack.config.get("mirrors", scope=args.scope)
@@ -623,7 +623,7 @@ def save_specfile_fn(args):
     root = specs[0]

     if not root.concrete:
-        root.concretize()
+        root = spack.concretize.concretize_one(root)

     save_dependency_specfiles(
         root, args.specfile_dir, dependencies=spack.cmd.parse_specs(args.specs)
@@ -4,7 +4,7 @@


 import argparse
-import os.path
+import os
 import textwrap

 from llnl.util.lang import stable_partition
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path

 import llnl.util.tty

@@ -18,6 +18,7 @@
 from llnl.util.symlink import symlink

 import spack.cmd
+import spack.concretize
 import spack.environment as ev
 import spack.installer
 import spack.store
@@ -103,7 +104,7 @@ def deprecate(parser, args):
         )

     if args.install:
-        deprecator = specs[1].concretized()
+        deprecator = spack.concretize.concretize_one(specs[1])
     else:
         deprecator = spack.cmd.disambiguate_spec(specs[1], env, local=True)

@@ -10,6 +10,7 @@
 import spack.build_environment
 import spack.cmd
 import spack.cmd.common.arguments
+import spack.concretize
 import spack.config
 import spack.repo
 from spack.cmd.common import arguments
@@ -113,8 +114,8 @@ def dev_build(self, args):
     source_path = os.path.abspath(source_path)

     # Forces the build to run out of the source directory.
-    spec.constrain("dev_path=%s" % source_path)
-    spec.concretize()
+    spec.constrain(f'dev_path="{source_path}"')
+    spec = spack.concretize.concretize_one(spec)

     if spec.installed:
         tty.error("Already installed in %s" % spec.prefix)
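Quoting the value inside the `dev_path` constraint matters once a source directory can contain spaces or other characters the spec parser treats specially. A minimal sketch of the new pattern, with a made-up package name and path used only for illustration:

```python
import spack.concretize
import spack.spec

source_path = "/tmp/my project/src"  # hypothetical path containing a space

spec = spack.spec.Spec("zlib")
# The quoted form keeps the whole path intact inside the spec string; the old
# unquoted "dev_path=%s" form would be split at the whitespace during parsing.
spec.constrain(f'dev_path="{source_path}"')

# dev_build then concretizes the constrained spec through the new API:
spec = spack.concretize.concretize_one(spec)
```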
@@ -13,6 +13,7 @@
 from llnl.util import lang, tty

 import spack.cmd
+import spack.concretize
 import spack.config
 import spack.environment as ev
 import spack.paths
@@ -450,7 +451,7 @@ def concrete_specs_from_file(args):
         else:
             s = spack.spec.Spec.from_json(f)

-        concretized = s.concretized()
+        concretized = spack.concretize.concretize_one(s)
         if concretized.dag_hash() != s.dag_hash():
             msg = 'skipped invalid file "{0}". '
             msg += "The file does not contain a concrete spec."
@@ -7,9 +7,9 @@

 from llnl.path import convert_to_posix_path

+import spack.concretize
 import spack.paths
 import spack.util.executable
-from spack.spec import Spec

 description = "generate Windows installer"
 section = "admin"
@@ -65,8 +65,7 @@ def make_installer(parser, args):
     """
     if sys.platform == "win32":
         output_dir = args.output_dir
-        cmake_spec = Spec("cmake")
-        cmake_spec.concretize()
+        cmake_spec = spack.concretize.concretize_one("cmake")
         cmake_path = os.path.join(cmake_spec.prefix, "bin", "cmake.exe")
         cpack_path = os.path.join(cmake_spec.prefix, "bin", "cpack.exe")
         spack_source = args.spack_source
@@ -492,7 +492,7 @@ def extend_with_additional_versions(specs, num_versions):
         mirror_specs = spack.mirrors.utils.get_all_versions(specs)
     else:
         mirror_specs = spack.mirrors.utils.get_matching_versions(specs, num_versions=num_versions)
-    mirror_specs = [x.concretized() for x in mirror_specs]
+    mirror_specs = [spack.concretize.concretize_one(x) for x in mirror_specs]
     return mirror_specs


@@ -5,7 +5,7 @@
 """Implementation details of the ``spack module`` command."""

 import collections
-import os.path
+import os
 import shutil
 import sys

@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import os.path
+import os
 import shutil

 import llnl.util.tty as tty
@@ -144,7 +144,7 @@ def is_installed(spec):
         record = spack.store.STORE.db.query_local_by_spec_hash(spec.dag_hash())
         return record and record.installed

-    specs = traverse.traverse_nodes(
+    all_specs = traverse.traverse_nodes(
         specs,
         root=False,
         order="breadth",
@@ -155,7 +155,7 @@ def is_installed(spec):
     )

     with spack.store.STORE.db.read_transaction():
-        return [spec for spec in specs if is_installed(spec)]
+        return [spec for spec in all_specs if is_installed(spec)]


 def dependent_environments(
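Renaming the traversal result avoids shadowing the `specs` argument: `traverse_nodes` yields a lazy, single-use iterator, and rebinding the same name made it easy to confuse the original input with the traversal. A toy illustration of the fixed shape (all names below are hypothetical stand-ins, not Spack APIs):

```python
def filter_installed(specs, traverse_nodes, is_installed):
    # A distinct name keeps the input list and the dependency traversal separate.
    all_specs = traverse_nodes(specs)  # lazy iterator over dependency nodes
    return [s for s in all_specs if is_installed(s)]  # `specs` itself stays untouched
```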
@@ -5,7 +5,7 @@
 import argparse
 import collections
 import io
-import os.path
+import os
 import re
 import sys

@@ -37,13 +37,12 @@ def enable_compiler_existence_check():

 SpecPairInput = Tuple[Spec, Optional[Spec]]
 SpecPair = Tuple[Spec, Spec]
-SpecLike = Union[Spec, str]
 TestsType = Union[bool, Iterable[str]]


-def concretize_specs_together(
-    abstract_specs: Sequence[SpecLike], tests: TestsType = False
-) -> Sequence[Spec]:
+def _concretize_specs_together(
+    abstract_specs: Sequence[Spec], tests: TestsType = False
+) -> List[Spec]:
     """Given a number of specs as input, tries to concretize them together.

     Args:
@@ -51,11 +50,10 @@ def concretize_specs_together(
         tests: list of package names for which to consider tests dependencies. If True, all nodes
             will have test dependencies. If False, test dependencies will be disregarded.
     """
-    import spack.solver.asp
+    from spack.solver.asp import Solver

     allow_deprecated = spack.config.get("config:deprecated", False)
-    solver = spack.solver.asp.Solver()
-    result = solver.solve(abstract_specs, tests=tests, allow_deprecated=allow_deprecated)
+    result = Solver().solve(abstract_specs, tests=tests, allow_deprecated=allow_deprecated)
     return [s.copy() for s in result.specs]


@@ -72,7 +70,7 @@ def concretize_together(
     """
     to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
     abstract_specs = [abstract for abstract, _ in spec_list]
-    concrete_specs = concretize_specs_together(to_concretize, tests=tests)
+    concrete_specs = _concretize_specs_together(to_concretize, tests=tests)
     return list(zip(abstract_specs, concrete_specs))


@@ -90,7 +88,7 @@ def concretize_together_when_possible(
         tests: list of package names for which to consider tests dependencies. If True, all nodes
             will have test dependencies. If False, test dependencies will be disregarded.
     """
-    import spack.solver.asp
+    from spack.solver.asp import Solver

     to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
     old_concrete_to_abstract = {
@@ -98,9 +96,8 @@ def concretize_together_when_possible(
     }

     result_by_user_spec = {}
-    solver = spack.solver.asp.Solver()
     allow_deprecated = spack.config.get("config:deprecated", False)
-    for result in solver.solve_in_rounds(
+    for result in Solver().solve_in_rounds(
         to_concretize, tests=tests, allow_deprecated=allow_deprecated
     ):
         result_by_user_spec.update(result.specs_by_input)
@@ -124,7 +121,7 @@ def concretize_separately(
         tests: list of package names for which to consider tests dependencies. If True, all nodes
             will have test dependencies. If False, test dependencies will be disregarded.
     """
-    import spack.bootstrap
+    from spack.bootstrap import ensure_bootstrap_configuration, ensure_clingo_importable_or_raise

     to_concretize = [abstract for abstract, concrete in spec_list if not concrete]
     args = [
@@ -134,8 +131,8 @@ def concretize_separately(
     ]
     ret = [(i, abstract) for i, abstract in enumerate(to_concretize) if abstract.concrete]
     # Ensure we don't try to bootstrap clingo in parallel
-    with spack.bootstrap.ensure_bootstrap_configuration():
-        spack.bootstrap.ensure_clingo_importable_or_raise()
+    with ensure_bootstrap_configuration():
+        ensure_clingo_importable_or_raise()

     # Ensure all the indexes have been built or updated, since
     # otherwise the processes in the pool may timeout on waiting
@@ -190,10 +187,52 @@ def _concretize_task(packed_arguments: Tuple[int, str, TestsType]) -> Tuple[int,
     index, spec_str, tests = packed_arguments
     with tty.SuppressOutput(msg_enabled=False):
         start = time.time()
-        spec = Spec(spec_str).concretized(tests=tests)
+        spec = concretize_one(Spec(spec_str), tests=tests)
         return index, spec, time.time() - start


+def concretize_one(spec: Union[str, Spec], tests: TestsType = False) -> Spec:
+    """Return a concretized copy of the given spec.
+
+    Args:
+        tests: if False disregard 'test' dependencies, if a list of names activate them for
+            the packages in the list, if True activate 'test' dependencies for all packages.
+    """
+    from spack.solver.asp import Solver, SpecBuilder
+
+    if isinstance(spec, str):
+        spec = Spec(spec)
+    spec = spec.lookup_hash()
+
+    if spec.concrete:
+        return spec.copy()
+
+    for node in spec.traverse():
+        if not node.name:
+            raise spack.error.SpecError(
+                f"Spec {node} has no name; cannot concretize an anonymous spec"
+            )
+
+    allow_deprecated = spack.config.get("config:deprecated", False)
+    result = Solver().solve([spec], tests=tests, allow_deprecated=allow_deprecated)
+
+    # take the best answer
+    opt, i, answer = min(result.answers)
+    name = spec.name
+    # TODO: Consolidate this code with similar code in solve.py
+    if spec.virtual:
+        providers = [s.name for s in answer.values() if s.package.provides(name)]
+        name = providers[0]
+
+    node = SpecBuilder.make_node(pkg=name)
+    assert (
+        node in answer
+    ), f"cannot find {name} in the list of specs {','.join([n.pkg for n in answer.keys()])}"
+
+    concretized = answer[node]
+    return concretized
+
+
 class UnavailableCompilerVersionError(spack.error.SpackError):
     """Raised when there is no available compiler that satisfies a
     compiler spec."""
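The new `concretize_one` is the replacement for the `Spec.concretized()` / `Spec.concretize()` calls rewritten throughout the commands above. A minimal usage sketch (the package name is chosen arbitrarily for illustration):

```python
import spack.concretize
from spack.spec import Spec

# Accepts either a spec string or a Spec object and returns a concrete copy.
concrete = spack.concretize.concretize_one("zlib")
assert concrete.concrete

# Equivalent, starting from an abstract Spec, with test dependencies enabled:
concrete_with_tests = spack.concretize.concretize_one(Spec("zlib"), tests=True)
```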
@@ -36,6 +36,8 @@
 import sys
 from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union

+import jsonschema
+
 from llnl.util import filesystem, lang, tty

 import spack.error
@@ -51,6 +53,7 @@
 import spack.schema.definitions
 import spack.schema.develop
 import spack.schema.env
+import spack.schema.env_vars
 import spack.schema.mirrors
 import spack.schema.modules
 import spack.schema.packages
@@ -68,6 +71,7 @@
     "compilers": spack.schema.compilers.schema,
     "concretizer": spack.schema.concretizer.schema,
     "definitions": spack.schema.definitions.schema,
+    "env_vars": spack.schema.env_vars.schema,
     "view": spack.schema.view.schema,
     "develop": spack.schema.develop.schema,
     "mirrors": spack.schema.mirrors.schema,
@@ -1048,8 +1052,6 @@ def validate(
     This leverages the line information (start_mark, end_mark) stored
     on Spack YAML structures.
     """
-    import jsonschema
-
     try:
         spack.schema.Validator(schema).validate(data)
     except jsonschema.ValidationError as e:
@@ -6,6 +6,8 @@
 """
 import warnings

+import jsonschema
+
 import spack.environment as ev
 import spack.schema.env as env
 import spack.util.spack_yaml as syaml
@@ -30,8 +32,6 @@ def validate(configuration_file):
     Returns:
         A sanitized copy of the configuration stored in the input file
     """
-    import jsonschema
-
     with open(configuration_file, encoding="utf-8") as f:
         config = syaml.load(f)

@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Manages the details on the images used in the various stages."""
 import json
-import os.path
+import os
 import shlex
 import sys

@@ -9,6 +9,8 @@
 from collections import namedtuple
 from typing import Optional

+import jsonschema
+
 import spack.environment as ev
 import spack.error
 import spack.schema.env
@@ -188,8 +190,6 @@ def paths(self):
     @tengine.context_property
     def manifest(self):
         """The spack.yaml file that should be used in the image"""
-        import jsonschema
-
         # Copy in the part of spack.yaml prescribed in the configuration file
         manifest = copy.deepcopy(self.config)
         manifest.pop("container")
@@ -123,6 +123,15 @@
|
|||||||
"deprecated_for",
|
"deprecated_for",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
#: File where the database is written
|
||||||
|
INDEX_JSON_FILE = "index.json"
|
||||||
|
|
||||||
|
# Verifier file to check last modification of the DB
|
||||||
|
_INDEX_VERIFIER_FILE = "index_verifier"
|
||||||
|
|
||||||
|
# Lockfile for the database
|
||||||
|
_LOCK_FILE = "lock"
|
||||||
|
|
||||||
|
|
||||||
@llnl.util.lang.memoized
|
@llnl.util.lang.memoized
|
||||||
def _getfqdn():
|
def _getfqdn():
|
||||||
@@ -260,7 +269,7 @@ class ForbiddenLockError(SpackError):
|
|||||||
|
|
||||||
class ForbiddenLock:
|
class ForbiddenLock:
|
||||||
def __getattr__(self, name):
|
def __getattr__(self, name):
|
||||||
raise ForbiddenLockError("Cannot access attribute '{0}' of lock".format(name))
|
raise ForbiddenLockError(f"Cannot access attribute '{name}' of lock")
|
||||||
|
|
||||||
def __reduce__(self):
|
def __reduce__(self):
|
||||||
return ForbiddenLock, tuple()
|
return ForbiddenLock, tuple()
|
||||||
@@ -419,14 +428,25 @@ class FailureTracker:
|
|||||||
the likelihood of collision very low with no cleanup required.
|
the likelihood of collision very low with no cleanup required.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
#: root directory of the failure tracker
|
||||||
|
dir: pathlib.Path
|
||||||
|
|
||||||
|
#: File for locking particular concrete spec hashes
|
||||||
|
locker: SpecLocker
|
||||||
|
|
||||||
def __init__(self, root_dir: Union[str, pathlib.Path], default_timeout: Optional[float]):
|
def __init__(self, root_dir: Union[str, pathlib.Path], default_timeout: Optional[float]):
|
||||||
#: Ensure a persistent location for dealing with parallel installation
|
#: Ensure a persistent location for dealing with parallel installation
|
||||||
#: failures (e.g., across near-concurrent processes).
|
#: failures (e.g., across near-concurrent processes).
|
||||||
self.dir = pathlib.Path(root_dir) / _DB_DIRNAME / "failures"
|
self.dir = pathlib.Path(root_dir) / _DB_DIRNAME / "failures"
|
||||||
self.dir.mkdir(parents=True, exist_ok=True)
|
|
||||||
|
|
||||||
self.locker = SpecLocker(failures_lock_path(root_dir), default_timeout=default_timeout)
|
self.locker = SpecLocker(failures_lock_path(root_dir), default_timeout=default_timeout)
|
||||||
|
|
||||||
|
def _ensure_parent_directories(self) -> None:
|
||||||
|
"""Ensure that parent directories of the FailureTracker exist.
|
||||||
|
|
||||||
|
Accesses the filesystem only once, the first time it's called on a given FailureTracker.
|
||||||
|
"""
|
||||||
|
self.dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
def clear(self, spec: "spack.spec.Spec", force: bool = False) -> None:
|
def clear(self, spec: "spack.spec.Spec", force: bool = False) -> None:
|
||||||
"""Removes any persistent and cached failure tracking for the spec.
|
"""Removes any persistent and cached failure tracking for the spec.
|
||||||
|
|
||||||
@@ -469,13 +489,18 @@ def clear_all(self) -> None:
 
         tty.debug("Removing prefix failure tracking files")
         try:
-            for fail_mark in os.listdir(str(self.dir)):
-                try:
-                    (self.dir / fail_mark).unlink()
-                except OSError as exc:
-                    tty.warn(f"Unable to remove failure marking file {fail_mark}: {str(exc)}")
+            marks = os.listdir(str(self.dir))
+        except FileNotFoundError:
+            return  # directory doesn't exist yet
         except OSError as exc:
             tty.warn(f"Unable to remove failure marking files: {str(exc)}")
+            return
+
+        for fail_mark in marks:
+            try:
+                (self.dir / fail_mark).unlink()
+            except OSError as exc:
+                tty.warn(f"Unable to remove failure marking file {fail_mark}: {str(exc)}")
 
     def mark(self, spec: "spack.spec.Spec") -> lk.Lock:
         """Marks a spec as failing to install.
@@ -483,6 +508,8 @@ def mark(self, spec: "spack.spec.Spec") -> lk.Lock:
         Args:
             spec: spec that failed to install
         """
+        self._ensure_parent_directories()
+
         # Dump the spec to the failure file for (manual) debugging purposes
         path = self._path(spec)
         path.write_text(spec.to_json())
@@ -567,17 +594,13 @@ def __init__(
             Relevant only if the repository is not an upstream.
         """
         self.root = root
-        self.database_directory = os.path.join(self.root, _DB_DIRNAME)
+        self.database_directory = pathlib.Path(self.root) / _DB_DIRNAME
         self.layout = layout
 
         # Set up layout of database files within the db dir
-        self._index_path = os.path.join(self.database_directory, "index.json")
-        self._verifier_path = os.path.join(self.database_directory, "index_verifier")
-        self._lock_path = os.path.join(self.database_directory, "lock")
+        self._index_path = self.database_directory / INDEX_JSON_FILE
+        self._verifier_path = self.database_directory / _INDEX_VERIFIER_FILE
+        self._lock_path = self.database_directory / _LOCK_FILE
 
-        # Create needed directories and files
-        if not is_upstream and not os.path.exists(self.database_directory):
-            fs.mkdirp(self.database_directory)
-
         self.is_upstream = is_upstream
         self.last_seen_verifier = ""
@@ -592,14 +615,14 @@ def __init__(
 
         # initialize rest of state.
         self.db_lock_timeout = lock_cfg.database_timeout
-        tty.debug("DATABASE LOCK TIMEOUT: {0}s".format(str(self.db_lock_timeout)))
+        tty.debug(f"DATABASE LOCK TIMEOUT: {str(self.db_lock_timeout)}s")
 
         self.lock: Union[ForbiddenLock, lk.Lock]
         if self.is_upstream:
             self.lock = ForbiddenLock()
         else:
             self.lock = lk.Lock(
-                self._lock_path,
+                str(self._lock_path),
                 default_timeout=self.db_lock_timeout,
                 desc="database",
                 enable=lock_cfg.enable,
@@ -616,6 +639,11 @@ def __init__(
         self._write_transaction_impl = lk.WriteTransaction
         self._read_transaction_impl = lk.ReadTransaction
 
+    def _ensure_parent_directories(self):
+        """Create the parent directory for the DB, if necessary."""
+        if not self.is_upstream:
+            self.database_directory.mkdir(parents=True, exist_ok=True)
+
     def write_transaction(self):
         """Get a write lock context manager for use in a `with` block."""
         return self._write_transaction_impl(self.lock, acquire=self._read, release=self._write)
@@ -630,6 +658,8 @@ def _write_to_file(self, stream):
 
         This function does not do any locking or transactions.
         """
+        self._ensure_parent_directories()
+
         # map from per-spec hash code to installation record.
         installs = dict(
             (k, v.to_dict(include_fields=self.record_fields)) for k, v in self._data.items()
@@ -759,7 +789,7 @@ def _read_from_file(self, filename):
         Does not do any locking.
         """
         try:
-            with open(filename, "r", encoding="utf-8") as f:
+            with open(str(filename), "r", encoding="utf-8") as f:
                 # In the future we may use a stream of JSON objects, hence `raw_decode` for compat.
                 fdata, _ = JSONDecoder().raw_decode(f.read())
         except Exception as e:
@@ -860,11 +890,13 @@ def reindex(self):
         if self.is_upstream:
             raise UpstreamDatabaseLockingError("Cannot reindex an upstream database")
 
+        self._ensure_parent_directories()
+
         # Special transaction to avoid recursive reindex calls and to
         # ignore errors if we need to rebuild a corrupt database.
         def _read_suppress_error():
             try:
-                if os.path.isfile(self._index_path):
+                if self._index_path.is_file():
                     self._read_from_file(self._index_path)
             except CorruptDatabaseError as e:
                 tty.warn(f"Reindexing corrupt database, error was: {e}")
@@ -1007,7 +1039,7 @@ def _check_ref_counts(self):
                     % (key, found, expected, self._index_path)
                 )
 
-    def _write(self, type, value, traceback):
+    def _write(self, type=None, value=None, traceback=None):
         """Write the in-memory database index to its file path.
 
         This is a helper function called by the WriteTransaction context
@@ -1018,6 +1050,8 @@ def _write(self, type, value, traceback):
 
         This routine does no locking.
         """
+        self._ensure_parent_directories()
+
         # Do not write if exceptions were raised
         if type is not None:
             # A failure interrupted a transaction, so we should record that
@@ -1026,16 +1060,16 @@ def _write(self, type, value, traceback):
             self._state_is_inconsistent = True
             return
 
-        temp_file = self._index_path + (".%s.%s.temp" % (_getfqdn(), os.getpid()))
+        temp_file = str(self._index_path) + (".%s.%s.temp" % (_getfqdn(), os.getpid()))
 
         # Write a temporary database file them move it into place
         try:
             with open(temp_file, "w", encoding="utf-8") as f:
                 self._write_to_file(f)
-            fs.rename(temp_file, self._index_path)
+            fs.rename(temp_file, str(self._index_path))
 
             if _use_uuid:
-                with open(self._verifier_path, "w", encoding="utf-8") as f:
+                with self._verifier_path.open("w", encoding="utf-8") as f:
                     new_verifier = str(uuid.uuid4())
                     f.write(new_verifier)
                 self.last_seen_verifier = new_verifier
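The write path above keeps the usual atomic-update recipe: serialize to a uniquely named temp file next to index.json, rename it into place, then refresh a small verifier file with a fresh UUID so readers can cheaply detect that the index changed. A self-contained sketch of that recipe (hypothetical names, standard library only):

    import json
    import os
    import pathlib
    import uuid

    def write_index_atomically(index_path: pathlib.Path, data: dict) -> str:
        # Write to a temp file in the same directory so the final rename is
        # atomic on POSIX filesystems.
        temp_path = index_path.parent / (index_path.name + f".{os.getpid()}.temp")
        temp_path.write_text(json.dumps(data), encoding="utf-8")
        os.replace(temp_path, index_path)

        # Bump a verifier token that readers compare before re-parsing the index.
        verifier = str(uuid.uuid4())
        index_path.with_name("index_verifier").write_text(verifier, encoding="utf-8")
        return verifier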
@@ -1048,11 +1082,11 @@ def _write(self, type, value, traceback):
 
     def _read(self):
         """Re-read Database from the data in the set location. This does no locking."""
-        if os.path.isfile(self._index_path):
+        if self._index_path.is_file():
             current_verifier = ""
             if _use_uuid:
                 try:
-                    with open(self._verifier_path, "r", encoding="utf-8") as f:
+                    with self._verifier_path.open("r", encoding="utf-8") as f:
                         current_verifier = f.read()
                 except BaseException:
                     pass
@@ -1065,7 +1099,7 @@ def _read(self):
             self._state_is_inconsistent = False
             return
         elif self.is_upstream:
-            tty.warn("upstream not found: {0}".format(self._index_path))
+            tty.warn(f"upstream not found: {self._index_path}")
 
     def _add(
         self,
@@ -1330,7 +1364,7 @@ def deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> N
     def installed_relatives(
         self,
         spec: "spack.spec.Spec",
-        direction: str = "children",
+        direction: tr.DirectionType = "children",
         transitive: bool = True,
         deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
     ) -> Set["spack.spec.Spec"]:
@@ -1681,7 +1715,7 @@ def query(
         )
 
         results = list(local_results) + list(x for x in upstream_results if x not in local_results)
-        results.sort()
+        results.sort()  # type: ignore[call-overload]
         return results
 
     def query_one(
@@ -15,7 +15,6 @@
 import glob
 import itertools
 import os
-import os.path
 import pathlib
 import re
 import sys
@@ -7,7 +7,6 @@
 import collections
 import concurrent.futures
 import os
-import os.path
 import re
 import sys
 import traceback
@@ -32,7 +32,7 @@ class OpenMpi(Package):
 """
 import collections
 import collections.abc
-import os.path
+import os
 import re
 from typing import Any, Callable, List, Optional, Tuple, Type, Union
 
@@ -10,6 +10,7 @@
 
 import spack.environment as ev
 import spack.repo
+import spack.schema.environment
 import spack.store
 from spack.util.environment import EnvironmentModifications
 
@@ -156,6 +157,11 @@ def activate(
     # MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive)
     # become PATH variables.
     #
+
+    env_vars_yaml = env.manifest.configuration.get("env_vars", None)
+    if env_vars_yaml:
+        env_mods.extend(spack.schema.environment.parse(env_vars_yaml))
+
     try:
         if view and env.has_view(view):
             with spack.store.STORE.db.read_transaction():
@@ -189,6 +195,10 @@ def deactivate() -> EnvironmentModifications:
     if active is None:
         return env_mods
 
+    env_vars_yaml = active.manifest.configuration.get("env_vars", None)
+    if env_vars_yaml:
+        env_mods.extend(spack.schema.environment.parse(env_vars_yaml).reversed())
+
     active_view = os.getenv(ev.spack_env_view_var)
 
     if active_view and active.has_view(active_view):
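The activate and deactivate hunks are symmetric: activation parses the environment's env_vars configuration into a list of modifications and applies it, and deactivation applies the reversed list so the shell returns to its previous state. A toy illustration of that reversible-modification idea (hypothetical classes, not the Spack API):

    from typing import Dict, List, Tuple

    class EnvMods:
        # Collect (variable, value) assignments that can later be undone.
        def __init__(self) -> None:
            self.ops: List[Tuple[str, str]] = []

        def set(self, name: str, value: str) -> None:
            self.ops.append((name, value))

        def reversed(self) -> Dict[str, None]:
            # Undoing a plain `set` means unsetting the variable again; a real
            # implementation also restores prior values and path-list edits.
            return {name: None for name, _ in self.ops}

    mods = EnvMods()
    mods.set("FOO", "bar")   # applied on activate
    undo = mods.reversed()   # applied on deactivate -> {"FOO": None}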
@@ -25,7 +25,6 @@
 import functools
 import http.client
 import os
-import os.path
 import re
 import shutil
 import urllib.error
@@ -321,9 +320,15 @@ def _fetch_urllib(self, url):
 
         request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})
 
+        if os.path.lexists(save_file):
+            os.remove(save_file)
+
         try:
             response = web_util.urlopen(request)
-        except (TimeoutError, urllib.error.URLError) as e:
+            tty.msg(f"Fetching {url}")
+            with open(save_file, "wb") as f:
+                shutil.copyfileobj(response, f)
+        except OSError as e:
             # clean up archive on failure.
             if self.archive_file:
                 os.remove(self.archive_file)
@@ -331,14 +336,6 @@ def _fetch_urllib(self, url):
                 os.remove(save_file)
             raise FailedDownloadError(e) from e
 
-        tty.msg(f"Fetching {url}")
-
-        if os.path.lexists(save_file):
-            os.remove(save_file)
-
-        with open(save_file, "wb") as f:
-            shutil.copyfileobj(response, f)
-
         # Save the redirected URL for error messages. Sometimes we're redirected to an arbitrary
         # mirror that is broken, leading to spurious download failures. In that case it's helpful
         # for users to know which URL was actually fetched.
@@ -535,11 +532,16 @@ def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs):
     @_needs_stage
     def fetch(self):
         file = self.stage.save_filename
-        tty.msg(f"Fetching {self.url}")
+
+        if os.path.lexists(file):
+            os.remove(file)
 
         try:
             response = self._urlopen(self.url)
-        except (TimeoutError, urllib.error.URLError) as e:
+            tty.msg(f"Fetching {self.url}")
+            with open(file, "wb") as f:
+                shutil.copyfileobj(response, f)
+        except OSError as e:
             # clean up archive on failure.
             if self.archive_file:
                 os.remove(self.archive_file)
@@ -547,12 +549,6 @@ def fetch(self):
                 os.remove(file)
             raise FailedDownloadError(e) from e
 
-        if os.path.lexists(file):
-            os.remove(file)
-
-        with open(file, "wb") as f:
-            shutil.copyfileobj(response, f)
-
 
 class VCSFetchStrategy(FetchStrategy):
     """Superclass for version control system fetch strategies.
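Both fetch hunks move the body write into the same try block as the open call, so an OSError raised while streaming the response (not only while opening the connection) also triggers cleanup of the partial file, and any stale file is removed up front instead of after a successful open. A small sketch of the pattern with hypothetical names:

    import os
    import shutil
    import urllib.request

    def download(url: str, save_file: str) -> None:
        # Remove a stale partial download before starting over.
        if os.path.lexists(save_file):
            os.remove(save_file)

        try:
            response = urllib.request.urlopen(url)
            with open(save_file, "wb") as f:
                shutil.copyfileobj(response, f)  # failures here are cleaned up too
        except OSError:
            if os.path.lexists(save_file):
                os.remove(save_file)
            raise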
@@ -35,7 +35,6 @@
 
 import spack.config
 import spack.directory_layout
-import spack.paths
 import spack.projections
 import spack.relocate
 import spack.schema.projections
@@ -44,7 +43,6 @@
 import spack.util.spack_json as s_json
 import spack.util.spack_yaml as s_yaml
 from spack.error import SpackError
-from spack.hooks import sbang
 
 __all__ = ["FilesystemView", "YamlFilesystemView"]
 
@@ -91,16 +89,10 @@ def view_copy(
     if stat.S_ISLNK(src_stat.st_mode):
         spack.relocate.relocate_links(links=[dst], prefix_to_prefix=prefix_to_projection)
     elif spack.relocate.is_binary(dst):
-        spack.relocate.relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection)
+        spack.relocate.relocate_text_bin(binaries=[dst], prefix_to_prefix=prefix_to_projection)
     else:
         prefix_to_projection[spack.store.STORE.layout.root] = view._root
-
-        # This is vestigial code for the *old* location of sbang.
-        prefix_to_projection[f"#!/bin/bash {spack.paths.spack_root}/bin/sbang"] = (
-            sbang.sbang_shebang_line()
-        )
-
-        spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)
+        spack.relocate.relocate_text(files=[dst], prefix_to_prefix=prefix_to_projection)
 
     # The os module on Windows does not have a chown function.
     if sys.platform != "win32":
@@ -275,7 +275,7 @@ def _do_fake_install(pkg: "spack.package_base.PackageBase") -> None:
         fs.mkdirp(pkg.prefix.bin)
         fs.touch(os.path.join(pkg.prefix.bin, command))
         if sys.platform != "win32":
-            chmod = which("chmod")
+            chmod = which("chmod", required=True)
             chmod("+x", os.path.join(pkg.prefix.bin, command))
 
     # Install fake header file
@@ -539,7 +539,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
     # Note that we copy them in as they are in the *install* directory
     # NOT as they are in the repository, because we want a snapshot of
     # how *this* particular build was done.
-    for node in spec.traverse(deptype=all):
+    for node in spec.traverse(deptype="all"):
         if node is not spec:
             # Locate the dependency package in the install tree and find
             # its provenance information.
@@ -14,7 +14,6 @@
 import io
 import operator
 import os
-import os.path
 import pstats
 import re
 import shlex
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path
 from typing import Optional
 
 import llnl.url
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path
 import traceback
 
 import llnl.util.tty as tty
@@ -31,7 +31,7 @@
 import copy
 import datetime
 import inspect
-import os.path
+import os
 import re
 import string
 from typing import List, Optional
@@ -4,7 +4,7 @@
 
 import collections
 import itertools
-import os.path
+import os
 from typing import Dict, List, Optional, Tuple
 
 import llnl.util.filesystem as fs
@@ -5,7 +5,7 @@
 """This module implements the classes necessary to generate Tcl
 non-hierarchical modules.
 """
-import os.path
+import os
 from typing import Dict, Optional, Tuple
 
 import spack.config
@@ -7,6 +7,7 @@
 import base64
 import json
 import re
+import socket
 import time
 import urllib.error
 import urllib.parse
@@ -410,7 +411,7 @@ def wrapper(*args, **kwargs):
         for i in range(retries):
             try:
                 return f(*args, **kwargs)
-            except (urllib.error.URLError, TimeoutError) as e:
+            except OSError as e:
                 # Retry on internal server errors, and rate limit errors
                 # Potentially this could take into account the Retry-After header
                 # if registries support it
@@ -420,9 +421,10 @@ def wrapper(*args, **kwargs):
                         and (500 <= e.code < 600 or e.code == 429)
                     )
                     or (
-                        isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError)
+                        isinstance(e, urllib.error.URLError)
+                        and isinstance(e.reason, socket.timeout)
                     )
-                    or isinstance(e, TimeoutError)
+                    or isinstance(e, socket.timeout)
                 ):
                     # Exponential backoff
                     sleep(2**i)
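The wrapper above retries with exponential backoff: after the i-th failed attempt it sleeps 2**i seconds, so waits grow 1, 2, 4, 8, ... A compact sketch of such a decorator (hypothetical, standard library only):

    import time
    from functools import wraps

    def retry_with_backoff(retries: int = 5):
        def decorator(f):
            @wraps(f)
            def wrapper(*args, **kwargs):
                for i in range(retries):
                    try:
                        return f(*args, **kwargs)
                    except OSError:
                        if i == retries - 1:
                            raise
                        time.sleep(2**i)  # 1s, 2s, 4s, ... between attempts
            return wrapper
        return decorator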
@@ -3,8 +3,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import llnl.util.lang
 
-import spack.util.spack_yaml as syaml
-
 
 @llnl.util.lang.lazy_lexicographic_ordering
 class OperatingSystem:
@@ -42,4 +40,4 @@ def _cmp_iter(self):
         yield self.version
 
     def to_dict(self):
-        return syaml.syaml_dict([("name", self.name), ("version", self.version)])
+        return {"name": self.name, "version": self.version}
@@ -106,8 +106,16 @@
 from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets
 from spack.version import Version, ver
 
-# These are just here for editor support; they will be replaced when the build env
-# is set up.
-make = MakeExecutable("make", jobs=1)
-ninja = MakeExecutable("ninja", jobs=1)
-configure = Executable(join_path(".", "configure"))
+# These are just here for editor support; they may be set when the build env is set up.
+configure: Executable
+make_jobs: int
+make: MakeExecutable
+ninja: MakeExecutable
+python_include: str
+python_platlib: str
+python_purelib: str
+python: Executable
+spack_cc: str
+spack_cxx: str
+spack_f77: str
+spack_fc: str
@@ -767,6 +767,9 @@ def __init__(self, spec):
         self.win_rpath = fsys.WindowsSimulatedRPath(self)
         super().__init__()
 
+    def __getitem__(self, key: str) -> "PackageBase":
+        return self.spec[key].package
+
     @classmethod
     def dependency_names(cls):
         return _subkeys(cls.dependencies)
@@ -1096,14 +1099,14 @@ def update_external_dependencies(self, extendee_spec=None):
         """
         pass
 
-    def detect_dev_src_change(self):
+    def detect_dev_src_change(self) -> bool:
         """
         Method for checking for source code changes to trigger rebuild/reinstall
         """
         dev_path_var = self.spec.variants.get("dev_path", None)
         _, record = spack.store.STORE.db.query_by_spec_hash(self.spec.dag_hash())
-        mtime = fsys.last_modification_time_recursive(dev_path_var.value)
-        return mtime > record.installation_time
+        assert dev_path_var and record, "dev_path variant and record must be present"
+        return fsys.recursive_mtime_greater_than(dev_path_var.value, record.installation_time)
 
     def all_urls_for_version(self, version: StandardVersion) -> List[str]:
         """Return all URLs derived from version_urls(), url, urls, and
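The new detect_dev_src_change delegates to a filesystem helper that can stop as soon as any file under dev_path is newer than the recorded installation time. A rough standalone approximation of that check (not the actual llnl.util.filesystem implementation):

    import os

    def recursive_mtime_greater_than(path: str, timestamp: float) -> bool:
        # Walk the tree and bail out on the first file newer than `timestamp`.
        for root, _dirs, files in os.walk(path):
            for name in files:
                if os.path.getmtime(os.path.join(root, name)) > timestamp:
                    return True
        return False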
@@ -1816,12 +1819,6 @@ def _has_make_target(self, target):
         Returns:
             bool: True if 'target' is found, else False
         """
-        # Prevent altering LC_ALL for 'make' outside this function
-        make = copy.deepcopy(self.module.make)
-
-        # Use English locale for missing target message comparison
-        make.add_default_env("LC_ALL", "C")
-
         # Check if we have a Makefile
         for makefile in ["GNUmakefile", "Makefile", "makefile"]:
             if os.path.exists(makefile):
@@ -1830,6 +1827,12 @@ def _has_make_target(self, target):
             tty.debug("No Makefile found in the build directory")
             return False
 
+        # Prevent altering LC_ALL for 'make' outside this function
+        make = copy.deepcopy(self.module.make)
+
+        # Use English locale for missing target message comparison
+        make.add_default_env("LC_ALL", "C")
+
         # Check if 'target' is a valid target.
         #
         # `make -n target` performs a "dry run". It prints the commands that
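For context, the code following these hunks probes targets with `make -n <target>`: a dry run prints what would be executed without building anything, and the LC_ALL=C default keeps the "missing target" message stable for comparison. A hedged sketch of such a probe using the standard library instead of Spack's Executable wrapper:

    import os
    import subprocess

    def has_make_target(target: str, cwd: str = ".") -> bool:
        # Force the C locale so error messages are predictable; -n avoids building.
        env = {**os.environ, "LC_ALL": "C"}
        result = subprocess.run(
            ["make", "-n", target], cwd=cwd, env=env, capture_output=True, text=True
        )
        # The real check also inspects the error text; exit status is a rough proxy.
        return result.returncode == 0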
@@ -4,7 +4,6 @@
 
 import hashlib
 import os
-import os.path
 import pathlib
 import sys
 from typing import Any, Dict, Optional, Tuple, Type, Union
@@ -1,7 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os.path
+import os
 
 
 def slingshot_network():
@@ -6,8 +6,7 @@
 import os
 import re
 import sys
-from collections import OrderedDict
-from typing import List, Optional
+from typing import Dict, Iterable, List, Optional
 
 import macholib.mach_o
 import macholib.MachO
@@ -18,28 +17,11 @@
 from llnl.util.lang import memoized
 from llnl.util.symlink import readlink, symlink
 
-import spack.error
 import spack.store
 import spack.util.elf as elf
 import spack.util.executable as executable
 
-from .relocate_text import BinaryFilePrefixReplacer, TextFilePrefixReplacer
+from .relocate_text import BinaryFilePrefixReplacer, PrefixToPrefix, TextFilePrefixReplacer
 
 
-class InstallRootStringError(spack.error.SpackError):
-    def __init__(self, file_path, root_path):
-        """Signal that the relocated binary still has the original
-        Spack's store root string
-
-        Args:
-            file_path (str): path of the binary
-            root_path (str): original Spack's store root string
-        """
-        super().__init__(
-            "\n %s \ncontains string\n %s \n"
-            "after replacing it in rpaths.\n"
-            "Package should not be relocated.\n Use -a to override." % (file_path, root_path)
-        )
-
-
 @memoized
@@ -54,144 +36,11 @@ def _patchelf() -> Optional[executable.Executable]:
     return spack.bootstrap.ensure_patchelf_in_path_or_raise()
 
 
-def _elf_rpaths_for(path):
-    """Return the RPATHs for an executable or a library.
-
-    Args:
-        path (str): full path to the executable or library
-
-    Return:
-        RPATHs as a list of strings. Returns an empty array
-        on ELF parsing errors, or when the ELF file simply
-        has no rpaths.
-    """
-    return elf.get_rpaths(path) or []
-
-
-def _make_relative(reference_file, path_root, paths):
-    """Return a list where any path in ``paths`` that starts with
-    ``path_root`` is made relative to the directory in which the
-    reference file is stored.
-
-    After a path is made relative it is prefixed with the ``$ORIGIN``
-    string.
-
-    Args:
-        reference_file (str): file from which the reference directory
-            is computed
-        path_root (str): root of the relative paths
-        paths: (list) paths to be examined
-
-    Returns:
-        List of relative paths
-    """
-    start_directory = os.path.dirname(reference_file)
-    pattern = re.compile(path_root)
-    relative_paths = []
-
-    for path in paths:
-        if pattern.match(path):
-            rel = os.path.relpath(path, start=start_directory)
-            path = os.path.join("$ORIGIN", rel)
-
-        relative_paths.append(path)
-
-    return relative_paths
-
-
-def _normalize_relative_paths(start_path, relative_paths):
-    """Normalize the relative paths with respect to the original path name
-    of the file (``start_path``).
-
-    The paths that are passed to this function existed or were relevant
-    on another filesystem, so os.path.abspath cannot be used.
-
-    A relative path may contain the signifier $ORIGIN. Assuming that
-    ``start_path`` is absolute, this implies that the relative path
-    (relative to start_path) should be replaced with an absolute path.
-
-    Args:
-        start_path (str): path from which the starting directory
-            is extracted
-        relative_paths (str): list of relative paths as obtained by a
-            call to :ref:`_make_relative`
-
-    Returns:
-        List of normalized paths
-    """
-    normalized_paths = []
-    pattern = re.compile(re.escape("$ORIGIN"))
-    start_directory = os.path.dirname(start_path)
-
-    for path in relative_paths:
-        if path.startswith("$ORIGIN"):
-            sub = pattern.sub(start_directory, path)
-            path = os.path.normpath(sub)
-        normalized_paths.append(path)
-
-    return normalized_paths
-
-
 def _decode_macho_data(bytestring):
     return bytestring.rstrip(b"\x00").decode("ascii")
 
 
-def macho_make_paths_relative(path_name, old_layout_root, rpaths, deps, idpath):
-    """
-    Return a dictionary mapping the original rpaths to the relativized rpaths.
-    This dictionary is used to replace paths in mach-o binaries.
-    Replace old_dir with relative path from dirname of path name
-    in rpaths and deps; idpath is replaced with @rpath/libname.
-    """
-    paths_to_paths = dict()
-    if idpath:
-        paths_to_paths[idpath] = os.path.join("@rpath", "%s" % os.path.basename(idpath))
-    for rpath in rpaths:
-        if re.match(old_layout_root, rpath):
-            rel = os.path.relpath(rpath, start=os.path.dirname(path_name))
-            paths_to_paths[rpath] = os.path.join("@loader_path", "%s" % rel)
-        else:
-            paths_to_paths[rpath] = rpath
-    for dep in deps:
-        if re.match(old_layout_root, dep):
-            rel = os.path.relpath(dep, start=os.path.dirname(path_name))
-            paths_to_paths[dep] = os.path.join("@loader_path", "%s" % rel)
-        else:
-            paths_to_paths[dep] = dep
-    return paths_to_paths
-
-
-def macho_make_paths_normal(orig_path_name, rpaths, deps, idpath):
-    """
-    Return a dictionary mapping the relativized rpaths to the original rpaths.
-    This dictionary is used to replace paths in mach-o binaries.
-    Replace '@loader_path' with the dirname of the origname path name
-    in rpaths and deps; idpath is replaced with the original path name
-    """
-    rel_to_orig = dict()
-    if idpath:
-        rel_to_orig[idpath] = orig_path_name
-
-    for rpath in rpaths:
-        if re.match("@loader_path", rpath):
-            norm = os.path.normpath(
-                re.sub(re.escape("@loader_path"), os.path.dirname(orig_path_name), rpath)
-            )
-            rel_to_orig[rpath] = norm
-        else:
-            rel_to_orig[rpath] = rpath
-    for dep in deps:
-        if re.match("@loader_path", dep):
-            norm = os.path.normpath(
-                re.sub(re.escape("@loader_path"), os.path.dirname(orig_path_name), dep)
-            )
-            rel_to_orig[dep] = norm
-        else:
-            rel_to_orig[dep] = dep
-    return rel_to_orig
-
-
-def macho_find_paths(orig_rpaths, deps, idpath, old_layout_root, prefix_to_prefix):
+def _macho_find_paths(orig_rpaths, deps, idpath, prefix_to_prefix):
     """
     Inputs
     original rpaths from mach-o binaries
@@ -207,13 +56,12 @@ def macho_find_paths(orig_rpaths, deps, idpath, old_layout_root, prefix_to_prefi
     # Sort from longest path to shortest, to ensure we try /foo/bar/baz before /foo/bar
     prefix_iteration_order = sorted(prefix_to_prefix, key=len, reverse=True)
     for orig_rpath in orig_rpaths:
-        if orig_rpath.startswith(old_layout_root):
-            for old_prefix in prefix_iteration_order:
-                new_prefix = prefix_to_prefix[old_prefix]
-                if orig_rpath.startswith(old_prefix):
-                    new_rpath = re.sub(re.escape(old_prefix), new_prefix, orig_rpath)
-                    paths_to_paths[orig_rpath] = new_rpath
-                    break
+        for old_prefix in prefix_iteration_order:
+            new_prefix = prefix_to_prefix[old_prefix]
+            if orig_rpath.startswith(old_prefix):
+                new_rpath = re.sub(re.escape(old_prefix), new_prefix, orig_rpath)
+                paths_to_paths[orig_rpath] = new_rpath
+                break
         else:
             paths_to_paths[orig_rpath] = orig_rpath
 
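The rewritten loop depends on trying prefixes from longest to shortest, so a mapping for /foo/bar/baz wins over a broader mapping for /foo/bar. A small standalone sketch of that ordering rule with made-up paths:

    import re
    from typing import Dict

    def remap_path(path: str, prefix_to_prefix: Dict[str, str]) -> str:
        # Longest keys first, so the most specific prefix mapping is applied.
        for old in sorted(prefix_to_prefix, key=len, reverse=True):
            if path.startswith(old):
                return re.sub(re.escape(old), prefix_to_prefix[old], path)
        return path

    mapping = {"/old/store": "/new/store", "/old/store/pkg": "/elsewhere/pkg"}
    print(remap_path("/old/store/pkg/lib", mapping))  # -> /elsewhere/pkg/lib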
@@ -237,7 +85,7 @@ def macho_find_paths(orig_rpaths, deps, idpath, old_layout_root, prefix_to_prefi
     return paths_to_paths
 
 
-def modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths):
+def _modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths):
     """
     This function is used to make machO buildcaches on macOS by
     replacing old paths with new paths using install_name_tool
@@ -280,7 +128,7 @@ def modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths):
         install_name_tool(*args, temp_path)
 
 
-def macholib_get_paths(cur_path):
+def _macholib_get_paths(cur_path):
     """Get rpaths, dependent libraries, and library id of mach-o objects."""
     headers = []
     try:
@@ -348,9 +196,7 @@ def _set_elf_rpaths_and_interpreter(
     return None
 
 
-def relocate_macho_binaries(
-    path_names, old_layout_root, new_layout_root, prefix_to_prefix, rel, old_prefix, new_prefix
-):
+def relocate_macho_binaries(path_names, prefix_to_prefix):
     """
     Use macholib python package to get the rpaths, depedent libraries
     and library identity for libraries from the MachO object. Modify them
@@ -363,88 +209,26 @@ def relocate_macho_binaries(
         # Corner case where macho object file ended up in the path name list
         if path_name.endswith(".o"):
             continue
-        if rel:
-            # get the relativized paths
-            rpaths, deps, idpath = macholib_get_paths(path_name)
-            # get the file path name in the original prefix
-            orig_path_name = re.sub(re.escape(new_prefix), old_prefix, path_name)
-            # get the mapping of the relativized paths to the original
-            # normalized paths
-            rel_to_orig = macho_make_paths_normal(orig_path_name, rpaths, deps, idpath)
-            # replace the relativized paths with normalized paths
-            modify_macho_object(path_name, rpaths, deps, idpath, rel_to_orig)
-            # get the normalized paths in the mach-o binary
-            rpaths, deps, idpath = macholib_get_paths(path_name)
-            # get the mapping of paths in old prefix to path in new prefix
-            paths_to_paths = macho_find_paths(
-                rpaths, deps, idpath, old_layout_root, prefix_to_prefix
-            )
-            # replace the old paths with new paths
-            modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
-            # get the new normalized path in the mach-o binary
-            rpaths, deps, idpath = macholib_get_paths(path_name)
-            # get the mapping of paths to relative paths in the new prefix
-            paths_to_paths = macho_make_paths_relative(
-                path_name, new_layout_root, rpaths, deps, idpath
-            )
-            # replace the new paths with relativized paths in the new prefix
-            modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
-        else:
-            # get the paths in the old prefix
-            rpaths, deps, idpath = macholib_get_paths(path_name)
-            # get the mapping of paths in the old prerix to the new prefix
-            paths_to_paths = macho_find_paths(
-                rpaths, deps, idpath, old_layout_root, prefix_to_prefix
-            )
-            # replace the old paths with new paths
-            modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
+        # get the paths in the old prefix
+        rpaths, deps, idpath = _macholib_get_paths(path_name)
+        # get the mapping of paths in the old prerix to the new prefix
+        paths_to_paths = _macho_find_paths(rpaths, deps, idpath, prefix_to_prefix)
+        # replace the old paths with new paths
+        _modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
 
 
-def _transform_rpaths(orig_rpaths, orig_root, new_prefixes):
-    """Return an updated list of RPATHs where each entry in the original list
-    starting with the old root is relocated to another place according to the
-    mapping passed as argument.
-
-    Args:
-        orig_rpaths (list): list of the original RPATHs
-        orig_root (str): original root to be substituted
-        new_prefixes (dict): dictionary that maps the original prefixes to
-            where they should be relocated
-
-    Returns:
-        List of paths
-    """
-    new_rpaths = []
-    for orig_rpath in orig_rpaths:
-        # If the original RPATH doesn't start with the target root
-        # append it verbatim and proceed
-        if not orig_rpath.startswith(orig_root):
-            new_rpaths.append(orig_rpath)
-            continue
-
-        # Otherwise inspect the mapping and transform + append any prefix
-        # that starts with a registered key
-        # avoiding duplicates
-        for old_prefix, new_prefix in new_prefixes.items():
-            if orig_rpath.startswith(old_prefix):
-                new_rpath = re.sub(re.escape(old_prefix), new_prefix, orig_rpath)
-                if new_rpath not in new_rpaths:
-                    new_rpaths.append(new_rpath)
-    return new_rpaths
-
-
-def new_relocate_elf_binaries(binaries, prefix_to_prefix):
-    """Take a list of binaries, and an ordered dictionary of
-    prefix to prefix mapping, and update the rpaths accordingly."""
+def relocate_elf_binaries(binaries: Iterable[str], prefix_to_prefix: Dict[str, str]) -> None:
+    """Take a list of binaries, and an ordered prefix to prefix mapping, and update the rpaths
+    accordingly."""
     # Transform to binary string
-    prefix_to_prefix = OrderedDict(
-        (k.encode("utf-8"), v.encode("utf-8")) for (k, v) in prefix_to_prefix.items()
-    )
+    prefix_to_prefix_bin = {
+        k.encode("utf-8"): v.encode("utf-8") for k, v in prefix_to_prefix.items()
+    }
 
     for path in binaries:
         try:
-            elf.substitute_rpath_and_pt_interp_in_place_or_raise(path, prefix_to_prefix)
+            elf.substitute_rpath_and_pt_interp_in_place_or_raise(path, prefix_to_prefix_bin)
         except elf.ElfCStringUpdatesFailed as e:
             # Fall back to `patchelf --set-rpath ... --set-interpreter ...`
             rpaths = e.rpath.new_value.decode("utf-8").split(":") if e.rpath else []
@@ -452,105 +236,13 @@ def new_relocate_elf_binaries(binaries, prefix_to_prefix):
             _set_elf_rpaths_and_interpreter(path, rpaths=rpaths, interpreter=interpreter)
 
 
-def relocate_elf_binaries(
-    binaries, orig_root, new_root, new_prefixes, rel, orig_prefix, new_prefix
-):
-    """Relocate the binaries passed as arguments by changing their RPATHs.
-
-    Use patchelf to get the original RPATHs and then replace them with
-    rpaths in the new directory layout.
-
-    New RPATHs are determined from a dictionary mapping the prefixes in the
-    old directory layout to the prefixes in the new directory layout if the
-    rpath was in the old layout root, i.e. system paths are not replaced.
-
-    Args:
-        binaries (list): list of binaries that might need relocation, located
-            in the new prefix
-        orig_root (str): original root to be substituted
-        new_root (str): new root to be used, only relevant for relative RPATHs
-        new_prefixes (dict): dictionary that maps the original prefixes to
-            where they should be relocated
-        rel (bool): True if the RPATHs are relative, False if they are absolute
-        orig_prefix (str): prefix where the executable was originally located
-        new_prefix (str): prefix where we want to relocate the executable
-    """
-    for new_binary in binaries:
-        orig_rpaths = _elf_rpaths_for(new_binary)
-        # TODO: Can we deduce `rel` from the original RPATHs?
-        if rel:
-            # Get the file path in the original prefix
-            orig_binary = re.sub(re.escape(new_prefix), orig_prefix, new_binary)
-
-            # Get the normalized RPATHs in the old prefix using the file path
-            # in the orig prefix
-            orig_norm_rpaths = _normalize_relative_paths(orig_binary, orig_rpaths)
-            # Get the normalize RPATHs in the new prefix
-            new_norm_rpaths = _transform_rpaths(orig_norm_rpaths, orig_root, new_prefixes)
-            # Get the relative RPATHs in the new prefix
-            new_rpaths = _make_relative(new_binary, new_root, new_norm_rpaths)
-            # check to see if relative rpaths are changed before rewriting
-            if sorted(new_rpaths) != sorted(orig_rpaths):
-                _set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
-        else:
-            new_rpaths = _transform_rpaths(orig_rpaths, orig_root, new_prefixes)
-            _set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
-
-
-def make_link_relative(new_links, orig_links):
-    """Compute the relative target from the original link and
-    make the new link relative.
-
-    Args:
-        new_links (list): new links to be made relative
-        orig_links (list): original links
-    """
-    for new_link, orig_link in zip(new_links, orig_links):
-        target = readlink(orig_link)
-        relative_target = os.path.relpath(target, os.path.dirname(orig_link))
-        os.unlink(new_link)
-        symlink(relative_target, new_link)
-
-
-def make_macho_binaries_relative(cur_path_names, orig_path_names, old_layout_root):
-    """
-    Replace old RPATHs with paths relative to old_dir in binary files
-    """
-    if not sys.platform == "darwin":
-        return
-
-    for cur_path, orig_path in zip(cur_path_names, orig_path_names):
-        (rpaths, deps, idpath) = macholib_get_paths(cur_path)
-        paths_to_paths = macho_make_paths_relative(
-            orig_path, old_layout_root, rpaths, deps, idpath
-        )
-        modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths)
-
-
-def make_elf_binaries_relative(new_binaries, orig_binaries, orig_layout_root):
-    """Replace the original RPATHs in the new binaries making them
-    relative to the original layout root.
-
-    Args:
-        new_binaries (list): new binaries whose RPATHs is to be made relative
-        orig_binaries (list): original binaries
-        orig_layout_root (str): path to be used as a base for making
-            RPATHs relative
-    """
-    for new_binary, orig_binary in zip(new_binaries, orig_binaries):
-        orig_rpaths = _elf_rpaths_for(new_binary)
-        if orig_rpaths:
-            new_rpaths = _make_relative(orig_binary, orig_layout_root, orig_rpaths)
-            _set_elf_rpaths_and_interpreter(new_binary, new_rpaths)
-
-
-def warn_if_link_cant_be_relocated(link, target):
+def _warn_if_link_cant_be_relocated(link: str, target: str):
     if not os.path.isabs(target):
         return
-    tty.warn('Symbolic link at "{}" to "{}" cannot be relocated'.format(link, target))
+    tty.warn(f'Symbolic link at "{link}" to "{target}" cannot be relocated')
 
 
-def relocate_links(links, prefix_to_prefix):
+def relocate_links(links: Iterable[str], prefix_to_prefix: Dict[str, str]) -> None:
     """Relocate links to a new install prefix."""
     regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys()))
     for link in links:
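After this refactor the ELF entry point mirrors the Mach-O one: callers pass only the binaries and an ordered prefix-to-prefix mapping, and the relative-RPATH and layout-root arguments are gone. A hypothetical call site might look like:

    # Hypothetical mapping: most specific prefixes first, since order matters.
    prefix_to_prefix = {
        "/old/opt/spack/gcc-12/zlib-1.3": "/new/opt/spack/gcc-12/zlib-1.3",
        "/old/opt/spack": "/new/opt/spack",
    }
    # relocate_elf_binaries(["/new/opt/spack/gcc-12/zlib-1.3/lib/libz.so"], prefix_to_prefix)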
@@ -559,7 +251,7 @@ def relocate_links(links, prefix_to_prefix):
 
         # No match.
         if match is None:
-            warn_if_link_cant_be_relocated(link, old_target)
+            _warn_if_link_cant_be_relocated(link, old_target)
             continue
 
         new_target = prefix_to_prefix[match.group()] + old_target[match.end() :]
@@ -567,32 +259,32 @@ def relocate_links(links, prefix_to_prefix):
         symlink(new_target, link)
 
 
-def relocate_text(files, prefixes):
+def relocate_text(files: Iterable[str], prefix_to_prefix: PrefixToPrefix) -> None:
     """Relocate text file from the original installation prefix to the
     new prefix.
 
     Relocation also affects the the path in Spack's sbang script.
 
     Args:
-        files (list): Text files to be relocated
-        prefixes (OrderedDict): String prefixes which need to be changed
+        files: Text files to be relocated
+        prefix_to_prefix: ordered prefix to prefix mapping
     """
-    TextFilePrefixReplacer.from_strings_or_bytes(prefixes).apply(files)
+    TextFilePrefixReplacer.from_strings_or_bytes(prefix_to_prefix).apply(files)
 
 
-def relocate_text_bin(binaries, prefixes):
+def relocate_text_bin(binaries: Iterable[str], prefix_to_prefix: PrefixToPrefix) -> List[str]:
     """Replace null terminated path strings hard-coded into binaries.
 
     The new install prefix must be shorter than the original one.
 
     Args:
-        binaries (list): binaries to be relocated
-        prefixes (OrderedDict): String prefixes which need to be changed.
+        binaries: paths to binaries to be relocated
+        prefix_to_prefix: ordered prefix to prefix mapping
 
     Raises:
         spack.relocate_text.BinaryTextReplaceError: when the new path is longer than the old path
     """
-    return BinaryFilePrefixReplacer.from_strings_or_bytes(prefixes).apply(binaries)
+    return BinaryFilePrefixReplacer.from_strings_or_bytes(prefix_to_prefix).apply(binaries)
 
 
 def is_macho_magic(magic: bytes) -> bool:
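One reason relocate_text_bin requires the new prefix to be no longer than the old one: inside a binary, paths live in fixed-size, null-terminated C strings, so a replacement can only be padded with NULs, never extended. A toy illustration of that constraint:

    def replace_prefix_in_cstring(blob: bytes, old: bytes, new: bytes) -> bytes:
        # Swap the prefix of the null-terminated string that starts with `old`
        # and pad with NULs so the overall length of the blob is unchanged.
        assert len(new) <= len(old), "new prefix must not be longer than the old one"
        start = blob.index(old)
        end = blob.index(b"\x00", start)
        replaced = new + blob[start + len(old):end]
        return blob[:start] + replaced + b"\x00" * (end - start - len(replaced)) + blob[end:]

    data = b"prefix=/old/store/pkg/bin\x00more"
    print(replace_prefix_in_cstring(data, b"/old/store", b"/new"))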
@@ -629,7 +321,7 @@ def _exists_dir(dirname):
     return os.path.isdir(dirname)
 
 
-def is_macho_binary(path):
+def is_macho_binary(path: str) -> bool:
     try:
         with open(path, "rb") as f:
             return is_macho_magic(f.read(4))
@@ -653,7 +345,7 @@ def fixup_macos_rpath(root, filename):
         return False
 
     # Get Mach-O header commands
-    (rpath_list, deps, id_dylib) = macholib_get_paths(abspath)
+    (rpath_list, deps, id_dylib) = _macholib_get_paths(abspath)
 
     # Convert rpaths list to (name -> number of occurrences)
     add_rpaths = set()
@@ -6,64 +6,61 @@
|
|||||||
paths inside text files and binaries."""
|
paths inside text files and binaries."""
|
||||||
|
|
||||||
import re
|
import re
|
||||||
from collections import OrderedDict
|
from typing import IO, Dict, Iterable, List, Union
|
||||||
from typing import Dict, Union
|
|
||||||
|
from llnl.util.lang import PatternBytes
|
||||||
|
|
||||||
import spack.error
|
import spack.error
|
||||||
|
|
||||||
Prefix = Union[str, bytes]
|
Prefix = Union[str, bytes]
|
||||||
|
PrefixToPrefix = Union[Dict[str, str], Dict[bytes, bytes]]
|
||||||
|
|
||||||
|
|
||||||
def encode_path(p: Prefix) -> bytes:
|
def encode_path(p: Prefix) -> bytes:
|
||||||
return p if isinstance(p, bytes) else p.encode("utf-8")
|
return p if isinstance(p, bytes) else p.encode("utf-8")
|
||||||
|
|
||||||
|
|
||||||
def _prefix_to_prefix_as_bytes(prefix_to_prefix) -> Dict[bytes, bytes]:
|
def _prefix_to_prefix_as_bytes(prefix_to_prefix: PrefixToPrefix) -> Dict[bytes, bytes]:
|
||||||
return OrderedDict((encode_path(k), encode_path(v)) for (k, v) in prefix_to_prefix.items())
|
return {encode_path(k): encode_path(v) for (k, v) in prefix_to_prefix.items()}
|
||||||
|
|
||||||
|
|
||||||
def utf8_path_to_binary_regex(prefix: str):
|
def utf8_path_to_binary_regex(prefix: str) -> PatternBytes:
|
||||||
"""Create a binary regex that matches the input path in utf8"""
|
"""Create a binary regex that matches the input path in utf8"""
|
||||||
prefix_bytes = re.escape(prefix).encode("utf-8")
|
prefix_bytes = re.escape(prefix).encode("utf-8")
|
||||||
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)" % prefix_bytes)
|
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)" % prefix_bytes)
|
||||||
|
|
||||||
|
|
||||||
def _byte_strings_to_single_binary_regex(prefixes):
|
def _byte_strings_to_single_binary_regex(prefixes: Iterable[bytes]) -> PatternBytes:
|
||||||
all_prefixes = b"|".join(re.escape(p) for p in prefixes)
|
all_prefixes = b"|".join(re.escape(p) for p in prefixes)
|
||||||
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)(%s)([\\w\\-_/]*)" % all_prefixes)
|
return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)(%s)([\\w\\-_/]*)" % all_prefixes)
|
||||||
|
|
||||||
|
|
||||||
def utf8_paths_to_single_binary_regex(prefixes):
|
def utf8_paths_to_single_binary_regex(prefixes: Iterable[str]) -> PatternBytes:
|
||||||
"""Create a (binary) regex that matches any input path in utf8"""
|
"""Create a (binary) regex that matches any input path in utf8"""
|
||||||
return _byte_strings_to_single_binary_regex(p.encode("utf-8") for p in prefixes)
|
return _byte_strings_to_single_binary_regex(p.encode("utf-8") for p in prefixes)
|
||||||
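A quick, standard-library-only illustration of what the combined binary regex built above matches; the sample byte string is invented:

    import re

    prefixes = [b"/old/prefix", b"/other/prefix"]
    all_prefixes = b"|".join(re.escape(p) for p in prefixes)
    regex = re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)(%s)([\\w\\-_/]*)" % all_prefixes)

    m = regex.search(b"PKG_ROOT=/old/prefix/lib/pkgconfig\x00")
    # group(2) is the prefix that matched, group(3) the path suffix after it;
    # the lookbehind prevents matches starting in the middle of another path.
    assert m.group(2) == b"/old/prefix"
    assert m.group(3) == b"/lib/pkgconfig"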
|
|
||||||
|
|
||||||
def filter_identity_mappings(prefix_to_prefix):
|
def filter_identity_mappings(prefix_to_prefix: Dict[bytes, bytes]) -> Dict[bytes, bytes]:
|
||||||
"""Drop mappings that are not changed."""
|
"""Drop mappings that are not changed."""
|
||||||
# NOTE: we don't guard against the following case:
|
# NOTE: we don't guard against the following case:
|
||||||
# [/abc/def -> /abc/def, /abc -> /x] *will* be simplified to
|
# [/abc/def -> /abc/def, /abc -> /x] *will* be simplified to
|
||||||
# [/abc -> /x], meaning that after this simplification /abc/def will be
|
# [/abc -> /x], meaning that after this simplification /abc/def will be
|
||||||
# mapped to /x/def instead of /abc/def. This should not be a problem.
|
# mapped to /x/def instead of /abc/def. This should not be a problem.
|
||||||
return OrderedDict((k, v) for (k, v) in prefix_to_prefix.items() if k != v)
|
return {k: v for k, v in prefix_to_prefix.items() if k != v}
|
||||||
|
|
||||||
|
|
||||||
class PrefixReplacer:
|
class PrefixReplacer:
|
||||||
"""Base class for applying a prefix to prefix map
|
"""Base class for applying a prefix to prefix map to a list of binaries or text files. Derived
|
||||||
to a list of binaries or text files.
|
classes implement _apply_to_file to do the actual work, which is different when it comes to
|
||||||
Child classes implement _apply_to_file to do the
|
|
||||||
actual work, which is different when it comes to
|
|
||||||
binaries and text files."""
|
binaries and text files."""
|
||||||
|
|
||||||
def __init__(self, prefix_to_prefix: Dict[bytes, bytes]):
|
def __init__(self, prefix_to_prefix: Dict[bytes, bytes]) -> None:
|
||||||
"""
|
"""
|
||||||
Arguments:
|
Arguments:
|
||||||
|
prefix_to_prefix: An ordered mapping from prefix to prefix. The order is relevant to
|
||||||
prefix_to_prefix (OrderedDict):
|
support substring fallbacks, for example
|
||||||
|
``[("/first/sub", "/x"), ("/first", "/y")]`` will ensure /first/sub is matched and
|
||||||
A ordered mapping from prefix to prefix. The order is
|
replaced before /first.
|
||||||
relevant to support substring fallbacks, for example
|
|
||||||
[("/first/sub", "/x"), ("/first", "/y")] will ensure
|
|
||||||
/first/sub is matched and replaced before /first.
|
|
||||||
"""
|
"""
|
||||||
self.prefix_to_prefix = filter_identity_mappings(prefix_to_prefix)
|
self.prefix_to_prefix = filter_identity_mappings(prefix_to_prefix)
|
||||||
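A small sketch of why the order of prefix_to_prefix matters and what filter_identity_mappings drops; plain dicts preserve insertion order on the Python versions involved, so ordinary literals work:

    # Most specific prefix first, so "/first/sub" wins over the "/first" fallback.
    prefix_to_prefix = {
        b"/first/sub": b"/x",
        b"/first": b"/y",
        b"/same": b"/same",  # identity mapping, dropped by filter_identity_mappings
    }

    filtered = {k: v for k, v in prefix_to_prefix.items() if k != v}
    assert list(filtered) == [b"/first/sub", b"/first"]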
|
|
||||||
@@ -74,7 +71,7 @@ def is_noop(self) -> bool:
|
|||||||
or there are no prefixes to replace."""
|
or there are no prefixes to replace."""
|
||||||
return not self.prefix_to_prefix
|
return not self.prefix_to_prefix
|
||||||
|
|
||||||
def apply(self, filenames: list):
|
def apply(self, filenames: Iterable[str]) -> List[str]:
|
||||||
"""Returns a list of files that were modified"""
|
"""Returns a list of files that were modified"""
|
||||||
changed_files = []
|
changed_files = []
|
||||||
if self.is_noop:
|
if self.is_noop:
|
||||||
@@ -84,17 +81,20 @@ def apply(self, filenames: list):
|
|||||||
changed_files.append(filename)
|
changed_files.append(filename)
|
||||||
return changed_files
|
return changed_files
|
||||||
|
|
||||||
def apply_to_filename(self, filename):
|
def apply_to_filename(self, filename: str) -> bool:
|
||||||
if self.is_noop:
|
if self.is_noop:
|
||||||
return False
|
return False
|
||||||
with open(filename, "rb+") as f:
|
with open(filename, "rb+") as f:
|
||||||
return self.apply_to_file(f)
|
return self.apply_to_file(f)
|
||||||
|
|
||||||
def apply_to_file(self, f):
|
def apply_to_file(self, f: IO[bytes]) -> bool:
|
||||||
if self.is_noop:
|
if self.is_noop:
|
||||||
return False
|
return False
|
||||||
return self._apply_to_file(f)
|
return self._apply_to_file(f)
|
||||||
|
|
||||||
|
def _apply_to_file(self, f: IO) -> bool:
|
||||||
|
raise NotImplementedError("Derived classes must implement this method")
|
||||||
|
|
||||||
|
|
||||||
class TextFilePrefixReplacer(PrefixReplacer):
|
class TextFilePrefixReplacer(PrefixReplacer):
|
||||||
"""This class applies prefix to prefix mappings for relocation
|
"""This class applies prefix to prefix mappings for relocation
|
||||||
@@ -112,13 +112,11 @@ def __init__(self, prefix_to_prefix: Dict[bytes, bytes]):
|
|||||||
self.regex = _byte_strings_to_single_binary_regex(self.prefix_to_prefix.keys())
|
self.regex = _byte_strings_to_single_binary_regex(self.prefix_to_prefix.keys())
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_strings_or_bytes(
|
def from_strings_or_bytes(cls, prefix_to_prefix: PrefixToPrefix) -> "TextFilePrefixReplacer":
|
||||||
cls, prefix_to_prefix: Dict[Prefix, Prefix]
|
|
||||||
) -> "TextFilePrefixReplacer":
|
|
||||||
"""Create a TextFilePrefixReplacer from an ordered prefix to prefix map."""
|
"""Create a TextFilePrefixReplacer from an ordered prefix to prefix map."""
|
||||||
return cls(_prefix_to_prefix_as_bytes(prefix_to_prefix))
|
return cls(_prefix_to_prefix_as_bytes(prefix_to_prefix))
|
||||||
|
|
||||||
def _apply_to_file(self, f):
|
def _apply_to_file(self, f: IO) -> bool:
|
||||||
"""Text replacement implementation simply reads the entire file
|
"""Text replacement implementation simply reads the entire file
|
||||||
in memory and applies the combined regex."""
|
in memory and applies the combined regex."""
|
||||||
replacement = lambda m: m.group(1) + self.prefix_to_prefix[m.group(2)] + m.group(3)
|
replacement = lambda m: m.group(1) + self.prefix_to_prefix[m.group(2)] + m.group(3)
|
||||||
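Sketch of the whole-file text replacement this class performs, reduced to bytes and the standard library; the file contents are invented:

    import re

    prefix_to_prefix = {b"/old/prefix": b"/new/prefix"}
    pattern = re.compile(
        b"(?<![\\w\\-_/])([\\w\\-_]*?)(%s)([\\w\\-_/]*)"
        % b"|".join(re.escape(p) for p in prefix_to_prefix)
    )
    # Same shape as the replacement lambda above: keep groups 1 and 3, swap group 2.
    replacement = lambda m: m.group(1) + prefix_to_prefix[m.group(2)] + m.group(3)

    data = b"libdir=/old/prefix/lib\n"
    assert pattern.sub(replacement, data) == b"libdir=/new/prefix/lib\n"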
@@ -133,12 +131,12 @@ def _apply_to_file(self, f):
|
|||||||
|
|
||||||
|
|
||||||
class BinaryFilePrefixReplacer(PrefixReplacer):
|
class BinaryFilePrefixReplacer(PrefixReplacer):
|
||||||
def __init__(self, prefix_to_prefix, suffix_safety_size=7):
|
def __init__(self, prefix_to_prefix: Dict[bytes, bytes], suffix_safety_size: int = 7) -> None:
|
||||||
"""
|
"""
|
||||||
prefix_to_prefix (OrderedDict): OrderedDictionary where the keys are
|
prefix_to_prefix: Ordered dictionary where the keys are bytes representing the old prefixes
|
||||||
bytes representing the old prefixes and the values are the new
|
and the values are the new prefixes
|
||||||
suffix_safety_size (int): in case of null terminated strings, what size
|
suffix_safety_size: in case of null terminated strings, what size of the suffix should
|
||||||
of the suffix should remain to avoid aliasing issues?
|
remain to avoid aliasing issues?
|
||||||
"""
|
"""
|
||||||
assert suffix_safety_size >= 0
|
assert suffix_safety_size >= 0
|
||||||
super().__init__(prefix_to_prefix)
|
super().__init__(prefix_to_prefix)
|
||||||
@@ -146,17 +144,18 @@ def __init__(self, prefix_to_prefix, suffix_safety_size=7):
|
|||||||
self.regex = self.binary_text_regex(self.prefix_to_prefix.keys(), suffix_safety_size)
|
self.regex = self.binary_text_regex(self.prefix_to_prefix.keys(), suffix_safety_size)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def binary_text_regex(cls, binary_prefixes, suffix_safety_size=7):
|
def binary_text_regex(
|
||||||
"""
|
cls, binary_prefixes: Iterable[bytes], suffix_safety_size: int = 7
|
||||||
Create a regex that looks for exact matches of prefixes, and also tries to
|
) -> PatternBytes:
|
||||||
match a C-string type null terminator in a small lookahead window.
|
"""Create a regex that looks for exact matches of prefixes, and also tries to match a
|
||||||
|
C-string type null terminator in a small lookahead window.
|
||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
binary_prefixes (list): List of byte strings of prefixes to match
|
binary_prefixes: Iterable of byte strings of prefixes to match
|
||||||
suffix_safety_size (int): Sizeof the lookahed for null-terminated string.
|
suffix_safety_size: Size of the lookahead for the null-terminated string.
|
||||||
|
|
||||||
Returns: compiled regex
|
|
||||||
"""
|
"""
|
||||||
|
# Note: it's important not to use capture groups for the prefix, since it destroys
|
||||||
|
# performance due to common prefix optimization.
|
||||||
return re.compile(
|
return re.compile(
|
||||||
b"("
|
b"("
|
||||||
+ b"|".join(re.escape(p) for p in binary_prefixes)
|
+ b"|".join(re.escape(p) for p in binary_prefixes)
|
||||||
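The remainder of this pattern falls outside the lines shown in this hunk, so below is only a simplified stand-in (not the actual Spack regex) that shows the null-terminator lookahead idea: match a known prefix and, optionally, up to suffix_safety_size non-null bytes followed by a null byte.

    import re

    suffix_safety_size = 7
    prefix = re.escape(b"/old/prefix")
    regex = re.compile(b"(" + prefix + b")" + (b"([^\x00]{0,%d}\x00)?" % suffix_safety_size))

    m = regex.search(b"/old/prefix/lib/libfoo.so\x00")
    assert m.end(0) == m.end(1)  # null byte is beyond the lookahead window

    m = regex.search(b"/old/prefix\x00")
    assert m.end(0) > m.end(1)   # null terminated; group(2) ends with the null byte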
@@ -165,36 +164,34 @@ def binary_text_regex(cls, binary_prefixes, suffix_safety_size=7):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_strings_or_bytes(
|
def from_strings_or_bytes(
|
||||||
cls, prefix_to_prefix: Dict[Prefix, Prefix], suffix_safety_size: int = 7
|
cls, prefix_to_prefix: PrefixToPrefix, suffix_safety_size: int = 7
|
||||||
) -> "BinaryFilePrefixReplacer":
|
) -> "BinaryFilePrefixReplacer":
|
||||||
"""Create a BinaryFilePrefixReplacer from an ordered prefix to prefix map.
|
"""Create a BinaryFilePrefixReplacer from an ordered prefix to prefix map.
|
||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
prefix_to_prefix (OrderedDict): Ordered mapping of prefix to prefix.
|
prefix_to_prefix: Ordered mapping of prefix to prefix.
|
||||||
suffix_safety_size (int): Number of bytes to retain at the end of a C-string
|
suffix_safety_size: Number of bytes to retain at the end of a C-string to avoid binary
|
||||||
to avoid binary string-aliasing issues.
|
string-aliasing issues.
|
||||||
"""
|
"""
|
||||||
return cls(_prefix_to_prefix_as_bytes(prefix_to_prefix), suffix_safety_size)
|
return cls(_prefix_to_prefix_as_bytes(prefix_to_prefix), suffix_safety_size)
|
||||||
|
|
||||||
def _apply_to_file(self, f):
|
def _apply_to_file(self, f: IO[bytes]) -> bool:
|
||||||
"""
|
"""
|
||||||
Given a file opened in rb+ mode, apply the string replacements as
|
Given a file opened in rb+ mode, apply the string replacements as specified by an ordered
|
||||||
specified by an ordered dictionary of prefix to prefix mappings. This
|
dictionary of prefix to prefix mappings. This method takes special care of null-terminated
|
||||||
method takes special care of null-terminated C-strings. C-string constants
|
C-strings. C-string constants are problematic because compilers and linkers optimize
|
||||||
are problematic because compilers and linkers optimize readonly strings for
|
readonly strings for space by aliasing those that share a common suffix (only suffix since
|
||||||
space by aliasing those that share a common suffix (only suffix since all
|
all of them are null terminated). See https://github.com/spack/spack/pull/31739 and
|
||||||
of them are null terminated). See https://github.com/spack/spack/pull/31739
|
https://github.com/spack/spack/pull/32253 for details. Our logic matches the original
|
||||||
and https://github.com/spack/spack/pull/32253 for details. Our logic matches
|
prefix with a ``suffix_safety_size + 1`` lookahead for null bytes. If no null terminator
|
||||||
the original prefix with a ``suffix_safety_size + 1`` lookahead for null bytes.
|
is found, we simply pad with leading /, assuming that it's a long C-string; the full
|
||||||
If no null terminator is found, we simply pad with leading /, assuming that
|
C-string after replacement has a large suffix in common with its original value. If there
|
||||||
it's a long C-string; the full C-string after replacement has a large suffix
|
*is* a null terminator we can do the same as long as the replacement has a sufficiently
|
||||||
in common with its original value.
|
long common suffix with the original prefix. As a last resort when the replacement does
|
||||||
If there *is* a null terminator we can do the same as long as the replacement
|
not have a long enough common suffix, we can try to shorten the string, but this only
|
||||||
has a sufficiently long common suffix with the original prefix.
|
works if the new length is sufficiently short (typically the case when going from large
|
||||||
As a last resort when the replacement does not have a long enough common suffix,
|
padding -> normal path). If the replacement string is longer, or all of the above fails,
|
||||||
we can try to shorten the string, but this only works if the new length is
|
we error out.
|
||||||
sufficiently short (typically the case when going from large padding -> normal path)
|
|
||||||
If the replacement string is longer, or all of the above fails, we error out.
|
|
||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
f: file opened in rb+ mode
|
f: file opened in rb+ mode
|
||||||
@@ -204,11 +201,10 @@ def _apply_to_file(self, f):
|
|||||||
"""
|
"""
|
||||||
assert f.tell() == 0
|
assert f.tell() == 0
|
||||||
|
|
||||||
# We *could* read binary data in chunks to avoid loading all in memory,
|
# We *could* read binary data in chunks to avoid loading all in memory, but it's nasty to
|
||||||
# but it's nasty to deal with matches across boundaries, so let's stick to
|
# deal with matches across boundaries, so let's stick to something simple.
|
||||||
# something simple.
|
|
||||||
|
|
||||||
modified = True
|
modified = False
|
||||||
|
|
||||||
for match in self.regex.finditer(f.read()):
|
for match in self.regex.finditer(f.read()):
|
||||||
# The matching prefix (old) and its replacement (new)
|
# The matching prefix (old) and its replacement (new)
|
||||||
@@ -218,8 +214,7 @@ def _apply_to_file(self, f):
|
|||||||
# Did we find a trailing null within a N + 1 bytes window after the prefix?
|
# Did we find a trailing null within a N + 1 bytes window after the prefix?
|
||||||
null_terminated = match.end(0) > match.end(1)
|
null_terminated = match.end(0) > match.end(1)
|
||||||
|
|
||||||
# Suffix string length, excluding the null byte
|
# Suffix string length, excluding the null byte. Only makes sense if null_terminated
|
||||||
# Only makes sense if null_terminated
|
|
||||||
suffix_strlen = match.end(0) - match.end(1) - 1
|
suffix_strlen = match.end(0) - match.end(1) - 1
|
||||||
|
|
||||||
# How many bytes are we shrinking our string?
|
# How many bytes are we shrinking our string?
|
||||||
@@ -229,9 +224,9 @@ def _apply_to_file(self, f):
|
|||||||
if bytes_shorter < 0:
|
if bytes_shorter < 0:
|
||||||
raise CannotGrowString(old, new)
|
raise CannotGrowString(old, new)
|
||||||
|
|
||||||
# If we don't know whether this is a null terminated C-string (we're looking
|
# If we don't know whether this is a null terminated C-string (we're looking only N + 1
|
||||||
# only N + 1 bytes ahead), or if it is and we have a common suffix, we can
|
# bytes ahead), or if it is and we have a common suffix, we can simply pad with leading
|
||||||
# simply pad with leading dir separators.
|
# dir separators.
|
||||||
elif (
|
elif (
|
||||||
not null_terminated
|
not null_terminated
|
||||||
or suffix_strlen >= self.suffix_safety_size # == is enough, but let's be defensive
|
or suffix_strlen >= self.suffix_safety_size # == is enough, but let's be defensive
|
||||||
@@ -240,9 +235,9 @@ def _apply_to_file(self, f):
|
|||||||
):
|
):
|
||||||
replacement = b"/" * bytes_shorter + new
|
replacement = b"/" * bytes_shorter + new
|
||||||
|
|
||||||
# If it *was* null terminated, all that matters is that we can leave N bytes
|
# If it *was* null terminated, all that matters is that we can leave N bytes of old
|
||||||
# of old suffix in place. Note that > is required since we also insert an
|
# suffix in place. Note that > is required since we also insert an additional null
|
||||||
# additional null terminator.
|
# terminator.
|
||||||
elif bytes_shorter > self.suffix_safety_size:
|
elif bytes_shorter > self.suffix_safety_size:
|
||||||
replacement = new + match.group(2) # includes the trailing null
|
replacement = new + match.group(2) # includes the trailing null
|
||||||
|
|
||||||
@@ -257,22 +252,6 @@ def _apply_to_file(self, f):
|
|||||||
return modified
|
return modified
|
||||||
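A tiny worked example of the leading-slash padding rule described in the docstring above; the byte strings are invented:

    old = b"/spack/padded-prefix/pkg"        # 24 bytes
    new = b"/opt/pkg"                        #  8 bytes
    bytes_shorter = len(old) - len(new)      # 16
    replacement = b"/" * bytes_shorter + new
    assert len(replacement) == len(old)
    # The padded path still resolves to /opt/pkg, so the binary's layout and any
    # shared C-string suffix stay untouched.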
|
|
||||||
|
|
||||||
class BinaryStringReplacementError(spack.error.SpackError):
|
|
||||||
def __init__(self, file_path, old_len, new_len):
|
|
||||||
"""The size of the file changed after binary path substitution
|
|
||||||
|
|
||||||
Args:
|
|
||||||
file_path (str): file with changing size
|
|
||||||
old_len (str): original length of the file
|
|
||||||
new_len (str): length of the file after substitution
|
|
||||||
"""
|
|
||||||
super().__init__(
|
|
||||||
"Doing a binary string replacement in %s failed.\n"
|
|
||||||
"The size of the file changed from %s to %s\n"
|
|
||||||
"when it should have remanined the same." % (file_path, old_len, new_len)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class BinaryTextReplaceError(spack.error.SpackError):
|
class BinaryTextReplaceError(spack.error.SpackError):
|
||||||
def __init__(self, msg):
|
def __init__(self, msg):
|
||||||
msg += (
|
msg += (
|
||||||
@@ -284,17 +263,16 @@ def __init__(self, msg):
|
|||||||
|
|
||||||
class CannotGrowString(BinaryTextReplaceError):
|
class CannotGrowString(BinaryTextReplaceError):
|
||||||
def __init__(self, old, new):
|
def __init__(self, old, new):
|
||||||
msg = "Cannot replace {!r} with {!r} because the new prefix is longer.".format(old, new)
|
return super().__init__(
|
||||||
super().__init__(msg)
|
f"Cannot replace {old!r} with {new!r} because the new prefix is longer."
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class CannotShrinkCString(BinaryTextReplaceError):
|
class CannotShrinkCString(BinaryTextReplaceError):
|
||||||
def __init__(self, old, new, full_old_string):
|
def __init__(self, old, new, full_old_string):
|
||||||
# Just interpolate binary string to not risk issues with invalid
|
# Just interpolate binary string to not risk issues with invalid unicode, which would be
|
||||||
# unicode, which would be really bad user experience: error in error.
|
# really bad user experience: error in error. We have no clue if we actually deal with a
|
||||||
# We have no clue if we actually deal with a real C-string nor what
|
# real C-string nor what encoding it has.
|
||||||
# encoding it has.
|
super().__init__(
|
||||||
msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
|
f"Cannot replace {old!r} with {new!r} in the C-string {full_old_string!r}."
|
||||||
old, new, full_old_string
|
|
||||||
)
|
)
|
||||||
super().__init__(msg)
|
|
||||||
|
@@ -14,7 +14,6 @@
|
|||||||
import inspect
|
import inspect
|
||||||
import itertools
|
import itertools
|
||||||
import os
|
import os
|
||||||
import os.path
|
|
||||||
import random
|
import random
|
||||||
import re
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
|
@@ -3,7 +3,7 @@
|
|||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
import collections
|
import collections
|
||||||
import hashlib
|
import hashlib
|
||||||
import os.path
|
import os
|
||||||
import platform
|
import platform
|
||||||
import posixpath
|
import posixpath
|
||||||
import re
|
import re
|
||||||
|
@@ -1,7 +1,7 @@
|
|||||||
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
import os.path
|
import os
|
||||||
|
|
||||||
import spack.tengine
|
import spack.tengine
|
||||||
|
|
||||||
|
@@ -3,15 +3,11 @@
|
|||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import shutil
|
|
||||||
import tempfile
|
import tempfile
|
||||||
|
|
||||||
import spack.binary_distribution as bindist
|
import spack.binary_distribution as bindist
|
||||||
import spack.deptypes as dt
|
|
||||||
import spack.error
|
import spack.error
|
||||||
import spack.hooks
|
import spack.hooks
|
||||||
import spack.platforms
|
|
||||||
import spack.relocate as relocate
|
|
||||||
import spack.store
|
import spack.store
|
||||||
|
|
||||||
|
|
||||||
@@ -42,79 +38,11 @@ def rewire_node(spec, explicit):
|
|||||||
|
|
||||||
spack.hooks.pre_install(spec)
|
spack.hooks.pre_install(spec)
|
||||||
bindist.extract_buildcache_tarball(tarball, destination=spec.prefix)
|
bindist.extract_buildcache_tarball(tarball, destination=spec.prefix)
|
||||||
buildinfo = bindist.read_buildinfo_file(spec.prefix)
|
bindist.relocate_package(spec)
|
||||||
|
|
||||||
# compute prefix-to-prefix for every node from the build spec to the spliced
|
# run post install hooks and add to db
|
||||||
# spec
|
|
||||||
prefix_to_prefix = {spec.build_spec.prefix: spec.prefix}
|
|
||||||
build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
|
|
||||||
for s in bindist.deps_to_relocate(spec):
|
|
||||||
analog = s
|
|
||||||
if id(s) not in build_spec_ids:
|
|
||||||
analogs = [
|
|
||||||
d
|
|
||||||
for d in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD)
|
|
||||||
if s._splice_match(d, self_root=spec, other_root=spec.build_spec)
|
|
||||||
]
|
|
||||||
if analogs:
|
|
||||||
# Prefer same-name analogs and prefer higher versions
|
|
||||||
# This matches the preferences in Spec.splice, so we will find same node
|
|
||||||
analog = max(analogs, key=lambda a: (a.name == s.name, a.version))
|
|
||||||
|
|
||||||
prefix_to_prefix[analog.prefix] = s.prefix
|
|
||||||
|
|
||||||
platform = spack.platforms.by_name(spec.platform)
|
|
||||||
|
|
||||||
text_to_relocate = [
|
|
||||||
os.path.join(spec.prefix, rel_path) for rel_path in buildinfo["relocate_textfiles"]
|
|
||||||
]
|
|
||||||
if text_to_relocate:
|
|
||||||
relocate.relocate_text(files=text_to_relocate, prefixes=prefix_to_prefix)
|
|
||||||
links = [os.path.join(spec.prefix, f) for f in buildinfo["relocate_links"]]
|
|
||||||
relocate.relocate_links(links, prefix_to_prefix)
|
|
||||||
bins_to_relocate = [
|
|
||||||
os.path.join(spec.prefix, rel_path) for rel_path in buildinfo["relocate_binaries"]
|
|
||||||
]
|
|
||||||
if bins_to_relocate:
|
|
||||||
if "macho" in platform.binary_formats:
|
|
||||||
relocate.relocate_macho_binaries(
|
|
||||||
bins_to_relocate,
|
|
||||||
str(spack.store.STORE.layout.root),
|
|
||||||
str(spack.store.STORE.layout.root),
|
|
||||||
prefix_to_prefix,
|
|
||||||
False,
|
|
||||||
spec.build_spec.prefix,
|
|
||||||
spec.prefix,
|
|
||||||
)
|
|
||||||
if "elf" in platform.binary_formats:
|
|
||||||
relocate.relocate_elf_binaries(
|
|
||||||
bins_to_relocate,
|
|
||||||
str(spack.store.STORE.layout.root),
|
|
||||||
str(spack.store.STORE.layout.root),
|
|
||||||
prefix_to_prefix,
|
|
||||||
False,
|
|
||||||
spec.build_spec.prefix,
|
|
||||||
spec.prefix,
|
|
||||||
)
|
|
||||||
relocate.relocate_text_bin(binaries=bins_to_relocate, prefixes=prefix_to_prefix)
|
|
||||||
shutil.rmtree(tempdir)
|
|
||||||
install_manifest = os.path.join(
|
|
||||||
spec.prefix,
|
|
||||||
spack.store.STORE.layout.metadata_dir,
|
|
||||||
spack.store.STORE.layout.manifest_file_name,
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
os.unlink(install_manifest)
|
|
||||||
except FileNotFoundError:
|
|
||||||
pass
|
|
||||||
# Write the spliced spec into spec.json. Without this, Database.add would fail because it
|
|
||||||
# checks the spec.json in the prefix against the spec being added to look for mismatches
|
|
||||||
spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
|
|
||||||
# add to database, not sure about explicit
|
|
||||||
spack.store.STORE.db.add(spec, explicit=explicit)
|
|
||||||
|
|
||||||
# run post install hooks
|
|
||||||
spack.hooks.post_install(spec, explicit)
|
spack.hooks.post_install(spec, explicit)
|
||||||
|
spack.store.STORE.db.add(spec, explicit=explicit)
|
||||||
|
|
||||||
|
|
||||||
class RewireError(spack.error.SpackError):
|
class RewireError(spack.error.SpackError):
|
||||||
|
@@ -6,6 +6,8 @@
|
|||||||
import typing
|
import typing
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
|
import jsonschema
|
||||||
|
|
||||||
import llnl.util.lang
|
import llnl.util.lang
|
||||||
|
|
||||||
from spack.error import SpecSyntaxError
|
from spack.error import SpecSyntaxError
|
||||||
@@ -19,12 +21,8 @@ class DeprecationMessage(typing.NamedTuple):
|
|||||||
# jsonschema is imported lazily as it is heavy to import
|
# jsonschema is imported lazily as it is heavy to import
|
||||||
# and increases the start-up time
|
# and increases the start-up time
|
||||||
def _make_validator():
|
def _make_validator():
|
||||||
import jsonschema
|
|
||||||
|
|
||||||
def _validate_spec(validator, is_spec, instance, schema):
|
def _validate_spec(validator, is_spec, instance, schema):
|
||||||
"""Check if the attributes on instance are valid specs."""
|
"""Check if the attributes on instance are valid specs."""
|
||||||
import jsonschema
|
|
||||||
|
|
||||||
import spack.spec_parser
|
import spack.spec_parser
|
||||||
|
|
||||||
if not validator.is_type(instance, "object"):
|
if not validator.is_type(instance, "object"):
|
||||||
@@ -33,8 +31,8 @@ def _validate_spec(validator, is_spec, instance, schema):
|
|||||||
for spec_str in instance:
|
for spec_str in instance:
|
||||||
try:
|
try:
|
||||||
spack.spec_parser.parse(spec_str)
|
spack.spec_parser.parse(spec_str)
|
||||||
except SpecSyntaxError as e:
|
except SpecSyntaxError:
|
||||||
yield jsonschema.ValidationError(str(e))
|
yield jsonschema.ValidationError(f"the key '{spec_str}' is not a valid spec")
|
||||||
|
|
||||||
def _deprecated_properties(validator, deprecated, instance, schema):
|
def _deprecated_properties(validator, deprecated, instance, schema):
|
||||||
if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):
|
if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):
|
||||||
@@ -67,7 +65,7 @@ def _deprecated_properties(validator, deprecated, instance, schema):
|
|||||||
yield jsonschema.ValidationError("\n".join(errors))
|
yield jsonschema.ValidationError("\n".join(errors))
|
||||||
|
|
||||||
return jsonschema.validators.extend(
|
return jsonschema.validators.extend(
|
||||||
jsonschema.Draft4Validator,
|
jsonschema.Draft7Validator,
|
||||||
{"validate_spec": _validate_spec, "deprecatedProperties": _deprecated_properties},
|
{"validate_spec": _validate_spec, "deprecatedProperties": _deprecated_properties},
|
||||||
)
|
)
|
||||||
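For context, a minimal self-contained example of extending a Draft 7 validator with a custom keyword, in the same spirit as the validate_spec / deprecatedProperties keywords above; the keyword name and schema here are made up:

    import jsonschema

    def _must_be_upper(validator, required, instance, schema):
        # Custom keyword: when true, string instances must be uppercase.
        if required and validator.is_type(instance, "string") and not instance.isupper():
            yield jsonschema.ValidationError(f"'{instance}' is not uppercase")

    Validator = jsonschema.validators.extend(
        jsonschema.Draft7Validator, {"mustBeUpper": _must_be_upper}
    )

    errors = list(Validator({"mustBeUpper": True}).iter_errors("not upper"))
    assert len(errors) == 1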
|
|
||||||
|
@@ -19,7 +19,7 @@
|
|||||||
"items": {
|
"items": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"properties": {"when": {"type": "string"}},
|
"properties": {"when": {"type": "string"}},
|
||||||
"patternProperties": {r"^(?!when$)\w*": spec_list_schema},
|
"additionalProperties": spec_list_schema,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
lib/spack/spack/schema/env_vars.py (new file, 22 lines)
@@ -0,0 +1,22 @@
|
|||||||
|
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
"""Schema for env_vars.yaml configuration file.
|
||||||
|
|
||||||
|
.. literalinclude:: _spack_root/lib/spack/spack/schema/env_vars.py
|
||||||
|
:lines: 15-
|
||||||
|
"""
|
||||||
|
from typing import Any, Dict
|
||||||
|
|
||||||
|
import spack.schema.environment
|
||||||
|
|
||||||
|
properties: Dict[str, Any] = {"env_vars": spack.schema.environment.definition}
|
||||||
|
|
||||||
|
#: Full schema with metadata
|
||||||
|
schema = {
|
||||||
|
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||||
|
"title": "Spack env_vars configuration file schema",
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": False,
|
||||||
|
"properties": properties,
|
||||||
|
}
|
@@ -20,6 +20,7 @@
|
|||||||
import spack.schema.container
|
import spack.schema.container
|
||||||
import spack.schema.definitions
|
import spack.schema.definitions
|
||||||
import spack.schema.develop
|
import spack.schema.develop
|
||||||
|
import spack.schema.env_vars
|
||||||
import spack.schema.mirrors
|
import spack.schema.mirrors
|
||||||
import spack.schema.modules
|
import spack.schema.modules
|
||||||
import spack.schema.packages
|
import spack.schema.packages
|
||||||
@@ -38,6 +39,7 @@
|
|||||||
spack.schema.ci.properties,
|
spack.schema.ci.properties,
|
||||||
spack.schema.definitions.properties,
|
spack.schema.definitions.properties,
|
||||||
spack.schema.develop.properties,
|
spack.schema.develop.properties,
|
||||||
|
spack.schema.env_vars.properties,
|
||||||
spack.schema.mirrors.properties,
|
spack.schema.mirrors.properties,
|
||||||
spack.schema.modules.properties,
|
spack.schema.modules.properties,
|
||||||
spack.schema.packages.properties,
|
spack.schema.packages.properties,
|
||||||
|
@@ -9,6 +9,8 @@
|
|||||||
"""
|
"""
|
||||||
from typing import Any, Dict
|
from typing import Any, Dict
|
||||||
|
|
||||||
|
import jsonschema
|
||||||
|
|
||||||
#: Common properties for connection specification
|
#: Common properties for connection specification
|
||||||
connection = {
|
connection = {
|
||||||
"url": {"type": "string"},
|
"url": {"type": "string"},
|
||||||
@@ -102,8 +104,6 @@
|
|||||||
|
|
||||||
|
|
||||||
def update(data):
|
def update(data):
|
||||||
import jsonschema
|
|
||||||
|
|
||||||
errors = []
|
errors = []
|
||||||
|
|
||||||
def check_access_pair(name, section):
|
def check_access_pair(name, section):
|
||||||
|
@@ -12,22 +12,6 @@
|
|||||||
import spack.schema.environment
|
import spack.schema.environment
|
||||||
import spack.schema.projections
|
import spack.schema.projections
|
||||||
|
|
||||||
#: Matches a spec or a multi-valued variant but not another
|
|
||||||
#: valid keyword.
|
|
||||||
#:
|
|
||||||
#: THIS NEEDS TO BE UPDATED FOR EVERY NEW KEYWORD THAT
|
|
||||||
#: IS ADDED IMMEDIATELY BELOW THE MODULE TYPE ATTRIBUTE
|
|
||||||
spec_regex = (
|
|
||||||
r"(?!hierarchy|core_specs|verbose|hash_length|defaults|filter_hierarchy_specs|hide|"
|
|
||||||
r"include|exclude|projections|naming_scheme|core_compilers|all)(^\w[\w-]*)"
|
|
||||||
)
|
|
||||||
|
|
||||||
#: Matches a valid name for a module set
|
|
||||||
valid_module_set_name = r"^(?!prefix_inspections$)\w[\w-]*$"
|
|
||||||
|
|
||||||
#: Matches an anonymous spec, i.e. a spec without a root name
|
|
||||||
anonymous_spec_regex = r"^[\^@%+~]"
|
|
||||||
|
|
||||||
#: Definitions for parts of module schema
|
#: Definitions for parts of module schema
|
||||||
array_of_strings = {"type": "array", "default": [], "items": {"type": "string"}}
|
array_of_strings = {"type": "array", "default": [], "items": {"type": "string"}}
|
||||||
|
|
||||||
@@ -56,7 +40,7 @@
|
|||||||
"suffixes": {
|
"suffixes": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"validate_spec": True,
|
"validate_spec": True,
|
||||||
"patternProperties": {r"\w[\w-]*": {"type": "string"}}, # key
|
"additionalProperties": {"type": "string"}, # key
|
||||||
},
|
},
|
||||||
"environment": spack.schema.environment.definition,
|
"environment": spack.schema.environment.definition,
|
||||||
},
|
},
|
||||||
@@ -64,34 +48,40 @@
|
|||||||
|
|
||||||
projections_scheme = spack.schema.projections.properties["projections"]
|
projections_scheme = spack.schema.projections.properties["projections"]
|
||||||
|
|
||||||
module_type_configuration = {
|
module_type_configuration: Dict = {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"default": {},
|
"default": {},
|
||||||
"allOf": [
|
"validate_spec": True,
|
||||||
{
|
"properties": {
|
||||||
"properties": {
|
"verbose": {"type": "boolean", "default": False},
|
||||||
"verbose": {"type": "boolean", "default": False},
|
"hash_length": {"type": "integer", "minimum": 0, "default": 7},
|
||||||
"hash_length": {"type": "integer", "minimum": 0, "default": 7},
|
"include": array_of_strings,
|
||||||
"include": array_of_strings,
|
"exclude": array_of_strings,
|
||||||
"exclude": array_of_strings,
|
"exclude_implicits": {"type": "boolean", "default": False},
|
||||||
"exclude_implicits": {"type": "boolean", "default": False},
|
"defaults": array_of_strings,
|
||||||
"defaults": array_of_strings,
|
"hide_implicits": {"type": "boolean", "default": False},
|
||||||
"hide_implicits": {"type": "boolean", "default": False},
|
"naming_scheme": {"type": "string"},
|
||||||
"naming_scheme": {"type": "string"}, # Can we be more specific here?
|
"projections": projections_scheme,
|
||||||
"projections": projections_scheme,
|
"all": module_file_configuration,
|
||||||
"all": module_file_configuration,
|
},
|
||||||
}
|
"additionalProperties": module_file_configuration,
|
||||||
},
|
|
||||||
{
|
|
||||||
"validate_spec": True,
|
|
||||||
"patternProperties": {
|
|
||||||
spec_regex: module_file_configuration,
|
|
||||||
anonymous_spec_regex: module_file_configuration,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
tcl_configuration = module_type_configuration.copy()
|
||||||
|
|
||||||
|
lmod_configuration = module_type_configuration.copy()
|
||||||
|
lmod_configuration["properties"].update(
|
||||||
|
{
|
||||||
|
"core_compilers": array_of_strings,
|
||||||
|
"hierarchy": array_of_strings,
|
||||||
|
"core_specs": array_of_strings,
|
||||||
|
"filter_hierarchy_specs": {
|
||||||
|
"type": "object",
|
||||||
|
"validate_spec": True,
|
||||||
|
"additionalProperties": array_of_strings,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
module_config_properties = {
|
module_config_properties = {
|
||||||
"use_view": {"anyOf": [{"type": "string"}, {"type": "boolean"}]},
|
"use_view": {"anyOf": [{"type": "string"}, {"type": "boolean"}]},
|
||||||
@@ -105,31 +95,8 @@
|
|||||||
"default": [],
|
"default": [],
|
||||||
"items": {"type": "string", "enum": ["tcl", "lmod"]},
|
"items": {"type": "string", "enum": ["tcl", "lmod"]},
|
||||||
},
|
},
|
||||||
"lmod": {
|
"lmod": lmod_configuration,
|
||||||
"allOf": [
|
"tcl": tcl_configuration,
|
||||||
# Base configuration
|
|
||||||
module_type_configuration,
|
|
||||||
{
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"core_compilers": array_of_strings,
|
|
||||||
"hierarchy": array_of_strings,
|
|
||||||
"core_specs": array_of_strings,
|
|
||||||
"filter_hierarchy_specs": {
|
|
||||||
"type": "object",
|
|
||||||
"patternProperties": {spec_regex: array_of_strings},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}, # Specific lmod extensions
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"tcl": {
|
|
||||||
"allOf": [
|
|
||||||
# Base configuration
|
|
||||||
module_type_configuration,
|
|
||||||
{}, # Specific tcl extensions
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"prefix_inspections": {
|
"prefix_inspections": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"additionalProperties": False,
|
"additionalProperties": False,
|
||||||
@@ -145,7 +112,6 @@
|
|||||||
properties: Dict[str, Any] = {
|
properties: Dict[str, Any] = {
|
||||||
"modules": {
|
"modules": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"additionalProperties": False,
|
|
||||||
"properties": {
|
"properties": {
|
||||||
"prefix_inspections": {
|
"prefix_inspections": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
@@ -156,13 +122,11 @@
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"patternProperties": {
|
"additionalProperties": {
|
||||||
valid_module_set_name: {
|
"type": "object",
|
||||||
"type": "object",
|
"default": {},
|
||||||
"default": {},
|
"additionalProperties": False,
|
||||||
"additionalProperties": False,
|
"properties": module_config_properties,
|
||||||
"properties": module_config_properties,
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -98,7 +98,6 @@
|
|||||||
"packages": {
|
"packages": {
|
||||||
"type": "object",
|
"type": "object",
|
||||||
"default": {},
|
"default": {},
|
||||||
"additionalProperties": False,
|
|
||||||
"properties": {
|
"properties": {
|
||||||
"all": { # package name
|
"all": { # package name
|
||||||
"type": "object",
|
"type": "object",
|
||||||
@@ -140,58 +139,54 @@
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"patternProperties": {
|
"additionalProperties": { # package name
|
||||||
r"(?!^all$)(^\w[\w-]*)": { # package name
|
"type": "object",
|
||||||
"type": "object",
|
"default": {},
|
||||||
"default": {},
|
"additionalProperties": False,
|
||||||
"additionalProperties": False,
|
"properties": {
|
||||||
"properties": {
|
"require": requirements,
|
||||||
"require": requirements,
|
"prefer": prefer_and_conflict,
|
||||||
"prefer": prefer_and_conflict,
|
"conflict": prefer_and_conflict,
|
||||||
"conflict": prefer_and_conflict,
|
"version": {
|
||||||
"version": {
|
"type": "array",
|
||||||
"type": "array",
|
"default": [],
|
||||||
"default": [],
|
# version strings
|
||||||
# version strings
|
"items": {"anyOf": [{"type": "string"}, {"type": "number"}]},
|
||||||
"items": {"anyOf": [{"type": "string"}, {"type": "number"}]},
|
},
|
||||||
},
|
"buildable": {"type": "boolean", "default": True},
|
||||||
"buildable": {"type": "boolean", "default": True},
|
"permissions": permissions,
|
||||||
"permissions": permissions,
|
# If 'get_full_repo' is promoted to a Package-level
|
||||||
# If 'get_full_repo' is promoted to a Package-level
|
# attribute, it could be useful to set it here
|
||||||
# attribute, it could be useful to set it here
|
"package_attributes": package_attributes,
|
||||||
"package_attributes": package_attributes,
|
"variants": variants,
|
||||||
"variants": variants,
|
"externals": {
|
||||||
"externals": {
|
"type": "array",
|
||||||
"type": "array",
|
"items": {
|
||||||
"items": {
|
"type": "object",
|
||||||
"type": "object",
|
"properties": {
|
||||||
"properties": {
|
"spec": {"type": "string"},
|
||||||
"spec": {"type": "string"},
|
"prefix": {"type": "string"},
|
||||||
"prefix": {"type": "string"},
|
"modules": {"type": "array", "items": {"type": "string"}},
|
||||||
"modules": {"type": "array", "items": {"type": "string"}},
|
"extra_attributes": {
|
||||||
"extra_attributes": {
|
"type": "object",
|
||||||
"type": "object",
|
"additionalProperties": {"type": "string"},
|
||||||
"additionalProperties": True,
|
"properties": {
|
||||||
"properties": {
|
"compilers": {
|
||||||
"compilers": {
|
"type": "object",
|
||||||
"type": "object",
|
"patternProperties": {r"(^\w[\w-]*)": {"type": "string"}},
|
||||||
"patternProperties": {
|
|
||||||
r"(^\w[\w-]*)": {"type": "string"}
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"environment": spack.schema.environment.definition,
|
|
||||||
"extra_rpaths": extra_rpaths,
|
|
||||||
"implicit_rpaths": implicit_rpaths,
|
|
||||||
"flags": flags,
|
|
||||||
},
|
},
|
||||||
|
"environment": spack.schema.environment.definition,
|
||||||
|
"extra_rpaths": extra_rpaths,
|
||||||
|
"implicit_rpaths": implicit_rpaths,
|
||||||
|
"flags": flags,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"additionalProperties": True,
|
|
||||||
"required": ["spec"],
|
|
||||||
},
|
},
|
||||||
|
"additionalProperties": True,
|
||||||
|
"required": ["spec"],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -37,6 +37,7 @@
|
|||||||
import spack.package_prefs
|
import spack.package_prefs
|
||||||
import spack.platforms
|
import spack.platforms
|
||||||
import spack.repo
|
import spack.repo
|
||||||
|
import spack.solver.splicing
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.store
|
import spack.store
|
||||||
import spack.util.crypto
|
import spack.util.crypto
|
||||||
@@ -67,7 +68,7 @@
|
|||||||
|
|
||||||
GitOrStandardVersion = Union[spack.version.GitVersion, spack.version.StandardVersion]
|
GitOrStandardVersion = Union[spack.version.GitVersion, spack.version.StandardVersion]
|
||||||
|
|
||||||
TransformFunction = Callable[["spack.spec.Spec", List[AspFunction]], List[AspFunction]]
|
TransformFunction = Callable[[spack.spec.Spec, List[AspFunction]], List[AspFunction]]
|
||||||
|
|
||||||
#: Enable the addition of a runtime node
|
#: Enable the addition of a runtime node
|
||||||
WITH_RUNTIME = sys.platform != "win32"
|
WITH_RUNTIME = sys.platform != "win32"
|
||||||
@@ -127,8 +128,8 @@ def __str__(self):
|
|||||||
|
|
||||||
@contextmanager
|
@contextmanager
|
||||||
def named_spec(
|
def named_spec(
|
||||||
spec: Optional["spack.spec.Spec"], name: Optional[str]
|
spec: Optional[spack.spec.Spec], name: Optional[str]
|
||||||
) -> Iterator[Optional["spack.spec.Spec"]]:
|
) -> Iterator[Optional[spack.spec.Spec]]:
|
||||||
"""Context manager to temporarily set the name of a spec"""
|
"""Context manager to temporarily set the name of a spec"""
|
||||||
if spec is None or name is None:
|
if spec is None or name is None:
|
||||||
yield spec
|
yield spec
|
||||||
@@ -747,11 +748,11 @@ def on_model(model):
|
|||||||
class KnownCompiler(NamedTuple):
|
class KnownCompiler(NamedTuple):
|
||||||
"""Data class to collect information on compilers"""
|
"""Data class to collect information on compilers"""
|
||||||
|
|
||||||
spec: "spack.spec.Spec"
|
spec: spack.spec.Spec
|
||||||
os: str
|
os: str
|
||||||
target: str
|
target: Optional[str]
|
||||||
available: bool
|
available: bool
|
||||||
compiler_obj: Optional["spack.compiler.Compiler"]
|
compiler_obj: Optional[spack.compiler.Compiler]
|
||||||
|
|
||||||
def _key(self):
|
def _key(self):
|
||||||
return self.spec, self.os, self.target
|
return self.spec, self.os, self.target
|
||||||
@@ -1132,7 +1133,7 @@ def __init__(self, tests: bool = False):
|
|||||||
set
|
set
|
||||||
)
|
)
|
||||||
|
|
||||||
self.possible_compilers: List = []
|
self.possible_compilers: List[KnownCompiler] = []
|
||||||
self.possible_oses: Set = set()
|
self.possible_oses: Set = set()
|
||||||
self.variant_values_from_specs: Set = set()
|
self.variant_values_from_specs: Set = set()
|
||||||
self.version_constraints: Set = set()
|
self.version_constraints: Set = set()
|
||||||
@@ -1386,7 +1387,7 @@ def effect_rules(self):
|
|||||||
|
|
||||||
def define_variant(
|
def define_variant(
|
||||||
self,
|
self,
|
||||||
pkg: "Type[spack.package_base.PackageBase]",
|
pkg: Type[spack.package_base.PackageBase],
|
||||||
name: str,
|
name: str,
|
||||||
when: spack.spec.Spec,
|
when: spack.spec.Spec,
|
||||||
variant_def: vt.Variant,
|
variant_def: vt.Variant,
|
||||||
@@ -1490,7 +1491,7 @@ def define_auto_variant(self, name: str, multi: bool):
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
def variant_rules(self, pkg: "Type[spack.package_base.PackageBase]"):
|
def variant_rules(self, pkg: Type[spack.package_base.PackageBase]):
|
||||||
for name in pkg.variant_names():
|
for name in pkg.variant_names():
|
||||||
self.gen.h3(f"Variant {name} in package {pkg.name}")
|
self.gen.h3(f"Variant {name} in package {pkg.name}")
|
||||||
for when, variant_def in pkg.variant_definitions(name):
|
for when, variant_def in pkg.variant_definitions(name):
|
||||||
@@ -1681,8 +1682,8 @@ def dependency_holds(input_spec, requirements):
|
|||||||
def _gen_match_variant_splice_constraints(
|
def _gen_match_variant_splice_constraints(
|
||||||
self,
|
self,
|
||||||
pkg,
|
pkg,
|
||||||
cond_spec: "spack.spec.Spec",
|
cond_spec: spack.spec.Spec,
|
||||||
splice_spec: "spack.spec.Spec",
|
splice_spec: spack.spec.Spec,
|
||||||
hash_asp_var: "AspVar",
|
hash_asp_var: "AspVar",
|
||||||
splice_node,
|
splice_node,
|
||||||
match_variants: List[str],
|
match_variants: List[str],
|
||||||
@@ -1740,7 +1741,7 @@ def package_splice_rules(self, pkg):
|
|||||||
if any(
|
if any(
|
||||||
v in cond.variants or v in spec_to_splice.variants for v in match_variants
|
v in cond.variants or v in spec_to_splice.variants for v in match_variants
|
||||||
):
|
):
|
||||||
raise Exception(
|
raise spack.error.PackageError(
|
||||||
"Overlap between match_variants and explicitly set variants"
|
"Overlap between match_variants and explicitly set variants"
|
||||||
)
|
)
|
||||||
variant_constraints = self._gen_match_variant_splice_constraints(
|
variant_constraints = self._gen_match_variant_splice_constraints(
|
||||||
@@ -2710,7 +2711,7 @@ def setup(
|
|||||||
if env:
|
if env:
|
||||||
dev_specs = tuple(
|
dev_specs = tuple(
|
||||||
spack.spec.Spec(info["spec"]).constrained(
|
spack.spec.Spec(info["spec"]).constrained(
|
||||||
"dev_path=%s"
|
'dev_path="%s"'
|
||||||
% spack.util.path.canonicalize_path(info["path"], default_wd=env.path)
|
% spack.util.path.canonicalize_path(info["path"], default_wd=env.path)
|
||||||
)
|
)
|
||||||
for name, info in env.dev_specs.items()
|
for name, info in env.dev_specs.items()
|
||||||
@@ -2977,7 +2978,7 @@ def _specs_from_requires(self, pkg_name, section):
|
|||||||
for s in spec_group[key]:
|
for s in spec_group[key]:
|
||||||
yield _spec_with_default_name(s, pkg_name)
|
yield _spec_with_default_name(s, pkg_name)
|
||||||
|
|
||||||
def pkg_class(self, pkg_name: str) -> typing.Type["spack.package_base.PackageBase"]:
|
def pkg_class(self, pkg_name: str) -> typing.Type[spack.package_base.PackageBase]:
|
||||||
request = pkg_name
|
request = pkg_name
|
||||||
if pkg_name in self.explicitly_required_namespaces:
|
if pkg_name in self.explicitly_required_namespaces:
|
||||||
namespace = self.explicitly_required_namespaces[pkg_name]
|
namespace = self.explicitly_required_namespaces[pkg_name]
|
||||||
@@ -3096,7 +3097,7 @@ def __init__(self, configuration) -> None:
|
|||||||
|
|
||||||
self.compilers.add(candidate)
|
self.compilers.add(candidate)
|
||||||
|
|
||||||
def with_input_specs(self, input_specs: List["spack.spec.Spec"]) -> "CompilerParser":
|
def with_input_specs(self, input_specs: List[spack.spec.Spec]) -> "CompilerParser":
|
||||||
"""Accounts for input specs when building the list of possible compilers.
|
"""Accounts for input specs when building the list of possible compilers.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -3136,7 +3137,7 @@ def with_input_specs(self, input_specs: List["spack.spec.Spec"]) -> "CompilerPar
|
|||||||
|
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def add_compiler_from_concrete_spec(self, spec: "spack.spec.Spec") -> None:
|
def add_compiler_from_concrete_spec(self, spec: spack.spec.Spec) -> None:
|
||||||
"""Account for compilers that are coming from concrete specs, through reuse.
|
"""Account for compilers that are coming from concrete specs, through reuse.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -3374,14 +3375,6 @@ def consume_facts(self):
|
|||||||
self._setup.effect_rules()
|
self._setup.effect_rules()
|
||||||
|
|
||||||
|
|
||||||
# This should be a dataclass, but dataclasses don't work on Python 3.6
|
|
||||||
class Splice:
|
|
||||||
def __init__(self, splice_node: NodeArgument, child_name: str, child_hash: str):
|
|
||||||
self.splice_node = splice_node
|
|
||||||
self.child_name = child_name
|
|
||||||
self.child_hash = child_hash
|
|
||||||
|
|
||||||
|
|
||||||
class SpecBuilder:
|
class SpecBuilder:
|
||||||
"""Class with actions to rebuild a spec from ASP results."""
|
"""Class with actions to rebuild a spec from ASP results."""
|
||||||
|
|
||||||
@@ -3421,7 +3414,7 @@ def __init__(self, specs, hash_lookup=None):
|
|||||||
self._specs: Dict[NodeArgument, spack.spec.Spec] = {}
|
self._specs: Dict[NodeArgument, spack.spec.Spec] = {}
|
||||||
|
|
||||||
# Matches parent nodes to splice node
|
# Matches parent nodes to splice node
|
||||||
self._splices: Dict[NodeArgument, List[Splice]] = {}
|
self._splices: Dict[spack.spec.Spec, List[spack.solver.splicing.Splice]] = {}
|
||||||
self._result = None
|
self._result = None
|
||||||
self._command_line_specs = specs
|
self._command_line_specs = specs
|
||||||
self._flag_sources: Dict[Tuple[NodeArgument, str], Set[str]] = collections.defaultdict(
|
self._flag_sources: Dict[Tuple[NodeArgument, str], Set[str]] = collections.defaultdict(
|
||||||
@@ -3540,15 +3533,13 @@ def reorder_flags(self):
|
|||||||
)
|
)
|
||||||
cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())
|
cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())
|
||||||
|
|
||||||
for spec in self._specs.values():
|
for node, spec in self._specs.items():
|
||||||
# if bootstrapping, compiler is not in config and has no flags
|
# if bootstrapping, compiler is not in config and has no flags
|
||||||
flagmap_from_compiler = {}
|
flagmap_from_compiler = {}
|
||||||
if spec.compiler in compilers:
|
if spec.compiler in compilers:
|
||||||
flagmap_from_compiler = compilers[spec.compiler].flags
|
flagmap_from_compiler = compilers[spec.compiler].flags
|
||||||
|
|
||||||
for flag_type in spec.compiler_flags.valid_compiler_flags():
|
for flag_type in spec.compiler_flags.valid_compiler_flags():
|
||||||
node = SpecBuilder.make_node(pkg=spec.name)
|
|
||||||
|
|
||||||
ordered_flags = []
|
ordered_flags = []
|
||||||
|
|
||||||
# 1. Put compiler flags first
|
# 1. Put compiler flags first
|
||||||
@@ -3630,49 +3621,12 @@ def splice_at_hash(
|
|||||||
child_name: str,
|
child_name: str,
|
||||||
child_hash: str,
|
child_hash: str,
|
||||||
):
|
):
|
||||||
splice = Splice(splice_node, child_name=child_name, child_hash=child_hash)
|
parent_spec = self._specs[parent_node]
|
||||||
self._splices.setdefault(parent_node, []).append(splice)
|
splice_spec = self._specs[splice_node]
|
||||||
|
splice = spack.solver.splicing.Splice(
|
||||||
def _resolve_automatic_splices(self):
|
splice_spec, child_name=child_name, child_hash=child_hash
|
||||||
"""After all of the specs have been concretized, apply all immediate splices.
|
)
|
||||||
|
self._splices.setdefault(parent_spec, []).append(splice)
|
||||||
Use reverse topological order to ensure that all dependencies are resolved
|
|
||||||
before their parents, allowing for maximal sharing and minimal copying.
|
|
||||||
|
|
||||||
"""
|
|
||||||
fixed_specs = {}
|
|
||||||
|
|
||||||
# create a mapping from dag hash to an integer representing position in reverse topo order.
|
|
||||||
specs = self._specs.values()
|
|
||||||
topo_order = list(traverse.traverse_nodes(specs, order="topo", key=traverse.by_dag_hash))
|
|
||||||
topo_lookup = {spec.dag_hash(): index for index, spec in enumerate(reversed(topo_order))}
|
|
||||||
|
|
||||||
# iterate over specs, children before parents
|
|
||||||
for node, spec in sorted(self._specs.items(), key=lambda x: topo_lookup[x[1].dag_hash()]):
|
|
||||||
immediate = self._splices.get(node, [])
|
|
||||||
if not immediate and not any(
|
|
||||||
edge.spec in fixed_specs for edge in spec.edges_to_dependencies()
|
|
||||||
):
|
|
||||||
continue
|
|
||||||
new_spec = spec.copy(deps=False)
|
|
||||||
new_spec.build_spec = spec
|
|
||||||
for edge in spec.edges_to_dependencies():
|
|
||||||
depflag = edge.depflag & ~dt.BUILD
|
|
||||||
if any(edge.spec.dag_hash() == splice.child_hash for splice in immediate):
|
|
||||||
splice = [s for s in immediate if s.child_hash == edge.spec.dag_hash()][0]
|
|
||||||
new_spec.add_dependency_edge(
|
|
||||||
self._specs[splice.splice_node], depflag=depflag, virtuals=edge.virtuals
|
|
||||||
)
|
|
||||||
elif edge.spec in fixed_specs:
|
|
||||||
new_spec.add_dependency_edge(
|
|
||||||
fixed_specs[edge.spec], depflag=depflag, virtuals=edge.virtuals
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
new_spec.add_dependency_edge(
|
|
||||||
edge.spec, depflag=depflag, virtuals=edge.virtuals
|
|
||||||
)
|
|
||||||
self._specs[node] = new_spec
|
|
||||||
fixed_specs[spec] = new_spec
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def sort_fn(function_tuple) -> Tuple[int, int]:
|
def sort_fn(function_tuple) -> Tuple[int, int]:
|
||||||
@@ -3765,7 +3719,15 @@ def build_specs(self, function_tuples):
|
|||||||
for root in roots.values():
|
for root in roots.values():
|
||||||
root._finalize_concretization()
|
root._finalize_concretization()
|
||||||
|
|
||||||
self._resolve_automatic_splices()
|
# Only attempt to resolve automatic splices if the solver produced any
|
||||||
|
if self._splices:
|
||||||
|
resolved_splices = spack.solver.splicing._resolve_collected_splices(
|
||||||
|
list(self._specs.values()), self._splices
|
||||||
|
)
|
||||||
|
new_specs = {}
|
||||||
|
for node, spec in self._specs.items():
|
||||||
|
new_specs[node] = resolved_splices.get(spec, spec)
|
||||||
|
self._specs = new_specs
|
||||||
|
|
||||||
for s in self._specs.values():
|
for s in self._specs.values():
|
||||||
spack.spec.Spec.ensure_no_deprecated(s)
|
spack.spec.Spec.ensure_no_deprecated(s)
|
||||||
|
lib/spack/spack/solver/splicing.py (new file, 73 lines)
@@ -0,0 +1,73 @@
|
|||||||
|
# Copyright Spack Project Developers. See COPYRIGHT file for details.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
from functools import cmp_to_key
|
||||||
|
from typing import Dict, List, NamedTuple
|
||||||
|
|
||||||
|
import spack.deptypes as dt
|
||||||
|
from spack.spec import Spec
|
||||||
|
from spack.traverse import by_dag_hash, traverse_nodes
|
||||||
|
|
||||||
|
|
||||||
|
class Splice(NamedTuple):
|
||||||
|
#: The spec being spliced into a parent
|
||||||
|
splice_spec: Spec
|
||||||
|
#: The name of the child that splice spec is replacing
|
||||||
|
child_name: str
|
||||||
|
#: The hash of the child that `splice_spec` is replacing
|
||||||
|
child_hash: str
|
||||||
|
|
||||||
|
|
||||||
|
def _resolve_collected_splices(
|
||||||
|
specs: List[Spec], splices: Dict[Spec, List[Splice]]
|
||||||
|
) -> Dict[Spec, Spec]:
|
||||||
|
"""After all of the specs have been concretized, apply all immediate splices.
|
||||||
|
Returns a dict mapping original specs to their resolved counterparts
|
||||||
|
"""
|
||||||
|
|
||||||
|
def splice_cmp(s1: Spec, s2: Spec):
|
||||||
|
"""This function can be used to sort a list of specs such that that any
|
||||||
|
spec which will be spliced into a parent comes after the parent it will
|
||||||
|
be spliced into. This order ensures that transitive splices will be
|
||||||
|
executed in the correct order.
|
||||||
|
"""
|
||||||
|
|
||||||
|
s1_splices = splices.get(s1, [])
|
||||||
|
s2_splices = splices.get(s2, [])
|
||||||
|
if any([s2.dag_hash() == splice.splice_spec.dag_hash() for splice in s1_splices]):
|
||||||
|
return -1
|
||||||
|
elif any([s1.dag_hash() == splice.splice_spec.dag_hash() for splice in s2_splices]):
|
||||||
|
return 1
|
||||||
|
else:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
splice_order = sorted(specs, key=cmp_to_key(splice_cmp))
|
||||||
|
reverse_topo_order = reversed(
|
||||||
|
[x for x in traverse_nodes(splice_order, order="topo", key=by_dag_hash) if x in specs]
|
||||||
|
)
|
||||||
|
|
||||||
|
already_resolved: Dict[Spec, Spec] = {}
|
||||||
|
for spec in reverse_topo_order:
|
||||||
|
immediate = splices.get(spec, [])
|
||||||
|
if not immediate and not any(
|
||||||
|
edge.spec in already_resolved for edge in spec.edges_to_dependencies()
|
||||||
|
):
|
||||||
|
continue
|
||||||
|
new_spec = spec.copy(deps=False)
|
||||||
|
new_spec.clear_caches(ignore=("package_hash",))
|
||||||
|
new_spec.build_spec = spec
|
||||||
|
for edge in spec.edges_to_dependencies():
|
||||||
|
depflag = edge.depflag & ~dt.BUILD
|
||||||
|
if any(edge.spec.dag_hash() == splice.child_hash for splice in immediate):
|
||||||
|
splice = [s for s in immediate if s.child_hash == edge.spec.dag_hash()][0]
|
||||||
|
# If the spec being spliced in is also spliced
|
||||||
|
splice_spec = already_resolved.get(splice.splice_spec, splice.splice_spec)
|
||||||
|
new_spec.add_dependency_edge(splice_spec, depflag=depflag, virtuals=edge.virtuals)
|
||||||
|
elif edge.spec in already_resolved:
|
||||||
|
new_spec.add_dependency_edge(
|
||||||
|
already_resolved[edge.spec], depflag=depflag, virtuals=edge.virtuals
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
new_spec.add_dependency_edge(edge.spec, depflag=depflag, virtuals=edge.virtuals)
|
||||||
|
already_resolved[spec] = new_spec
|
||||||
|
return already_resolved
|
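The helper above applies splices child-first, so a replacement deep in the DAG propagates to every dependent above it. A toy sketch of that ordering, using plain dicts in place of Spec objects and DAG hashes (nothing below calls Spack; the names are made up):

    # Toy DAG: parent -> children; "t" depends on "h", which depends on "z".
    deps = {"t": ["h"], "h": ["z"], "z": []}
    # Requested splices: replace the child "z" of "h" with "z-new".
    splices = {"h": {"z": "z-new"}}

    already_resolved = {}
    # Visit nodes in reverse topological order (children before parents).
    for node in ["z", "h", "t"]:
        requested = splices.get(node, {})
        if not requested and not any(c in already_resolved for c in deps[node]):
            continue
        new_children = [requested.get(c, already_resolved.get(c, c)) for c in deps[node]]
        already_resolved[node] = (node, tuple(new_children))

    print(already_resolved)
    # {'h': ('h', ('z-new',)), 't': ('t', (('h', ('z-new',)),))}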
@@ -58,7 +58,21 @@
 import re
 import socket
 import warnings
-from typing import Any, Callable, Dict, Iterable, List, Match, Optional, Set, Tuple, Union
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Iterable,
+    List,
+    Match,
+    Optional,
+    Set,
+    Tuple,
+    Union,
+    overload,
+)
+
+from typing_extensions import Literal
 
 import archspec.cpu
 
@@ -72,7 +86,6 @@
 import spack
 import spack.compiler
 import spack.compilers
-import spack.config
 import spack.deptypes as dt
 import spack.error
 import spack.hash_types as ht
@@ -80,10 +93,9 @@
 import spack.platforms
 import spack.provider_index
 import spack.repo
-import spack.solver
 import spack.spec_parser
 import spack.store
-import spack.traverse as traverse
+import spack.traverse
 import spack.util.executable
 import spack.util.hash
 import spack.util.module_cmd as md
@@ -218,7 +230,7 @@ def ensure_modern_format_string(fmt: str) -> None:
 def _make_microarchitecture(name: str) -> archspec.cpu.Microarchitecture:
     if isinstance(name, archspec.cpu.Microarchitecture):
         return name
-    return archspec.cpu.TARGETS.get(name, archspec.cpu.generic_microarchitecture(name))
+    return archspec.cpu.TARGETS.get(name) or archspec.cpu.generic_microarchitecture(name)
 
 
 @lang.lazy_lexicographic_ordering
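The replacement above is behavioral, not just stylistic: dict.get(name, default) evaluates the default eagerly, so the old form constructed a generic microarchitecture even when the target was found, whereas the `or` form only builds the fallback on a miss. A minimal sketch, with a plain dict standing in for archspec.cpu.TARGETS and made-up names:

    calls = {"generic": 0}

    def make_generic(name: str) -> str:
        calls["generic"] += 1
        return f"generic({name})"

    targets = {"zen3": "uarch:zen3"}

    targets.get("zen3", make_generic("zen3"))    # old form: fallback built even on a hit
    targets.get("zen3") or make_generic("zen3")  # new form: fallback built only on a miss

    print(calls["generic"])  # 1 -- only the eager form paid for the fallback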
@@ -449,13 +461,16 @@ def _target_satisfies(self, other: "ArchSpec", strict: bool) -> bool:
         return bool(self._target_intersection(other))
 
     def _target_constrain(self, other: "ArchSpec") -> bool:
+        if self.target is None and other.target is None:
+            return False
+
         if not other._target_satisfies(self, strict=False):
             raise UnsatisfiableArchitectureSpecError(self, other)
 
-        if self.target_concrete:
+        if self._target_concrete:
             return False
 
-        elif other.target_concrete:
+        elif other._target_concrete:
             self.target = other.target
             return True
 
@@ -470,8 +485,8 @@ def _target_constrain(self, other: "ArchSpec") -> bool:
             self.target = intersection_target
             return True
 
-    def _target_intersection(self, other):
-        results = []
+    def _target_intersection(self, other: "ArchSpec") -> List[str]:
+        results: List[str] = []
 
         if not self.target or not other.target:
             return results
@@ -497,21 +512,56 @@ def _target_intersection(self, other):
                 if (not s_min or o_comp >= s_min) and (not s_max or o_comp <= s_max):
                     results.append(o_min)
                 else:
-                    # Take intersection of two ranges
-                    # Lots of comparisons needed
-                    _s_min = _make_microarchitecture(s_min)
-                    _s_max = _make_microarchitecture(s_max)
-                    _o_min = _make_microarchitecture(o_min)
-                    _o_max = _make_microarchitecture(o_max)
-
-                    n_min = s_min if _s_min >= _o_min else o_min
-                    n_max = s_max if _s_max <= _o_max else o_max
-                    _n_min = _make_microarchitecture(n_min)
-                    _n_max = _make_microarchitecture(n_max)
-                    if _n_min == _n_max:
-                        results.append(n_min)
-                    elif not n_min or not n_max or _n_min < _n_max:
-                        results.append("%s:%s" % (n_min, n_max))
+                    # Take the "min" of the two max, if there is a partial ordering.
+                    n_max = ""
+                    if s_max and o_max:
+                        _s_max = _make_microarchitecture(s_max)
+                        _o_max = _make_microarchitecture(o_max)
+                        if _s_max.family != _o_max.family:
+                            continue
+                        if _s_max <= _o_max:
+                            n_max = s_max
+                        elif _o_max < _s_max:
+                            n_max = o_max
+                        else:
+                            continue
+                    elif s_max:
+                        n_max = s_max
+                    elif o_max:
+                        n_max = o_max
+
+                    # Take the "max" of the two min.
+                    n_min = ""
+                    if s_min and o_min:
+                        _s_min = _make_microarchitecture(s_min)
+                        _o_min = _make_microarchitecture(o_min)
+                        if _s_min.family != _o_min.family:
+                            continue
+                        if _s_min >= _o_min:
+                            n_min = s_min
+                        elif _o_min > _s_min:
+                            n_min = o_min
+                        else:
+                            continue
+                    elif s_min:
+                        n_min = s_min
+                    elif o_min:
+                        n_min = o_min
+
+                    if n_min and n_max:
+                        _n_min = _make_microarchitecture(n_min)
+                        _n_max = _make_microarchitecture(n_max)
+                        if _n_min.family != _n_max.family or not _n_min <= _n_max:
+                            continue
+                        if n_min == n_max:
+                            results.append(n_min)
+                        else:
+                            results.append(f"{n_min}:{n_max}")
+                    elif n_min:
+                        results.append(f"{n_min}:")
+                    elif n_max:
+                        results.append(f":{n_max}")
+
         return results
 
     def constrain(self, other: "ArchSpec") -> bool:
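A toy illustration of the min/max bookkeeping the new range branch performs when both sides are ranges; the total order below is an assumption standing in for archspec's partial order on microarchitectures, and the names are made up:

    ORDER = ["x86_64", "nehalem", "haswell", "skylake", "icelake"]
    rank = {name: i for i, name in enumerate(ORDER)}

    def intersect(a: str, b: str) -> str:
        # Intersect two ranges written as "min:max" (either bound may be empty).
        a_min, _, a_max = a.partition(":")
        b_min, _, b_max = b.partition(":")
        n_min = max((x for x in (a_min, b_min) if x), key=rank.get, default="")  # max of the mins
        n_max = min((x for x in (a_max, b_max) if x), key=rank.get, default="")  # min of the maxes
        return n_min if n_min and n_min == n_max else f"{n_min}:{n_max}"

    print(intersect("x86_64:icelake", "skylake:"))  # skylake:icelake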
@@ -543,40 +593,35 @@ def constrain(self, other: "ArchSpec") -> bool:
 
         return constrained
 
-    def copy(self):
+    def copy(self) -> "ArchSpec":
         """Copy the current instance and returns the clone."""
         return ArchSpec(self)
 
     @property
     def concrete(self):
         """True if the spec is concrete, False otherwise"""
-        return self.platform and self.os and self.target and self.target_concrete
+        return self.platform and self.os and self.target and self._target_concrete
 
     @property
-    def target_concrete(self):
+    def _target_concrete(self) -> bool:
         """True if the target is not a range or list."""
         return (
             self.target is not None and ":" not in str(self.target) and "," not in str(self.target)
         )
 
-    def to_dict(self):
+    def to_dict(self) -> dict:
         # Generic targets represent either an architecture family (like x86_64)
         # or a custom micro-architecture
         if self.target.vendor == "generic":
             target_data = str(self.target)
         else:
             # Get rid of compiler flag information before turning the uarch into a dict
-            uarch_dict = self.target.to_dict()
-            uarch_dict.pop("compilers", None)
-            target_data = syaml.syaml_dict(uarch_dict.items())
-
-        d = syaml.syaml_dict(
-            [("platform", self.platform), ("platform_os", self.os), ("target", target_data)]
-        )
-        return syaml.syaml_dict([("arch", d)])
+            target_data = self.target.to_dict()
+            target_data.pop("compilers", None)
+        return {"arch": {"platform": self.platform, "platform_os": self.os, "target": target_data}}
 
     @staticmethod
-    def from_dict(d):
+    def from_dict(d: dict) -> "ArchSpec":
         """Import an ArchSpec from raw YAML/JSON data"""
         arch = d["arch"]
         target_name = arch["target"]
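For reference, a sketch of the plain-dict structure ArchSpec.to_dict now returns; the values are hypothetical, and a non-generic target would carry a dict (the uarch description minus its "compilers" entry) instead of the string:

    arch_dict = {
        "arch": {
            "platform": "linux",
            "platform_os": "ubuntu22.04",
            "target": "x86_64",  # plain string for generic targets
        }
    }
    print(arch_dict["arch"]["target"])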
@@ -586,13 +631,12 @@ def from_dict(d):
         return ArchSpec((arch["platform"], arch["platform_os"], target))
 
     def __str__(self):
-        return "%s-%s-%s" % (self.platform, self.os, self.target)
+        return f"{self.platform}-{self.os}-{self.target}"
 
     def __repr__(self):
-        fmt = "ArchSpec(({0.platform!r}, {0.os!r}, {1!r}))"
-        return fmt.format(self, str(self.target))
+        return f"ArchSpec(({self.platform!r}, {self.os!r}, {str(self.target)!r}))"
 
-    def __contains__(self, string):
+    def __contains__(self, string) -> bool:
         return string in str(self) or string in self.target
 
 
@@ -698,10 +742,7 @@ def _cmp_iter(self):
         yield self.versions
 
     def to_dict(self):
-        d = syaml.syaml_dict([("name", self.name)])
-        d.update(self.versions.to_dict())
-
-        return syaml.syaml_dict([("compiler", d)])
+        return {"compiler": {"name": self.name, **self.versions.to_dict()}}
 
     @staticmethod
     def from_dict(d):
@@ -1339,16 +1380,16 @@ def tree(
     depth: bool = False,
     hashes: bool = False,
     hashlen: Optional[int] = None,
-    cover: str = "nodes",
+    cover: spack.traverse.CoverType = "nodes",
     indent: int = 0,
     format: str = DEFAULT_FORMAT,
-    deptypes: Union[Tuple[str, ...], str] = "all",
+    deptypes: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
     show_types: bool = False,
     depth_first: bool = False,
    recurse_dependencies: bool = True,
     status_fn: Optional[Callable[["Spec"], InstallStatus]] = None,
     prefix: Optional[Callable[["Spec"], str]] = None,
-    key=id,
+    key: Callable[["Spec"], Any] = id,
 ) -> str:
     """Prints out specs and their dependencies, tree-formatted with indentation.
 
@@ -1380,11 +1421,16 @@ def tree(
     # reduce deptypes over all in-edges when covering nodes
     if show_types and cover == "nodes":
         deptype_lookup: Dict[str, dt.DepFlag] = collections.defaultdict(dt.DepFlag)
-        for edge in traverse.traverse_edges(specs, cover="edges", deptype=deptypes, root=False):
+        for edge in spack.traverse.traverse_edges(
+            specs, cover="edges", deptype=deptypes, root=False
+        ):
             deptype_lookup[edge.spec.dag_hash()] |= edge.depflag
 
-    for d, dep_spec in traverse.traverse_tree(
-        sorted(specs), cover=cover, deptype=deptypes, depth_first=depth_first, key=key
+    # SupportsRichComparisonT issue with List[Spec]
+    sorted_specs: List["Spec"] = sorted(specs)  # type: ignore[type-var]
+
+    for d, dep_spec in spack.traverse.traverse_tree(
+        sorted_specs, cover=cover, deptype=deptypes, depth_first=depth_first, key=key
     ):
         node = dep_spec.spec
 
@@ -1927,13 +1973,125 @@ def installed_upstream(self):
         upstream, _ = spack.store.STORE.db.query_by_spec_hash(self.dag_hash())
         return upstream
 
-    def traverse(self, **kwargs):
-        """Shorthand for :meth:`~spack.traverse.traverse_nodes`"""
-        return traverse.traverse_nodes([self], **kwargs)
+    @overload
+    def traverse(
+        self,
+        *,
+        root: bool = ...,
+        order: spack.traverse.OrderType = ...,
+        cover: spack.traverse.CoverType = ...,
+        direction: spack.traverse.DirectionType = ...,
+        deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
+        depth: Literal[False] = False,
+        key: Callable[["Spec"], Any] = ...,
+        visited: Optional[Set[Any]] = ...,
+    ) -> Iterable["Spec"]: ...
+
+    @overload
+    def traverse(
+        self,
+        *,
+        root: bool = ...,
+        order: spack.traverse.OrderType = ...,
+        cover: spack.traverse.CoverType = ...,
+        direction: spack.traverse.DirectionType = ...,
+        deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
+        depth: Literal[True],
+        key: Callable[["Spec"], Any] = ...,
+        visited: Optional[Set[Any]] = ...,
+    ) -> Iterable[Tuple[int, "Spec"]]: ...
+
+    def traverse(
+        self,
+        *,
+        root: bool = True,
+        order: spack.traverse.OrderType = "pre",
+        cover: spack.traverse.CoverType = "nodes",
+        direction: spack.traverse.DirectionType = "children",
+        deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
+        depth: bool = False,
+        key: Callable[["Spec"], Any] = id,
+        visited: Optional[Set[Any]] = None,
+    ) -> Iterable[Union["Spec", Tuple[int, "Spec"]]]:
+        """Shorthand for :meth:`~spack.traverse.traverse_nodes`"""
+        return spack.traverse.traverse_nodes(
+            [self],
+            root=root,
+            order=order,
+            cover=cover,
+            direction=direction,
+            deptype=deptype,
+            depth=depth,
+            key=key,
+            visited=visited,
+        )
 
-    def traverse_edges(self, **kwargs):
-        """Shorthand for :meth:`~spack.traverse.traverse_edges`"""
-        return traverse.traverse_edges([self], **kwargs)
+    @overload
+    def traverse_edges(
+        self,
+        *,
+        root: bool = ...,
+        order: spack.traverse.OrderType = ...,
+        cover: spack.traverse.CoverType = ...,
+        direction: spack.traverse.DirectionType = ...,
+        deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
+        depth: Literal[False] = False,
+        key: Callable[["Spec"], Any] = ...,
+        visited: Optional[Set[Any]] = ...,
+    ) -> Iterable[DependencySpec]: ...
+
+    @overload
+    def traverse_edges(
+        self,
+        *,
+        root: bool = ...,
+        order: spack.traverse.OrderType = ...,
+        cover: spack.traverse.CoverType = ...,
+        direction: spack.traverse.DirectionType = ...,
+        deptype: Union[dt.DepFlag, dt.DepTypes] = ...,
+        depth: Literal[True],
+        key: Callable[["Spec"], Any] = ...,
+        visited: Optional[Set[Any]] = ...,
+    ) -> Iterable[Tuple[int, DependencySpec]]: ...
+
+    def traverse_edges(
+        self,
+        *,
+        root: bool = True,
+        order: spack.traverse.OrderType = "pre",
+        cover: spack.traverse.CoverType = "nodes",
+        direction: spack.traverse.DirectionType = "children",
+        deptype: Union[dt.DepFlag, dt.DepTypes] = "all",
+        depth: bool = False,
+        key: Callable[["Spec"], Any] = id,
+        visited: Optional[Set[Any]] = None,
+    ) -> Iterable[Union[DependencySpec, Tuple[int, DependencySpec]]]:
+        """Shorthand for :meth:`~spack.traverse.traverse_edges`"""
+        return spack.traverse.traverse_edges(
+            [self],
+            root=root,
+            order=order,
+            cover=cover,
+            direction=direction,
+            deptype=deptype,
+            depth=depth,
+            key=key,
+            visited=visited,
+        )
+
+    @property
+    def long_spec(self):
+        """Returns a string of the spec with the dependencies completely
+        enumerated."""
+        root_str = [self.format()]
+        sorted_dependencies = sorted(
+            self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash)
+        )
+        sorted_dependencies = [
+            d.format("{edge_attributes} " + DEFAULT_FORMAT) for d in sorted_dependencies
+        ]
+        spec_str = " ^".join(root_str + sorted_dependencies)
+        return spec_str.strip()
 
     @property
     def short_spec(self):
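A brief usage sketch of what the new overloads encode for type checkers: without depth the iterators yield Spec (or DependencySpec) items, while depth=True switches them to (depth, item) tuples. The package name is hypothetical:

    import spack.concretize

    spec = spack.concretize.concretize_one("hdf5")  # assumed example package

    for dep in spec.traverse(root=False):           # Iterable[Spec]
        print(dep.name)

    for level, dep in spec.traverse(depth=True):    # Iterable[Tuple[int, Spec]]
        print("  " * level + dep.name)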
@@ -2161,9 +2319,7 @@ def to_node_dict(self, hash=ht.dag_hash):
         Arguments:
             hash (spack.hash_types.SpecHashDescriptor) type of hash to generate.
         """
-        d = syaml.syaml_dict()
-
-        d["name"] = self.name
+        d = {"name": self.name}
 
         if self.versions:
             d.update(self.versions.to_dict())
@@ -2177,7 +2333,7 @@ def to_node_dict(self, hash=ht.dag_hash):
         if self.namespace:
             d["namespace"] = self.namespace
 
-        params = syaml.syaml_dict(sorted(v.yaml_entry() for _, v in self.variants.items()))
+        params = dict(sorted(v.yaml_entry() for v in self.variants.values()))
 
         # Only need the string compiler flag for yaml file
         params.update(
@@ -2203,13 +2359,16 @@ def to_node_dict(self, hash=ht.dag_hash):
         )
 
         if self.external:
-            d["external"] = syaml.syaml_dict(
-                [
-                    ("path", self.external_path),
-                    ("module", self.external_modules),
-                    ("extra_attributes", self.extra_attributes),
-                ]
-            )
+            if self.extra_attributes:
+                extra_attributes = syaml.sorted_dict(self.extra_attributes)
+            else:
+                extra_attributes = None
+
+            d["external"] = {
+                "path": self.external_path,
+                "module": self.external_modules,
+                "extra_attributes": extra_attributes,
+            }
 
         if not self._concrete:
             d["concrete"] = False
@@ -2240,29 +2399,25 @@ def to_node_dict(self, hash=ht.dag_hash):
         # Note: Relies on sorting dict by keys later in algorithm.
         deps = self._dependencies_dict(depflag=hash.depflag)
         if deps:
-            deps_list = []
-            for name, edges_for_name in sorted(deps.items()):
-                name_tuple = ("name", name)
-                for dspec in edges_for_name:
-                    hash_tuple = (hash.name, dspec.spec._cached_hash(hash))
-                    parameters_tuple = (
-                        "parameters",
-                        syaml.syaml_dict(
-                            (
-                                ("deptypes", dt.flag_to_tuple(dspec.depflag)),
-                                ("virtuals", dspec.virtuals),
-                            )
-                        ),
-                    )
-                    ordered_entries = [name_tuple, hash_tuple, parameters_tuple]
-                    deps_list.append(syaml.syaml_dict(ordered_entries))
-            d["dependencies"] = deps_list
+            d["dependencies"] = [
+                {
+                    "name": name,
+                    hash.name: dspec.spec._cached_hash(hash),
+                    "parameters": {
+                        "deptypes": dt.flag_to_tuple(dspec.depflag),
+                        "virtuals": dspec.virtuals,
+                    },
+                }
+                for name, edges_for_name in sorted(deps.items())
+                for dspec in edges_for_name
+            ]
 
         # Name is included in case this is replacing a virtual.
         if self._build_spec:
-            d["build_spec"] = syaml.syaml_dict(
-                [("name", self.build_spec.name), (hash.name, self.build_spec._cached_hash(hash))]
-            )
+            d["build_spec"] = {
+                "name": self.build_spec.name,
+                hash.name: self.build_spec._cached_hash(hash),
+            }
         return d
 
     def to_dict(self, hash=ht.dag_hash):
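A sketch of the shape each entry of d["dependencies"] takes after this change; the names and the truncated hash are placeholders, and the structure itself is unchanged, only syaml_dict gives way to plain containers:

    dependency_entry = {
        "name": "zlib-ng",    # hypothetical dependency name
        "hash": "abcdef...",  # keyed by hash.name, value truncated here
        "parameters": {"deptypes": ("build", "link"), "virtuals": ()},
    }
    print(dependency_entry["parameters"]["deptypes"])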
@@ -2364,10 +2519,7 @@ def to_dict(self, hash=ht.dag_hash):
             node_list.append(node)
             hash_set.add(node_hash)
 
-        meta_dict = syaml.syaml_dict([("version", SPECFILE_FORMAT_VERSION)])
-        inner_dict = syaml.syaml_dict([("_meta", meta_dict), ("nodes", node_list)])
-        spec_dict = syaml.syaml_dict([("spec", inner_dict)])
-        return spec_dict
+        return {"spec": {"_meta": {"version": SPECFILE_FORMAT_VERSION}, "nodes": node_list}}
 
     def node_dict_with_hashes(self, hash=ht.dag_hash):
         """Returns a node_dict of this spec with the dag hash added. If this
@@ -2818,44 +2970,16 @@ def ensure_no_deprecated(root):
                 raise SpecDeprecatedError(msg)
 
     def concretize(self, tests: Union[bool, Iterable[str]] = False) -> None:
-        """Concretize the current spec.
-
-        Args:
-            tests: if False disregard 'test' dependencies, if a list of names activate them for
-                the packages in the list, if True activate 'test' dependencies for all packages.
-        """
-        import spack.solver.asp
-
-        self.replace_hash()
-
-        for node in self.traverse():
-            if not node.name:
-                raise spack.error.SpecError(
-                    f"Spec {node} has no name; cannot concretize an anonymous spec"
-                )
-
-        if self._concrete:
-            return
-
-        allow_deprecated = spack.config.get("config:deprecated", False)
-        solver = spack.solver.asp.Solver()
-        result = solver.solve([self], tests=tests, allow_deprecated=allow_deprecated)
-
-        # take the best answer
-        opt, i, answer = min(result.answers)
-        name = self.name
-        # TODO: Consolidate this code with similar code in solve.py
-        if self.virtual:
-            providers = [spec.name for spec in answer.values() if spec.package.provides(name)]
-            name = providers[0]
-
-        node = spack.solver.asp.SpecBuilder.make_node(pkg=name)
-        assert (
-            node in answer
-        ), f"cannot find {name} in the list of specs {','.join([n.pkg for n in answer.keys()])}"
-
-        concretized = answer[node]
-        self._dup(concretized)
+        from spack.concretize import concretize_one
+
+        warnings.warn(
+            "`Spec.concretize` is deprecated and will be removed in version 1.0.0. Use "
+            "`spack.concretize.concretize_one` instead.",
+            category=spack.error.SpackAPIWarning,
+            stacklevel=2,
+        )
+
+        self._dup(concretize_one(self, tests))
 
     def _mark_root_concrete(self, value=True):
         """Mark just this spec (not dependencies) concrete."""
@@ -2944,20 +3068,17 @@ def _finalize_concretization(self):
         for spec in self.traverse():
             spec._cached_hash(ht.dag_hash)
 
-    def concretized(self, tests: Union[bool, Iterable[str]] = False) -> "spack.spec.Spec":
-        """This is a non-destructive version of concretize().
-
-        First clones, then returns a concrete version of this package
-        without modifying this package.
-
-        Args:
-            tests (bool or list): if False disregard 'test' dependencies,
-                if a list of names activate them for the packages in the list,
-                if True activate 'test' dependencies for all packages.
-        """
-        clone = self.copy()
-        clone.concretize(tests=tests)
-        return clone
+    def concretized(self, tests: Union[bool, Iterable[str]] = False) -> "Spec":
+        from spack.concretize import concretize_one
+
+        warnings.warn(
+            "`Spec.concretized` is deprecated and will be removed in version 1.0.0. Use "
+            "`spack.concretize.concretize_one` instead.",
+            category=spack.error.SpackAPIWarning,
+            stacklevel=2,
+        )
+
+        return concretize_one(self, tests)
 
     def index(self, deptype="all"):
         """Return a dictionary that points to all the dependencies in this
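A short migration sketch for callers affected by the two deprecations above; the package name is hypothetical, and concretize_one accepts either a string or a Spec, as the updated tests below show:

    import spack.concretize
    from spack.spec import Spec

    # Before (now emits SpackAPIWarning):
    #   concrete = Spec("hdf5").concretized()
    concrete = spack.concretize.concretize_one("hdf5")

    # In-place concretization is replaced by re-binding the result:
    spec = Spec("hdf5")
    spec = spack.concretize.concretize_one(spec)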
@@ -3067,18 +3188,13 @@ def constrain(self, other, deps=True):
             if not self.variants[v].compatible(other.variants[v]):
                 raise vt.UnsatisfiableVariantSpecError(self.variants[v], other.variants[v])
 
-        # TODO: Check out the logic here
         sarch, oarch = self.architecture, other.architecture
-        if sarch is not None and oarch is not None:
-            if sarch.platform is not None and oarch.platform is not None:
-                if sarch.platform != oarch.platform:
-                    raise UnsatisfiableArchitectureSpecError(sarch, oarch)
-            if sarch.os is not None and oarch.os is not None:
-                if sarch.os != oarch.os:
-                    raise UnsatisfiableArchitectureSpecError(sarch, oarch)
-            if sarch.target is not None and oarch.target is not None:
-                if sarch.target != oarch.target:
-                    raise UnsatisfiableArchitectureSpecError(sarch, oarch)
+        if (
+            sarch is not None
+            and oarch is not None
+            and not self.architecture.intersects(other.architecture)
+        ):
+            raise UnsatisfiableArchitectureSpecError(sarch, oarch)
 
         changed = False
 
@@ -3101,18 +3217,12 @@ def constrain(self, other, deps=True):
 
         changed |= self.compiler_flags.constrain(other.compiler_flags)
 
-        old = str(self.architecture)
         sarch, oarch = self.architecture, other.architecture
-        if sarch is None or other.architecture is None:
-            self.architecture = sarch or oarch
-        else:
-            if sarch.platform is None or oarch.platform is None:
-                self.architecture.platform = sarch.platform or oarch.platform
-            if sarch.os is None or oarch.os is None:
-                sarch.os = sarch.os or oarch.os
-            if sarch.target is None or oarch.target is None:
-                sarch.target = sarch.target or oarch.target
-        changed |= str(self.architecture) != old
+        if sarch is not None and oarch is not None:
+            changed |= self.architecture.constrain(other.architecture)
+        elif oarch is not None:
+            self.architecture = oarch
+            changed = True
 
         if deps:
             changed |= self._constrain_dependencies(other)
@@ -3493,25 +3603,16 @@ def patches(self):
 
         return self._patches
 
-    def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, cleardeps=True):
-        """Copy the spec other into self. This is an overwriting
-        copy. It does not copy any dependents (parents), but by default
-        copies dependencies.
-
-        To duplicate an entire DAG, call _dup() on the root of the DAG.
+    def _dup(self, other: "Spec", deps: Union[bool, dt.DepTypes, dt.DepFlag] = True) -> bool:
+        """Copies "other" into self, by overwriting all attributes.
 
         Args:
-            other (Spec): spec to be copied onto ``self``
-            deps: if True copies all the dependencies. If
-                False copies None. If deptype/depflag, copy matching types.
-            cleardeps (bool): if True clears the dependencies of ``self``,
-                before possibly copying the dependencies of ``other`` onto
-                ``self``
+            other: spec to be copied onto ``self``
+            deps: if True copies all the dependencies. If False copies None.
+                If deptype, or depflag, copy matching types.
 
         Returns:
-            True if ``self`` changed because of the copy operation,
-            False otherwise.
-
+            True if ``self`` changed because of the copy operation, False otherwise.
         """
         # We don't count dependencies as changes here
         changed = True
@@ -3536,14 +3637,15 @@ def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, cleardeps=True):
         self.versions = other.versions.copy()
         self.architecture = other.architecture.copy() if other.architecture else None
         self.compiler = other.compiler.copy() if other.compiler else None
-        if cleardeps:
-            self._dependents = _EdgeMap(store_by_child=False)
-            self._dependencies = _EdgeMap(store_by_child=True)
         self.compiler_flags = other.compiler_flags.copy()
         self.compiler_flags.spec = self
         self.variants = other.variants.copy()
         self._build_spec = other._build_spec
 
+        # Clear dependencies
+        self._dependents = _EdgeMap(store_by_child=False)
+        self._dependencies = _EdgeMap(store_by_child=True)
+
         # FIXME: we manage _patches_in_order_of_appearance specially here
         # to keep it from leaking out of spec.py, but we should figure
         # out how to handle it more elegantly in the Variant classes.
@@ -4048,15 +4150,7 @@ def __str__(self):
         if not self._dependencies:
             return self.format()
 
-        root_str = [self.format()]
-        sorted_dependencies = sorted(
-            self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash)
-        )
-        sorted_dependencies = [
-            d.format("{edge_attributes} " + DEFAULT_FORMAT) for d in sorted_dependencies
-        ]
-        spec_str = " ^".join(root_str + sorted_dependencies)
-        return spec_str.strip()
+        return self.long_spec
 
     @property
     def colored_str(self):
@@ -4105,10 +4199,10 @@ def tree(
         depth: bool = False,
         hashes: bool = False,
         hashlen: Optional[int] = None,
-        cover: str = "nodes",
+        cover: spack.traverse.CoverType = "nodes",
         indent: int = 0,
         format: str = DEFAULT_FORMAT,
-        deptypes: Union[Tuple[str, ...], str] = "all",
+        deptypes: Union[dt.DepTypes, dt.DepFlag] = dt.ALL,
         show_types: bool = False,
         depth_first: bool = False,
         recurse_dependencies: bool = True,
@@ -4434,7 +4528,7 @@ def mask_build_deps(in_spec):
 
         return spec
 
-    def clear_caches(self, ignore=()):
+    def clear_caches(self, ignore: Tuple[str, ...] = ()) -> None:
         """
         Clears all cached hashes in a Spec, while preserving other properties.
         """
@@ -4819,9 +4913,7 @@ def from_node_dict(cls, node):
             spec.external_modules = node["external"]["module"]
             if spec.external_modules is False:
                 spec.external_modules = None
-            spec.extra_attributes = node["external"].get(
-                "extra_attributes", syaml.syaml_dict()
-            )
+            spec.extra_attributes = node["external"].get("extra_attributes", {})
 
         # specs read in are concrete unless marked abstract
         if node.get("concrete", True):
@@ -7,35 +7,14 @@
 
 import pytest
 
+import spack.concretize
 import spack.config
 import spack.deptypes as dt
-import spack.solver.asp
 from spack.installer import PackageInstaller
+from spack.solver.asp import SolverError
 from spack.spec import Spec
 
 
-class CacheManager:
-    def __init__(self, specs: List[str]) -> None:
-        self.req_specs = specs
-        self.concr_specs: List[Spec]
-        self.concr_specs = []
-
-    def __enter__(self):
-        self.concr_specs = [Spec(s).concretized() for s in self.req_specs]
-        for s in self.concr_specs:
-            PackageInstaller([s.package], fake=True, explicit=True).install()
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        for s in self.concr_specs:
-            s.package.do_uninstall()
-
-
-# MacOS and Windows only work if you pass this function pointer rather than a
-# closure
-def _mock_has_runtime_dependencies(_x):
-    return True
-
-
 def _make_specs_non_buildable(specs: List[str]):
     output_config = {}
     for spec in specs:
@@ -44,203 +23,262 @@ def _make_specs_non_buildable(specs: List[str]):
 
 
 @pytest.fixture
-def splicing_setup(mutable_database, mock_packages, monkeypatch):
-    spack.config.set("concretizer:reuse", True)
-    monkeypatch.setattr(
-        spack.solver.asp, "_has_runtime_dependencies", _mock_has_runtime_dependencies
-    )
+def install_specs(
+    mutable_database,
+    mock_packages,
+    mutable_config,
+    do_not_check_runtimes_on_reuse,
+    install_mockery,
+):
+    """Returns a function that concretizes and installs a list of abstract specs"""
+    mutable_config.set("concretizer:reuse", True)
+
+    def _impl(*specs_str):
+        concrete_specs = [Spec(s).concretized() for s in specs_str]
+        PackageInstaller([s.package for s in concrete_specs], fake=True, explicit=True).install()
+        return concrete_specs
+
+    return _impl
 
 
 def _enable_splicing():
     spack.config.set("concretizer:splice", {"automatic": True})
 
 
-def _has_build_dependency(spec: Spec, name: str):
-    return any(s.name == name for s in spec.dependencies(None, dt.BUILD))
-
-
-def test_simple_reuse(splicing_setup):
-    with CacheManager(["splice-z@1.0.0+compat"]):
-        spack.config.set("packages", _make_specs_non_buildable(["splice-z"]))
-        assert Spec("splice-z").concretized().satisfies(Spec("splice-z"))
-
-
-def test_simple_dep_reuse(splicing_setup):
-    with CacheManager(["splice-z@1.0.0+compat"]):
-        spack.config.set("packages", _make_specs_non_buildable(["splice-z"]))
-        assert Spec("splice-h@1").concretized().satisfies(Spec("splice-h@1"))
-
-
-def test_splice_installed_hash(splicing_setup):
-    cache = [
-        "splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0",
-        "splice-h@1.0.2+compat ^splice-z@1.0.0",
-    ]
-    with CacheManager(cache):
-        packages_config = _make_specs_non_buildable(["splice-t", "splice-h"])
-        spack.config.set("packages", packages_config)
-        goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0")
-        with pytest.raises(Exception):
-            goal_spec.concretized()
-        _enable_splicing()
-        assert goal_spec.concretized().satisfies(goal_spec)
-
-
-def test_splice_build_splice_node(splicing_setup):
-    with CacheManager(["splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0+compat"]):
-        spack.config.set("packages", _make_specs_non_buildable(["splice-t"]))
-        goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0+compat")
-        with pytest.raises(Exception):
-            goal_spec.concretized()
-        _enable_splicing()
-        assert goal_spec.concretized().satisfies(goal_spec)
-
-
-def test_double_splice(splicing_setup):
-    cache = [
-        "splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0+compat",
-        "splice-h@1.0.2+compat ^splice-z@1.0.1+compat",
-        "splice-z@1.0.2+compat",
-    ]
-    with CacheManager(cache):
-        freeze_builds_config = _make_specs_non_buildable(["splice-t", "splice-h", "splice-z"])
-        spack.config.set("packages", freeze_builds_config)
-        goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.2+compat")
-        with pytest.raises(Exception):
-            goal_spec.concretized()
-        _enable_splicing()
-        assert goal_spec.concretized().satisfies(goal_spec)
-
-
-# The next two tests are mirrors of one another
-def test_virtual_multi_splices_in(splicing_setup):
-    cache = [
-        "depends-on-virtual-with-abi ^virtual-abi-1",
-        "depends-on-virtual-with-abi ^virtual-abi-2",
-    ]
-    goal_specs = [
-        "depends-on-virtual-with-abi ^virtual-abi-multi abi=one",
-        "depends-on-virtual-with-abi ^virtual-abi-multi abi=two",
-    ]
-    with CacheManager(cache):
-        spack.config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"]))
-        for gs in goal_specs:
-            with pytest.raises(Exception):
-                Spec(gs).concretized()
-        _enable_splicing()
-        for gs in goal_specs:
-            assert Spec(gs).concretized().satisfies(gs)
-
-
-def test_virtual_multi_can_be_spliced(splicing_setup):
-    cache = [
-        "depends-on-virtual-with-abi ^virtual-abi-multi abi=one",
-        "depends-on-virtual-with-abi ^virtual-abi-multi abi=two",
-    ]
-    goal_specs = [
-        "depends-on-virtual-with-abi ^virtual-abi-1",
-        "depends-on-virtual-with-abi ^virtual-abi-2",
-    ]
-    with CacheManager(cache):
-        spack.config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"]))
-        with pytest.raises(Exception):
-            for gs in goal_specs:
-                Spec(gs).concretized()
-        _enable_splicing()
-        for gs in goal_specs:
-            assert Spec(gs).concretized().satisfies(gs)
-
-
-def test_manyvariant_star_matching_variant_splice(splicing_setup):
-    cache = [
-        # can_splice("manyvariants@1.0.0", when="@1.0.1", match_variants="*")
-        "depends-on-manyvariants ^manyvariants@1.0.0+a+b c=v1 d=v2",
-        "depends-on-manyvariants ^manyvariants@1.0.0~a~b c=v3 d=v3",
-    ]
-    goal_specs = [
-        Spec("depends-on-manyvariants ^manyvariants@1.0.1+a+b c=v1 d=v2"),
-        Spec("depends-on-manyvariants ^manyvariants@1.0.1~a~b c=v3 d=v3"),
-    ]
-    with CacheManager(cache):
-        freeze_build_config = {"depends-on-manyvariants": {"buildable": False}}
-        spack.config.set("packages", freeze_build_config)
-        for goal in goal_specs:
-            with pytest.raises(Exception):
-                goal.concretized()
-        _enable_splicing()
-        for goal in goal_specs:
-            assert goal.concretized().satisfies(goal)
-
-
-def test_manyvariant_limited_matching(splicing_setup):
-    cache = [
-        # can_splice("manyvariants@2.0.0+a~b", when="@2.0.1~a+b", match_variants=["c", "d"])
-        "depends-on-manyvariants@2.0 ^manyvariants@2.0.0+a~b c=v3 d=v2",
-        # can_splice("manyvariants@2.0.0 c=v1 d=v1", when="@2.0.1+a+b")
-        "depends-on-manyvariants@2.0 ^manyvariants@2.0.0~a~b c=v1 d=v1",
-    ]
-    goal_specs = [
-        Spec("depends-on-manyvariants@2.0 ^manyvariants@2.0.1~a+b c=v3 d=v2"),
-        Spec("depends-on-manyvariants@2.0 ^manyvariants@2.0.1+a+b c=v3 d=v3"),
-    ]
-    with CacheManager(cache):
-        freeze_build_config = {"depends-on-manyvariants": {"buildable": False}}
-        spack.config.set("packages", freeze_build_config)
-        for s in goal_specs:
-            with pytest.raises(Exception):
-                s.concretized()
-        _enable_splicing()
-        for s in goal_specs:
-            assert s.concretized().satisfies(s)
-
-
-def test_external_splice_same_name(splicing_setup):
-    cache = [
-        "splice-h@1.0.0 ^splice-z@1.0.0+compat",
-        "splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.1+compat",
-    ]
-    packages_yaml = {
-        "splice-z": {"externals": [{"spec": "splice-z@1.0.2+compat", "prefix": "/usr"}]}
-    }
-    goal_specs = [
-        Spec("splice-h@1.0.0 ^splice-z@1.0.2"),
-        Spec("splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.2"),
-    ]
-    with CacheManager(cache):
-        spack.config.set("packages", packages_yaml)
-        _enable_splicing()
-        for s in goal_specs:
-            assert s.concretized().satisfies(s)
-
-
-def test_spliced_build_deps_only_in_build_spec(splicing_setup):
-    cache = ["splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.0"]
-    goal_spec = Spec("splice-t@1.0 ^splice-h@1.0.2 ^splice-z@1.0.0")
-
-    with CacheManager(cache):
-        _enable_splicing()
-        concr_goal = goal_spec.concretized()
-        build_spec = concr_goal._build_spec
-        # Spec has been spliced
-        assert build_spec is not None
-        # Build spec has spliced build dependencies
-        assert _has_build_dependency(build_spec, "splice-h")
-        assert _has_build_dependency(build_spec, "splice-z")
-        # Spliced build dependencies are removed
-        assert len(concr_goal.dependencies(None, dt.BUILD)) == 0
-
-
-def test_spliced_transitive_dependency(splicing_setup):
-    cache = ["splice-depends-on-t@1.0 ^splice-h@1.0.1"]
-    goal_spec = Spec("splice-depends-on-t^splice-h@1.0.2")
-
-    with CacheManager(cache):
-        spack.config.set("packages", _make_specs_non_buildable(["splice-depends-on-t"]))
-        _enable_splicing()
-        concr_goal = goal_spec.concretized()
-        # Spec has been spliced
-        assert concr_goal._build_spec is not None
-        assert concr_goal["splice-t"]._build_spec is not None
-        assert concr_goal.satisfies(goal_spec)
-        # Spliced build dependencies are removed
-        assert len(concr_goal.dependencies(None, dt.BUILD)) == 0
+@pytest.mark.parametrize("spec_str", ["splice-z", "splice-h@1"])
+def test_spec_reuse(spec_str, install_specs, mutable_config):
+    """Tests reuse of splice-z, without splicing, as a root and as a dependency of splice-h"""
+    splice_z = install_specs("splice-z@1.0.0+compat")[0]
+    mutable_config.set("packages", _make_specs_non_buildable(["splice-z"]))
+    concrete = spack.concretize.concretize_one(spec_str)
+    assert concrete["splice-z"].satisfies(splice_z)
+
+
+@pytest.mark.regression("48578")
+def test_splice_installed_hash(install_specs, mutable_config):
+    """Tests splicing the dependency of an installed spec, for another installed spec"""
+    splice_t, splice_h = install_specs(
+        "splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0",
+        "splice-h@1.0.2+compat ^splice-z@1.0.0",
+    )
+    packages_config = _make_specs_non_buildable(["splice-t", "splice-h"])
+    mutable_config.set("packages", packages_config)
+
+    goal_spec = "splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0"
+    with pytest.raises(SolverError):
+        spack.concretize.concretize_one(goal_spec)
+    _enable_splicing()
+    concrete = spack.concretize.concretize_one(goal_spec)
+
+    # splice-t has a dependency that is changing, thus its hash should be different
+    assert concrete.dag_hash() != splice_t.dag_hash()
+    assert concrete.build_spec.satisfies(splice_t)
+    assert not concrete.satisfies(splice_t)
+
+    # splice-h is reused, so the hash should stay the same
+    assert concrete["splice-h"].satisfies(splice_h)
+    assert concrete["splice-h"].build_spec.satisfies(splice_h)
+    assert concrete["splice-h"].dag_hash() == splice_h.dag_hash()
+
+
+def test_splice_build_splice_node(install_specs, mutable_config):
+    """Tests splicing the dependency of an installed spec, for a spec that is yet to be built"""
+    splice_t = install_specs("splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0+compat")[0]
+    mutable_config.set("packages", _make_specs_non_buildable(["splice-t"]))
+
+    goal_spec = "splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0+compat"
+    with pytest.raises(SolverError):
+        spack.concretize.concretize_one(goal_spec)
+
+    _enable_splicing()
+    concrete = spack.concretize.concretize_one(goal_spec)
+
+    # splice-t has a dependency that is changing, thus its hash should be different
+    assert concrete.dag_hash() != splice_t.dag_hash()
+    assert concrete.build_spec.satisfies(splice_t)
+    assert not concrete.satisfies(splice_t)
+
+    # splice-h should be different
+    assert concrete["splice-h"].dag_hash() != splice_t["splice-h"].dag_hash()
+    assert concrete["splice-h"].build_spec.dag_hash() == concrete["splice-h"].dag_hash()
+
+
+def test_double_splice(install_specs, mutable_config):
+    """Tests splicing two dependencies of an installed spec, for other installed specs"""
+    splice_t, splice_h, splice_z = install_specs(
+        "splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0+compat",
+        "splice-h@1.0.2+compat ^splice-z@1.0.1+compat",
+        "splice-z@1.0.2+compat",
+    )
+    mutable_config.set("packages", _make_specs_non_buildable(["splice-t", "splice-h", "splice-z"]))
+
+    goal_spec = "splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.2+compat"
+    with pytest.raises(SolverError):
+        spack.concretize.concretize_one(goal_spec)
+
+    _enable_splicing()
+    concrete = spack.concretize.concretize_one(goal_spec)
+
+    # splice-t and splice-h have a dependency that is changing, thus its hash should be different
+    assert concrete.dag_hash() != splice_t.dag_hash()
+    assert concrete.build_spec.satisfies(splice_t)
+    assert not concrete.satisfies(splice_t)
+
+    assert concrete["splice-h"].dag_hash() != splice_h.dag_hash()
+    assert concrete["splice-h"].build_spec.satisfies(splice_h)
+    assert not concrete["splice-h"].satisfies(splice_h)
+
+    # splice-z is reused, so the hash should stay the same
+    assert concrete["splice-z"].dag_hash() == splice_z.dag_hash()
+
+
+@pytest.mark.parametrize(
+    "original_spec,goal_spec",
+    [
+        # `virtual-abi-1` can be spliced for `virtual-abi-multi abi=one` and vice-versa
+        (
+            "depends-on-virtual-with-abi ^virtual-abi-1",
+            "depends-on-virtual-with-abi ^virtual-abi-multi abi=one",
+        ),
+        (
+            "depends-on-virtual-with-abi ^virtual-abi-multi abi=one",
+            "depends-on-virtual-with-abi ^virtual-abi-1",
+        ),
+        # `virtual-abi-2` can be spliced for `virtual-abi-multi abi=two` and vice-versa
+        (
+            "depends-on-virtual-with-abi ^virtual-abi-2",
+            "depends-on-virtual-with-abi ^virtual-abi-multi abi=two",
+        ),
+        (
+            "depends-on-virtual-with-abi ^virtual-abi-multi abi=two",
+            "depends-on-virtual-with-abi ^virtual-abi-2",
+        ),
+    ],
+)
+def test_virtual_multi_splices_in(original_spec, goal_spec, install_specs, mutable_config):
+    """Tests that we can splice a virtual dependency with a different, but compatible, provider."""
+    original = install_specs(original_spec)[0]
+    mutable_config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"]))
+
+    with pytest.raises(SolverError):
+        spack.concretize.concretize_one(goal_spec)
+
+    _enable_splicing()
+    spliced = spack.concretize.concretize_one(goal_spec)
+
+    assert spliced.dag_hash() != original.dag_hash()
+    assert spliced.build_spec.dag_hash() == original.dag_hash()
+    assert spliced["virtual-with-abi"].name != spliced.build_spec["virtual-with-abi"].name
+
+
+@pytest.mark.parametrize(
+    "original_spec,goal_spec",
+    [
+        # can_splice("manyvariants@1.0.0", when="@1.0.1", match_variants="*")
+        (
+            "depends-on-manyvariants ^manyvariants@1.0.0+a+b c=v1 d=v2",
+            "depends-on-manyvariants ^manyvariants@1.0.1+a+b c=v1 d=v2",
+        ),
+        (
+            "depends-on-manyvariants ^manyvariants@1.0.0~a~b c=v3 d=v3",
+            "depends-on-manyvariants ^manyvariants@1.0.1~a~b c=v3 d=v3",
+        ),
+        # can_splice("manyvariants@2.0.0+a~b", when="@2.0.1~a+b", match_variants=["c", "d"])
+        (
+            "depends-on-manyvariants@2.0 ^manyvariants@2.0.0+a~b c=v3 d=v2",
+            "depends-on-manyvariants@2.0 ^manyvariants@2.0.1~a+b c=v3 d=v2",
+        ),
+        # can_splice("manyvariants@2.0.0 c=v1 d=v1", when="@2.0.1+a+b")
+        (
+            "depends-on-manyvariants@2.0 ^manyvariants@2.0.0~a~b c=v1 d=v1",
+            "depends-on-manyvariants@2.0 ^manyvariants@2.0.1+a+b c=v3 d=v3",
+        ),
+    ],
+)
+def test_manyvariant_matching_variant_splice(
+    original_spec, goal_spec, install_specs, mutable_config
+):
+    """Tests splicing with different kind of matching on variants"""
+    original = install_specs(original_spec)[0]
+    mutable_config.set("packages", {"depends-on-manyvariants": {"buildable": False}})
+
+    with pytest.raises(SolverError):
+        spack.concretize.concretize_one(goal_spec)
+
+    _enable_splicing()
+    spliced = spack.concretize.concretize_one(goal_spec)
+
+    assert spliced.dag_hash() != original.dag_hash()
+    assert spliced.build_spec.dag_hash() == original.dag_hash()
+
+    # The spliced 'manyvariants' is yet to be built
+    assert spliced["manyvariants"].dag_hash() != original["manyvariants"].dag_hash()
+    assert spliced["manyvariants"].build_spec.dag_hash() == spliced["manyvariants"].dag_hash()
+
+
+def test_external_splice_same_name(install_specs, mutable_config):
+    """Tests that externals can be spliced for non-external specs"""
+    original_splice_h, original_splice_t = install_specs(
+        "splice-h@1.0.0 ^splice-z@1.0.0+compat",
+        "splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.1+compat",
+    )
+    mutable_config.set("packages", _make_specs_non_buildable(["splice-t", "splice-h"]))
+    mutable_config.set(
+        "packages",
+        {
+            "splice-z": {
+                "externals": [{"spec": "splice-z@1.0.2+compat", "prefix": "/usr"}],
+                "buildable": False,
+            }
+        },
+    )
+
+    _enable_splicing()
+    concrete_splice_h = spack.concretize.concretize_one("splice-h@1.0.0 ^splice-z@1.0.2")
+    concrete_splice_t = spack.concretize.concretize_one(
+        "splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.2"
+    )
+
+    assert concrete_splice_h.dag_hash() != original_splice_h.dag_hash()
+    assert concrete_splice_h.build_spec.dag_hash() == original_splice_h.dag_hash()
+    assert concrete_splice_h["splice-z"].external
+
+    assert concrete_splice_t.dag_hash() != original_splice_t.dag_hash()
+    assert concrete_splice_t.build_spec.dag_hash() == original_splice_t.dag_hash()
+    assert concrete_splice_t["splice-z"].external
+
+    assert concrete_splice_t["splice-z"].dag_hash() == concrete_splice_h["splice-z"].dag_hash()
+
+
+def test_spliced_build_deps_only_in_build_spec(install_specs):
+    """Tests that build specs are not reported in the spliced spec"""
+    install_specs("splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.0")
+
+    _enable_splicing()
+    spliced = spack.concretize.concretize_one("splice-t@1.0 ^splice-h@1.0.2 ^splice-z@1.0.0")
+    build_spec = spliced.build_spec
+
+    # Spec has been spliced
+    assert build_spec.dag_hash() != spliced.dag_hash()
+    # Build spec has spliced build dependencies
+    assert build_spec.dependencies("splice-h", dt.BUILD)
+    assert build_spec.dependencies("splice-z", dt.BUILD)
+    # Spliced build dependencies are removed
+    assert len(spliced.dependencies(None, dt.BUILD)) == 0
+
+
+def test_spliced_transitive_dependency(install_specs, mutable_config):
+    """Tests that build specs are not reported, even for spliced transitive dependencies"""
+    install_specs("splice-depends-on-t@1.0 ^splice-h@1.0.1")
+    mutable_config.set("packages", _make_specs_non_buildable(["splice-depends-on-t"]))
+
+    _enable_splicing()
+    spliced = spack.concretize.concretize_one("splice-depends-on-t^splice-h@1.0.2")
+    # Spec has been spliced
+    assert spliced.build_spec.dag_hash() != spliced.dag_hash()
+    assert spliced["splice-t"].build_spec.dag_hash() != spliced["splice-t"].dag_hash()
+
+    # Spliced build dependencies are removed
+    assert len(spliced.dependencies(None, dt.BUILD)) == 0
+    assert len(spliced["splice-t"].dependencies(None, dt.BUILD)) == 0
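For readers unfamiliar with the factory-fixture pattern the new install_specs fixture uses, here is a minimal standalone pytest sketch of the same idea (illustrative names only, no Spack involved): the fixture returns a callable, so each test installs exactly the specs it needs in one line.

    import pytest

    @pytest.fixture
    def make_records():
        created = []

        def _impl(*names):
            # stand-in for "concretize and install each requested spec"
            records = [{"name": n, "installed": True} for n in names]
            created.extend(records)
            return records

        yield _impl
        created.clear()  # stand-in for the cleanup the old CacheManager.__exit__ did

    def test_uses_factory(make_records):
        a, b = make_records("splice-h", "splice-z")
        assert a["installed"] and b["installed"]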
@@ -133,5 +133,5 @@ def test_concretize_target_ranges(root_target_range, dep_target_range, result, m
         f"pkg-a %gcc@10 foobar=bar target={root_target_range} ^pkg-b target={dep_target_range}"
     )
     with spack.concretize.disable_compiler_existence_check():
-        spec.concretize()
+        spec = spack.concretize.concretize_one(spec)
     assert spec.target == spec["pkg-b"].target == result
Some files were not shown because too many files have changed in this diff.