Compare commits: hs/fix/sho...force-ever (303 commits)
.github/workflows/build-containers.yml (vendored): 18 changes
```diff
@@ -40,17 +40,17 @@ jobs:
         # 1: Platforms to build for
         # 2: Base image (e.g. ubuntu:22.04)
         dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
-                     [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
-                     [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
-                     [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
-                     [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
-                     [ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
-                     [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
-                     [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
+                     [centos-stream9, 'linux/amd64,linux/arm64', 'centos:stream9'],
+                     [leap15, 'linux/amd64,linux/arm64', 'opensuse/leap:15'],
+                     [ubuntu-focal, 'linux/amd64,linux/arm64', 'ubuntu:20.04'],
+                     [ubuntu-jammy, 'linux/amd64,linux/arm64', 'ubuntu:22.04'],
+                     [ubuntu-noble, 'linux/amd64,linux/arm64', 'ubuntu:24.04'],
+                     [almalinux8, 'linux/amd64,linux/arm64', 'almalinux:8'],
+                     [almalinux9, 'linux/amd64,linux/arm64', 'almalinux:9'],
                      [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
                      [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
-                     [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
-                     [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
+                     [fedora39, 'linux/amd64,linux/arm64', 'fedora:39'],
+                     [fedora40, 'linux/amd64,linux/arm64', 'fedora:40']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
```
.github/workflows/coverage.yml (vendored): 2 changes
```diff
@@ -29,7 +29,7 @@ jobs:
       - run: coverage xml

       - name: "Upload coverage report to CodeCov"
-        uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
+        uses: codecov/codecov-action@1e68e06f1dbfde0e4cefc87efeba9e4643565303
         with:
           verbose: true
           fail_ci_if_error: false
```
```diff
@@ -2,6 +2,6 @@ black==24.10.0
 clingo==5.7.1
 flake8==7.1.1
 isort==5.13.2
-mypy==1.8.0
+mypy==1.11.2
 types-six==1.17.0.20241205
 vermin==1.6.0
```
.github/workflows/valid-style.yml (vendored): 29 changes
```diff
@@ -20,7 +20,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
-          python-version: '3.11'
+          python-version: '3.13'
           cache: 'pip'
       - name: Install Python Packages
         run: |
@@ -39,7 +39,7 @@ jobs:
           fetch-depth: 0
       - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
         with:
-          python-version: '3.11'
+          python-version: '3.13'
           cache: 'pip'
       - name: Install Python packages
         run: |
@@ -58,7 +58,7 @@ jobs:
     secrets: inherit
     with:
       with_coverage: ${{ inputs.with_coverage }}
-      python_version: '3.11'
+      python_version: '3.13'
  # Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
  bootstrap-dev-rhel8:
    runs-on: ubuntu-latest
@@ -121,31 +121,14 @@ jobs:
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
         with:
           repository: haampie/circular-import-fighter
-          ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
+          ref: 9a2c728c97f594dec0210e7f85cb7167eaf29176
           path: circular-import-fighter
       - name: Install dependencies
         working-directory: circular-import-fighter
         run: make -j dependencies
-      - name: Problematic imports before
+      - name: Circular import check
         working-directory: circular-import-fighter
-        run: make SPACK_ROOT=../old SUFFIX=.old
-      - name: Problematic imports after
-        working-directory: circular-import-fighter
-        run: make SPACK_ROOT=../new SUFFIX=.new
-      - name: Compare import cycles
-        working-directory: circular-import-fighter
-        run: |
-          edges_before="$(head -n1 solution.old)"
-          edges_after="$(head -n1 solution.new)"
-          if [ "$edges_after" -gt "$edges_before" ]; then
-            printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
-            printf 'previously this was %s\033[0m\n' "$edges_before"
-            printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
-            printf '\033[1;97m"Problematic imports after"\033[0m.\n'
-            exit 1
-          else
-            printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
-          fi
+        run: make -j compare "SPACK_ROOT=../old ../new"

   # Further style checks from pylint
   pylint:
```
```diff
@@ -25,7 +25,6 @@ exit 1
 # The code above runs this file with our preferred python interpreter.

 import os
-import os.path
 import sys

 min_python3 = (3, 6)
```
```diff
@@ -36,7 +36,7 @@ packages:
   go-or-gccgo-bootstrap: [go-bootstrap, gcc]
   iconv: [libiconv]
   ipp: [intel-oneapi-ipp]
-  java: [openjdk, jdk, ibm-java]
+  java: [openjdk, jdk]
   jpeg: [libjpeg-turbo, libjpeg]
   lapack: [openblas, amdlibflame]
   libc: [glibc, musl]
```
```diff
@@ -73,15 +73,27 @@ packages:
     permissions:
       read: world
       write: user
   cray-fftw:
     buildable: false
   cray-libsci:
     buildable: false
   cray-mpich:
     buildable: false
   cray-mvapich2:
     buildable: false
   cray-pmi:
     buildable: false
   egl:
     buildable: false
   essl:
     buildable: false
   fujitsu-mpi:
     buildable: false
   fujitsu-ssl2:
     buildable: false
   hpcx-mpi:
     buildable: false
   mpt:
     buildable: false
   spectrum-mpi:
     buildable: false
```
```diff
@@ -170,7 +170,7 @@ bootstrapping.
 To register the mirror on the platform where it's supposed to be used run the following command(s):
 % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
 % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries
+% spack buildcache update-index /opt/bootstrap/bootstrap_cache

 This command needs to be run on a machine with internet access and the resulting folder
 has to be moved over to the air-gapped system. Once the local sources are added using the
```
```diff
@@ -272,9 +272,9 @@ often lists dependencies and the flags needed to locate them. The
 "environment variables" section lists environment variables that the
 build system uses to pass flags to the compiler and linker.

-^^^^^^^^^^^^^^^^^^^^^^^^^^
-Addings flags to configure
-^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Adding flags to configure
+^^^^^^^^^^^^^^^^^^^^^^^^^

 For most of the flags you encounter, you will want a variant to
 optionally enable/disable them. You can then optionally pass these
@@ -285,7 +285,7 @@ function like so:

     def configure_args(self):
         args = []
-
+        ...
         if self.spec.satisfies("+mpi"):
             args.append("--enable-mpi")
         else:
@@ -299,7 +299,10 @@ Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_dis
 .. code-block:: python

     def configure_args(self):
-        return [self.enable_or_disable("mpi")]
+        args = []
+        ...
+        args.extend(self.enable_or_disable("mpi"))
+        return args


 Note that we are explicitly disabling MPI support if it is not
@@ -344,7 +347,14 @@ typically used to enable or disable some feature within the package.
             default=False,
             description="Memchecker support for debugging [degrades performance]"
         )
-        config_args.extend(self.enable_or_disable("memchecker"))
+        ...
+
+    def configure_args(self):
+        args = []
+        ...
+        args.extend(self.enable_or_disable("memchecker"))
+
+        return args

 In this example, specifying the variant ``+memchecker`` will generate
 the following configuration options:
```
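Since these hunks revolve around ``enable_or_disable``, here is a minimal, self-contained sketch of the pattern the updated docs recommend. The ``Demo`` package and its variant are hypothetical, not part of this diff:

```python
# A hedged sketch (hypothetical package, illustrative names): with a variant
# named "mpi", AutotoolsPackage.enable_or_disable("mpi") returns
# ["--enable-mpi"] for +mpi specs and ["--disable-mpi"] otherwise.
from spack.package import *


class Demo(AutotoolsPackage):
    """Hypothetical package used only to illustrate enable_or_disable."""

    variant("mpi", default=False, description="Build with MPI support")

    def configure_args(self):
        args = []
        # Expands to --enable-mpi or --disable-mpi depending on the variant.
        args.extend(self.enable_or_disable("mpi"))
        return args
```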
```diff
@@ -56,13 +56,13 @@ If you look at the ``perl`` package, you'll see:

 .. code-block:: python

-    phases = ["configure", "build", "install"]
+    phases = ("configure", "build", "install")

 Similarly, ``cmake`` defines:

 .. code-block:: python

-    phases = ["bootstrap", "build", "install"]
+    phases = ("bootstrap", "build", "install")

 If we look at the ``cmake`` example, this tells Spack's ``PackageBase``
 class to run the ``bootstrap``, ``build``, and ``install`` functions
```
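To make the tuple change concrete, a small illustrative sketch of the dispatch idea behind ``phases``. This is plain Python, not Spack's actual ``PackageBase`` implementation:

```python
# Illustrative only: a class with a tuple-valued ``phases`` attribute, where
# each phase name corresponds to a same-named method invoked in order
# (mirroring the cmake example from the docs).
class BootstrapBuildInstall:
    phases = ("bootstrap", "build", "install")

    def run_phases(self):
        # Dispatch each phase to the method of the same name.
        for phase in self.phases:
            getattr(self, phase)()

    def bootstrap(self):
        print("bootstrap")

    def build(self):
        print("build")

    def install(self):
        print("install")


BootstrapBuildInstall().run_phases()
```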
```diff
@@ -25,14 +25,23 @@ These settings can be overridden in ``etc/spack/config.yaml`` or
 The location where Spack will install packages and their dependencies.
 Default is ``$spack/opt/spack``.

----------------------------------------------------
-``install_hash_length`` and ``install_path_scheme``
----------------------------------------------------
+---------------
+``projections``
+---------------

-The default Spack installation path can be very long and can create problems
-for scripts with hardcoded shebangs. Additionally, when using the Intel
-compiler, and if there is also a long list of dependencies, the compiler may
-segfault. If you see the following:
+.. warning::
+
+   Modifying projections of the install tree is strongly discouraged.
+
+By default Spack installs all packages into a unique directory relative to the install
+tree root with the following layout:
+
+.. code-block::
+
+   {architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}
+
+In very rare cases, it may be necessary to reduce the length of this path. For example,
+very old versions of the Intel compiler are known to segfault when input paths are too long:

 .. code-block:: console

@@ -40,36 +49,25 @@ segfault. If you see the following:
    ** Segmentation violation signal raised. **
    Access violation or stack overflow. Please contact Intel Support for assistance.

-it may be because variables containing dependency specs may be too long. There
-are two parameters to help with long path names. Firstly, the
-``install_hash_length`` parameter can set the length of the hash in the
-installation path from 1 to 32. The default path uses the full 32 characters.
+Another case is Python and R packages with many runtime dependencies, which can result
+in very large ``PYTHONPATH`` and ``R_LIBS`` environment variables. This can cause the
+``execve`` system call to fail with ``E2BIG``, preventing processes from starting.

-Secondly, it is also possible to modify the entire installation
-scheme. By default Spack uses
-``{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}``
-where the tokens that are available for use in this directive are the
-same as those understood by the :meth:`~spack.spec.Spec.format`
-method. Using this parameter it is possible to use a different package
-layout or reduce the depth of the installation paths. For example
+For this reason, Spack allows users to modify the installation layout through custom
+projections. For example

 .. code-block:: yaml

    config:
-     install_path_scheme: '{name}/{version}/{hash:7}'
+     install_tree:
+       root: $spack/opt/spack
+       projections:
+         all: "{name}/{version}/{hash:16}"

-would install packages into sub-directories using only the package
-name, version and a hash length of 7 characters.
+would install packages into sub-directories using only the package name, version and a
+hash length of 16 characters.

-When using either parameter to set the hash length it only affects the
-representation of the hash in the installation directory. You
-should be aware that the smaller the hash length the more likely
-naming conflicts will occur. These parameters are independent of those
-used to configure module names.
-
-.. warning:: Modifying the installation hash length or path scheme after
-             packages have been installed will prevent Spack from being
-             able to find the old installation directories.
+Notice that reducing the hash length increases the likelihood of hash collisions.

 --------------------
 ``build_stage``
```
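The projection tokens are the ones understood by ``spack.spec.Spec.format``, so a projection can be previewed from ``spack python``. A hedged sketch (the package and the printed path are illustrative):

```python
# Preview how a projection would render for a concrete spec. The format
# string matches the projection from the docs above; the output shown in
# the comment is an illustrative example, not real data.
from spack.concretize import concretize_one

spec = concretize_one("zlib")
print(spec.format("{name}/{version}/{hash:16}"))
# e.g. zlib/1.3.1/abcdef0123456789  (hash shortened to 16 characters)
```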
```diff
@@ -361,7 +361,6 @@ and the tags associated with the class of runners to build on.
 * ``.linux_neoverse_n1``
 * ``.linux_neoverse_v1``
 * ``.linux_neoverse_v2``
-* ``.linux_power``
 * ``.linux_skylake``
 * ``.linux_x86_64``
 * ``.linux_x86_64_v4``
```
```diff
@@ -543,10 +543,10 @@ With either interpreter you can run a single command:

 .. code-block:: console

-   $ spack python -c 'from spack.spec import Spec; Spec("python").concretized()'
+   $ spack python -c 'from spack.concretize import concretize_one; concretize_one("python")'
    ...

-   $ spack python -i ipython -c 'from spack.spec import Spec; Spec("python").concretized()'
+   $ spack python -i ipython -c 'from spack.concretize import concretize_one; concretize_one("python")'
    Out[1]: ...

 or a file:
```
```diff
@@ -112,6 +112,19 @@ the original but may concretize differently in the presence of different
 explicit or default configuration settings (e.g., a different version of
 Spack or for a different user account).

+Environments created from a manifest will copy any included configs
+from relative paths inside the environment. Relative paths from
+outside the environment will cause errors, and absolute paths will be
+kept absolute. For example, if ``spack.yaml`` includes:
+
+.. code-block:: yaml
+
+   spack:
+     include: [./config.yaml]
+
+then the created environment will have its own copy of the file
+``config.yaml`` copied from the location in the original environment.
+
 Create an environment from a ``spack.lock`` file using:

 .. code-block:: console
@@ -160,7 +173,7 @@ accepts. If an environment already exists then spack will simply activate it
 and ignore the create-specific flags.

 .. code-block:: console

    $ spack env activate --create -p myenv
    # ...
    # [creates if myenv does not exist yet]
```
```diff
@@ -424,8 +437,8 @@ Developing Packages in a Spack Environment

 The ``spack develop`` command allows one to develop Spack packages in
 an environment. It requires a spec containing a concrete version, and
-will configure Spack to install the package from local source.
-If a version is not provided from the command line interface then spack
+will configure Spack to install the package from local source.
+If a version is not provided from the command line interface then spack
 will automatically pick the highest version the package has defined.
 This means any infinity versions (``develop``, ``main``, ``stable``) will be
 preferred in this selection process.
@@ -435,9 +448,9 @@ set, and Spack will ensure the package and its dependents are rebuilt
 any time the environment is installed if the package's local source
 code has been modified. Spack's native implementation to check for modifications
 is to check if ``mtime`` is newer than the installation.
-A custom check can be created by overriding the ``detect_dev_src_change`` method
-in your package class. This is particularly useful for projects using custom spack repo's
-to drive development and want to optimize performance.
+A custom check can be created by overriding the ``detect_dev_src_change`` method
+in your package class. This is particularly useful for projects using custom spack repo's
+to drive development and want to optimize performance.

 Spack ensures that all instances of a
 developed package in the environment are concretized to match the
@@ -453,7 +466,7 @@ Further development on ``foo`` can be tested by re-installing the environment,
 and eventually committed and pushed to the upstream git repo.

 If the package being developed supports out-of-source builds then users can use the
-``--build_directory`` flag to control the location and name of the build directory.
+``--build_directory`` flag to control the location and name of the build directory.
 This is a shortcut to set the ``package_attributes:build_directory`` in the
 ``packages`` configuration (see :ref:`assigning-package-attributes`).
 The supplied location will become the build-directory for that package in all future builds.
```
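A hedged sketch of the custom check mentioned in the hunk above. The method name ``detect_dev_src_change`` comes from the docs; the content-hash strategy and the recording mechanism are hypothetical, not part of this diff:

```python
# Hypothetical package showing a custom dev-source change check that uses
# file contents instead of mtimes (sketch only).
import hashlib
import os

from spack.package import *


def compute_tree_hash(path: str) -> str:
    """Hypothetical helper: hash the contents of a source tree."""
    digest = hashlib.sha256()
    for root, _, files in sorted(os.walk(path)):
        for name in sorted(files):
            with open(os.path.join(root, name), "rb") as f:
                digest.update(f.read())
    return digest.hexdigest()


class MyApp(Package):
    """Hypothetical develop-mode package."""

    def detect_dev_src_change(self) -> bool:
        # Compare a content hash of the dev source tree against one recorded
        # at the last installation (how it is recorded is not shown here).
        recorded = getattr(self, "_recorded_hash", None)
        return compute_tree_hash(self.stage.source_path) != recorded
```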
```diff
@@ -456,14 +456,13 @@ For instance, the following config options,
    tcl:
      all:
        suffixes:
-         ^python@3: 'python{^python.version}'
+         ^python@3: 'python{^python.version.up_to_2}'
          ^openblas: 'openblas'

-will add a ``python-3.12.1`` version string to any packages compiled with
-Python matching the spec, ``python@3``. This is useful to know which
-version of Python a set of Python extensions is associated with. Likewise, the
-``openblas`` string is attached to any program that has openblas in the spec,
-most likely via the ``+blas`` variant specification.
+will add a ``python3.12`` to module names of packages compiled with Python 3.12, and similarly for
+all specs depending on ``python@3``. This is useful to know which version of Python a set of Python
+extensions is associated with. Likewise, the ``openblas`` string is attached to any program that
+has openblas in the spec, most likely via the ``+blas`` variant specification.

 The most heavyweight solution to module naming is to change the entire
 naming convention for module files. This uses the projections format
```
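The renamed token works because Spack versions expose ``up_to(n)``, which truncates a version to its first ``n`` components; ``{^python.version.up_to_2}`` renders that truncation inside the suffix. A quick check in ``spack python``:

```python
# up_to(2) keeps only the first two version components, turning 3.12.1
# into 3.12 (which is what the ``up_to_2`` token renders).
from spack.version import Version

print(Version("3.12.1").up_to(2))  # 3.12
```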
```diff
@@ -4,7 +4,7 @@ sphinx_design==0.6.1
 sphinx-rtd-theme==3.0.2
 python-levenshtein==0.26.1
 docutils==0.21.2
-pygments==2.18.0
+pygments==2.19.1
 urllib3==2.3.0
 pytest==8.3.4
 isort==5.13.2
```
```diff
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """URL primitives that just require Python standard library."""
 import itertools
-import os.path
+import os
 import re
 from typing import Optional, Set, Tuple
 from urllib.parse import urlsplit, urlunsplit
```
```diff
@@ -75,7 +75,6 @@
     "install_tree",
     "is_exe",
     "join_path",
-    "last_modification_time_recursive",
     "library_extensions",
     "mkdirp",
     "partition_path",
@@ -669,7 +668,7 @@ def copy(src, dest, _permissions=False):
         _permissions (bool): for internal use only

     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* matches multiple files but *dest* is
             not a directory
     """
@@ -680,7 +679,7 @@ def copy(src, dest, _permissions=False):

     files = glob.glob(src)
     if not files:
-        raise IOError("No such file or directory: '{0}'".format(src))
+        raise OSError("No such file or directory: '{0}'".format(src))
     if len(files) > 1 and not os.path.isdir(dest):
         raise ValueError(
             "'{0}' matches multiple files but '{1}' is not a directory".format(src, dest)
@@ -711,7 +710,7 @@ def install(src, dest):
         dest (str): the destination file or directory

     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* matches multiple files but *dest* is
             not a directory
     """
@@ -749,7 +748,7 @@ def copy_tree(
         _permissions (bool): for internal use only

     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
     if _permissions:
@@ -763,7 +762,7 @@ def copy_tree(

     files = glob.glob(src)
     if not files:
-        raise IOError("No such file or directory: '{0}'".format(src))
+        raise OSError("No such file or directory: '{0}'".format(src))

     # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
     # all symlinks to this list while traversing the tree, then when finished, make all
@@ -844,7 +843,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
     ignore (typing.Callable): function indicating which files to ignore

     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
     copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
```
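Background for the ``IOError`` to ``OSError`` renames in these hunks: since Python 3.3 (PEP 3151) ``IOError`` is a plain alias of ``OSError``, so the renames change no behavior and simply keep the modern spelling. A tiny demonstration:

```python
# IOError and OSError are the same class in Python 3, so code that raised
# or caught IOError behaves identically after the rename.
assert IOError is OSError

try:
    open("/nonexistent/path")
except OSError as e:  # also catches what older code spelled as IOError
    print(type(e).__name__)  # FileNotFoundError, an OSError subclass
```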
```diff
@@ -1470,15 +1469,36 @@ def set_executable(path):


 @system_path_filter
-def last_modification_time_recursive(path):
-    path = os.path.abspath(path)
-    times = [os.stat(path).st_mtime]
-    times.extend(
-        os.lstat(os.path.join(root, name)).st_mtime
-        for root, dirs, files in os.walk(path)
-        for name in dirs + files
-    )
-    return max(times)
+def recursive_mtime_greater_than(path: str, time: float) -> bool:
+    """Returns true if any file or dir recursively under `path` has mtime greater than `time`."""
+    # use bfs order to increase likelihood of early return
+    queue: Deque[str] = collections.deque([path])
+
+    if os.stat(path).st_mtime > time:
+        return True
+
+    while queue:
+        current = queue.popleft()
+
+        try:
+            entries = os.scandir(current)
+        except OSError:
+            continue
+
+        with entries:
+            for entry in entries:
+                try:
+                    st = entry.stat(follow_symlinks=False)
+                except OSError:
+                    continue
+
+                if st.st_mtime > time:
+                    return True
+
+                if entry.is_dir(follow_symlinks=False):
+                    queue.append(entry.path)
+
+    return False


 @system_path_filter
```
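A hedged usage sketch of the new helper: unlike the removed ``last_modification_time_recursive``, which always walked the entire tree to compute a maximum, the predicate can return as soon as any entry is newer than the reference timestamp. The paths below are illustrative:

```python
# Sketch, assuming the new helper is available in llnl.util.filesystem:
# answer "did anything under the dev source tree change since install?"
# without scanning the whole tree when an early entry is already newer.
import os

import llnl.util.filesystem as fs

reference = os.stat("/opt/spack/installed-pkg").st_mtime  # illustrative path
if fs.recursive_mtime_greater_than("/home/user/dev/pkg-src", reference):
    print("source changed since install; rebuild needed")
```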
```diff
@@ -1740,8 +1760,7 @@ def find(


 def _log_file_access_issue(e: OSError, path: str) -> None:
-    errno_name = errno.errorcode.get(e.errno, "UNKNOWN")
-    tty.debug(f"find must skip {path}: {errno_name} {e}")
+    tty.debug(f"find must skip {path}: {e}")


 def _file_id(s: os.stat_result) -> Tuple[int, int]:
```
```diff
@@ -308,7 +308,7 @@ class LinkTree:

     def __init__(self, source_root):
         if not os.path.exists(source_root):
-            raise IOError("No such file or directory: '%s'", source_root)
+            raise OSError("No such file or directory: '%s'", source_root)

         self._root = source_root
```
```diff
@@ -391,7 +391,7 @@ def _poll_lock(self, op: int) -> bool:

             return True

-        except IOError as e:
+        except OSError as e:
             # EAGAIN and EACCES == locked by another process (so try again)
             if e.errno not in (errno.EAGAIN, errno.EACCES):
                 raise
```
```diff
@@ -918,7 +918,7 @@ def _writer_daemon(
                 try:
                     if stdin_file.read(1) == "v":
                         echo = not echo
-                except IOError as e:
+                except OSError as e:
                     # If SIGTTIN is ignored, the system gives EIO
                     # to let the caller know the read failed b/c it
                     # was in the bg. Ignore that too.
@@ -1013,7 +1013,7 @@ def wrapped(*args, **kwargs):
         while True:
             try:
                 return function(*args, **kwargs)
-            except IOError as e:
+            except OSError as e:
                 if e.errno == errno.EINTR:
                     continue
                 raise
```
```diff
@@ -1356,14 +1356,8 @@ def _test_detection_by_executable(pkgs, debug_log, error_cls):

     def _compare_extra_attribute(_expected, _detected, *, _spec):
         result = []
-        # Check items are of the same type
-        if not isinstance(_detected, type(_expected)):
-            _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
-            _details = [f"{_detected} was detected instead"]
-            return [error_cls(summary=_summary, details=_details)]
-
         # If they are string expected is a regex
-        if isinstance(_expected, str):
+        if isinstance(_expected, str) and isinstance(_detected, str):
             try:
                 _regex = re.compile(_expected)
             except re.error:
@@ -1379,7 +1373,7 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
                 _details = [f"{_detected} does not match the regex"]
                 return [error_cls(summary=_summary, details=_details)]

-        if isinstance(_expected, dict):
+        elif isinstance(_expected, dict) and isinstance(_detected, dict):
             _not_detected = set(_expected.keys()) - set(_detected.keys())
             if _not_detected:
                 _summary = f"{pkg_name}: cannot detect some attributes for spec {_spec}"
@@ -1394,6 +1388,10 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
                 result.extend(
                     _compare_extra_attribute(_expected[_key], _detected[_key], _spec=_spec)
                 )
+        else:
+            _summary = f'{pkg_name}: error when trying to detect "{_expected}"'
+            _details = [f"{_detected} was detected instead"]
+            return [error_cls(summary=_summary, details=_details)]

         return result
```
```diff
@@ -5,6 +5,7 @@
 import codecs
 import collections
 import concurrent.futures
+import contextlib
 import copy
 import hashlib
 import io
@@ -23,7 +24,7 @@
 import urllib.request
 import warnings
 from contextlib import closing
-from typing import IO, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
+from typing import IO, Callable, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union

 import llnl.util.filesystem as fsys
 import llnl.util.lang
@@ -91,6 +92,9 @@
 CURRENT_BUILD_CACHE_LAYOUT_VERSION = 2


+INDEX_HASH_FILE = "index.json.hash"
+
+
 class BuildCacheDatabase(spack_db.Database):
     """A database for binary buildcaches.

@@ -502,7 +506,7 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
     scheme = urllib.parse.urlparse(mirror_url).scheme

     if scheme != "oci" and not web_util.url_exists(
-        url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, "index.json")
+        url_util.join(mirror_url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
     ):
         return False
```
```diff
@@ -591,32 +595,18 @@ def file_matches(f: IO[bytes], regex: llnl.util.lang.PatternBytes) -> bool:
         f.seek(0)


-def deps_to_relocate(spec):
-    """Return the transitive link and direct run dependencies of the spec.
-
-    This is a special traversal for dependencies we need to consider when relocating a package.
-
-    Package binaries, scripts, and other files may refer to the prefixes of dependencies, so
-    we need to rewrite those locations when dependencies are in a different place at install time
-    than they were at build time.
-
-    This traversal covers transitive link dependencies and direct run dependencies because:
-
-    1. Spack adds RPATHs for transitive link dependencies so that packages can find needed
-       dependency libraries.
-    2. Packages may call any of their *direct* run dependencies (and may bake their paths into
-       binaries or scripts), so we also need to search for run dependency prefixes when relocating.
-
-    This returns a deduplicated list of transitive link dependencies and direct run dependencies.
-    """
-    deps = [
+def specs_to_relocate(spec: spack.spec.Spec) -> List[spack.spec.Spec]:
+    """Return the set of specs that may be referenced in the install prefix of the provided spec.
+    We currently include non-external transitive link and direct run dependencies."""
+    specs = [
         s
         for s in itertools.chain(
-            spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
+            spec.traverse(root=True, deptype="link", order="breadth", key=traverse.by_dag_hash),
+            spec.dependencies(deptype="run"),
         )
         if not s.external
     ]
-    return llnl.util.lang.dedupe(deps, key=lambda s: s.dag_hash())
+    return list(llnl.util.lang.dedupe(specs, key=lambda s: s.dag_hash()))


 def get_buildinfo_dict(spec):
@@ -630,7 +620,7 @@ def get_buildinfo_dict(spec):
         # "relocate_binaries": [],
         # "relocate_links": [],
         "hardlinks_deduped": True,
-        "hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in deps_to_relocate(spec)},
+        "hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in specs_to_relocate(spec)},
     }
```
```diff
@@ -683,19 +673,24 @@ def sign_specfile(key: str, specfile_path: str) -> str:


 def _read_specs_and_push_index(
-    file_list, read_method, cache_prefix, db: BuildCacheDatabase, temp_dir, concurrency
+    file_list: List[str],
+    read_method: Callable,
+    cache_prefix: str,
+    db: BuildCacheDatabase,
+    temp_dir: str,
+    concurrency: int,
 ):
     """Read all the specs listed in the provided list, using thread given thread parallelism,
     generate the index, and push it to the mirror.

     Args:
-        file_list (list(str)): List of urls or file paths pointing at spec files to read
+        file_list: List of urls or file paths pointing at spec files to read
         read_method: A function taking a single argument, either a url or a file path,
             and which reads the spec file at that location, and returns the spec.
-        cache_prefix (str): prefix of the build cache on s3 where index should be pushed.
+        cache_prefix: prefix of the build cache on s3 where index should be pushed.
         db: A spack database used for adding specs and then writing the index.
-        temp_dir (str): Location to write index.json and hash for pushing
-        concurrency (int): Number of parallel processes to use when fetching
+        temp_dir: Location to write index.json and hash for pushing
+        concurrency: Number of parallel processes to use when fetching
     """
     for file in file_list:
         contents = read_method(file)
@@ -713,7 +708,7 @@ def _read_specs_and_push_index(

     # Now generate the index, compute its hash, and push the two files to
     # the mirror.
-    index_json_path = os.path.join(temp_dir, "index.json")
+    index_json_path = os.path.join(temp_dir, spack_db.INDEX_JSON_FILE)
     with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)

@@ -723,14 +718,14 @@ def _read_specs_and_push_index(
     index_hash = compute_hash(index_string)

     # Write the hash out to a local file
-    index_hash_path = os.path.join(temp_dir, "index.json.hash")
+    index_hash_path = os.path.join(temp_dir, INDEX_HASH_FILE)
     with open(index_hash_path, "w", encoding="utf-8") as f:
         f.write(index_hash)

     # Push the index itself
     web_util.push_to_url(
         index_json_path,
-        url_util.join(cache_prefix, "index.json"),
+        url_util.join(cache_prefix, spack_db.INDEX_JSON_FILE),
         keep_original=False,
         extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
     )
@@ -738,7 +733,7 @@ def _read_specs_and_push_index(
     # Push the hash
     web_util.push_to_url(
         index_hash_path,
-        url_util.join(cache_prefix, "index.json.hash"),
+        url_util.join(cache_prefix, INDEX_HASH_FILE),
         keep_original=False,
         extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
     )
@@ -807,7 +802,7 @@ def url_read_method(url):
     try:
         _, _, spec_file = web_util.read_from_url(url)
         contents = codecs.getreader("utf-8")(spec_file).read()
-    except web_util.SpackWebError as e:
+    except (web_util.SpackWebError, OSError) as e:
         tty.error(f"Error reading specfile: {url}: {e}")
     return contents

@@ -875,9 +870,12 @@ def _url_generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
     tty.debug(f"Retrieving spec descriptor files from {url} to build index")

     db = BuildCacheDatabase(tmpdir)
+    db._write()

     try:
-        _read_specs_and_push_index(file_list, read_fn, url, db, db.database_directory, concurrency)
+        _read_specs_and_push_index(
+            file_list, read_fn, url, db, str(db.database_directory), concurrency
+        )
     except Exception as e:
         raise GenerateIndexError(f"Encountered problem pushing package index to {url}: {e}") from e
```
```diff
@@ -1112,7 +1110,7 @@ def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> E


 def prefixes_to_relocate(spec):
-    prefixes = [s.prefix for s in deps_to_relocate(spec)]
+    prefixes = [s.prefix for s in specs_to_relocate(spec)]
     prefixes.append(spack.hooks.sbang.sbang_install_path())
     prefixes.append(str(spack.store.STORE.layout.root))
     return prefixes
@@ -1791,7 +1789,7 @@ def _oci_update_index(
     db.mark(spec, "in_buildcache", True)

     # Create the index.json file
-    index_json_path = os.path.join(tmpdir, "index.json")
+    index_json_path = os.path.join(tmpdir, spack_db.INDEX_JSON_FILE)
     with open(index_json_path, "w", encoding="utf-8") as f:
         db._write_to_file(f)

@@ -2012,7 +2010,7 @@ def fetch_url_to_mirror(url):

     # Download the config = spec.json and the relevant tarball
     try:
-        manifest = json.loads(response.read())
+        manifest = json.load(response)
         spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
         tarball_digest = spack.oci.image.Digest.from_string(
             manifest["layers"][-1]["digest"]
```
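For context on the ``json.load`` change above: ``json.load`` parses straight from a file-like object, so it is equivalent to ``json.loads(response.read())`` without materializing an intermediate string. A tiny self-contained check:

```python
# json.load reads from a file-like object; json.loads from a string.
# Both calls below produce the same dict for the same payload.
import io
import json

payload = '{"config": {"digest": "sha256:abc"}}'
assert json.loads(payload) == json.load(io.StringIO(payload))
```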
```diff
@@ -2139,10 +2137,9 @@ def fetch_url_to_mirror(url):


 def dedupe_hardlinks_if_necessary(root, buildinfo):
-    """Updates a buildinfo dict for old archives that did
-    not dedupe hardlinks. De-duping hardlinks is necessary
-    when relocating files in parallel and in-place. This
-    means we must preserve inodes when relocating."""
+    """Updates a buildinfo dict for old archives that did not dedupe hardlinks. De-duping hardlinks
+    is necessary when relocating files in parallel and in-place. This means we must preserve inodes
+    when relocating."""

     # New archives don't need this.
     if buildinfo.get("hardlinks_deduped", False):
```
```diff
@@ -2171,65 +2168,48 @@ def dedupe_hardlinks_if_necessary(root, buildinfo):
         buildinfo[key] = new_list


-def relocate_package(spec):
-    """
-    Relocate the given package
-    """
-    workdir = str(spec.prefix)
-    buildinfo = read_buildinfo_file(workdir)
-    new_layout_root = str(spack.store.STORE.layout.root)
-    new_prefix = str(spec.prefix)
-    new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root))
-    new_spack_prefix = str(spack.paths.prefix)
-
-    old_sbang_install_path = None
-    if "sbang_install_path" in buildinfo:
-        old_sbang_install_path = str(buildinfo["sbang_install_path"])
+def relocate_package(spec: spack.spec.Spec) -> None:
+    """Relocate binaries and text files in the given spec prefix, based on its buildinfo file."""
+    spec_prefix = str(spec.prefix)
+    buildinfo = read_buildinfo_file(spec_prefix)
     old_layout_root = str(buildinfo["buildpath"])
-    old_spack_prefix = str(buildinfo.get("spackprefix"))
-    old_rel_prefix = buildinfo.get("relative_prefix")
-    old_prefix = os.path.join(old_layout_root, old_rel_prefix)
-    rel = buildinfo.get("relative_rpaths", False)

-    # In the past prefix_to_hash was the default and externals were not dropped, so prefixes
-    # were not unique.
+    # Warn about old style tarballs created with the --rel flag (removed in Spack v0.20)
+    if buildinfo.get("relative_rpaths", False):
+        tty.warn(
+            f"Tarball for {spec} uses relative rpaths, which can cause library loading issues."
+        )
+
+    # In Spack 0.19 and older prefix_to_hash was the default and externals were not dropped, so
+    # prefixes were not unique.
     if "hash_to_prefix" in buildinfo:
         hash_to_old_prefix = buildinfo["hash_to_prefix"]
     elif "prefix_to_hash" in buildinfo:
-        hash_to_old_prefix = dict((v, k) for (k, v) in buildinfo["prefix_to_hash"].items())
+        hash_to_old_prefix = {v: k for (k, v) in buildinfo["prefix_to_hash"].items()}
     else:
-        hash_to_old_prefix = dict()
+        raise NewLayoutException(
+            "Package tarball was created from an install prefix with a different directory layout "
+            "and an older buildcache create implementation. It cannot be relocated."
+        )

-    if old_rel_prefix != new_rel_prefix and not hash_to_old_prefix:
-        msg = "Package tarball was created from an install "
-        msg += "prefix with a different directory layout and an older "
-        msg += "buildcache create implementation. It cannot be relocated."
-        raise NewLayoutException(msg)
+    prefix_to_prefix: Dict[str, str] = {}

-    # Spurious replacements (e.g. sbang) will cause issues with binaries
-    # For example, the new sbang can be longer than the old one.
-    # Hence 2 dictionaries are maintained here.
-    prefix_to_prefix_text = collections.OrderedDict()
-    prefix_to_prefix_bin = collections.OrderedDict()
+    if "sbang_install_path" in buildinfo:
+        old_sbang_install_path = str(buildinfo["sbang_install_path"])
+        prefix_to_prefix[old_sbang_install_path] = spack.hooks.sbang.sbang_install_path()

-    if old_sbang_install_path:
-        install_path = spack.hooks.sbang.sbang_install_path()
-        prefix_to_prefix_text[old_sbang_install_path] = install_path
+    # First match specific prefix paths. Possibly the *local* install prefix of some dependency is
+    # in an upstream, so we cannot assume the original spack store root can be mapped uniformly to
+    # the new spack store root.

-    # First match specific prefix paths. Possibly the *local* install prefix
-    # of some dependency is in an upstream, so we cannot assume the original
-    # spack store root can be mapped uniformly to the new spack store root.
-    #
-    # If the spec is spliced, we need to handle the simultaneous mapping
-    # from the old install_tree to the new install_tree and from the build_spec
-    # to the spliced spec.
-    # Because foo.build_spec is foo for any non-spliced spec, we can simplify
-    # by checking for spliced-in nodes by checking for nodes not in the build_spec
-    # without any explicit check for whether the spec is spliced.
-    # An analog in this algorithm is any spec that shares a name or provides the same virtuals
-    # in the context of the relevant root spec. This ensures that the analog for a spec s
-    # is the spec that s replaced when we spliced.
-    relocation_specs = deps_to_relocate(spec)
+    # If the spec is spliced, we need to handle the simultaneous mapping from the old install_tree
+    # to the new install_tree and from the build_spec to the spliced spec. Because foo.build_spec
+    # is foo for any non-spliced spec, we can simplify by checking for spliced-in nodes by checking
+    # for nodes not in the build_spec without any explicit check for whether the spec is spliced.
+    # An analog in this algorithm is any spec that shares a name or provides the same virtuals in
+    # the context of the relevant root spec. This ensures that the analog for a spec s is the spec
+    # that s replaced when we spliced.
+    relocation_specs = specs_to_relocate(spec)
     build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
     for s in relocation_specs:
         analog = s
@@ -2248,98 +2228,66 @@ def relocate_package(spec):
         lookup_dag_hash = analog.dag_hash()
         if lookup_dag_hash in hash_to_old_prefix:
             old_dep_prefix = hash_to_old_prefix[lookup_dag_hash]
-            prefix_to_prefix_bin[old_dep_prefix] = str(s.prefix)
-            prefix_to_prefix_text[old_dep_prefix] = str(s.prefix)
+            prefix_to_prefix[old_dep_prefix] = str(s.prefix)

     # Only then add the generic fallback of install prefix -> install prefix.
-    prefix_to_prefix_text[old_prefix] = new_prefix
-    prefix_to_prefix_bin[old_prefix] = new_prefix
-    prefix_to_prefix_text[old_layout_root] = new_layout_root
-    prefix_to_prefix_bin[old_layout_root] = new_layout_root
+    prefix_to_prefix[old_layout_root] = str(spack.store.STORE.layout.root)

-    # This is vestigial code for the *old* location of sbang. Previously,
-    # sbang was a bash script, and it lived in the spack prefix. It is
-    # now a POSIX script that lives in the install prefix. Old packages
-    # will have the old sbang location in their shebangs.
-    orig_sbang = "#!/bin/bash {0}/bin/sbang".format(old_spack_prefix)
-    new_sbang = spack.hooks.sbang.sbang_shebang_line()
-    prefix_to_prefix_text[orig_sbang] = new_sbang
+    # Delete identity mappings from prefix_to_prefix
+    prefix_to_prefix = {k: v for k, v in prefix_to_prefix.items() if k != v}

-    tty.debug("Relocating package from", "%s to %s." % (old_layout_root, new_layout_root))
+    # If there's nothing to relocate, we're done.
+    if not prefix_to_prefix:
+        return

-    # Old archives maybe have hardlinks repeated.
-    dedupe_hardlinks_if_necessary(workdir, buildinfo)
+    for old, new in prefix_to_prefix.items():
+        tty.debug(f"Relocating: {old} => {new}.")

-    def is_backup_file(file):
-        return file.endswith("~")
+    # Old archives may have hardlinks repeated.
+    dedupe_hardlinks_if_necessary(spec_prefix, buildinfo)

-    # Text files containing the prefix text
-    text_names = list()
-    for filename in buildinfo["relocate_textfiles"]:
-        text_name = os.path.join(workdir, filename)
-        # Don't add backup files generated by filter_file during install step.
-        if not is_backup_file(text_name):
-            text_names.append(text_name)
+    textfiles = [os.path.join(spec_prefix, f) for f in buildinfo["relocate_textfiles"]]
+    binaries = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_binaries")]
+    links = [os.path.join(spec_prefix, f) for f in buildinfo.get("relocate_links", [])]

-    # If we are not installing back to the same install tree do the relocation
-    if old_prefix != new_prefix:
-        files_to_relocate = [
-            os.path.join(workdir, filename) for filename in buildinfo.get("relocate_binaries")
-        ]
-        # If the buildcache was not created with relativized rpaths
-        # do the relocation of path in binaries
-        platform = spack.platforms.by_name(spec.platform)
-        if "macho" in platform.binary_formats:
-            relocate.relocate_macho_binaries(
-                files_to_relocate,
-                old_layout_root,
-                new_layout_root,
-                prefix_to_prefix_bin,
-                rel,
-                old_prefix,
-                new_prefix,
-            )
-        elif "elf" in platform.binary_formats and not rel:
-            # The new ELF dynamic section relocation logic only handles absolute to
-            # absolute relocation.
-            relocate.new_relocate_elf_binaries(files_to_relocate, prefix_to_prefix_bin)
-        elif "elf" in platform.binary_formats and rel:
-            relocate.relocate_elf_binaries(
-                files_to_relocate,
-                old_layout_root,
-                new_layout_root,
-                prefix_to_prefix_bin,
-                rel,
-                old_prefix,
-                new_prefix,
-            )
+    platform = spack.platforms.by_name(spec.platform)
+    if "macho" in platform.binary_formats:
+        relocate.relocate_macho_binaries(binaries, prefix_to_prefix)
+    elif "elf" in platform.binary_formats:
+        relocate.relocate_elf_binaries(binaries, prefix_to_prefix)

-        # Relocate links to the new install prefix
-        links = [os.path.join(workdir, f) for f in buildinfo.get("relocate_links", [])]
-        relocate.relocate_links(links, prefix_to_prefix_bin)
+    relocate.relocate_links(links, prefix_to_prefix)
+    relocate.relocate_text(textfiles, prefix_to_prefix)
+    changed_files = relocate.relocate_text_bin(binaries, prefix_to_prefix)

-        # For all buildcaches
-        # relocate the install prefixes in text files including dependencies
-        relocate.relocate_text(text_names, prefix_to_prefix_text)
+    # Add ad-hoc signatures to patched macho files when on macOS.
+    if "macho" in platform.binary_formats and sys.platform == "darwin":
+        codesign = which("codesign")
+        if not codesign:
+            return
+        for binary in changed_files:
+            # preserve the original inode by running codesign on a copy
+            with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
+                codesign("-fs-", tmp_binary)

-        # relocate the install prefixes in binary files including dependencies
-        changed_files = relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
+    install_manifest = os.path.join(
+        spec.prefix,
+        spack.store.STORE.layout.metadata_dir,
+        spack.store.STORE.layout.manifest_file_name,
+    )
+    if not os.path.exists(install_manifest):
+        spec_id = spec.format("{name}/{hash:7}")
+        tty.warn("No manifest file in tarball for spec %s" % spec_id)

-        # Add ad-hoc signatures to patched macho files when on macOS.
-        if "macho" in platform.binary_formats and sys.platform == "darwin":
-            codesign = which("codesign")
-            if not codesign:
-                return
-            for binary in changed_files:
-                # preserve the original inode by running codesign on a copy
-                with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
-                    codesign("-fs-", tmp_binary)
+    # overwrite old metadata with new
+    if spec.spliced:
+        # rewrite spec on disk
+        spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))

-    # If we are installing back to the same location
-    # relocate the sbang location if the spack directory changed
-    else:
-        if old_spack_prefix != new_spack_prefix:
-            relocate.relocate_text(text_names, prefix_to_prefix_text)
+    # de-cache the install manifest
+    with contextlib.suppress(FileNotFoundError):
+        os.unlink(install_manifest)


 def _extract_inner_tarball(spec, filename, extract_to, signature_required: bool, remote_checksum):
```
```diff
@@ -2507,15 +2455,6 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
         except Exception as e:
             shutil.rmtree(spec.prefix, ignore_errors=True)
             raise e
-        else:
-            manifest_file = os.path.join(
-                spec.prefix,
-                spack.store.STORE.layout.metadata_dir,
-                spack.store.STORE.layout.manifest_file_name,
-            )
-            if not os.path.exists(manifest_file):
-                spec_id = spec.format("{name}/{hash:7}")
-                tty.warn("No manifest file in tarball for spec %s" % spec_id)
     finally:
         if tmpdir:
             shutil.rmtree(tmpdir, ignore_errors=True)
@@ -2620,10 +2559,6 @@ def install_root_node(
     tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
     extract_tarball(spec, download_result, force)
     spec.package.windows_establish_runtime_linkage()
-    if spec.spliced:  # overwrite old metadata with new
-        spack.store.STORE.layout.write_spec(
-            spec, spack.store.STORE.layout.spec_file_path(spec)
-        )
     spack.hooks.post_install(spec, False)
     spack.store.STORE.db.add(spec, allow_missing=allow_missing)
```
@@ -2661,11 +2596,14 @@ def try_direct_fetch(spec, mirrors=None):
        )
        try:
            _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
            specfile_contents = codecs.getreader("utf-8")(fs).read()
            specfile_is_signed = True
        except web_util.SpackWebError as e1:
        except (web_util.SpackWebError, OSError) as e1:
            try:
                _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
            except web_util.SpackWebError as e2:
                specfile_contents = codecs.getreader("utf-8")(fs).read()
                specfile_is_signed = False
            except (web_util.SpackWebError, OSError) as e2:
                tty.debug(
                    f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
                    e1,
@@ -2675,7 +2613,6 @@ def try_direct_fetch(spec, mirrors=None):
                    f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
                )
                continue
        specfile_contents = codecs.getreader("utf-8")(fs).read()

    # read the spec from the build cache file. All specs in build caches
    # are concrete (as they are built) so we need to mark this spec
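The pattern in this hunk — try the signed specfile URL first, fall back to the unsigned one — can be sketched outside Spack like this (URLs and timeout are illustrative; note that OSError also covers urllib.error.URLError and socket timeouts, which is what the widened except clauses rely on):

import codecs
import urllib.request

def fetch_first_available(urls):
    errors = []
    for url in urls:
        try:
            with urllib.request.urlopen(url, timeout=10) as response:
                # Decode the stream the same way the hunk does.
                return codecs.getreader("utf-8")(response).read(), url
        except OSError as err:
            errors.append((url, err))
    raise OSError(f"no URL was reachable: {errors}")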
@@ -2769,8 +2706,9 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):

        try:
            _, _, json_file = web_util.read_from_url(keys_index)
            json_index = sjson.load(codecs.getreader("utf-8")(json_file))
        except web_util.SpackWebError as url_err:
            json_index = sjson.load(json_file)
        except (web_util.SpackWebError, OSError, ValueError) as url_err:
            # TODO: avoid repeated request
            if web_util.url_exists(keys_index):
                tty.error(
                    f"Unable to find public keys in {url_util.format(fetch_url)},"
@@ -3017,14 +2955,14 @@ def __init__(self, url, local_hash, urlopen=web_util.urlopen):

    def get_remote_hash(self):
        # Failure to fetch index.json.hash is not fatal
        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
        url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, INDEX_HASH_FILE)
        try:
            response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
        except (TimeoutError, urllib.error.URLError):
            remote_hash = response.read(64)
        except OSError:
            return None

        # Validate the hash
        remote_hash = response.read(64)
        if not re.match(rb"[a-f\d]{64}$", remote_hash):
            return None
        return remote_hash.decode("utf-8")
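A stand-alone version of the digest validation above; the 64-hex-character check rejects things like HTML error pages that a misconfigured mirror might serve instead of a sha256 file:

import re

def parse_sha256(raw: bytes):
    # Accept exactly 64 lowercase hex characters, else signal "no usable hash".
    candidate = raw[:64]
    if not re.fullmatch(rb"[a-f\d]{64}", candidate):
        return None
    return candidate.decode("utf-8")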
@@ -3038,17 +2976,17 @@ def conditional_fetch(self) -> FetchIndexResult:
            return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)

        # Otherwise, download index.json
        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
        url_index = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)

        try:
            response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
        except (TimeoutError, urllib.error.URLError) as e:
            raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
        except OSError as e:
            raise FetchIndexError(f"Could not fetch index from {url_index}", e) from e

        try:
            result = codecs.getreader("utf-8")(response).read()
        except ValueError as e:
            raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e
        except (ValueError, OSError) as e:
            raise FetchIndexError(f"Remote index {url_index} is invalid") from e

        computed_hash = compute_hash(result)
@@ -3082,7 +3020,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):

    def conditional_fetch(self) -> FetchIndexResult:
        # Just do a conditional fetch immediately
        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
        url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, spack_db.INDEX_JSON_FILE)
        headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}

        try:
@@ -3092,12 +3030,12 @@ def conditional_fetch(self) -> FetchIndexResult:
                # Not modified; that means fresh.
                return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
            raise FetchIndexError(f"Could not fetch index {url}", e) from e
        except (TimeoutError, urllib.error.URLError) as e:
        except OSError as e:  # URLError, socket.timeout, etc.
            raise FetchIndexError(f"Could not fetch index {url}", e) from e

        try:
            result = codecs.getreader("utf-8")(response).read()
        except ValueError as e:
        except (ValueError, OSError) as e:
            raise FetchIndexError(f"Remote index {url} is invalid", e) from e

        headers = response.headers
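For comparison, a bare If-None-Match request with urllib follows the same shape as the fetcher above (the URL is hypothetical; an HTTP 304 means the cached index is still fresh):

import urllib.error
import urllib.request

def fetch_if_changed(url: str, etag: str):
    request = urllib.request.Request(url, headers={"If-None-Match": f'"{etag}"'})
    try:
        with urllib.request.urlopen(request) as response:
            return response.read(), response.headers.get("Etag")
    except urllib.error.HTTPError as err:
        if err.code == 304:
            return None, etag  # not modified: keep the cached copy
        raise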
@@ -3129,11 +3067,11 @@ def conditional_fetch(self) -> FetchIndexResult:
                    headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
                )
            )
        except (TimeoutError, urllib.error.URLError) as e:
        except OSError as e:
            raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e

        try:
            manifest = json.loads(response.read())
            manifest = json.load(response)
        except Exception as e:
            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e

@@ -3148,14 +3086,16 @@ def conditional_fetch(self) -> FetchIndexResult:
            return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)

        # Otherwise fetch the blob / index.json
        response = self.urlopen(
            urllib.request.Request(
                url=self.ref.blob_url(index_digest),
                headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
        try:
            response = self.urlopen(
                urllib.request.Request(
                    url=self.ref.blob_url(index_digest),
                    headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
                )
            )
        )

        result = codecs.getreader("utf-8")(response).read()
            result = codecs.getreader("utf-8")(response).read()
        except (OSError, ValueError) as e:
            raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e

        # Make sure the blob we download has the advertised hash
        if compute_hash(result) != index_digest.digest:
@@ -5,12 +5,14 @@
import fnmatch
import glob
import importlib
import os.path
import os
import re
import sys
import sysconfig
import warnings
from typing import Dict, Optional, Sequence, Union
from typing import Optional, Sequence, Union

from typing_extensions import TypedDict

import archspec.cpu

@@ -18,13 +20,17 @@
from llnl.util import tty

import spack.platforms
import spack.spec
import spack.store
import spack.util.environment
import spack.util.executable

from .config import spec_for_current_python

QueryInfo = Dict[str, "spack.spec.Spec"]

class QueryInfo(TypedDict, total=False):
    spec: spack.spec.Spec
    command: spack.util.executable.Executable


def _python_import(module: str) -> bool:
@@ -211,7 +217,9 @@ def _executables_in_store(
    ):
        spack.util.environment.path_put_first("PATH", [bin_dir])
        if query_info is not None:
            query_info["command"] = spack.util.executable.which(*executables, path=bin_dir)
            query_info["command"] = spack.util.executable.which(
                *executables, path=bin_dir, required=True
            )
            query_info["spec"] = concrete_spec
        return True
    return False
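Because the new QueryInfo is declared with total=False, both keys are optional, so the dict can be filled in incrementally while remaining type-checkable; a toy equivalent:

from typing_extensions import TypedDict

class Result(TypedDict, total=False):
    spec: str
    command: str

info: Result = {}            # valid even with no keys set yet
info["spec"] = "gmake@4.4"   # keys are still type-checked when present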
@@ -27,9 +27,9 @@
class ClingoBootstrapConcretizer:
    def __init__(self, configuration):
        self.host_platform = spack.platforms.host()
        self.host_os = self.host_platform.operating_system("frontend")
        self.host_os = self.host_platform.default_operating_system()
        self.host_target = archspec.cpu.host().family
        self.host_architecture = spack.spec.ArchSpec.frontend_arch()
        self.host_architecture = spack.spec.ArchSpec.default_arch()
        self.host_architecture.target = str(self.host_target)
        self.host_compiler = self._valid_compiler_or_raise()
        self.host_python = self.python_external_spec()
@@ -4,7 +4,7 @@
"""Manage configuration swapping for bootstrapping purposes"""

import contextlib
import os.path
import os
import sys
from typing import Any, Dict, Generator, MutableSequence, Sequence

@@ -141,7 +141,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:


def _add_compilers_if_missing() -> None:
    arch = spack.spec.ArchSpec.frontend_arch()
    arch = spack.spec.ArchSpec.default_arch()
    if not spack.compilers.compilers_for_arch(arch):
        spack.compilers.find_compilers()

@@ -25,7 +25,6 @@
import functools
import json
import os
import os.path
import sys
import uuid
from typing import Any, Callable, Dict, List, Optional, Tuple
@@ -34,8 +33,10 @@
from llnl.util.lang import GroupedExceptionHandler

import spack.binary_distribution
import spack.concretize
import spack.config
import spack.detection
import spack.error
import spack.mirrors.mirror
import spack.platforms
import spack.spec
@@ -44,10 +45,17 @@
import spack.util.executable
import spack.util.path
import spack.util.spack_yaml
import spack.util.url
import spack.version
from spack.installer import PackageInstaller

from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
from ._common import (
    QueryInfo,
    _executables_in_store,
    _python_import,
    _root_spec,
    _try_import_from_store,
)
from .clingo import ClingoBootstrapConcretizer
from .config import spack_python_interpreter, spec_for_current_python

@@ -89,8 +97,12 @@ def __init__(self, conf: ConfigDictionary) -> None:
        self.name = conf["name"]
        self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

        # Promote (relative) paths to file urls
        self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url
        # Check for relative paths, and turn them into absolute paths
        # root is the metadata_dir
        maybe_url = conf["info"]["url"]
        if spack.util.url.is_path_instead_of_url(maybe_url) and not os.path.isabs(maybe_url):
            maybe_url = os.path.join(self.metadata_dir, maybe_url)
        self.url = spack.mirrors.mirror.Mirror(maybe_url).fetch_url

    @property
    def mirror_scope(self) -> spack.config.InternalConfigScope:
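The relative-path promotion above can be reduced to a small helper; here is_path_instead_of_url is approximated by a missing URL scheme, which is an assumption for illustration rather than Spack's exact rule:

import os
import urllib.parse

def resolve_mirror_location(maybe_url: str, metadata_dir: str) -> str:
    # Anchor scheme-less relative paths at the metadata directory.
    if not urllib.parse.urlparse(maybe_url).scheme and not os.path.isabs(maybe_url):
        maybe_url = os.path.join(metadata_dir, maybe_url)
    return maybe_url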
@@ -134,7 +146,7 @@ class BuildcacheBootstrapper(Bootstrapper):

    def __init__(self, conf) -> None:
        super().__init__(conf)
        self.last_search: Optional[ConfigDictionary] = None
        self.last_search: Optional[QueryInfo] = None
        self.config_scope_name = f"bootstrap_buildcache-{uuid.uuid4()}"

    @staticmethod
@@ -211,14 +223,14 @@ def _install_and_test(
        for _, pkg_hash, pkg_sha256 in item["binaries"]:
            self._install_by_hash(pkg_hash, pkg_sha256, bincache_platform)

        info: ConfigDictionary = {}
        info: QueryInfo = {}
        if test_fn(query_spec=abstract_spec, query_info=info):
            self.last_search = info
            return True
        return False

    def try_import(self, module: str, abstract_spec_str: str) -> bool:
        info: ConfigDictionary
        info: QueryInfo
        test_fn, info = functools.partial(_try_import_from_store, module), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            return True
@@ -231,7 +243,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
        return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)

    def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
        info: ConfigDictionary
        info: QueryInfo
        test_fn, info = functools.partial(_executables_in_store, executables), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            self.last_search = info
@@ -249,11 +261,11 @@ class SourceBootstrapper(Bootstrapper):

    def __init__(self, conf) -> None:
        super().__init__(conf)
        self.last_search: Optional[ConfigDictionary] = None
        self.last_search: Optional[QueryInfo] = None
        self.config_scope_name = f"bootstrap_source-{uuid.uuid4()}"

    def try_import(self, module: str, abstract_spec_str: str) -> bool:
        info: ConfigDictionary = {}
        info: QueryInfo = {}
        if _try_import_from_store(module, abstract_spec_str, query_info=info):
            self.last_search = info
            return True
@@ -270,10 +282,10 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
            bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
            concrete_spec = bootstrapper.concretize()
        else:
            concrete_spec = spack.spec.Spec(
            abstract_spec = spack.spec.Spec(
                abstract_spec_str + " ^" + spec_for_current_python()
            )
            concrete_spec.concretize()
            concrete_spec = spack.concretize.concretize_one(abstract_spec)

        msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
        tty.debug(msg.format(module, abstract_spec_str))
@@ -288,7 +300,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
        return False

    def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
        info: ConfigDictionary = {}
        info: QueryInfo = {}
        if _executables_in_store(executables, abstract_spec_str, query_info=info):
            self.last_search = info
            return True
@@ -299,7 +311,7 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
        concrete_spec = spack.concretize.concretize_one(abstract_spec_str)
        msg = "[BOOTSTRAP] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        with spack.config.override(self.mirror_scope):
@@ -316,11 +328,9 @@ def create_bootstrapper(conf: ConfigDictionary):
    return _bootstrap_methods[btype](conf)


def source_is_enabled_or_raise(conf: ConfigDictionary):
    """Raise ValueError if the source is not enabled for bootstrapping"""
    trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
    if not trusted.get(name, False):
        raise ValueError("source is not trusted")
def source_is_enabled(conf: ConfigDictionary) -> bool:
    """Returns true if the source is enabled for bootstrapping"""
    return spack.config.get("bootstrap:trusted").get(conf["name"], False)


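Turning the old raise-based check into a boolean lets callers filter sources up front instead of recording a ValueError for every disabled source; schematically (the data here is made up):

def enabled_sources(sources, trusted):
    # Disabled sources are skipped silently rather than raising.
    return [s for s in sources if trusted.get(s["name"], False)]

sources = [{"name": "github-actions"}, {"name": "spack-install"}]
print(enabled_sources(sources, {"github-actions": True}))
# -> [{'name': 'github-actions'}]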
def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):
@@ -350,24 +360,23 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
        if not source_is_enabled(current_config):
            continue
        with exception_handler.forward(current_config["name"], Exception):
            source_is_enabled_or_raise(current_config)
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_import(module, abstract_spec):
            if create_bootstrapper(current_config).try_import(module, abstract_spec):
                return

    assert exception_handler, (
        f"expected at least one exception to have been raised at this point: "
        f"while bootstrapping {module}"
    )
    msg = f'cannot bootstrap the "{module}" Python module '
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '
    if tty.is_debug():

    if not exception_handler:
        msg += ": no bootstrapping sources are enabled"
    elif spack.error.debug or spack.error.SHOW_BACKTRACE:
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
        msg += "\nRun `spack --debug ...` for more detailed errors"
        msg += "\nRun `spack --backtrace ...` for more detailed errors"
    raise ImportError(msg)
@@ -405,8 +414,9 @@ def ensure_executables_in_path_or_raise(
    exception_handler = GroupedExceptionHandler()

    for current_config in bootstrapping_sources():
        if not source_is_enabled(current_config):
            continue
        with exception_handler.forward(current_config["name"], Exception):
            source_is_enabled_or_raise(current_config)
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_search_path(executables, abstract_spec):
                # Additional environment variables needed
@@ -414,6 +424,7 @@ def ensure_executables_in_path_or_raise(
                    current_bootstrapper.last_search["spec"],
                    current_bootstrapper.last_search["command"],
                )
                assert cmd is not None, "expected an Executable"
                cmd.add_default_envmod(
                    spack.user_environment.environment_modifications_for_specs(
                        concrete_spec, set_package_py_globals=False
@@ -421,18 +432,17 @@ def ensure_executables_in_path_or_raise(
                )
                return cmd

    assert exception_handler, (
        f"expected at least one exception to have been raised at this point: "
        f"while bootstrapping {executables_str}"
    )
    msg = f"cannot bootstrap any of the {executables_str} executables "
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '
    if tty.is_debug():

    if not exception_handler:
        msg += ": no bootstrapping sources are enabled"
    elif spack.error.debug or spack.error.SHOW_BACKTRACE:
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
        msg += "\nRun `spack --debug ...` for more detailed errors"
        msg += "\nRun `spack --backtrace ...` for more detailed errors"
    raise RuntimeError(msg)


@@ -63,7 +63,6 @@ def _missing(name: str, purpose: str, system_only: bool = True) -> str:

def _core_requirements() -> List[RequiredResponseType]:
    _core_system_exes = {
        "make": _missing("make", "required to build software from sources"),
        "patch": _missing("patch", "required to patch source code before building"),
        "tar": _missing("tar", "required to manage code archives"),
        "gzip": _missing("gzip", "required to compress/decompress code archives"),
@@ -44,7 +44,19 @@
from enum import Flag, auto
from itertools import chain
from multiprocessing.connection import Connection
from typing import Callable, Dict, List, Optional, Set, Tuple
from typing import (
    Callable,
    Dict,
    List,
    Optional,
    Sequence,
    Set,
    TextIO,
    Tuple,
    Type,
    Union,
    overload,
)

import archspec.cpu

@@ -146,48 +158,128 @@ def get_effective_jobs(jobs, parallel=True, supports_jobserver=False):


class MakeExecutable(Executable):
    """Special callable executable object for make so the user can specify
    parallelism options on a per-invocation basis. Specifying
    'parallel' to the call will override whatever the package's
    global setting is, so you can either default to true or false and
    override particular calls. Specifying 'jobs_env' to a particular
    call will name an environment variable which will be set to the
    parallelism level (without affecting the normal invocation with
    -j).
    """Special callable executable object for make so the user can specify parallelism options
    on a per-invocation basis.
    """

    def __init__(self, name, jobs, **kwargs):
        supports_jobserver = kwargs.pop("supports_jobserver", True)
        super().__init__(name, **kwargs)
    def __init__(self, name: str, *, jobs: int, supports_jobserver: bool = True) -> None:
        super().__init__(name)
        self.supports_jobserver = supports_jobserver
        self.jobs = jobs

    def __call__(self, *args, **kwargs):
        """parallel, and jobs_env from kwargs are swallowed and used here;
        remaining arguments are passed through to the superclass.
        """
        parallel = kwargs.pop("parallel", True)
        jobs_env = kwargs.pop("jobs_env", None)
        jobs_env_supports_jobserver = kwargs.pop("jobs_env_supports_jobserver", False)
    @overload
    def __call__(
        self,
        *args: str,
        parallel: bool = ...,
        jobs_env: Optional[str] = ...,
        jobs_env_supports_jobserver: bool = ...,
        fail_on_error: bool = ...,
        ignore_errors: Union[int, Sequence[int]] = ...,
        ignore_quotes: Optional[bool] = ...,
        timeout: Optional[int] = ...,
        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
        input: Optional[TextIO] = ...,
        output: Union[Optional[TextIO], str] = ...,
        error: Union[Optional[TextIO], str] = ...,
        _dump_env: Optional[Dict[str, str]] = ...,
    ) -> None: ...

    @overload
    def __call__(
        self,
        *args: str,
        parallel: bool = ...,
        jobs_env: Optional[str] = ...,
        jobs_env_supports_jobserver: bool = ...,
        fail_on_error: bool = ...,
        ignore_errors: Union[int, Sequence[int]] = ...,
        ignore_quotes: Optional[bool] = ...,
        timeout: Optional[int] = ...,
        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
        input: Optional[TextIO] = ...,
        output: Union[Type[str], Callable] = ...,
        error: Union[Optional[TextIO], str, Type[str], Callable] = ...,
        _dump_env: Optional[Dict[str, str]] = ...,
    ) -> str: ...

    @overload
    def __call__(
        self,
        *args: str,
        parallel: bool = ...,
        jobs_env: Optional[str] = ...,
        jobs_env_supports_jobserver: bool = ...,
        fail_on_error: bool = ...,
        ignore_errors: Union[int, Sequence[int]] = ...,
        ignore_quotes: Optional[bool] = ...,
        timeout: Optional[int] = ...,
        env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
        extra_env: Optional[Union[Dict[str, str], EnvironmentModifications]] = ...,
        input: Optional[TextIO] = ...,
        output: Union[Optional[TextIO], str, Type[str], Callable] = ...,
        error: Union[Type[str], Callable] = ...,
        _dump_env: Optional[Dict[str, str]] = ...,
    ) -> str: ...

    def __call__(
        self,
        *args: str,
        parallel: bool = True,
        jobs_env: Optional[str] = None,
        jobs_env_supports_jobserver: bool = False,
        **kwargs,
    ) -> Optional[str]:
        """Runs this "make" executable in a subprocess.

        Args:
            parallel: if False, parallelism is disabled
            jobs_env: environment variable that will be set to the current level of parallelism
            jobs_env_supports_jobserver: whether the jobs env supports a job server

        For all the other **kwargs, refer to the base class.
        """
        jobs = get_effective_jobs(
            self.jobs, parallel=parallel, supports_jobserver=self.supports_jobserver
        )
        if jobs is not None:
            args = ("-j{0}".format(jobs),) + args
            args = (f"-j{jobs}",) + args

        if jobs_env:
            # Caller wants us to set an environment variable to
            # control the parallelism.
            # Caller wants us to set an environment variable to control the parallelism
            jobs_env_jobs = get_effective_jobs(
                self.jobs, parallel=parallel, supports_jobserver=jobs_env_supports_jobserver
            )
            if jobs_env_jobs is not None:
                kwargs["extra_env"] = {jobs_env: str(jobs_env_jobs)}
                extra_env = kwargs.setdefault("extra_env", {})
                extra_env.update({jobs_env: str(jobs_env_jobs)})

        return super().__call__(*args, **kwargs)
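With the new keyword-only signature, construction and per-call options compose as below (a usage sketch against the class defined above, assuming a make binary on PATH and Spack's Executable machinery underneath):

make = MakeExecutable("make", jobs=8)
make("install", parallel=False)      # serial: no -j flag is added
make("check", jobs_env="TEST_JOBS")  # passes -j8 and also exports TEST_JOBS=8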
class UndeclaredDependencyError(spack.error.SpackError):
    """Raised if a dependency is invoking an executable through a module global, without
    declaring a dependency on it.
    """


class DeprecatedExecutable:
    def __init__(self, pkg: str, exe: str, exe_pkg: str) -> None:
        self.pkg = pkg
        self.exe = exe
        self.exe_pkg = exe_pkg

    def __call__(self, *args, **kwargs):
        raise UndeclaredDependencyError(
            f"{self.pkg} is using {self.exe} without declaring a dependency on {self.exe_pkg}"
        )

    def add_default_env(self, key: str, value: str):
        self.__call__()
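DeprecatedExecutable fails loudly on first use, so a package that calls make without depending on gmake gets an actionable message; for example:

make = DeprecatedExecutable("mypkg", "make", "gmake")
make("-j4")  # raises UndeclaredDependencyError:
             # "mypkg is using make without declaring a dependency on gmake"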
def clean_environment():
    # Stuff in here sanitizes the build environment to eliminate
    # anything the user has set that may interfere. We apply it immediately
@@ -209,11 +301,13 @@ def clean_environment():
    env.unset("CPLUS_INCLUDE_PATH")
    env.unset("OBJC_INCLUDE_PATH")

    # prevent configure scripts from sourcing variables from config site file (AC_SITE_LOAD).
    env.set("CONFIG_SITE", os.devnull)
    env.unset("CMAKE_PREFIX_PATH")

    env.unset("PYTHONPATH")
    env.unset("R_HOME")
    env.unset("R_ENVIRON")

    env.unset("LUA_PATH")
    env.unset("LUA_CPATH")

@@ -621,10 +715,9 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
    module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
    module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

    # TODO: make these build deps that can be installed if not found.
    module.make = MakeExecutable("make", jobs)
    module.gmake = MakeExecutable("gmake", jobs)
    module.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
    module.make = DeprecatedExecutable(pkg.name, "make", "gmake")
    module.gmake = DeprecatedExecutable(pkg.name, "gmake", "gmake")
    module.ninja = DeprecatedExecutable(pkg.name, "ninja", "ninja")
    # TODO: johnwparent: add package or builder support to define these build tools
    # for now there is no entrypoint for builders to define these on their
    # own
@@ -6,7 +6,9 @@
import llnl.util.filesystem as fs

import spack.directives
import spack.spec
import spack.util.executable
import spack.util.prefix

from .autotools import AutotoolsBuilder, AutotoolsPackage

@@ -17,19 +19,18 @@ class AspellBuilder(AutotoolsBuilder):
    to the Aspell extensions.
    """

    def configure(self, pkg, spec, prefix):
    def configure(
        self,
        pkg: "AspellDictPackage",  # type: ignore[override]
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ):
        aspell = spec["aspell"].prefix.bin.aspell
        prezip = spec["aspell"].prefix.bin.prezip
        destdir = prefix

        sh = spack.util.executable.which("sh")
        sh(
            "./configure",
            "--vars",
            "ASPELL={0}".format(aspell),
            "PREZIP={0}".format(prezip),
            "DESTDIR={0}".format(destdir),
        )
        sh = spack.util.executable.Executable("/bin/sh")
        sh("./configure", "--vars", f"ASPELL={aspell}", f"PREZIP={prezip}", f"DESTDIR={destdir}")


# Aspell dictionaries install their bits into their prefix.lib
@@ -2,7 +2,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
import stat
import subprocess
from typing import Callable, List, Optional, Set, Tuple, Union
@@ -356,6 +355,13 @@ def _do_patch_libtool_configure(self) -> None:
        )
        # Support Libtool 2.4.2 and older:
        x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')
        # Configure scripts generated with libtool < 2.5.4 have a faulty test for the
        # -single_module linker flag. A deprecation warning makes it think the default is
        # -multi_module, triggering it to use problematic linker flags (such as ld -r). The
        # linker default is `-single_module` from (ancient) macOS 10.4, so override by setting
        # `lt_cv_apple_cc_single_mod=yes`. See the fix in libtool commit
        # 82f7f52123e4e7e50721049f7fa6f9b870e09c9d.
        x.filter("lt_cv_apple_cc_single_mod=no", "lt_cv_apple_cc_single_mod=yes", string=True)

    @spack.phase_callbacks.run_after("configure")
    def _do_patch_libtool(self) -> None:
@@ -527,7 +533,7 @@ def build_directory(self) -> str:
        return build_dir

    @spack.phase_callbacks.run_before("autoreconf")
    def delete_configure_to_force_update(self) -> None:
    def _delete_configure_to_force_update(self) -> None:
        if self.force_autoreconf:
            fs.force_remove(self.configure_abs_path)

@@ -540,7 +546,7 @@ def autoreconf_search_path_args(self) -> List[str]:
        return _autoreconf_search_path_args(self.spec)

    @spack.phase_callbacks.run_after("autoreconf")
    def set_configure_or_die(self) -> None:
    def _set_configure_or_die(self) -> None:
        """Ensure the presence of a "configure" script, or raise. If the "configure"
        is found, a module level attribute is set.

@@ -564,10 +570,7 @@ def configure_args(self) -> List[str]:
        return []

    def autoreconf(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Not needed usually, configure should be already there"""

@@ -596,10 +599,7 @@ def autoreconf(
        self.pkg.module.autoreconf(*autoreconf_args)

    def configure(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run "configure", with the arguments specified by the builder and an
        appropriately set prefix.
@@ -612,10 +612,7 @@ def configure(
        pkg.module.configure(*options)

    def build(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run "make" on the build targets specified by the builder."""
        # See https://autotools.io/automake/silent.html
@@ -625,10 +622,7 @@ def build(
        pkg.module.make(*params)

    def install(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
        self, pkg: AutotoolsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run "make" on the install targets specified by the builder."""
        with fs.working_dir(self.build_directory):
@@ -825,7 +819,7 @@ def installcheck(self) -> None:
        self.pkg._if_make_target_execute("installcheck")

    @spack.phase_callbacks.run_after("install")
    def remove_libtool_archives(self) -> None:
    def _remove_libtool_archives(self) -> None:
        """Remove all .la files in prefix sub-folders if the package sets
        ``install_libtool_archives`` to be False.
        """
@@ -10,6 +10,8 @@
import llnl.util.tty as tty

import spack.phase_callbacks
import spack.spec
import spack.util.prefix

from .cmake import CMakeBuilder, CMakePackage

@@ -293,12 +295,26 @@ def initconfig_hardware_entries(self):
            entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
            entries.append(cmake_cache_string("GPU_TARGETS", arch_str))

            if spec.satisfies("%gcc"):
                entries.append(
                    cmake_cache_string(
                        "CMAKE_HIP_FLAGS", f"--gcc-toolchain={self.pkg.compiler.prefix}"
                    )
                )

        return entries

    def std_initconfig_entries(self):
        cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
        cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")

        complete_rpath_list = ";".join(
            [
                self.pkg.spec.prefix.lib,
                self.pkg.spec.prefix.lib64,
                *os.environ.get("SPACK_COMPILER_EXTRA_RPATHS", "").split(":"),
                *os.environ.get("SPACK_COMPILER_IMPLICIT_RPATHS", "").split(":"),
            ]
        )
        return [
            "#------------------{0}".format("-" * 60),
            "# !!!! This is a generated file, edit at own risk !!!!",
@@ -307,6 +323,8 @@ def std_initconfig_entries(self):
            "#------------------{0}\n".format("-" * 60),
            cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
            cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
            cmake_cache_string("CMAKE_BUILD_RPATH", complete_rpath_list),
            cmake_cache_string("CMAKE_INSTALL_RPATH", complete_rpath_list),
            self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
        ]

@@ -314,7 +332,9 @@ def initconfig_package_entries(self):
        """This method is to be overwritten by the package"""
        return []

    def initconfig(self, pkg, spec, prefix):
    def initconfig(
        self, pkg: "CachedCMakePackage", spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        cache_entries = (
            self.std_initconfig_entries()
            + self.initconfig_compiler_entries()
@@ -7,6 +7,8 @@
import spack.builder
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, depends_on
from spack.multimethod import when

@@ -81,12 +83,16 @@ def check_args(self):
    def setup_build_environment(self, env):
        env.set("CARGO_HOME", self.stage.path)

    def build(self, pkg, spec, prefix):
    def build(
        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Runs ``cargo install`` in the source directory"""
        with fs.working_dir(self.build_directory):
            pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)

    def install(self, pkg, spec, prefix):
    def install(
        self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Copy build files into package prefix."""
        with fs.working_dir(self.build_directory):
            fs.install_tree("out", prefix)
@@ -454,16 +454,21 @@ def cmake_args(self) -> List[str]:
        return []

    def cmake(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Runs ``cmake`` in the build directory"""

        # skip cmake phase if it is an incremental develop build
        # These are the files that will re-run CMake that are generated from a successful
        # configure step
        primary_generator = _extract_primary_generator(self.generator)
        if primary_generator == "Unix Makefiles":
            configure_artifact = "Makefile"
        elif primary_generator == "Ninja":
            configure_artifact = "ninja.build"

        if spec.is_develop and os.path.isfile(
            os.path.join(self.build_directory, "CMakeCache.txt")
            os.path.join(self.build_directory, configure_artifact)
        ):
            return

@@ -474,10 +479,7 @@ def cmake(
        pkg.module.cmake(*options)

    def build(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Make the build targets"""
        with fs.working_dir(self.build_directory):
@@ -488,10 +490,6 @@ def build(
            pkg.module.ninja(*self.build_targets)

    def install(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
        self, pkg: CMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Make the install targets"""
        with fs.working_dir(self.build_directory):
@@ -7,6 +7,8 @@
import spack.directives
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix

from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests

@@ -48,3 +50,8 @@ class GenericBuilder(BuilderWithDefaults):

    # unconditionally perform any post-install phase tests
    spack.phase_callbacks.run_after("install")(execute_install_time_tests)

    def install(
        self, pkg: Package, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        raise NotImplementedError
@@ -7,7 +7,9 @@
import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, extends
import spack.spec
import spack.util.prefix
from spack.directives import build_system, depends_on
from spack.multimethod import when

from ._checks import BuilderWithDefaults, execute_install_time_tests
@@ -26,9 +28,7 @@ class GoPackage(spack.package_base.PackageBase):
    build_system("go")

    with when("build_system=go"):
        # TODO: this seems like it should be depends_on, see
        # setup_dependent_build_environment in go for why I kept it like this
        extends("go@1.14:", type="build")
        depends_on("go", type="build")


@spack.builder.builder("go")
@@ -71,6 +71,7 @@ class GoBuilder(BuilderWithDefaults):
    def setup_build_environment(self, env):
        env.set("GO111MODULE", "on")
        env.set("GOTOOLCHAIN", "local")
        env.set("GOPATH", fs.join_path(self.pkg.stage.path, "go"))

    @property
    def build_directory(self):
@@ -81,19 +82,31 @@ def build_directory(self):
    def build_args(self):
        """Arguments for ``go build``."""
        # Pass ldflags -s = --strip-all and -w = --no-warnings by default
        return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
        return [
            "-p",
            str(self.pkg.module.make_jobs),
            "-modcacherw",
            "-ldflags",
            "-s -w",
            "-o",
            f"{self.pkg.name}",
        ]

    @property
    def check_args(self):
        """Argument for ``go test`` during check phase"""
        return []

    def build(self, pkg, spec, prefix):
    def build(
        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Runs ``go build`` in the source directory"""
        with fs.working_dir(self.build_directory):
            pkg.module.go("build", *self.build_args)

    def install(self, pkg, spec, prefix):
    def install(
        self, pkg: GoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Install built binaries into prefix bin."""
        with fs.working_dir(self.build_directory):
            fs.mkdirp(prefix.bin)
@@ -7,7 +7,9 @@

import spack.builder
import spack.package_base
import spack.spec
import spack.util.executable
import spack.util.prefix
from spack.directives import build_system, depends_on, extends
from spack.multimethod import when

@@ -55,7 +57,9 @@ class LuaBuilder(spack.builder.Builder):
    #: Names associated with package attributes in the old build-system format
    legacy_attributes = ()

    def unpack(self, pkg, spec, prefix):
    def unpack(
        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        if os.path.splitext(pkg.stage.archive_file)[1] == ".rock":
            directory = pkg.luarocks("unpack", pkg.stage.archive_file, output=str)
            dirlines = directory.split("\n")
@@ -66,15 +70,16 @@ def unpack(self, pkg, spec, prefix):
    def _generate_tree_line(name, prefix):
        return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)

    def generate_luarocks_config(self, pkg, spec, prefix):
    def generate_luarocks_config(
        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        spec = self.pkg.spec
        table_entries = []
        for d in spec.traverse(deptype=("build", "run")):
            if d.package.extends(self.pkg.extendee_spec):
                table_entries.append(self._generate_tree_line(d.name, d.prefix))

        path = self._luarocks_config_path()
        with open(path, "w", encoding="utf-8") as config:
        with open(self._luarocks_config_path(), "w", encoding="utf-8") as config:
            config.write(
                """
                deps_mode="all"
@@ -85,23 +90,26 @@ def generate_luarocks_config(self, pkg, spec, prefix):
                    "\n".join(table_entries)
                )
            )
        return path

    def preprocess(self, pkg, spec, prefix):
    def preprocess(
        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Override this to preprocess source before building with luarocks"""
        pass

    def luarocks_args(self):
        return []

    def install(self, pkg, spec, prefix):
    def install(
        self, pkg: LuaPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        rock = "."
        specs = find(".", "*.rockspec", recursive=False)
        if specs:
            rock = specs[0]
        rocks_args = self.luarocks_args()
        rocks_args.append(rock)
        self.pkg.luarocks("--tree=" + prefix, "make", *rocks_args)
        pkg.luarocks("--tree=" + prefix, "make", *rocks_args)

    def _luarocks_config_path(self):
        return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
@@ -98,29 +98,20 @@ def build_directory(self) -> str:
        return self.pkg.stage.source_path

    def edit(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Edit the Makefile before calling make. The default is a no-op."""
        pass

    def build(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run "make" on the build targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.make(*self.build_targets)

    def install(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
        self, pkg: MakefilePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run "make" on the install targets specified by the builder."""
        with fs.working_dir(self.build_directory):
@@ -5,6 +5,8 @@

import spack.builder
import spack.package_base
import spack.spec
import spack.util.prefix
from spack.directives import build_system, depends_on
from spack.multimethod import when
from spack.util.executable import which
@@ -58,16 +60,20 @@ def build_args(self):
        """List of args to pass to build phase."""
        return []

    def build(self, pkg, spec, prefix):
    def build(
        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Compile code and package into a JAR file."""
        with fs.working_dir(self.build_directory):
            mvn = which("mvn")
            mvn = which("mvn", required=True)
            if self.pkg.run_tests:
                mvn("verify", *self.build_args())
            else:
                mvn("package", "-DskipTests", *self.build_args())

    def install(self, pkg, spec, prefix):
    def install(
        self, pkg: MavenPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Copy to installation prefix."""
        with fs.working_dir(self.build_directory):
            fs.install_tree(".", prefix)
@@ -188,10 +188,7 @@ def meson_args(self) -> List[str]:
        return []

    def meson(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run ``meson`` in the build directory"""
        options = []
@@ -204,10 +201,7 @@ def meson(
        pkg.module.meson(*options)

    def build(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Make the build targets"""
        options = ["-v"]
@@ -216,10 +210,7 @@ def build(
        pkg.module.ninja(*options)

    def install(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
        self, pkg: MesonPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Make the install targets"""
        with fs.working_dir(self.build_directory):
@@ -7,6 +7,8 @@

import spack.builder
import spack.package_base
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts

from ._checks import BuilderWithDefaults
@@ -99,7 +101,9 @@ def msbuild_install_args(self):
        as `msbuild_args` by default."""
        return self.msbuild_args()

    def build(self, pkg, spec, prefix):
    def build(
        self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run "msbuild" on the build targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.msbuild(
@@ -108,7 +112,9 @@ def build(self, pkg, spec, prefix):
                self.define_targets(*self.build_targets),
            )

    def install(self, pkg, spec, prefix):
    def install(
        self, pkg: MSBuildPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run "msbuild" on the install targets specified by the builder.
        This is INSTALL by default"""
        with fs.working_dir(self.build_directory):
@@ -7,6 +7,8 @@

import spack.builder
import spack.package_base
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts

from ._checks import BuilderWithDefaults
@@ -123,7 +125,9 @@ def nmake_install_args(self):
        Individual packages should override to specify NMake args to command line"""
        return []

    def build(self, pkg, spec, prefix):
    def build(
        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run "nmake" on the build targets specified by the builder."""
        opts = self.std_nmake_args
        opts += self.nmake_args()
@@ -132,7 +136,9 @@ def build(self, pkg, spec, prefix):
        with fs.working_dir(self.build_directory):
            pkg.module.nmake(*opts, *self.build_targets, ignore_quotes=self.ignore_quotes)

    def install(self, pkg, spec, prefix):
    def install(
        self, pkg: NMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run "nmake" on the install targets specified by the builder.
        This is INSTALL by default"""
        opts = self.std_nmake_args
@@ -3,6 +3,8 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.builder
import spack.package_base
import spack.spec
import spack.util.prefix
from spack.directives import build_system, extends
from spack.multimethod import when

@@ -42,7 +44,9 @@ class OctaveBuilder(BuilderWithDefaults):
    #: Names associated with package attributes in the old build-system format
    legacy_attributes = ()

    def install(self, pkg, spec, prefix):
    def install(
        self, pkg: OctavePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Install the package from the archive file"""
        pkg.module.octave(
            "--quiet",
@@ -10,8 +10,11 @@
import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, extends
import spack.spec
import spack.util.prefix
from spack.directives import build_system, depends_on, extends
from spack.install_test import SkipTest, test_part
from spack.multimethod import when
from spack.util.executable import Executable

from ._checks import BuilderWithDefaults, execute_build_time_tests
@@ -28,7 +31,9 @@ class PerlPackage(spack.package_base.PackageBase):

    build_system("perl")

    extends("perl", when="build_system=perl")
    with when("build_system=perl"):
        extends("perl")
        depends_on("gmake", type="build")

    @property
    @memoized
@@ -146,7 +151,9 @@ def configure_args(self):
        """
        return []

    def configure(self, pkg, spec, prefix):
    def configure(
        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run Makefile.PL or Build.PL with arguments consisting of
        an appropriate installation base directory followed by the
        list returned by :py:meth:`~.PerlBuilder.configure_args`.
@@ -170,7 +177,9 @@ def fix_shebang(self):
        repl = "#!/usr/bin/env perl"
        filter_file(pattern, repl, "Build", backup=False)

    def build(self, pkg, spec, prefix):
    def build(
        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Builds a Perl package."""
        self.build_executable()

@@ -181,6 +190,8 @@ def check(self):
        """Runs built-in tests of a Perl package."""
        self.build_executable("test")

    def install(self, pkg, spec, prefix):
    def install(
        self, pkg: PerlPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Installs a Perl package."""
        self.build_executable("install")
@@ -28,6 +28,7 @@
import spack.repo
import spack.spec
import spack.store
import spack.util.prefix
from spack.directives import build_system, depends_on, extends
from spack.error import NoHeadersError, NoLibrariesError
from spack.install_test import test_part
@@ -263,16 +264,17 @@ def update_external_dependencies(self, extendee_spec=None):
        # Ensure architecture information is present
        if not python.architecture:
            host_platform = spack.platforms.host()
            host_os = host_platform.operating_system("default_os")
            host_target = host_platform.target("default_target")
            host_os = host_platform.default_operating_system()
            host_target = host_platform.default_target()
            python.architecture = spack.spec.ArchSpec(
                (str(host_platform), str(host_os), str(host_target))
            )
        else:
            if not python.architecture.platform:
                python.architecture.platform = spack.platforms.host()
            platform = spack.platforms.by_name(python.architecture.platform)
            if not python.architecture.os:
                python.architecture.os = "default_os"
                python.architecture.os = platform.default_operating_system()
            if not python.architecture.target:
                python.architecture.target = archspec.cpu.host().family.name

@@ -6,6 +6,8 @@
import spack.builder
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, depends_on

from ._checks import BuilderWithDefaults, execute_build_time_tests
@@ -27,6 +29,7 @@ class QMakePackage(spack.package_base.PackageBase):
    build_system("qmake")

    depends_on("qmake", type="build", when="build_system=qmake")
    depends_on("gmake", type="build")


@spack.builder.builder("qmake")
@@ -61,17 +64,23 @@ def qmake_args(self):
        """List of arguments passed to qmake."""
        return []

    def qmake(self, pkg, spec, prefix):
    def qmake(
        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Run ``qmake`` to configure the project and generate a Makefile."""
        with working_dir(self.build_directory):
            pkg.module.qmake(*self.qmake_args())

    def build(self, pkg, spec, prefix):
    def build(
        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Make the build targets"""
        with working_dir(self.build_directory):
            pkg.module.make()

    def install(self, pkg, spec, prefix):
    def install(
        self, pkg: QMakePackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Make the install targets"""
        with working_dir(self.build_directory):
            pkg.module.make("install")
@@ -94,7 +94,7 @@ def list_url(cls):
        if cls.cran:
            return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"

    @property
    def git(self):
        if self.bioc:
            return f"https://git.bioconductor.org/packages/{self.bioc}"
    @lang.classproperty
    def git(cls):
        if cls.bioc:
            return f"https://git.bioconductor.org/packages/{cls.bioc}"
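lang.classproperty makes git resolvable on the class itself, without an instance; a minimal re-implementation for illustration (Spack's own version lives in llnl.util.lang):

class classproperty:
    # Non-data descriptor: like @property, but computed from the owning class.
    def __init__(self, callback):
        self.callback = callback

    def __get__(self, instance, owner):
        return self.callback(owner)

class RFoo:
    bioc = "limma"

    @classproperty
    def git(cls):
        return f"https://git.bioconductor.org/packages/{cls.bioc}"

print(RFoo.git)  # https://git.bioconductor.org/packages/limma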
@@ -9,6 +9,8 @@
import llnl.util.tty as tty

import spack.builder
import spack.spec
import spack.util.prefix
from spack.build_environment import SPACK_NO_PARALLEL_MAKE
from spack.config import determine_number_of_jobs
from spack.directives import build_system, extends, maintainers
@@ -74,18 +76,22 @@ def build_directory(self):
        ret = os.path.join(ret, self.subdirectory)
        return ret

    def install(self, pkg, spec, prefix):
    def install(
        self, pkg: RacketPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
    ) -> None:
        """Install everything from build directory."""
        raco = Executable("raco")
        with fs.working_dir(self.build_directory):
            parallel = self.pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
            parallel = pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
            name = pkg.racket_name
            assert name is not None, "Racket package name is not set"
            args = [
                "pkg",
                "install",
                "-t",
                "dir",
                "-n",
                self.pkg.racket_name,
                name,
                "--deps",
                "fail",
                "--ignore-implies",
@@ -101,8 +107,7 @@ def install(self, pkg, spec, prefix):
            except ProcessError:
                args.insert(-2, "--skip-installed")
                raco(*args)
                msg = (
                    "Racket package {0} was already installed, uninstalling via "
                tty.warn(
                    f"Racket package {name} was already installed, uninstalling via "
                    "Spack may make someone unhappy!"
                )
                tty.warn(msg.format(self.pkg.racket_name))
@@ -140,7 +140,7 @@ class ROCmPackage(PackageBase):
         when="+rocm",
     )

-    depends_on("llvm-amdgpu", when="+rocm")
+    depends_on("llvm-amdgpu", type="build", when="+rocm")
     depends_on("hsa-rocr-dev", when="+rocm")
     depends_on("hip +rocm", when="+rocm")
@@ -5,6 +5,8 @@

 import spack.builder
 import spack.package_base
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, extends, maintainers

 from ._checks import BuilderWithDefaults

@@ -42,7 +44,9 @@ class RubyBuilder(BuilderWithDefaults):
     #: Names associated with package attributes in the old build-system format
     legacy_attributes = ()

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Build a Ruby gem."""

         # ruby-rake provides both rake.gemspec and Rakefile, but only

@@ -58,7 +62,9 @@ def build(self, pkg, spec, prefix):
         # Some Ruby packages only ship `*.gem` files, so nothing to build
         pass

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: RubyPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install a Ruby gem.

         The ruby package sets ``GEM_HOME`` to tell gem where to install to."""
@@ -4,6 +4,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on

 from ._checks import BuilderWithDefaults, execute_build_time_tests

@@ -59,7 +61,9 @@ def build_args(self, spec, prefix):
         """Arguments to pass to build."""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Build the package."""
         pkg.module.scons(*self.build_args(spec, prefix))

@@ -67,7 +71,9 @@ def install_args(self, spec, prefix):
         """Arguments to pass to install."""
         return []

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: SConsPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install the package."""
         pkg.module.scons("install", *self.install_args(spec, prefix))
@@ -11,6 +11,8 @@
 import spack.install_test
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
 from spack.util.executable import Executable

@@ -41,6 +43,7 @@ class SIPPackage(spack.package_base.PackageBase):
     with when("build_system=sip"):
         extends("python", type=("build", "link", "run"))
         depends_on("py-sip", type="build")
+        depends_on("gmake", type="build")

     @property
     def import_modules(self):

@@ -130,7 +133,9 @@ class SIPBuilder(BuilderWithDefaults):

     build_directory = "build"

-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Configure the package."""

         # https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html

@@ -148,7 +153,9 @@ def configure_args(self):
         """Arguments to pass to configure."""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Build the package."""
         args = self.build_args()

@@ -159,7 +166,9 @@ def build_args(self):
         """Arguments to pass to build."""
         return []

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: SIPPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Install the package."""
         args = self.install_args()
@@ -6,6 +6,8 @@
 import spack.builder
 import spack.package_base
 import spack.phase_callbacks
+import spack.spec
+import spack.util.prefix
 from spack.directives import build_system, depends_on

 from ._checks import BuilderWithDefaults, execute_build_time_tests, execute_install_time_tests

@@ -97,7 +99,9 @@ def waf(self, *args, **kwargs):
         with working_dir(self.build_directory):
             self.python("waf", "-j{0}".format(jobs), *args, **kwargs)

-    def configure(self, pkg, spec, prefix):
+    def configure(
+        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Configures the project."""
         args = ["--prefix={0}".format(self.pkg.prefix)]
         args += self.configure_args()

@@ -108,7 +112,9 @@ def configure_args(self):
         """Arguments to pass to configure."""
         return []

-    def build(self, pkg, spec, prefix):
+    def build(
+        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Executes the build."""
         args = self.build_args()

@@ -118,7 +124,9 @@ def build_args(self):
         """Arguments to pass to build."""
         return []

-    def install(self, pkg, spec, prefix):
+    def install(
+        self, pkg: WafPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix
+    ) -> None:
         """Installs the targets on the system."""
         args = self.install_args()
@@ -14,7 +14,6 @@
 import zipfile
 from collections import namedtuple
 from typing import Callable, Dict, List, Set
-from urllib.error import HTTPError, URLError
 from urllib.request import HTTPHandler, Request, build_opener

 import llnl.util.filesystem as fs

@@ -472,12 +471,9 @@ def generate_pipeline(env: ev.Environment, args) -> None:
     # Use all unpruned specs to populate the build group for this set
     cdash_config = cfg.get("cdash")
     if options.cdash_handler and options.cdash_handler.auth_token:
-        try:
-            options.cdash_handler.populate_buildgroup(
-                [options.cdash_handler.build_name(s) for s in pipeline_specs]
-            )
-        except (SpackError, HTTPError, URLError, TimeoutError) as err:
-            tty.warn(f"Problem populating buildgroup: {err}")
+        options.cdash_handler.populate_buildgroup(
+            [options.cdash_handler.build_name(s) for s in pipeline_specs]
+        )
     elif cdash_config:
         # warn only if there was actually a CDash configuration.
         tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -1,23 +1,21 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import codecs
 import copy
 import json
 import os
 import re
-import ssl
 import sys
 import time
 from collections import deque
 from enum import Enum
 from typing import Dict, Generator, List, Optional, Set, Tuple
 from urllib.parse import quote, urlencode, urlparse
-from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
+from urllib.request import Request

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.lang import Singleton, memoized
+from llnl.util.lang import memoized

@@ -35,32 +33,11 @@
 from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
 from spack.reporters.cdash import build_stamp as cdash_build_stamp


-def _urlopen():
-    error_handler = web_util.SpackHTTPDefaultErrorHandler()
-
-    # One opener with HTTPS ssl enabled
-    with_ssl = build_opener(
-        HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
-    )
-
-    # One opener with HTTPS ssl disabled
-    without_ssl = build_opener(
-        HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
-    )
-
-    # And dynamically dispatch based on the config:verify_ssl.
-    def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
-        opener = with_ssl if verify_ssl else without_ssl
-        timeout = timeout or cfg.get("config:connect_timeout", 1)
-        return opener.open(fullurl, data, timeout)
-
-    return dispatch_open
-
-
 IS_WINDOWS = sys.platform == "win32"
 SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
-_dyn_mapping_urlopener = Singleton(_urlopen)
+
+# this exists purely for testing purposes
+_urlopen = web_util.urlopen


 def copy_files_to_artifacts(src, artifacts_dir):

@@ -279,26 +256,25 @@ def copy_test_results(self, source, dest):
         reports = fs.join_path(source, "*_Test*.xml")
         copy_files_to_artifacts(reports, dest)

-    def create_buildgroup(self, opener, headers, url, group_name, group_type):
+    def create_buildgroup(self, headers, url, group_name, group_type):
         data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}

         enc_data = json.dumps(data).encode("utf-8")

         request = Request(url, data=enc_data, headers=headers)

-        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
-        response_code = response.getcode()
-
-        if response_code not in [200, 201]:
-            msg = f"Creating buildgroup failed (response code = {response_code})"
-            tty.warn(msg)
+        try:
+            response_text = _urlopen(request, timeout=SPACK_CDASH_TIMEOUT).read()
+        except OSError as e:
+            tty.warn(f"Failed to create CDash buildgroup: {e}")
             return None

-        response_text = response.read()
-        response_json = json.loads(response_text)
-        build_group_id = response_json["id"]
-
-        return build_group_id
+        try:
+            response_json = json.loads(response_text)
+            return response_json["id"]
+        except (json.JSONDecodeError, KeyError) as e:
+            tty.warn(f"Failed to parse CDash response: {e}")
+            return None

     def populate_buildgroup(self, job_names):
         url = f"{self.url}/api/v1/buildgroup.php"

@@ -308,16 +284,11 @@ def populate_buildgroup(self, job_names):
             "Content-Type": "application/json",
         }

-        opener = build_opener(HTTPHandler)
-
-        parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
-        group_id = self.create_buildgroup(
-            opener, headers, url, f"Latest {self.build_group}", "Latest"
-        )
+        parent_group_id = self.create_buildgroup(headers, url, self.build_group, "Daily")
+        group_id = self.create_buildgroup(headers, url, f"Latest {self.build_group}", "Latest")

         if not parent_group_id or not group_id:
-            msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
-            tty.warn(msg)
+            tty.warn(f"Failed to create or retrieve buildgroups for {self.build_group}")
             return

         data = {

@@ -329,15 +300,12 @@ def populate_buildgroup(self, job_names):
         enc_data = json.dumps(data).encode("utf-8")

-        request = Request(url, data=enc_data, headers=headers)
-        request.get_method = lambda: "PUT"
+        request = Request(url, data=enc_data, headers=headers, method="PUT")

-        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
-        response_code = response.getcode()
-
-        if response_code != 200:
-            msg = f"Error response code ({response_code}) in populate_buildgroup"
-            tty.warn(msg)
+        try:
+            _urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
+        except OSError as e:
+            tty.warn(f"Failed to populate CDash buildgroup: {e}")

     def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):
         """Explicitly report skipping testing of a spec (e.g., it's CI

@@ -735,9 +703,6 @@ def _apply_section(dest, src):
             for value in header.values():
                 value = os.path.expandvars(value)

-        verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
-        timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))
-
         required = mapping.get("require", [])
         allowed = mapping.get("allow", [])
         ignored = mapping.get("ignore", [])

@@ -771,19 +736,15 @@ def job_query(job):
                 endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
             )
             try:
-                response = _dyn_mapping_urlopener(
-                    request, verify_ssl=verify_ssl, timeout=timeout
-                )
+                response = _urlopen(request)
+                config = json.load(response)
             except Exception as e:
                 # For now just ignore any errors from dynamic mapping and continue
                 # This is still experimental, and failures should not stop CI
                 # from running normally
-                tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
-                tty.warn(f"{e}")
+                tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}: {e}")
                 continue

-            config = json.load(codecs.getreader("utf-8")(response))
-
             # Strip ignore keys
             if ignored:
                 for key in ignored:
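One small but notable cleanup above: instead of monkey-patching `request.get_method`, the HTTP verb is passed directly to `urllib.request.Request`. A standalone sketch of that pattern, using only the standard library (URL and payload are made up for illustration):

```python
import json
from urllib.request import Request, urlopen

payload = json.dumps({"project": "demo"}).encode("utf-8")  # hypothetical body
request = Request(
    "https://cdash.example.org/api/v1/buildgroup.php",  # hypothetical endpoint
    data=payload,
    headers={"Content-Type": "application/json"},
    method="PUT",  # replaces the old `request.get_method = lambda: "PUT"` patch
)
# urlopen(request, timeout=10) would submit the PUT; omitted here to stay side-effect free
```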
@@ -202,7 +202,7 @@ def _concretize_spec_pairs(
     # Special case for concretizing a single spec
     if len(to_concretize) == 1:
         abstract, concrete = to_concretize[0]
-        return [concrete or abstract.concretized(tests=tests)]
+        return [concrete or spack.concretize.concretize_one(abstract, tests=tests)]

     # Special case if every spec is either concrete or has an abstract hash
     if all(

@@ -254,9 +254,9 @@ def matching_spec_from_env(spec):
     """
     env = ev.active_environment()
     if env:
-        return env.matching_spec(spec) or spec.concretized()
+        return env.matching_spec(spec) or spack.concretize.concretize_one(spec)
     else:
-        return spec.concretized()
+        return spack.concretize.concretize_one(spec)


 def matching_specs_from_env(specs):

@@ -297,7 +297,7 @@ def disambiguate_spec(

 def disambiguate_spec_from_hashes(
     spec: spack.spec.Spec,
-    hashes: List[str],
+    hashes: Optional[List[str]],
     local: bool = False,
     installed: Union[bool, InstallRecordStatus] = True,
     first: bool = False,
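The repeated call-site edits above replace the old `Spec.concretized()` method with the module-level helper introduced later in this diff; a hedged usage sketch (package names arbitrary):

```python
import spack.concretize
from spack.spec import Spec

# strings and abstract Spec objects are both accepted
concrete = spack.concretize.concretize_one("zlib")
also_concrete = spack.concretize.concretize_one(Spec("cmake"), tests=False)
assert concrete.concrete and also_concrete.concrete
```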
@@ -3,6 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import collections
+import warnings

 import archspec.cpu

@@ -51,10 +52,10 @@ def setup_parser(subparser):
         "-t", "--target", action="store_true", default=False, help="print only the target"
     )
     parts2.add_argument(
-        "-f", "--frontend", action="store_true", default=False, help="print frontend"
+        "-f", "--frontend", action="store_true", default=False, help="print frontend (DEPRECATED)"
     )
     parts2.add_argument(
-        "-b", "--backend", action="store_true", default=False, help="print backend"
+        "-b", "--backend", action="store_true", default=False, help="print backend (DEPRECATED)"
     )

@@ -98,15 +99,14 @@ def arch(parser, args):
         display_targets(archspec.cpu.TARGETS)
         return

-    os_args, target_args = "default_os", "default_target"
     if args.frontend:
-        os_args, target_args = "frontend", "frontend"
+        warnings.warn("the argument --frontend is deprecated, and will be removed in Spack v1.0")
     elif args.backend:
-        os_args, target_args = "backend", "backend"
+        warnings.warn("the argument --backend is deprecated, and will be removed in Spack v1.0")

     host_platform = spack.platforms.host()
-    host_os = host_platform.operating_system(os_args)
-    host_target = host_platform.target(target_args)
+    host_os = host_platform.default_operating_system()
+    host_target = host_platform.default_target()
     if args.family:
         host_target = host_target.family
     elif args.generic:
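The new accessors used throughout these hunks can be exercised directly on the host platform object; a minimal sketch grounded in the lines above:

```python
import spack.platforms

host_platform = spack.platforms.host()
host_os = host_platform.default_operating_system()  # was: operating_system("default_os")
host_target = host_platform.default_target()        # was: target("default_target")
print(f"{host_platform}-{host_os}-{host_target}")
```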
@@ -1,7 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os.path
+import os
 import shutil
 import sys
 import tempfile

@@ -14,9 +14,9 @@
 import spack.bootstrap
 import spack.bootstrap.config
 import spack.bootstrap.core
+import spack.concretize
 import spack.config
 import spack.mirrors.utils
-import spack.spec
 import spack.stage
 import spack.util.path
 import spack.util.spack_yaml

@@ -397,7 +397,7 @@ def _mirror(args):
         llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
         # Suppress tty from the call below for terser messages
         llnl.util.tty.set_msg_enabled(False)
-        spec = spack.spec.Spec(spec_str).concretized()
+        spec = spack.concretize.concretize_one(spec_str)
         for node in spec.traverse():
             spack.mirrors.utils.create(mirror_dir, [node])
         llnl.util.tty.set_msg_enabled(True)

@@ -436,6 +436,7 @@ def write_metadata(subdir, metadata):
     shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
     shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory)
     instructions += cmd.format("local-binaries", rel_directory)
+    instructions += " % spack buildcache update-index <final-path>/bootstrap_cache\n"
     print(instructions)
@@ -16,6 +16,7 @@
 import spack.binary_distribution as bindist
 import spack.cmd
+import spack.concretize
 import spack.config
 import spack.deptypes as dt
 import spack.environment as ev

@@ -554,8 +555,7 @@ def check_fn(args: argparse.Namespace):
         tty.msg("No specs provided, exiting.")
         return

-    for spec in specs:
-        spec.concretize()
+    specs = [spack.concretize.concretize_one(s) for s in specs]

     # Next see if there are any configured binary mirrors
     configured_mirrors = spack.config.get("mirrors", scope=args.scope)

@@ -623,7 +623,7 @@ def save_specfile_fn(args):
     root = specs[0]

     if not root.concrete:
-        root.concretize()
+        root = spack.concretize.concretize_one(root)

     save_dependency_specfiles(
         root, args.specfile_dir, dependencies=spack.cmd.parse_specs(args.specs)
@@ -4,7 +4,7 @@


 import argparse
-import os.path
+import os
 import textwrap

 from llnl.util.lang import stable_partition

@@ -504,11 +504,22 @@ class ConfigSetAction(argparse.Action):
     """

     def __init__(
-        self, option_strings, dest, const, default=None, required=False, help=None, metavar=None
+        self,
+        option_strings,
+        dest,
+        const,
+        default=None,
+        required=False,
+        help=None,
+        metavar=None,
+        require_environment=False,
     ):
         # save the config option we're supposed to set
         self.config_path = dest

+        # save whether the option requires an active env
+        self.require_environment = require_environment
+
         # destination is translated to a legal python identifier by
         # substituting '_' for ':'.
         dest = dest.replace(":", "_")

@@ -524,6 +535,11 @@ def __init__(
         )

     def __call__(self, parser, namespace, values, option_string):
+        if self.require_environment and not ev.active_environment():
+            raise argparse.ArgumentTypeError(
+                f"argument '{self.option_strings[-1]}' requires an environment"
+            )
+
         # Retrieve the name of the config option and set it to
         # the const from the constructor or a value from the CLI.
         # Note that this is only called if the argument is actually

@@ -545,6 +561,16 @@ def add_concretizer_args(subparser):
     Just substitute ``_`` for ``:``.
     """
     subgroup = subparser.add_argument_group("concretizer arguments")
+    subgroup.add_argument(
+        "-f",
+        "--force",
+        action=ConfigSetAction,
+        require_environment=True,
+        dest="concretizer:force",
+        const=True,
+        default=False,
+        help="allow changes to concretized specs in spack.lock (in an env)",
+    )
     subgroup.add_argument(
         "-U",
         "--fresh",
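The `require_environment` hook above is plain argparse machinery: a custom `Action` can veto its own invocation. A self-contained sketch of the same pattern (names are illustrative, not Spack's):

```python
import argparse


class RequiresFlagAction(argparse.Action):
    """Store a value, but only when --enable was given first (illustrative only)."""

    def __call__(self, parser, namespace, values, option_string=None):
        if not getattr(namespace, "enable", False):
            raise argparse.ArgumentTypeError(
                f"argument '{option_string}' requires --enable"
            )
        setattr(namespace, self.dest, values)


parser = argparse.ArgumentParser()
parser.add_argument("--enable", action="store_true")
parser.add_argument("--level", action=RequiresFlagAction)
print(parser.parse_args(["--enable", "--level", "3"]).level)  # -> "3"
```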
@@ -15,9 +15,6 @@


 def setup_parser(subparser):
-    subparser.add_argument(
-        "-f", "--force", action="store_true", help="re-concretize even if already concretized"
-    )
     subparser.add_argument(
         "--test",
         default=None,

@@ -43,7 +40,7 @@ def concretize(parser, args):
         tests = False

     with env.write_transaction():
-        concretized_specs = env.concretize(force=args.force, tests=tests)
+        concretized_specs = env.concretize(tests=tests)
         if not args.quiet:
             if concretized_specs:
                 tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}:")
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path

 import llnl.util.tty

@@ -86,8 +86,8 @@ def create_db_tarball(args):

 def report(args):
     host_platform = spack.platforms.host()
-    host_os = host_platform.operating_system("frontend")
-    host_target = host_platform.target("frontend")
+    host_os = host_platform.default_operating_system()
+    host_target = host_platform.default_target()
     architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
     print("* **Spack:**", spack.get_version())
     print("* **Python:**", platform.python_version())
@@ -18,6 +18,7 @@
 from llnl.util.symlink import symlink

 import spack.cmd
+import spack.concretize
 import spack.environment as ev
 import spack.installer
 import spack.store

@@ -103,7 +104,7 @@ def deprecate(parser, args):
     )

     if args.install:
-        deprecator = specs[1].concretized()
+        deprecator = spack.concretize.concretize_one(specs[1])
     else:
         deprecator = spack.cmd.disambiguate_spec(specs[1], env, local=True)
@@ -10,6 +10,7 @@
 import spack.build_environment
 import spack.cmd
 import spack.cmd.common.arguments
+import spack.concretize
 import spack.config
 import spack.repo
 from spack.cmd.common import arguments

@@ -113,8 +114,8 @@ def dev_build(self, args):
     source_path = os.path.abspath(source_path)

     # Forces the build to run out of the source directory.
-    spec.constrain("dev_path=%s" % source_path)
-    spec.concretize()
+    spec.constrain(f'dev_path="{source_path}"')
+    spec = spack.concretize.concretize_one(spec)

     if spec.installed:
         tty.error("Already installed in %s" % spec.prefix)
@@ -110,10 +110,7 @@ def external_find(args):
         # Note that KeyboardInterrupt does not subclass Exception
         # (so CTRL-C will terminate the program as expected).
         skip_msg = "Skipping manifest and continuing with other external checks"
-        if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
-            errno.EPERM,
-            errno.EACCES,
-        ]:
+        if isinstance(e, OSError) and e.errno in (errno.EPERM, errno.EACCES):
            # The manifest file does not have sufficient permissions enabled:
            # print a warning and keep going
            tty.warn("Unable to read manifest due to insufficient permissions.", skip_msg)
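The simplification above is safe because `IOError` has been an alias of `OSError` since Python 3.3, so the double `isinstance` check was redundant:

```python
import errno

# IOError and OSError are the same class on Python 3.3+
assert IOError is OSError

e = PermissionError(errno.EACCES, "Permission denied")  # PermissionError subclasses OSError
assert isinstance(e, OSError) and e.errno == errno.EACCES
```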
@@ -54,10 +54,6 @@
     @m{target=target}              specific <target> processor
     @m{arch=platform-os-target}    shortcut for all three above

-  cross-compiling:
-    @m{os=backend} or @m{os=be}      build for compute node (backend)
-    @m{os=frontend} or @m{os=fe}     build for login node (frontend)
-
   dependencies:
     ^dependency [constraints]    specify constraints on dependencies
     ^@K{/hash}                   build with a specific installed
@@ -13,6 +13,7 @@
 from llnl.util import lang, tty

 import spack.cmd
+import spack.concretize
 import spack.config
 import spack.environment as ev
 import spack.paths

@@ -450,7 +451,7 @@ def concrete_specs_from_file(args):
         else:
             s = spack.spec.Spec.from_json(f)

-        concretized = s.concretized()
+        concretized = spack.concretize.concretize_one(s)
         if concretized.dag_hash() != s.dag_hash():
             msg = 'skipped invalid file "{0}". '
             msg += "The file does not contain a concrete spec."
@@ -7,9 +7,9 @@

 from llnl.path import convert_to_posix_path

+import spack.concretize
 import spack.paths
 import spack.util.executable
-from spack.spec import Spec

 description = "generate Windows installer"
 section = "admin"

@@ -65,8 +65,7 @@ def make_installer(parser, args):
     """
     if sys.platform == "win32":
         output_dir = args.output_dir
-        cmake_spec = Spec("cmake")
-        cmake_spec.concretize()
+        cmake_spec = spack.concretize.concretize_one("cmake")
         cmake_path = os.path.join(cmake_spec.prefix, "bin", "cmake.exe")
         cpack_path = os.path.join(cmake_spec.prefix, "bin", "cpack.exe")
         spack_source = args.spack_source
@@ -492,7 +492,7 @@ def extend_with_additional_versions(specs, num_versions):
         mirror_specs = spack.mirrors.utils.get_all_versions(specs)
     else:
         mirror_specs = spack.mirrors.utils.get_matching_versions(specs, num_versions=num_versions)
-    mirror_specs = [x.concretized() for x in mirror_specs]
+    mirror_specs = [spack.concretize.concretize_one(x) for x in mirror_specs]
     return mirror_specs
@@ -5,7 +5,7 @@
 """Implementation details of the ``spack module`` command."""

 import collections
-import os.path
+import os
 import shutil
 import sys
@@ -177,16 +177,15 @@ def test_run(args):
         matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
         if spec and not matching:
             tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
-            """
-            TODO: Need to write out a log message and/or CDASH Testing
-            output that package not installed IF continue to process
-            these issues here.
-
-            if args.log_format:
-                # Proceed with the spec assuming the test process
-                # to ensure report package as skipped (e.g., for CI)
-                specs_to_test.append(spec)
-            """
+            # TODO: Need to write out a log message and/or CDASH Testing
+            # output that package not installed IF continue to process
+            # these issues here.
+
+            # if args.log_format:
+            #     # Proceed with the spec assuming the test process
+            #     # to ensure report package as skipped (e.g., for CI)
+            #     specs_to_test.append(spec)

         specs_to_test.extend(matching)
@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import os.path
+import os
 import shutil

 import llnl.util.tty as tty

@@ -144,7 +144,7 @@ def is_installed(spec):
         record = spack.store.STORE.db.query_local_by_spec_hash(spec.dag_hash())
         return record and record.installed

-    specs = traverse.traverse_nodes(
+    all_specs = traverse.traverse_nodes(
         specs,
         root=False,
         order="breadth",

@@ -155,7 +155,7 @@ def is_installed(spec):
     )

     with spack.store.STORE.db.read_transaction():
-        return [spec for spec in specs if is_installed(spec)]
+        return [spec for spec in all_specs if is_installed(spec)]


 def dependent_environments(
@@ -5,7 +5,7 @@
 import argparse
 import collections
 import io
-import os.path
+import os
 import re
 import sys

@@ -801,17 +801,17 @@ def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]
 def _extract_os_and_target(spec: "spack.spec.Spec"):
     if not spec.architecture:
         host_platform = spack.platforms.host()
-        operating_system = host_platform.operating_system("default_os")
-        target = host_platform.target("default_target")
+        operating_system = host_platform.default_operating_system()
+        target = host_platform.default_target()
     else:
         target = spec.architecture.target
         if not target:
-            target = spack.platforms.host().target("default_target")
+            target = spack.platforms.host().default_target()

         operating_system = spec.os
         if not operating_system:
             host_platform = spack.platforms.host()
-            operating_system = host_platform.operating_system("default_os")
+            operating_system = host_platform.default_operating_system()
     return operating_system, target
@@ -37,13 +37,12 @@ def enable_compiler_existence_check():

 SpecPairInput = Tuple[Spec, Optional[Spec]]
 SpecPair = Tuple[Spec, Spec]
-SpecLike = Union[Spec, str]
 TestsType = Union[bool, Iterable[str]]


-def concretize_specs_together(
-    abstract_specs: Sequence[SpecLike], tests: TestsType = False
-) -> Sequence[Spec]:
+def _concretize_specs_together(
+    abstract_specs: Sequence[Spec], tests: TestsType = False
+) -> List[Spec]:
     """Given a number of specs as input, tries to concretize them together.

     Args:

@@ -51,11 +50,10 @@ def concretize_specs_together(
         tests: list of package names for which to consider tests dependencies. If True, all nodes
             will have test dependencies. If False, test dependencies will be disregarded.
     """
-    import spack.solver.asp
+    from spack.solver.asp import Solver

     allow_deprecated = spack.config.get("config:deprecated", False)
-    solver = spack.solver.asp.Solver()
-    result = solver.solve(abstract_specs, tests=tests, allow_deprecated=allow_deprecated)
+    result = Solver().solve(abstract_specs, tests=tests, allow_deprecated=allow_deprecated)
     return [s.copy() for s in result.specs]

@@ -72,7 +70,7 @@ def concretize_together(
     """
     to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
     abstract_specs = [abstract for abstract, _ in spec_list]
-    concrete_specs = concretize_specs_together(to_concretize, tests=tests)
+    concrete_specs = _concretize_specs_together(to_concretize, tests=tests)
     return list(zip(abstract_specs, concrete_specs))

@@ -90,7 +88,7 @@ def concretize_together_when_possible(
         tests: list of package names for which to consider tests dependencies. If True, all nodes
             will have test dependencies. If False, test dependencies will be disregarded.
     """
-    import spack.solver.asp
+    from spack.solver.asp import Solver

     to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
     old_concrete_to_abstract = {

@@ -98,9 +96,8 @@ def concretize_together_when_possible(
     }

     result_by_user_spec = {}
-    solver = spack.solver.asp.Solver()
     allow_deprecated = spack.config.get("config:deprecated", False)
-    for result in solver.solve_in_rounds(
+    for result in Solver().solve_in_rounds(
         to_concretize, tests=tests, allow_deprecated=allow_deprecated
     ):
         result_by_user_spec.update(result.specs_by_input)

@@ -124,7 +121,7 @@ def concretize_separately(
         tests: list of package names for which to consider tests dependencies. If True, all nodes
             will have test dependencies. If False, test dependencies will be disregarded.
     """
-    import spack.bootstrap
+    from spack.bootstrap import ensure_bootstrap_configuration, ensure_clingo_importable_or_raise

     to_concretize = [abstract for abstract, concrete in spec_list if not concrete]
     args = [

@@ -134,8 +131,8 @@ def concretize_separately(
     ]
     ret = [(i, abstract) for i, abstract in enumerate(to_concretize) if abstract.concrete]
     # Ensure we don't try to bootstrap clingo in parallel
-    with spack.bootstrap.ensure_bootstrap_configuration():
-        spack.bootstrap.ensure_clingo_importable_or_raise()
+    with ensure_bootstrap_configuration():
+        ensure_clingo_importable_or_raise()

     # Ensure all the indexes have been built or updated, since
     # otherwise the processes in the pool may timeout on waiting

@@ -190,10 +187,52 @@ def _concretize_task(packed_arguments: Tuple[int, str, TestsType]) -> Tuple[int,
     index, spec_str, tests = packed_arguments
     with tty.SuppressOutput(msg_enabled=False):
         start = time.time()
-        spec = Spec(spec_str).concretized(tests=tests)
+        spec = concretize_one(Spec(spec_str), tests=tests)
         return index, spec, time.time() - start


+def concretize_one(spec: Union[str, Spec], tests: TestsType = False) -> Spec:
+    """Return a concretized copy of the given spec.
+
+    Args:
+        tests: if False disregard 'test' dependencies, if a list of names activate them for
+            the packages in the list, if True activate 'test' dependencies for all packages.
+    """
+    from spack.solver.asp import Solver, SpecBuilder
+
+    if isinstance(spec, str):
+        spec = Spec(spec)
+    spec = spec.lookup_hash()
+
+    if spec.concrete:
+        return spec.copy()
+
+    for node in spec.traverse():
+        if not node.name:
+            raise spack.error.SpecError(
+                f"Spec {node} has no name; cannot concretize an anonymous spec"
+            )
+
+    allow_deprecated = spack.config.get("config:deprecated", False)
+    result = Solver().solve([spec], tests=tests, allow_deprecated=allow_deprecated)
+
+    # take the best answer
+    opt, i, answer = min(result.answers)
+    name = spec.name
+    # TODO: Consolidate this code with similar code in solve.py
+    if spec.virtual:
+        providers = [s.name for s in answer.values() if s.package.provides(name)]
+        name = providers[0]
+
+    node = SpecBuilder.make_node(pkg=name)
+    assert (
+        node in answer
+    ), f"cannot find {name} in the list of specs {','.join([n.pkg for n in answer.keys()])}"
+
+    concretized = answer[node]
+    return concretized
+
+
 class UnavailableCompilerVersionError(spack.error.SpackError):
     """Raised when there is no available compiler that satisfies a
     compiler spec."""
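Two behaviors of `concretize_one` shown above are worth calling out: it resolves abstract hash references via `lookup_hash()`, and it short-circuits on already-concrete input by returning a copy. A hedged sketch of that contract (package name arbitrary):

```python
import spack.concretize
from spack.spec import Spec

spec = spack.concretize.concretize_one(Spec("zlib"))
# calling it again on a concrete spec just returns a copy, no solve is run
again = spack.concretize.concretize_one(spec)
assert again.concrete and again is not spec
```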
@@ -36,6 +36,8 @@
 import sys
 from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union

+import jsonschema
+
 from llnl.util import filesystem, lang, tty

 import spack.error

@@ -51,6 +53,7 @@
 import spack.schema.definitions
 import spack.schema.develop
 import spack.schema.env
+import spack.schema.env_vars
 import spack.schema.mirrors
 import spack.schema.modules
 import spack.schema.packages

@@ -68,6 +71,7 @@
     "compilers": spack.schema.compilers.schema,
     "concretizer": spack.schema.concretizer.schema,
     "definitions": spack.schema.definitions.schema,
+    "env_vars": spack.schema.env_vars.schema,
     "view": spack.schema.view.schema,
     "develop": spack.schema.develop.schema,
     "mirrors": spack.schema.mirrors.schema,

@@ -1048,8 +1052,6 @@ def validate(
     This leverages the line information (start_mark, end_mark) stored
     on Spack YAML structures.
     """
-    import jsonschema
-
     try:
         spack.schema.Validator(schema).validate(data)
     except jsonschema.ValidationError as e:
@@ -6,6 +6,8 @@
 """
 import warnings

+import jsonschema
+
 import spack.environment as ev
 import spack.schema.env as env
 import spack.util.spack_yaml as syaml

@@ -30,8 +32,6 @@ def validate(configuration_file):
     Returns:
         A sanitized copy of the configuration stored in the input file
     """
-    import jsonschema
-
     with open(configuration_file, encoding="utf-8") as f:
         config = syaml.load(f)
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Manages the details on the images used in the various stages."""
 import json
-import os.path
+import os
 import shlex
 import sys
@@ -9,6 +9,8 @@
 from collections import namedtuple
 from typing import Optional

+import jsonschema
+
 import spack.environment as ev
 import spack.error
 import spack.schema.env

@@ -188,8 +190,6 @@ def paths(self):
     @tengine.context_property
     def manifest(self):
         """The spack.yaml file that should be used in the image"""
-        import jsonschema
-
         # Copy in the part of spack.yaml prescribed in the configuration file
         manifest = copy.deepcopy(self.config)
         manifest.pop("container")
@@ -123,6 +123,15 @@
     "deprecated_for",
 )

+#: File where the database is written
+INDEX_JSON_FILE = "index.json"
+
+# Verifier file to check last modification of the DB
+_INDEX_VERIFIER_FILE = "index_verifier"
+
+# Lockfile for the database
+_LOCK_FILE = "lock"
+

 @llnl.util.lang.memoized
 def _getfqdn():

@@ -260,7 +269,7 @@ class ForbiddenLockError(SpackError):

 class ForbiddenLock:
     def __getattr__(self, name):
-        raise ForbiddenLockError("Cannot access attribute '{0}' of lock".format(name))
+        raise ForbiddenLockError(f"Cannot access attribute '{name}' of lock")

     def __reduce__(self):
         return ForbiddenLock, tuple()

@@ -419,14 +428,25 @@ class FailureTracker:
     the likelihood of collision very low with no cleanup required.
     """

+    #: root directory of the failure tracker
+    dir: pathlib.Path
+
+    #: File for locking particular concrete spec hashes
+    locker: SpecLocker
+
     def __init__(self, root_dir: Union[str, pathlib.Path], default_timeout: Optional[float]):
         #: Ensure a persistent location for dealing with parallel installation
         #: failures (e.g., across near-concurrent processes).
         self.dir = pathlib.Path(root_dir) / _DB_DIRNAME / "failures"
-        self.dir.mkdir(parents=True, exist_ok=True)

         self.locker = SpecLocker(failures_lock_path(root_dir), default_timeout=default_timeout)

+    def _ensure_parent_directories(self) -> None:
+        """Ensure that parent directories of the FailureTracker exist.
+
+        Accesses the filesystem only once, the first time it's called on a given FailureTracker.
+        """
+        self.dir.mkdir(parents=True, exist_ok=True)
+
     def clear(self, spec: "spack.spec.Spec", force: bool = False) -> None:
         """Removes any persistent and cached failure tracking for the spec.

@@ -469,13 +489,18 @@ def clear_all(self) -> None:

         tty.debug("Removing prefix failure tracking files")
         try:
-            for fail_mark in os.listdir(str(self.dir)):
-                try:
-                    (self.dir / fail_mark).unlink()
-                except OSError as exc:
-                    tty.warn(f"Unable to remove failure marking file {fail_mark}: {str(exc)}")
+            marks = os.listdir(str(self.dir))
+        except FileNotFoundError:
+            return  # directory doesn't exist yet
         except OSError as exc:
             tty.warn(f"Unable to remove failure marking files: {str(exc)}")
+            return
+
+        for fail_mark in marks:
+            try:
+                (self.dir / fail_mark).unlink()
+            except OSError as exc:
+                tty.warn(f"Unable to remove failure marking file {fail_mark}: {str(exc)}")

     def mark(self, spec: "spack.spec.Spec") -> lk.Lock:
         """Marks a spec as failing to install.

@@ -483,6 +508,8 @@ def mark(self, spec: "spack.spec.Spec") -> lk.Lock:
         Args:
             spec: spec that failed to install
         """
+        self._ensure_parent_directories()
+
         # Dump the spec to the failure file for (manual) debugging purposes
         path = self._path(spec)
         path.write_text(spec.to_json())

@@ -567,17 +594,13 @@ def __init__(
             Relevant only if the repository is not an upstream.
         """
         self.root = root
-        self.database_directory = os.path.join(self.root, _DB_DIRNAME)
+        self.database_directory = pathlib.Path(self.root) / _DB_DIRNAME
         self.layout = layout

         # Set up layout of database files within the db dir
-        self._index_path = os.path.join(self.database_directory, "index.json")
-        self._verifier_path = os.path.join(self.database_directory, "index_verifier")
-        self._lock_path = os.path.join(self.database_directory, "lock")
-
-        # Create needed directories and files
-        if not is_upstream and not os.path.exists(self.database_directory):
-            fs.mkdirp(self.database_directory)
+        self._index_path = self.database_directory / INDEX_JSON_FILE
+        self._verifier_path = self.database_directory / _INDEX_VERIFIER_FILE
+        self._lock_path = self.database_directory / _LOCK_FILE

         self.is_upstream = is_upstream
         self.last_seen_verifier = ""

@@ -592,14 +615,14 @@ def __init__(

         # initialize rest of state.
         self.db_lock_timeout = lock_cfg.database_timeout
-        tty.debug("DATABASE LOCK TIMEOUT: {0}s".format(str(self.db_lock_timeout)))
+        tty.debug(f"DATABASE LOCK TIMEOUT: {str(self.db_lock_timeout)}s")

         self.lock: Union[ForbiddenLock, lk.Lock]
         if self.is_upstream:
             self.lock = ForbiddenLock()
         else:
             self.lock = lk.Lock(
-                self._lock_path,
+                str(self._lock_path),
                 default_timeout=self.db_lock_timeout,
                 desc="database",
                 enable=lock_cfg.enable,

@@ -616,6 +639,11 @@ def __init__(
         self._write_transaction_impl = lk.WriteTransaction
         self._read_transaction_impl = lk.ReadTransaction

+    def _ensure_parent_directories(self):
+        """Create the parent directory for the DB, if necessary."""
+        if not self.is_upstream:
+            self.database_directory.mkdir(parents=True, exist_ok=True)
+
     def write_transaction(self):
         """Get a write lock context manager for use in a `with` block."""
         return self._write_transaction_impl(self.lock, acquire=self._read, release=self._write)

@@ -630,6 +658,8 @@ def _write_to_file(self, stream):

         This function does not do any locking or transactions.
         """
+        self._ensure_parent_directories()
+
         # map from per-spec hash code to installation record.
         installs = dict(
             (k, v.to_dict(include_fields=self.record_fields)) for k, v in self._data.items()

@@ -759,7 +789,7 @@ def _read_from_file(self, filename):
         Does not do any locking.
         """
         try:
-            with open(filename, "r", encoding="utf-8") as f:
+            with open(str(filename), "r", encoding="utf-8") as f:
                 # In the future we may use a stream of JSON objects, hence `raw_decode` for compat.
                 fdata, _ = JSONDecoder().raw_decode(f.read())
         except Exception as e:

@@ -860,11 +890,13 @@ def reindex(self):
         if self.is_upstream:
             raise UpstreamDatabaseLockingError("Cannot reindex an upstream database")

+        self._ensure_parent_directories()
+
         # Special transaction to avoid recursive reindex calls and to
         # ignore errors if we need to rebuild a corrupt database.
         def _read_suppress_error():
             try:
-                if os.path.isfile(self._index_path):
+                if self._index_path.is_file():
                     self._read_from_file(self._index_path)
             except CorruptDatabaseError as e:
                 tty.warn(f"Reindexing corrupt database, error was: {e}")

@@ -1007,7 +1039,7 @@ def _check_ref_counts(self):
                     % (key, found, expected, self._index_path)
                 )

-    def _write(self, type, value, traceback):
+    def _write(self, type=None, value=None, traceback=None):
         """Write the in-memory database index to its file path.

         This is a helper function called by the WriteTransaction context

@@ -1018,6 +1050,8 @@ def _write(self, type, value, traceback):

         This routine does no locking.
         """
+        self._ensure_parent_directories()
+
         # Do not write if exceptions were raised
         if type is not None:
             # A failure interrupted a transaction, so we should record that

@@ -1026,16 +1060,16 @@ def _write(self, type, value, traceback):
             self._state_is_inconsistent = True
             return

-        temp_file = self._index_path + (".%s.%s.temp" % (_getfqdn(), os.getpid()))
+        temp_file = str(self._index_path) + (".%s.%s.temp" % (_getfqdn(), os.getpid()))

         # Write a temporary database file them move it into place
         try:
             with open(temp_file, "w", encoding="utf-8") as f:
                 self._write_to_file(f)
-            fs.rename(temp_file, self._index_path)
+            fs.rename(temp_file, str(self._index_path))

             if _use_uuid:
-                with open(self._verifier_path, "w", encoding="utf-8") as f:
+                with self._verifier_path.open("w", encoding="utf-8") as f:
                     new_verifier = str(uuid.uuid4())
                     f.write(new_verifier)
                     self.last_seen_verifier = new_verifier

@@ -1048,11 +1082,11 @@ def _write(self, type, value, traceback):

     def _read(self):
         """Re-read Database from the data in the set location. This does no locking."""
-        if os.path.isfile(self._index_path):
+        if self._index_path.is_file():
             current_verifier = ""
             if _use_uuid:
                 try:
-                    with open(self._verifier_path, "r", encoding="utf-8") as f:
+                    with self._verifier_path.open("r", encoding="utf-8") as f:
                         current_verifier = f.read()
                 except BaseException:
                     pass

@@ -1065,7 +1099,7 @@ def _read(self):
             self._state_is_inconsistent = False
             return
         elif self.is_upstream:
-            tty.warn("upstream not found: {0}".format(self._index_path))
+            tty.warn(f"upstream not found: {self._index_path}")

     def _add(
         self,

@@ -1330,7 +1364,7 @@ def deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> N
     def installed_relatives(
         self,
         spec: "spack.spec.Spec",
-        direction: str = "children",
+        direction: tr.DirectionType = "children",
         transitive: bool = True,
         deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
     ) -> Set["spack.spec.Spec"]:

@@ -1681,7 +1715,7 @@ def query(
         )

         results = list(local_results) + list(x for x in upstream_results if x not in local_results)
-        results.sort()
+        results.sort()  # type: ignore[call-overload]
         return results

     def query_one(
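The database hunks move path handling to `pathlib.Path`, keeping `str(...)` wrappers where consumers still expect plain strings (the lock constructor, `fs.rename`). A pure-stdlib illustration of the same idioms (the directory is a throwaway path for demonstration):

```python
import pathlib

db_dir = pathlib.Path("/tmp/spack-demo") / ".spack-db"   # hypothetical location
index = db_dir / "index.json"

db_dir.mkdir(parents=True, exist_ok=True)      # lazy creation, as in _ensure_parent_directories
with index.open("w", encoding="utf-8") as f:   # Path.open replaces open(str(path), ...)
    f.write("{}")
assert index.is_file()                          # replaces os.path.isfile(str(index))
```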
@@ -15,7 +15,6 @@
 import glob
 import itertools
 import os
-import os.path
 import pathlib
 import re
 import sys

@@ -7,7 +7,6 @@
 import collections
 import concurrent.futures
 import os
-import os.path
 import re
 import sys
 import traceback
@@ -32,7 +32,7 @@ class OpenMpi(Package):
 """
 import collections
 import collections.abc
-import os.path
+import os
 import re
 from typing import Any, Callable, List, Optional, Tuple, Type, Union
@@ -8,7 +8,7 @@
 import shutil
 import sys
 from pathlib import Path
-from typing import List, Optional, Tuple
+from typing import Dict, List, Optional, Tuple

 import llnl.util.filesystem as fs
 from llnl.util.symlink import readlink

@@ -17,7 +17,6 @@
 import spack.hash_types as ht
 import spack.projections
 import spack.spec
-import spack.store
 import spack.util.spack_json as sjson
 from spack.error import SpackError

@@ -69,10 +68,9 @@ def specs_from_metadata_dirs(root: str) -> List["spack.spec.Spec"]:


 class DirectoryLayout:
-    """A directory layout is used to associate unique paths with specs.
-    Different installations are going to want different layouts for their
-    install, and they can use this to customize the nesting structure of
-    spack installs. The default layout is:
+    """A directory layout is used to associate unique paths with specs. Different installations are
+    going to want different layouts for their install, and they can use this to customize the
+    nesting structure of spack installs. The default layout is:

     * <install root>/

@@ -82,35 +80,30 @@ class DirectoryLayout:

         * <name>-<version>-<hash>

-    The hash here is a SHA-1 hash for the full DAG plus the build
-    spec.
+    The installation directory projections can be modified with the projections argument."""

-    The installation directory projections can be modified with the
-    projections argument.
-    """
-
-    def __init__(self, root, **kwargs):
+    def __init__(
+        self,
+        root,
+        *,
+        projections: Optional[Dict[str, str]] = None,
+        hash_length: Optional[int] = None,
+    ) -> None:
         self.root = root
         self.check_upstream = True
-        projections = kwargs.get("projections") or default_projections
-        self.projections = dict(
-            (key, projection.lower()) for key, projection in projections.items()
-        )
+        projections = projections or default_projections
+        self.projections = {key: projection.lower() for key, projection in projections.items()}

         # apply hash length as appropriate
-        self.hash_length = kwargs.get("hash_length", None)
+        self.hash_length = hash_length
         if self.hash_length is not None:
             for when_spec, projection in self.projections.items():
                 if "{hash}" not in projection:
-                    if "{hash" in projection:
-                        raise InvalidDirectoryLayoutParametersError(
-                            "Conflicting options for installation layout hash" " length"
-                        )
-                    else:
-                        raise InvalidDirectoryLayoutParametersError(
-                            "Cannot specify hash length when the hash is not"
-                            " part of all install_tree projections"
-                        )
+                    raise InvalidDirectoryLayoutParametersError(
+                        "Conflicting options for installation layout hash length"
+                        if "{hash" in projection
+                        else "Cannot specify hash length when the hash is not part of all "
+                        "install_tree projections"
+                    )
                 self.projections[when_spec] = projection.replace(
                     "{hash}", "{hash:%d}" % self.hash_length
                 )

@@ -279,13 +272,6 @@ def path_for_spec(self, spec):

         if spec.external:
             return spec.external_path
-        if self.check_upstream:
-            upstream, record = spack.store.STORE.db.query_by_spec_hash(spec.dag_hash())
-            if upstream:
-                raise SpackError(
-                    "Internal error: attempted to call path_for_spec on"
-                    " upstream-installed package."
-                )

         path = self.relative_path_for_spec(spec)
         assert not path.startswith(self.root)
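With the constructor now keyword-only, call sites become self-describing. A hedged example (the root path and projection are invented; the parameter names come from the new signature above):

```python
from spack.directory_layout import DirectoryLayout

layout = DirectoryLayout(
    "/tmp/spack-install-demo",                                   # hypothetical install root
    projections={"all": "{architecture}/{name}-{version}-{hash}"},
    hash_length=7,                                               # truncates {hash} in the projection
)
```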
@@ -581,7 +581,7 @@ def _error_on_nonempty_view_dir(new_root):
     # Check if the target path lexists
     try:
         st = os.lstat(new_root)
-    except (IOError, OSError):
+    except OSError:
         return

     # Empty directories are fine

@@ -861,7 +861,7 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
         ):
             try:
                 shutil.rmtree(old_root)
-            except (IOError, OSError) as e:
+            except OSError as e:
                 msg = "Failed to remove old view at %s\n" % old_root
                 msg += str(e)
                 tty.warn(msg)

@@ -1427,7 +1427,7 @@ def is_develop(self, spec):
         """Returns true when the spec is built from local sources"""
         return spec.name in self.dev_specs

-    def concretize(self, force=False, tests=False):
+    def concretize(self, tests=False):
         """Concretize user_specs in this environment.

         Only concretizes specs that haven't been concretized yet unless

@@ -1437,8 +1437,6 @@ def concretize(self, force=False, tests=False):
         write out a lockfile containing concretized specs.

         Arguments:
-            force (bool): re-concretize ALL specs, even those that were
-                already concretized
             tests (bool or list or set): False to run no tests, True to test
                 all packages, or a list of package names to run tests for some

@@ -1446,7 +1444,7 @@ def concretize(self, force=False, tests=False):
             List of specs that have been concretized. Each entry is a tuple of
             the user spec and the corresponding concretized spec.
         """
-        if force:
+        if spack.config.get("concretizer:force", False):
             # Clear previously concretized specs
             self.concretized_user_specs = []
             self.concretized_order = []

@@ -2554,7 +2552,7 @@ def is_latest_format(manifest):
     try:
         with open(manifest, encoding="utf-8") as f:
             data = syaml.load(f)
-    except (OSError, IOError):
+    except OSError:
         return True
     top_level_key = _top_level_key(data)
     changed = spack.schema.env.update(data[top_level_key])

@@ -2634,6 +2632,32 @@ def _ensure_env_dir():

     shutil.copy(envfile, target_manifest)

+    # Copy relative path includes that live inside the environment dir
+    try:
+        manifest = EnvironmentManifestFile(environment_dir)
+    except Exception:
+        # error handling for bad manifests is handled on other code paths
+        return
+
+    includes = manifest[TOP_LEVEL_KEY].get("include", [])
+    for include in includes:
+        if os.path.isabs(include):
+            continue
+
+        abspath = pathlib.Path(os.path.normpath(environment_dir / include))
+        common_path = pathlib.Path(os.path.commonpath([environment_dir, abspath]))
+        if common_path != environment_dir:
+            tty.debug(f"Will not copy relative include from outside environment: {include}")
+            continue
+
+        orig_abspath = os.path.normpath(envfile.parent / include)
+        if not os.path.exists(orig_abspath):
+            tty.warn(f"Included file does not exist; will not copy: '{include}'")
+            continue
+
+        fs.touchp(abspath)
+        shutil.copy(orig_abspath, abspath)
+
+
 class EnvironmentManifestFile(collections.abc.Mapping):
     """Manages the in-memory representation of a manifest file, and its synchronization
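The include-copying logic above guards against path escapes with `os.path.commonpath`; the check can be exercised standalone (directory and file names are hypothetical):

```python
import os
import pathlib


def is_inside(env_dir: pathlib.Path, include: str) -> bool:
    # resolve the include relative to the environment dir, then verify containment
    abspath = pathlib.Path(os.path.normpath(env_dir / include))
    return pathlib.Path(os.path.commonpath([env_dir, abspath])) == env_dir


env_dir = pathlib.Path("/tmp/demo-env")
assert is_inside(env_dir, "configs/packages.yaml")
assert not is_inside(env_dir, "../outside.yaml")  # rejected, as in the diff
```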
@@ -10,6 +10,7 @@
 import spack.environment as ev
 import spack.repo
+import spack.schema.environment
 import spack.store
 from spack.util.environment import EnvironmentModifications

@@ -156,6 +157,11 @@ def activate(
     # MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive)
     # become PATH variables.
     #

+    env_vars_yaml = env.manifest.configuration.get("env_vars", None)
+    if env_vars_yaml:
+        env_mods.extend(spack.schema.environment.parse(env_vars_yaml))
+
     try:
         if view and env.has_view(view):
             with spack.store.STORE.db.read_transaction():

@@ -189,6 +195,10 @@ def deactivate() -> EnvironmentModifications:
     if active is None:
         return env_mods

+    env_vars_yaml = active.manifest.configuration.get("env_vars", None)
+    if env_vars_yaml:
+        env_mods.extend(spack.schema.environment.parse(env_vars_yaml).reversed())
+
     active_view = os.getenv(ev.spack_env_view_var)

     if active_view and active.has_view(active_view):
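The `.reversed()` call in `deactivate` is the key idea: whatever modifications activation applied are replayed inverted on the way out. A toy sketch of that idea in plain Python (not Spack's `EnvironmentModifications` API):

mods = [("set", "FOO", "1"), ("prepend_path", "PATH", "/opt/bin")]

def reversed_mods(mods):
    inverse = {"set": "unset", "prepend_path": "remove_path"}
    return [(inverse[op], var, val) for op, var, val in reversed(mods)]

print(reversed_mods(mods))
# [('remove_path', 'PATH', '/opt/bin'), ('unset', 'FOO', '1')]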
@@ -187,7 +187,7 @@ def path_for_extension(target_name: str, *, paths: List[str]) -> str:
        if name == target_name:
            return path
    else:
-        raise IOError('extension "{0}" not found'.format(target_name))
+        raise OSError('extension "{0}" not found'.format(target_name))


def get_module(cmd_name):
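Note that the `else:` in this hunk belongs to the enclosing `for` loop, not to an `if`: it runs only when the loop completes without finding a match, which is exactly when the lookup should fail. The idiom in miniature:

def find_path(target, names_to_paths):
    for name, path in names_to_paths:
        if name == target:
            return path
    else:
        # reached only if the loop ran to completion without returning
        raise OSError(f'extension "{target}" not found')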
@@ -25,7 +25,6 @@
 import functools
-import http.client
 import os
 import os.path
 import re
 import shutil
 import urllib.error

@@ -321,9 +320,15 @@ def _fetch_urllib(self, url):
        request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})

+       if os.path.lexists(save_file):
+           os.remove(save_file)
+
        try:
            response = web_util.urlopen(request)
-       except (TimeoutError, urllib.error.URLError) as e:
+           tty.msg(f"Fetching {url}")
+           with open(save_file, "wb") as f:
+               shutil.copyfileobj(response, f)
+       except OSError as e:
            # clean up archive on failure.
            if self.archive_file:
                os.remove(self.archive_file)
@@ -331,14 +336,6 @@ def _fetch_urllib(self, url):
            os.remove(save_file)
            raise FailedDownloadError(e) from e

-       tty.msg(f"Fetching {url}")
-
-       if os.path.lexists(save_file):
-           os.remove(save_file)
-
-       with open(save_file, "wb") as f:
-           shutil.copyfileobj(response, f)

        # Save the redirected URL for error messages. Sometimes we're redirected to an arbitrary
        # mirror that is broken, leading to spurious download failures. In that case it's helpful
        # for users to know which URL was actually fetched.
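A self-contained sketch of the restructured fetch flow (stdlib only; the function and file names are assumptions): remove any stale partial file first, stream the response to disk inside the `try` block, and treat any `OSError` -- which covers both `URLError` and `TimeoutError` -- as a failed download that must clean up after itself.

import os
import shutil
import urllib.request

def fetch(url, save_file):
    if os.path.lexists(save_file):
        os.remove(save_file)
    try:
        response = urllib.request.urlopen(url, timeout=10)
        with open(save_file, "wb") as f:
            shutil.copyfileobj(response, f)
    except OSError as e:
        # clean up the partial file on failure
        if os.path.lexists(save_file):
            os.remove(save_file)
        raise RuntimeError(f"failed to download {url}") from e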
@@ -535,11 +532,16 @@ def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs):
    @_needs_stage
    def fetch(self):
        file = self.stage.save_filename
-       tty.msg(f"Fetching {self.url}")

+       if os.path.lexists(file):
+           os.remove(file)
+
        try:
            response = self._urlopen(self.url)
-       except (TimeoutError, urllib.error.URLError) as e:
+           tty.msg(f"Fetching {self.url}")
+           with open(file, "wb") as f:
+               shutil.copyfileobj(response, f)
+       except OSError as e:
            # clean up archive on failure.
            if self.archive_file:
                os.remove(self.archive_file)
@@ -547,12 +549,6 @@ def fetch(self):
            os.remove(file)
            raise FailedDownloadError(e) from e

-       if os.path.lexists(file):
-           os.remove(file)
-
-       with open(file, "wb") as f:
-           shutil.copyfileobj(response, f)


class VCSFetchStrategy(FetchStrategy):
    """Superclass for version control system fetch strategies.
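Catching bare `OSError` in both fetch paths is sound because every exception the old tuples named sits under it:

import urllib.error
assert issubclass(urllib.error.URLError, OSError)
assert issubclass(TimeoutError, OSError)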
@@ -35,7 +35,6 @@
import spack.config
import spack.directory_layout
import spack.paths
import spack.projections
import spack.relocate
import spack.schema.projections
@@ -44,7 +43,6 @@
import spack.util.spack_json as s_json
import spack.util.spack_yaml as s_yaml
from spack.error import SpackError
from spack.hooks import sbang

__all__ = ["FilesystemView", "YamlFilesystemView"]

@@ -91,16 +89,10 @@ def view_copy(
    if stat.S_ISLNK(src_stat.st_mode):
        spack.relocate.relocate_links(links=[dst], prefix_to_prefix=prefix_to_projection)
    elif spack.relocate.is_binary(dst):
-       spack.relocate.relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection)
+       spack.relocate.relocate_text_bin(binaries=[dst], prefix_to_prefix=prefix_to_projection)
    else:
-       prefix_to_projection[spack.store.STORE.layout.root] = view._root
-
-       # This is vestigial code for the *old* location of sbang.
-       prefix_to_projection[f"#!/bin/bash {spack.paths.spack_root}/bin/sbang"] = (
-           sbang.sbang_shebang_line()
-       )

-       spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)
+       spack.relocate.relocate_text(files=[dst], prefix_to_prefix=prefix_to_projection)

    # The os module on Windows does not have a chown function.
    if sys.platform != "win32":
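The `prefixes=` to `prefix_to_prefix=` rename makes the argument's shape explicit: a mapping from old install prefixes to their projected locations. A toy text relocation under that assumption (Spack's real implementation also handles binaries and encodings differently):

prefix_to_prefix = {"/opt/spack/gcc-12.3.0": "/views/default"}

def relocate_text(path, prefix_to_prefix):
    with open(path, encoding="utf-8") as f:
        text = f.read()
    for old, new in prefix_to_prefix.items():
        text = text.replace(old, new)
    with open(path, "w", encoding="utf-8") as f:
        f.write(text)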
@@ -435,7 +427,7 @@ def needs_file(spec, file):
        try:
            with open(manifest_file, "r", encoding="utf-8") as f:
                manifest = s_json.load(f)
-       except (OSError, IOError):
+       except OSError:
            # if we can't load it, assume it doesn't know about the file.
            manifest = {}
        return test_path in manifest
@@ -839,7 +831,7 @@ def get_spec_from_file(filename):
    try:
        with open(filename, "r", encoding="utf-8") as f:
            return spack.spec.Spec.from_yaml(f)
-   except IOError:
+   except OSError:
        return None
@@ -26,7 +26,7 @@ def is_shared_library_elf(filepath):
        with open(filepath, "rb") as f:
            elf = parse_elf(f, interpreter=True, dynamic_section=True)
            return elf.has_pt_dynamic and (elf.has_soname or not elf.has_pt_interp)
-   except (IOError, OSError, ElfParsingError):
+   except (OSError, ElfParsingError):
        return False


@@ -166,7 +166,7 @@ def filter_shebangs_in_directory(directory, filenames=None):
        # Only look at executable, non-symlink files.
        try:
            st = os.lstat(path)
-       except (IOError, OSError):
+       except OSError:
            continue

        if stat.S_ISLNK(st.st_mode) or stat.S_ISDIR(st.st_mode) or not st.st_mode & is_exe:
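A compact sketch of the filter this hunk touches: `os.lstat` (which does not follow symlinks) feeds checks that skip links, directories, and files with no execute bit. Here `is_exe` is assumed to be a permission mask like the one below.

import os
import stat

is_exe = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH

def is_plain_executable(path):
    try:
        st = os.lstat(path)
    except OSError:
        return False
    return not (stat.S_ISLNK(st.st_mode) or stat.S_ISDIR(st.st_mode)) and bool(st.st_mode & is_exe)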
@@ -275,7 +275,7 @@ def _do_fake_install(pkg: "spack.package_base.PackageBase") -> None:
        fs.mkdirp(pkg.prefix.bin)
        fs.touch(os.path.join(pkg.prefix.bin, command))
        if sys.platform != "win32":
-           chmod = which("chmod")
+           chmod = which("chmod", required=True)
            chmod("+x", os.path.join(pkg.prefix.bin, command))

        # Install fake header file
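`required=True` turns a silent `None` into a hard failure when the tool is missing, so the very next call cannot blow up on a `None`. A hedged sketch of that behavior (Spack's real `which` returns an executable wrapper object, not a string):

import shutil

def which(name, required=False):
    path = shutil.which(name)
    if path is None and required:
        raise RuntimeError(f"executable {name!r} not found in PATH")
    return path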
@@ -539,7 +539,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
    # Note that we copy them in as they are in the *install* directory
    # NOT as they are in the repository, because we want a snapshot of
    # how *this* particular build was done.
-   for node in spec.traverse(deptype=all):
+   for node in spec.traverse(deptype="all"):
        if node is not spec:
            # Locate the dependency package in the install tree and find
            # its provenance information.
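The `deptype=all` fix is subtle: without quotes, `all` is Python's builtin function, not the sentinel string the traversal argument expects.

assert callable(all)   # the builtin function, silently accepted before
deptype = "all"        # the intended value: the string sentinel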
@@ -14,7 +14,6 @@
 import io
 import operator
 import os
-import os.path
 import pstats
 import re
 import shlex
@@ -164,7 +163,7 @@ def format_help_sections(self, level):
        # lazily add all commands to the parser when needed.
        add_all_commands(self)

-       """Print help on subcommands in neatly formatted sections."""
+       # Print help on subcommands in neatly formatted sections.
        formatter = self._get_formatter()

        # Create a list of subcommand actions. Argparse internals are nasty!
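The docstring-to-comment change matters because a string literal that is not the first statement of a module, class, or function is not a docstring; it is built at runtime and immediately discarded.

def f():
    x = 1
    "this literal is evaluated and thrown away -- it documents nothing"
    return x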
@@ -503,7 +502,7 @@ def make_argument_parser(**kwargs):
    return parser


-def send_warning_to_tty(message, category, filename, lineno, file=None, line=None):
+def showwarning(message, category, filename, lineno, file=None, line=None):
    """Redirects messages to tty.warn."""
    if category is spack.error.SpackAPIWarning:
        tty.warn(f"{filename}:{lineno}: {message}")
@@ -513,9 +512,6 @@ def send_warning_to_tty(message, category, filename, lineno, file=None, line=None):


def setup_main_options(args):
    """Configure spack globals based on the basic options."""
-   # Assign a custom function to show warnings
-   warnings.showwarning = send_warning_to_tty
-
    # Set up environment based on args.
    tty.set_verbose(args.verbose)
    tty.set_debug(args.debug)
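Renaming the handler to `showwarning` matches the stdlib hook it replaces. Minimal usage of that hook:

import warnings

def showwarning(message, category, filename, lineno, file=None, line=None):
    print(f"WARN {filename}:{lineno}: {message}")

warnings.showwarning = showwarning
warnings.warn("demo")  # now printed by the custom handler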
@@ -732,7 +728,7 @@ def _compatible_sys_types():
    with the current host.
    """
    host_platform = spack.platforms.host()
-   host_os = str(host_platform.operating_system("default_os"))
+   host_os = str(host_platform.default_operating_system())
    host_target = archspec.cpu.host()
    compatible_targets = [host_target] + host_target.ancestors

@@ -906,9 +902,10 @@ def _main(argv=None):
    # main() is tricky to get right, so be careful where you put things.
    #
    # Things in this first part of `main()` should *not* require any
-   # configuration. This doesn't include much -- setting up th parser,
+   # configuration. This doesn't include much -- setting up the parser,
    # restoring some key environment variables, very simple CLI options, etc.
    # ------------------------------------------------------------------------
+   warnings.showwarning = showwarning

    # Create a parser with a simple positional argument first. We'll
    # lazily load the subcommand(s) we need later. This allows us to
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path
 from typing import Optional

 import llnl.url
@@ -2,7 +2,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-import os.path
 import traceback

 import llnl.util.tty as tty
@@ -31,7 +31,7 @@
 import copy
 import datetime
 import inspect
-import os.path
+import os
 import re
 import string
 from typing import List, Optional

@@ -4,7 +4,7 @@
 import collections
 import itertools
-import os.path
+import os
 from typing import Dict, List, Optional, Tuple

 import llnl.util.filesystem as fs

@@ -5,7 +5,7 @@
 """This module implements the classes necessary to generate Tcl
 non-hierarchical modules.
 """
-import os.path
+import os
 from typing import Dict, Optional, Tuple

 import spack.config
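All of these import hunks rely on the same fact: `import os` already makes the `os.path` submodule reachable as an attribute of `os`, so a separate `import os.path` adds nothing.

import os
print(os.path.join("a", "b"))  # works with no explicit `import os.path`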
@@ -7,6 +7,7 @@
 import base64
 import json
 import re
+import socket
 import time
 import urllib.error
 import urllib.parse
@@ -382,6 +383,7 @@ def create_opener():
    """Create an opener that can handle OCI authentication."""
    opener = urllib.request.OpenerDirector()
    for handler in [
        urllib.request.ProxyHandler(),
        urllib.request.UnknownHandler(),
        urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
        spack.util.web.SpackHTTPDefaultErrorHandler(),
@@ -410,7 +412,7 @@ def wrapper(*args, **kwargs):
        for i in range(retries):
            try:
                return f(*args, **kwargs)
-           except (urllib.error.URLError, TimeoutError) as e:
+           except OSError as e:
                # Retry on internal server errors, and rate limit errors
                # Potentially this could take into account the Retry-After header
                # if registries support it
@@ -420,9 +422,10 @@ def wrapper(*args, **kwargs):
                        and (500 <= e.code < 600 or e.code == 429)
                    )
                    or (
-                       isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError)
+                       isinstance(e, urllib.error.URLError)
+                       and isinstance(e.reason, socket.timeout)
                    )
-                   or isinstance(e, TimeoutError)
+                   or isinstance(e, socket.timeout)
                ):
                    # Exponential backoff
                    sleep(2**i)
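A compact, self-contained sketch of the retry pattern this last hunk adjusts (the names here are assumptions, not Spack's): catch `OSError`, treat 5xx/429 responses and timeouts as retryable, back off exponentially, and re-raise on the final attempt.

import time
import urllib.error

def with_retries(f, retries=3):
    def wrapper(*args, **kwargs):
        for i in range(retries):
            try:
                return f(*args, **kwargs)
            except OSError as e:
                retryable = (
                    isinstance(e, urllib.error.HTTPError)
                    and (500 <= e.code < 600 or e.code == 429)
                ) or isinstance(e, TimeoutError)
                if not retryable or i == retries - 1:
                    raise
                time.sleep(2**i)  # exponential backoff: 1s, 2s, 4s, ...
    return wrapper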
Some files were not shown because too many files have changed in this diff.