Compare commits
v0.23.1 ... develop-20 (484 commits)
Commits in this range (SHA1):

05de2b1ba1, f57d7fb952, e76f8fda2d, 66a00fe103, 15124d7c73, a14f10e882, 16d3de4540, 78bd905fe0,
e9cdcc4af0, aca469b329, c65a0e03c4, b02340724d, 250de0cdf2, da97e9eaa7, a3322459fe, 88d5393570,
d1d61820f4, 858c7ca1a2, 5b1394f1cf, 9554f8802b, b78c517582, aaf0319731, 21d5fd6ec1, d6f71a2afd,
2edbed3a9d, e0035bd658, 64207e8fe8, fdc85572f3, 75162be4b6, adbbb91b41, 55eeff7eb0, 6de1ebd71a,
fd865efe87, 93c09ed3b4, 9db8f8ea88, eb178e6840, 8487842e11, 2286b2ad5a, ea0d99baf8, 60be9ea068,
5640861aeb, d8fa6eb559, ec7436be6b, 48f6a4ffb4, 96a0b0eb08, 8d8e36d7e2, 1c843b99ae, 93a0c0eafd,
0850e0bf08, 6263f75303, c184a68512, 69b17ea602, 5547b7b552, ae6d1538d5, cdb0e80598, 233e57c4bc,
918afd6385, 83af81a14a, 2b2538e82c, b6715bde32, 0db3b36874, 0bc54a4640, 7057ca3c0c, 40ac1613e3,
d3ab84e5d8, 15197b1868, de45c90056, 82fc0c702d, 51e889ea3f, ad8d1eddde, ebb3736de7, 4d7a637788,
8e163c3565, f1fbf11b33, be3a33ecf7, 4be528448c, 8b11918c1e, 5add010c71, e77e1d6528, 6ede4e9f13,
c50ac5ac25, e7e5352e93, 36e74f360b, f362d45802, 9719220e8a, 30e2b15eea, 7ee1e518b0, 4af8fbeddf,
2b85b706f1, eadf8727e7, de739db153, a3bed44bf5, 3da04ccb19, f921b28032, 3d50d7173d, 5a5f555fe2,
bb30c726a4, 0894180cc1, f211e2f9c4, f04ea573fa, 364f70c16d, 5da1adad3a, dfb529fc6e, 6e2625ae65,
7f24b11675, bb9bb905a0, 60b4882d4e, 19734832eb, 51fb1ed05b, 69faa41c3f, 72ef5b9010, 795809f31b,
5db597ff87, b54227d5e9, 94cf51875f, 2f6e30fd24, 06eae96ef9, 557083c33b, f6ab2f5b99, 6005813518,
1df506959e, 0d0ff44e3e, f4bfeb7ed8, a16350df69, a2981cff1f, d2372f8eee, c310c2911a, d68747912d,
107e4515bd, af6526bb82, dd8dff7872, 82d4b391bf, a07e372770, d35202d83e, 1c1d439a01, d52be82c06,
2a0fc464c9, cd26331b19, f5934db96b, 11b86ca75c, 0c2b546825, ef615bcc7e, 0f21f24356, e59ee0768f,
6cbd9dcf13, 92dbb55703, e84631473c, c213a8c2a7, 526af1cbe7, 334a8b0991, 1581922c9e, 9cd2f0a536,
687766b8ab, 396a701860, 7105cc8c01, 0ce38ed109, c548bcc9ef, f018e0fe42, 9aefbb0e96, 9265991767,
25cfea48f3, fc4316cafa, de1416b3de, ba52c4f05d, 501ee68606, 283eaaf323, a3543008d9, f760e16688,
e9d2732e00, 03525528d6, a3985e7538, ae28528ec7, cb8880b388, 316dcc1609, 84ea7dbddf, b6e4ff0242,
c23ffbbd7a, accd3ca860, d47629a521, 7bb6c9b828, 7e5b5f8c57, 3a1c0f5c5f, b50dbb8604, 30c00353d4,
466c3abaeb, 478647f873, 15f3851a92, 5232ee1ed1, 855943ff29, 449a462cde, f3c6f00cc1, 42333ad66e,
36f3566257, 24fc720c0b, fe0f4c1815, d68462ae8e, 0189e92329, 8d83baa35e, 12dd1208f3, 728c5e0e9d,
c3e92a3d01, 49efa711d0, ab4a645cbe, 7c74247f23, 728f13d4b2, 4d6347c99c, b2a86fcaba, da83ab35e8,
9cb2070eeb, a72490fc91, f15e5f7163, fc105a1a26, 8a9e16dc3b, 0b7fc360fa, 79d79969bb, 422f829e4e,
f54c101b44, 05acd29f38, 77e2187e13, 5c88e035f2, 94bd7b9afb, f181ac199a, a8da7993ad, b808338792,
112e47cc23, 901cea7a54, c1b2ac549d, 4693b323ac, 1f2a68f2b6, 3fcc38ef04, 22d104d7a9, 8b1009a4a0,
f54526957a, 175a4bf101, aa81d59958, 6aafefd43d, ac82f344bd, 16fd77f9da, f82554a39b, 2aaf50b8f7,
b0b9cf15f7, 8898e14e69, 63c72634ea, a7eacd77e3, 09b7ea0400, b31dd46ab8, ad7417dee9, c3de3b0b6f,
6da9bf226a, b3ee954e5b, db090b0cad, 3a6c361a85, bb5bd030d4, b9c60f96ea, 6b16c64c0e, 3ea970746d,
d8f2e080e6, ecb8a48376, 30176582e4, ac17e8bea4, c30c85a99c, 2ae8eb6686, b5cc5b701c, 8e7641e584,
e692d401eb, 99319b1d91, 839ed9447c, 8e5a040985, 5ddbb1566d, eb17680d28, f4d81be9cf, ea5ffe35f5,
1e37a77e72, 29427d3e9e, 2a2d1989c1, c6e292f55f, bf5e6b4aaf, 9760089089, da7c5c551d, a575fa8529,
39a65d88f6, 06ff8c88ac, a96b67ce3d, 67d494fa0b, e37e53cfe8, cf31d20d4c, b74db341c8, e88a3f6f85,
9bd7483e73, 04c76fab63, ecbf9fcacf, 69fb594699, d28614151f, f1d6af6c94, 192821f361, 18790ca397,
c22d77a38e, d82bdb3bf7, a042bdfe0b, 60e3e645e8, 51785437bc, 2e8db0815d, 8a6428746f, 6b9c099af8,
30814fb4e0, 3194be2e92, 41be2f5899, 02af41ebb3, 9d33c89030, 51ab7bad3b, 0b094f2473, cd306d0bc6,
fdb9cf2412, a546441d2e, 141cdb6810, f2ab74efe5, 38b838e405, c037188b59, 0835a3c5f2, 38a2f9c2f2,
eecd4afe58, 83624551e0, 741652caa1, 8e914308f0, 3c220d0989, 8094fa1e2f, 5c67051980, c01fb9a6d2,
bf12bb57e7, 406c73ae11, 3f50ccfcdd, 9883a2144d, 94815d2227, a15563f890, ac2ede8d2f, b256a7c50d,
21e10d6d98, ed39967848, eda0c6888e, 66055f903c, a1c57d86c3, 9da8dcae97, c93f223a73, f1faf31735,
8957ef0df5, 347ec87fc5, cd8c46e54e, 75b03bc12f, 58511a3352, 325873a4c7, 9156e4be04, 12d3abc736,
4208aa6291, 0bad754e23, cde2620f41, a35aa038b0, 150416919e, 281c274e0b, 16e130ece1, 7586303fba,
6501880fbf, c76098038c, 124b616b27, 1148c8f195, c57452dd08, a7e57c9a14, 85d83f9c26, 39a081d7fd,
71b65bb424, 3dcbd118df, 5dacb774f6, cb3d6549c9, 559c2f1eb9, ed1dbea77b, 6ebafe4631, 7f0bb7147d,
f41b38e93d, 5fd12b7bea, fe746bdebb, 453af4b9f7, 29cf1559cc, a9b3e1670b, 4f9aa6004b, aa2c18e4df,
0ff3e86315, df208c1095, 853f70edc8, 50970f866e, 8821300985, adc8e1d996, 1e0aac6ac3, 99e2313d81,
22690a7576, 5325cfe865, 5333925dd7, 2db99e1ff6, 68aa712a3e, 2e71bc640c, 661f3621a7, f182032337,
066666b7b1, 73316c3e28, c8e4ae08da, 44225caade, 8d325d3e30, d0fd112006, 50f43ca71d, 2546fb6afa,
10f6863d91, 63ea528606, 89d2b9553d, 278326b4d9, 43c1a5e0ec, 8feb506b3a, 627544191a, cf672ea8af,
2c4ac02adf, 7f76490b31, 46e4c1fd30, 85c5533e62, c47cafd11a, 8e33cc158b, f07173e5ee, 118f5d2683,
8fb2abc3cd, 3bcb8a9236, a6fdd7608f, 1ffd7125a6, d1166fd316, b8eba1c677, e3c0515076, 97406f241c,
e1dfbbf611, 52147348c7, aeb0ab6acf, 6cd26b7603, 1c75d07f05, 15b3ff2a0a, e9f94d9bf2, 299324c7ca,
dfab174f31, a86953fcb1, 5f262eb5d3, 00f179ee6d, da4f7c2952, fdedb6f95d, 067fefc46a, 42c9961bbe,
fe2bf4c0f9, 4d3b85c4d4, f05cbfbf44, 448049ccfc, e56057fd79, 26d80e7bc5, 60eb0e9c80, 7443a3b572,
a5ba4f8d91, 6ef0f495a9, e91b8c291a, 6662046aca, db83c62fb1, d4adfda385, e8a8e2d98b, 55c770c556,
33a796801c, b90ac6441c, 68b69aa9e3, ac0ed2c4cc, 66a93b5433, b7993317ea, 66622ec4d0, 9b2cd1b208,
9888683a21, fb46c7a72d, c0196cde39, d091172d67, ab51369087, 1cea82b629, 2abb711337, 6f948eb847,
93bf0634f3, badb3cedcd, be918817d6, 41d9f687f6
.github/workflows/audit.yaml (vendored, 2 changed lines)

@@ -66,7 +66,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
spack -d audit externals
./share/spack/qa/validate_last_exit.ps1
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
with:
name: coverage-audits-${{ matrix.system.os }}
.github/workflows/bootstrap.yml (vendored, 12 changed lines)

@@ -161,11 +161,7 @@ jobs:
source share/spack/setup-env.sh
spack -d gpg list
tree $HOME/.spack/bootstrap/store/
- name: Bootstrap File
run: |
source share/spack/setup-env.sh
spack -d python share/spack/qa/bootstrap-file.py
tree $HOME/.spack/bootstrap/store/

windows:
runs-on: "windows-latest"
@@ -196,9 +192,3 @@ jobs:
spack -d gpg list
./share/spack/qa/validate_last_exit.ps1
tree $env:userprofile/.spack/bootstrap/store/
- name: Bootstrap File
run: |
./share/spack/setup-env.ps1
spack -d python share/spack/qa/bootstrap-file.py
./share/spack/qa/validate_last_exit.ps1
tree $env:userprofile/.spack/bootstrap/store/
.github/workflows/build-containers.yml (vendored, 17 changed lines)

@@ -57,7 +57,13 @@ jobs:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683

- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
- name: Determine latest release tag
id: latest
run: |
git fetch --quiet --tags
echo "tag=$(git tag --list --sort=-v:refname | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | head -n 1)" | tee -a $GITHUB_OUTPUT

- uses: docker/metadata-action@369eb591f429131d6889c46b94e711f089e6ca96
id: docker_meta
with:
images: |
@@ -71,6 +77,7 @@ jobs:
type=semver,pattern={{major}}
type=ref,event=branch
type=ref,event=pr
type=raw,value=latest,enable=${{ github.ref == format('refs/tags/{0}', steps.latest.outputs.tag) }}

- name: Generate the Dockerfile
env:
@@ -87,7 +94,7 @@ jobs:
fi

- name: Upload Dockerfile
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
with:
name: dockerfiles_${{ matrix.dockerfile[0] }}
path: dockerfiles
@@ -96,7 +103,7 @@ jobs:
uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349
uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5

- name: Log in to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
@@ -113,7 +120,7 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75
uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}
@@ -126,7 +133,7 @@ jobs:
needs: deploy-images
steps:
- name: Merge Artifacts
uses: actions/upload-artifact/merge@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
uses: actions/upload-artifact/merge@6f51ac03b9356f520e9adb1b1b7802705f340c2b
with:
name: dockerfiles
pattern: dockerfiles_*
.github/workflows/coverage.yml (vendored, 3 changed lines)

@@ -29,6 +29,7 @@ jobs:
- run: coverage xml

- name: "Upload coverage report to CodeCov"
uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238
uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
with:
verbose: true
fail_ci_if_error: false
@@ -3,5 +3,5 @@ clingo==5.7.1
flake8==7.1.1
isort==5.13.2
mypy==1.8.0
types-six==1.16.21.20241105
types-six==1.17.0.20241205
vermin==1.6.0
.github/workflows/unit_tests.yaml (vendored, 38 changed lines)

@@ -15,17 +15,17 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest]
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
on_develop:
- ${{ github.ref == 'refs/heads/develop' }}
include:
- python-version: '3.6'
os: ubuntu-20.04
on_develop: ${{ github.ref == 'refs/heads/develop' }}
exclude:
- python-version: '3.7'
os: ubuntu-latest
on_develop: false
os: ubuntu-22.04
on_develop: ${{ github.ref == 'refs/heads/develop' }}
exclude:
- python-version: '3.8'
os: ubuntu-latest
on_develop: false
@@ -52,7 +52,13 @@ jobs:
# Needed for unit tests
sudo apt-get -y install \
coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
cmake bison libbison-dev kcov
cmake bison libbison-dev subversion
# On ubuntu 24.04, kcov was removed. It may come back in some future Ubuntu
- name: Set up Homebrew
id: set-up-homebrew
uses: Homebrew/actions/setup-homebrew@40e9946c182a64b3db1bf51be0dcb915f7802aa9
- name: Install kcov with brew
run: "brew install kcov"
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
@@ -80,7 +86,7 @@ jobs:
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
with:
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
path: coverage
@@ -99,7 +105,13 @@ jobs:
run: |
sudo apt-get -y update
# Needed for shell tests
sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
sudo apt-get install -y coreutils csh zsh tcsh fish dash bash subversion
# On ubuntu 24.04, kcov was removed. It may come back in some future Ubuntu
- name: Set up Homebrew
id: set-up-homebrew
uses: Homebrew/actions/setup-homebrew@40e9946c182a64b3db1bf51be0dcb915f7802aa9
- name: Install kcov with brew
run: "brew install kcov"
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pytest coverage[toml] pytest-xdist
@@ -113,7 +125,7 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
with:
name: coverage-shell
path: coverage
@@ -128,13 +140,13 @@ jobs:
- name: Install dependencies
run: |
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
bzip2 curl gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- name: Setup repo and non-root user
run: |
git --version
git config --global --add safe.directory /__w/spack/spack
git config --global --add safe.directory '*'
git fetch --unshallow
. .github/workflows/bin/setup_git.sh
useradd spack-test
@@ -175,7 +187,7 @@ jobs:
spack bootstrap status
spack solve zlib
spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
with:
name: coverage-clingo-cffi
path: coverage
@@ -213,7 +225,7 @@ jobs:
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
with:
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
path: coverage
@@ -244,7 +256,7 @@ jobs:
run: |
spack unit-test -x --verbose --cov --cov-config=pyproject.toml
./share/spack/qa/validate_last_exit.ps1
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
with:
name: coverage-windows
path: coverage
.github/workflows/valid-style.yml (vendored, 42 changed lines)

@@ -13,8 +13,7 @@ concurrency:

jobs:
# Validate that the code can be run on all the Python versions
# supported by Spack
# Validate that the code can be run on all the Python versions supported by Spack
validate:
runs-on: ubuntu-latest
steps:
@@ -74,7 +73,7 @@ jobs:
- name: Setup repo and non-root user
run: |
git --version
git config --global --add safe.directory /__w/spack/spack
git config --global --add safe.directory '*'
git fetch --unshallow
. .github/workflows/bin/setup_git.sh
useradd spack-test
@@ -87,6 +86,7 @@ jobs:
spack -d bootstrap now --dev
spack -d style -t black
spack unit-test -V
# Check we don't make the situation with circular imports worse
import-check:
runs-on: ubuntu-latest
steps:
@@ -121,28 +121,46 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: 9f60f51bc7134e0be73f27623f1b0357d1718427
ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Import cycles before
- name: Problematic imports before
working-directory: circular-import-fighter
run: make SPACK_ROOT=../old && cp solution solution.old
- name: Import cycles after
run: make SPACK_ROOT=../old SUFFIX=.old
- name: Problematic imports after
working-directory: circular-import-fighter
run: make clean-graph && make SPACK_ROOT=../new && cp solution solution.new
run: make SPACK_ROOT=../new SUFFIX=.new
- name: Compare import cycles
working-directory: circular-import-fighter
run: |
edges_before="$(grep -oP 'edges to delete: \K\d+' solution.old)"
edges_after="$(grep -oP 'edges to delete: \K\d+' solution.new)"
edges_before="$(head -n1 solution.old)"
edges_after="$(head -n1 solution.new)"
if [ "$edges_after" -gt "$edges_before" ]; then
printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
printf 'previously this was %s\033[0m\n' "$edges_before"
printf 'Compare \033[1;97m"Import cycles before"\033[0m and '
printf '\033[1;97m"Import cycles after"\033[0m to see problematic imports.\n'
printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
printf '\033[1;97m"Problematic imports after"\033[0m.\n'
exit 1
else
printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
fi

# Further style checks from pylint
pylint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
with:
python-version: '3.13'
cache: 'pip'
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pylint
- name: Pylint (Spack Core)
run: |
pylint -j 4 --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib
@@ -102,6 +102,6 @@ PackageName: sbang
PackageHomePage: https://github.com/spack/sbang
PackageLicenseDeclared: Apache-2.0 OR MIT

PackageName: six
PackageHomePage: https://pypi.python.org/pypi/six
PackageLicenseDeclared: MIT
PackageName: typing_extensions
PackageHomePage: https://pypi.org/project/typing-extensions/
PackageLicenseDeclared: Python-2.0
@@ -70,7 +70,7 @@ Tutorial
----------------

We maintain a
[**hands-on tutorial**](https://spack.readthedocs.io/en/latest/tutorial.html).
[**hands-on tutorial**](https://spack-tutorial.readthedocs.io/).
It covers basic to advanced usage, packaging, developer features, and large HPC
deployments. You can do all of the exercises on your own laptop using a
Docker container.
@@ -55,3 +55,11 @@ concretizer:
splice:
explicit: []
automatic: false
# Maximum time, in seconds, allowed for the 'solve' phase. If set to 0, there is no time limit.
timeout: 0
# If set to true, exceeding the timeout will always result in a concretization error. If false,
# the best (suboptimal) model computed before the timeout is used.
#
# Setting this to false yields unreproducible results, so we advise to use that value only
# for debugging purposes (e.g. check which constraints can help Spack concretize faster).
error_on_timeout: true
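The two new keys above are ordinary concretizer settings, so they should be readable through Spack's regular Python configuration API. A minimal sketch, assuming the standard ``spack.config.get`` interface and the key names shown in the YAML above:

```python
# Minimal sketch (assumption: Spack's standard config API; key names from the YAML above).
import spack.config

timeout = spack.config.get("concretizer:timeout", 0)
error_on_timeout = spack.config.get("concretizer:error_on_timeout", True)

if timeout == 0:
    print("solve phase has no time limit")
else:
    action = "raise an error" if error_on_timeout else "keep the best model found so far"
    print(f"solve phase limited to {timeout}s; on timeout Spack will {action}")
```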
@@ -194,6 +194,12 @@ config:
# executables with many dependencies, in particular on slow filesystems.
bind: false

# Controls the handling of missing dynamic libraries after installation.
# Options are ignore (default), warn, or error. If set to error, the
# installation fails if installed binaries reference dynamic libraries that
# are not found in their specified rpaths.
missing_library_policy: ignore

# Set to 'false' to allow installation on filesystems that doesn't allow setgid bit
# manipulation by unprivileged user (e.g. AFS)
@@ -76,6 +76,8 @@ packages:
buildable: false
cray-mvapich2:
buildable: false
egl:
buildable: false
fujitsu-mpi:
buildable: false
hpcx-mpi:
@@ -265,25 +265,30 @@ infrastructure, or to cache Spack built binaries in Github Actions and
GitLab CI.

To get started, configure an OCI mirror using ``oci://`` as the scheme,
and optionally specify a username and password (or personal access token):
and optionally specify variables that hold the username and password (or
personal access token) for the registry:

.. code-block:: console

$ spack mirror add --oci-username username --oci-password password my_registry oci://example.com/my_image
$ spack mirror add --oci-username-variable REGISTRY_USER \
    --oci-password-variable REGISTRY_TOKEN \
    my_registry oci://example.com/my_image

Spack follows the naming conventions of Docker, with Dockerhub as the default
registry. To use Dockerhub, you can omit the registry domain:

.. code-block:: console

$ spack mirror add --oci-username username --oci-password password my_registry oci://username/my_image
$ spack mirror add ... my_registry oci://username/my_image

From here, you can use the mirror as any other build cache:

.. code-block:: console

$ export REGISTRY_USER=...
$ export REGISTRY_TOKEN=...
$ spack buildcache push my_registry <specs...> # push to the registry
$ spack install <specs...> # install from the registry
$ spack install <specs...> # or install from the registry

A unique feature of buildcaches on top of OCI registries is that it's incredibly
easy to generate get a runnable container image with the binaries installed. This
@@ -25,6 +25,14 @@ QMake does not appear to have a standardized way of specifying
the installation directory, so you may have to set environment
variables or edit ``*.pro`` files to get things working properly.

QMake packages will depend on the virtual ``qmake`` package which
is provided by multiple versions of Qt: ``qt`` provides Qt up to
Qt5, and ``qt-base`` provides Qt from version Qt6 onwards. This
split was motivated by the desire to split the single Qt package
into its components to allow for more fine-grained installation.
To depend on a specific version, refer to the documentation on
:ref:`virtual-dependencies`.

^^^^^^
Phases
^^^^^^
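To make the virtual ``qmake`` dependency described in the excerpt above concrete, here is a small hypothetical package sketch. ``QMakePackage`` and ``depends_on`` are standard Spack packaging primitives; the package name, URL, and checksum are invented for illustration:

```python
# Hypothetical QMake-based package (name, URL, and checksum are illustrative only).
from spack.package import *


class MyQtTool(QMakePackage):
    """Example-only tool built with qmake."""

    homepage = "https://example.com/my-qt-tool"
    url = "https://example.com/my-qt-tool-1.0.tar.gz"

    version("1.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")

    # Satisfied by either provider of the virtual: ``qt`` (up to Qt5) or ``qt-base`` (Qt6+).
    depends_on("qmake", type="build")
```

A specific provider can then be requested on the spec, for example ``spack install my-qt-tool ^qt-base``.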
@@ -210,7 +210,7 @@ def setup(sphinx):
# Spack classes that are private and we don't want to expose
("py:class", "spack.provider_index._IndexBase"),
("py:class", "spack.repo._PrependFileLoader"),
("py:class", "spack.build_systems._checks.BaseBuilder"),
("py:class", "spack.build_systems._checks.BuilderWithDefaults"),
# Spack classes that intersphinx is unable to resolve
("py:class", "spack.version.StandardVersion"),
("py:class", "spack.spec.DependencySpec"),
@@ -38,9 +38,11 @@ just have to configure and OCI registry and run ``spack buildcache push``.
spack -e . install

# Configure the registry
spack -e . mirror add --oci-username ... --oci-password ... container-registry oci://example.com/name/image
spack -e . mirror add --oci-username-variable REGISTRY_USER \
    --oci-password-variable REGISTRY_TOKEN \
    container-registry oci://example.com/name/image

# Push the image
# Push the image (do set REGISTRY_USER and REGISTRY_TOKEN)
spack -e . buildcache push --update-index --base-image ubuntu:22.04 --tag my_env container-registry

The resulting container image can then be run as follows:
@@ -178,8 +178,8 @@ Spec-related modules
Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization
of specs.

:mod:`spack.parser`
Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.
:mod:`spack.spec_parser`
Contains :class:`~spack.spec_parser.SpecParser` and functions related to parsing specs.

:mod:`spack.version`
Implements a simple :class:`~spack.version.Version` class with simple
@@ -1042,7 +1042,7 @@ file snippet we define a view named ``mpis``, rooted at
``/path/to/view`` in which all projections use the package name,
version, and compiler name to determine the path for a given
package. This view selects all packages that depend on MPI, and
excludes those built with the PGI compiler at version 18.5.
excludes those built with the GCC compiler at version 18.5.
The root specs with their (transitive) link and run type dependencies
will be put in the view due to the ``link: all`` option,
and the files in the view will be symlinks to the spack install
@@ -1056,7 +1056,7 @@ directories.
mpis:
root: /path/to/view
select: [^mpi]
exclude: ['%pgi@18.5']
exclude: ['%gcc@18.5']
projections:
all: '{name}/{version}-{compiler.name}'
link: all
@@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown belo
.. code-block:: console

apt update
apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd
apt install bzip2 ca-certificates g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd

.. tab-item:: RHEL

@@ -148,20 +148,22 @@ The first time you concretize a spec, Spack will bootstrap automatically:
--------------------------------
zlib@1.2.13%gcc@9.4.0+optimize+pic+shared build_system=makefile arch=linux-ubuntu20.04-icelake

The default bootstrap behavior is to use pre-built binaries. You can verify the
active bootstrap repositories with:

.. command-output:: spack bootstrap list

If for security concerns you cannot bootstrap ``clingo`` from pre-built
binaries, you have to disable fetching the binaries we generated with Github Actions.

.. code-block:: console

$ spack bootstrap disable github-actions-v0.4
==> "github-actions-v0.4" is now disabled and will not be used for bootstrapping
$ spack bootstrap disable github-actions-v0.3
==> "github-actions-v0.3" is now disabled and will not be used for bootstrapping

You can verify that the new settings are effective with:

.. command-output:: spack bootstrap list
$ spack bootstrap disable github-actions-v0.6
==> "github-actions-v0.6" is now disabled and will not be used for bootstrapping
$ spack bootstrap disable github-actions-v0.5
==> "github-actions-v0.5" is now disabled and will not be used for bootstrapping

You can verify that the new settings are effective with ``spack bootstrap list``.

.. note::

@@ -283,10 +285,6 @@ compilers`` or ``spack compiler list``:
intel@14.0.1 intel@13.0.1 intel@12.1.2 intel@10.1
-- clang -------------------------------------------------------
clang@3.4 clang@3.3 clang@3.2 clang@3.1
-- pgi ---------------------------------------------------------
pgi@14.3-0 pgi@13.2-0 pgi@12.1-0 pgi@10.9-0 pgi@8.0-1
pgi@13.10-0 pgi@13.1-1 pgi@11.10-0 pgi@10.2-0 pgi@7.1-3
pgi@13.6-0 pgi@12.8-0 pgi@11.1-0 pgi@9.0-4 pgi@7.0-6

Any of these compilers can be used to build Spack packages. More on
how this is done is in :ref:`sec-specs`.
@@ -806,65 +804,6 @@ flags to the ``icc`` command:
spec: intel@15.0.24.4.9.3


^^^
PGI
^^^

PGI comes with two sets of compilers for C++ and Fortran,
distinguishable by their names. "Old" compilers:

.. code-block:: yaml

cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgCC
f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgf77
fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgf90

"New" compilers:

.. code-block:: yaml

cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgc++
f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran

Older installations of PGI contains just the old compilers; whereas
newer installations contain the old and the new. The new compiler is
considered preferable, as some packages
(``hdf``) will not build with the old compiler.

When auto-detecting a PGI compiler, there are cases where Spack will
find the old compilers, when you really want it to find the new
compilers. It is best to check this ``compilers.yaml``; and if the old
compilers are being used, change ``pgf77`` and ``pgf90`` to
``pgfortran``.

Other issues:

* There are reports that some packages will not build with PGI,
including ``libpciaccess`` and ``openssl``. A workaround is to
build these packages with another compiler and then use them as
dependencies for PGI-build packages. For example:

.. code-block:: console

$ spack install openmpi%pgi ^libpciaccess%gcc


* PGI requires a license to use; see :ref:`licensed-compilers` for more
information on installation.

.. note::

It is believed the problem with HDF 4 is that everything is
compiled with the ``F77`` compiler, but at some point some Fortran
90 code slipped in there. So compilers that can handle both FORTRAN
77 and Fortran 90 (``gfortran``, ``pgfortran``, etc) are fine. But
compilers specific to one or the other (``pgf77``, ``pgf90``) won't
work.


^^^
NAG
^^^
@@ -1389,6 +1328,7 @@ Required:
* Microsoft Visual Studio
* Python
* Git
* 7z

Optional:
* Intel Fortran (needed for some packages)
@@ -1454,6 +1394,13 @@ as the project providing Git support on Windows. This is additionally the recomm
for installing Git on Windows, a link to which can be found above. Spack requires the
utilities vendored by this project.

"""
7zip
"""

A tool for extracting ``.xz`` files is required for extracting source tarballs. The latest 7zip
can be located at https://sourceforge.net/projects/sevenzip/.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Step 2: Install and setup Spack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1928,71 +1928,29 @@ to the empty list.
String. A URL pointing to license setup instructions for the software.
Defaults to the empty string.

For example, let's take a look at the package for the PGI compilers.
For example, let's take a look at the Arm Forge package.

.. code-block:: python

# Licensing
license_required = True
license_comment = "#"
license_files = ["license.dat"]
license_vars = ["PGROUPD_LICENSE_FILE", "LM_LICENSE_FILE"]
license_url = "http://www.pgroup.com/doc/pgiinstall.pdf"
license_comment = "#"
license_files = ["licences/Licence"]
license_vars = [
    "ALLINEA_LICENSE_DIR",
    "ALLINEA_LICENCE_DIR",
    "ALLINEA_LICENSE_FILE",
    "ALLINEA_LICENCE_FILE",
]
license_url = "https://developer.arm.com/documentation/101169/latest/Use-Arm-Licence-Server"

As you can see, PGI requires a license. Its license manager, FlexNet, uses
the ``#`` symbol to denote a comment. It expects the license file to be
named ``license.dat`` and to be located directly in the installation prefix.
If you would like the installation file to be located elsewhere, simply set
``PGROUPD_LICENSE_FILE`` or ``LM_LICENSE_FILE`` after installation. For
further instructions on installation and licensing, see the URL provided.
Arm Forge requires a license. Its license manager uses the ``#`` symbol to denote a comment.
It expects the license file to be named ``License`` and to be located in a ``licenses`` directory
in the installation prefix.

Let's walk through a sample PGI installation to see exactly what Spack is
and isn't capable of. Since PGI does not provide a download URL, it must
be downloaded manually. It can either be added to a mirror or located in
the current directory when ``spack install pgi`` is run. See :ref:`mirrors`
for instructions on setting up a mirror.

After running ``spack install pgi``, the first thing that will happen is
Spack will create a global license file located at
``$SPACK_ROOT/etc/spack/licenses/pgi/license.dat``. It will then open up the
file using :ref:`your favorite editor <controlling-the-editor>`. It will look like
this:

.. code-block:: sh

# A license is required to use pgi.
#
# The recommended solution is to store your license key in this global
# license file. After installation, the following symlink(s) will be
# added to point to this file (relative to the installation prefix):
#
# license.dat
#
# Alternatively, use one of the following environment variable(s):
#
# PGROUPD_LICENSE_FILE
# LM_LICENSE_FILE
#
# If you choose to store your license in a non-standard location, you may
# set one of these variable(s) to the full pathname to the license file, or
# port@host if you store your license keys on a dedicated license server.
# You will likely want to set this variable in a module file so that it
# gets loaded every time someone tries to use pgi.
#
# For further information on how to acquire a license, please refer to:
#
# http://www.pgroup.com/doc/pgiinstall.pdf
#
# You may enter your license below.

You can add your license directly to this file, or tell FlexNet to use a
license stored on a separate license server. Here is an example that
points to a license server called licman1:

.. code-block:: none

SERVER licman1.mcs.anl.gov 00163eb7fba5 27200
USE_SERVER
If you would like the installation file to be located elsewhere, simply set ``ALLINEA_LICENSE_DIR`` or
one of the other license variables after installation. For further instructions on installation and
licensing, see the URL provided.

If your package requires the license to install, you can reference the
location of this global license using ``self.global_license_file``.
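For a side-by-side with the Arm Forge attributes quoted above, a hypothetical licensed package might combine the same class attributes with ``self.global_license_file``. Every concrete value below is made up for illustration and is not from the Spack repositories:

```python
# Hypothetical licensed package; attribute names follow the packaging guide above,
# all concrete values are illustrative.
from spack.package import *


class LicensedTool(Package):
    """Example-only package that needs a license file at install time."""

    homepage = "https://example.com/licensed-tool"
    manual_download = True

    license_required = True
    license_comment = "#"
    license_files = ["licenses/Licence"]
    license_vars = ["EXAMPLE_LICENSE_FILE"]
    license_url = "https://example.com/licensing"

    def install(self, spec, prefix):
        # Location of the global license file Spack manages for this package.
        print("expecting a license at", self.global_license_file)
```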
@@ -2967,9 +2925,9 @@ make sense during the build phase may not be needed at runtime, and vice versa.
it makes sense to let a dependency set the environment variables for its dependents. To allow all
this, Spack provides four different methods that can be overridden in a package:

1. :meth:`setup_build_environment <spack.builder.Builder.setup_build_environment>`
1. :meth:`setup_build_environment <spack.builder.BaseBuilder.setup_build_environment>`
2. :meth:`setup_run_environment <spack.package_base.PackageBase.setup_run_environment>`
3. :meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
3. :meth:`setup_dependent_build_environment <spack.builder.BaseBuilder.setup_dependent_build_environment>`
4. :meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`

The Qt package, for instance, uses this call:
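Separately from the Qt-specific excerpt referenced above, here is a generic sketch of overriding two of the four hooks in that list. The method signatures follow the packaging guide; the package name and environment-variable values are illustrative:

```python
# Sketch of the build/run environment hooks; variable names are illustrative.
from spack.package import *


class EnvDemo(Package):
    """Example-only package showing environment hooks."""

    def setup_build_environment(self, env):
        # Runs before this package's own build.
        env.set("DEMO_BUILD_MODE", "release")
        env.append_flags("CFLAGS", "-O2")

    def setup_run_environment(self, env):
        # Runs when the installed package is loaded or used at runtime.
        env.prepend_path("PATH", self.prefix.bin)
```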
@@ -5179,7 +5137,7 @@ other checks.
- Not applicable
* - :ref:`PythonPackage <pythonpackage>`
- Not applicable
- ``test`` (module imports)
- ``test_imports`` (module imports)
* - :ref:`QMakePackage <qmakepackage>`
- ``check`` (``make check``)
- Not applicable
@@ -5188,7 +5146,7 @@ other checks.
- Not applicable
* - :ref:`SIPPackage <sippackage>`
- Not applicable
- ``test`` (module imports)
- ``test_imports`` (module imports)
* - :ref:`WafPackage <wafpackage>`
- ``build_test`` (must be overridden)
- ``install_test`` (must be overridden)
@@ -1,12 +1,12 @@
sphinx==8.1.3
sphinxcontrib-programoutput==0.17
sphinxcontrib-programoutput==0.18
sphinx_design==0.6.1
sphinx-rtd-theme==3.0.1
sphinx-rtd-theme==3.0.2
python-levenshtein==0.26.1
docutils==0.21.2
pygments==2.18.0
urllib3==2.2.3
pytest==8.3.3
urllib3==2.3.0
pytest==8.3.4
isort==5.13.2
black==24.10.0
flake8==7.1.1
@@ -8,7 +8,6 @@ unzip, , , Compress/Decompress archives
bzip2, , , Compress/Decompress archives
xz, , , Compress/Decompress archives
zstd, , Optional, Compress/Decompress archives
file, , , Create/Use Buildcaches
lsb-release, , , Linux: identify operating system version
gnupg2, , , Sign/Verify Buildcaches
git, , , Manage Software Repositories
lib/spack/external/_vendoring/typing_extensions.LICENSE (vendored, new file, 254 lines)
@@ -0,0 +1,254 @@
A. HISTORY OF THE SOFTWARE
==========================

Python was created in the early 1990s by Guido van Rossum at Stichting
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
as a successor of a language called ABC. Guido remains Python's
principal author, although it includes many contributions from others.

In 1995, Guido continued his work on Python at the Corporation for
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
in Reston, Virginia where he released several versions of the
software.

In May 2000, Guido and the Python core development team moved to
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
year, the PythonLabs team moved to Digital Creations (now Zope
Corporation, see http://www.zope.com). In 2001, the Python Software
Foundation (PSF, see http://www.python.org/psf/) was formed, a
non-profit organization created specifically to own Python-related
Intellectual Property. Zope Corporation is a sponsoring member of
the PSF.

All Python releases are Open Source (see http://www.opensource.org for
the Open Source Definition). Historically, most, but not all, Python
releases have also been GPL-compatible; the table below summarizes
the various releases.

    Release         Derived     Year        Owner       GPL-
                    from                                compatible? (1)

    0.9.0 thru 1.2              1991-1995   CWI         yes
    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
    1.6             1.5.2       2000        CNRI        no
    2.0             1.6         2000        BeOpen.com  no
    1.6.1           1.6         2001        CNRI        yes (2)
    2.1             2.0+1.6.1   2001        PSF         no
    2.0.1           2.0+1.6.1   2001        PSF         yes
    2.1.1           2.1+2.0.1   2001        PSF         yes
    2.1.2           2.1.1       2002        PSF         yes
    2.1.3           2.1.2       2002        PSF         yes
    2.2 and above   2.1.1       2001-now    PSF         yes

Footnotes:

(1) GPL-compatible doesn't mean that we're distributing Python under
    the GPL. All Python licenses, unlike the GPL, let you distribute
    a modified version without making your changes open source. The
    GPL-compatible licenses make it possible to combine Python with
    other software that is released under the GPL; the others don't.

(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
    because its license has a choice of law clause. According to
    CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
    is "not incompatible" with the GPL.

Thanks to the many outside volunteers who have worked under Guido's
direction to make these releases possible.


B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
===============================================================

PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------

1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.

2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are
retained in Python alone or in any derivative version prepared by Licensee.

3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.

4. PSF is making Python available to Licensee on an "AS IS"
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.

8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.


BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
-------------------------------------------

BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1

1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
Individual or Organization ("Licensee") accessing and otherwise using
this software in source or binary form and its associated
documentation ("the Software").

2. Subject to the terms and conditions of this BeOpen Python License
Agreement, BeOpen hereby grants Licensee a non-exclusive,
royalty-free, world-wide license to reproduce, analyze, test, perform
and/or display publicly, prepare derivative works, distribute, and
otherwise use the Software alone or in any derivative version,
provided, however, that the BeOpen Python License is retained in the
Software, alone or in any derivative version prepared by Licensee.

3. BeOpen is making the Software available to Licensee on an "AS IS"
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

5. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

6. This License Agreement shall be governed by and interpreted in all
respects by the law of the State of California, excluding conflict of
law provisions. Nothing in this License Agreement shall be deemed to
create any relationship of agency, partnership, or joint venture
between BeOpen and Licensee. This License Agreement does not grant
permission to use BeOpen trademarks or trade names in a trademark
sense to endorse or promote products or services of Licensee, or any
third party. As an exception, the "BeOpen Python" logos available at
http://www.pythonlabs.com/logos.html may be used according to the
permissions granted on that web page.

7. By copying, installing or otherwise using the software, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.


CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
---------------------------------------

1. This LICENSE AGREEMENT is between the Corporation for National
Research Initiatives, having an office at 1895 Preston White Drive,
Reston, VA 20191 ("CNRI"), and the Individual or Organization
("Licensee") accessing and otherwise using Python 1.6.1 software in
source or binary form and its associated documentation.

2. Subject to the terms and conditions of this License Agreement, CNRI
hereby grants Licensee a nonexclusive, royalty-free, world-wide
license to reproduce, analyze, test, perform and/or display publicly,
prepare derivative works, distribute, and otherwise use Python 1.6.1
alone or in any derivative version, provided, however, that CNRI's
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
1995-2001 Corporation for National Research Initiatives; All Rights
Reserved" are retained in Python 1.6.1 alone or in any derivative
version prepared by Licensee. Alternately, in lieu of CNRI's License
Agreement, Licensee may substitute the following text (omitting the
quotes): "Python 1.6.1 is made available subject to the terms and
conditions in CNRI's License Agreement. This Agreement together with
Python 1.6.1 may be located on the Internet using the following
unique, persistent identifier (known as a handle): 1895.22/1013. This
Agreement may also be obtained from a proxy server on the Internet
using the following URL: http://hdl.handle.net/1895.22/1013".

3. In the event Licensee prepares a derivative work that is based on
or incorporates Python 1.6.1 or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python 1.6.1.

4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

7. This License Agreement shall be governed by the federal
intellectual property law of the United States, including without
limitation the federal copyright law, and, to the extent such
U.S. federal law does not apply, by the law of the Commonwealth of
Virginia, excluding Virginia's conflict of law provisions.
Notwithstanding the foregoing, with regard to derivative works based
on Python 1.6.1 that incorporate non-separable material that was
previously distributed under the GNU General Public License (GPL), the
law of the Commonwealth of Virginia shall govern this License
Agreement only as to issues arising under or with respect to
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
License Agreement shall be deemed to create any relationship of
agency, partnership, or joint venture between CNRI and Licensee. This
License Agreement does not grant permission to use CNRI trademarks or
trade name in a trademark sense to endorse or promote products or
services of Licensee, or any third party.

8. By clicking on the "ACCEPT" button where indicated, or by copying,
installing or otherwise using Python 1.6.1, Licensee agrees to be
bound by the terms and conditions of this License Agreement.

ACCEPT


CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
--------------------------------------------------

Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
The Netherlands. All rights reserved.

Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name of Stichting Mathematisch
Centrum or CWI not be used in advertising or publicity pertaining to
distribution of the software without specific, written prior
permission.

STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
2908  lib/spack/external/_vendoring/typing_extensions.py (vendored, new file)
      File diff suppressed because it is too large

1     lib/spack/external/_vendoring/typing_extensions.pyi (vendored, new file)
@@ -0,0 +1 @@
from typing_extensions import *
1     lib/spack/external/vendor.txt (vendored)
@@ -8,3 +8,4 @@ six==1.16.0
macholib==1.16.2
altgraph==0.17.3
ruamel.yaml==0.17.21
typing_extensions==4.1.1
@@ -66,7 +66,7 @@ def _is_url(path_or_url: str) -> bool:
return result

def system_path_filter(_func=None, arg_slice: Optional[slice] = None):
def _system_path_filter(_func=None, arg_slice: Optional[slice] = None):
"""Filters function arguments to account for platform path separators.
Optional slicing range can be specified to select specific arguments

@@ -100,6 +100,16 @@ def path_filter_caller(*args, **kwargs):
return holder_func

def _noop_decorator(_func=None, arg_slice: Optional[slice] = None):
return _func if _func else lambda x: x

if sys.platform == "win32":
system_path_filter = _system_path_filter
else:
system_path_filter = _noop_decorator

def sanitize_win_longpath(path: str) -> str:
"""Strip Windows extended path prefix from strings
Returns sanitized string.
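
The hunk above swaps in a no-op decorator on non-Windows platforms. A minimal sketch of how such a conditional decorator is typically applied (the decorated function below is illustrative, not part of the diff):

# Illustrative only: on win32 the arguments selected by arg_slice get their
# path separators normalized; elsewhere the function is returned unchanged.
@system_path_filter(arg_slice=slice(0, 1))
def read_first_line(path: str) -> str:
    with open(path, "r", encoding="utf-8") as f:
        return f.readline()
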
@@ -24,6 +24,7 @@
Callable,
Deque,
Dict,
Generator,
Iterable,
List,
Match,

@@ -300,35 +301,32 @@ def filter_file(
ignore_absent: bool = False,
start_at: Optional[str] = None,
stop_at: Optional[str] = None,
encoding: Optional[str] = "utf-8",
) -> None:
r"""Like sed, but uses python regular expressions.

Filters every line of each file through regex and replaces the file
with a filtered version. Preserves mode of filtered files.
Filters every line of each file through regex and replaces the file with a filtered version.
Preserves mode of filtered files.

As with re.sub, ``repl`` can be either a string or a callable.
If it is a callable, it is passed the match object and should
return a suitable replacement string. If it is a string, it
can contain ``\1``, ``\2``, etc. to represent back-substitution
as sed would allow.
As with re.sub, ``repl`` can be either a string or a callable. If it is a callable, it is
passed the match object and should return a suitable replacement string. If it is a string, it
can contain ``\1``, ``\2``, etc. to represent back-substitution as sed would allow.

Args:
regex (str): The regular expression to search for
repl (str): The string to replace matches with
*filenames: One or more files to search and replace
string (bool): Treat regex as a plain string. Default it False
backup (bool): Make backup file(s) suffixed with ``~``. Default is False
ignore_absent (bool): Ignore any files that don't exist.
Default is False
start_at (str): Marker used to start applying the replacements. If a
text line matches this marker filtering is started at the next line.
All contents before the marker and the marker itself are copied
verbatim. Default is to start filtering from the first line of the
file.
stop_at (str): Marker used to stop scanning the file further. If a text
line matches this marker filtering is stopped and the rest of the
file is copied verbatim. Default is to filter until the end of the
file.
regex: The regular expression to search for
repl: The string to replace matches with
*filenames: One or more files to search and replace string: Treat regex as a plain string.
Default it False backup: Make backup file(s) suffixed with ``~``. Default is False
ignore_absent: Ignore any files that don't exist. Default is False
start_at: Marker used to start applying the replacements. If a text line matches this
marker filtering is started at the next line. All contents before the marker and the
marker itself are copied verbatim. Default is to start filtering from the first line of
the file.
stop_at: Marker used to stop scanning the file further. If a text line matches this marker
filtering is stopped and the rest of the file is copied verbatim. Default is to filter
until the end of the file.
encoding: The encoding to use when reading and writing the files. Default is None, which
uses the system's default encoding.
"""
# Allow strings to use \1, \2, etc. for replacement, like sed
if not callable(repl):
@@ -344,72 +342,56 @@ def groupid_to_group(x):

if string:
regex = re.escape(regex)
for filename in path_to_os_path(*filenames):
msg = 'FILTER FILE: {0} [replacing "{1}"]'
tty.debug(msg.format(filename, regex))

backup_filename = filename + "~"
tmp_filename = filename + ".spack~"

if ignore_absent and not os.path.exists(filename):
msg = 'FILTER FILE: file "{0}" not found. Skipping to next file.'
tty.debug(msg.format(filename))
regex_compiled = re.compile(regex)
for path in path_to_os_path(*filenames):
if ignore_absent and not os.path.exists(path):
tty.debug(f'FILTER FILE: file "{path}" not found. Skipping to next file.')
continue
else:
tty.debug(f'FILTER FILE: {path} [replacing "{regex}"]')

# Create backup file. Don't overwrite an existing backup
# file in case this file is being filtered multiple times.
if not os.path.exists(backup_filename):
shutil.copy(filename, backup_filename)
fd, temp_path = tempfile.mkstemp(
prefix=f"{os.path.basename(path)}.", dir=os.path.dirname(path)
)
os.close(fd)

# Create a temporary file to read from. We cannot use backup_filename
# in case filter_file is invoked multiple times on the same file.
shutil.copy(filename, tmp_filename)
shutil.copy(path, temp_path)
errored = False

try:
# Open as a text file and filter until the end of the file is
# reached, or we found a marker in the line if it was specified
#
# To avoid translating line endings (\n to \r\n and vice-versa)
# we force os.open to ignore translations and use the line endings
# the file comes with
with open(tmp_filename, mode="r", errors="surrogateescape", newline="") as input_file:
with open(filename, mode="w", errors="surrogateescape", newline="") as output_file:
do_filtering = start_at is None
# Using iter and readline is a workaround needed not to
# disable input_file.tell(), which will happen if we call
# input_file.next() implicitly via the for loop
for line in iter(input_file.readline, ""):
if stop_at is not None:
current_position = input_file.tell()
# Open as a text file and filter until the end of the file is reached, or we found a
# marker in the line if it was specified. To avoid translating line endings (\n to
# \r\n and vice-versa) use newline="".
with open(
temp_path, mode="r", errors="surrogateescape", newline="", encoding=encoding
) as input_file, open(
path, mode="w", errors="surrogateescape", newline="", encoding=encoding
) as output_file:
if start_at is None and stop_at is None:  # common case, avoids branching in loop
for line in input_file:
output_file.write(re.sub(regex_compiled, repl, line))
else:
# state is -1 before start_at; 0 between; 1 after stop_at
state = 0 if start_at is None else -1
for line in input_file:
if state == 0:
if stop_at == line.strip():
output_file.write(line)
break
if do_filtering:
filtered_line = re.sub(regex, repl, line)
output_file.write(filtered_line)
else:
do_filtering = start_at == line.strip()
output_file.write(line)
else:
current_position = None

# If we stopped filtering at some point, reopen the file in
# binary mode and copy verbatim the remaining part
if current_position and stop_at:
with open(tmp_filename, mode="rb") as input_binary_buffer:
input_binary_buffer.seek(current_position)
with open(filename, mode="ab") as output_binary_buffer:
output_binary_buffer.writelines(input_binary_buffer.readlines())
state = 1
else:
line = re.sub(regex_compiled, repl, line)
elif state == -1 and start_at == line.strip():
state = 0
output_file.write(line)

except BaseException:
# clean up the original file on failure.
shutil.move(backup_filename, filename)
# restore the original file
os.rename(temp_path, path)
errored = True
raise

finally:
os.remove(tmp_filename)
if not backup and os.path.exists(backup_filename):
os.remove(backup_filename)
if not errored and not backup:
os.unlink(temp_path)


class FileFilter:
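
A minimal usage sketch of the filter_file API shown above (the path, pattern, and replacement are illustrative, not taken from the diff):

# Hypothetical call: rewrite a shebang in one script, keeping no backup and
# silently skipping the file if it does not exist.
filter_file(
    r"^#!/usr/bin/python$",
    "#!/opt/python/bin/python",
    "scripts/run.py",
    ignore_absent=True,
    encoding="utf-8",
)
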
@@ -1114,12 +1096,12 @@ def hash_directory(directory, ignore=[]):

@contextmanager
@system_path_filter
def write_tmp_and_move(filename):
def write_tmp_and_move(filename: str, *, encoding: Optional[str] = None):
"""Write to a temporary file, then move into place."""
dirname = os.path.dirname(filename)
basename = os.path.basename(filename)
tmp = os.path.join(dirname, ".%s.tmp" % basename)
with open(tmp, "w") as f:
with open(tmp, "w", encoding=encoding) as f:
yield f
shutil.move(tmp, filename)
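
A short sketch of the write_tmp_and_move context manager above, which writes to a dotfile next to the target and only renames it into place after the block completes (the file name and content are illustrative):

# Hypothetical call site: the target file ends up either fully written or untouched.
with write_tmp_and_move("/tmp/index.json", encoding="utf-8") as f:
    f.write('{"keys": {}}')
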
@@ -2772,22 +2754,6 @@ def prefixes(path):
return paths


@system_path_filter
def md5sum(file):
"""Compute the MD5 sum of a file.

Args:
file (str): file to be checksummed

Returns:
MD5 sum of the file's content
"""
md5 = hashlib.md5()
with open(file, "rb") as f:
md5.update(f.read())
return md5.digest()


@system_path_filter
def remove_directory_contents(dir):
"""Remove all contents of a directory."""

@@ -2838,6 +2804,25 @@ def temporary_dir(
remove_directory_contents(tmp_dir)


@contextmanager
def edit_in_place_through_temporary_file(file_path: str) -> Generator[str, None, None]:
"""Context manager for modifying ``file_path`` in place, preserving its inode and hardlinks,
for functions or external tools that do not support in-place editing. Notice that this function
is unsafe in that it works with paths instead of a file descriptors, but this is by design,
since we assume the call site will create a new inode at the same path."""
tmp_fd, tmp_path = tempfile.mkstemp(
dir=os.path.dirname(file_path), prefix=f"{os.path.basename(file_path)}."
)
# windows cannot replace a file with open fds, so close since the call site needs to replace.
os.close(tmp_fd)
try:
shutil.copyfile(file_path, tmp_path, follow_symlinks=True)
yield tmp_path
shutil.copyfile(tmp_path, file_path, follow_symlinks=True)
finally:
os.unlink(tmp_path)


def filesummary(path, print_bytes=16) -> Tuple[int, bytes]:
"""Create a small summary of the given file. Does not error
when file does not exist.
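
A short sketch of how the new edit_in_place_through_temporary_file context manager is meant to be used (the path and the tool invocation are placeholders):

# Hypothetical call site: run a tool that cannot edit in place while keeping the
# original inode, and therefore any hardlinks, of the target file.
with edit_in_place_through_temporary_file("/opt/spack/opt/pkg/bin/tool") as tmp_path:
    some_rewriting_tool(tmp_path)  # placeholder for e.g. a codesign or relocation step
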
@@ -863,8 +863,10 @@ def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:

if sys.version_info >= (3, 9):
PatternStr = re.Pattern[str]
PatternBytes = re.Pattern[bytes]
else:
PatternStr = typing.Pattern[str]
PatternBytes = typing.Pattern[bytes]


def fnmatch_translate_multiple(named_patterns: Dict[str, str]) -> str:
@@ -96,8 +96,8 @@ def get_fh(self, path: str) -> IO:
Arguments:
path: path to lock file we want a filehandle for
"""
# Open writable files as 'r+' so we can upgrade to write later
os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "r+"
# Open writable files as rb+ so we can upgrade to write later
os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "rb+"

pid = os.getpid()
open_file = None # OpenFile object, if there is one

@@ -124,7 +124,7 @@ def get_fh(self, path: str) -> IO:
# we know path exists but not if it's writable. If it's read-only,
# only open the file for reading (and fail if we're trying to get
# an exclusive (write) lock on it)
os_mode, fh_mode = os.O_RDONLY, "r"
os_mode, fh_mode = os.O_RDONLY, "rb"

fd = os.open(path, os_mode)
fh = os.fdopen(fd, fh_mode)

@@ -243,7 +243,7 @@ def __init__(
helpful for distinguishing between different Spack locks.
"""
self.path = path
self._file: Optional[IO] = None
self._file: Optional[IO[bytes]] = None
self._reads = 0
self._writes = 0

@@ -329,9 +329,9 @@ def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
self._ensure_parent_directory()
self._file = FILE_TRACKER.get_fh(self.path)

if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "r":
if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "rb":
# Attempt to upgrade to write lock w/a read-only file.
# If the file were writable, we'd have opened it 'r+'
# If the file were writable, we'd have opened it rb+
raise LockROFileError(self.path)

self._log_debug(

@@ -426,7 +426,7 @@ def _read_log_debug_data(self) -> None:

line = self._file.read()
if line:
pid, host = line.strip().split(",")
pid, host = line.decode("utf-8").strip().split(",")
_, _, pid = pid.rpartition("=")
_, _, self.host = host.rpartition("=")
self.pid = int(pid)

@@ -442,7 +442,7 @@ def _write_log_debug_data(self) -> None:

# write pid, host to disk to sync over FS
self._file.seek(0)
self._file.write("pid=%s,host=%s" % (self.pid, self.host))
self._file.write(f"pid={self.pid},host={self.host}".encode("utf-8"))
self._file.truncate()
self._file.flush()
os.fsync(self._file.fileno())
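
The lock debug record is now written as bytes; a tiny standalone sketch of the encode/decode round trip used in the two hunks above (values are made up):

# Illustrative only: mirrors _write_log_debug_data / _read_log_debug_data above.
record = "pid=1234,host=node1".encode("utf-8")
pid, host = record.decode("utf-8").strip().split(",")
assert pid.rpartition("=")[2] == "1234"
assert host.rpartition("=")[2] == "node1"
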
@@ -161,7 +161,7 @@ def _err_check(result, func, args):
)
# Use conout$ here to handle a redirectired stdout/get active console associated
# with spack
with open(r"\\.\CONOUT$", "w") as conout:
with open(r"\\.\CONOUT$", "w", encoding="utf-8") as conout:
# Link above would use kernel32.GetStdHandle(-11) however this would not handle
# a redirected stdout appropriately, so we always refer to the current CONSOLE out
# which is defined as conout$ on Windows.
@@ -762,7 +762,7 @@ def __enter__(self):
self.reader = open(self.logfile, mode="rb+")

# Dup stdout so we can still write to it after redirection
self.echo_writer = open(os.dup(sys.stdout.fileno()), "w")
self.echo_writer = open(os.dup(sys.stdout.fileno()), "w", encoding=sys.stdout.encoding)
# Redirect stdout and stderr to write to logfile
self.stderr.redirect_stream(self.writer.fileno())
self.stdout.redirect_stream(self.writer.fileno())

@@ -879,10 +879,13 @@ def _writer_daemon(
write_fd.close()

# 1. Use line buffering (3rd param = 1) since Python 3 has a bug
# that prevents unbuffered text I/O.
# 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
# that prevents unbuffered text I/O. [needs citation]
# 2. Enforce a UTF-8 interpretation of build process output with errors replaced by '?'.
# The downside is that the log file will not contain the exact output of the build process.
# 3. closefd=False because Connection has "ownership"
read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)
read_file = os.fdopen(
read_fd.fileno(), "r", 1, encoding="utf-8", errors="replace", closefd=False
)

if stdin_fd:
stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)

@@ -928,11 +931,7 @@ def _writer_daemon(
try:
while line_count < 100:
# Handle output from the calling process.
try:
line = _retry(read_file.readline)()
except UnicodeDecodeError:
# installs like --test=root gpgme produce non-UTF8 logs
line = "<line lost: output was not encoded as UTF-8>\n"
line = _retry(read_file.readline)()

if not line:
return

@@ -946,6 +945,13 @@ def _writer_daemon(
output_line = clean_line
if filter_fn:
output_line = filter_fn(clean_line)
enc = sys.stdout.encoding
if enc != "utf-8":
# On Python 3.6 and 3.7-3.14 with non-{utf-8,C} locale stdout
# may not be able to handle utf-8 output. We do an inefficient
# dance of re-encoding with errors replaced, so stdout.write
# does not raise.
output_line = output_line.encode(enc, "replace").decode(enc)
sys.stdout.write(output_line)

# Stripped output to log file.
@@ -11,7 +11,7 @@
import spack.util.git

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.23.0.dev0"
__version__ = "0.24.0.dev0"
spack_version = __version__
@@ -571,8 +571,13 @@ def _search_for_deprecated_package_methods(pkgs, error_cls):
@package_properties
def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
"""Ensure package names are lowercase and consistent"""
reserved_names = ("all",)
badname_regex, errors = re.compile(r"[_A-Z]"), []
for pkg_name in pkgs:
if pkg_name in reserved_names:
error_msg = f"The name '{pkg_name}' is reserved, and cannot be used for packages"
errors.append(error_cls(error_msg, []))

if badname_regex.search(pkg_name):
error_msg = f"Package name '{pkg_name}' should be lowercase and must not contain '_'"
errors.append(error_cls(error_msg, []))

@@ -651,7 +656,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
for pkg_name in pkgs:
details = []
filename = spack.repo.PATH.filename_for_package_name(pkg_name)
with open(filename, "r") as package_file:
with open(filename, "r", encoding="utf-8") as package_file:
for i, line in enumerate(package_file):
pattern = next((r for r in fixme_regexes if r.search(line)), None)
if pattern:

@@ -688,19 +693,19 @@ def invalid_sha256_digest(fetcher):
return h, True
return None, False

error_msg = "Package '{}' does not use sha256 checksum".format(pkg_name)
error_msg = f"Package '{pkg_name}' does not use sha256 checksum"
details = []
for v, args in pkg.versions.items():
fetcher = spack.fetch_strategy.for_package_version(pkg, v)
digest, is_bad = invalid_sha256_digest(fetcher)
if is_bad:
details.append("{}@{} uses {}".format(pkg_name, v, digest))
details.append(f"{pkg_name}@{v} uses {digest}")

for _, resources in pkg.resources.items():
for resource in resources:
digest, is_bad = invalid_sha256_digest(resource.fetcher)
if is_bad:
details.append("Resource in '{}' uses {}".format(pkg_name, digest))
details.append(f"Resource in '{pkg_name}' uses {digest}")
if details:
errors.append(error_cls(error_msg, details))

@@ -804,7 +809,7 @@ def _uses_deprecated_globals(pkgs, error_cls):
continue

file = spack.repo.PATH.filename_for_package_name(pkg_name)
tree = ast.parse(open(file).read())
tree = ast.parse(open(file, "rb").read())
visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args", "std_pip_args"))
visitor.visit(tree)
if visitor.references_to_globals:

@@ -1004,20 +1009,6 @@ def _issues_in_depends_on_directive(pkgs, error_cls):

for when, deps_by_name in pkg_cls.dependencies.items():
for dep_name, dep in deps_by_name.items():
# Check if there are nested dependencies declared. We don't want directives like:
#
# depends_on('foo+bar ^fee+baz')
#
# but we'd like to have two dependencies listed instead.
nested_dependencies = dep.spec.dependencies()
if nested_dependencies:
summary = f"{pkg_name}: nested dependency declaration '{dep.spec}'"
ndir = len(nested_dependencies) + 1
details = [
f"split depends_on('{dep.spec}', when='{when}') into {ndir} directives",
f"in {filename}",
]
errors.append(error_cls(summary=summary, details=details))

def check_virtual_with_variants(spec, msg):
if not spec.virtual or not spec.variants:
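
A quick illustration of what the new lowercase-name audit above flags (the package names are made up):

import re

badname_regex = re.compile(r"[_A-Z]")  # same pattern as the audit above
assert badname_regex.search("My_Package")        # flagged: uppercase and underscore
assert badname_regex.search("py_numpy")          # flagged: underscore
assert badname_regex.search("zlib-ng") is None   # accepted: lowercase with dash
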
@@ -24,13 +24,12 @@
import urllib.request
import warnings
from contextlib import closing
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
from typing import IO, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union

import llnl.util.filesystem as fsys
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
from llnl.util.symlink import readlink
from llnl.util.filesystem import mkdirp

import spack.caches
import spack.config as config

@@ -40,7 +39,7 @@
import spack.hash_types as ht
import spack.hooks
import spack.hooks.sbang
import spack.mirror
import spack.mirrors.mirror
import spack.oci.image
import spack.oci.oci
import spack.oci.opener

@@ -54,7 +53,6 @@
import spack.util.archive
import spack.util.crypto
import spack.util.file_cache as file_cache
import spack.util.filesystem as ssys
import spack.util.gpg
import spack.util.parallel
import spack.util.path

@@ -69,10 +67,8 @@
Digest,
ImageReference,
default_config,
default_index_tag,
default_manifest,
default_tag,
tag_is_spec,
ensure_valid_tag,
)
from spack.oci.oci import (
copy_missing_layers_with_retry,

@@ -83,10 +79,11 @@
)
from spack.package_prefs import get_package_dir_permissions, get_package_group
from spack.relocate_text import utf8_paths_to_single_binary_regex
from spack.spec import Spec
from spack.stage import Stage
from spack.util.executable import which

from .enums import InstallRecordStatus

BUILD_CACHE_RELATIVE_PATH = "build_cache"
BUILD_CACHE_KEYS_RELATIVE_PATH = "_pgp"

@@ -252,7 +249,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):

spec_list = [
s
for s in db.query_local(installed=any)
for s in db.query_local(installed=InstallRecordStatus.ANY)
if s.external or db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
]

@@ -367,7 +364,7 @@ def update(self, with_cooldown=False):
on disk under ``_index_cache_root``)."""
self._init_local_index_cache()
configured_mirror_urls = [
m.fetch_url for m in spack.mirror.MirrorCollection(binary=True).values()
m.fetch_url for m in spack.mirrors.mirror.MirrorCollection(binary=True).values()
]
items_to_remove = []
spec_cache_clear_needed = False
@@ -584,133 +581,15 @@ def buildinfo_file_name(prefix):

def read_buildinfo_file(prefix):
"""Read buildinfo file"""
with open(buildinfo_file_name(prefix), "r") as f:
with open(buildinfo_file_name(prefix), "r", encoding="utf-8") as f:
return syaml.load(f)


class BuildManifestVisitor(BaseDirectoryVisitor):
"""Visitor that collects a list of files and symlinks
that can be checked for need of relocation. It knows how
to dedupe hardlinks and deal with symlinks to files and
directories."""

def __init__(self):
# Save unique identifiers of hardlinks to avoid relocating them multiple times
self.visited = set()

# Lists of files we will check
self.files = []
self.symlinks = []

def seen_before(self, root, rel_path):
stat_result = os.lstat(os.path.join(root, rel_path))
if stat_result.st_nlink == 1:
return False
identifier = (stat_result.st_dev, stat_result.st_ino)
if identifier in self.visited:
return True
else:
self.visited.add(identifier)
return False

def visit_file(self, root, rel_path, depth):
if self.seen_before(root, rel_path):
return
self.files.append(rel_path)

def visit_symlinked_file(self, root, rel_path, depth):
# Note: symlinks *can* be hardlinked, but it is unclear if
# symlinks can be relinked in-place (preserving inode).
# Therefore, we do *not* de-dupe hardlinked symlinks.
self.symlinks.append(rel_path)

def before_visit_dir(self, root, rel_path, depth):
return os.path.basename(rel_path) not in (".spack", "man")

def before_visit_symlinked_dir(self, root, rel_path, depth):
# Treat symlinked directories simply as symlinks.
self.visit_symlinked_file(root, rel_path, depth)
# Never recurse into symlinked directories.
return False


def file_matches(path, regex):
with open(path, "rb") as f:
contents = f.read()
return bool(regex.search(contents))


def get_buildfile_manifest(spec):
"""
Return a data structure with information about a build, including
text_to_relocate, binary_to_relocate, binary_to_relocate_fullpath
link_to_relocate, and other, which means it doesn't fit any of previous
checks (and should not be relocated). We exclude docs (man) and
metadata (.spack). This can be used to find a particular kind of file
in spack, or to generate the build metadata.
"""
data = {
"text_to_relocate": [],
"binary_to_relocate": [],
"link_to_relocate": [],
"other": [],
"binary_to_relocate_fullpath": [],
"hardlinks_deduped": True,
}

# Guard against filesystem footguns of hardlinks and symlinks by using
# a visitor to retrieve a list of files and symlinks, so we don't have
# to worry about hardlinks of symlinked dirs and what not.
visitor = BuildManifestVisitor()
root = spec.prefix
visit_directory_tree(root, visitor)

# Collect a list of prefixes for this package and it's dependencies, Spack will
# look for them to decide if text file needs to be relocated or not
prefixes = [d.prefix for d in spec.traverse(root=True, deptype="all") if not d.external]
prefixes.append(spack.hooks.sbang.sbang_install_path())
prefixes.append(str(spack.store.STORE.layout.root))

# Create a giant regex that matches all prefixes
regex = utf8_paths_to_single_binary_regex(prefixes)

# Symlinks.

# Obvious bugs:
# 1. relative links are not relocated.
# 2. paths are used as strings.
for rel_path in visitor.symlinks:
abs_path = os.path.join(root, rel_path)
link = readlink(abs_path)
if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
data["link_to_relocate"].append(rel_path)

# Non-symlinks.
for rel_path in visitor.files:
abs_path = os.path.join(root, rel_path)
m_type, m_subtype = ssys.mime_type(abs_path)

if relocate.needs_binary_relocation(m_type, m_subtype):
# Why is this branch not part of needs_binary_relocation? :(
if (
(
m_subtype in ("x-executable", "x-sharedlib", "x-pie-executable")
and sys.platform != "darwin"
)
or (m_subtype in ("x-mach-binary") and sys.platform == "darwin")
or (not rel_path.endswith(".o"))
):
data["binary_to_relocate"].append(rel_path)
data["binary_to_relocate_fullpath"].append(abs_path)
continue

elif relocate.needs_text_relocation(m_type, m_subtype) and file_matches(abs_path, regex):
data["text_to_relocate"].append(rel_path)
continue

data["other"].append(abs_path)

return data
def file_matches(f: IO[bytes], regex: llnl.util.lang.PatternBytes) -> bool:
try:
return bool(regex.search(f.read()))
finally:
f.seek(0)


def deps_to_relocate(spec):
@@ -743,17 +622,15 @@ def deps_to_relocate(spec):

def get_buildinfo_dict(spec):
"""Create metadata for a tarball"""
manifest = get_buildfile_manifest(spec)

return {
"sbang_install_path": spack.hooks.sbang.sbang_install_path(),
"buildpath": spack.store.STORE.layout.root,
"spackprefix": spack.paths.prefix,
"relative_prefix": os.path.relpath(spec.prefix, spack.store.STORE.layout.root),
"relocate_textfiles": manifest["text_to_relocate"],
"relocate_binaries": manifest["binary_to_relocate"],
"relocate_links": manifest["link_to_relocate"],
"hardlinks_deduped": manifest["hardlinks_deduped"],
# "relocate_textfiles": [],
# "relocate_binaries": [],
# "relocate_links": [],
"hardlinks_deduped": True,
"hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in deps_to_relocate(spec)},
}

@@ -825,10 +702,10 @@ def _read_specs_and_push_index(
contents = read_method(file)
# Need full spec.json name or this gets confused with index.json.
if file.endswith(".json.sig"):
specfile_json = Spec.extract_json_from_clearsig(contents)
fetched_spec = Spec.from_dict(specfile_json)
specfile_json = spack.spec.Spec.extract_json_from_clearsig(contents)
fetched_spec = spack.spec.Spec.from_dict(specfile_json)
elif file.endswith(".json"):
fetched_spec = Spec.from_json(contents)
fetched_spec = spack.spec.Spec.from_json(contents)
else:
continue

@@ -838,17 +715,17 @@ def _read_specs_and_push_index(
# Now generate the index, compute its hash, and push the two files to
# the mirror.
index_json_path = os.path.join(temp_dir, "index.json")
with open(index_json_path, "w") as f:
with open(index_json_path, "w", encoding="utf-8") as f:
db._write_to_file(f)

# Read the index back in and compute its hash
with open(index_json_path) as f:
with open(index_json_path, encoding="utf-8") as f:
index_string = f.read()
index_hash = compute_hash(index_string)

# Write the hash out to a local file
index_hash_path = os.path.join(temp_dir, "index.json.hash")
with open(index_hash_path, "w") as f:
with open(index_hash_path, "w", encoding="utf-8") as f:
f.write(index_hash)

# Push the index itself

@@ -882,7 +759,7 @@ def _specs_from_cache_aws_cli(cache_prefix):
aws = which("aws")

def file_read_method(file_path):
with open(file_path) as fd:
with open(file_path, encoding="utf-8") as fd:
return fd.read()

tmpspecsdir = tempfile.mkdtemp()

@@ -1027,7 +904,7 @@ def generate_key_index(key_prefix: str, tmpdir: str) -> None:
target = os.path.join(tmpdir, "index.json")

index = {"keys": dict((fingerprint, {}) for fingerprint in sorted(set(fingerprints)))}
with open(target, "w") as f:
with open(target, "w", encoding="utf-8") as f:
sjson.dump(index, f)

try:
@@ -1043,7 +920,55 @@ def generate_key_index(key_prefix: str, tmpdir: str) -> None:
) from e


def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
class FileTypes:
BINARY = 0
TEXT = 1
UNKNOWN = 2


NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7F-\x9F]")


def file_type(f: IO[bytes]) -> int:
try:
# first check if this is an ELF or mach-o binary.
magic = f.read(8)
if len(magic) < 8:
return FileTypes.UNKNOWN
elif relocate.is_elf_magic(magic) or relocate.is_macho_magic(magic):
return FileTypes.BINARY

f.seek(0)

# Then try utf-8, which has a fast exponential decay in false positive rate with file size.
# Use chunked reads for fast early exit.
f_txt = io.TextIOWrapper(f, encoding="utf-8", errors="strict")
try:
while f_txt.read(1024):
pass
return FileTypes.TEXT
except UnicodeError:
f_txt.seek(0)
pass
finally:
f_txt.detach()
# Finally try iso-8859-1 heuristically. In Python, all possible 256 byte values are valid.
# We classify it as text if it does not contain any control characters / null bytes.
data = f.read(1024)
while data:
if NOT_ISO8859_1_TEXT.search(data):
break
data = f.read(1024)
else:
return FileTypes.TEXT
return FileTypes.UNKNOWN
finally:
f.seek(0)


def tarfile_of_spec_prefix(
tar: tarfile.TarFile, prefix: str, prefixes_to_relocate: List[str]
) -> dict:
"""Create a tarfile of an install prefix of a spec. Skips existing buildinfo file.

Args:
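
A quick, standalone illustration of the ISO-8859-1 fallback used by file_type above (the byte strings are made up):

import re

# Same pattern as above: control bytes that disqualify a file from the text class.
NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7F-\x9F]")

assert NOT_ISO8859_1_TEXT.search(b"plain ascii text\n") is None       # classified as TEXT
assert NOT_ISO8859_1_TEXT.search(b"caf\xe9") is None                  # latin-1 accents are fine
assert NOT_ISO8859_1_TEXT.search(b"\x00\x01binary blob") is not None  # falls through to UNKNOWN
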
@@ -1059,6 +984,33 @@ def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
except OSError:
skip = lambda entry: False

binary_regex = utf8_paths_to_single_binary_regex(prefixes_to_relocate)

relocate_binaries = []
relocate_links = []
relocate_textfiles = []

# use callbacks to add files and symlinks, so we can register which files need relocation upon
# extraction.
def add_file(tar: tarfile.TarFile, info: tarfile.TarInfo, path: str):
with open(path, "rb") as f:
relpath = os.path.relpath(path, prefix)
# no need to relocate anything in the .spack directory
if relpath.split(os.sep, 1)[0] == ".spack":
tar.addfile(info, f)
return
f_type = file_type(f)
if f_type == FileTypes.BINARY:
relocate_binaries.append(os.path.relpath(path, prefix))
elif f_type == FileTypes.TEXT and file_matches(f, binary_regex):
relocate_textfiles.append(os.path.relpath(path, prefix))
tar.addfile(info, f)

def add_symlink(tar: tarfile.TarFile, info: tarfile.TarInfo, path: str):
if os.path.isabs(info.linkname) and binary_regex.match(info.linkname.encode("utf-8")):
relocate_links.append(os.path.relpath(path, prefix))
tar.addfile(info)

spack.util.archive.reproducible_tarfile_from_prefix(
tar,
prefix,

@@ -1066,29 +1018,51 @@ def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
# used in runtimes like AWS lambda.
include_parent_directories=True,
skip=skip,
add_file=add_file,
add_symlink=add_symlink,
)

return {
"relocate_binaries": relocate_binaries,
"relocate_links": relocate_links,
"relocate_textfiles": relocate_textfiles,
}


def create_tarball(spec: spack.spec.Spec, tarfile_path: str) -> Tuple[str, str]:
"""Create a tarball of a spec and return the checksums of the compressed tarfile and the
uncompressed tarfile."""
return _do_create_tarball(
tarfile_path,
spec.prefix,
buildinfo=get_buildinfo_dict(spec),
prefixes_to_relocate=prefixes_to_relocate(spec),
)


def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
def _do_create_tarball(
tarfile_path: str, prefix: str, buildinfo: dict, prefixes_to_relocate: List[str]
) -> Tuple[str, str]:
with spack.util.archive.gzip_compressed_tarfile(tarfile_path) as (
tar,
inner_checksum,
outer_checksum,
tar_gz_checksum,
tar_checksum,
):
# Tarball the install prefix
tarfile_of_spec_prefix(tar, binaries_dir)
files_to_relocate = tarfile_of_spec_prefix(tar, prefix, prefixes_to_relocate)
buildinfo.update(files_to_relocate)

# Serialize buildinfo for the tarball
bstring = syaml.dump(buildinfo, default_flow_style=True).encode("utf-8")
tarinfo = tarfile.TarInfo(
name=spack.util.archive.default_path_to_name(buildinfo_file_name(binaries_dir))
name=spack.util.archive.default_path_to_name(buildinfo_file_name(prefix))
)
tarinfo.type = tarfile.REGTYPE
tarinfo.size = len(bstring)
tarinfo.mode = 0o644
tar.addfile(tarinfo, io.BytesIO(bstring))

return inner_checksum.hexdigest(), outer_checksum.hexdigest()
return tar_gz_checksum.hexdigest(), tar_checksum.hexdigest()


class ExistsInBuildcache(NamedTuple):
@@ -1098,7 +1072,7 @@ class ExistsInBuildcache(NamedTuple):


class BuildcacheFiles:
def __init__(self, spec: Spec, local: str, remote: str):
def __init__(self, spec: spack.spec.Spec, local: str, remote: str):
"""
Args:
spec: The spec whose tarball and specfile are being managed.

@@ -1128,7 +1102,7 @@ def local_tarball(self) -> str:
return os.path.join(self.local, f"{self.spec.dag_hash()}.tar.gz")


def _exists_in_buildcache(spec: Spec, tmpdir: str, out_url: str) -> ExistsInBuildcache:
def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> ExistsInBuildcache:
"""returns a tuple of bools (signed, unsigned, tarball) indicating whether specfiles/tarballs
exist in the buildcache"""
files = BuildcacheFiles(spec, tmpdir, out_url)

@@ -1138,12 +1112,23 @@ def _exists_in_buildcache(spec: Spec, tmpdir: str, out_url: str) -> ExistsInBuil
return ExistsInBuildcache(signed, unsigned, tarball)


def prefixes_to_relocate(spec):
prefixes = [s.prefix for s in deps_to_relocate(spec)]
prefixes.append(spack.hooks.sbang.sbang_install_path())
prefixes.append(str(spack.store.STORE.layout.root))
return prefixes


def _url_upload_tarball_and_specfile(
spec: Spec, tmpdir: str, out_url: str, exists: ExistsInBuildcache, signing_key: Optional[str]
spec: spack.spec.Spec,
tmpdir: str,
out_url: str,
exists: ExistsInBuildcache,
signing_key: Optional[str],
):
files = BuildcacheFiles(spec, tmpdir, out_url)
tarball = files.local_tarball()
checksum, _ = _do_create_tarball(tarball, spec.prefix, get_buildinfo_dict(spec))
checksum, _ = create_tarball(spec, tarball)
spec_dict = spec.to_dict(hash=ht.dag_hash)
spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION
spec_dict["binary_cache_checksum"] = {"hash_algorithm": "sha256", "hash": checksum}

@@ -1157,7 +1142,7 @@ def _url_upload_tarball_and_specfile(
web_util.push_to_url(tarball, files.remote_tarball(), keep_original=False)

specfile = files.local_specfile()
with open(specfile, "w") as f:
with open(specfile, "w", encoding="utf-8") as f:
# Note: when using gpg clear sign, we need to avoid long lines (19995 chars).
# If lines are longer, they are truncated without error. Thanks GPG!
# So, here we still add newlines, but no indent, so save on file size and

@@ -1174,7 +1159,7 @@ def _url_upload_tarball_and_specfile(


class Uploader:
def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool):
def __init__(self, mirror: spack.mirrors.mirror.Mirror, force: bool, update_index: bool):
self.mirror = mirror
self.force = force
self.update_index = update_index

@@ -1222,7 +1207,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
class OCIUploader(Uploader):
def __init__(
self,
mirror: spack.mirror.Mirror,
mirror: spack.mirrors.mirror.Mirror,
force: bool,
update_index: bool,
base_image: Optional[str],

@@ -1271,7 +1256,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
class URLUploader(Uploader):
def __init__(
self,
mirror: spack.mirror.Mirror,
mirror: spack.mirrors.mirror.Mirror,
force: bool,
update_index: bool,
signing_key: Optional[str],

@@ -1295,7 +1280,7 @@ def push(


def make_uploader(
mirror: spack.mirror.Mirror,
mirror: spack.mirrors.mirror.Mirror,
force: bool = False,
update_index: bool = False,
signing_key: Optional[str] = None,

@@ -1312,7 +1297,7 @@ def make_uploader(
)


def _format_spec(spec: Spec) -> str:
def _format_spec(spec: spack.spec.Spec) -> str:
return spec.cformat("{name}{@version}{/hash:7}")
@@ -1335,7 +1320,7 @@ def _progress(self):
return f"[{self.n:{digits}}/{self.total}] "
return ""

def start(self, spec: Spec, running: bool) -> None:
def start(self, spec: spack.spec.Spec, running: bool) -> None:
self.n += 1
self.running = running
self.pre = self._progress()

@@ -1354,18 +1339,18 @@ def fail(self) -> None:


def _url_push(
specs: List[Spec],
specs: List[spack.spec.Spec],
out_url: str,
signing_key: Optional[str],
force: bool,
update_index: bool,
tmpdir: str,
executor: concurrent.futures.Executor,
) -> Tuple[List[Spec], List[Tuple[Spec, BaseException]]]:
) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
"""Pushes to the provided build cache, and returns a list of skipped specs that were already
present (when force=False), and a list of errors. Does not raise on error."""
skipped: List[Spec] = []
errors: List[Tuple[Spec, BaseException]] = []
skipped: List[spack.spec.Spec] = []
errors: List[Tuple[spack.spec.Spec, BaseException]] = []

exists_futures = [
executor.submit(_exists_in_buildcache, spec, tmpdir, out_url) for spec in specs

@@ -1438,7 +1423,7 @@ def _url_push(
return skipped, errors


def _oci_upload_success_msg(spec: Spec, digest: Digest, size: int, elapsed: float):
def _oci_upload_success_msg(spec: spack.spec.Spec, digest: Digest, size: int, elapsed: float):
elapsed = max(elapsed, 0.001)  # guard against division by zero
return (
f"Pushed {_format_spec(spec)}: {digest} ({elapsed:.2f}s, "

@@ -1467,13 +1452,11 @@ def _oci_push_pkg_blob(
filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")

# Create an oci.image.layer aka tarball of the package
compressed_tarfile_checksum, tarfile_checksum = _do_create_tarball(
filename, spec.prefix, get_buildinfo_dict(spec)
)
tar_gz_checksum, tar_checksum = create_tarball(spec, filename)

blob = spack.oci.oci.Blob(
Digest.from_sha256(compressed_tarfile_checksum),
Digest.from_sha256(tarfile_checksum),
Digest.from_sha256(tar_gz_checksum),
Digest.from_sha256(tar_checksum),
os.path.getsize(filename),
)

@@ -1524,7 +1507,7 @@ def _oci_put_manifest(
):
architecture = _oci_archspec_to_gooarch(specs[0])

expected_blobs: List[Spec] = [
expected_blobs: List[spack.spec.Spec] = [
s
for s in traverse.traverse_nodes(specs, order="topo", deptype=("link", "run"), root=True)
if not s.external

@@ -1568,7 +1551,7 @@ def _oci_put_manifest(

config_file = os.path.join(tmpdir, f"{specs[0].dag_hash()}.config.json")

with open(config_file, "w") as f:
with open(config_file, "w", encoding="utf-8") as f:
json.dump(config, f, separators=(",", ":"))

config_file_checksum = Digest.from_sha256(

@@ -1638,19 +1621,33 @@ def _oci_update_base_images(
)


def _oci_default_tag(spec: spack.spec.Spec) -> str:
"""Return a valid, default image tag for a spec."""
return ensure_valid_tag(f"{spec.name}-{spec.version}-{spec.dag_hash()}.spack")


#: Default OCI index tag
default_index_tag = "index.spack"


def tag_is_spec(tag: str) -> bool:
"""Check if a tag is likely a Spec"""
return tag.endswith(".spack") and tag != default_index_tag


def _oci_push(
*,
target_image: ImageReference,
base_image: Optional[ImageReference],
installed_specs_with_deps: List[Spec],
installed_specs_with_deps: List[spack.spec.Spec],
tmpdir: str,
executor: concurrent.futures.Executor,
force: bool = False,
) -> Tuple[
List[Spec],
List[spack.spec.Spec],
Dict[str, Tuple[dict, dict]],
Dict[str, spack.oci.oci.Blob],
List[Tuple[Spec, BaseException]],
List[Tuple[spack.spec.Spec, BaseException]],
]:
# Spec dag hash -> blob
checksums: Dict[str, spack.oci.oci.Blob] = {}
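
A tiny illustration of the new tag helpers above (the name, version, and hash are made up; ensure_valid_tag is assumed to leave an already-valid tag unchanged):

# Following the f-string in _oci_default_tag, a spec zlib@1.3.1 with a dag hash
# starting "abcdef7" would be tagged roughly like this:
tag = "zlib-1.3.1-abcdef7.spack"
assert tag_is_spec(tag)                # ends with ".spack" and is not the index tag
assert not tag_is_spec("index.spack")  # the index tag itself is excluded
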
@@ -1659,13 +1656,15 @@ def _oci_push(
base_images: Dict[str, Tuple[dict, dict]] = {}

# Specs not uploaded because they already exist
skipped: List[Spec] = []
skipped: List[spack.spec.Spec] = []

if not force:
tty.info("Checking for existing specs in the buildcache")
blobs_to_upload = []

tags_to_check = (target_image.with_tag(default_tag(s)) for s in installed_specs_with_deps)
tags_to_check = (
target_image.with_tag(_oci_default_tag(s)) for s in installed_specs_with_deps
)
available_blobs = executor.map(_oci_get_blob_info, tags_to_check)

for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):

@@ -1693,8 +1692,8 @@ def _oci_push(
executor.submit(_oci_push_pkg_blob, target_image, spec, tmpdir) for spec in blobs_to_upload
]

manifests_to_upload: List[Spec] = []
errors: List[Tuple[Spec, BaseException]] = []
manifests_to_upload: List[spack.spec.Spec] = []
errors: List[Tuple[spack.spec.Spec, BaseException]] = []

# And update the spec to blob mapping for successful uploads
for spec, blob_future in zip(blobs_to_upload, blob_futures):

@@ -1720,7 +1719,7 @@ def _oci_push(
base_image_cache=base_images,
)

def extra_config(spec: Spec):
def extra_config(spec: spack.spec.Spec):
spec_dict = spec.to_dict(hash=ht.dag_hash)
spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION
spec_dict["binary_cache_checksum"] = {

@@ -1736,7 +1735,7 @@ def extra_config(spec: Spec):
_oci_put_manifest,
base_images,
checksums,
target_image.with_tag(default_tag(spec)),
target_image.with_tag(_oci_default_tag(spec)),
tmpdir,
extra_config(spec),
{"org.opencontainers.image.description": spec.format()},

@@ -1753,7 +1752,7 @@ def extra_config(spec: Spec):
manifest_progress.start(spec, manifest_future.running())
if error is None:
manifest_progress.ok(
f"Tagged {_format_spec(spec)} as {target_image.with_tag(default_tag(spec))}"
f"Tagged {_format_spec(spec)} as {target_image.with_tag(_oci_default_tag(spec))}"
)
else:
manifest_progress.fail()

@@ -1788,13 +1787,13 @@ def _oci_update_index(
db = BuildCacheDatabase(db_root_dir)

for spec_dict in spec_dicts:
spec = Spec.from_dict(spec_dict)
spec = spack.spec.Spec.from_dict(spec_dict)
db.add(spec)
db.mark(spec, "in_buildcache", True)

# Create the index.json file
index_json_path = os.path.join(tmpdir, "index.json")
with open(index_json_path, "w") as f:
with open(index_json_path, "w", encoding="utf-8") as f:
db._write_to_file(f)

# Create an empty config.json file

@@ -1903,7 +1902,7 @@ def _get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, in
try:
as_string = binary_content.decode("utf-8")
if path.endswith(".json.sig"):
spec_dict = Spec.extract_json_from_clearsig(as_string)
spec_dict = spack.spec.Spec.extract_json_from_clearsig(as_string)
else:
spec_dict = json.loads(as_string)
except Exception as e:
@@ -1951,9 +1950,9 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No
"signature_verified": "true-if-binary-pkg-was-already-verified"
}
"""
configured_mirrors: Iterable[spack.mirror.Mirror] = spack.mirror.MirrorCollection(
binary=True
).values()
configured_mirrors: Iterable[spack.mirrors.mirror.Mirror] = (
spack.mirrors.mirror.MirrorCollection(binary=True).values()
)
if not configured_mirrors:
tty.die("Please add a spack mirror to allow download of pre-compiled packages.")

@@ -1978,7 +1977,7 @@ def fetch_url_to_mirror(url):
for mirror in configured_mirrors:
if mirror.fetch_url == url:
return mirror
return spack.mirror.Mirror(url)
return spack.mirrors.mirror.Mirror(url)

mirrors = [fetch_url_to_mirror(url) for url in mirror_urls]

@@ -1999,7 +1998,7 @@ def fetch_url_to_mirror(url):
if fetch_url.startswith("oci://"):
ref = spack.oci.image.ImageReference.from_string(
fetch_url[len("oci://") :]
).with_tag(spack.oci.image.default_tag(spec))
).with_tag(_oci_default_tag(spec))

# Fetch the manifest
try:

@@ -2243,7 +2242,8 @@ def relocate_package(spec):
]
if analogs:
# Prefer same-name analogs and prefer higher versions
# This matches the preferences in Spec.splice, so we will find same node
# This matches the preferences in spack.spec.Spec.splice, so we
# will find same node
analog = max(analogs, key=lambda a: (a.name == s.name, a.version))

lookup_dag_hash = analog.dag_hash()

@@ -2332,7 +2332,9 @@ def is_backup_file(file):
if not codesign:
return
for binary in changed_files:
codesign("-fs-", binary)
# preserve the original inode by running codesign on a copy
with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
codesign("-fs-", tmp_binary)

# If we are installing back to the same location
# relocate the sbang location if the spack directory changed
@@ -2413,6 +2415,14 @@ def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
yield m


def extract_buildcache_tarball(tarfile_path: str, destination: str) -> None:
with closing(tarfile.open(tarfile_path, "r")) as tar:
# Remove common prefix from tarball entries and directly extract them to the install dir.
tar.extractall(
path=destination, members=_tar_strip_component(tar, prefix=_ensure_common_prefix(tar))
)


def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
"""
extract binary tarball for given package into install area

@@ -2482,12 +2492,7 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
tarfile_path, size, contents, "sha256", expected, local_checksum
)
try:
with closing(tarfile.open(tarfile_path, "r")) as tar:
# Remove install prefix from tarfil to extract directly into spec.prefix
tar.extractall(
path=spec.prefix,
members=_tar_strip_component(tar, prefix=_ensure_common_prefix(tar)),
)
extract_buildcache_tarball(tarfile_path, destination=spec.prefix)
except Exception:
shutil.rmtree(spec.prefix, ignore_errors=True)
_delete_staged_downloads(download_result)
@@ -2646,7 +2651,7 @@ def try_direct_fetch(spec, mirrors=None):
specfile_is_signed = False
found_specs = []

binary_mirrors = spack.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()
binary_mirrors = spack.mirrors.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()

for mirror in binary_mirrors:
buildcache_fetch_url_json = url_util.join(

@@ -2677,10 +2682,10 @@ def try_direct_fetch(spec, mirrors=None):
# are concrete (as they are built) so we need to mark this spec
# concrete on read-in.
if specfile_is_signed:
specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
fetched_spec = Spec.from_dict(specfile_json)
specfile_json = spack.spec.Spec.extract_json_from_clearsig(specfile_contents)
fetched_spec = spack.spec.Spec.from_dict(specfile_json)
else:
fetched_spec = Spec.from_json(specfile_contents)
fetched_spec = spack.spec.Spec.from_json(specfile_contents)
fetched_spec._mark_concrete()

found_specs.append({"mirror_url": mirror.fetch_url, "spec": fetched_spec})

@@ -2707,7 +2712,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
if spec is None:
return []

if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
if not spack.mirrors.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
tty.debug("No Spack mirrors are currently configured")
return {}

@@ -2746,7 +2751,7 @@ def clear_spec_cache():

def get_keys(install=False, trust=False, force=False, mirrors=None):
"""Get pgp public keys available on mirror with suffix .pub"""
mirror_collection = mirrors or spack.mirror.MirrorCollection(binary=True)
mirror_collection = mirrors or spack.mirrors.mirror.MirrorCollection(binary=True)

if not mirror_collection:
tty.die("Please add a spack mirror to allow " + "download of build caches.")

@@ -2801,7 +2806,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):


def _url_push_keys(
*mirrors: Union[spack.mirror.Mirror, str],
*mirrors: Union[spack.mirrors.mirror.Mirror, str],
keys: List[str],
tmpdir: str,
update_index: bool = False,

@@ -2868,7 +2873,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):

"""
rebuilds = {}
for mirror in spack.mirror.MirrorCollection(mirrors, binary=True).values():
for mirror in spack.mirrors.mirror.MirrorCollection(mirrors, binary=True).values():
tty.debug("Checking for built specs at {0}".format(mirror.fetch_url))

rebuild_list = []

@@ -2885,7 +2890,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):
}

if output_file:
with open(output_file, "w") as outf:
with open(output_file, "w", encoding="utf-8") as outf:
outf.write(json.dumps(rebuilds))

return 1 if rebuilds else 0

@@ -2912,7 +2917,7 @@ def _download_buildcache_entry(mirror_root, descriptions):


def download_buildcache_entry(file_descriptions, mirror_url=None):
if not mirror_url and not spack.mirror.MirrorCollection(binary=True):
if not mirror_url and not spack.mirrors.mirror.MirrorCollection(binary=True):
tty.die(
"Please provide or add a spack mirror to allow " + "download of buildcache entries."
)

@@ -2921,7 +2926,7 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
mirror_root = os.path.join(mirror_url, BUILD_CACHE_RELATIVE_PATH)
return _download_buildcache_entry(mirror_root, file_descriptions)

for mirror in spack.mirror.MirrorCollection(binary=True).values():
for mirror in spack.mirrors.mirror.MirrorCollection(binary=True).values():
mirror_root = os.path.join(mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH)

if _download_buildcache_entry(mirror_root, file_descriptions):

@@ -2979,7 +2984,7 @@ def __init__(self, all_architectures):

self.possible_specs = specs

def __call__(self, spec: Spec, **kwargs):
def __call__(self, spec: spack.spec.Spec, **kwargs):
"""
Args:
spec: The spec being searched for

@@ -3117,7 +3122,7 @@ def __init__(self, url: str, local_hash, urlopen=None) -> None:

def conditional_fetch(self) -> FetchIndexResult:
"""Download an index from an OCI registry type mirror."""
url_manifest = self.ref.with_tag(spack.oci.image.default_index_tag).manifest_url()
url_manifest = self.ref.with_tag(default_index_tag).manifest_url()
try:
response = self.urlopen(
urllib.request.Request(
@@ -9,7 +9,6 @@
|
||||
all_core_root_specs,
|
||||
ensure_clingo_importable_or_raise,
|
||||
ensure_core_dependencies,
|
||||
ensure_file_in_path_or_raise,
|
||||
ensure_gpg_in_path_or_raise,
|
||||
ensure_patchelf_in_path_or_raise,
|
||||
)
|
||||
@@ -20,7 +19,6 @@
|
||||
"is_bootstrapping",
|
||||
"ensure_bootstrap_configuration",
|
||||
"ensure_core_dependencies",
|
||||
"ensure_file_in_path_or_raise",
|
||||
"ensure_gpg_in_path_or_raise",
|
||||
"ensure_clingo_importable_or_raise",
|
||||
"ensure_patchelf_in_path_or_raise",
|
||||
|
@@ -37,7 +37,7 @@
|
||||
import spack.binary_distribution
|
||||
import spack.config
|
||||
import spack.detection
|
||||
import spack.mirror
|
||||
import spack.mirrors.mirror
|
||||
import spack.platforms
|
||||
import spack.spec
|
||||
import spack.store
|
||||
@@ -91,7 +91,7 @@ def __init__(self, conf: ConfigDictionary) -> None:
|
||||
self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
|
||||
|
||||
# Promote (relative) paths to file urls
|
||||
self.url = spack.mirror.Mirror(conf["info"]["url"]).fetch_url
|
||||
self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url
|
||||
|
||||
@property
|
||||
def mirror_scope(self) -> spack.config.InternalConfigScope:
|
||||
@@ -481,19 +481,6 @@ def ensure_gpg_in_path_or_raise() -> None:
|
||||
)
|
||||
|
||||
|
||||
def file_root_spec() -> str:
|
||||
"""Return the root spec used to bootstrap file"""
|
||||
root_spec_name = "win-file" if IS_WINDOWS else "file"
|
||||
return _root_spec(root_spec_name)
|
||||
|
||||
|
||||
def ensure_file_in_path_or_raise() -> None:
|
||||
"""Ensure file is in the PATH or raise"""
|
||||
return ensure_executables_in_path_or_raise(
|
||||
executables=["file"], abstract_spec=file_root_spec()
|
||||
)
|
||||
|
||||
|
||||
def patchelf_root_spec() -> str:
|
||||
"""Return the root spec used to bootstrap patchelf"""
|
||||
# 0.13.1 is the last version not to require C++17.
|
||||
@@ -577,15 +564,13 @@ def ensure_core_dependencies() -> None:
|
||||
"""Ensure the presence of all the core dependencies."""
|
||||
if sys.platform.lower() == "linux":
|
||||
ensure_patchelf_in_path_or_raise()
|
||||
elif sys.platform == "win32":
|
||||
ensure_file_in_path_or_raise()
|
||||
ensure_gpg_in_path_or_raise()
|
||||
ensure_clingo_importable_or_raise()
|
||||
|
||||
|
||||
def all_core_root_specs() -> List[str]:
|
||||
"""Return a list of all the core root specs that may be used to bootstrap Spack"""
|
||||
return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec(), file_root_spec()]
|
||||
return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
|
||||
|
||||
|
||||
def bootstrapping_sources(scope: Optional[str] = None):
|
||||
|
@@ -3,8 +3,8 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Query the status of bootstrapping on this machine"""
|
||||
import platform
|
||||
from typing import List, Optional, Sequence, Tuple, Union
|
||||
import sys
|
||||
from typing import Dict, List, Optional, Sequence, Tuple, Union
|
||||
|
||||
import spack.util.executable
|
||||
|
||||
@@ -72,7 +72,7 @@ def _core_requirements() -> List[RequiredResponseType]:
|
||||
"bzip2": _missing("bzip2", "required to compress/decompress code archives"),
|
||||
"git": _missing("git", "required to fetch/manage git repositories"),
|
||||
}
|
||||
if platform.system().lower() == "linux":
|
||||
if sys.platform == "linux":
|
||||
_core_system_exes["xz"] = _missing("xz", "required to compress/decompress code archives")
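The switch from platform.system().lower() to sys.platform in this file is behavior-preserving on the platforms Spack supports, and sys.platform is a constant that type checkers can narrow on. A quick comparison (values shown are the documented ones, not captured output):

    import platform
    import sys

    print(sys.platform)       # e.g. "linux", "darwin", "win32"  (known statically)
    print(platform.system())  # e.g. "Linux", "Darwin", "Windows" (computed at runtime)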
# Executables that are not bootstrapped yet
|
||||
@@ -87,17 +87,16 @@ def _core_requirements() -> List[RequiredResponseType]:
|
||||
|
||||
|
||||
def _buildcache_requirements() -> List[RequiredResponseType]:
|
||||
_buildcache_exes = {
|
||||
"file": _missing("file", "required to analyze files for buildcaches", system_only=False),
|
||||
("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
|
||||
_buildcache_exes: Dict[ExecutablesType, str] = {
|
||||
("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False)
|
||||
}
|
||||
if platform.system().lower() == "darwin":
|
||||
if sys.platform == "darwin":
|
||||
_buildcache_exes["otool"] = _missing("otool", "required to relocate binaries")
|
||||
|
||||
# Executables that are not bootstrapped yet
|
||||
result = [_required_system_executable(exe, msg) for exe, msg in _buildcache_exes.items()]
|
||||
|
||||
if platform.system().lower() == "linux":
|
||||
if sys.platform == "linux":
|
||||
result.append(
|
||||
_required_executable(
|
||||
"patchelf",
|
||||
|
@@ -56,7 +56,6 @@
|
||||
from llnl.util.symlink import symlink
|
||||
from llnl.util.tty.color import cescape, colorize
|
||||
|
||||
import spack.build_systems._checks
|
||||
import spack.build_systems.cmake
|
||||
import spack.build_systems.meson
|
||||
import spack.build_systems.python
|
||||
@@ -883,6 +882,9 @@ def __init__(self, *roots: spack.spec.Spec, context: Context):
|
||||
elif context == Context.RUN:
|
||||
self.root_depflag = dt.RUN | dt.LINK
|
||||
|
||||
def accept(self, item):
|
||||
return True
|
||||
|
||||
def neighbors(self, item):
|
||||
spec = item.edge.spec
|
||||
if spec.dag_hash() in self.root_hashes:
|
||||
@@ -920,19 +922,19 @@ def effective_deptypes(
|
||||
a flag specifying in what way they do so. The list is ordered topologically
|
||||
from root to leaf, meaning that environment modifications should be applied
|
||||
in reverse so that dependents override dependencies, not the other way around."""
|
||||
visitor = traverse.TopoVisitor(
|
||||
EnvironmentVisitor(*specs, context=context),
|
||||
key=lambda x: x.dag_hash(),
|
||||
topo_sorted_edges = traverse.traverse_topo_edges_generator(
|
||||
traverse.with_artificial_edges(specs),
|
||||
visitor=EnvironmentVisitor(*specs, context=context),
|
||||
key=traverse.by_dag_hash,
|
||||
root=True,
|
||||
all_edges=True,
|
||||
)
|
||||
traverse.traverse_depth_first_with_visitor(traverse.with_artificial_edges(specs), visitor)
|
||||
|
||||
# Dictionary with "no mode" as default value, so it's easy to write modes[x] |= flag.
|
||||
use_modes = defaultdict(lambda: UseMode(0))
|
||||
nodes_with_type = []
|
||||
|
||||
for edge in visitor.edges:
|
||||
for edge in topo_sorted_edges:
|
||||
parent, child, depflag = edge.parent, edge.spec, edge.depflag
|
||||
|
||||
# Mark the starting point
|
@@ -1375,7 +1377,7 @@ def exitcode_msg(p):
|
||||
return child_result
|
||||
|
||||
|
||||
CONTEXT_BASES = (spack.package_base.PackageBase, spack.build_systems._checks.BaseBuilder)
|
||||
CONTEXT_BASES = (spack.package_base.PackageBase, spack.builder.Builder)
|
||||
|
||||
|
||||
def get_package_context(traceback, context=3):
|
||||
@@ -1424,27 +1426,20 @@ def make_stack(tb, stack=None):
|
||||
# We found obj, the Package implementation we care about.
|
||||
# Point out the location in the install method where we failed.
|
||||
filename = inspect.getfile(frame.f_code)
|
||||
lineno = frame.f_lineno
|
||||
if os.path.basename(filename) == "package.py":
|
||||
# subtract 1 because we inject a magic import at the top of package files.
|
||||
# TODO: get rid of the magic import.
|
||||
lineno -= 1
|
||||
|
||||
lines = ["{0}:{1:d}, in {2}:".format(filename, lineno, frame.f_code.co_name)]
|
||||
lines = [f"{filename}:{frame.f_lineno}, in {frame.f_code.co_name}:"]
|
||||
|
||||
# Build a message showing context in the install method.
|
||||
sourcelines, start = inspect.getsourcelines(frame)
|
||||
|
||||
# Calculate lineno of the error relative to the start of the function.
|
||||
fun_lineno = lineno - start
|
||||
fun_lineno = frame.f_lineno - start
|
||||
start_ctx = max(0, fun_lineno - context)
|
||||
sourcelines = sourcelines[start_ctx : fun_lineno + context + 1]
|
||||
|
||||
for i, line in enumerate(sourcelines):
|
||||
is_error = start_ctx + i == fun_lineno
|
||||
mark = ">> " if is_error else " "
|
||||
# Add start to get lineno relative to start of file, not function.
|
||||
marked = " {0}{1:-6d}{2}".format(mark, start + start_ctx + i, line.rstrip())
|
||||
marked = f" {'>> ' if is_error else ' '}{start + start_ctx + i:-6d}{line.rstrip()}"
|
||||
if is_error:
|
||||
marked = colorize("@R{%s}" % cescape(marked))
|
||||
lines.append(marked)
|
||||
|
@@ -9,6 +9,7 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.error
|
||||
import spack.phase_callbacks
|
||||
import spack.relocate
|
||||
import spack.spec
|
||||
import spack.store
|
||||
@@ -63,7 +64,7 @@ def apply_macos_rpath_fixups(builder: spack.builder.Builder):
|
||||
|
||||
|
||||
def ensure_build_dependencies_or_raise(
|
||||
spec: spack.spec.Spec, dependencies: List[spack.spec.Spec], error_msg: str
|
||||
spec: spack.spec.Spec, dependencies: List[str], error_msg: str
|
||||
):
|
||||
"""Ensure that some build dependencies are present in the concrete spec.
|
||||
|
||||
@@ -71,7 +72,7 @@ def ensure_build_dependencies_or_raise(
|
||||
|
||||
Args:
|
||||
spec: concrete spec to be checked.
|
||||
dependencies: list of abstract specs to be satisfied
|
||||
dependencies: list of package names of required build dependencies
|
||||
error_msg: brief error message to be prepended to a longer description
|
||||
|
||||
Raises:
|
||||
@@ -127,8 +128,8 @@ def execute_install_time_tests(builder: spack.builder.Builder):
|
||||
builder.pkg.tester.phase_tests(builder, "install", builder.install_time_test_callbacks)
|
||||
|
||||
|
||||
class BaseBuilder(spack.builder.Builder):
|
||||
"""Base class for builders to register common checks"""
|
||||
class BuilderWithDefaults(spack.builder.Builder):
|
||||
"""Base class for all specific builders with common callbacks registered."""
|
||||
|
||||
# Check that self.prefix is there after installation
|
||||
spack.builder.run_after("install")(sanity_check_prefix)
|
||||
spack.phase_callbacks.run_after("install")(sanity_check_prefix)
|
||||
|
@@ -6,7 +6,7 @@
|
||||
import os.path
|
||||
import stat
|
||||
import subprocess
|
||||
from typing import List
|
||||
from typing import Callable, List, Optional, Set, Tuple, Union
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
@@ -15,6 +15,9 @@
|
||||
import spack.builder
|
||||
import spack.error
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, conflicts, depends_on
|
||||
from spack.multimethod import when
|
||||
from spack.operating_systems.mac_os import macos_version
|
||||
@@ -22,7 +25,7 @@
|
||||
from spack.version import Version
|
||||
|
||||
from ._checks import (
|
||||
BaseBuilder,
|
||||
BuilderWithDefaults,
|
||||
apply_macos_rpath_fixups,
|
||||
ensure_build_dependencies_or_raise,
|
||||
execute_build_time_tests,
|
||||
@@ -69,14 +72,14 @@ def flags_to_build_system_args(self, flags):
|
||||
# Legacy methods (used by too many packages to change them,
|
||||
# need to forward to the builder)
|
||||
def enable_or_disable(self, *args, **kwargs):
|
||||
return self.builder.enable_or_disable(*args, **kwargs)
|
||||
return spack.builder.create(self).enable_or_disable(*args, **kwargs)
|
||||
|
||||
def with_or_without(self, *args, **kwargs):
|
||||
return self.builder.with_or_without(*args, **kwargs)
|
||||
return spack.builder.create(self).with_or_without(*args, **kwargs)
|
||||
|
||||
|
||||
@spack.builder.builder("autotools")
|
||||
class AutotoolsBuilder(BaseBuilder):
|
||||
class AutotoolsBuilder(BuilderWithDefaults):
|
||||
"""The autotools builder encodes the default way of installing software built
|
||||
with autotools. It has four phases that can be overridden, if need be:
|
||||
|
||||
@@ -157,7 +160,7 @@ class AutotoolsBuilder(BaseBuilder):
|
||||
install_libtool_archives = False
|
||||
|
||||
@property
|
||||
def patch_config_files(self):
|
||||
def patch_config_files(self) -> bool:
|
||||
"""Whether to update old ``config.guess`` and ``config.sub`` files
|
||||
distributed with the tarball.
|
||||
|
||||
@@ -177,23 +180,20 @@ def patch_config_files(self):
|
||||
)
|
||||
|
||||
@property
|
||||
def _removed_la_files_log(self):
|
||||
def _removed_la_files_log(self) -> str:
|
||||
"""File containing the list of removed libtool archives"""
|
||||
build_dir = self.build_directory
|
||||
if not os.path.isabs(self.build_directory):
|
||||
build_dir = os.path.join(self.pkg.stage.path, build_dir)
|
||||
return os.path.join(build_dir, "removed_la_files.txt")
|
||||
return os.path.join(self.build_directory, "removed_la_files.txt")
|
||||
|
||||
@property
|
||||
def archive_files(self):
|
||||
def archive_files(self) -> List[str]:
|
||||
"""Files to archive for packages based on autotools"""
|
||||
files = [os.path.join(self.build_directory, "config.log")]
|
||||
if not self.install_libtool_archives:
|
||||
files.append(self._removed_la_files_log)
|
||||
return files
|
||||
|
||||
@spack.builder.run_after("autoreconf")
|
||||
def _do_patch_config_files(self):
|
||||
@spack.phase_callbacks.run_after("autoreconf")
|
||||
def _do_patch_config_files(self) -> None:
|
||||
"""Some packages ship with older config.guess/config.sub files and need to
|
||||
have these updated when installed on a newer architecture.
|
||||
|
||||
@@ -294,7 +294,7 @@ def runs_ok(script_abs_path):
|
||||
and set the prefix to the directory containing the `config.guess` and
|
||||
`config.sub` files.
|
||||
"""
|
||||
raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.name))
|
||||
raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.pkg.name))
|
||||
|
||||
# Copy the good files over the bad ones
|
||||
for abs_path in to_be_patched:
|
||||
@@ -304,8 +304,8 @@ def runs_ok(script_abs_path):
|
||||
fs.copy(substitutes[name], abs_path)
|
||||
os.chmod(abs_path, mode)
|
||||
|
||||
@spack.builder.run_before("configure")
|
||||
def _patch_usr_bin_file(self):
|
||||
@spack.phase_callbacks.run_before("configure")
|
||||
def _patch_usr_bin_file(self) -> None:
|
||||
"""On NixOS file is not available in /usr/bin/file. Patch configure
|
||||
scripts to use file from path."""
|
||||
|
||||
@@ -316,8 +316,8 @@ def _patch_usr_bin_file(self):
|
||||
with fs.keep_modification_time(*x.filenames):
|
||||
x.filter(regex="/usr/bin/file", repl="file", string=True)
|
||||
|
||||
@spack.builder.run_before("configure")
|
||||
def _set_autotools_environment_variables(self):
|
||||
@spack.phase_callbacks.run_before("configure")
|
||||
def _set_autotools_environment_variables(self) -> None:
|
||||
"""Many autotools builds use a version of mknod.m4 that fails when
|
||||
running as root unless FORCE_UNSAFE_CONFIGURE is set to 1.
|
||||
|
||||
@@ -330,8 +330,8 @@ def _set_autotools_environment_variables(self):
|
||||
"""
|
||||
os.environ["FORCE_UNSAFE_CONFIGURE"] = "1"
|
||||
|
||||
@spack.builder.run_before("configure")
|
||||
def _do_patch_libtool_configure(self):
|
||||
@spack.phase_callbacks.run_before("configure")
|
||||
def _do_patch_libtool_configure(self) -> None:
|
||||
"""Patch bugs that propagate from libtool macros into "configure" and
|
||||
further into "libtool". Note that patches that can be fixed by patching
|
||||
"libtool" directly should be implemented in the _do_patch_libtool method
|
||||
@@ -358,8 +358,8 @@ def _do_patch_libtool_configure(self):
|
||||
# Support Libtool 2.4.2 and older:
|
||||
x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')
|
||||
|
||||
@spack.builder.run_after("configure")
|
||||
def _do_patch_libtool(self):
|
||||
@spack.phase_callbacks.run_after("configure")
|
||||
def _do_patch_libtool(self) -> None:
|
||||
"""If configure generates a "libtool" script that does not correctly
|
||||
detect the compiler (and patch_libtool is set), patch in the correct
|
||||
values for libtool variables.
|
||||
@@ -507,27 +507,69 @@ def _do_patch_libtool(self):
|
||||
)
|
||||
|
||||
@property
|
||||
def configure_directory(self):
|
||||
def configure_directory(self) -> str:
|
||||
"""Return the directory where 'configure' resides."""
|
||||
return self.pkg.stage.source_path
|
||||
|
||||
@property
|
||||
def configure_abs_path(self):
|
||||
def configure_abs_path(self) -> str:
|
||||
# Absolute path to configure
|
||||
configure_abs_path = os.path.join(os.path.abspath(self.configure_directory), "configure")
|
||||
return configure_abs_path
|
||||
|
||||
@property
|
||||
def build_directory(self):
|
||||
def build_directory(self) -> str:
|
||||
"""Override to provide another place to build the package"""
|
||||
return self.configure_directory
|
||||
# Handle the case where the configure directory is set to a non-absolute path
|
||||
# Non-absolute paths are always relative to the staging source path
|
||||
build_dir = self.configure_directory
|
||||
if not os.path.isabs(build_dir):
|
||||
build_dir = os.path.join(self.pkg.stage.source_path, build_dir)
|
||||
return build_dir
|
||||
|
||||
@spack.builder.run_before("autoreconf")
|
||||
def delete_configure_to_force_update(self):
|
||||
@spack.phase_callbacks.run_before("autoreconf")
|
||||
def delete_configure_to_force_update(self) -> None:
|
||||
if self.force_autoreconf:
|
||||
fs.force_remove(self.configure_abs_path)
|
||||
|
||||
def autoreconf(self, pkg, spec, prefix):
|
||||
@property
|
||||
def autoreconf_search_path_args(self) -> List[str]:
|
||||
"""Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
|
||||
of build deps, skips the default path of automake, move external include
|
||||
flags to the back, since they might pull in unrelated m4 files shadowing
|
||||
spack dependencies."""
|
||||
return _autoreconf_search_path_args(self.spec)
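In practice the property expands to one -I flag per aclocal directory of a build dependency, with Spack-owned directories ahead of external ones. A purely illustrative expansion (the paths are made up):

    expected_flags = [
        "-I", "/spack/opt/libtool-2.4.7-abc1234/share/aclocal",
        "-I", "/spack/opt/pkgconf-2.2.0-def5678/share/aclocal",
        "-I", "/usr/share/aclocal",  # external include dirs are moved to the back
    ]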
@spack.phase_callbacks.run_after("autoreconf")
|
||||
def set_configure_or_die(self) -> None:
|
||||
"""Ensure the presence of a "configure" script, or raise. If the "configure"
|
||||
is found, a module level attribute is set.
|
||||
|
||||
Raises:
|
||||
RuntimeError: if the "configure" script is not found
|
||||
"""
|
||||
# Check if the "configure" script is there. If not raise a RuntimeError.
|
||||
if not os.path.exists(self.configure_abs_path):
|
||||
msg = "configure script not found in {0}"
|
||||
raise RuntimeError(msg.format(self.configure_directory))
|
||||
|
||||
# Monkey-patch the configure script in the corresponding module
|
||||
globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
|
||||
globals_for_pkg.configure = Executable(self.configure_abs_path)
|
||||
globals_for_pkg.propagate_changes_to_mro()
|
||||
|
||||
def configure_args(self) -> List[str]:
|
||||
"""Return the list of all the arguments that must be passed to configure,
|
||||
except ``--prefix`` which will be pre-pended to the list.
|
||||
"""
|
||||
return []
|
||||
|
||||
def autoreconf(
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Not needed usually, configure should be already there"""
|
||||
|
||||
# If configure exists nothing needs to be done
|
||||
@@ -554,39 +596,12 @@ def autoreconf(self, pkg, spec, prefix):
|
||||
autoreconf_args += self.autoreconf_extra_args
|
||||
self.pkg.module.autoreconf(*autoreconf_args)
|
||||
|
||||
@property
|
||||
def autoreconf_search_path_args(self):
|
||||
"""Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
|
||||
of build deps, skips the default path of automake, move external include
|
||||
flags to the back, since they might pull in unrelated m4 files shadowing
|
||||
spack dependencies."""
|
||||
return _autoreconf_search_path_args(self.spec)
|
||||
|
||||
@spack.builder.run_after("autoreconf")
|
||||
def set_configure_or_die(self):
|
||||
"""Ensure the presence of a "configure" script, or raise. If the "configure"
|
||||
is found, a module level attribute is set.
|
||||
|
||||
Raises:
|
||||
RuntimeError: if the "configure" script is not found
|
||||
"""
|
||||
# Check if the "configure" script is there. If not raise a RuntimeError.
|
||||
if not os.path.exists(self.configure_abs_path):
|
||||
msg = "configure script not found in {0}"
|
||||
raise RuntimeError(msg.format(self.configure_directory))
|
||||
|
||||
# Monkey-patch the configure script in the corresponding module
|
||||
globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
|
||||
globals_for_pkg.configure = Executable(self.configure_abs_path)
|
||||
globals_for_pkg.propagate_changes_to_mro()
|
||||
|
||||
def configure_args(self):
|
||||
"""Return the list of all the arguments that must be passed to configure,
|
||||
except ``--prefix`` which will be pre-pended to the list.
|
||||
"""
|
||||
return []
|
||||
|
||||
def configure(self, pkg, spec, prefix):
|
||||
def configure(
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Run "configure", with the arguments specified by the builder and an
|
||||
appropriately set prefix.
|
||||
"""
|
||||
@@ -597,7 +612,12 @@ def configure(self, pkg, spec, prefix):
|
||||
with fs.working_dir(self.build_directory, create=True):
|
||||
pkg.module.configure(*options)
|
||||
|
||||
def build(self, pkg, spec, prefix):
|
||||
def build(
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Run "make" on the build targets specified by the builder."""
|
||||
# See https://autotools.io/automake/silent.html
|
||||
params = ["V=1"]
|
||||
@@ -605,41 +625,49 @@ def build(self, pkg, spec, prefix):
|
||||
with fs.working_dir(self.build_directory):
|
||||
pkg.module.make(*params)
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
def install(
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Run "make" on the install targets specified by the builder."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
pkg.module.make(*self.install_targets)
|
||||
|
||||
spack.builder.run_after("build")(execute_build_time_tests)
|
||||
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
|
||||
|
||||
def check(self):
|
||||
def check(self) -> None:
|
||||
"""Run "make" on the ``test`` and ``check`` targets, if found."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
self.pkg._if_make_target_execute("test")
|
||||
self.pkg._if_make_target_execute("check")
|
||||
|
||||
def _activate_or_not(
|
||||
self, name, activation_word, deactivation_word, activation_value=None, variant=None
|
||||
):
|
||||
self,
|
||||
name: str,
|
||||
activation_word: str,
|
||||
deactivation_word: str,
|
||||
activation_value: Optional[Union[Callable, str]] = None,
|
||||
variant=None,
|
||||
) -> List[str]:
|
||||
"""This function contain the current implementation details of
|
||||
:meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without` and
|
||||
:meth:`~spack.build_systems.autotools.AutotoolsBuilder.enable_or_disable`.
|
||||
|
||||
Args:
|
||||
name (str): name of the option that is being activated or not
|
||||
activation_word (str): the default activation word ('with' in the
|
||||
case of ``with_or_without``)
|
||||
deactivation_word (str): the default deactivation word ('without'
|
||||
in the case of ``with_or_without``)
|
||||
activation_value (typing.Callable): callable that accepts a single
|
||||
value. This value is either one of the allowed values for a
|
||||
multi-valued variant or the name of a bool-valued variant.
|
||||
name: name of the option that is being activated or not
|
||||
activation_word: the default activation word ('with' in the case of
|
||||
``with_or_without``)
|
||||
deactivation_word: the default deactivation word ('without' in the case of
|
||||
``with_or_without``)
|
||||
activation_value: callable that accepts a single value. This value is either one of the
|
||||
allowed values for a multi-valued variant or the name of a bool-valued variant.
|
||||
Returns the parameter to be used when the value is activated.
|
||||
|
||||
The special value 'prefix' can also be assigned and will return
|
||||
The special value "prefix" can also be assigned and will return
|
||||
``spec[name].prefix`` as activation parameter.
|
||||
variant (str): name of the variant that is being processed
|
||||
(if different from option name)
|
||||
variant: name of the variant that is being processed (if different from option name)
|
||||
|
||||
Examples:
|
||||
|
||||
@@ -647,19 +675,19 @@ def _activate_or_not(
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant('foo', values=('x', 'y'), description='')
|
||||
variant('bar', default=True, description='')
|
||||
variant('ba_z', default=True, description='')
|
||||
variant("foo", values=("x", "y"), description="")
|
||||
variant("bar", default=True, description="")
|
||||
variant("ba_z", default=True, description="")
|
||||
|
||||
calling this function like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
_activate_or_not(
|
||||
'foo', 'with', 'without', activation_value='prefix'
|
||||
"foo", "with", "without", activation_value="prefix"
|
||||
)
|
||||
_activate_or_not('bar', 'with', 'without')
|
||||
_activate_or_not('ba-z', 'with', 'without', variant='ba_z')
|
||||
_activate_or_not("bar", "with", "without")
|
||||
_activate_or_not("ba-z", "with", "without", variant="ba_z")
|
||||
|
||||
will generate the following configuration options:
|
||||
|
||||
@@ -679,8 +707,8 @@ def _activate_or_not(
|
||||
Raises:
|
||||
KeyError: if name is not among known variants
|
||||
"""
|
||||
spec = self.pkg.spec
|
||||
args = []
|
||||
spec: spack.spec.Spec = self.pkg.spec
|
||||
args: List[str] = []
|
||||
|
||||
if activation_value == "prefix":
|
||||
activation_value = lambda x: spec[x].prefix
|
||||
@@ -698,7 +726,7 @@ def _activate_or_not(
|
||||
# Create a list of pairs. Each pair includes a configuration
|
||||
# option and whether or not that option is activated
|
||||
vdef = self.pkg.get_variant(variant)
|
||||
if set(vdef.values) == set((True, False)):
|
||||
if set(vdef.values) == set((True, False)): # type: ignore
|
||||
# BoolValuedVariant carry information about a single option.
|
||||
# Nonetheless, for uniformity of treatment we'll package them
|
||||
# in an iterable of one element.
|
||||
@@ -709,14 +737,12 @@ def _activate_or_not(
|
||||
# package's build system. It excludes values which have special
|
||||
# meanings and do not correspond to features (e.g. "none")
|
||||
feature_values = getattr(vdef.values, "feature_values", None) or vdef.values
|
||||
options = [(value, f"{variant}={value}" in spec) for value in feature_values]
|
||||
options = [(v, f"{variant}={v}" in spec) for v in feature_values] # type: ignore
|
||||
|
||||
# For each allowed value in the list of values
|
||||
for option_value, activated in options:
|
||||
# Search for an override in the package for this value
|
||||
override_name = "{0}_or_{1}_{2}".format(
|
||||
activation_word, deactivation_word, option_value
|
||||
)
|
||||
override_name = f"{activation_word}_or_{deactivation_word}_{option_value}"
|
||||
line_generator = getattr(self, override_name, None) or getattr(
|
||||
self.pkg, override_name, None
|
||||
)
|
||||
@@ -725,19 +751,24 @@ def _activate_or_not(
|
||||
|
||||
def _default_generator(is_activated):
|
||||
if is_activated:
|
||||
line = "--{0}-{1}".format(activation_word, option_value)
|
||||
line = f"--{activation_word}-{option_value}"
|
||||
if activation_value is not None and activation_value(
|
||||
option_value
|
||||
): # NOQA=ignore=E501
|
||||
line += "={0}".format(activation_value(option_value))
|
||||
line = f"{line}={activation_value(option_value)}"
|
||||
return line
|
||||
return "--{0}-{1}".format(deactivation_word, option_value)
|
||||
return f"--{deactivation_word}-{option_value}"
|
||||
|
||||
line_generator = _default_generator
|
||||
args.append(line_generator(activated))
|
||||
return args
|
||||
|
||||
def with_or_without(self, name, activation_value=None, variant=None):
|
||||
def with_or_without(
|
||||
self,
|
||||
name: str,
|
||||
activation_value: Optional[Union[Callable, str]] = None,
|
||||
variant: Optional[str] = None,
|
||||
) -> List[str]:
|
||||
"""Inspects a variant and returns the arguments that activate
|
||||
or deactivate the selected feature(s) for the configure options.
|
||||
|
||||
@@ -752,12 +783,11 @@ def with_or_without(self, name, activation_value=None, variant=None):
|
||||
``variant=value`` is in the spec.
|
||||
|
||||
Args:
|
||||
name (str): name of a valid multi-valued variant
|
||||
activation_value (typing.Callable): callable that accepts a single
|
||||
value and returns the parameter to be used leading to an entry
|
||||
of the type ``--with-{name}={parameter}``.
|
||||
name: name of a valid multi-valued variant
|
||||
activation_value: callable that accepts a single value and returns the parameter to be
|
||||
used leading to an entry of the type ``--with-{name}={parameter}``.
|
||||
|
||||
The special value 'prefix' can also be assigned and will return
|
||||
The special value "prefix" can also be assigned and will return
|
||||
``spec[name].prefix`` as activation parameter.
|
||||
|
||||
Returns:
|
||||
@@ -765,18 +795,22 @@ def with_or_without(self, name, activation_value=None, variant=None):
|
||||
"""
|
||||
return self._activate_or_not(name, "with", "without", activation_value, variant)
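A common call pattern from inside an AutotoolsPackage subclass, sketched with hypothetical variants named "mpi" and "libxml2":

    def configure_args(self):
        args = []
        # bool variant: yields ["--with-mpi"] or ["--without-mpi"]
        args.extend(self.with_or_without("mpi"))
        # "prefix" activation value: yields ["--with-libxml2=<libxml2 prefix>"] when active
        args.extend(self.with_or_without("libxml2", activation_value="prefix"))
        return args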
def enable_or_disable(self, name, activation_value=None, variant=None):
|
||||
def enable_or_disable(
|
||||
self,
|
||||
name: str,
|
||||
activation_value: Optional[Union[Callable, str]] = None,
|
||||
variant: Optional[str] = None,
|
||||
) -> List[str]:
|
||||
"""Same as
|
||||
:meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`
|
||||
but substitute ``with`` with ``enable`` and ``without`` with ``disable``.
|
||||
|
||||
Args:
|
||||
name (str): name of a valid multi-valued variant
|
||||
activation_value (typing.Callable): if present accepts a single value
|
||||
and returns the parameter to be used leading to an entry of the
|
||||
type ``--enable-{name}={parameter}``
|
||||
name: name of a valid multi-valued variant
|
||||
activation_value: if present accepts a single value and returns the parameter to be
|
||||
used leading to an entry of the type ``--enable-{name}={parameter}``
|
||||
|
||||
The special value 'prefix' can also be assigned and will return
|
||||
The special value "prefix" can also be assigned and will return
|
||||
``spec[name].prefix`` as activation parameter.
|
||||
|
||||
Returns:
|
||||
@@ -784,15 +818,15 @@ def enable_or_disable(self, name, activation_value=None, variant=None):
|
||||
"""
|
||||
return self._activate_or_not(name, "enable", "disable", activation_value, variant)
|
||||
|
||||
spack.builder.run_after("install")(execute_install_time_tests)
|
||||
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
|
||||
|
||||
def installcheck(self):
|
||||
def installcheck(self) -> None:
|
||||
"""Run "make" on the ``installcheck`` target, if found."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
self.pkg._if_make_target_execute("installcheck")
|
||||
|
||||
@spack.builder.run_after("install")
|
||||
def remove_libtool_archives(self):
|
||||
@spack.phase_callbacks.run_after("install")
|
||||
def remove_libtool_archives(self) -> None:
|
||||
"""Remove all .la files in prefix sub-folders if the package sets
|
||||
``install_libtool_archives`` to be False.
|
||||
"""
|
||||
@@ -804,7 +838,7 @@ def remove_libtool_archives(self):
|
||||
libtool_files = fs.find(str(self.pkg.prefix), "*.la", recursive=True)
|
||||
with fs.safe_remove(*libtool_files):
|
||||
fs.mkdirp(os.path.dirname(self._removed_la_files_log))
|
||||
with open(self._removed_la_files_log, mode="w") as f:
|
||||
with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
|
||||
f.write("\n".join(libtool_files))
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
@@ -814,12 +848,13 @@ def setup_build_environment(self, env):
|
||||
env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")
|
||||
|
||||
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
|
||||
spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
|
||||
spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
|
||||
|
||||
|
||||
def _autoreconf_search_path_args(spec):
|
||||
dirs_seen = set()
|
||||
flags_spack, flags_external = [], []
|
||||
def _autoreconf_search_path_args(spec: spack.spec.Spec) -> List[str]:
|
||||
dirs_seen: Set[Tuple[int, int]] = set()
|
||||
flags_spack: List[str] = []
|
||||
flags_external: List[str] = []
|
||||
|
||||
# We don't want to add an include flag for automake's default search path.
|
||||
for automake in spec.dependencies(name="automake", deptype="build"):
|
||||
|
@@ -10,7 +10,7 @@
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.builder
|
||||
import spack.phase_callbacks
|
||||
|
||||
from .cmake import CMakeBuilder, CMakePackage
|
||||
|
||||
@@ -192,7 +192,10 @@ def initconfig_mpi_entries(self):
|
||||
|
||||
entries.append(cmake_cache_path("MPI_C_COMPILER", spec["mpi"].mpicc))
|
||||
entries.append(cmake_cache_path("MPI_CXX_COMPILER", spec["mpi"].mpicxx))
|
||||
entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))
|
||||
|
||||
# not all MPIs have Fortran wrappers
|
||||
if hasattr(spec["mpi"], "mpifc"):
|
||||
entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))
|
||||
|
||||
# Check for slurm
|
||||
using_slurm = False
|
||||
@@ -321,7 +324,7 @@ def initconfig(self, pkg, spec, prefix):
|
||||
+ self.initconfig_package_entries()
|
||||
)
|
||||
|
||||
with open(self.cache_name, "w") as f:
|
||||
with open(self.cache_name, "w", encoding="utf-8") as f:
|
||||
for entry in cache_entries:
|
||||
f.write("%s\n" % entry)
|
||||
f.write("\n")
|
||||
@@ -332,7 +335,7 @@ def std_cmake_args(self):
|
||||
args.extend(["-C", self.cache_path])
|
||||
return args
|
||||
|
||||
@spack.builder.run_after("install")
|
||||
@spack.phase_callbacks.run_after("install")
|
||||
def install_cmake_cache(self):
|
||||
fs.mkdirp(self.pkg.spec.prefix.share.cmake)
|
||||
fs.install(self.cache_path, self.pkg.spec.prefix.share.cmake)
|
||||
|
@@ -7,10 +7,11 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
from spack.directives import build_system, depends_on
|
||||
from spack.multimethod import when
|
||||
|
||||
from ._checks import BaseBuilder, execute_install_time_tests
|
||||
from ._checks import BuilderWithDefaults, execute_install_time_tests
|
||||
|
||||
|
||||
class CargoPackage(spack.package_base.PackageBase):
|
||||
@@ -27,7 +28,7 @@ class CargoPackage(spack.package_base.PackageBase):
|
||||
|
||||
|
||||
@spack.builder.builder("cargo")
|
||||
class CargoBuilder(BaseBuilder):
|
||||
class CargoBuilder(BuilderWithDefaults):
|
||||
"""The Cargo builder encodes the most common way of building software with
|
||||
a rust Cargo.toml file. It has two phases that can be overridden, if need be:
|
||||
|
||||
@@ -77,7 +78,7 @@ def install(self, pkg, spec, prefix):
|
||||
with fs.working_dir(self.build_directory):
|
||||
fs.install_tree("out", prefix)
|
||||
|
||||
spack.builder.run_after("install")(execute_install_time_tests)
|
||||
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
|
||||
|
||||
def check(self):
|
||||
"""Run "cargo test"."""
|
||||
|
@@ -9,7 +9,7 @@
|
||||
import re
|
||||
import sys
|
||||
from itertools import chain
|
||||
from typing import List, Optional, Set, Tuple
|
||||
from typing import Any, List, Optional, Tuple
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
from llnl.util.lang import stable_partition
|
||||
@@ -18,11 +18,15 @@
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack import traverse
|
||||
from spack.directives import build_system, conflicts, depends_on, variant
|
||||
from spack.multimethod import when
|
||||
from spack.util.environment import filter_system_paths
|
||||
|
||||
from ._checks import BaseBuilder, execute_build_time_tests
|
||||
from ._checks import BuilderWithDefaults, execute_build_time_tests
|
||||
|
||||
# Regex to extract the primary generator from the CMake generator
|
||||
# string.
|
||||
@@ -48,9 +52,9 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
|
||||
python_executable = pkg.spec["python"].command.path
|
||||
args.extend(
|
||||
[
|
||||
CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
|
||||
CMakeBuilder.define("Python_EXECUTABLE", python_executable),
|
||||
CMakeBuilder.define("Python3_EXECUTABLE", python_executable),
|
||||
define("PYTHON_EXECUTABLE", python_executable),
|
||||
define("Python_EXECUTABLE", python_executable),
|
||||
define("Python3_EXECUTABLE", python_executable),
|
||||
]
|
||||
)
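Each of those hints renders through the module-level define() introduced later in this diff; for a Python at a hypothetical prefix the generated flag looks like:

    # "-DPython3_EXECUTABLE:STRING=/spack/opt/python-3.11.7-abc1234/bin/python3.11"
    flag = define("Python3_EXECUTABLE", "/spack/opt/python-3.11.7-abc1234/bin/python3.11")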
@@ -85,7 +89,7 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
|
||||
ipo = False
|
||||
|
||||
if cmake.satisfies("@3.9:"):
|
||||
args.append(CMakeBuilder.define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
|
||||
args.append(define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
|
||||
|
||||
# Disable Package Registry: export(PACKAGE) may put files in the user's home directory, and
|
||||
# find_package may search there. This is not what we want.
|
||||
@@ -93,30 +97,36 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
|
||||
# Do not populate CMake User Package Registry
|
||||
if cmake.satisfies("@3.15:"):
|
||||
# see https://cmake.org/cmake/help/latest/policy/CMP0090.html
|
||||
args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
|
||||
args.append(define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
|
||||
elif cmake.satisfies("@3.1:"):
|
||||
# see https://cmake.org/cmake/help/latest/variable/CMAKE_EXPORT_NO_PACKAGE_REGISTRY.html
|
||||
args.append(CMakeBuilder.define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))
|
||||
args.append(define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))
|
||||
|
||||
# Do not use CMake User/System Package Registry
|
||||
# https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#disabling-the-package-registry
|
||||
if cmake.satisfies("@3.16:"):
|
||||
args.append(CMakeBuilder.define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
|
||||
args.append(define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
|
||||
elif cmake.satisfies("@3.1:3.15"):
|
||||
args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
|
||||
args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))
|
||||
args.append(define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
|
||||
args.append(define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))
|
||||
|
||||
# Export a compilation database if supported.
|
||||
if _supports_compilation_databases(pkg):
|
||||
args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
|
||||
args.append(define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
|
||||
|
||||
# Enable MACOSX_RPATH by default when cmake_minimum_required < 3
|
||||
# https://cmake.org/cmake/help/latest/policy/CMP0042.html
|
||||
if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
|
||||
args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))
|
||||
args.append(define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))
|
||||
|
||||
# Disable find package's config mode for versions of Boost that
|
||||
# didn't provide it. See https://github.com/spack/spack/issues/20169
|
||||
# and https://cmake.org/cmake/help/latest/module/FindBoost.html
|
||||
if pkg.spec.satisfies("^boost@:1.69.0"):
|
||||
args.append(define("Boost_NO_BOOST_CMAKE", True))
|
||||
|
||||
|
||||
def generator(*names: str, default: Optional[str] = None):
|
||||
def generator(*names: str, default: Optional[str] = None) -> None:
|
||||
"""The build system generator to use.
|
||||
|
||||
See ``cmake --help`` for a list of valid generators.
|
||||
@@ -157,15 +167,18 @@ def _values(x):
|
||||
def get_cmake_prefix_path(pkg: spack.package_base.PackageBase) -> List[str]:
|
||||
"""Obtain the CMAKE_PREFIX_PATH entries for a package, based on the cmake_prefix_path package
|
||||
attribute of direct build/test and transitive link dependencies."""
|
||||
# Add direct build/test deps
|
||||
selected: Set[str] = {s.dag_hash() for s in pkg.spec.dependencies(deptype=dt.BUILD | dt.TEST)}
|
||||
# Add transitive link deps
|
||||
selected.update(s.dag_hash() for s in pkg.spec.traverse(root=False, deptype=dt.LINK))
|
||||
# Separate out externals so they do not shadow Spack prefixes
|
||||
externals, spack_built = stable_partition(
|
||||
(s for s in pkg.spec.traverse(root=False, order="topo") if s.dag_hash() in selected),
|
||||
lambda x: x.external,
|
||||
edges = traverse.traverse_topo_edges_generator(
|
||||
traverse.with_artificial_edges([pkg.spec]),
|
||||
visitor=traverse.MixedDepthVisitor(
|
||||
direct=dt.BUILD | dt.TEST, transitive=dt.LINK, key=traverse.by_dag_hash
|
||||
),
|
||||
key=traverse.by_dag_hash,
|
||||
root=False,
|
||||
all_edges=False, # cover all nodes, not all edges
|
||||
)
|
||||
ordered_specs = [edge.spec for edge in edges]
|
||||
# Separate out externals so they do not shadow Spack prefixes
|
||||
externals, spack_built = stable_partition((s for s in ordered_specs), lambda x: x.external)
|
||||
|
||||
return filter_system_paths(
|
||||
path for spec in chain(spack_built, externals) for path in spec.package.cmake_prefix_paths
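The ordering matters: Spack-built prefixes are emitted before externals so external packages cannot shadow Spack-provided ones, and system paths are filtered out entirely. A hedged sketch of the kind of result this produces (pkg stands for the package being built; the paths are invented):

    prefix_path = get_cmake_prefix_path(pkg)
    # e.g. ["/spack/opt/hdf5-1.14.3-abc1234", "/spack/opt/zlib-ng-2.1.6-def5678",
    #       "/opt/intel/oneapi/mkl/2024.0"]   # externals last, /usr and friends dropped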
@@ -263,15 +276,15 @@ def flags_to_build_system_args(self, flags):
|
||||
|
||||
# Legacy methods (used by too many packages to change them,
|
||||
# need to forward to the builder)
|
||||
def define(self, *args, **kwargs):
|
||||
return self.builder.define(*args, **kwargs)
|
||||
def define(self, cmake_var: str, value: Any) -> str:
|
||||
return define(cmake_var, value)
|
||||
|
||||
def define_from_variant(self, *args, **kwargs):
|
||||
return self.builder.define_from_variant(*args, **kwargs)
|
||||
def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
|
||||
return define_from_variant(self, cmake_var, variant)
|
||||
|
||||
|
||||
@spack.builder.builder("cmake")
|
||||
class CMakeBuilder(BaseBuilder):
|
||||
class CMakeBuilder(BuilderWithDefaults):
|
||||
"""The cmake builder encodes the default way of building software with CMake. IT
|
||||
has three phases that can be overridden:
|
||||
|
||||
@@ -321,15 +334,15 @@ class CMakeBuilder(BaseBuilder):
|
||||
build_time_test_callbacks = ["check"]
|
||||
|
||||
@property
|
||||
def archive_files(self):
|
||||
def archive_files(self) -> List[str]:
|
||||
"""Files to archive for packages based on CMake"""
|
||||
files = [os.path.join(self.build_directory, "CMakeCache.txt")]
|
||||
if _supports_compilation_databases(self):
|
||||
if _supports_compilation_databases(self.pkg):
|
||||
files.append(os.path.join(self.build_directory, "compile_commands.json"))
|
||||
return files
|
||||
|
||||
@property
|
||||
def root_cmakelists_dir(self):
|
||||
def root_cmakelists_dir(self) -> str:
|
||||
"""The relative path to the directory containing CMakeLists.txt
|
||||
|
||||
This path is relative to the root of the extracted tarball,
|
||||
@@ -338,16 +351,17 @@ def root_cmakelists_dir(self):
|
||||
return self.pkg.stage.source_path
|
||||
|
||||
@property
|
||||
def generator(self):
|
||||
def generator(self) -> str:
|
||||
if self.spec.satisfies("generator=make"):
|
||||
return "Unix Makefiles"
|
||||
if self.spec.satisfies("generator=ninja"):
|
||||
return "Ninja"
|
||||
msg = f'{self.spec.format()} has an unsupported value for the "generator" variant'
|
||||
raise ValueError(msg)
|
||||
raise ValueError(
|
||||
f'{self.spec.format()} has an unsupported value for the "generator" variant'
|
||||
)
|
||||
|
||||
@property
|
||||
def std_cmake_args(self):
|
||||
def std_cmake_args(self) -> List[str]:
|
||||
"""Standard cmake arguments provided as a property for
|
||||
convenience of package writers
|
||||
"""
|
||||
@@ -356,7 +370,9 @@ def std_cmake_args(self):
|
||||
return args
|
||||
|
||||
@staticmethod
|
||||
def std_args(pkg, generator=None):
|
||||
def std_args(
|
||||
pkg: spack.package_base.PackageBase, generator: Optional[str] = None
|
||||
) -> List[str]:
|
||||
"""Computes the standard cmake arguments for a generic package"""
|
||||
default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
|
||||
generator = generator or default_generator
|
||||
@@ -373,7 +389,6 @@ def std_args(pkg, generator=None):
|
||||
except KeyError:
|
||||
build_type = "RelWithDebInfo"
|
||||
|
||||
define = CMakeBuilder.define
|
||||
args = [
|
||||
"-G",
|
||||
generator,
|
||||
@@ -405,152 +420,31 @@ def std_args(pkg, generator=None):
|
||||
return args
|
||||
|
||||
@staticmethod
|
||||
def define_cuda_architectures(pkg):
|
||||
"""Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.
|
||||
|
||||
``cuda_arch`` is variant composed of a list of target CUDA architectures and
|
||||
it is declared in the cuda package.
|
||||
|
||||
This method is no-op for cmake<3.18 and when ``cuda_arch`` variant is not set.
|
||||
|
||||
"""
|
||||
cmake_flag = str()
|
||||
if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
|
||||
cmake_flag = CMakeBuilder.define(
|
||||
"CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value
|
||||
)
|
||||
|
||||
return cmake_flag
|
||||
def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
|
||||
return define_cuda_architectures(pkg)
|
||||
|
||||
@staticmethod
|
||||
def define_hip_architectures(pkg):
|
||||
"""Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.
|
||||
|
||||
``amdgpu_target`` is variant composed of a list of the target HIP
|
||||
architectures and it is declared in the rocm package.
|
||||
|
||||
This method is no-op for cmake<3.18 and when ``amdgpu_target`` variant is
|
||||
not set.
|
||||
|
||||
"""
|
||||
cmake_flag = str()
|
||||
if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
|
||||
cmake_flag = CMakeBuilder.define(
|
||||
"CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value
|
||||
)
|
||||
|
||||
return cmake_flag
|
||||
def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
|
||||
return define_hip_architectures(pkg)
|
||||
|
||||
@staticmethod
|
||||
def define(cmake_var, value):
|
||||
"""Return a CMake command line argument that defines a variable.
|
||||
def define(cmake_var: str, value: Any) -> str:
|
||||
return define(cmake_var, value)
|
||||
|
||||
The resulting argument will convert boolean values to OFF/ON
|
||||
and lists/tuples to CMake semicolon-separated string lists. All other
|
||||
values will be interpreted as strings.
|
||||
|
||||
Examples:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
[define('BUILD_SHARED_LIBS', True),
|
||||
define('CMAKE_CXX_STANDARD', 14),
|
||||
define('swr', ['avx', 'avx2'])]
|
||||
|
||||
will generate the following configuration options:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
["-DBUILD_SHARED_LIBS:BOOL=ON",
|
||||
"-DCMAKE_CXX_STANDARD:STRING=14",
|
||||
"-DSWR:STRING=avx;avx2]
|
||||
|
||||
"""
|
||||
# Create a list of pairs. Each pair includes a configuration
|
||||
# option and whether or not that option is activated
|
||||
if isinstance(value, bool):
|
||||
kind = "BOOL"
|
||||
value = "ON" if value else "OFF"
|
||||
else:
|
||||
kind = "STRING"
|
||||
if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
|
||||
value = ";".join(str(v) for v in value)
|
||||
else:
|
||||
value = str(value)
|
||||
|
||||
return "".join(["-D", cmake_var, ":", kind, "=", value])
|
||||
|
||||
def define_from_variant(self, cmake_var, variant=None):
|
||||
"""Return a CMake command line argument from the given variant's value.
|
||||
|
||||
The optional ``variant`` argument defaults to the lower-case transform
|
||||
of ``cmake_var``.
|
||||
|
||||
This utility function is similar to
|
||||
:meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`.
|
||||
|
||||
Examples:
|
||||
|
||||
Given a package with:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant('cxxstd', default='11', values=('11', '14'),
|
||||
multi=False, description='')
|
||||
variant('shared', default=True, description='')
|
||||
variant('swr', values=any_combination_of('avx', 'avx2'),
|
||||
description='')
|
||||
|
||||
calling this function like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
[self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
|
||||
self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
|
||||
self.define_from_variant('SWR')]
|
||||
|
||||
will generate the following configuration options:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
["-DBUILD_SHARED_LIBS:BOOL=ON",
|
||||
"-DCMAKE_CXX_STANDARD:STRING=14",
|
||||
"-DSWR:STRING=avx;avx2]
|
||||
|
||||
for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
|
||||
|
||||
Note: if the provided variant is conditional, and the condition is not met,
|
||||
this function returns an empty string. CMake discards empty strings
|
||||
provided on the command line.
|
||||
"""
|
||||
|
||||
if variant is None:
|
||||
variant = cmake_var.lower()
|
||||
|
||||
if not self.pkg.has_variant(variant):
|
||||
raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))
|
||||
|
||||
if variant not in self.pkg.spec.variants:
|
||||
return ""
|
||||
|
||||
value = self.pkg.spec.variants[variant].value
|
||||
if isinstance(value, (tuple, list)):
|
||||
# Sort multi-valued variants for reproducibility
|
||||
value = sorted(value)
|
||||
|
||||
return self.define(cmake_var, value)
|
||||
def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
|
||||
return define_from_variant(self.pkg, cmake_var, variant)
|
||||
|
||||
@property
|
||||
def build_dirname(self):
|
||||
def build_dirname(self) -> str:
|
||||
"""Directory name to use when building the package."""
|
||||
return "spack-build-%s" % self.pkg.spec.dag_hash(7)
|
||||
return f"spack-build-{self.pkg.spec.dag_hash(7)}"
|
||||
|
||||
@property
|
||||
def build_directory(self):
|
||||
def build_directory(self) -> str:
|
||||
"""Full-path to the directory to use when building the package."""
|
||||
return os.path.join(self.pkg.stage.path, self.build_dirname)
|
||||
|
||||
def cmake_args(self):
|
||||
def cmake_args(self) -> List[str]:
|
||||
"""List of all the arguments that must be passed to cmake, except:
|
||||
|
||||
* CMAKE_INSTALL_PREFIX
|
||||
@@ -560,7 +454,12 @@ def cmake_args(self):
|
||||
"""
|
||||
return []
|
||||
|
||||
def cmake(self, pkg, spec, prefix):
|
||||
def cmake(
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Runs ``cmake`` in the build directory"""
|
||||
|
||||
# skip cmake phase if it is an incremental develop build
|
||||
@@ -575,7 +474,12 @@ def cmake(self, pkg, spec, prefix):
|
||||
with fs.working_dir(self.build_directory, create=True):
|
||||
pkg.module.cmake(*options)
|
||||
|
||||
def build(self, pkg, spec, prefix):
|
||||
def build(
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Make the build targets"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
if self.generator == "Unix Makefiles":
|
||||
@@ -584,7 +488,12 @@ def build(self, pkg, spec, prefix):
|
||||
self.build_targets.append("-v")
|
||||
pkg.module.ninja(*self.build_targets)
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
def install(
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Make the install targets"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
if self.generator == "Unix Makefiles":
|
||||
@@ -592,9 +501,9 @@ def install(self, pkg, spec, prefix):
|
||||
elif self.generator == "Ninja":
|
||||
pkg.module.ninja(*self.install_targets)
|
||||
|
||||
spack.builder.run_after("build")(execute_build_time_tests)
|
||||
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
|
||||
|
||||
def check(self):
|
||||
def check(self) -> None:
|
||||
"""Search the CMake-generated files for the targets ``test`` and ``check``,
|
||||
and runs them if found.
|
||||
"""
|
||||
@@ -605,3 +514,133 @@ def check(self):
|
||||
elif self.generator == "Ninja":
|
||||
self.pkg._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
|
||||
self.pkg._if_ninja_target_execute("check")
|
||||
|
||||
|
||||
def define(cmake_var: str, value: Any) -> str:
|
||||
"""Return a CMake command line argument that defines a variable.
|
||||
|
||||
The resulting argument will convert boolean values to OFF/ON and lists/tuples to CMake
|
||||
semicolon-separated string lists. All other values will be interpreted as strings.
|
||||
|
||||
Examples:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
[define("BUILD_SHARED_LIBS", True),
|
||||
define("CMAKE_CXX_STANDARD", 14),
|
||||
define("swr", ["avx", "avx2"])]
|
||||
|
||||
will generate the following configuration options:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
["-DBUILD_SHARED_LIBS:BOOL=ON",
|
||||
"-DCMAKE_CXX_STANDARD:STRING=14",
|
||||
"-DSWR:STRING=avx;avx2]
|
||||
|
||||
"""
|
||||
# Determine the CMake kind (BOOL or STRING) and normalize the value:
# sequences become semicolon-separated CMake lists, everything else a string
|
||||
if isinstance(value, bool):
|
||||
kind = "BOOL"
|
||||
value = "ON" if value else "OFF"
|
||||
else:
|
||||
kind = "STRING"
|
||||
if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
|
||||
value = ";".join(str(v) for v in value)
|
||||
else:
|
||||
value = str(value)
|
||||
|
||||
return "".join(["-D", cmake_var, ":", kind, "=", value])
|
||||
|
||||
|
||||
def define_from_variant(
|
||||
pkg: spack.package_base.PackageBase, cmake_var: str, variant: Optional[str] = None
|
||||
) -> str:
|
||||
"""Return a CMake command line argument from the given variant's value.
|
||||
|
||||
The optional ``variant`` argument defaults to the lower-case transform
|
||||
of ``cmake_var``.
|
||||
|
||||
Examples:
|
||||
|
||||
Given a package with:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
variant("cxxstd", default="11", values=("11", "14"),
|
||||
multi=False, description="")
|
||||
variant("shared", default=True, description="")
|
||||
variant("swr", values=any_combination_of("avx", "avx2"),
|
||||
description="")
|
||||
|
||||
calling this function like:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
[
|
||||
self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
|
||||
self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
|
||||
self.define_from_variant("SWR"),
|
||||
]
|
||||
|
||||
will generate the following configuration options:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
[
|
||||
"-DBUILD_SHARED_LIBS:BOOL=ON",
|
||||
"-DCMAKE_CXX_STANDARD:STRING=14",
|
||||
"-DSWR:STRING=avx;avx2",
|
||||
]
|
||||
|
||||
for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
|
||||
|
||||
Note: if the provided variant is conditional, and the condition is not met, this function
|
||||
returns an empty string. CMake discards empty strings provided on the command line.
|
||||
"""
|
||||
if variant is None:
|
||||
variant = cmake_var.lower()
|
||||
|
||||
if not pkg.has_variant(variant):
|
||||
raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, pkg.name))
|
||||
|
||||
if variant not in pkg.spec.variants:
|
||||
return ""
|
||||
|
||||
value = pkg.spec.variants[variant].value
|
||||
if isinstance(value, (tuple, list)):
|
||||
# Sort multi-valued variants for reproducibility
|
||||
value = sorted(value)
|
||||
|
||||
return define(cmake_var, value)
|
||||
|
||||
|
||||
def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
|
||||
"""Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.
|
||||
|
||||
``amdgpu_target`` is a variant composed of a list of the target HIP
|
||||
architectures and it is declared in the rocm package.
|
||||
|
||||
This method is a no-op for cmake<3.21 and when the ``amdgpu_target`` variant is
|
||||
not set.
|
||||
|
||||
"""
|
||||
if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
|
||||
return define("CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value)
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
|
||||
"""Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.
|
||||
|
||||
``cuda_arch`` is a variant composed of a list of target CUDA architectures and
|
||||
it is declared in the cuda package.
|
||||
|
||||
This method is a no-op for cmake<3.18 and when the ``cuda_arch`` variant is not set.
|
||||
|
||||
"""
|
||||
if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
|
||||
return define("CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value)
|
||||
return ""
|
||||
|
@@ -180,13 +180,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
|
||||
conflicts("%gcc@7:", when="+cuda ^cuda@:9.1 target=x86_64:")
|
||||
conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=x86_64:")
|
||||
conflicts("%gcc@9:", when="+cuda ^cuda@:10.2.89 target=x86_64:")
|
||||
conflicts("%pgi@:14.8", when="+cuda ^cuda@:7.0.27 target=x86_64:")
|
||||
conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
|
||||
conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
|
||||
conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
|
||||
conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10.0 target=x86_64:")
|
||||
conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
|
||||
conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
|
||||
conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")
|
||||
conflicts("%clang@:3.7,4:", when="+cuda ^cuda@8.0:9.0 target=x86_64:")
|
||||
conflicts("%clang@:3.7,4.1:", when="+cuda ^cuda@9.1 target=x86_64:")
|
||||
@@ -212,9 +205,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
|
||||
conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=ppc64le:")
|
||||
conflicts("%gcc@9:", when="+cuda ^cuda@:10.1.243 target=ppc64le:")
|
||||
# officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
|
||||
conflicts("%pgi", when="+cuda ^cuda@:8 target=ppc64le:")
|
||||
conflicts("%pgi@:16", when="+cuda ^cuda@:9.1.185 target=ppc64le:")
|
||||
conflicts("%pgi@:17", when="+cuda ^cuda@:10 target=ppc64le:")
|
||||
conflicts("%clang@4:", when="+cuda ^cuda@:9.0.176 target=ppc64le:")
|
||||
conflicts("%clang@5:", when="+cuda ^cuda@:9.1 target=ppc64le:")
|
||||
conflicts("%clang@6:", when="+cuda ^cuda@:9.2 target=ppc64le:")
|
||||
|
@@ -7,8 +7,9 @@
|
||||
import spack.builder
|
||||
import spack.directives
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
|
||||
from ._checks import BaseBuilder, apply_macos_rpath_fixups, execute_install_time_tests
|
||||
from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests
|
||||
|
||||
|
||||
class Package(spack.package_base.PackageBase):
|
||||
@@ -26,7 +27,7 @@ class Package(spack.package_base.PackageBase):
|
||||
|
||||
|
||||
@spack.builder.builder("generic")
|
||||
class GenericBuilder(BaseBuilder):
|
||||
class GenericBuilder(BuilderWithDefaults):
|
||||
"""A builder for a generic build system, that require packagers
|
||||
to implement an "install" phase.
|
||||
"""
|
||||
@@ -44,7 +45,7 @@ class GenericBuilder(BaseBuilder):
|
||||
install_time_test_callbacks = []
|
||||
|
||||
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
|
||||
spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
|
||||
spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
|
||||
|
||||
# unconditionally perform any post-install phase tests
|
||||
spack.builder.run_after("install")(execute_install_time_tests)
|
||||
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
|
||||
|
@@ -7,10 +7,11 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
from spack.directives import build_system, extends
|
||||
from spack.multimethod import when
|
||||
|
||||
from ._checks import BaseBuilder, execute_install_time_tests
|
||||
from ._checks import BuilderWithDefaults, execute_install_time_tests
|
||||
|
||||
|
||||
class GoPackage(spack.package_base.PackageBase):
|
||||
@@ -32,7 +33,7 @@ class GoPackage(spack.package_base.PackageBase):
|
||||
|
||||
|
||||
@spack.builder.builder("go")
|
||||
class GoBuilder(BaseBuilder):
|
||||
class GoBuilder(BuilderWithDefaults):
|
||||
"""The Go builder encodes the most common way of building software with
|
||||
a golang go.mod file. It has two phases that can be overridden, if need be:
|
||||
|
||||
@@ -99,7 +100,7 @@ def install(self, pkg, spec, prefix):
|
||||
fs.mkdirp(prefix.bin)
|
||||
fs.install(pkg.name, prefix.bin)
|
||||
|
||||
spack.builder.run_after("install")(execute_install_time_tests)
|
||||
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
|
||||
|
||||
def check(self):
|
||||
"""Run ``go test .`` in the source directory"""
|
||||
|
@@ -22,8 +22,8 @@
|
||||
install,
|
||||
)
|
||||
|
||||
import spack.builder
|
||||
import spack.error
|
||||
import spack.phase_callbacks
|
||||
from spack.build_environment import dso_suffix
|
||||
from spack.error import InstallError
|
||||
from spack.util.environment import EnvironmentModifications
|
||||
@@ -1153,7 +1153,7 @@ def _determine_license_type(self):
|
||||
# The file will have been created upon self.license_required AND
|
||||
# self.license_files having been populated, so the "if" is usually
|
||||
# true by the time the present function runs; ../hooks/licensing.py
|
||||
with open(f) as fh:
|
||||
with open(f, encoding="utf-8") as fh:
|
||||
if re.search(r"^[ \t]*[^" + self.license_comment + "\n]", fh.read(), re.MULTILINE):
|
||||
license_type = {
|
||||
"ACTIVATION_TYPE": "license_file",
|
||||
@@ -1163,7 +1163,7 @@ def _determine_license_type(self):
|
||||
debug_print(license_type)
|
||||
return license_type
|
||||
|
||||
@spack.builder.run_before("install")
|
||||
@spack.phase_callbacks.run_before("install")
|
||||
def configure(self):
|
||||
"""Generates the silent.cfg file to pass to installer.sh.
|
||||
|
||||
@@ -1185,7 +1185,7 @@ def configure(self):
|
||||
# our configuration accordingly. We can do this because the tokens are
|
||||
# quite long and specific.
|
||||
|
||||
validator_code = open("pset/check.awk", "r").read()
|
||||
validator_code = open("pset/check.awk", "r", encoding="utf-8").read()
|
||||
# Let's go a little further and distill the tokens (plus some noise).
|
||||
tokenlike_words = set(re.findall(r"[A-Z_]{4,}", validator_code))
|
||||
|
||||
@@ -1222,7 +1222,7 @@ def configure(self):
|
||||
config_draft.update(self._determine_license_type)
|
||||
|
||||
# Write sorted *by token* so the file looks less like a hash dump.
|
||||
f = open("silent.cfg", "w")
|
||||
f = open("silent.cfg", "w", encoding="utf-8")
|
||||
for token, value in sorted(config_draft.items()):
|
||||
if token in tokenlike_words:
|
||||
f.write("%s=%s\n" % (token, value))
|
||||
@@ -1250,7 +1250,7 @@ def install(self, spec, prefix):
|
||||
for f in glob.glob("%s/intel*log" % tmpdir):
|
||||
install(f, dst)
|
||||
|
||||
@spack.builder.run_after("install")
|
||||
@spack.phase_callbacks.run_after("install")
|
||||
def validate_install(self):
|
||||
# Sometimes the installer exits with an error but doesn't pass a
|
||||
# non-zero exit code to spack. Check for the existence of a 'bin'
|
||||
@@ -1258,7 +1258,7 @@ def validate_install(self):
|
||||
if not os.path.exists(self.prefix.bin):
|
||||
raise InstallError("The installer has failed to install anything.")
|
||||
|
||||
@spack.builder.run_after("install")
|
||||
@spack.phase_callbacks.run_after("install")
|
||||
def configure_rpath(self):
|
||||
if "+rpath" not in self.spec:
|
||||
return
|
||||
@@ -1273,10 +1273,10 @@ def configure_rpath(self):
|
||||
raise InstallError("Cannot find compiler command to configure rpath:\n\t" + f)
|
||||
|
||||
compiler_cfg = os.path.abspath(f + ".cfg")
|
||||
with open(compiler_cfg, "w") as fh:
|
||||
with open(compiler_cfg, "w", encoding="utf-8") as fh:
|
||||
fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir))
|
||||
|
||||
@spack.builder.run_after("install")
|
||||
@spack.phase_callbacks.run_after("install")
|
||||
def configure_auto_dispatch(self):
|
||||
if self._has_compilers:
|
||||
if "auto_dispatch=none" in self.spec:
|
||||
@@ -1297,10 +1297,10 @@ def configure_auto_dispatch(self):
|
||||
ad.append(x)
|
||||
|
||||
compiler_cfg = os.path.abspath(f + ".cfg")
|
||||
with open(compiler_cfg, "a") as fh:
|
||||
with open(compiler_cfg, "a", encoding="utf-8") as fh:
|
||||
fh.write("-ax{0}\n".format(",".join(ad)))
|
||||
|
||||
@spack.builder.run_after("install")
|
||||
@spack.phase_callbacks.run_after("install")
|
||||
def filter_compiler_wrappers(self):
|
||||
if ("+mpi" in self.spec or self.provides("mpi")) and "~newdtags" in self.spec:
|
||||
bin_dir = self.component_bin_dir("mpi")
|
||||
@@ -1308,7 +1308,7 @@ def filter_compiler_wrappers(self):
|
||||
f = os.path.join(bin_dir, f)
|
||||
filter_file("-Xlinker --enable-new-dtags", " ", f, string=True)
|
||||
|
||||
@spack.builder.run_after("install")
|
||||
@spack.phase_callbacks.run_after("install")
|
||||
def uninstall_ism(self):
|
||||
# The "Intel(R) Software Improvement Program" [ahem] gets installed,
|
||||
# apparently regardless of PHONEHOME_SEND_USAGE_DATA.
|
||||
@@ -1340,7 +1340,7 @@ def base_lib_dir(self):
|
||||
debug_print(d)
|
||||
return d
|
||||
|
||||
@spack.builder.run_after("install")
|
||||
@spack.phase_callbacks.run_after("install")
|
||||
def modify_LLVMgold_rpath(self):
|
||||
"""Add libimf.so and other required libraries to the RUNPATH of LLVMgold.so.
|
||||
|
||||
|
@@ -75,7 +75,7 @@ def generate_luarocks_config(self, pkg, spec, prefix):
|
||||
table_entries.append(self._generate_tree_line(d.name, d.prefix))
|
||||
|
||||
path = self._luarocks_config_path()
|
||||
with open(path, "w") as config:
|
||||
with open(path, "w", encoding="utf-8") as config:
|
||||
config.write(
|
||||
"""
|
||||
deps_mode="all"
|
||||
|
@@ -8,11 +8,14 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, conflicts, depends_on
|
||||
from spack.multimethod import when
|
||||
|
||||
from ._checks import (
|
||||
BaseBuilder,
|
||||
BuilderWithDefaults,
|
||||
apply_macos_rpath_fixups,
|
||||
execute_build_time_tests,
|
||||
execute_install_time_tests,
|
||||
@@ -36,7 +39,7 @@ class MakefilePackage(spack.package_base.PackageBase):
|
||||
|
||||
|
||||
@spack.builder.builder("makefile")
|
||||
class MakefileBuilder(BaseBuilder):
|
||||
class MakefileBuilder(BuilderWithDefaults):
|
||||
"""The Makefile builder encodes the most common way of building software with
|
||||
Makefiles. It has three phases that can be overridden, if need be:
|
||||
|
||||
@@ -91,35 +94,50 @@ class MakefileBuilder(BaseBuilder):
|
||||
install_time_test_callbacks = ["installcheck"]
|
||||
|
||||
@property
|
||||
def build_directory(self):
|
||||
def build_directory(self) -> str:
|
||||
"""Return the directory containing the main Makefile."""
|
||||
return self.pkg.stage.source_path
|
||||
|
||||
def edit(self, pkg, spec, prefix):
|
||||
def edit(
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Edit the Makefile before calling make. The default is a no-op."""
|
||||
pass
|
||||
|
||||
def build(self, pkg, spec, prefix):
|
||||
def build(
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Run "make" on the build targets specified by the builder."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
pkg.module.make(*self.build_targets)
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
def install(
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Run "make" on the install targets specified by the builder."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
pkg.module.make(*self.install_targets)
|
||||
|
||||
spack.builder.run_after("build")(execute_build_time_tests)
|
||||
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
|
||||
|
||||
def check(self):
|
||||
def check(self) -> None:
|
||||
"""Run "make" on the ``test`` and ``check`` targets, if found."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
self.pkg._if_make_target_execute("test")
|
||||
self.pkg._if_make_target_execute("check")
|
||||
|
||||
spack.builder.run_after("install")(execute_install_time_tests)
|
||||
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
|
||||
|
||||
def installcheck(self):
|
||||
def installcheck(self) -> None:
|
||||
"""Searches the Makefile for an ``installcheck`` target
|
||||
and runs it if found.
|
||||
"""
|
||||
@@ -127,4 +145,4 @@ def installcheck(self):
|
||||
self.pkg._if_make_target_execute("installcheck")
|
||||
|
||||
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
|
||||
spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
|
||||
spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
|
||||
|
@@ -10,7 +10,7 @@
|
||||
from spack.multimethod import when
|
||||
from spack.util.executable import which
|
||||
|
||||
from ._checks import BaseBuilder
|
||||
from ._checks import BuilderWithDefaults
|
||||
|
||||
|
||||
class MavenPackage(spack.package_base.PackageBase):
|
||||
@@ -34,7 +34,7 @@ class MavenPackage(spack.package_base.PackageBase):
|
||||
|
||||
|
||||
@spack.builder.builder("maven")
|
||||
class MavenBuilder(BaseBuilder):
|
||||
class MavenBuilder(BuilderWithDefaults):
|
||||
"""The Maven builder encodes the default way to build software with Maven.
|
||||
It has two phases that can be overridden, if need be:
|
||||
|
||||
|
@@ -9,10 +9,13 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.spec
|
||||
import spack.util.prefix
|
||||
from spack.directives import build_system, conflicts, depends_on, variant
|
||||
from spack.multimethod import when
|
||||
|
||||
from ._checks import BaseBuilder, execute_build_time_tests
|
||||
from ._checks import BuilderWithDefaults, execute_build_time_tests
|
||||
|
||||
|
||||
class MesonPackage(spack.package_base.PackageBase):
|
||||
@@ -62,7 +65,7 @@ def flags_to_build_system_args(self, flags):
|
||||
|
||||
|
||||
@spack.builder.builder("meson")
|
||||
class MesonBuilder(BaseBuilder):
|
||||
class MesonBuilder(BuilderWithDefaults):
|
||||
"""The Meson builder encodes the default way to build software with Meson.
|
||||
The builder has three phases that can be overridden, if need be:
|
||||
|
||||
@@ -112,7 +115,7 @@ def archive_files(self):
|
||||
return [os.path.join(self.build_directory, "meson-logs", "meson-log.txt")]
|
||||
|
||||
@property
|
||||
def root_mesonlists_dir(self):
|
||||
def root_mesonlists_dir(self) -> str:
|
||||
"""Relative path to the directory containing meson.build
|
||||
|
||||
This path is relative to the root of the extracted tarball,
|
||||
@@ -121,7 +124,7 @@ def root_mesonlists_dir(self):
|
||||
return self.pkg.stage.source_path
|
||||
|
||||
@property
|
||||
def std_meson_args(self):
|
||||
def std_meson_args(self) -> List[str]:
|
||||
"""Standard meson arguments provided as a property for convenience
|
||||
of package writers.
|
||||
"""
|
||||
@@ -132,7 +135,7 @@ def std_meson_args(self):
|
||||
return std_meson_args
|
||||
|
||||
@staticmethod
|
||||
def std_args(pkg):
|
||||
def std_args(pkg) -> List[str]:
|
||||
"""Standard meson arguments for a generic package."""
|
||||
try:
|
||||
build_type = pkg.spec.variants["buildtype"].value
|
||||
@@ -172,7 +175,7 @@ def build_directory(self):
|
||||
"""Directory to use when building the package."""
|
||||
return os.path.join(self.pkg.stage.path, self.build_dirname)
|
||||
|
||||
def meson_args(self):
|
||||
def meson_args(self) -> List[str]:
|
||||
"""List of arguments that must be passed to meson, except:
|
||||
|
||||
* ``--prefix``
|
||||
@@ -185,7 +188,12 @@ def meson_args(self):
|
||||
"""
|
||||
return []
|
||||
|
||||
def meson(self, pkg, spec, prefix):
|
||||
def meson(
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Run ``meson`` in the build directory"""
|
||||
options = []
|
||||
if self.spec["meson"].satisfies("@0.64:"):
|
||||
@@ -196,21 +204,31 @@ def meson(self, pkg, spec, prefix):
|
||||
with fs.working_dir(self.build_directory, create=True):
|
||||
pkg.module.meson(*options)
|
||||
|
||||
def build(self, pkg, spec, prefix):
|
||||
def build(
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Make the build targets"""
|
||||
options = ["-v"]
|
||||
options += self.build_targets
|
||||
with fs.working_dir(self.build_directory):
|
||||
pkg.module.ninja(*options)
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
def install(
|
||||
self,
|
||||
pkg: spack.package_base.PackageBase,
|
||||
spec: spack.spec.Spec,
|
||||
prefix: spack.util.prefix.Prefix,
|
||||
) -> None:
|
||||
"""Make the install targets"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
pkg.module.ninja(*self.install_targets)
|
||||
|
||||
spack.builder.run_after("build")(execute_build_time_tests)
|
||||
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
|
||||
|
||||
def check(self):
|
||||
def check(self) -> None:
|
||||
"""Search Meson-generated files for the target ``test`` and run it if found."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
self.pkg._if_ninja_target_execute("test")
|
||||
|
@@ -10,7 +10,7 @@
|
||||
import spack.package_base
|
||||
from spack.directives import build_system, conflicts
|
||||
|
||||
from ._checks import BaseBuilder
|
||||
from ._checks import BuilderWithDefaults
|
||||
|
||||
|
||||
class MSBuildPackage(spack.package_base.PackageBase):
|
||||
@@ -26,7 +26,7 @@ class MSBuildPackage(spack.package_base.PackageBase):
|
||||
|
||||
|
||||
@spack.builder.builder("msbuild")
|
||||
class MSBuildBuilder(BaseBuilder):
|
||||
class MSBuildBuilder(BuilderWithDefaults):
|
||||
"""The MSBuild builder encodes the most common way of building software with
|
||||
Microsoft's MSBuild tool. It has two phases that can be overridden, if need be:
|
||||
|
||||
|
@@ -10,7 +10,7 @@
|
||||
import spack.package_base
|
||||
from spack.directives import build_system, conflicts
|
||||
|
||||
from ._checks import BaseBuilder
|
||||
from ._checks import BuilderWithDefaults
|
||||
|
||||
|
||||
class NMakePackage(spack.package_base.PackageBase):
|
||||
@@ -26,7 +26,7 @@ class NMakePackage(spack.package_base.PackageBase):
|
||||
|
||||
|
||||
@spack.builder.builder("nmake")
|
||||
class NMakeBuilder(BaseBuilder):
|
||||
class NMakeBuilder(BuilderWithDefaults):
|
||||
"""The NMake builder encodes the most common way of building software with
|
||||
Microsoft's NMake tool. It has two phases that can be overridden, if need be:
|
||||
|
||||
|
@@ -7,7 +7,7 @@
|
||||
from spack.directives import build_system, extends
|
||||
from spack.multimethod import when
|
||||
|
||||
from ._checks import BaseBuilder
|
||||
from ._checks import BuilderWithDefaults
|
||||
|
||||
|
||||
class OctavePackage(spack.package_base.PackageBase):
|
||||
@@ -29,7 +29,7 @@ class OctavePackage(spack.package_base.PackageBase):
|
||||
|
||||
|
||||
@spack.builder.builder("octave")
|
||||
class OctaveBuilder(BaseBuilder):
|
||||
class OctaveBuilder(BuilderWithDefaults):
|
||||
"""The octave builder provides the following phases that can be overridden:
|
||||
|
||||
1. :py:meth:`~.OctaveBuilder.install`
|
||||
|
@@ -32,6 +32,9 @@ class IntelOneApiPackage(Package):
|
||||
# organization (e.g. University/Company).
|
||||
redistribute(source=False, binary=False)
|
||||
|
||||
# contains precompiled binaries without rpaths
|
||||
unresolved_libraries = ["*"]
|
||||
|
||||
for c in [
|
||||
"target=ppc64:",
|
||||
"target=ppc64le:",
|
||||
@@ -255,7 +258,7 @@ def libs(self):
|
||||
return find_libraries("*", root=self.component_prefix.lib, recursive=not self.v2_layout)
|
||||
|
||||
|
||||
class IntelOneApiLibraryPackageWithSdk(IntelOneApiPackage):
|
||||
class IntelOneApiLibraryPackageWithSdk(IntelOneApiLibraryPackage):
|
||||
"""Base class for Intel oneAPI library packages with SDK components.
|
||||
|
||||
Contains some convenient default implementations for libraries
|
||||
|
@@ -10,11 +10,12 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
from spack.directives import build_system, extends
|
||||
from spack.install_test import SkipTest, test_part
|
||||
from spack.util.executable import Executable
|
||||
|
||||
from ._checks import BaseBuilder, execute_build_time_tests
|
||||
from ._checks import BuilderWithDefaults, execute_build_time_tests
|
||||
|
||||
|
||||
class PerlPackage(spack.package_base.PackageBase):
|
||||
@@ -84,7 +85,7 @@ def test_use(self):
|
||||
|
||||
|
||||
@spack.builder.builder("perl")
|
||||
class PerlBuilder(BaseBuilder):
|
||||
class PerlBuilder(BuilderWithDefaults):
|
||||
"""The perl builder provides four phases that can be overridden, if required:
|
||||
|
||||
1. :py:meth:`~.PerlBuilder.configure`
|
||||
@@ -163,7 +164,7 @@ def configure(self, pkg, spec, prefix):
|
||||
# Build.PL may be too long causing the build to fail. Patching the shebang
|
||||
# does not happen until after install so set '/usr/bin/env perl' here in
|
||||
# the Build script.
|
||||
@spack.builder.run_after("configure")
|
||||
@spack.phase_callbacks.run_after("configure")
|
||||
def fix_shebang(self):
|
||||
if self.build_method == "Build.PL":
|
||||
pattern = "#!{0}".format(self.spec["perl"].command.path)
|
||||
@@ -175,7 +176,7 @@ def build(self, pkg, spec, prefix):
|
||||
self.build_executable()
|
||||
|
||||
# Ensure that tests run after build (if requested):
|
||||
spack.builder.run_after("build")(execute_build_time_tests)
|
||||
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
|
||||
|
||||
def check(self):
|
||||
"""Runs built-in tests of a Perl package."""
|
||||
|
@@ -24,6 +24,7 @@
|
||||
import spack.detection
|
||||
import spack.multimethod
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.platforms
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
@@ -34,7 +35,7 @@
|
||||
from spack.spec import Spec
|
||||
from spack.util.prefix import Prefix
|
||||
|
||||
from ._checks import BaseBuilder, execute_install_time_tests
|
||||
from ._checks import BuilderWithDefaults, execute_install_time_tests
|
||||
|
||||
|
||||
def _flatten_dict(dictionary: Mapping[str, object]) -> Iterable[str]:
|
||||
@@ -374,7 +375,7 @@ def list_url(cls) -> Optional[str]: # type: ignore[override]
|
||||
return None
|
||||
|
||||
@property
|
||||
def python_spec(self):
|
||||
def python_spec(self) -> Spec:
|
||||
"""Get python-venv if it exists or python otherwise."""
|
||||
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
|
||||
return python
|
||||
@@ -425,7 +426,7 @@ def libs(self) -> LibraryList:
|
||||
|
||||
|
||||
@spack.builder.builder("python_pip")
|
||||
class PythonPipBuilder(BaseBuilder):
|
||||
class PythonPipBuilder(BuilderWithDefaults):
|
||||
phases = ("install",)
|
||||
|
||||
#: Names associated with package methods in the old build-system format
|
||||
@@ -543,4 +544,4 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
with fs.working_dir(self.build_directory):
|
||||
pip(*args)
|
||||
|
||||
spack.builder.run_after("install")(execute_install_time_tests)
|
||||
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
|
||||
|
@@ -6,9 +6,10 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
from spack.directives import build_system, depends_on
|
||||
|
||||
from ._checks import BaseBuilder, execute_build_time_tests
|
||||
from ._checks import BuilderWithDefaults, execute_build_time_tests
|
||||
|
||||
|
||||
class QMakePackage(spack.package_base.PackageBase):
|
||||
@@ -30,7 +31,7 @@ class QMakePackage(spack.package_base.PackageBase):
|
||||
|
||||
|
||||
@spack.builder.builder("qmake")
|
||||
class QMakeBuilder(BaseBuilder):
|
||||
class QMakeBuilder(BuilderWithDefaults):
|
||||
"""The qmake builder provides three phases that can be overridden:
|
||||
|
||||
1. :py:meth:`~.QMakeBuilder.qmake`
|
||||
@@ -81,4 +82,4 @@ def check(self):
|
||||
with working_dir(self.build_directory):
|
||||
self.pkg._if_make_target_execute("check")
|
||||
|
||||
spack.builder.run_after("build")(execute_build_time_tests)
|
||||
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
|
||||
|
@@ -8,7 +8,7 @@
|
||||
import spack.package_base
|
||||
from spack.directives import build_system, extends, maintainers
|
||||
|
||||
from ._checks import BaseBuilder
|
||||
from ._checks import BuilderWithDefaults
|
||||
|
||||
|
||||
class RubyPackage(spack.package_base.PackageBase):
|
||||
@@ -28,7 +28,7 @@ class RubyPackage(spack.package_base.PackageBase):
|
||||
|
||||
|
||||
@spack.builder.builder("ruby")
|
||||
class RubyBuilder(BaseBuilder):
|
||||
class RubyBuilder(BuilderWithDefaults):
|
||||
"""The Ruby builder provides two phases that can be overridden if required:
|
||||
|
||||
#. :py:meth:`~.RubyBuilder.build`
|
||||
|
@@ -4,9 +4,10 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
from spack.directives import build_system, depends_on
|
||||
|
||||
from ._checks import BaseBuilder, execute_build_time_tests
|
||||
from ._checks import BuilderWithDefaults, execute_build_time_tests
|
||||
|
||||
|
||||
class SConsPackage(spack.package_base.PackageBase):
|
||||
@@ -28,7 +29,7 @@ class SConsPackage(spack.package_base.PackageBase):
|
||||
|
||||
|
||||
@spack.builder.builder("scons")
|
||||
class SConsBuilder(BaseBuilder):
|
||||
class SConsBuilder(BuilderWithDefaults):
|
||||
"""The Scons builder provides the following phases that can be overridden:
|
||||
|
||||
1. :py:meth:`~.SConsBuilder.build`
|
||||
@@ -79,4 +80,4 @@ def build_test(self):
|
||||
"""
|
||||
pass
|
||||
|
||||
spack.builder.run_after("build")(execute_build_time_tests)
|
||||
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
|
||||
|
@@ -11,11 +11,12 @@
|
||||
import spack.builder
|
||||
import spack.install_test
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
from spack.directives import build_system, depends_on, extends
|
||||
from spack.multimethod import when
|
||||
from spack.util.executable import Executable
|
||||
|
||||
from ._checks import BaseBuilder, execute_install_time_tests
|
||||
from ._checks import BuilderWithDefaults, execute_install_time_tests
|
||||
|
||||
|
||||
class SIPPackage(spack.package_base.PackageBase):
|
||||
@@ -103,7 +104,7 @@ def test_imports(self):
|
||||
|
||||
|
||||
@spack.builder.builder("sip")
|
||||
class SIPBuilder(BaseBuilder):
|
||||
class SIPBuilder(BuilderWithDefaults):
|
||||
"""The SIP builder provides the following phases that can be overridden:
|
||||
|
||||
* configure
|
||||
@@ -170,4 +171,4 @@ def install_args(self):
|
||||
"""Arguments to pass to install."""
|
||||
return []
|
||||
|
||||
spack.builder.run_after("install")(execute_install_time_tests)
|
||||
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
|
||||
|
@@ -6,9 +6,10 @@
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
from spack.directives import build_system, depends_on
|
||||
|
||||
from ._checks import BaseBuilder, execute_build_time_tests, execute_install_time_tests
|
||||
from ._checks import BuilderWithDefaults, execute_build_time_tests, execute_install_time_tests
|
||||
|
||||
|
||||
class WafPackage(spack.package_base.PackageBase):
|
||||
@@ -30,7 +31,7 @@ class WafPackage(spack.package_base.PackageBase):
|
||||
|
||||
|
||||
@spack.builder.builder("waf")
|
||||
class WafBuilder(BaseBuilder):
|
||||
class WafBuilder(BuilderWithDefaults):
|
||||
"""The WAF builder provides the following phases that can be overridden:
|
||||
|
||||
* configure
|
||||
@@ -136,7 +137,7 @@ def build_test(self):
|
||||
"""
|
||||
pass
|
||||
|
||||
spack.builder.run_after("build")(execute_build_time_tests)
|
||||
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
|
||||
|
||||
def install_test(self):
|
||||
"""Run unit tests after install.
|
||||
@@ -146,4 +147,4 @@ def install_test(self):
|
||||
"""
|
||||
pass
|
||||
|
||||
spack.builder.run_after("install")(execute_install_time_tests)
|
||||
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
|
||||
|
@@ -6,44 +6,30 @@
|
||||
import collections.abc
|
||||
import copy
|
||||
import functools
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from llnl.util import lang
|
||||
from typing import Dict, List, Optional, Tuple, Type
|
||||
|
||||
import spack.error
|
||||
import spack.multimethod
|
||||
import spack.package_base
|
||||
import spack.phase_callbacks
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.util.environment
|
||||
|
||||
#: Builder classes, as registered by the "builder" decorator
|
||||
BUILDER_CLS = {}
|
||||
|
||||
#: An object of this kind is a shared global state used to collect callbacks during
|
||||
#: class definition time, and is flushed when the class object is created at the end
|
||||
#: of the class definition
|
||||
#:
|
||||
#: Args:
|
||||
#: attribute_name (str): name of the attribute that will be attached to the builder
|
||||
#: callbacks (list): container used to temporarily aggregate the callbacks
|
||||
CallbackTemporaryStage = collections.namedtuple(
|
||||
"CallbackTemporaryStage", ["attribute_name", "callbacks"]
|
||||
)
|
||||
|
||||
#: Shared global state to aggregate "@run_before" callbacks
|
||||
_RUN_BEFORE = CallbackTemporaryStage(attribute_name="run_before_callbacks", callbacks=[])
|
||||
#: Shared global state to aggregate "@run_after" callbacks
|
||||
_RUN_AFTER = CallbackTemporaryStage(attribute_name="run_after_callbacks", callbacks=[])
|
||||
BUILDER_CLS: Dict[str, Type["Builder"]] = {}
|
||||
|
||||
#: Map id(pkg) to a builder, to avoid creating multiple
|
||||
#: builders for the same package object.
|
||||
_BUILDERS = {}
|
||||
_BUILDERS: Dict[int, "Builder"] = {}
|
||||
|
||||
|
||||
def builder(build_system_name):
|
||||
def builder(build_system_name: str):
|
||||
"""Class decorator used to register the default builder
|
||||
for a given build-system.
|
||||
|
||||
Args:
|
||||
build_system_name (str): name of the build-system
|
||||
build_system_name: name of the build-system
|
||||
"""
|
||||
|
||||
def _decorator(cls):
|
||||
@@ -54,13 +40,9 @@ def _decorator(cls):
|
||||
return _decorator
|
||||
|
||||
|
||||
def create(pkg):
|
||||
"""Given a package object with an associated concrete spec,
|
||||
return the builder object that can install it.
|
||||
|
||||
Args:
|
||||
pkg (spack.package_base.PackageBase): package for which we want the builder
|
||||
"""
|
||||
def create(pkg: spack.package_base.PackageBase) -> "Builder":
|
||||
"""Given a package object with an associated concrete spec, return the builder object that can
|
||||
install it."""
|
||||
if id(pkg) not in _BUILDERS:
|
||||
_BUILDERS[id(pkg)] = _create(pkg)
|
||||
return _BUILDERS[id(pkg)]
|
||||
@@ -75,7 +57,7 @@ def __call__(self, spec, prefix):
|
||||
return self.phase_fn(self.builder.pkg, spec, prefix)
|
||||
|
||||
|
||||
def get_builder_class(pkg, name: str) -> Optional[type]:
|
||||
def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
|
||||
"""Return the builder class if a package module defines it."""
|
||||
cls = getattr(pkg.module, name, None)
|
||||
if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
|
||||
@@ -83,7 +65,7 @@ def get_builder_class(pkg, name: str) -> Optional[type]:
|
||||
return None
|
||||
|
||||
|
||||
def _create(pkg):
|
||||
def _create(pkg: spack.package_base.PackageBase) -> "Builder":
|
||||
"""Return a new builder object for the package object being passed as argument.
|
||||
|
||||
The function inspects the build-system used by the package object and tries to:
|
||||
@@ -103,7 +85,7 @@ class hierarchy (look at AspellDictPackage for an example of that)
|
||||
to look for build-related methods in the ``*Package``.
|
||||
|
||||
Args:
|
||||
pkg (spack.package_base.PackageBase): package object for which we need a builder
|
||||
pkg: package object for which we need a builder
|
||||
"""
|
||||
package_buildsystem = buildsystem_name(pkg)
|
||||
default_builder_cls = BUILDER_CLS[package_buildsystem]
|
||||
@@ -168,8 +150,8 @@ def __forward(self, *args, **kwargs):
|
||||
# with the same name is defined in the Package, it will override this definition
|
||||
# (when _ForwardToBaseBuilder is initialized)
|
||||
for method_name in (
|
||||
base_cls.phases
|
||||
+ base_cls.legacy_methods
|
||||
base_cls.phases # type: ignore
|
||||
+ base_cls.legacy_methods # type: ignore
|
||||
+ getattr(base_cls, "legacy_long_methods", tuple())
|
||||
+ ("setup_build_environment", "setup_dependent_build_environment")
|
||||
):
|
||||
@@ -181,14 +163,14 @@ def __forward(self):
|
||||
|
||||
return __forward
|
||||
|
||||
for attribute_name in base_cls.legacy_attributes:
|
||||
for attribute_name in base_cls.legacy_attributes: # type: ignore
|
||||
setattr(
|
||||
_ForwardToBaseBuilder,
|
||||
attribute_name,
|
||||
property(forward_property_to_getattr(attribute_name)),
|
||||
)
|
||||
|
||||
class Adapter(base_cls, metaclass=_PackageAdapterMeta):
|
||||
class Adapter(base_cls, metaclass=_PackageAdapterMeta): # type: ignore
|
||||
def __init__(self, pkg):
|
||||
# Deal with custom phases in packages here
|
||||
if hasattr(pkg, "phases"):
|
||||
@@ -213,99 +195,18 @@ def setup_dependent_build_environment(self, env, dependent_spec):
|
||||
return Adapter(pkg)
|
||||
|
||||
|
||||
def buildsystem_name(pkg):
|
||||
def buildsystem_name(pkg: spack.package_base.PackageBase) -> str:
|
||||
"""Given a package object with an associated concrete spec,
|
||||
return the name of its build system.
|
||||
|
||||
Args:
|
||||
pkg (spack.package_base.PackageBase): package for which we want
|
||||
the build system name
|
||||
"""
|
||||
return the name of its build system."""
|
||||
try:
|
||||
return pkg.spec.variants["build_system"].value
|
||||
except KeyError:
|
||||
# We are reading an old spec without the build_system variant
|
||||
return pkg.legacy_buildsystem
|
||||
|
||||
|
||||
class PhaseCallbacksMeta(type):
|
||||
"""Permit to register arbitrary functions during class definition and run them
|
||||
later, before or after a given install phase.
|
||||
|
||||
Each method decorated with ``run_before`` or ``run_after`` gets temporarily
|
||||
stored in a global shared state when a class being defined is parsed by the Python
|
||||
interpreter. At class definition time that temporary storage gets flushed and a list
|
||||
of callbacks is attached to the class being defined.
|
||||
"""
|
||||
|
||||
def __new__(mcs, name, bases, attr_dict):
|
||||
for temporary_stage in (_RUN_BEFORE, _RUN_AFTER):
|
||||
staged_callbacks = temporary_stage.callbacks
|
||||
|
||||
# Here we have an adapter from an old-style package. This means there is no
|
||||
# hierarchy of builders, and every callback that had to be combined between
|
||||
# *Package and *Builder has been combined already by _PackageAdapterMeta
|
||||
if name == "Adapter":
|
||||
continue
|
||||
|
||||
# If we are here we have callbacks. To get a complete list, we accumulate all the
|
||||
# callbacks from base classes, we deduplicate them, then prepend what we have
|
||||
# registered here.
|
||||
#
|
||||
# The order should be:
|
||||
# 1. Callbacks are registered in order within the same class
|
||||
# 2. Callbacks defined in derived classes precede those defined in base
|
||||
# classes
|
||||
callbacks_from_base = []
|
||||
for base in bases:
|
||||
current_callbacks = getattr(base, temporary_stage.attribute_name, None)
|
||||
if not current_callbacks:
|
||||
continue
|
||||
callbacks_from_base.extend(current_callbacks)
|
||||
callbacks_from_base = list(lang.dedupe(callbacks_from_base))
|
||||
# Set the callbacks in this class and flush the temporary stage
|
||||
attr_dict[temporary_stage.attribute_name] = staged_callbacks[:] + callbacks_from_base
|
||||
del temporary_stage.callbacks[:]
|
||||
|
||||
return super(PhaseCallbacksMeta, mcs).__new__(mcs, name, bases, attr_dict)
|
||||
|
||||
@staticmethod
|
||||
def run_after(phase, when=None):
|
||||
"""Decorator to register a function for running after a given phase.
|
||||
|
||||
Args:
|
||||
phase (str): phase after which the function must run.
|
||||
when (str): condition under which the function is run (if None, it is always run).
|
||||
"""
|
||||
|
||||
def _decorator(fn):
|
||||
key = (phase, when)
|
||||
item = (key, fn)
|
||||
_RUN_AFTER.callbacks.append(item)
|
||||
return fn
|
||||
|
||||
return _decorator
|
||||
|
||||
@staticmethod
|
||||
def run_before(phase, when=None):
|
||||
"""Decorator to register a function for running before a given phase.
|
||||
|
||||
Args:
|
||||
phase (str): phase before which the function must run.
|
||||
when (str): condition under which the function is run (if None, it is always run).
|
||||
"""
|
||||
|
||||
def _decorator(fn):
|
||||
key = (phase, when)
|
||||
item = (key, fn)
|
||||
_RUN_BEFORE.callbacks.append(item)
|
||||
return fn
|
||||
|
||||
return _decorator
|
||||
return pkg.legacy_buildsystem # type: ignore
|
||||
|
||||
|
||||
class BuilderMeta(
|
||||
PhaseCallbacksMeta,
|
||||
spack.phase_callbacks.PhaseCallbacksMeta,
|
||||
spack.multimethod.MultiMethodMeta,
|
||||
type(collections.abc.Sequence), # type: ignore
|
||||
):
|
||||
@@ -400,8 +301,12 @@ def __new__(mcs, name, bases, attr_dict):
|
||||
)
|
||||
|
||||
combine_callbacks = _PackageAdapterMeta.combine_callbacks
|
||||
attr_dict[_RUN_BEFORE.attribute_name] = combine_callbacks(_RUN_BEFORE.attribute_name)
|
||||
attr_dict[_RUN_AFTER.attribute_name] = combine_callbacks(_RUN_AFTER.attribute_name)
|
||||
attr_dict[spack.phase_callbacks._RUN_BEFORE.attribute_name] = combine_callbacks(
|
||||
spack.phase_callbacks._RUN_BEFORE.attribute_name
|
||||
)
|
||||
attr_dict[spack.phase_callbacks._RUN_AFTER.attribute_name] = combine_callbacks(
|
||||
spack.phase_callbacks._RUN_AFTER.attribute_name
|
||||
)
|
||||
|
||||
return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict)
|
||||
|
||||
@@ -421,8 +326,8 @@ def __init__(self, name, builder):
|
||||
self.name = name
|
||||
self.builder = builder
|
||||
self.phase_fn = self._select_phase_fn()
|
||||
self.run_before = self._make_callbacks(_RUN_BEFORE.attribute_name)
|
||||
self.run_after = self._make_callbacks(_RUN_AFTER.attribute_name)
|
||||
self.run_before = self._make_callbacks(spack.phase_callbacks._RUN_BEFORE.attribute_name)
|
||||
self.run_after = self._make_callbacks(spack.phase_callbacks._RUN_AFTER.attribute_name)
|
||||
|
||||
def _make_callbacks(self, callbacks_attribute):
|
||||
result = []
|
||||
@@ -483,15 +388,103 @@ def copy(self):
|
||||
return copy.deepcopy(self)
|
||||
|
||||
|
||||
class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
|
||||
"""A builder is a class that, given a package object (i.e. associated with
|
||||
concrete spec), knows how to install it.
|
||||
class BaseBuilder(metaclass=BuilderMeta):
|
||||
"""An interface for builders, without any phases defined. This class is exposed in the package
|
||||
API, so that packagers can create a single class to define ``setup_build_environment`` and
|
||||
``@run_before`` and ``@run_after`` callbacks that can be shared among different builders.
|
||||
|
||||
The builder behaves like a sequence, and when iterated over returns the
|
||||
"phases" of the installation in the correct order.
|
||||
Example:
|
||||
|
||||
Args:
|
||||
pkg (spack.package_base.PackageBase): package object to be built
|
||||
.. code-block:: python
|
||||
|
||||
class AnyBuilder(BaseBuilder):
|
||||
@run_after("install")
|
||||
def fixup_install(self):
|
||||
# do something after the package is installed
|
||||
pass
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
env.set("MY_ENV_VAR", "my_value")
|
||||
|
||||
class CMakeBuilder(cmake.CMakeBuilder, AnyBuilder):
|
||||
pass
|
||||
|
||||
class AutotoolsBuilder(autotools.AutotoolsBuilder, AnyBuilder):
|
||||
pass
|
||||
"""
|
||||
|
||||
def __init__(self, pkg: spack.package_base.PackageBase) -> None:
|
||||
self.pkg = pkg
|
||||
|
||||
@property
|
||||
def spec(self) -> spack.spec.Spec:
|
||||
return self.pkg.spec
|
||||
|
||||
@property
|
||||
def stage(self):
|
||||
return self.pkg.stage
|
||||
|
||||
@property
|
||||
def prefix(self):
|
||||
return self.pkg.prefix
|
||||
|
||||
def setup_build_environment(
|
||||
self, env: spack.util.environment.EnvironmentModifications
|
||||
) -> None:
|
||||
"""Sets up the build environment for a package.
|
||||
|
||||
This method will be called before the current package prefix exists in
|
||||
Spack's store.
|
||||
|
||||
Args:
|
||||
env: environment modifications to be applied when the package is built. Package authors
|
||||
can call methods on it to alter the build environment.
|
||||
"""
|
||||
if not hasattr(super(), "setup_build_environment"):
|
||||
return
|
||||
super().setup_build_environment(env) # type: ignore
|
||||
|
||||
def setup_dependent_build_environment(
|
||||
self, env: spack.util.environment.EnvironmentModifications, dependent_spec: spack.spec.Spec
|
||||
) -> None:
|
||||
"""Sets up the build environment of a package that depends on this one.
|
||||
|
||||
This is similar to ``setup_build_environment``, but it is used to modify the build
|
||||
environment of a package that *depends* on this one.
|
||||
|
||||
This gives packages the ability to set environment variables for the build of the
|
||||
dependent, which can be useful to provide search hints for headers or libraries if they are
|
||||
not in standard locations.
|
||||
|
||||
This method will be called before the dependent package prefix exists in Spack's store.
|
||||
|
||||
Args:
|
||||
env: environment modifications to be applied when the dependent package is built.
|
||||
Package authors can call methods on it to alter the build environment.
|
||||
|
||||
dependent_spec: the spec of the dependent package about to be built. This allows the
|
||||
extendee (self) to query the dependent's state. Note that *this* package's spec is
|
||||
available as ``self.spec``
|
||||
"""
|
||||
if not hasattr(super(), "setup_dependent_build_environment"):
|
||||
return
|
||||
super().setup_dependent_build_environment(env, dependent_spec) # type: ignore
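
For instance, a builder that wants dependents to find its headers and root prefix might override this hook as in the following sketch; the environment variable names and paths are illustrative, not part of the change above:

.. code-block:: python

   def setup_dependent_build_environment(self, env, dependent_spec):
       # hypothetical hints: expose this package's include dir and prefix
       env.prepend_path("CPATH", self.prefix.include)
       env.set("FOO_ROOT", self.prefix)  # FOO_ROOT is a made-up variable name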
|
||||
|
||||
def __repr__(self):
|
||||
fmt = "{name}{/hash:7}"
|
||||
return f"{self.__class__.__name__}({self.spec.format(fmt)})"
|
||||
|
||||
def __str__(self):
|
||||
fmt = "{name}{/hash:7}"
|
||||
return f'"{self.__class__.__name__}" builder for "{self.spec.format(fmt)}"'
|
||||
|
||||
|
||||
class Builder(BaseBuilder, collections.abc.Sequence):
|
||||
"""A builder is a class that, given a package object (i.e. associated with concrete spec),
|
||||
knows how to install it.
|
||||
|
||||
The builder behaves like a sequence, and when iterated over returns the "phases" of the
|
||||
installation in the correct order.
|
||||
"""
|
||||
|
||||
#: Sequence of phases. Must be defined in derived classes
|
||||
@@ -506,95 +499,22 @@ class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
|
||||
build_time_test_callbacks: List[str]
|
||||
install_time_test_callbacks: List[str]
|
||||
|
||||
#: List of glob expressions. Each expression must either be
|
||||
#: absolute or relative to the package source path.
|
||||
#: Matching artifacts found at the end of the build process will be
|
||||
#: copied in the same directory tree as _spack_build_logfile and
|
||||
#: _spack_build_envfile.
|
||||
archive_files: List[str] = []
|
||||
#: List of glob expressions. Each expression must either be absolute or relative to the package
|
||||
#: source path. Matching artifacts found at the end of the build process will be copied in the
|
||||
#: same directory tree as _spack_build_logfile and _spack_build_envfile.
|
||||
@property
|
||||
def archive_files(self) -> List[str]:
|
||||
return []
|
||||
|
||||
def __init__(self, pkg):
|
||||
self.pkg = pkg
|
||||
def __init__(self, pkg: spack.package_base.PackageBase) -> None:
|
||||
super().__init__(pkg)
|
||||
self.callbacks = {}
|
||||
for phase in self.phases:
|
||||
self.callbacks[phase] = InstallationPhase(phase, self)
|
||||
|
||||
@property
|
||||
def spec(self):
|
||||
return self.pkg.spec
|
||||
|
||||
@property
|
||||
def stage(self):
|
||||
return self.pkg.stage
|
||||
|
||||
@property
|
||||
def prefix(self):
|
||||
return self.pkg.prefix
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
"""Sets up the build environment for a package.
|
||||
|
||||
This method will be called before the current package prefix exists in
|
||||
Spack's store.
|
||||
|
||||
Args:
|
||||
env (spack.util.environment.EnvironmentModifications): environment
|
||||
modifications to be applied when the package is built. Package authors
|
||||
can call methods on it to alter the build environment.
|
||||
"""
|
||||
if not hasattr(super(), "setup_build_environment"):
|
||||
return
|
||||
super().setup_build_environment(env)
|
||||
|
||||
def setup_dependent_build_environment(self, env, dependent_spec):
|
||||
"""Sets up the build environment of packages that depend on this one.
|
||||
|
||||
This is similar to ``setup_build_environment``, but it is used to
|
||||
modify the build environments of packages that *depend* on this one.
|
||||
|
||||
This gives packages like Python and others that follow the extension
|
||||
model a way to implement common environment or compile-time settings
|
||||
for dependencies.
|
||||
|
||||
This method will be called before the dependent package prefix exists
|
||||
in Spack's store.
|
||||
|
||||
Examples:
|
||||
1. Installing python modules generally requires ``PYTHONPATH``
|
||||
to point to the ``lib/pythonX.Y/site-packages`` directory in the
|
||||
module's install prefix. This method could be used to set that
|
||||
variable.
|
||||
|
||||
Args:
|
||||
env (spack.util.environment.EnvironmentModifications): environment
|
||||
modifications to be applied when the dependent package is built.
|
||||
Package authors can call methods on it to alter the build environment.
|
||||
|
||||
dependent_spec (spack.spec.Spec): the spec of the dependent package
|
||||
about to be built. This allows the extendee (self) to query
|
||||
the dependent's state. Note that *this* package's spec is
|
||||
available as ``self.spec``
|
||||
"""
|
||||
if not hasattr(super(), "setup_dependent_build_environment"):
|
||||
return
|
||||
super().setup_dependent_build_environment(env, dependent_spec)
|
||||
|
||||
def __getitem__(self, idx):
|
||||
key = self.phases[idx]
|
||||
return self.callbacks[key]
|
||||
|
||||
def __len__(self):
|
||||
return len(self.phases)
|
||||
|
||||
def __repr__(self):
|
||||
msg = "{0}({1})"
|
||||
return msg.format(type(self).__name__, self.pkg.spec.format("{name}/{hash:7}"))
|
||||
|
||||
def __str__(self):
|
||||
msg = '"{0}" builder for "{1}"'
|
||||
return msg.format(type(self).build_system, self.pkg.spec.format("{name}/{hash:7}"))
|
||||
|
||||
|
||||
# Export these names as standalone to be used in packages
|
||||
run_after = PhaseCallbacksMeta.run_after
|
||||
run_before = PhaseCallbacksMeta.run_before
|
||||
|
(File diff suppressed because it is too large)
41  lib/spack/spack/ci/README.md  (new file)
@@ -0,0 +1,41 @@
|
||||
# Spack CI generators
|
||||
|
||||
This document describes how the ci module can be extended to provide novel
|
||||
ci generators. The module currently has only a single generator for gitlab.
|
||||
The unit-tests for the ci module define a small custom generator for testing
|
||||
purposes as well.
|
||||
|
||||
The process of generating a pipeline involves creating a ci-enabled spack
|
||||
environment, activating it, and running `spack ci generate`, possibly with
|
||||
arguments describing things like where the output should be written.
|
||||
|
||||
Internally pipeline generation is broken into two components: general and
|
||||
ci platform specific.
|
||||
|
||||
## General pipeline functionality
|
||||
|
||||
General pipeline functionality includes building a pipeline graph (really,
|
||||
a forest), pruning it in a variety of ways, and gathering attributes for all
|
||||
the generated spec build jobs from the spack configuration.
|
||||
|
||||
All of the above functionality is defined in the `__init__.py` of the top-level
|
||||
ci module, and should be roughly the same for pipelines generated for any
|
||||
platform.
|
||||
|
||||
## CI platform specific functionality
|
||||
|
||||
Functionality specific to CI platforms (e.g. gitlab, gha, etc.) should be
|
||||
defined in a dedicated module. In order to define a generator for a new
|
||||
platform, there are only a few requirements:
|
||||
|
||||
1. add a file under `ci` in which you define a generator method decorated with
|
||||
the `@generator` attribute.
|
||||
|
||||
1. import it from `lib/spack/spack/ci/__init__.py`, so that your new generator
|
||||
is registered.
|
||||
|
||||
1. the generator method must take as arguments PipelineDag, SpackCIConfig,
|
||||
and PipelineOptions objects, in that order.
|
||||
|
||||
1. the generator method must produce an output file containing the
|
||||
generated pipeline.
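
As a rough illustration of these requirements, a new generator module might look like the sketch below. Only the `@generator` decoration and the `(PipelineDag, SpackCIConfig, PipelineOptions)` argument order come from the list above; the module name, the import location of the decorator, and the `output_file` attribute are assumptions made for the example.

```python
# Hypothetical file: lib/spack/spack/ci/myplatform.py
from . import generator  # assumed: the decorator is exported by the ci module


@generator("myplatform")
def generate_myplatform_yaml(pipeline_dag, spack_ci_config, pipeline_options):
    """Emit a toy pipeline file for a hypothetical 'myplatform' CI service."""
    # A real generator would walk pipeline_dag and translate every spec build
    # job; this sketch only demonstrates registration and the output file.
    output = getattr(pipeline_options, "output_file", "pipeline.yml")  # assumed attribute
    with open(output, "w", encoding="utf-8") as fd:
        fd.write("# pipeline generated by the myplatform generator\n")
```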
|
1285  lib/spack/spack/ci/__init__.py  (new file; diff suppressed because it is too large)
825   lib/spack/spack/ci/common.py  (new file)
@@ -0,0 +1,825 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import codecs
|
||||
import copy
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import ssl
|
||||
import sys
|
||||
import time
|
||||
from collections import deque
|
||||
from enum import Enum
|
||||
from typing import Dict, Generator, List, Optional, Set, Tuple
|
||||
from urllib.parse import quote, urlencode, urlparse
|
||||
from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import Singleton, memoized
|
||||
|
||||
import spack.binary_distribution as bindist
|
||||
import spack.config as cfg
|
||||
import spack.deptypes as dt
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.mirrors.mirror
|
||||
import spack.schema
|
||||
import spack.spec
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web as web_util
|
||||
from spack import traverse
|
||||
from spack.reporters import CDash, CDashConfiguration
|
||||
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
|
||||
from spack.reporters.cdash import build_stamp as cdash_build_stamp
|
||||
|
||||
|
||||
def _urlopen():
|
||||
error_handler = web_util.SpackHTTPDefaultErrorHandler()
|
||||
|
||||
# One opener with HTTPS ssl enabled
|
||||
with_ssl = build_opener(
|
||||
HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
|
||||
)
|
||||
|
||||
# One opener with HTTPS ssl disabled
|
||||
without_ssl = build_opener(
|
||||
HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
|
||||
)
|
||||
|
||||
# And dynamically dispatch based on the config:verify_ssl.
|
||||
def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
|
||||
opener = with_ssl if verify_ssl else without_ssl
|
||||
timeout = timeout or cfg.get("config:connect_timeout", 1)
|
||||
return opener.open(fullurl, data, timeout)
|
||||
|
||||
return dispatch_open
|
||||
|
||||
|
||||
IS_WINDOWS = sys.platform == "win32"
|
||||
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
|
||||
_dyn_mapping_urlopener = Singleton(_urlopen)
|
||||
|
||||
|
||||
def copy_files_to_artifacts(src, artifacts_dir):
|
||||
"""
|
||||
Copy file(s) to the given artifacts directory
|
||||
|
||||
Parameters:
|
||||
src (str): the glob-friendly path expression for the file(s) to copy
|
||||
artifacts_dir (str): the destination directory
|
||||
"""
|
||||
try:
|
||||
fs.copy(src, artifacts_dir)
|
||||
except Exception as err:
|
||||
msg = (
|
||||
f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
|
||||
f"exception: {str(err)}"
|
||||
)
|
||||
tty.warn(msg)
|
||||
|
||||
|
||||
def win_quote(quote_str: str) -> str:
|
||||
if IS_WINDOWS:
|
||||
quote_str = f'"{quote_str}"'
|
||||
return quote_str
|
||||
|
||||
|
||||
def _spec_matches(spec, match_string):
|
||||
return spec.intersects(match_string)
|
||||
|
||||
|
||||
def _noop(x):
|
||||
return x
|
||||
|
||||
|
||||
def unpack_script(script_section, op=_noop):
|
||||
script = []
|
||||
for cmd in script_section:
|
||||
if isinstance(cmd, list):
|
||||
for subcmd in cmd:
|
||||
script.append(op(subcmd))
|
||||
else:
|
||||
script.append(op(cmd))
|
||||
|
||||
return script
|
||||
|
||||
|
||||
def ensure_expected_target_path(path: str) -> str:
|
||||
"""Returns passed paths with all Windows path separators exchanged
|
||||
for posix separators
|
||||
|
||||
TODO (johnwparent): Refactor config + cli read/write to deal only in posix style paths
|
||||
"""
|
||||
if path:
|
||||
return path.replace("\\", "/")
|
||||
return path
|
||||
|
||||
|
||||
def update_env_scopes(
|
||||
env: ev.Environment,
|
||||
cli_scopes: List[str],
|
||||
output_file: str,
|
||||
transform_windows_paths: bool = False,
|
||||
) -> None:
|
||||
"""Add any config scopes from cli_scopes which aren't already included in the
|
||||
environment, by reading the yaml, adding the missing includes, and writing the
|
||||
updated yaml back to the same location.
|
||||
"""
|
||||
with open(env.manifest_path, "r", encoding="utf-8") as env_fd:
|
||||
env_yaml_root = syaml.load(env_fd)
|
||||
|
||||
# Add config scopes to environment
|
||||
env_includes = env_yaml_root["spack"].get("include", [])
|
||||
include_scopes: List[str] = []
|
||||
for scope in cli_scopes:
|
||||
if scope not in include_scopes and scope not in env_includes:
|
||||
include_scopes.insert(0, scope)
|
||||
env_includes.extend(include_scopes)
|
||||
env_yaml_root["spack"]["include"] = [
|
||||
ensure_expected_target_path(i) if transform_windows_paths else i for i in env_includes
|
||||
]
|
||||
|
||||
with open(output_file, "w", encoding="utf-8") as fd:
|
||||
syaml.dump_config(env_yaml_root, fd, default_flow_style=False)
|
||||
|
||||
|
||||
def write_pipeline_manifest(specs, src_prefix, dest_prefix, output_file):
|
||||
"""Write out the file describing specs that should be copied"""
|
||||
buildcache_copies = {}
|
||||
|
||||
for release_spec in specs:
|
||||
release_spec_dag_hash = release_spec.dag_hash()
|
||||
# TODO: This assumes signed version of the spec
|
||||
buildcache_copies[release_spec_dag_hash] = [
|
||||
{
|
||||
"src": url_util.join(
|
||||
src_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_name(release_spec, ".spec.json.sig"),
|
||||
),
|
||||
"dest": url_util.join(
|
||||
dest_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_name(release_spec, ".spec.json.sig"),
|
||||
),
|
||||
},
|
||||
{
|
||||
"src": url_util.join(
|
||||
src_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_path_name(release_spec, ".spack"),
|
||||
),
|
||||
"dest": url_util.join(
|
||||
dest_prefix,
|
||||
bindist.build_cache_relative_path(),
|
||||
bindist.tarball_path_name(release_spec, ".spack"),
|
||||
),
|
||||
},
|
||||
]
|
||||
|
||||
target_dir = os.path.dirname(output_file)
|
||||
|
||||
if not os.path.exists(target_dir):
|
||||
os.makedirs(target_dir)
|
||||
|
||||
with open(output_file, "w", encoding="utf-8") as fd:
|
||||
fd.write(json.dumps(buildcache_copies))
|
||||
|
||||
|
||||
class CDashHandler:
|
||||
"""
|
||||
Class for managing CDash data and processing.
|
||||
"""
|
||||
|
||||
def __init__(self, ci_cdash):
|
||||
# start with the gitlab ci configuration
|
||||
self.url = ci_cdash.get("url")
|
||||
self.build_group = ci_cdash.get("build-group")
|
||||
self.project = ci_cdash.get("project")
|
||||
self.site = ci_cdash.get("site")
|
||||
|
||||
# grab the authorization token when available
|
||||
self.auth_token = os.environ.get("SPACK_CDASH_AUTH_TOKEN")
|
||||
if self.auth_token:
|
||||
tty.verbose("Using CDash auth token from environment")
|
||||
|
||||
# append runner description to the site if available
|
||||
runner = os.environ.get("CI_RUNNER_DESCRIPTION")
|
||||
if runner:
|
||||
self.site += f" ({runner})"
|
||||
|
||||
def args(self):
|
||||
return [
|
||||
"--cdash-upload-url",
|
||||
win_quote(self.upload_url),
|
||||
"--cdash-build",
|
||||
win_quote(self.build_name()),
|
||||
"--cdash-site",
|
||||
win_quote(self.site),
|
||||
"--cdash-buildstamp",
|
||||
win_quote(self.build_stamp),
|
||||
]
|
||||
|
||||
def build_name(self, spec: Optional[spack.spec.Spec] = None) -> Optional[str]:
|
||||
"""Returns the CDash build name.
|
||||
|
||||
A name will be generated if the `spec` is provided,
|
||||
otherwise, the value will be retrieved from the environment
|
||||
through the `SPACK_CDASH_BUILD_NAME` variable.
|
||||
|
||||
Returns: (str) given spec's CDash build name."""
|
||||
if spec:
|
||||
build_name = f"{spec.name}@{spec.version}%{spec.compiler} \
|
||||
hash={spec.dag_hash()} arch={spec.architecture} ({self.build_group})"
|
||||
tty.debug(f"Generated CDash build name ({build_name}) from the {spec.name}")
|
||||
return build_name
|
||||
|
||||
env_build_name = os.environ.get("SPACK_CDASH_BUILD_NAME")
|
||||
tty.debug(f"Using CDash build name ({env_build_name}) from the environment")
|
||||
return env_build_name
|
||||
|
||||
@property # type: ignore
|
||||
def build_stamp(self):
|
||||
"""Returns the CDash build stamp.
|
||||
|
||||
The one defined by SPACK_CDASH_BUILD_STAMP environment variable
|
||||
is preferred due to the representation of timestamps; otherwise,
|
||||
one will be built.
|
||||
|
||||
Returns: (str) current CDash build stamp"""
|
||||
build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP")
|
||||
if build_stamp:
|
||||
tty.debug(f"Using build stamp ({build_stamp}) from the environment")
|
||||
return build_stamp
|
||||
|
||||
build_stamp = cdash_build_stamp(self.build_group, time.time())
|
||||
tty.debug(f"Generated new build stamp ({build_stamp})")
|
||||
return build_stamp
|
||||
|
||||
@property # type: ignore
|
||||
@memoized
|
||||
def project_enc(self):
|
||||
tty.debug(f"Encoding project ({type(self.project)}): {self.project})")
|
||||
encode = urlencode({"project": self.project})
|
||||
index = encode.find("=") + 1
|
||||
return encode[index:]
|
||||
|
||||
@property
|
||||
def upload_url(self):
|
||||
url_format = f"{self.url}/submit.php?project={self.project_enc}"
|
||||
return url_format
|
||||
|
||||
def copy_test_results(self, source, dest):
|
||||
"""Copy test results to artifacts directory."""
|
||||
reports = fs.join_path(source, "*_Test*.xml")
|
||||
copy_files_to_artifacts(reports, dest)
|
||||
|
||||
def create_buildgroup(self, opener, headers, url, group_name, group_type):
|
||||
data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}
|
||||
|
||||
enc_data = json.dumps(data).encode("utf-8")
|
||||
|
||||
request = Request(url, data=enc_data, headers=headers)
|
||||
|
||||
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||
response_code = response.getcode()
|
||||
|
||||
if response_code not in [200, 201]:
|
||||
msg = f"Creating buildgroup failed (response code = {response_code})"
|
||||
tty.warn(msg)
|
||||
return None
|
||||
|
||||
response_text = response.read()
|
||||
response_json = json.loads(response_text)
|
||||
build_group_id = response_json["id"]
|
||||
|
||||
return build_group_id
|
||||
|
||||
def populate_buildgroup(self, job_names):
|
||||
url = f"{self.url}/api/v1/buildgroup.php"
|
||||
|
||||
headers = {
|
||||
"Authorization": f"Bearer {self.auth_token}",
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
|
||||
opener = build_opener(HTTPHandler)
|
||||
|
||||
parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
|
||||
group_id = self.create_buildgroup(
|
||||
opener, headers, url, f"Latest {self.build_group}", "Latest"
|
||||
)
|
||||
|
||||
if not parent_group_id or not group_id:
|
||||
msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
|
||||
tty.warn(msg)
|
||||
return
|
||||
|
||||
data = {
|
||||
"dynamiclist": [
|
||||
{"match": name, "parentgroupid": parent_group_id, "site": self.site}
|
||||
for name in job_names
|
||||
]
|
||||
}
|
||||
|
||||
enc_data = json.dumps(data).encode("utf-8")
|
||||
|
||||
request = Request(url, data=enc_data, headers=headers)
|
||||
request.get_method = lambda: "PUT"
|
||||
|
||||
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||
response_code = response.getcode()
|
||||
|
||||
if response_code != 200:
|
||||
msg = f"Error response code ({response_code}) in populate_buildgroup"
|
||||
tty.warn(msg)
|
||||
|
||||
def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):
|
||||
"""Explicitly report skipping testing of a spec (e.g., it's CI
|
||||
configuration identifies it as known to have broken tests or
|
||||
the CI installation failed).
|
||||
|
||||
Args:
|
||||
spec: spec being tested
|
||||
report_dir: directory where the report will be written
|
||||
reason: reason the test is being skipped
|
||||
"""
|
||||
configuration = CDashConfiguration(
|
||||
upload_url=self.upload_url,
|
||||
packages=[spec.name],
|
||||
build=self.build_name(),
|
||||
site=self.site,
|
||||
buildstamp=self.build_stamp,
|
||||
track=None,
|
||||
)
|
||||
reporter = CDash(configuration=configuration)
|
||||
reporter.test_skipped_report(report_dir, spec, reason)
|
||||
|
||||
|
||||
class PipelineType(Enum):
|
||||
COPY_ONLY = 1
|
||||
spack_copy_only = 1
|
||||
PROTECTED_BRANCH = 2
|
||||
spack_protected_branch = 2
|
||||
PULL_REQUEST = 3
|
||||
spack_pull_request = 3
|
||||
|
||||
|
||||
class PipelineOptions:
|
||||
"""A container for all pipeline options that can be specified (whether
|
||||
via cli, config/yaml, or environment variables)"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
env: ev.Environment,
|
||||
buildcache_destination: spack.mirrors.mirror.Mirror,
|
||||
artifacts_root: str = "jobs_scratch_dir",
|
||||
print_summary: bool = True,
|
||||
output_file: Optional[str] = None,
|
||||
check_index_only: bool = False,
|
||||
broken_specs_url: Optional[str] = None,
|
||||
rebuild_index: bool = True,
|
||||
untouched_pruning_dependent_depth: Optional[int] = None,
|
||||
prune_untouched: bool = False,
|
||||
prune_up_to_date: bool = True,
|
||||
prune_external: bool = True,
|
||||
stack_name: Optional[str] = None,
|
||||
pipeline_type: Optional[PipelineType] = None,
|
||||
require_signing: bool = False,
|
||||
cdash_handler: Optional["CDashHandler"] = None,
|
||||
):
|
||||
"""
|
||||
Args:
|
||||
env: Active spack environment
|
||||
buildcache_destination: The mirror where built binaries should be pushed
|
||||
artifacts_root: Path to location where artifacts should be stored
|
||||
print_summary: Print a summary of the scheduled pipeline
|
||||
output_file: Path where output file should be written
|
||||
check_index_only: Only fetch the index or fetch all spec files
|
||||
broken_specs_url: URL where broken specs (on develop) should be reported
|
||||
rebuild_index: Generate a job to rebuild mirror index after rebuilds
|
||||
untouched_pruning_dependent_depth: How many parents to traverse from changed pkg specs
|
||||
prune_untouched: Prune jobs for specs that were unchanged in git history
|
||||
prune_up_to_date: Prune specs from pipeline if binary exists on the mirror
|
||||
prune_external: Prune specs from pipeline if they are external
|
||||
stack_name: Name of spack stack
|
||||
pipeline_type: Type of pipeline running (optional)
|
||||
require_signing: Require buildcache to be signed (fail w/out signing key)
|
||||
cdash_handler: Object for communicating build information with CDash
|
||||
"""
|
||||
self.env = env
|
||||
self.buildcache_destination = buildcache_destination
|
||||
self.artifacts_root = artifacts_root
|
||||
self.print_summary = print_summary
|
||||
self.output_file = output_file
|
||||
self.check_index_only = check_index_only
|
||||
self.broken_specs_url = broken_specs_url
|
||||
self.rebuild_index = rebuild_index
|
||||
self.untouched_pruning_dependent_depth = untouched_pruning_dependent_depth
|
||||
self.prune_untouched = prune_untouched
|
||||
self.prune_up_to_date = prune_up_to_date
|
||||
self.prune_external = prune_external
|
||||
self.stack_name = stack_name
|
||||
self.pipeline_type = pipeline_type
|
||||
self.require_signing = require_signing
|
||||
self.cdash_handler = cdash_handler
|
||||
|
||||
|
||||
class PipelineNode:
|
||||
spec: spack.spec.Spec
|
||||
parents: Set[str]
|
||||
children: Set[str]
|
||||
|
||||
def __init__(self, spec: spack.spec.Spec):
|
||||
self.spec = spec
|
||||
self.parents = set()
|
||||
self.children = set()
|
||||
|
||||
@property
|
||||
def key(self):
|
||||
"""Return key of the stored spec"""
|
||||
return PipelineDag.key(self.spec)
|
||||
|
||||
|
||||
class PipelineDag:
|
||||
"""Turn a list of specs into a simple directed graph, that doesn't keep track
|
||||
of edge types."""
|
||||
|
||||
@classmethod
|
||||
def key(cls, spec: spack.spec.Spec) -> str:
|
||||
return spec.dag_hash()
|
||||
|
||||
def __init__(self, specs: List[spack.spec.Spec]) -> None:
|
||||
# Build dictionary of nodes
|
||||
self.nodes: Dict[str, PipelineNode] = {
|
||||
PipelineDag.key(s): PipelineNode(s)
|
||||
for s in traverse.traverse_nodes(specs, deptype=dt.ALL_TYPES, root=True)
|
||||
}
|
||||
|
||||
# Create edges
|
||||
for edge in traverse.traverse_edges(
|
||||
specs, deptype=dt.ALL_TYPES, root=False, cover="edges"
|
||||
):
|
||||
parent_key = PipelineDag.key(edge.parent)
|
||||
child_key = PipelineDag.key(edge.spec)
|
||||
|
||||
self.nodes[parent_key].children.add(child_key)
|
||||
self.nodes[child_key].parents.add(parent_key)
|
||||
|
||||
def prune(self, node_key: str):
|
||||
"""Remove a node from the graph, and reconnect its parents and children"""
|
||||
node = self.nodes[node_key]
|
||||
for parent in node.parents:
|
||||
self.nodes[parent].children.remove(node_key)
|
||||
self.nodes[parent].children |= node.children
|
||||
for child in node.children:
|
||||
self.nodes[child].parents.remove(node_key)
|
||||
self.nodes[child].parents |= node.parents
|
||||
del self.nodes[node_key]
|
||||
|
||||
def traverse_nodes(
|
||||
self, direction: str = "children"
|
||||
) -> Generator[Tuple[int, PipelineNode], None, None]:
|
||||
"""Yields (depth, node) from the pipeline graph. Traversal is topologically
|
||||
ordered from the roots if ``direction`` is ``children``, or from the leaves
|
||||
if ``direction`` is ``parents``. The yielded depth is the length of the
|
||||
longest path from the starting point to the yielded node."""
|
||||
if direction == "children":
|
||||
get_in_edges = lambda node: node.parents
|
||||
get_out_edges = lambda node: node.children
|
||||
else:
|
||||
get_in_edges = lambda node: node.children
|
||||
get_out_edges = lambda node: node.parents
|
||||
|
||||
sort_key = lambda k: self.nodes[k].spec.name
|
||||
|
||||
out_edges = {k: sorted(get_out_edges(n), key=sort_key) for k, n in self.nodes.items()}
|
||||
num_in_edges = {k: len(get_in_edges(n)) for k, n in self.nodes.items()}
|
||||
|
||||
# Populate a queue with all the nodes that have no incoming edges
|
||||
nodes = deque(
|
||||
sorted(
|
||||
[(0, key) for key in self.nodes.keys() if num_in_edges[key] == 0],
|
||||
key=lambda item: item[1],
|
||||
)
|
||||
)
|
||||
|
||||
while nodes:
|
||||
# Remove the next node, n, from the queue and yield it
|
||||
depth, n_key = nodes.pop()
|
||||
yield (depth, self.nodes[n_key])
|
||||
|
||||
# Remove an in-edge from every node, m, pointed to by an
|
||||
# out-edge from n. If any of those nodes are left with
|
||||
# 0 remaining in-edges, add them to the queue.
|
||||
for m in out_edges[n_key]:
|
||||
num_in_edges[m] -= 1
|
||||
if num_in_edges[m] == 0:
|
||||
nodes.appendleft((depth + 1, m))
|
||||
|
||||
def get_dependencies(self, node: PipelineNode) -> List[PipelineNode]:
|
||||
"""Returns a list of nodes corresponding to the direct dependencies
|
||||
of the given node."""
|
||||
return [self.nodes[k] for k in node.children]
|
||||
|
||||
|
||||
class SpackCIConfig:
|
||||
"""Spack CI object used to generate intermediate representation
|
||||
used by the CI generator(s).
|
||||
"""
|
||||
|
||||
def __init__(self, ci_config):
|
||||
"""Given the information from the ci section of the config
|
||||
and the staged jobs, set up meta data needed for generating Spack
|
||||
CI IR.
|
||||
"""
|
||||
|
||||
self.ci_config = ci_config
|
||||
self.named_jobs = ["any", "build", "copy", "cleanup", "noop", "reindex", "signing"]
|
||||
|
||||
self.ir = {
|
||||
"jobs": {},
|
||||
"rebuild-index": self.ci_config.get("rebuild-index", True),
|
||||
"broken-specs-url": self.ci_config.get("broken-specs-url", None),
|
||||
"broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
|
||||
"target": self.ci_config.get("target", "gitlab"),
|
||||
}
|
||||
jobs = self.ir["jobs"]
|
||||
|
||||
for name in self.named_jobs:
|
||||
# Skip the special named jobs
|
||||
if name not in ["any", "build"]:
|
||||
jobs[name] = self.__init_job("")
|
||||
|
||||
def __init_job(self, release_spec):
|
||||
"""Initialize job object"""
|
||||
job_object = {"spec": release_spec, "attributes": {}}
|
||||
if release_spec:
|
||||
job_vars = job_object["attributes"].setdefault("variables", {})
|
||||
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec.dag_hash()
|
||||
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
|
||||
job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
|
||||
job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
|
||||
job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
|
||||
job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
|
||||
job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")
|
||||
|
||||
return job_object
|
||||
|
||||
def __is_named(self, section):
|
||||
"""Check if a pipeline-gen configuration section is for a named job,
|
||||
and if so return the name, otherwise return None.
|
||||
"""
|
||||
for _name in self.named_jobs:
|
||||
keys = [f"{_name}-job", f"{_name}-job-remove"]
|
||||
if any([key for key in keys if key in section]):
|
||||
return _name
|
||||
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def __job_name(name, suffix=""):
|
||||
"""Compute the name of a named job with appropriate suffix.
|
||||
Valid suffixes are either '-remove' or empty string or None
|
||||
"""
|
||||
assert isinstance(name, str)
|
||||
|
||||
jname = name
|
||||
if suffix:
|
||||
jname = f"{name}-job{suffix}"
|
||||
else:
|
||||
jname = f"{name}-job"
|
||||
|
||||
return jname
|
||||
|
||||
def __apply_submapping(self, dest, spec, section):
|
||||
"""Apply submapping setion to the IR dict"""
|
||||
matched = False
|
||||
only_first = section.get("match_behavior", "first") == "first"
|
||||
|
||||
for match_attrs in reversed(section["submapping"]):
|
||||
attrs = cfg.InternalConfigScope._process_dict_keyname_overrides(match_attrs)
|
||||
for match_string in match_attrs["match"]:
|
||||
if _spec_matches(spec, match_string):
|
||||
matched = True
|
||||
if "build-job-remove" in match_attrs:
|
||||
spack.config.remove_yaml(dest, attrs["build-job-remove"])
|
||||
if "build-job" in match_attrs:
|
||||
spack.schema.merge_yaml(dest, attrs["build-job"])
|
||||
break
|
||||
if matched and only_first:
|
||||
break
|
||||
|
||||
return dest
|
||||
|
||||
# Create jobs for all the pipeline specs
|
||||
def init_pipeline_jobs(self, pipeline: PipelineDag):
|
||||
for _, node in pipeline.traverse_nodes():
|
||||
dag_hash = node.spec.dag_hash()
|
||||
self.ir["jobs"][dag_hash] = self.__init_job(node.spec)
|
||||
|
||||
# Generate IR from the configs
|
||||
def generate_ir(self):
|
||||
"""Generate the IR from the Spack CI configurations."""
|
||||
|
||||
jobs = self.ir["jobs"]
|
||||
|
||||
# Implicit job defaults
|
||||
defaults = [
|
||||
{
|
||||
"build-job": {
|
||||
"script": [
|
||||
"cd {env_dir}",
|
||||
"spack env activate --without-view .",
|
||||
"spack ci rebuild",
|
||||
]
|
||||
}
|
||||
},
|
||||
{"noop-job": {"script": ['echo "All specs already up to date, nothing to rebuild."']}},
|
||||
]
|
||||
|
||||
# Job overrides
|
||||
overrides = [
|
||||
# Reindex script
|
||||
{
|
||||
"reindex-job": {
|
||||
"script:": ["spack buildcache update-index --keys {index_target_mirror}"]
|
||||
}
|
||||
},
|
||||
# Cleanup script
|
||||
{
|
||||
"cleanup-job": {
|
||||
"script:": ["spack -d mirror destroy {mirror_prefix}/$CI_PIPELINE_ID"]
|
||||
}
|
||||
},
|
||||
# Add signing job tags
|
||||
{"signing-job": {"tags": ["aws", "protected", "notary"]}},
|
||||
# Remove reserved tags
|
||||
{"any-job-remove": {"tags": SPACK_RESERVED_TAGS}},
|
||||
]
|
||||
|
||||
pipeline_gen = overrides + self.ci_config.get("pipeline-gen", []) + defaults
|
||||
|
||||
for section in reversed(pipeline_gen):
|
||||
name = self.__is_named(section)
|
||||
has_submapping = "submapping" in section
|
||||
has_dynmapping = "dynamic-mapping" in section
|
||||
section = cfg.InternalConfigScope._process_dict_keyname_overrides(section)
|
||||
|
||||
if name:
|
||||
remove_job_name = self.__job_name(name, suffix="-remove")
|
||||
merge_job_name = self.__job_name(name)
|
||||
do_remove = remove_job_name in section
|
||||
do_merge = merge_job_name in section
|
||||
|
||||
def _apply_section(dest, src):
|
||||
if do_remove:
|
||||
dest = spack.config.remove_yaml(dest, src[remove_job_name])
|
||||
if do_merge:
|
||||
dest = copy.copy(spack.schema.merge_yaml(dest, src[merge_job_name]))
|
||||
|
||||
if name == "build":
|
||||
# Apply attributes to all build jobs
|
||||
for _, job in jobs.items():
|
||||
if job["spec"]:
|
||||
_apply_section(job["attributes"], section)
|
||||
elif name == "any":
|
||||
# Apply section attributes to all jobs
|
||||
for _, job in jobs.items():
|
||||
_apply_section(job["attributes"], section)
|
||||
else:
|
||||
# Create a signing job if there is a script and the job hasn't
|
||||
# been initialized yet
|
||||
if name == "signing" and name not in jobs:
|
||||
if "signing-job" in section:
|
||||
if "script" not in section["signing-job"]:
|
||||
continue
|
||||
else:
|
||||
jobs[name] = self.__init_job("")
|
||||
# Apply attributes to named job
|
||||
_apply_section(jobs[name]["attributes"], section)
|
||||
|
||||
elif has_submapping:
|
||||
# Apply section jobs with specs to match
|
||||
for _, job in jobs.items():
|
||||
if job["spec"]:
|
||||
job["attributes"] = self.__apply_submapping(
|
||||
job["attributes"], job["spec"], section
|
||||
)
|
||||
elif has_dynmapping:
|
||||
mapping = section["dynamic-mapping"]
|
||||
|
||||
dynmap_name = mapping.get("name")
|
||||
|
||||
# Check if this section should be skipped
|
||||
dynmap_skip = os.environ.get("SPACK_CI_SKIP_DYNAMIC_MAPPING")
|
||||
if dynmap_name and dynmap_skip:
|
||||
if re.match(dynmap_skip, dynmap_name):
|
||||
continue
|
||||
|
||||
# Get the endpoint
|
||||
endpoint = mapping["endpoint"]
|
||||
endpoint_url = urlparse(endpoint)
|
||||
|
||||
# Configure the request header
|
||||
header = {"User-Agent": web_util.SPACK_USER_AGENT}
|
||||
header.update(mapping.get("header", {}))
|
||||
|
||||
# Expand header environment variables
|
||||
# i.e. if tokens are passed
for key, value in header.items():
    header[key] = os.path.expandvars(value)
|
||||
|
||||
verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
|
||||
timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))
|
||||
|
||||
required = mapping.get("require", [])
|
||||
allowed = mapping.get("allow", [])
|
||||
ignored = mapping.get("ignore", [])
|
||||
|
||||
# required keys are implicitly allowed
|
||||
allowed = sorted(set(allowed + required))
|
||||
ignored = sorted(set(ignored))
|
||||
required = sorted(set(required))
|
||||
|
||||
# Make sure required things are not also ignored
|
||||
assert not any([ikey in required for ikey in ignored])
|
||||
|
||||
def job_query(job):
|
||||
job_vars = job["attributes"]["variables"]
|
||||
query = (
|
||||
"{SPACK_JOB_SPEC_PKG_NAME}@{SPACK_JOB_SPEC_PKG_VERSION}"
|
||||
# The preceding spaces are required (ref. https://github.com/spack/spack-gantry/blob/develop/docs/api.md#allocation)
|
||||
" {SPACK_JOB_SPEC_VARIANTS}"
|
||||
" arch={SPACK_JOB_SPEC_ARCH}"
|
||||
"%{SPACK_JOB_SPEC_COMPILER_NAME}@{SPACK_JOB_SPEC_COMPILER_VERSION}"
|
||||
).format_map(job_vars)
|
||||
return f"spec={quote(query)}"
|
||||
|
||||
for job in jobs.values():
|
||||
if not job["spec"]:
|
||||
continue
|
||||
|
||||
# Create request for this job
|
||||
query = job_query(job)
|
||||
request = Request(
|
||||
endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
|
||||
)
|
||||
try:
|
||||
response = _dyn_mapping_urlopener(
|
||||
request, verify_ssl=verify_ssl, timeout=timeout
|
||||
)
|
||||
except Exception as e:
|
||||
# For now just ignore any errors from dynamic mapping and continue
|
||||
# This is still experimental, and failures should not stop CI
|
||||
# from running normally
|
||||
tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
|
||||
tty.warn(f"{e}")
|
||||
continue
|
||||
|
||||
config = json.load(codecs.getreader("utf-8")(response))
|
||||
|
||||
# Strip ignore keys
|
||||
if ignored:
|
||||
for key in ignored:
|
||||
if key in config:
|
||||
config.pop(key)
|
||||
|
||||
# Only keep allowed keys
|
||||
clean_config = {}
|
||||
if allowed:
|
||||
for key in allowed:
|
||||
if key in config:
|
||||
clean_config[key] = config[key]
|
||||
else:
|
||||
clean_config = config
|
||||
|
||||
# Verify all of the required keys are present
|
||||
if required:
|
||||
missing_keys = []
|
||||
for key in required:
|
||||
if key not in clean_config.keys():
|
||||
missing_keys.append(key)
|
||||
|
||||
if missing_keys:
|
||||
tty.warn(f"Response missing required keys: {missing_keys}")
|
||||
|
||||
if clean_config:
|
||||
job["attributes"] = spack.schema.merge_yaml(
|
||||
job.get("attributes", {}), clean_config
|
||||
)
|
||||
|
||||
for _, job in jobs.items():
|
||||
if job["spec"]:
|
||||
job["spec"] = job["spec"].name
|
||||
|
||||
return self.ir
|
||||
|
||||
|
||||
class SpackCIError(spack.error.SpackError):
|
||||
def __init__(self, msg):
|
||||
super().__init__(msg)
36  lib/spack/spack/ci/generator_registry.py  (new file)
@@ -0,0 +1,36 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
# Holds all known formatters
|
||||
"""Generators that support writing out pipelines for various CI platforms,
|
||||
using a common pipeline graph definition.
|
||||
"""
|
||||
import spack.error
|
||||
|
||||
_generators = {}
|
||||
|
||||
|
||||
def generator(name):
|
||||
"""Decorator to register a pipeline generator method.
|
||||
A generator method should take PipelineDag, SpackCIConfig, and
|
||||
PipelineOptions arguments, and should produce a pipeline file.
|
||||
"""
|
||||
|
||||
def _decorator(generate_method):
|
||||
_generators[name] = generate_method
|
||||
return generate_method
|
||||
|
||||
return _decorator
|
||||
|
||||
|
||||
def get_generator(name):
|
||||
try:
|
||||
return _generators[name]
|
||||
except KeyError:
|
||||
raise UnknownGeneratorException(name)
|
||||
|
||||
|
||||
class UnknownGeneratorException(spack.error.SpackError):
|
||||
def __init__(self, generator_name):
|
||||
super().__init__(f"No registered generator for {generator_name}")
416  lib/spack/spack/ci/gitlab.py  (new file)
@@ -0,0 +1,416 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import copy
|
||||
import os
|
||||
import shutil
|
||||
from typing import List, Optional
|
||||
|
||||
import ruamel.yaml
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack
|
||||
import spack.binary_distribution as bindist
|
||||
import spack.config as cfg
|
||||
import spack.mirrors.mirror
|
||||
import spack.schema
|
||||
import spack.spec
|
||||
import spack.util.spack_yaml as syaml
|
||||
|
||||
from .common import (
|
||||
SPACK_RESERVED_TAGS,
|
||||
PipelineDag,
|
||||
PipelineOptions,
|
||||
PipelineType,
|
||||
SpackCIConfig,
|
||||
SpackCIError,
|
||||
ensure_expected_target_path,
|
||||
unpack_script,
|
||||
update_env_scopes,
|
||||
write_pipeline_manifest,
|
||||
)
|
||||
from .generator_registry import generator
|
||||
|
||||
# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
|
||||
JOB_RETRY_CONDITIONS = [
|
||||
# "always",
|
||||
"unknown_failure",
|
||||
"script_failure",
|
||||
"api_failure",
|
||||
"stuck_or_timeout_failure",
|
||||
"runner_system_failure",
|
||||
"runner_unsupported",
|
||||
"stale_schedule",
|
||||
# "job_execution_timeout",
|
||||
"archived_failure",
|
||||
"unmet_prerequisites",
|
||||
"scheduler_failure",
|
||||
"data_integrity_failure",
|
||||
]
|
||||
JOB_NAME_FORMAT = "{name}{@version} {/hash}"
|
||||
|
||||
|
||||
def _remove_reserved_tags(tags):
|
||||
"""Convenience function to strip reserved tags from jobs"""
|
||||
return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
|
||||
|
||||
|
||||
def get_job_name(spec: spack.spec.Spec, build_group: Optional[str] = None) -> str:
|
||||
"""Given a spec and possibly a build group, return the job name. If the
|
||||
resulting name is longer than 255 characters, it will be truncated.
|
||||
|
||||
Arguments:
|
||||
spec: Spec job will build
|
||||
build_group: Name of build group this job belongs to (a CDash notion)
|
||||
|
||||
Returns: The job name
|
||||
"""
|
||||
job_name = spec.format(JOB_NAME_FORMAT)
|
||||
|
||||
if build_group:
|
||||
job_name = f"{job_name} {build_group}"
|
||||
|
||||
return job_name[:255]
|
||||
|
||||
|
||||
def maybe_generate_manifest(pipeline: PipelineDag, options: PipelineOptions, manifest_path):
|
||||
# TODO: Consider including only hashes of rebuilt specs in the manifest,
|
||||
# instead of full source and destination urls. Also, consider renaming
|
||||
# the variable that controls whether or not to write the manifest from
|
||||
# "SPACK_COPY_BUILDCACHE" to "SPACK_WRITE_PIPELINE_MANIFEST" or similar.
|
||||
spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
|
||||
if spack_buildcache_copy:
|
||||
buildcache_copy_src_prefix = options.buildcache_destination.fetch_url
|
||||
buildcache_copy_dest_prefix = spack_buildcache_copy
|
||||
|
||||
if options.pipeline_type == PipelineType.COPY_ONLY:
|
||||
manifest_specs = [s for s in options.env.all_specs() if not s.external]
|
||||
else:
|
||||
manifest_specs = [n.spec for _, n in pipeline.traverse_nodes(direction="children")]
|
||||
|
||||
write_pipeline_manifest(
|
||||
manifest_specs, buildcache_copy_src_prefix, buildcache_copy_dest_prefix, manifest_path
|
||||
)
|
||||
|
||||
|
||||
@generator("gitlab")
|
||||
def generate_gitlab_yaml(pipeline: PipelineDag, spack_ci: SpackCIConfig, options: PipelineOptions):
|
||||
"""Given a pipeline graph, job attributes, and pipeline options,
|
||||
write a pipeline that can be consumed by GitLab to the given output file.
|
||||
|
||||
Arguments:
|
||||
pipeline: An already pruned graph of jobs representing all the specs to build
|
||||
spack_ci: An object containing the configured attributes of all jobs in the pipeline
|
||||
options: An object containing all the pipeline options gathered from yaml, env, etc...
|
||||
"""
|
||||
ci_project_dir = os.environ.get("CI_PROJECT_DIR") or os.getcwd()
|
||||
generate_job_name = os.environ.get("CI_JOB_NAME", "job-does-not-exist")
|
||||
generate_pipeline_id = os.environ.get("CI_PIPELINE_ID", "pipeline-does-not-exist")
|
||||
artifacts_root = options.artifacts_root
|
||||
if artifacts_root.startswith(ci_project_dir):
|
||||
artifacts_root = os.path.relpath(artifacts_root, ci_project_dir)
|
||||
pipeline_artifacts_dir = os.path.join(ci_project_dir, artifacts_root)
|
||||
output_file = options.output_file
|
||||
|
||||
if not output_file:
|
||||
output_file = os.path.abspath(".gitlab-ci.yml")
|
||||
else:
|
||||
output_file_path = os.path.abspath(output_file)
|
||||
gen_ci_dir = os.path.dirname(output_file_path)
|
||||
if not os.path.exists(gen_ci_dir):
|
||||
os.makedirs(gen_ci_dir)
|
||||
|
||||
spack_ci_ir = spack_ci.generate_ir()
|
||||
|
||||
concrete_env_dir = os.path.join(pipeline_artifacts_dir, "concrete_environment")
|
||||
|
||||
# Now that we've added the mirrors we know about, they should be properly
|
||||
# reflected in the environment manifest file, so copy that into the
|
||||
# concrete environment directory, along with the spack.lock file.
|
||||
if not os.path.exists(concrete_env_dir):
|
||||
os.makedirs(concrete_env_dir)
|
||||
shutil.copyfile(options.env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml"))
|
||||
shutil.copyfile(options.env.lock_path, os.path.join(concrete_env_dir, "spack.lock"))
|
||||
|
||||
update_env_scopes(
|
||||
options.env,
|
||||
[
|
||||
os.path.relpath(s.path, concrete_env_dir)
|
||||
for s in cfg.scopes().values()
|
||||
if not s.writable
|
||||
and isinstance(s, (cfg.DirectoryConfigScope))
|
||||
and os.path.exists(s.path)
|
||||
],
|
||||
os.path.join(concrete_env_dir, "spack.yaml"),
|
||||
# Here transforming windows paths is only required in the special case
|
||||
# of copy_only_pipelines, a unique scenario where the generate job and
|
||||
# child pipelines are run on different platforms. To make this compatible
|
||||
# w/ Windows, we cannot write Windows style path separators that will be
|
||||
# consumed by the Posix copy job runner.
|
||||
#
|
||||
# TODO (johnwparent): Refactor config + cli read/write to deal only in
|
||||
# posix style paths
|
||||
transform_windows_paths=(options.pipeline_type == PipelineType.COPY_ONLY),
|
||||
)
|
||||
|
||||
job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
|
||||
job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
|
||||
job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
|
||||
user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")
|
||||
|
||||
# We communicate relative paths to the downstream jobs to avoid issues in
|
||||
# situations where the CI_PROJECT_DIR varies between the pipeline
|
||||
# generation job and the rebuild jobs. This can happen when gitlab
|
||||
# checks out the project into a runner-specific directory, for example,
|
||||
# and different runners are picked for generate and rebuild jobs.
|
||||
|
||||
rel_concrete_env_dir = os.path.relpath(concrete_env_dir, ci_project_dir)
|
||||
rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
|
||||
rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
|
||||
rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
|
||||
rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)
|
||||
|
||||
def main_script_replacements(cmd):
|
||||
return cmd.replace("{env_dir}", rel_concrete_env_dir)
|
||||
|
||||
output_object = {}
|
||||
job_id = 0
|
||||
stage_id = 0
|
||||
stages: List[List] = []
|
||||
stage_names = []
|
||||
|
||||
max_length_needs = 0
|
||||
max_needs_job = ""
|
||||
|
||||
if not options.pipeline_type == PipelineType.COPY_ONLY:
|
||||
for level, node in pipeline.traverse_nodes(direction="parents"):
|
||||
stage_id = level
|
||||
if len(stages) == stage_id:
|
||||
stages.append([])
|
||||
stages[stage_id].append(node.spec)
|
||||
stage_name = f"stage-{level}"
|
||||
|
||||
if stage_name not in stage_names:
|
||||
stage_names.append(stage_name)
|
||||
|
||||
release_spec = node.spec
|
||||
release_spec_dag_hash = release_spec.dag_hash()
|
||||
|
||||
job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]
|
||||
|
||||
if not job_object:
|
||||
tty.warn(f"No match found for {release_spec}, skipping it")
|
||||
continue
|
||||
|
||||
if options.pipeline_type is not None:
|
||||
# For spack pipelines "public" and "protected" are reserved tags
|
||||
job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
|
||||
if options.pipeline_type == PipelineType.PROTECTED_BRANCH:
|
||||
job_object["tags"].extend(["protected"])
|
||||
elif options.pipeline_type == PipelineType.PULL_REQUEST:
|
||||
job_object["tags"].extend(["public"])
|
||||
|
||||
if "script" not in job_object:
|
||||
raise AttributeError
|
||||
|
||||
job_object["script"] = unpack_script(job_object["script"], op=main_script_replacements)
|
||||
|
||||
if "before_script" in job_object:
|
||||
job_object["before_script"] = unpack_script(job_object["before_script"])
|
||||
|
||||
if "after_script" in job_object:
|
||||
job_object["after_script"] = unpack_script(job_object["after_script"])
|
||||
|
||||
build_group = options.cdash_handler.build_group if options.cdash_handler else None
|
||||
job_name = get_job_name(release_spec, build_group)
|
||||
|
||||
dep_nodes = pipeline.get_dependencies(node)
|
||||
job_object["needs"] = [
|
||||
{"job": get_job_name(dep_node.spec, build_group), "artifacts": False}
|
||||
for dep_node in dep_nodes
|
||||
]
|
||||
|
||||
job_object["needs"].append(
|
||||
{"job": generate_job_name, "pipeline": f"{generate_pipeline_id}"}
|
||||
)
|
||||
|
||||
job_vars = job_object["variables"]
|
||||
|
||||
# Let downstream jobs know whether the spec needed rebuilding, regardless
|
||||
# of whether DAG pruning was enabled or not.
|
||||
already_built = bindist.get_mirrors_for_spec(spec=release_spec, index_only=True)
|
||||
job_vars["SPACK_SPEC_NEEDS_REBUILD"] = "False" if already_built else "True"
|
||||
|
||||
if options.cdash_handler:
|
||||
build_name = options.cdash_handler.build_name(release_spec)
|
||||
job_vars["SPACK_CDASH_BUILD_NAME"] = build_name
|
||||
build_stamp = options.cdash_handler.build_stamp
|
||||
job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp
|
||||
|
||||
job_object["artifacts"] = spack.schema.merge_yaml(
|
||||
job_object.get("artifacts", {}),
|
||||
{
|
||||
"when": "always",
|
||||
"paths": [
|
||||
rel_job_log_dir,
|
||||
rel_job_repro_dir,
|
||||
rel_job_test_dir,
|
||||
rel_user_artifacts_dir,
|
||||
],
|
||||
},
|
||||
)
|
||||
|
||||
job_object["stage"] = stage_name
|
||||
job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
|
||||
job_object["interruptible"] = True
|
||||
|
||||
length_needs = len(job_object["needs"])
|
||||
if length_needs > max_length_needs:
|
||||
max_length_needs = length_needs
|
||||
max_needs_job = job_name
|
||||
|
||||
output_object[job_name] = job_object
|
||||
job_id += 1
|
||||
|
||||
tty.debug(f"{job_id} build jobs generated in {stage_id} stages")
|
||||
|
||||
if job_id > 0:
|
||||
tty.debug(f"The max_needs_job is {max_needs_job}, with {max_length_needs} needs")
|
||||
|
||||
service_job_retries = {
|
||||
"max": 2,
|
||||
"when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
|
||||
}
|
||||
|
||||
# In some cases, pipeline generation should write a manifest. Currently
|
||||
# the only purpose is to specify a list of sources and destinations for
|
||||
# everything that should be copied.
|
||||
distinguish_stack = options.stack_name if options.stack_name else "rebuilt"
|
||||
manifest_path = os.path.join(
|
||||
pipeline_artifacts_dir, "specs_to_copy", f"copy_{distinguish_stack}_specs.json"
|
||||
)
|
||||
maybe_generate_manifest(pipeline, options, manifest_path)
|
||||
|
||||
if options.pipeline_type == PipelineType.COPY_ONLY:
|
||||
stage_names.append("copy")
|
||||
sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
|
||||
sync_job["stage"] = "copy"
|
||||
sync_job["needs"] = [{"job": generate_job_name, "pipeline": f"{generate_pipeline_id}"}]
|
||||
|
||||
if "variables" not in sync_job:
|
||||
sync_job["variables"] = {}
|
||||
|
||||
sync_job["variables"][
|
||||
"SPACK_COPY_ONLY_DESTINATION"
|
||||
] = options.buildcache_destination.fetch_url
|
||||
|
||||
pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
|
||||
if "buildcache-source" not in pipeline_mirrors:
|
||||
raise SpackCIError("Copy-only pipelines require a mirror named 'buildcache-source'")
|
||||
|
||||
buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
|
||||
sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source
|
||||
sync_job["dependencies"] = []
|
||||
|
||||
output_object["copy"] = sync_job
|
||||
job_id += 1
|
||||
|
||||
if job_id > 0:
|
||||
if (
|
||||
"script" in spack_ci_ir["jobs"]["signing"]["attributes"]
|
||||
and options.pipeline_type == PipelineType.PROTECTED_BRANCH
|
||||
):
|
||||
# External signing: generate a job to check and sign binary pkgs
|
||||
stage_names.append("stage-sign-pkgs")
|
||||
signing_job = spack_ci_ir["jobs"]["signing"]["attributes"]
|
||||
|
||||
signing_job["script"] = unpack_script(signing_job["script"])
|
||||
|
||||
signing_job["stage"] = "stage-sign-pkgs"
|
||||
signing_job["when"] = "always"
|
||||
signing_job["retry"] = {"max": 2, "when": ["always"]}
|
||||
signing_job["interruptible"] = True
|
||||
if "variables" not in signing_job:
|
||||
signing_job["variables"] = {}
|
||||
signing_job["variables"][
|
||||
"SPACK_BUILDCACHE_DESTINATION"
|
||||
] = options.buildcache_destination.push_url
|
||||
signing_job["dependencies"] = []
|
||||
|
||||
output_object["sign-pkgs"] = signing_job
|
||||
|
||||
if options.rebuild_index:
|
||||
# Add a final job to regenerate the index
|
||||
stage_names.append("stage-rebuild-index")
|
||||
final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]
|
||||
|
||||
final_job["stage"] = "stage-rebuild-index"
|
||||
target_mirror = options.buildcache_destination.push_url
|
||||
final_job["script"] = unpack_script(
|
||||
final_job["script"],
|
||||
op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror),
|
||||
)
|
||||
|
||||
final_job["when"] = "always"
|
||||
final_job["retry"] = service_job_retries
|
||||
final_job["interruptible"] = True
|
||||
final_job["dependencies"] = []
|
||||
|
||||
output_object["rebuild-index"] = final_job
|
||||
|
||||
output_object["stages"] = stage_names
|
||||
|
||||
# Capture the version of Spack used to generate the pipeline, that can be
|
||||
# passed to `git checkout` for version consistency. If we aren't in a Git
|
||||
# repository, presume we are a Spack release and use the Git tag instead.
|
||||
spack_version = spack.get_version()
|
||||
version_to_clone = spack.get_spack_commit() or f"v{spack.spack_version}"
|
||||
|
||||
rebuild_everything = not options.prune_up_to_date and not options.prune_untouched
|
||||
|
||||
output_object["variables"] = {
|
||||
"SPACK_ARTIFACTS_ROOT": artifacts_root,
|
||||
"SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
|
||||
"SPACK_VERSION": spack_version,
|
||||
"SPACK_CHECKOUT_VERSION": version_to_clone,
|
||||
"SPACK_JOB_LOG_DIR": rel_job_log_dir,
|
||||
"SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
|
||||
"SPACK_JOB_TEST_DIR": rel_job_test_dir,
|
||||
"SPACK_PIPELINE_TYPE": options.pipeline_type.name if options.pipeline_type else "None",
|
||||
"SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
|
||||
"SPACK_REBUILD_CHECK_UP_TO_DATE": str(options.prune_up_to_date),
|
||||
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
|
||||
"SPACK_REQUIRE_SIGNING": str(options.require_signing),
|
||||
}
|
||||
|
||||
if options.stack_name:
|
||||
output_object["variables"]["SPACK_CI_STACK_NAME"] = options.stack_name
|
||||
|
||||
output_vars = output_object["variables"]
|
||||
for item, val in output_vars.items():
|
||||
output_vars[item] = ensure_expected_target_path(val)
|
||||
|
||||
else:
|
||||
# No jobs were generated
|
||||
noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
|
||||
# If this job fails ignore the status and carry on
|
||||
noop_job["retry"] = 0
|
||||
noop_job["allow_failure"] = True
|
||||
|
||||
tty.debug("No specs to rebuild, generating no-op job")
|
||||
output_object = {"no-specs-to-rebuild": noop_job}
|
||||
|
||||
# Ensure the child pipeline always runs
|
||||
output_object["workflow"] = {"rules": [{"when": "always"}]}
|
||||
|
||||
sorted_output = {}
|
||||
for output_key, output_value in sorted(output_object.items()):
|
||||
sorted_output[output_key] = output_value
|
||||
|
||||
# Minimize yaml output size through use of anchors
|
||||
syaml.anchorify(sorted_output)
|
||||
|
||||
with open(output_file, "w", encoding="utf-8") as f:
|
||||
ruamel.yaml.YAML().dump(sorted_output, f)
|
@@ -4,12 +4,13 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import argparse
|
||||
import difflib
|
||||
import importlib
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from collections import Counter
|
||||
from typing import List, Union
|
||||
from typing import List, Optional, Union
|
||||
|
||||
import llnl.string
|
||||
import llnl.util.tty as tty
|
||||
@@ -23,15 +24,18 @@
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.extensions
|
||||
import spack.parser
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.spec_parser
|
||||
import spack.store
|
||||
import spack.traverse as traverse
|
||||
import spack.user_environment as uenv
|
||||
import spack.util.spack_json as sjson
|
||||
import spack.util.spack_yaml as syaml
|
||||
|
||||
from ..enums import InstallRecordStatus
|
||||
|
||||
# cmd has a submodule called "list" so preserve the python list module
|
||||
python_list = list
|
||||
|
||||
@@ -122,6 +126,8 @@ def get_module(cmd_name):
|
||||
tty.debug("Imported {0} from built-in commands".format(pname))
|
||||
except ImportError:
|
||||
module = spack.extensions.get_module(cmd_name)
|
||||
if not module:
|
||||
raise CommandNotFoundError(cmd_name)
|
||||
|
||||
attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
|
||||
attr_setdefault(module, DESCRIPTION, "")
|
||||
@@ -157,12 +163,12 @@ def quote_kvp(string: str) -> str:
|
||||
or ``name==``, and we assume the rest of the argument is the value. This covers the
|
||||
common cases of passing flags, e.g., ``cflags="-O2 -g"`` on the command line.
|
||||
"""
|
||||
match = spack.parser.SPLIT_KVP.match(string)
|
||||
match = spack.spec_parser.SPLIT_KVP.match(string)
|
||||
if not match:
|
||||
return string
|
||||
|
||||
key, delim, value = match.groups()
|
||||
return f"{key}{delim}{spack.parser.quote_if_needed(value)}"
|
||||
return f"{key}{delim}{spack.spec_parser.quote_if_needed(value)}"
|
||||
|
||||
|
||||
def parse_specs(
|
||||
@@ -174,7 +180,7 @@ def parse_specs(
|
||||
args = [args] if isinstance(args, str) else args
|
||||
arg_string = " ".join([quote_kvp(arg) for arg in args])
|
||||
|
||||
specs = spack.parser.parse(arg_string)
|
||||
specs = spack.spec_parser.parse(arg_string)
|
||||
if not concretize:
|
||||
return specs
|
||||
|
||||
@@ -266,39 +272,48 @@ def matching_specs_from_env(specs):
|
||||
return _concretize_spec_pairs(spec_pairs + additional_concrete_specs)[: len(spec_pairs)]
|
||||
|
||||
|
||||
def disambiguate_spec(spec, env, local=False, installed=True, first=False):
|
||||
def disambiguate_spec(
|
||||
spec: spack.spec.Spec,
|
||||
env: Optional[ev.Environment],
|
||||
local: bool = False,
|
||||
installed: Union[bool, InstallRecordStatus] = True,
|
||||
first: bool = False,
|
||||
) -> spack.spec.Spec:
|
||||
"""Given a spec, figure out which installed package it refers to.
|
||||
|
||||
Arguments:
|
||||
spec (spack.spec.Spec): a spec to disambiguate
|
||||
env (spack.environment.Environment): a spack environment,
|
||||
if one is active, or None if no environment is active
|
||||
local (bool): do not search chained spack instances
|
||||
installed (bool or spack.database.InstallStatus or typing.Iterable):
|
||||
install status argument passed to database query.
|
||||
See ``spack.database.Database._query`` for details.
|
||||
Args:
|
||||
spec: a spec to disambiguate
|
||||
env: a spack environment, if one is active, or None if no environment is active
|
||||
local: do not search chained spack instances
|
||||
installed: install status argument passed to database query.
|
||||
first: returns the first matching spec, even if more than one match is found
|
||||
"""
|
||||
hashes = env.all_hashes() if env else None
|
||||
return disambiguate_spec_from_hashes(spec, hashes, local, installed, first)
|
||||
|
||||
|
||||
def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True, first=False):
|
||||
def disambiguate_spec_from_hashes(
|
||||
spec: spack.spec.Spec,
|
||||
hashes: List[str],
|
||||
local: bool = False,
|
||||
installed: Union[bool, InstallRecordStatus] = True,
|
||||
first: bool = False,
|
||||
) -> spack.spec.Spec:
|
||||
"""Given a spec and a list of hashes, get concrete spec the spec refers to.
|
||||
|
||||
Arguments:
|
||||
spec (spack.spec.Spec): a spec to disambiguate
|
||||
hashes (typing.Iterable): a set of hashes of specs among which to disambiguate
|
||||
local (bool): do not search chained spack instances
|
||||
installed (bool or spack.database.InstallStatus or typing.Iterable):
|
||||
install status argument passed to database query.
|
||||
See ``spack.database.Database._query`` for details.
|
||||
spec: a spec to disambiguate
|
||||
hashes: a set of hashes of specs among which to disambiguate
|
||||
local: if True, do not search chained spack instances
|
||||
installed: install status argument passed to database query.
|
||||
first: returns the first matching spec, even if more than one match is found
|
||||
"""
|
||||
if local:
|
||||
matching_specs = spack.store.STORE.db.query_local(spec, hashes=hashes, installed=installed)
|
||||
else:
|
||||
matching_specs = spack.store.STORE.db.query(spec, hashes=hashes, installed=installed)
|
||||
if not matching_specs:
|
||||
tty.die("Spec '%s' matches no installed packages." % spec)
|
||||
tty.die(f"Spec '{spec}' matches no installed packages.")
|
||||
|
||||
elif first:
|
||||
return matching_specs[0]
|
||||
@@ -679,3 +694,24 @@ def find_environment(args):
|
||||
def first_line(docstring):
|
||||
"""Return the first line of the docstring."""
|
||||
return docstring.split("\n")[0]
|
||||
|
||||
|
||||
class CommandNotFoundError(spack.error.SpackError):
|
||||
"""Exception class thrown when a requested command is not recognized as
|
||||
such.
|
||||
"""
|
||||
|
||||
def __init__(self, cmd_name):
|
||||
msg = (
|
||||
f"{cmd_name} is not a recognized Spack command or extension command; "
|
||||
"check with `spack commands`."
|
||||
)
|
||||
long_msg = None
|
||||
|
||||
similar = difflib.get_close_matches(cmd_name, all_commands())
|
||||
|
||||
if 1 <= len(similar) <= 5:
|
||||
long_msg = "\nDid you mean one of the following commands?\n "
|
||||
long_msg += "\n ".join(similar)
|
||||
|
||||
super().__init__(msg, long_msg)
|
||||
|
@@ -16,7 +16,7 @@
|
||||
import spack.bootstrap.config
|
||||
import spack.bootstrap.core
|
||||
import spack.config
|
||||
import spack.mirror
|
||||
import spack.mirrors.utils
|
||||
import spack.spec
|
||||
import spack.stage
|
||||
import spack.util.path
|
||||
@@ -29,7 +29,7 @@
|
||||
|
||||
|
||||
# Tarball to be downloaded if binary packages are requested in a local mirror
|
||||
BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.4/bootstrap-buildcache.tar.gz"
|
||||
BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.6/bootstrap-buildcache.tar.gz"
|
||||
|
||||
#: Subdirectory where to create the mirror
|
||||
LOCAL_MIRROR_DIR = "bootstrap_cache"
|
||||
@@ -51,9 +51,9 @@
|
||||
},
|
||||
}
|
||||
|
||||
CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/clingo.json"
|
||||
GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/gnupg.json"
|
||||
PATCHELF_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/patchelf.json"
|
||||
CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.6/clingo.json"
|
||||
GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.6/gnupg.json"
|
||||
PATCHELF_JSON = "$spack/share/spack/bootstrap/github-actions-v0.6/patchelf.json"
|
||||
|
||||
# Metadata for a generated source mirror
|
||||
SOURCE_METADATA = {
|
||||
@@ -400,7 +400,7 @@ def _mirror(args):
|
||||
llnl.util.tty.set_msg_enabled(False)
|
||||
spec = spack.spec.Spec(spec_str).concretized()
|
||||
for node in spec.traverse():
|
||||
spack.mirror.create(mirror_dir, [node])
|
||||
spack.mirrors.utils.create(mirror_dir, [node])
|
||||
llnl.util.tty.set_msg_enabled(True)
|
||||
|
||||
if args.binary_packages:
|
||||
@@ -419,7 +419,7 @@ def write_metadata(subdir, metadata):
|
||||
metadata_rel_dir = os.path.join("metadata", subdir)
|
||||
metadata_yaml = os.path.join(args.root_dir, metadata_rel_dir, "metadata.yaml")
|
||||
llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
|
||||
with open(metadata_yaml, mode="w") as f:
|
||||
with open(metadata_yaml, mode="w", encoding="utf-8") as f:
|
||||
spack.util.spack_yaml.dump(metadata, stream=f)
|
||||
return os.path.dirname(metadata_yaml), metadata_rel_dir
|
||||
|
||||
|
@@ -21,7 +21,7 @@
|
||||
import spack.deptypes as dt
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.mirror
|
||||
import spack.mirrors.mirror
|
||||
import spack.oci.oci
|
||||
import spack.spec
|
||||
import spack.stage
|
||||
@@ -34,6 +34,8 @@
|
||||
from spack.cmd.common import arguments
|
||||
from spack.spec import Spec, save_dependency_specfiles
|
||||
|
||||
from ..enums import InstallRecordStatus
|
||||
|
||||
description = "create, download and install binary packages"
|
||||
section = "packaging"
|
||||
level = "long"
|
||||
@@ -308,7 +310,10 @@ def setup_parser(subparser: argparse.ArgumentParser):
|
||||
|
||||
def _matching_specs(specs: List[Spec]) -> List[Spec]:
|
||||
"""Disambiguate specs and return a list of matching specs"""
|
||||
return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]
|
||||
return [
|
||||
spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=InstallRecordStatus.ANY)
|
||||
for s in specs
|
||||
]
|
||||
|
||||
|
||||
def _format_spec(spec: Spec) -> str:
|
||||
@@ -387,7 +392,7 @@ def push_fn(args):
|
||||
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
|
||||
|
||||
mirror = args.mirror
|
||||
assert isinstance(mirror, spack.mirror.Mirror)
|
||||
assert isinstance(mirror, spack.mirrors.mirror.Mirror)
|
||||
|
||||
push_url = mirror.push_url
|
||||
|
||||
@@ -726,7 +731,7 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
|
||||
deduped_manifest = {}
|
||||
|
||||
for manifest_path in manifest_file_list:
|
||||
with open(manifest_path) as fd:
|
||||
with open(manifest_path, encoding="utf-8") as fd:
|
||||
manifest = json.loads(fd.read())
|
||||
for spec_hash, copy_list in manifest.items():
|
||||
# Last duplicate hash wins
|
||||
@@ -745,7 +750,7 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
|
||||
copy_buildcache_file(copy_file["src"], dest)
|
||||
|
||||
|
||||
def update_index(mirror: spack.mirror.Mirror, update_keys=False):
|
||||
def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False):
|
||||
# Special case OCI images for now.
|
||||
try:
|
||||
image_ref = spack.oci.oci.image_from_mirror(mirror)
|
||||
|
@@ -253,7 +253,7 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool
|
||||
if match:
|
||||
new_versions.append((Version(match.group(1)), ver_line))
|
||||
|
||||
with open(filename, "r+") as f:
|
||||
with open(filename, "r+", encoding="utf-8") as f:
|
||||
contents = f.read()
|
||||
split_contents = version_statement_re.split(contents)
|
||||
|
||||
|
@@ -6,7 +6,6 @@
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import warnings
|
||||
from urllib.parse import urlparse, urlunparse
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
@@ -17,10 +16,11 @@
|
||||
import spack.ci as spack_ci
|
||||
import spack.cmd
|
||||
import spack.cmd.buildcache as buildcache
|
||||
import spack.cmd.common.arguments
|
||||
import spack.config as cfg
|
||||
import spack.environment as ev
|
||||
import spack.hash_types as ht
|
||||
import spack.mirror
|
||||
import spack.mirrors.mirror
|
||||
import spack.util.gpg as gpg_util
|
||||
import spack.util.timer as timer
|
||||
import spack.util.url as url_util
|
||||
@@ -62,22 +62,8 @@ def setup_parser(subparser):
|
||||
"path to the file where generated jobs file should be written. "
|
||||
"default is .gitlab-ci.yml in the root of the repository",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--optimize",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="(DEPRECATED) optimize the gitlab yaml file for size\n\n"
|
||||
"run the generated document through a series of optimization passes "
|
||||
"designed to reduce the size of the generated file",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--dependencies",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
|
||||
)
|
||||
prune_group = generate.add_mutually_exclusive_group()
|
||||
prune_group.add_argument(
|
||||
prune_dag_group = generate.add_mutually_exclusive_group()
|
||||
prune_dag_group.add_argument(
|
||||
"--prune-dag",
|
||||
action="store_true",
|
||||
dest="prune_dag",
|
||||
@@ -85,7 +71,7 @@ def setup_parser(subparser):
|
||||
help="skip up-to-date specs\n\n"
|
||||
"do not generate jobs for specs that are up-to-date on the mirror",
|
||||
)
|
||||
prune_group.add_argument(
|
||||
prune_dag_group.add_argument(
|
||||
"--no-prune-dag",
|
||||
action="store_false",
|
||||
dest="prune_dag",
|
||||
@@ -93,6 +79,23 @@ def setup_parser(subparser):
|
||||
help="process up-to-date specs\n\n"
|
||||
"generate jobs for specs even when they are up-to-date on the mirror",
|
||||
)
|
||||
prune_ext_group = generate.add_mutually_exclusive_group()
|
||||
prune_ext_group.add_argument(
|
||||
"--prune-externals",
|
||||
action="store_true",
|
||||
dest="prune_externals",
|
||||
default=True,
|
||||
help="skip external specs\n\n"
|
||||
"do not generate jobs for specs that are marked as external",
|
||||
)
|
||||
prune_ext_group.add_argument(
|
||||
"--no-prune-externals",
|
||||
action="store_false",
|
||||
dest="prune_externals",
|
||||
default=True,
|
||||
help="process external specs\n\n"
|
||||
"generate jobs for specs even when they are marked as external",
|
||||
)
|
||||
generate.add_argument(
|
||||
"--check-index-only",
|
||||
action="store_true",
|
||||
@@ -108,14 +111,18 @@ def setup_parser(subparser):
|
||||
)
|
||||
generate.add_argument(
|
||||
"--artifacts-root",
|
||||
default=None,
|
||||
default="jobs_scratch_dir",
|
||||
help="path to the root of the artifacts directory\n\n"
|
||||
"if provided, concrete environment files (spack.yaml, spack.lock) will be generated under "
|
||||
"this directory. their location will be passed to generated child jobs through the "
|
||||
"SPACK_CONCRETE_ENVIRONMENT_PATH variable",
|
||||
"The spack ci module assumes it will normally be run from within your project "
|
||||
"directory, wherever that is checked out to run your ci. The artifacts root directory "
|
||||
"should specifiy a name that can safely be used for artifacts within your project "
|
||||
"directory.",
|
||||
)
|
||||
generate.set_defaults(func=ci_generate)
|
||||
|
||||
spack.cmd.common.arguments.add_concretizer_args(generate)
|
||||
spack.cmd.common.arguments.add_common_arguments(generate, ["jobs"])
|
||||
|
||||
# Rebuild the buildcache index associated with the mirror in the
|
||||
# active, gitlab-enabled environment.
|
||||
index = subparsers.add_parser(
|
||||
@@ -145,6 +152,7 @@ def setup_parser(subparser):
|
||||
help="stop stand-alone tests after the first failure",
|
||||
)
|
||||
rebuild.set_defaults(func=ci_rebuild)
|
||||
spack.cmd.common.arguments.add_common_arguments(rebuild, ["jobs"])
|
||||
|
||||
# Facilitate reproduction of a failed CI build job
|
||||
reproduce = subparsers.add_parser(
|
||||
@@ -187,42 +195,8 @@ def ci_generate(args):
|
||||
before invoking this command. the value must be the CDash authorization token needed to create
|
||||
a build group and register all generated jobs under it
|
||||
"""
|
||||
if args.optimize:
|
||||
warnings.warn(
|
||||
"The --optimize option has been deprecated, and currently has no effect. "
|
||||
"It will be removed in Spack v0.24."
|
||||
)
|
||||
|
||||
if args.dependencies:
|
||||
warnings.warn(
|
||||
"The --dependencies option has been deprecated, and currently has no effect. "
|
||||
"It will be removed in Spack v0.24."
|
||||
)
|
||||
|
||||
env = spack.cmd.require_active_env(cmd_name="ci generate")
|
||||
|
||||
output_file = args.output_file
|
||||
prune_dag = args.prune_dag
|
||||
index_only = args.index_only
|
||||
artifacts_root = args.artifacts_root
|
||||
|
||||
if not output_file:
|
||||
output_file = os.path.abspath(".gitlab-ci.yml")
|
||||
else:
|
||||
output_file_path = os.path.abspath(output_file)
|
||||
gen_ci_dir = os.path.dirname(output_file_path)
|
||||
if not os.path.exists(gen_ci_dir):
|
||||
os.makedirs(gen_ci_dir)
|
||||
|
||||
# Generate the jobs
|
||||
spack_ci.generate_gitlab_ci_yaml(
|
||||
env,
|
||||
True,
|
||||
output_file,
|
||||
prune_dag=prune_dag,
|
||||
check_index_only=index_only,
|
||||
artifacts_root=artifacts_root,
|
||||
)
|
||||
spack_ci.generate_pipeline(env, args)
|
||||
|
||||
|
||||
def ci_reindex(args):
|
||||
@@ -240,7 +214,7 @@ def ci_reindex(args):
|
||||
ci_mirrors = yaml_root["mirrors"]
|
||||
mirror_urls = [url for url in ci_mirrors.values()]
|
||||
remote_mirror_url = mirror_urls[0]
|
||||
mirror = spack.mirror.Mirror(remote_mirror_url)
|
||||
mirror = spack.mirrors.mirror.Mirror(remote_mirror_url)
|
||||
|
||||
buildcache.update_index(mirror, update_keys=True)
|
||||
|
||||
@@ -328,7 +302,7 @@ def ci_rebuild(args):
|
||||
|
||||
full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False
|
||||
|
||||
pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
|
||||
pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
|
||||
buildcache_destination = None
|
||||
if "buildcache-destination" not in pipeline_mirrors:
|
||||
tty.die("spack ci rebuild requires a mirror named 'buildcache-destination")
|
||||
@@ -387,7 +361,7 @@ def ci_rebuild(args):
|
||||
# Write this job's spec json into the reproduction directory, and it will
|
||||
# also be used in the generated "spack install" command to install the spec
|
||||
tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
|
||||
with open(job_spec_json_path, "w") as fd:
|
||||
with open(job_spec_json_path, "w", encoding="utf-8") as fd:
|
||||
fd.write(job_spec.to_json(hash=ht.dag_hash))
|
||||
|
||||
# Write some other details to aid in reproduction into an artifact
|
||||
@@ -397,7 +371,7 @@ def ci_rebuild(args):
|
||||
"job_spec_json": job_spec_json_file,
|
||||
"ci_project_dir": ci_project_dir,
|
||||
}
|
||||
with open(repro_file, "w") as fd:
|
||||
with open(repro_file, "w", encoding="utf-8") as fd:
|
||||
fd.write(json.dumps(repro_details))
|
||||
|
||||
# Write information about spack into an artifact in the repro dir
|
||||
@@ -433,14 +407,19 @@ def ci_rebuild(args):
|
||||
if not config["verify_ssl"]:
|
||||
spack_cmd.append("-k")
|
||||
|
||||
install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']
|
||||
install_args = [
|
||||
f'--use-buildcache={spack_ci.common.win_quote("package:never,dependencies:only")}'
|
||||
]
|
||||
|
||||
can_verify = spack_ci.can_verify_binaries()
|
||||
verify_binaries = can_verify and spack_is_pr_pipeline is False
|
||||
if not verify_binaries:
|
||||
install_args.append("--no-check-signature")
|
||||
|
||||
slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())
|
||||
if args.jobs:
|
||||
install_args.append(f"-j{args.jobs}")
|
||||
|
||||
slash_hash = spack_ci.common.win_quote("/" + job_spec.dag_hash())
|
||||
|
||||
# Arguments when installing the root from sources
|
||||
deps_install_args = install_args + ["--only=dependencies"]
|
||||
@@ -605,7 +584,7 @@ def ci_rebuild(args):
|
||||
|
||||
rebuild_timer.stop()
|
||||
try:
|
||||
with open("install_timers.json", "w") as timelog:
|
||||
with open("install_timers.json", "w", encoding="utf-8") as timelog:
|
||||
extra_attributes = {"name": ".ci-rebuild"}
|
||||
rebuild_timer.write_json(timelog, extra_attributes=extra_attributes)
|
||||
except Exception as e:
|
||||
|
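Taken together, the spack ci hunks drop the deprecated --optimize and --dependencies options, split the pruning switches into --prune-dag/--no-prune-dag and --prune-externals/--no-prune-externals groups, default --artifacts-root to jobs_scratch_dir, and shrink ci_generate to a thin wrapper. A sketch of what the command body now amounts to, following the hunk above:

def ci_generate(args):
    # an active environment is still required; everything else is delegated
    # to the new pipeline generator in spack.ci
    env = spack.cmd.require_active_env(cmd_name="ci generate")
    spack_ci.generate_pipeline(env, args)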
@@ -743,7 +743,7 @@ def rst(args: Namespace, out: IO) -> None:
|
||||
# extract cross-refs of the form `_cmd-spack-<cmd>:` from rst files
|
||||
documented_commands: Set[str] = set()
|
||||
for filename in args.rst_files:
|
||||
with open(filename) as f:
|
||||
with open(filename, encoding="utf-8") as f:
|
||||
for line in f:
|
||||
match = re.match(r"\.\. _cmd-(spack-.*):", line)
|
||||
if match:
|
||||
@@ -815,7 +815,7 @@ def prepend_header(args: Namespace, out: IO) -> None:
|
||||
if not args.header:
|
||||
return
|
||||
|
||||
with open(args.header) as header:
|
||||
with open(args.header, encoding="utf-8") as header:
|
||||
out.write(header.read())
|
||||
|
||||
|
||||
@@ -836,7 +836,7 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
|
||||
|
||||
if args.update:
|
||||
tty.msg(f"Updating file: {args.update}")
|
||||
with open(args.update, "w") as f:
|
||||
with open(args.update, "w", encoding="utf-8") as f:
|
||||
prepend_header(args, f)
|
||||
formatter(args, f)
|
||||
|
||||
|
@@ -14,7 +14,8 @@
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.environment as ev
|
||||
import spack.mirror
|
||||
import spack.mirrors.mirror
|
||||
import spack.mirrors.utils
|
||||
import spack.reporters
|
||||
import spack.spec
|
||||
import spack.store
|
||||
@@ -168,7 +169,7 @@ def installed_specs(args):
|
||||
else:
|
||||
packages = []
|
||||
for file in args.specfiles:
|
||||
with open(file, "r") as f:
|
||||
with open(file, "r", encoding="utf-8") as f:
|
||||
s = spack.spec.Spec.from_yaml(f)
|
||||
packages.append(s.format())
|
||||
return packages
|
||||
@@ -528,6 +529,7 @@ def __call__(self, parser, namespace, values, option_string):
|
||||
# the const from the constructor or a value from the CLI.
|
||||
# Note that this is only called if the argument is actually
|
||||
# specified on the command line.
|
||||
spack.config.CONFIG.ensure_scope_ordering()
|
||||
spack.config.set(self.config_path, self.const, scope="command_line")
|
||||
|
||||
|
||||
@@ -689,31 +691,31 @@ def mirror_name_or_url(m):
|
||||
|
||||
# If there's a \ or / in the name, it's interpreted as a path or url.
|
||||
if "/" in m or "\\" in m or m in (".", ".."):
|
||||
return spack.mirror.Mirror(m)
|
||||
return spack.mirrors.mirror.Mirror(m)
|
||||
|
||||
# Otherwise, the named mirror is required to exist.
|
||||
try:
|
||||
return spack.mirror.require_mirror_name(m)
|
||||
return spack.mirrors.utils.require_mirror_name(m)
|
||||
except ValueError as e:
|
||||
raise argparse.ArgumentTypeError(f"{e}. Did you mean {os.path.join('.', m)}?") from e
|
||||
|
||||
|
||||
def mirror_url(url):
|
||||
try:
|
||||
return spack.mirror.Mirror.from_url(url)
|
||||
return spack.mirrors.mirror.Mirror.from_url(url)
|
||||
except ValueError as e:
|
||||
raise argparse.ArgumentTypeError(str(e)) from e
|
||||
|
||||
|
||||
def mirror_directory(path):
|
||||
try:
|
||||
return spack.mirror.Mirror.from_local_path(path)
|
||||
return spack.mirrors.mirror.Mirror.from_local_path(path)
|
||||
except ValueError as e:
|
||||
raise argparse.ArgumentTypeError(str(e)) from e
|
||||
|
||||
|
||||
def mirror_name(name):
|
||||
try:
|
||||
return spack.mirror.require_mirror_name(name)
|
||||
return spack.mirrors.utils.require_mirror_name(name)
|
||||
except ValueError as e:
|
||||
raise argparse.ArgumentTypeError(str(e)) from e
|
||||
|
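These hunks show the module split that recurs throughout this comparison: the Mirror and MirrorCollection classes now live in spack.mirrors.mirror, while helpers such as require_mirror_name, add, remove, get_all_versions and create live in spack.mirrors.utils. A short sketch of the new call sites (the URL and mirror name are illustrative):

import spack.mirrors.mirror
import spack.mirrors.utils

# classes come from spack.mirrors.mirror ...
mirror = spack.mirrors.mirror.Mirror("https://example.com/buildcache", name="demo")

# ... and name lookups from spack.mirrors.utils; this raises ValueError
# when no mirror with that name is configured
named = spack.mirrors.utils.require_mirror_name("demo")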
@@ -14,6 +14,7 @@
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.schema
|
||||
import spack.schema.env
|
||||
import spack.spec
|
||||
import spack.store
|
||||
@@ -566,7 +567,7 @@ def config_prefer_upstream(args):
|
||||
|
||||
# Simply write the config to the specified file.
|
||||
existing = spack.config.get("packages", scope=scope)
|
||||
new = spack.config.merge_yaml(existing, pkgs)
|
||||
new = spack.schema.merge_yaml(existing, pkgs)
|
||||
spack.config.set("packages", new, scope)
|
||||
config_file = spack.config.CONFIG.get_config_filename(scope, section)
|
||||
|
||||
|
@@ -110,7 +110,7 @@ def write(self, pkg_path):
|
||||
all_deps.append(self.dependencies)
|
||||
|
||||
# Write out a template for the file
|
||||
with open(pkg_path, "w") as pkg_file:
|
||||
with open(pkg_path, "w", encoding="utf-8") as pkg_file:
|
||||
pkg_file.write(
|
||||
package_template.format(
|
||||
name=self.name,
|
||||
|
@@ -23,9 +23,10 @@
|
||||
import spack.installer
|
||||
import spack.store
|
||||
from spack.cmd.common import arguments
|
||||
from spack.database import InstallStatuses
|
||||
from spack.error import SpackError
|
||||
|
||||
from ..enums import InstallRecordStatus
|
||||
|
||||
description = "replace one package with another via symlinks"
|
||||
section = "admin"
|
||||
level = "long"
|
||||
@@ -95,8 +96,12 @@ def deprecate(parser, args):
|
||||
if len(specs) != 2:
|
||||
raise SpackError("spack deprecate requires exactly two specs")
|
||||
|
||||
install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
|
||||
deprecate = spack.cmd.disambiguate_spec(specs[0], env, local=True, installed=install_query)
|
||||
deprecate = spack.cmd.disambiguate_spec(
|
||||
specs[0],
|
||||
env,
|
||||
local=True,
|
||||
installed=(InstallRecordStatus.INSTALLED | InstallRecordStatus.DEPRECATED),
|
||||
)
|
||||
|
||||
if args.install:
|
||||
deprecator = specs[1].concretized()
|
||||
|
@@ -76,7 +76,7 @@ def locate_package(name: str, repo: spack.repo.Repo) -> str:
|
||||
path = repo.filename_for_package_name(name)
|
||||
|
||||
try:
|
||||
with open(path, "r"):
|
||||
with open(path, "r", encoding="utf-8"):
|
||||
return path
|
||||
except OSError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
@@ -93,7 +93,7 @@ def locate_file(name: str, path: str) -> str:
|
||||
|
||||
# Try to open direct match.
|
||||
try:
|
||||
with open(file_path, "r"):
|
||||
with open(file_path, "r", encoding="utf-8"):
|
||||
return file_path
|
||||
except OSError as e:
|
||||
if e.errno != errno.ENOENT:
|
||||
|
@@ -865,7 +865,7 @@ def env_loads(args):
|
||||
args.recurse_dependencies = False
|
||||
|
||||
loads_file = fs.join_path(env.path, "loads")
|
||||
with open(loads_file, "w") as f:
|
||||
with open(loads_file, "w", encoding="utf-8") as f:
|
||||
specs = env._get_environment_specs(recurse_dependencies=recurse_dependencies)
|
||||
|
||||
spack.cmd.modules.loads(module_type, specs, args, f)
|
||||
@@ -1053,7 +1053,7 @@ def env_depfile(args):
|
||||
|
||||
# Finally write to stdout/file.
|
||||
if args.output:
|
||||
with open(args.output, "w") as f:
|
||||
with open(args.output, "w", encoding="utf-8") as f:
|
||||
f.write(makefile)
|
||||
else:
|
||||
sys.stdout.write(makefile)
|
||||
|
@@ -17,7 +17,8 @@
|
||||
import spack.spec
|
||||
import spack.store
|
||||
from spack.cmd.common import arguments
|
||||
from spack.database import InstallStatuses
|
||||
|
||||
from ..enums import InstallRecordStatus
|
||||
|
||||
description = "list and search installed packages"
|
||||
section = "basic"
|
||||
@@ -137,21 +138,22 @@ def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
"--loaded", action="store_true", help="show only packages loaded in the user environment"
|
||||
)
|
||||
subparser.add_argument(
|
||||
only_missing_or_deprecated = subparser.add_mutually_exclusive_group()
|
||||
only_missing_or_deprecated.add_argument(
|
||||
"-M",
|
||||
"--only-missing",
|
||||
action="store_true",
|
||||
dest="only_missing",
|
||||
help="show only missing dependencies",
|
||||
)
|
||||
only_missing_or_deprecated.add_argument(
|
||||
"--only-deprecated", action="store_true", help="show only deprecated packages"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--deprecated",
|
||||
action="store_true",
|
||||
help="show deprecated packages as well as installed specs",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--only-deprecated", action="store_true", help="show only deprecated packages"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--install-tree",
|
||||
action="store",
|
||||
@@ -165,14 +167,23 @@ def setup_parser(subparser):
|
||||
|
||||
|
||||
def query_arguments(args):
|
||||
# Set up query arguments.
|
||||
installed = []
|
||||
if not (args.only_missing or args.only_deprecated):
|
||||
installed.append(InstallStatuses.INSTALLED)
|
||||
if (args.deprecated or args.only_deprecated) and not args.only_missing:
|
||||
installed.append(InstallStatuses.DEPRECATED)
|
||||
if (args.missing or args.only_missing) and not args.only_deprecated:
|
||||
installed.append(InstallStatuses.MISSING)
|
||||
if args.only_missing and (args.deprecated or args.missing):
|
||||
raise RuntimeError("cannot use --only-missing with --deprecated, or --missing")
|
||||
|
||||
if args.only_deprecated and (args.deprecated or args.missing):
|
||||
raise RuntimeError("cannot use --only-deprecated with --deprecated, or --missing")
|
||||
|
||||
installed = InstallRecordStatus.INSTALLED
|
||||
if args.only_missing:
|
||||
installed = InstallRecordStatus.MISSING
|
||||
elif args.only_deprecated:
|
||||
installed = InstallRecordStatus.DEPRECATED
|
||||
|
||||
if args.missing:
|
||||
installed |= InstallRecordStatus.MISSING
|
||||
|
||||
if args.deprecated:
|
||||
installed |= InstallRecordStatus.DEPRECATED
|
||||
|
||||
predicate_fn = None
|
||||
if args.unknown:
|
||||
|
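query_arguments previously accumulated a Python list of InstallStatuses values; it now starts from InstallRecordStatus.INSTALLED and widens the query with bitwise OR, rejecting contradictory flag combinations up front. A condensed sketch of the flag-to-query mapping (the helper name is hypothetical; the attributes are the argparse flags defined above):

def installed_query(args):
    installed = InstallRecordStatus.INSTALLED
    if args.only_missing:        # spack find --only-missing
        installed = InstallRecordStatus.MISSING
    elif args.only_deprecated:   # spack find --only-deprecated
        installed = InstallRecordStatus.DEPRECATED
    if args.missing:             # additive: also include missing records
        installed |= InstallRecordStatus.MISSING
    if args.deprecated:          # additive: also include deprecated records
        installed |= InstallRecordStatus.DEPRECATED
    return installed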
@@ -8,7 +8,7 @@
|
||||
import tempfile
|
||||
|
||||
import spack.binary_distribution
|
||||
import spack.mirror
|
||||
import spack.mirrors.mirror
|
||||
import spack.paths
|
||||
import spack.stage
|
||||
import spack.util.gpg
|
||||
@@ -217,11 +217,11 @@ def gpg_publish(args):
|
||||
mirror = None
|
||||
if args.directory:
|
||||
url = spack.util.url.path_to_file_url(args.directory)
|
||||
mirror = spack.mirror.Mirror(url, url)
|
||||
mirror = spack.mirrors.mirror.Mirror(url, url)
|
||||
elif args.mirror_name:
|
||||
mirror = spack.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
|
||||
mirror = spack.mirrors.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
|
||||
elif args.mirror_url:
|
||||
mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)
|
||||
mirror = spack.mirrors.mirror.Mirror(args.mirror_url, args.mirror_url)
|
||||
|
||||
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
|
||||
spack.binary_distribution._url_push_keys(
|
||||
|
@@ -78,8 +78,8 @@
     boxlib @B{dim=2} boxlib built for 2 dimensions
     libdwarf @g{%intel} ^libelf@g{%gcc}
         libdwarf, built with intel compiler, linked to libelf built with gcc
-    mvapich2 @g{%pgi} @B{fabrics=psm,mrail,sock}
-        mvapich2, built with pgi compiler, with support for multiple fabrics
+    mvapich2 @g{%gcc} @B{fabrics=psm,mrail,sock}
+        mvapich2, built with gcc compiler, with support for multiple fabrics
 """
@@ -11,6 +11,7 @@
|
||||
import llnl.util.tty.color as color
|
||||
from llnl.util.tty.colify import colify
|
||||
|
||||
import spack.builder
|
||||
import spack.deptypes as dt
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.install_test
|
||||
@@ -202,11 +203,13 @@ def print_namespace(pkg, args):
 def print_phases(pkg, args):
     """output installation phases"""
 
-    if hasattr(pkg.builder, "phases") and pkg.builder.phases:
+    builder = spack.builder.create(pkg)
+
+    if hasattr(builder, "phases") and builder.phases:
         color.cprint("")
         color.cprint(section_title("Installation Phases:"))
         phase_str = ""
-        for phase in pkg.builder.phases:
+        for phase in builder.phases:
             phase_str += " {0}".format(phase)
         color.cprint(phase_str)
@@ -291,7 +291,7 @@ def _dump_log_on_error(e: InstallError):
|
||||
tty.error("'spack install' created no log.")
|
||||
else:
|
||||
sys.stderr.write("Full build log:\n")
|
||||
with open(e.pkg.log_path, errors="replace") as log:
|
||||
with open(e.pkg.log_path, errors="replace", encoding="utf-8") as log:
|
||||
shutil.copyfileobj(log, sys.stderr)
|
||||
|
||||
|
||||
@@ -445,7 +445,7 @@ def concrete_specs_from_file(args):
|
||||
"""Return the list of concrete specs read from files."""
|
||||
result = []
|
||||
for file in args.specfiles:
|
||||
with open(file, "r") as f:
|
||||
with open(file, "r", encoding="utf-8") as f:
|
||||
if file.endswith("yaml") or file.endswith("yml"):
|
||||
s = spack.spec.Spec.from_yaml(f)
|
||||
else:
|
||||
|
@@ -191,7 +191,7 @@ def verify(args):
|
||||
|
||||
for relpath in _licensed_files(args):
|
||||
path = os.path.join(args.root, relpath)
|
||||
with open(path) as f:
|
||||
with open(path, encoding="utf-8") as f:
|
||||
lines = [line for line in f][:license_lines]
|
||||
|
||||
error = _check_license(lines, path)
|
||||
|
@@ -340,7 +340,7 @@ def list(parser, args):
|
||||
return
|
||||
|
||||
tty.msg("Updating file: %s" % args.update)
|
||||
with open(args.update, "w") as f:
|
||||
with open(args.update, "w", encoding="utf-8") as f:
|
||||
formatter(sorted_packages, f)
|
||||
|
||||
elif args.count:
|
||||
|
@@ -31,7 +31,7 @@ def line_to_rtf(str):
|
||||
return str.replace("\n", "\\par")
|
||||
|
||||
contents = ""
|
||||
with open(file_path, "r+") as f:
|
||||
with open(file_path, "r+", encoding="utf-8") as f:
|
||||
for line in f.readlines():
|
||||
contents += line_to_rtf(line)
|
||||
return rtf_header.format(contents)
|
||||
@@ -93,7 +93,7 @@ def make_installer(parser, args):
|
||||
rtf_spack_license = txt_to_rtf(spack_license)
|
||||
spack_license = posixpath.join(source_dir, "LICENSE.rtf")
|
||||
|
||||
with open(spack_license, "w") as rtf_license:
|
||||
with open(spack_license, "w", encoding="utf-8") as rtf_license:
|
||||
written = rtf_license.write(rtf_spack_license)
|
||||
if written == 0:
|
||||
raise RuntimeError("Failed to generate properly formatted license file")
|
||||
|
@@ -10,7 +10,8 @@
|
||||
import spack.cmd
|
||||
import spack.store
|
||||
from spack.cmd.common import arguments
|
||||
from spack.database import InstallStatuses
|
||||
|
||||
from ..enums import InstallRecordStatus
|
||||
|
||||
description = "mark packages as explicitly or implicitly installed"
|
||||
section = "admin"
|
||||
@@ -67,8 +68,7 @@ def find_matching_specs(specs, allow_multiple_matches=False):
|
||||
has_errors = False
|
||||
|
||||
for spec in specs:
|
||||
install_query = [InstallStatuses.INSTALLED]
|
||||
matching = spack.store.STORE.db.query_local(spec, installed=install_query)
|
||||
matching = spack.store.STORE.db.query_local(spec, installed=InstallRecordStatus.INSTALLED)
|
||||
# For each spec provided, make sure it refers to only one package.
|
||||
# Fail and ask user to be unambiguous if it doesn't
|
||||
if not allow_multiple_matches and len(matching) > 1:
|
||||
|
@@ -14,7 +14,8 @@
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.mirror
|
||||
import spack.mirrors.mirror
|
||||
import spack.mirrors.utils
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.util.web as web_util
|
||||
@@ -365,15 +366,15 @@ def mirror_add(args):
|
||||
connection["autopush"] = args.autopush
|
||||
if args.signed is not None:
|
||||
connection["signed"] = args.signed
|
||||
mirror = spack.mirror.Mirror(connection, name=args.name)
|
||||
mirror = spack.mirrors.mirror.Mirror(connection, name=args.name)
|
||||
else:
|
||||
mirror = spack.mirror.Mirror(args.url, name=args.name)
|
||||
spack.mirror.add(mirror, args.scope)
|
||||
mirror = spack.mirrors.mirror.Mirror(args.url, name=args.name)
|
||||
spack.mirrors.utils.add(mirror, args.scope)
|
||||
|
||||
|
||||
def mirror_remove(args):
|
||||
"""remove a mirror by name"""
|
||||
spack.mirror.remove(args.name, args.scope)
|
||||
spack.mirrors.utils.remove(args.name, args.scope)
|
||||
|
||||
|
||||
def _configure_mirror(args):
|
||||
@@ -382,7 +383,7 @@ def _configure_mirror(args):
|
||||
if args.name not in mirrors:
|
||||
tty.die(f"No mirror found with name {args.name}.")
|
||||
|
||||
entry = spack.mirror.Mirror(mirrors[args.name], args.name)
|
||||
entry = spack.mirrors.mirror.Mirror(mirrors[args.name], args.name)
|
||||
direction = "fetch" if args.fetch else "push" if args.push else None
|
||||
changes = {}
|
||||
if args.url:
|
||||
@@ -449,7 +450,7 @@ def mirror_set_url(args):
|
||||
def mirror_list(args):
|
||||
"""print out available mirrors to the console"""
|
||||
|
||||
mirrors = spack.mirror.MirrorCollection(scope=args.scope)
|
||||
mirrors = spack.mirrors.mirror.MirrorCollection(scope=args.scope)
|
||||
if not mirrors:
|
||||
tty.msg("No mirrors configured.")
|
||||
return
|
||||
@@ -467,7 +468,7 @@ def specs_from_text_file(filename, concretize=False):
|
||||
concretize (bool): if True concretize the specs before returning
|
||||
the list.
|
||||
"""
|
||||
with open(filename, "r") as f:
|
||||
with open(filename, "r", encoding="utf-8") as f:
|
||||
specs_in_file = f.readlines()
|
||||
specs_in_file = [s.strip() for s in specs_in_file]
|
||||
return spack.cmd.parse_specs(" ".join(specs_in_file), concretize=concretize)
|
||||
@@ -489,9 +490,9 @@ def concrete_specs_from_user(args):
|
||||
|
||||
def extend_with_additional_versions(specs, num_versions):
|
||||
if num_versions == "all":
|
||||
mirror_specs = spack.mirror.get_all_versions(specs)
|
||||
mirror_specs = spack.mirrors.utils.get_all_versions(specs)
|
||||
else:
|
||||
mirror_specs = spack.mirror.get_matching_versions(specs, num_versions=num_versions)
|
||||
mirror_specs = spack.mirrors.utils.get_matching_versions(specs, num_versions=num_versions)
|
||||
mirror_specs = [x.concretized() for x in mirror_specs]
|
||||
return mirror_specs
|
||||
|
||||
@@ -570,7 +571,7 @@ def concrete_specs_from_environment():
|
||||
|
||||
def all_specs_with_all_versions():
|
||||
specs = [spack.spec.Spec(n) for n in spack.repo.all_package_names()]
|
||||
mirror_specs = spack.mirror.get_all_versions(specs)
|
||||
mirror_specs = spack.mirrors.utils.get_all_versions(specs)
|
||||
mirror_specs.sort(key=lambda s: (s.name, s.version))
|
||||
return mirror_specs
|
||||
|
||||
@@ -659,19 +660,21 @@ def _specs_and_action(args):
|
||||
|
||||
|
||||
def create_mirror_for_all_specs(mirror_specs, path, skip_unstable_versions):
|
||||
mirror_cache, mirror_stats = spack.mirror.mirror_cache_and_stats(
|
||||
mirror_cache, mirror_stats = spack.mirrors.utils.mirror_cache_and_stats(
|
||||
path, skip_unstable_versions=skip_unstable_versions
|
||||
)
|
||||
for candidate in mirror_specs:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(candidate.name)
|
||||
pkg_obj = pkg_cls(spack.spec.Spec(candidate))
|
||||
mirror_stats.next_spec(pkg_obj.spec)
|
||||
spack.mirror.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
|
||||
spack.mirrors.utils.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
|
||||
process_mirror_stats(*mirror_stats.stats())
|
||||
|
||||
|
||||
def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
|
||||
present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
|
||||
present, mirrored, error = spack.mirrors.utils.create(
|
||||
path, mirror_specs, skip_unstable_versions
|
||||
)
|
||||
tty.msg("Summary for mirror in {}".format(path))
|
||||
process_mirror_stats(present, mirrored, error)
|
||||
|
||||
@@ -681,7 +684,7 @@ def mirror_destroy(args):
|
||||
mirror_url = None
|
||||
|
||||
if args.mirror_name:
|
||||
result = spack.mirror.MirrorCollection().lookup(args.mirror_name)
|
||||
result = spack.mirrors.mirror.MirrorCollection().lookup(args.mirror_name)
|
||||
mirror_url = result.push_url
|
||||
elif args.mirror_url:
|
||||
mirror_url = args.mirror_url
|
||||
|
@@ -8,6 +8,7 @@
|
||||
import spack.cmd.common.arguments
|
||||
import spack.cmd.modules
|
||||
import spack.config
|
||||
import spack.modules
|
||||
import spack.modules.lmod
|
||||
|
||||
|
||||
|
@@ -7,6 +7,7 @@
|
||||
import spack.cmd.common.arguments
|
||||
import spack.cmd.modules
|
||||
import spack.config
|
||||
import spack.modules
|
||||
import spack.modules.tcl
|
||||
|
||||
|
||||
|
@@ -150,7 +150,7 @@ def pkg_source(args):
|
||||
content = ph.canonical_source(spec)
|
||||
else:
|
||||
message = "Source for %s:" % filename
|
||||
with open(filename) as f:
|
||||
with open(filename, encoding="utf-8") as f:
|
||||
content = f.read()
|
||||
|
||||
if sys.stdout.isatty():
|
||||
|
@@ -94,7 +94,7 @@ def ipython_interpreter(args):
|
||||
if "PYTHONSTARTUP" in os.environ:
|
||||
startup_file = os.environ["PYTHONSTARTUP"]
|
||||
if os.path.isfile(startup_file):
|
||||
with open(startup_file) as startup:
|
||||
with open(startup_file, encoding="utf-8") as startup:
|
||||
exec(startup.read())
|
||||
|
||||
# IPython can also support running a script OR command, not both
|
||||
@@ -126,7 +126,7 @@ def python_interpreter(args):
|
||||
if "PYTHONSTARTUP" in os.environ:
|
||||
startup_file = os.environ["PYTHONSTARTUP"]
|
||||
if os.path.isfile(startup_file):
|
||||
with open(startup_file) as startup:
|
||||
with open(startup_file, encoding="utf-8") as startup:
|
||||
console.runsource(startup.read(), startup_file, "exec")
|
||||
if args.python_command:
|
||||
propagate_exceptions_from(console)
|
||||
|
@@ -82,14 +82,6 @@ def spec(parser, args):
|
||||
if args.namespaces:
|
||||
fmt = "{namespace}." + fmt
|
||||
|
||||
tree_kwargs = {
|
||||
"cover": args.cover,
|
||||
"format": fmt,
|
||||
"hashlen": None if args.very_long else 7,
|
||||
"show_types": args.types,
|
||||
"status_fn": install_status_fn if args.install_status else None,
|
||||
}
|
||||
|
||||
# use a read transaction if we are getting install status for every
|
||||
# spec in the DAG. This avoids repeatedly querying the DB.
|
||||
tree_context = lang.nullcontext
|
||||
@@ -99,46 +91,35 @@ def spec(parser, args):
|
||||
env = ev.active_environment()
|
||||
|
||||
if args.specs:
|
||||
input_specs = spack.cmd.parse_specs(args.specs)
|
||||
concretized_specs = spack.cmd.parse_specs(args.specs, concretize=True)
|
||||
specs = list(zip(input_specs, concretized_specs))
|
||||
concrete_specs = spack.cmd.parse_specs(args.specs, concretize=True)
|
||||
elif env:
|
||||
env.concretize()
|
||||
specs = env.concretized_specs()
|
||||
|
||||
if not args.format:
|
||||
# environments are printed together in a combined tree() invocation,
|
||||
# except when using --yaml or --json, which we print spec by spec below.
|
||||
tree_kwargs["key"] = spack.traverse.by_dag_hash
|
||||
tree_kwargs["hashes"] = args.long or args.very_long
|
||||
print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
|
||||
return
|
||||
concrete_specs = env.concrete_roots()
|
||||
else:
|
||||
tty.die("spack spec requires at least one spec or an active environment")
|
||||
|
||||
for input, output in specs:
|
||||
# With --yaml or --json, just print the raw specs to output
|
||||
if args.format:
|
||||
# With --yaml, --json, or --format, just print the raw specs to output
|
||||
if args.format:
|
||||
for spec in concrete_specs:
|
||||
if args.format == "yaml":
|
||||
# use write because to_yaml already has a newline.
|
||||
sys.stdout.write(output.to_yaml(hash=ht.dag_hash))
|
||||
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
|
||||
elif args.format == "json":
|
||||
print(output.to_json(hash=ht.dag_hash))
|
||||
print(spec.to_json(hash=ht.dag_hash))
|
||||
else:
|
||||
print(output.format(args.format))
|
||||
continue
|
||||
print(spec.format(args.format))
|
||||
return
|
||||
|
||||
with tree_context():
|
||||
# Only show the headers for input specs that are not concrete to avoid
|
||||
# repeated output. This happens because parse_specs outputs concrete
|
||||
# specs for `/hash` inputs.
|
||||
if not input.concrete:
|
||||
tree_kwargs["hashes"] = False # Always False for input spec
|
||||
print("Input spec")
|
||||
print("--------------------------------")
|
||||
print(input.tree(**tree_kwargs))
|
||||
print("Concretized")
|
||||
print("--------------------------------")
|
||||
|
||||
tree_kwargs["hashes"] = args.long or args.very_long
|
||||
print(output.tree(**tree_kwargs))
|
||||
with tree_context():
|
||||
print(
|
||||
spack.spec.tree(
|
||||
concrete_specs,
|
||||
cover=args.cover,
|
||||
format=fmt,
|
||||
hashlen=None if args.very_long else 7,
|
||||
show_types=args.types,
|
||||
status_fn=install_status_fn if args.install_status else None,
|
||||
hashes=args.long or args.very_long,
|
||||
key=spack.traverse.by_dag_hash,
|
||||
)
|
||||
)
|
||||
|
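After this rewrite spack spec no longer prints an "Input spec"/"Concretized" section per spec: with --format, --yaml or --json each concrete spec is printed directly, and otherwise all roots go through a single spack.spec.tree(...) call. A condensed view of the new flow, with the keyword list abridged from the hunk above:

if args.format:
    for spec in concrete_specs:
        if args.format == "yaml":
            sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
        elif args.format == "json":
            print(spec.to_json(hash=ht.dag_hash))
        else:
            print(spec.format(args.format))
else:
    # one combined tree for every concrete root
    print(spack.spec.tree(concrete_specs, key=spack.traverse.by_dag_hash,
                          hashes=args.long or args.very_long))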
@@ -19,11 +19,48 @@
|
||||
level = "long"
|
||||
|
||||
|
||||
class StageFilter:
|
||||
"""
|
||||
Encapsulation of reasons to skip staging
|
||||
"""
|
||||
|
||||
def __init__(self, exclusions, skip_installed):
|
||||
"""
|
||||
:param exclusions: A list of specs to skip if satisfied.
|
||||
:param skip_installed: A boolean indicating whether to skip already installed specs.
|
||||
"""
|
||||
self.exclusions = exclusions
|
||||
self.skip_installed = skip_installed
|
||||
|
||||
def __call__(self, spec):
|
||||
"""filter action, true means spec should be filtered"""
|
||||
if spec.external:
|
||||
return True
|
||||
|
||||
if self.skip_installed and spec.installed:
|
||||
return True
|
||||
|
||||
if any(spec.satisfies(exclude) for exclude in self.exclusions):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
arguments.add_common_arguments(subparser, ["no_checksum", "specs"])
|
||||
subparser.add_argument(
|
||||
"-p", "--path", dest="path", help="path to stage package, does not add to spack tree"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-e",
|
||||
"--exclude",
|
||||
action="append",
|
||||
default=[],
|
||||
help="exclude packages that satisfy the specified specs",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-s", "--skip-installed", action="store_true", help="dont restage already installed specs"
|
||||
)
|
||||
arguments.add_concretizer_args(subparser)
|
||||
|
||||
|
||||
@@ -31,11 +68,14 @@ def stage(parser, args):
|
||||
if args.no_checksum:
|
||||
spack.config.set("config:checksum", False, scope="command_line")
|
||||
|
||||
exclusion_specs = spack.cmd.parse_specs(args.exclude, concretize=False)
|
||||
filter = StageFilter(exclusion_specs, args.skip_installed)
|
||||
|
||||
if not args.specs:
|
||||
env = ev.active_environment()
|
||||
if not env:
|
||||
tty.die("`spack stage` requires a spec or an active environment")
|
||||
return _stage_env(env)
|
||||
return _stage_env(env, filter)
|
||||
|
||||
specs = spack.cmd.parse_specs(args.specs, concretize=False)
|
||||
|
||||
@@ -49,6 +89,11 @@ def stage(parser, args):
|
||||
|
||||
specs = spack.cmd.matching_specs_from_env(specs)
|
||||
for spec in specs:
|
||||
spec = spack.cmd.matching_spec_from_env(spec)
|
||||
|
||||
if filter(spec):
|
||||
continue
|
||||
|
||||
pkg = spec.package
|
||||
|
||||
if custom_path:
|
||||
@@ -57,9 +102,13 @@ def stage(parser, args):
|
||||
_stage(pkg)
|
||||
|
||||
|
||||
def _stage_env(env: ev.Environment):
|
||||
def _stage_env(env: ev.Environment, filter):
|
||||
tty.msg(f"Staging specs from environment {env.name}")
|
||||
for spec in spack.traverse.traverse_nodes(env.concrete_roots()):
|
||||
|
||||
if filter(spec):
|
||||
continue
|
||||
|
||||
_stage(spec.package)
|
||||
|
||||
|
||||
|
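The new StageFilter gives spack stage one place to decide which specs to skip: externals always, installed specs when --skip-installed is passed, and anything satisfying an --exclude spec. A hedged usage sketch (the spec string and the specs_to_stage variable are illustrative):

exclusions = spack.cmd.parse_specs(["zlib"], concretize=False)  # as if --exclude zlib were given
skip = StageFilter(exclusions, skip_installed=True)             # as if --skip-installed were given

for spec in specs_to_stage:
    if skip(spec):  # True means: do not stage this spec
        continue
    _stage(spec.package)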
@@ -3,18 +3,21 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import argparse
|
||||
import ast
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from itertools import zip_longest
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.tty.color as color
|
||||
from llnl.util.filesystem import working_dir
|
||||
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.util.git
|
||||
from spack.util.executable import which
|
||||
from spack.util.executable import Executable, which
|
||||
|
||||
description = "runs source code style checks on spack"
|
||||
section = "developer"
|
||||
@@ -36,10 +39,7 @@ def grouper(iterable, n, fillvalue=None):
|
||||
#: double-check the results of other tools (if, e.g., --fix was provided)
|
||||
#: The list maps an executable name to a method to ensure the tool is
|
||||
#: bootstrapped or present in the environment.
|
||||
tool_names = ["isort", "black", "flake8", "mypy"]
|
||||
|
||||
#: tools we run in spack style
|
||||
tools = {}
|
||||
tool_names = ["import", "isort", "black", "flake8", "mypy"]
|
||||
|
||||
#: warnings to ignore in mypy
|
||||
mypy_ignores = [
|
||||
@@ -61,14 +61,28 @@ def is_package(f):
|
||||
|
||||
#: decorator for adding tools to the list
|
||||
class tool:
|
||||
def __init__(self, name, required=False):
|
||||
def __init__(self, name: str, required: bool = False, external: bool = True) -> None:
|
||||
self.name = name
|
||||
self.external = external
|
||||
self.required = required
|
||||
|
||||
def __call__(self, fun):
|
||||
tools[self.name] = (fun, self.required)
|
||||
self.fun = fun
|
||||
tools[self.name] = self
|
||||
return fun
|
||||
|
||||
@property
|
||||
def installed(self) -> bool:
|
||||
return bool(which(self.name)) if self.external else True
|
||||
|
||||
@property
|
||||
def executable(self) -> Optional[Executable]:
|
||||
return which(self.name) if self.external else None
|
||||
|
||||
|
||||
#: tools we run in spack style
|
||||
tools: Dict[str, tool] = {}
|
||||
|
||||
|
||||
def changed_files(base="develop", untracked=True, all_files=False, root=None):
|
||||
"""Get list of changed files in the Spack repository.
|
||||
@@ -176,22 +190,22 @@ def setup_parser(subparser):
|
||||
"-t",
|
||||
"--tool",
|
||||
action="append",
|
||||
help="specify which tools to run (default: %s)" % ",".join(tool_names),
|
||||
help="specify which tools to run (default: %s)" % ", ".join(tool_names),
|
||||
)
|
||||
tool_group.add_argument(
|
||||
"-s",
|
||||
"--skip",
|
||||
metavar="TOOL",
|
||||
action="append",
|
||||
help="specify tools to skip (choose from %s)" % ",".join(tool_names),
|
||||
help="specify tools to skip (choose from %s)" % ", ".join(tool_names),
|
||||
)
|
||||
|
||||
subparser.add_argument("files", nargs=argparse.REMAINDER, help="specific files to check")
|
||||
|
||||
|
||||
def cwd_relative(path, args):
|
||||
def cwd_relative(path, root, initial_working_dir):
|
||||
"""Translate prefix-relative path to current working directory-relative."""
|
||||
return os.path.relpath(os.path.join(args.root, path), args.initial_working_dir)
|
||||
return os.path.relpath(os.path.join(root, path), initial_working_dir)
|
||||
|
||||
|
||||
def rewrite_and_print_output(
|
||||
@@ -201,7 +215,10 @@ def rewrite_and_print_output(
|
||||
|
||||
# print results relative to current working directory
|
||||
def translate(match):
|
||||
return replacement.format(cwd_relative(match.group(1), args), *list(match.groups()[1:]))
|
||||
return replacement.format(
|
||||
cwd_relative(match.group(1), args.root, args.initial_working_dir),
|
||||
*list(match.groups()[1:]),
|
||||
)
|
||||
|
||||
for line in output.split("\n"):
|
||||
if not line:
|
||||
@@ -220,7 +237,7 @@ def print_style_header(file_list, args, tools_to_run):
|
||||
# translate modified paths to cwd_relative if needed
|
||||
paths = [filename.strip() for filename in file_list]
|
||||
if not args.root_relative:
|
||||
paths = [cwd_relative(filename, args) for filename in paths]
|
||||
paths = [cwd_relative(filename, args.root, args.initial_working_dir) for filename in paths]
|
||||
|
||||
tty.msg("Modified files", *paths)
|
||||
sys.stdout.flush()
|
||||
@@ -306,8 +323,6 @@ def process_files(file_list, is_args):
|
||||
rewrite_and_print_output(output, args, pat, replacement)
|
||||
|
||||
packages_isort_args = (
|
||||
"--rm",
|
||||
"spack",
|
||||
"--rm",
|
||||
"spack.pkgkit",
|
||||
"--rm",
|
||||
@@ -352,17 +367,137 @@ def run_black(black_cmd, file_list, args):
|
||||
return returncode
|
||||
|
||||
|
||||
def _module_part(root: str, expr: str):
|
||||
parts = expr.split(".")
|
||||
# spack.pkg is for repositories, don't try to resolve it here.
|
||||
if ".".join(parts[:2]) == spack.repo.ROOT_PYTHON_NAMESPACE:
|
||||
return None
|
||||
while parts:
|
||||
f1 = os.path.join(root, "lib", "spack", *parts) + ".py"
|
||||
f2 = os.path.join(root, "lib", "spack", *parts, "__init__.py")
|
||||
|
||||
if (
|
||||
os.path.exists(f1)
|
||||
# ensure case sensitive match
|
||||
and f"{parts[-1]}.py" in os.listdir(os.path.dirname(f1))
|
||||
or os.path.exists(f2)
|
||||
):
|
||||
return ".".join(parts)
|
||||
parts.pop()
|
||||
return None
|
||||
|
||||
|
||||
def _run_import_check(
|
||||
file_list: List[str],
|
||||
*,
|
||||
fix: bool,
|
||||
root_relative: bool,
|
||||
root=spack.paths.prefix,
|
||||
working_dir=spack.paths.prefix,
|
||||
out=sys.stdout,
|
||||
):
|
||||
if sys.version_info < (3, 9):
|
||||
print("import check requires Python 3.9 or later")
|
||||
return 0
|
||||
|
||||
is_use = re.compile(r"(?<!from )(?<!import )(?:llnl|spack)\.[a-zA-Z0-9_\.]+")
|
||||
|
||||
# redundant imports followed by a `# comment` are ignored, because there can be legitimate reasons
|
||||
# to import a module: execute module scope init code, or to deal with circular imports.
|
||||
is_abs_import = re.compile(r"^import ((?:llnl|spack)\.[a-zA-Z0-9_\.]+)$", re.MULTILINE)
|
||||
|
||||
exit_code = 0
|
||||
|
||||
for file in file_list:
|
||||
to_add = set()
|
||||
to_remove = []
|
||||
|
||||
pretty_path = file if root_relative else cwd_relative(file, root, working_dir)
|
||||
|
||||
try:
|
||||
with open(file, "r", encoding="utf-8") as f:
|
||||
contents = f.read()
|
||||
parsed = ast.parse(contents)
|
||||
except Exception:
|
||||
exit_code = 1
|
||||
print(f"{pretty_path}: could not parse", file=out)
|
||||
continue
|
||||
|
||||
for m in is_abs_import.finditer(contents):
|
||||
if contents.count(m.group(1)) == 1:
|
||||
to_remove.append(m.group(0))
|
||||
exit_code = 1
|
||||
print(f"{pretty_path}: redundant import: {m.group(1)}", file=out)
|
||||
|
||||
# Clear all strings to avoid matching comments/strings etc.
|
||||
for node in ast.walk(parsed):
|
||||
if isinstance(node, ast.Constant) and isinstance(node.value, str):
|
||||
node.value = ""
|
||||
|
||||
filtered_contents = ast.unparse(parsed) # novermin
|
||||
for m in is_use.finditer(filtered_contents):
|
||||
module = _module_part(root, m.group(0))
|
||||
if not module or module in to_add:
|
||||
continue
|
||||
if re.search(rf"import {re.escape(module)}\b(?!\.)", contents):
|
||||
continue
|
||||
to_add.add(module)
|
||||
exit_code = 1
|
||||
print(f"{pretty_path}: missing import: {module} ({m.group(0)})", file=out)
|
||||
|
||||
if not fix or not to_add and not to_remove:
|
||||
continue
|
||||
|
||||
with open(file, "r", encoding="utf-8") as f:
|
||||
lines = f.readlines()
|
||||
|
||||
if to_add:
|
||||
# insert missing imports before the first import, delegate ordering to isort
|
||||
for node in parsed.body:
|
||||
if isinstance(node, (ast.Import, ast.ImportFrom)):
|
||||
first_line = node.lineno
|
||||
break
|
||||
else:
|
||||
print(f"{pretty_path}: could not fix", file=out)
|
||||
continue
|
||||
lines.insert(first_line, "\n".join(f"import {x}" for x in to_add) + "\n")
|
||||
|
||||
new_contents = "".join(lines)
|
||||
|
||||
# remove redundant imports
|
||||
for statement in to_remove:
|
||||
new_contents = new_contents.replace(f"{statement}\n", "")
|
||||
|
||||
with open(file, "w", encoding="utf-8") as f:
|
||||
f.write(new_contents)
|
||||
|
||||
return exit_code
|
||||
|
||||
|
||||
@tool("import", external=False)
|
||||
def run_import_check(import_check_cmd, file_list, args):
|
||||
exit_code = _run_import_check(
|
||||
file_list,
|
||||
fix=args.fix,
|
||||
root_relative=args.root_relative,
|
||||
root=args.root,
|
||||
working_dir=args.initial_working_dir,
|
||||
)
|
||||
print_tool_result("import", exit_code)
|
||||
return exit_code
|
||||
|
||||
|
||||
def validate_toolset(arg_value):
|
||||
"""Validate --tool and --skip arguments (sets of optionally comma-separated tools)."""
|
||||
tools = set(",".join(arg_value).split(",")) # allow args like 'isort,flake8'
|
||||
for tool in tools:
|
||||
if tool not in tool_names:
|
||||
tty.die("Invaild tool: '%s'" % tool, "Choose from: %s" % ", ".join(tool_names))
|
||||
tty.die("Invalid tool: '%s'" % tool, "Choose from: %s" % ", ".join(tool_names))
|
||||
return tools
|
||||
|
||||
|
||||
def missing_tools(tools_to_run):
|
||||
return [t for t in tools_to_run if which(t) is None]
|
||||
def missing_tools(tools_to_run: List[str]) -> List[str]:
|
||||
return [t for t in tools_to_run if not tools[t].installed]
|
||||
|
||||
|
||||
def _bootstrap_dev_dependencies():
|
||||
@@ -417,9 +552,9 @@ def prefix_relative(path):
|
||||
|
||||
print_style_header(file_list, args, tools_to_run)
|
||||
for tool_name in tools_to_run:
|
||||
run_function, required = tools[tool_name]
|
||||
tool = tools[tool_name]
|
||||
print_tool_header(tool_name)
|
||||
return_code |= run_function(which(tool_name), file_list, args)
|
||||
return_code |= tool.fun(tool.executable, file_list, args)
|
||||
|
||||
if return_code == 0:
|
||||
tty.msg(color.colorize("@*{spack style checks were clean}"))
|
||||
|
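The largest addition in the spack style hunks is the in-tree "import" check, which needs no external executable: each file is parsed with ast, string constants are blanked so docstrings cannot produce false matches, and the two regular expressions above then flag absolute imports that are never used and spack.*/llnl.* attribute uses that are never imported. A tiny illustration of what those expressions match, on made-up file contents:

import re

is_use = re.compile(r"(?<!from )(?<!import )(?:llnl|spack)\.[a-zA-Z0-9_\.]+")
is_abs_import = re.compile(r"^import ((?:llnl|spack)\.[a-zA-Z0-9_\.]+)$", re.MULTILINE)

sample = "import spack.config\nvalue = spack.util.spack_yaml.dump\n"
print(is_abs_import.findall(sample))                  # ['spack.config'] -> import never used again
print([m.group(0) for m in is_use.finditer(sample)])  # ['spack.util.spack_yaml.dump'] -> missing import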
@@ -346,7 +346,7 @@ def _report_suite_results(test_suite, args, constraints):
     tty.msg("{0} for test suite '{1}'{2}:".format(results_desc, test_suite.name, matching))
 
     results = {}
-    with open(test_suite.results_file, "r") as f:
+    with open(test_suite.results_file, "r", encoding="utf-8") as f:
         for line in f:
             pkg_id, status = line.split()
             results[pkg_id] = status
@@ -371,7 +371,7 @@ def _report_suite_results(test_suite, args, constraints):
                 spec = test_specs[pkg_id]
                 log_file = test_suite.log_file_for_spec(spec)
                 if os.path.isfile(log_file):
-                    with open(log_file, "r") as f:
+                    with open(log_file, "r", encoding="utf-8") as f:
                         msg += "\n{0}".format("".join(f.readlines()))
                 tty.msg(msg)