Compare commits
v0.17.2.3-... → minimal-co... (516 commits)
(Commit list: 516 abbreviated SHA1 hashes only; no author, date, or message information was captured.)
.github/workflows/bootstrap.yml (vendored, 134 changes)
@@ -24,6 +24,7 @@ jobs:
|
||||
fedora-clingo-sources:
|
||||
runs-on: ubuntu-latest
|
||||
container: "fedora:latest"
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
@@ -31,14 +32,20 @@ jobs:
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison bison-devel libstdc++-static
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap clingo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
@@ -51,6 +58,7 @@ jobs:
|
||||
ubuntu-clingo-sources:
|
||||
runs-on: ubuntu-latest
|
||||
container: "ubuntu:latest"
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
env:
|
||||
@@ -61,22 +69,20 @@ jobs:
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
cmake bison
|
||||
- name: Work around CVE-2022-24765
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
|
||||
# a breaking behavior. See:
|
||||
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
|
||||
# - https://github.com/actions/checkout/issues/760
|
||||
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd -m spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap clingo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
@@ -89,6 +95,7 @@ jobs:
|
||||
ubuntu-clingo-binaries-and-patchelf:
|
||||
runs-on: ubuntu-latest
|
||||
container: "ubuntu:latest"
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
env:
|
||||
@@ -98,22 +105,20 @@ jobs:
|
||||
apt-get install -y \
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Work around CVE-2022-24765
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
|
||||
# a breaking behavior. See:
|
||||
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
|
||||
# - https://github.com/actions/checkout/issues/760
|
||||
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd -m spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap clingo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
@@ -121,10 +126,10 @@ jobs:
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
|
||||
opensuse-clingo-sources:
|
||||
runs-on: ubuntu-latest
|
||||
container: "opensuse/leap:latest"
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
@@ -134,9 +139,12 @@ jobs:
|
||||
bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup repo
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
@@ -150,11 +158,13 @@ jobs:
|
||||
|
||||
macos-clingo-sources:
|
||||
runs-on: macos-latest
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
brew install cmake bison@2.7 tree
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -169,12 +179,14 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ['3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
brew install tree
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Bootstrap clingo
|
||||
@@ -189,12 +201,14 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Setup repo and non-root user
|
||||
- name: Setup repo
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
@@ -209,6 +223,7 @@ jobs:
|
||||
ubuntu-gnupg-binaries:
|
||||
runs-on: ubuntu-latest
|
||||
container: "ubuntu:latest"
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
env:
|
||||
@@ -218,22 +233,20 @@ jobs:
|
||||
apt-get install -y \
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Work around CVE-2022-24765
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
|
||||
# a breaking behavior. See:
|
||||
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
|
||||
# - https://github.com/actions/checkout/issues/760
|
||||
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
|
||||
- name: Setup repo and non-root user
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd -m spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap GnuPG
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
@@ -245,6 +258,7 @@ jobs:
|
||||
ubuntu-gnupg-sources:
|
||||
runs-on: ubuntu-latest
|
||||
container: "ubuntu:latest"
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
env:
|
||||
@@ -255,22 +269,20 @@ jobs:
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
gawk
|
||||
- name: Work around CVE-2022-24765
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
|
||||
# a breaking behavior. See:
|
||||
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
|
||||
# - https://github.com/actions/checkout/issues/760
|
||||
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
|
||||
- name: Setup repo and non-root user
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd -m spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap GnuPG
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
@@ -282,13 +294,15 @@ jobs:
|
||||
|
||||
macos-gnupg-binaries:
|
||||
runs-on: macos-latest
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
brew install tree
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -298,13 +312,15 @@ jobs:
|
||||
|
||||
macos-gnupg-sources:
|
||||
runs-on: macos-latest
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
brew install gawk tree
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -312,3 +328,11 @@ jobs:
|
||||
spack bootstrap untrust github-actions-v0.2
|
||||
spack -d gpg list
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
|
||||
# [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
|
||||
# introduce breaking behavior, so we have to set `safe.directory` in gitconfig ourselves.
|
||||
# See:
|
||||
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
|
||||
# - https://github.com/actions/checkout/issues/760
|
||||
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
|
||||
|
||||
.github/workflows/build-containers.yml (vendored, 17 changes)
@@ -43,9 +43,10 @@ jobs:
|
||||
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
|
||||
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04']]
|
||||
name: Build ${{ matrix.dockerfile[0] }}
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
|
||||
- name: Set Container Tag Normal (Nightly)
|
||||
run: |
|
||||
@@ -75,33 +76,33 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Upload Dockerfile
|
||||
uses: actions/upload-artifact@6673cd052c4cd6fcf4b4e6e60ea986c889389535
|
||||
uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
|
||||
with:
|
||||
name: dockerfiles
|
||||
path: dockerfiles
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # @v1
|
||||
uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # @v1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # @v1
|
||||
uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6 # @v1
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7 # @v1
|
||||
uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Log in to DockerHub
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7 # @v1
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
|
||||
uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a # @v2
|
||||
uses: docker/build-push-action@e551b19e49efd4e98792db7592c17c09b89db8d8 # @v2
|
||||
with:
|
||||
context: dockerfiles/${{ matrix.dockerfile[0] }}
|
||||
platforms: ${{ matrix.dockerfile[1] }}
|
||||
|
||||
.github/workflows/macos_python.yml (vendored, 9 changes)
@@ -22,9 +22,10 @@ on:
|
||||
jobs:
|
||||
install_gcc:
|
||||
name: gcc with clang
|
||||
if: github.repository == 'spack/spack'
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
@@ -36,10 +37,11 @@ jobs:
|
||||
|
||||
install_jupyter_clang:
|
||||
name: jupyter
|
||||
if: github.repository == 'spack/spack'
|
||||
runs-on: macos-latest
|
||||
timeout-minutes: 700
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
@@ -50,9 +52,10 @@ jobs:
|
||||
|
||||
install_scipy_clang:
|
||||
name: scipy, mpl, pd
|
||||
if: github.repository == 'spack/spack'
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
.github/workflows/unit_tests.yaml (vendored, 28 changes)
@@ -15,7 +15,7 @@ jobs:
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
@@ -31,7 +31,7 @@ jobs:
|
||||
style:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
packages: ${{ steps.filter.outputs.packages }}
|
||||
with_coverage: ${{ steps.coverage.outputs.with_coverage }}
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
with:
|
||||
fetch-depth: 0
|
||||
@@ -106,7 +106,7 @@ jobs:
|
||||
- python-version: 3.9
|
||||
concretizer: original
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
@@ -162,7 +162,7 @@ jobs:
|
||||
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,${{ matrix.concretizer }}
|
||||
@@ -171,7 +171,7 @@ jobs:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
@@ -200,7 +200,7 @@ jobs:
|
||||
COVERAGE: true
|
||||
run: |
|
||||
share/spack/qa/run-shell-tests
|
||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: shelltests,linux
|
||||
@@ -218,7 +218,7 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -237,7 +237,7 @@ jobs:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
@@ -274,7 +274,7 @@ jobs:
|
||||
SPACK_TEST_SOLVER: clingo
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,clingo
|
||||
@@ -286,7 +286,7 @@ jobs:
|
||||
matrix:
|
||||
python-version: [3.8]
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
@@ -320,7 +320,7 @@ jobs:
|
||||
echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
|
||||
$(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
|
||||
fi
|
||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
files: ./coverage.xml
|
||||
@@ -331,7 +331,7 @@ jobs:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
@@ -350,7 +350,7 @@ jobs:
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
$(which spack) audit packages
|
||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,audits
|
||||
|
||||
.github/workflows/windows_python.yml (vendored, 16 changes)
@@ -17,7 +17,7 @@ jobs:
|
||||
validate:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
@@ -33,7 +33,7 @@ jobs:
|
||||
style:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
@@ -55,7 +55,7 @@ jobs:
|
||||
needs: [ validate, style ]
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
@@ -75,7 +75,7 @@ jobs:
|
||||
needs: [ validate, style ]
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
@@ -95,7 +95,7 @@ jobs:
|
||||
needs: [ validate, style ]
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
@@ -120,7 +120,7 @@ jobs:
|
||||
git config --global core.symlinks false
|
||||
shell:
|
||||
powershell
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
@@ -139,11 +139,11 @@ jobs:
|
||||
echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
|
||||
env:
|
||||
ProgressPreference: SilentlyContinue
|
||||
- uses: actions/upload-artifact@v3
|
||||
- uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
|
||||
with:
|
||||
name: Windows Spack Installer Bundle
|
||||
path: ${{ env.installer_root }}\pkg\Spack.exe
|
||||
- uses: actions/upload-artifact@v3
|
||||
- uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
|
||||
with:
|
||||
name: Windows Spack Installer
|
||||
path: ${{ env.installer_root}}\pkg\Spack.msi
|
||||
|
||||
@@ -14,4 +14,39 @@ concretizer:
|
||||
# concretizing specs. If `true`, we'll try to use as many installs/binaries
|
||||
# as possible, rather than building. If `false`, we'll always give you a fresh
|
||||
# concretization.
|
||||
reuse: false
|
||||
reuse: true
|
||||
|
||||
# If `true`, Spack will consider minimizing builds its *topmost* priority.
|
||||
# Note that this can result in weird package configurations. In particular,
|
||||
# Spack will disable variants and might downgrade versions to avoid building
|
||||
# new packages for an install. By default, Spack respects defaults from
|
||||
# packages and preferences *before* minimizing the number of builds.
|
||||
#
|
||||
# Example for intuition: `cmake` can optionally build without openssl, but
|
||||
# it's enabled by default because many builds use that functionality. Using
|
||||
# `minimal: true` will build `cmake~openssl` unless the user asks for
|
||||
# `cmake+openssl` explicitly.
|
||||
minimal: false
|
||||
|
||||
# Options that tune which targets are considered for concretization. The
|
||||
# concretization process is very sensitive to the number of targets, and the time
|
||||
# needed to reach a solution increases noticeably with the number of targets
|
||||
# considered.
|
||||
targets:
|
||||
|
||||
# Determine whether we want to target specific or generic microarchitectures.
|
||||
# An example of the first kind might be for instance "skylake" or "bulldozer",
|
||||
# while generic microarchitectures are for instance "aarch64" or "x86_64_v4".
|
||||
granularity: microarchitectures
|
||||
|
||||
# If "false" allow targets that are incompatible with the current host (for
|
||||
# instance concretize with target "icelake" while running on "haswell").
|
||||
# If "true" only allow targets that are compatible with the host.
|
||||
host_compatible: true
|
||||
|
||||
# When "true" concretize root specs of environments together, so that each unique
|
||||
# package in an environment corresponds to one concrete spec. This ensures
|
||||
# environments can always be activated. When "false" perform concretization separately
|
||||
# on each root spec, allowing different versions and variants of the same package in
|
||||
# an environment.
|
||||
unify: false
|
||||
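To see which of these concretizer settings are active in a given Spack installation, the ``spack config`` command can be used. This is only a sketch and assumes the new ``concretizer`` section is queried like any other configuration section:

.. code-block:: console

   $ spack config get concretizer    # print the merged concretizer settings
   $ spack config blame concretizer  # show which file each value comes from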
|
||||
@@ -33,6 +33,9 @@ config:
|
||||
template_dirs:
|
||||
- $spack/share/spack/templates
|
||||
|
||||
# Directory where licenses should be located
|
||||
license_dir: $spack/etc/spack/licenses
|
||||
|
||||
# Temporary locations Spack can try to use for builds.
|
||||
#
|
||||
# Recommended options are given below.
|
||||
|
||||
@@ -35,7 +35,8 @@ packages:
|
||||
jpeg: [libjpeg-turbo, libjpeg]
|
||||
lapack: [openblas, amdlibflame]
|
||||
libllvm: [llvm, llvm-amdgpu]
|
||||
lua-lang: [lua, lua-luajit]
|
||||
lua-lang: [lua, lua-luajit-openresty, lua-luajit]
|
||||
luajit: [lua-luajit-openresty, lua-luajit]
|
||||
mariadb-client: [mariadb-c-client, mariadb]
|
||||
mkl: [intel-mkl]
|
||||
mpe: [mpe2]
|
||||
|
||||
@@ -192,32 +192,32 @@ you can use them to customize an installation in :ref:`sec-specs`.
|
||||
Reusing installed dependencies
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. warning::
|
||||
By default, when you run ``spack install``, Spack tries hard to reuse existing installations
|
||||
as dependencies, either from a local store or from remote buildcaches if configured.
|
||||
This minimizes unwanted rebuilds of common dependencies, in particular if
|
||||
you update Spack frequently.
|
||||
|
||||
The ``--reuse`` option described here will become the default installation
|
||||
method in the next Spack version, and you will be able to get the current
|
||||
behavior by using ``spack install --fresh``.
|
||||
|
||||
By default, when you run ``spack install``, Spack tries to build a new
|
||||
version of the package you asked for, along with updated versions of
|
||||
its dependencies. This gets you the latest versions and configurations,
|
||||
but it can result in unwanted rebuilds if you update Spack frequently.
|
||||
|
||||
If you want Spack to try hard to reuse existing installations as dependencies,
|
||||
you can add the ``--reuse`` option:
|
||||
In case you want the latest versions and configurations to be installed instead,
|
||||
you can add the ``--fresh`` option:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install --reuse mpich
|
||||
$ spack install --fresh mpich
|
||||
|
||||
This will not do anything if ``mpich`` is already installed. If ``mpich``
|
||||
is not installed, but dependencies like ``hwloc`` and ``libfabric`` are,
|
||||
``mpich`` will be built with the installed versions, if possible.
|
||||
You can use the :ref:`spack spec -I <cmd-spack-spec>` command to see what
|
||||
Reusing installations in this mode is "accidental", and happens only if
|
||||
there's a match between existing installations and what Spack would have installed
|
||||
anyhow.
|
||||
|
||||
You can use the ``spack spec -I mpich`` command to see what
|
||||
will be reused and what will be built before you install.
|
||||
|
||||
You can configure Spack to use the ``--reuse`` behavior by default in
|
||||
``concretizer.yaml``.
|
||||
You can configure Spack to use the ``--fresh`` behavior by default in
|
||||
``concretizer.yaml``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
concretizer:
|
||||
reuse: false
|
||||
|
||||
.. _cmd-spack-uninstall:
|
||||
|
||||
|
||||
@@ -219,33 +219,65 @@ Concretizer options
|
||||
but you can also use ``concretizer.yaml`` to customize aspects of the
|
||||
algorithm it uses to select the dependencies you install:
|
||||
|
||||
.. _code-block: yaml
|
||||
.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
|
||||
:language: yaml
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Reuse already installed packages
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
|
||||
whether it will do a "fresh" installation and prefer the latest settings from
|
||||
``package.py`` files and ``packages.yaml`` (``false``).
|
||||
You can use:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
% spack install --reuse <spec>
|
||||
|
||||
to enable reuse for a single installation, and you can use:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
spack install --fresh <spec>
|
||||
|
||||
to do a fresh install if ``reuse`` is enabled by default.
|
||||
``reuse: true`` is the default.
|
||||
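To make that choice persistent instead of passing a flag on every install, the same key can be set in a ``concretizer.yaml`` within one of your configuration scopes; a minimal sketch:

.. code-block:: yaml

   concretizer:
     # prefer already-installed packages and available binaries when concretizing
     reuse: true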
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Selection of the target microarchitectures
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The options under the ``targets`` attribute control which targets are considered during a solve.
|
||||
Currently, the options in this section are only configurable from the ``concretizer.yaml`` file
|
||||
and there are no corresponding command line arguments to enable them for a single solve.
|
||||
|
||||
The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
|
||||
If set to:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
concretizer:
|
||||
# Whether to consider installed packages or packages from buildcaches when
|
||||
# concretizing specs. If `true`, we'll try to use as many installs/binaries
|
||||
# as possible, rather than building. If `false`, we'll always give you a fresh
|
||||
# concretization.
|
||||
reuse: false
|
||||
targets:
|
||||
granularity: microarchitectures
|
||||
|
||||
^^^^^^^^^^^^^^^^
|
||||
``reuse``
|
||||
^^^^^^^^^^^^^^^^
|
||||
Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
|
||||
compatibility. If instead the option is set to:
|
||||
|
||||
This controls whether Spack will prefer to use installed packages (``true``), or
|
||||
whether it will do a "fresh" installation and prefer the latest settings from
|
||||
``package.py`` files and ``packages.yaml`` (``false``).
|
||||
.. code-block:: yaml
|
||||
|
||||
You can use ``spack install --reuse`` to enable reuse for a single installation,
|
||||
and you can use ``spack install --fresh`` to do a fresh install if ``reuse`` is
|
||||
enabled by default.
|
||||
concretizer:
|
||||
targets:
|
||||
granularity: generic
|
||||
|
||||
.. note::
|
||||
|
||||
``reuse: false`` is the current default, but ``reuse: true`` will be the default
|
||||
in the next Spack release. You will still be able to use ``spack install --fresh``
|
||||
to get the old behavior.
|
||||
Spack will consider only generic microarchitectures. For instance, when running on a
|
||||
Haswell node, Spack will consider ``haswell`` as the best target in the former case and
|
||||
``x86_64_v3`` as the best target in the latter case.
|
||||
|
||||
The ``host_compatible`` option is a Boolean option that determines whether or not the
|
||||
microarchitectures considered during the solve are constrained to be compatible with the
|
||||
host Spack is currently running on. For instance, if this option is set to ``true``, a
|
||||
user cannot concretize for ``target=icelake`` while running on a Haswell node.
|
||||
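For example, to allow concretizing for a target the current host cannot execute (such as ``icelake`` from a Haswell machine), the default shown earlier would need to be flipped; a minimal sketch:

.. code-block:: yaml

   concretizer:
     targets:
       # allow targets that are incompatible with the current host
       host_compatible: false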
|
||||
.. _package-preferences:
|
||||
|
||||
|
||||
@@ -39,6 +39,7 @@ on these ideas for each distinct build system that Spack supports:
|
||||
|
||||
build_systems/autotoolspackage
|
||||
build_systems/cmakepackage
|
||||
build_systems/cachedcmakepackage
|
||||
build_systems/mesonpackage
|
||||
build_systems/qmakepackage
|
||||
build_systems/sippackage
|
||||
@@ -47,6 +48,7 @@ on these ideas for each distinct build system that Spack supports:
|
||||
:maxdepth: 1
|
||||
:caption: Language-specific
|
||||
|
||||
build_systems/luapackage
|
||||
build_systems/octavepackage
|
||||
build_systems/perlpackage
|
||||
build_systems/pythonpackage
|
||||
|
||||
lib/spack/docs/build_systems/cachedcmakepackage.rst (new file, 123 lines)
@@ -0,0 +1,123 @@
|
||||
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _cachedcmakepackage:
|
||||
|
||||
------------------
|
||||
CachedCMakePackage
|
||||
------------------
|
||||
|
||||
The CachedCMakePackage base class is used for CMake-based workflows
|
||||
that create a CMake cache file prior to running ``cmake``. This is
|
||||
useful for packages with arguments longer than the system limit, and
|
||||
for reproducibility.
|
||||
|
||||
The documentation for this class assumes that the user is familiar with
|
||||
the ``CMakePackage`` class from which it inherits. See the documentation
|
||||
for :ref:`CMakePackage <cmakepackage>`.
|
||||
|
||||
^^^^^^
|
||||
Phases
|
||||
^^^^^^
|
||||
|
||||
The ``CachedCMakePackage`` base class comes with the following phases:
|
||||
|
||||
#. ``initconfig`` - generate the CMake cache file
|
||||
#. ``cmake`` - generate the Makefile
|
||||
#. ``build`` - build the package
|
||||
#. ``install`` - install the package
|
||||
|
||||
By default, these phases run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ mkdir spack-build
|
||||
$ cd spack-build
|
||||
$ cat << EOF > name-arch-compiler@version.cmake
|
||||
# Write information on compilers and dependencies
|
||||
# includes information on mpi and cuda if applicable
EOF
|
||||
$ cmake .. -DCMAKE_INSTALL_PREFIX=/path/to/installation/prefix -C name-arch-compiler@version.cmake
|
||||
$ make
|
||||
$ make test # optional
|
||||
$ make install
|
||||
|
||||
The ``CachedCMakePackage`` class inherits from the ``CMakePackage``
|
||||
class, and accepts all of the same options and adds all of the same
|
||||
flags to the ``cmake`` command. Similar to the ``CMakePackage`` class,
|
||||
you may need to add a few arguments yourself, and the
|
||||
``CachedCMakePackage`` provides the same interface to add those
|
||||
flags.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Adding entries to the CMake cache
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
In addition to adding flags to the ``cmake`` command, you may need to
|
||||
add entries to the CMake cache in the ``initconfig`` phase. This can
|
||||
be done by overriding one of four methods:
|
||||
|
||||
#. ``CachedCMakePackage.initconfig_compiler_entries``
|
||||
#. ``CachedCMakePackage.initconfig_mpi_entries``
|
||||
#. ``CachedCMakePackage.initconfig_hardware_entries``
|
||||
#. ``CachedCMakePackage.initconfig_package_entries``
|
||||
|
||||
Each of these methods returns a list of CMake cache strings. The
|
||||
distinction between these methods is merely to provide a
|
||||
well-structured and legible cmake cache file -- otherwise, entries
|
||||
from each of these methods are handled identically.
|
||||
|
||||
Spack also provides convenience methods for generating CMake cache
|
||||
entries. These methods are available at module scope in every Spack
|
||||
package. Because CMake parses boolean options, strings, and paths
|
||||
differently, there are three such methods:
|
||||
|
||||
#. ``cmake_cache_option``
|
||||
#. ``cmake_cache_string``
|
||||
#. ``cmake_cache_path``
|
||||
|
||||
These methods each accept three parameters -- the name of the CMake
|
||||
variable associated with the entry, the value of the entry, and an
|
||||
optional comment -- and return strings in the appropriate format to be
|
||||
returned from any of the ``initconfig*`` methods. Additionally, these
|
||||
methods may return comments beginning with the ``#`` character.
|
||||
|
||||
A typical usage of these methods may look something like this:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def initconfig_mpi_entries(self):
|
||||
# Get existing MPI configurations
|
||||
entries = super(Foo, self).initconfig_mpi_entries()
|
||||
|
||||
# The existing MPI configurations key on whether ``mpi`` is in the spec
|
||||
# This spec has an MPI variant, and we need to enable MPI when it is on.
|
||||
# This hypothetical package controls MPI with the ``FOO_MPI`` option to
|
||||
# cmake.
|
||||
if '+mpi' in self.spec:
|
||||
entries.append(cmake_cache_option('FOO_MPI', True, "enable mpi"))
|
||||
else:
|
||||
entries.append(cmake_cache_option('FOO_MPI', False, "disable mpi"))
|
||||
|
||||
def initconfig_package_entries(self):
|
||||
# Package specific options
|
||||
entries = []
|
||||
|
||||
entries.append('#Entries for build options')
|
||||
|
||||
bar_on = '+bar' in self.spec
|
||||
entries.append(cmake_cache_option('FOO_BAR', bar_on, 'toggle bar'))
|
||||
|
||||
entries.append('#Entries for dependencies')
|
||||
|
||||
if self.spec['blas'].name == 'baz': # baz is our blas provider
|
||||
entries.append(cmake_cache_string('FOO_BLAS', 'baz', 'Use baz'))
|
||||
entries.append(cmake_cache_path('BAZ_PREFIX', self.spec['baz'].prefix))
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
External documentation
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
For more information on CMake cache files, see:
|
||||
https://cmake.org/cmake/help/latest/manual/cmake.1.html
|
||||
lib/spack/docs/build_systems/luapackage.rst (new file, 105 lines)
@@ -0,0 +1,105 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _luapackage:
|
||||
|
||||
------------
|
||||
LuaPackage
|
||||
------------
|
||||
|
||||
LuaPackage is a helper for the common case of Lua packages that provide
|
||||
a rockspec file. This is not meant to take a rock archive, but to build
|
||||
a source archive or repository that provides a rockspec, which should cover
|
||||
most Lua packages. If a Lua package builds with Make rather than
LuaRocks, prefer ``MakefilePackage``.
|
||||
|
||||
^^^^^^
|
||||
Phases
|
||||
^^^^^^
|
||||
|
||||
The ``LuaPackage`` base class comes with the following phases:
|
||||
|
||||
#. ``unpack`` - if using a rock, unpacks the rock and moves into the source directory
|
||||
#. ``preprocess`` - adjust sources or rockspec to fix build
|
||||
#. ``install`` - install the project
|
||||
|
||||
By default, these phases run:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
# If the archive is a source rock
|
||||
$ luarocks unpack <archive>.src.rock
|
||||
$ # preprocess is a noop by default
|
||||
$ luarocks make <name>.rockspec
|
||||
|
||||
|
||||
Any of these phases can be overridden in your package as necessary.
|
||||
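As a sketch of such an override, a recipe might adjust the rockspec in the ``preprocess`` phase before ``luarocks make`` runs. The package, file name, and substitution below are hypothetical; ``filter_file`` and the ``(self, spec, prefix)`` phase signature are the usual Spack package API assumed to be in scope in a ``package.py``:

.. code-block:: python

   # Hypothetical example: rewrite an insecure URL in the rockspec before installing
   def preprocess(self, spec, prefix):
       filter_file("git://", "https://", "foo-1.0-1.rockspec")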
|
||||
^^^^^^^^^^^^^^^
|
||||
Important files
|
||||
^^^^^^^^^^^^^^^
|
||||
|
||||
Packages that use the Lua/LuaRocks build system can be identified by the
presence of a ``*.rockspec`` file in their source tree, or they can be fetched as
a source rock archive (``.src.rock``). This file declares things like build
instructions and dependencies; the ``.src.rock`` archive also contains all the code.
|
||||
|
||||
It is common for the rockspec file to list the lua version required in
|
||||
a dependency. The LuaPackage class adds appropriate dependencies on a Lua
|
||||
implementation, but it is a good idea to specify the version required with
|
||||
a ``depends_on`` statement. The block normally will be a table definition like
|
||||
this:
|
||||
|
||||
.. code-block:: lua
|
||||
|
||||
dependencies = {
|
||||
"lua >= 5.1",
|
||||
}
|
||||
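A package recipe can mirror that rockspec constraint explicitly. The following is a sketch with a hypothetical package name, URL, and checksum; the ``LuaPackage`` base class and directives such as ``version`` and ``depends_on`` are assumed to be in scope as in any ``package.py``:

.. code-block:: python

   class LuaFoo(LuaPackage):
       """Hypothetical Lua package built from a rockspec."""

       homepage = "https://example.org/lua-foo"
       url = "https://example.org/lua-foo-1.0.src.rock"

       version("1.0", sha256="<sha256 of the source rock>")

       # mirror the rockspec's "lua >= 5.1" requirement, allowing any Lua implementation
       depends_on("lua-lang@5.1:")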
|
||||
The LuaPackage class supports source repositories and archives containing
|
||||
a rockspec and directly downloading source rock files. It *does not* support
|
||||
downloading dependencies listed inside a rockspec, and thus does not support
|
||||
directly downloading a rockspec as an archive.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Build system dependencies
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
All base dependencies are added by the build system, but LuaRocks is run to
|
||||
avoid downloading extra Lua dependencies during build. If the package needs
|
||||
Lua libraries outside the standard set, they should be added as dependencies.
|
||||
|
||||
To specify a Lua version constraint but allow all lua implementations, prefer
|
||||
to use ``depends_on("lua-lang@5.1:5.1.99")`` to express any 5.1 compatible
|
||||
version. If the package requires LuaJit rather than Lua,
|
||||
a ``depends_on("luajit")`` should be used to ensure a LuaJit distribution is
|
||||
used instead of the Lua interpreter. Alternatively, if only interpreted Lua will
work, ``depends_on("lua")`` will express that.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Passing arguments to luarocks make
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you need to pass any arguments to the ``luarocks make`` call, you can
|
||||
override the ``luarocks_args`` method like so:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def luarocks_args(self):
|
||||
return ['flag1', 'flag2']
|
||||
|
||||
One common use of this is to override warnings or flags for newer compilers, as in:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def luarocks_args(self):
|
||||
return ["CFLAGS='-Wno-error=implicit-function-declaration'"]
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
External documentation
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
For more information on the LuaRocks build system, see:
|
||||
https://luarocks.org/
|
||||
@@ -95,7 +95,7 @@ class of your package. For example, you can add it to your
|
||||
# Set up the hip macros needed by the build
|
||||
args.extend([
|
||||
'-DENABLE_HIP=ON',
|
||||
'-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix])
|
||||
'-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)])
|
||||
rocm_archs = spec.variants['amdgpu_target'].value
|
||||
if 'none' not in rocm_archs:
|
||||
args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'
|
||||
|
||||
@@ -23,7 +23,10 @@
|
||||
import sys
|
||||
from glob import glob
|
||||
|
||||
from docutils.statemachine import StringList
|
||||
from sphinx.domains.python import PythonDomain
|
||||
from sphinx.ext.apidoc import main as sphinx_apidoc
|
||||
from sphinx.parsers import RSTParser
|
||||
|
||||
# -- Spack customizations -----------------------------------------------------
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
@@ -82,9 +85,6 @@
|
||||
#
|
||||
# Disable duplicate cross-reference warnings.
|
||||
#
|
||||
from sphinx.domains.python import PythonDomain
|
||||
|
||||
|
||||
class PatchedPythonDomain(PythonDomain):
|
||||
def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
|
||||
if 'refspecific' in node:
|
||||
@@ -92,8 +92,20 @@ def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
|
||||
return super(PatchedPythonDomain, self).resolve_xref(
|
||||
env, fromdocname, builder, typ, target, node, contnode)
|
||||
|
||||
#
|
||||
# Disable tabs to space expansion in code blocks
|
||||
# since Makefiles require tabs.
|
||||
#
|
||||
class NoTabExpansionRSTParser(RSTParser):
|
||||
def parse(self, inputstring, document):
|
||||
if isinstance(inputstring, str):
|
||||
lines = inputstring.splitlines()
|
||||
inputstring = StringList(lines, document.current_source)
|
||||
super().parse(inputstring, document)
|
||||
|
||||
def setup(sphinx):
|
||||
sphinx.add_domain(PatchedPythonDomain, override=True)
|
||||
sphinx.add_source_parser(NoTabExpansionRSTParser, override=True)
|
||||
|
||||
# -- General configuration -----------------------------------------------------
|
||||
|
||||
|
||||
@@ -59,7 +59,8 @@ other techniques to minimize the size of the final image:
|
||||
&& echo " specs:" \
|
||||
&& echo " - gromacs+mpi" \
|
||||
&& echo " - mpich" \
|
||||
&& echo " concretization: together" \
|
||||
&& echo " concretizer: together" \
|
||||
&& echo " unify: true" \
|
||||
&& echo " config:" \
|
||||
&& echo " install_tree: /opt/software" \
|
||||
&& echo " view: /opt/view") > /opt/spack-environment/spack.yaml
|
||||
@@ -245,7 +246,8 @@ software is respectively built and installed:
|
||||
&& echo " specs:" \
|
||||
&& echo " - gromacs+mpi" \
|
||||
&& echo " - mpich" \
|
||||
&& echo " concretization: together" \
|
||||
&& echo " concretizer:" \
|
||||
&& echo " unify: true" \
|
||||
&& echo " config:" \
|
||||
&& echo " install_tree: /opt/software" \
|
||||
&& echo " view: /opt/view") > /opt/spack-environment/spack.yaml
|
||||
@@ -366,7 +368,8 @@ produces, for instance, the following ``Dockerfile``:
|
||||
&& echo " externals:" \
|
||||
&& echo " - spec: cuda%gcc" \
|
||||
&& echo " prefix: /usr/local/cuda" \
|
||||
&& echo " concretization: together" \
|
||||
&& echo " concretizer:" \
|
||||
&& echo " unify: true" \
|
||||
&& echo " config:" \
|
||||
&& echo " install_tree: /opt/software" \
|
||||
&& echo " view: /opt/view") > /opt/spack-environment/spack.yaml
|
||||
|
||||
@@ -281,8 +281,8 @@ need to be installed alongside each other. Central installations done
|
||||
at HPC centers by system administrators or user support groups
|
||||
are a common case that fits in this behavior.
|
||||
Environments *can also be configured to concretize all
|
||||
the root specs in a self-consistent way* to ensure that
|
||||
each package in the environment comes with a single configuration. This
|
||||
the root specs in a unified way* to ensure that
|
||||
each package in the environment corresponds to a single concrete spec. This
|
||||
mode of operation is usually what is required by software developers that
|
||||
want to deploy their development environment.
|
||||
|
||||
@@ -349,6 +349,24 @@ If the Environment has been concretized, Spack will install the
|
||||
concretized specs. Otherwise, ``spack install`` will first concretize
|
||||
the Environment and then install the concretized specs.
|
||||
|
||||
.. note::
|
||||
|
||||
Every ``spack install`` process builds one package at a time with multiple build
|
||||
jobs, controlled by the ``-j`` flag and the ``config:build_jobs`` option
|
||||
(see :ref:`build-jobs`). To speed up environment builds further, independent
|
||||
packages can be installed in parallel by launching more Spack instances. For
|
||||
example, the following will build at most four packages in parallel using
|
||||
three background jobs:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
[myenv]$ spack install & spack install & spack install & spack install
|
||||
|
||||
Another option is to generate a ``Makefile`` and run ``make -j<N>`` to control
|
||||
the number of parallel install processes. See :ref:`env-generate-depfile`
|
||||
for details.
|
||||
|
||||
|
||||
As it installs, ``spack install`` creates symbolic links in the
|
||||
``logs/`` directory in the Environment, allowing for easy inspection
|
||||
of build logs related to that environment. The ``spack install``
|
||||
@@ -481,7 +499,7 @@ Spec concretization
|
||||
|
||||
Specs can be concretized separately or together, as already
|
||||
explained in :ref:`environments_concretization`. The behavior active
|
||||
under any environment is determined by the ``concretization`` property:
|
||||
under any environment is determined by the ``concretizer:unify`` property:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
@@ -491,10 +509,15 @@ under any environment is determined by the ``concretization`` property:
|
||||
- netcdf
|
||||
- nco
|
||||
- py-sphinx
|
||||
concretization: together
|
||||
concretizer:
|
||||
unify: true
|
||||
|
||||
which can currently take either one of the two allowed values ``together`` or ``separately``
|
||||
(the default).
|
||||
.. note::
|
||||
|
||||
The ``concretizer:unify`` config option was introduced in Spack 0.18 to
|
||||
replace the ``concretization`` property. For reference,
|
||||
``concretization: separately`` is replaced by ``concretizer:unify:false``,
and ``concretization: together`` is replaced by ``concretizer:unify:true``.
|
||||
|
||||
.. admonition:: Re-concretization of user specs
|
||||
|
||||
@@ -910,3 +933,93 @@ environment.
|
||||
|
||||
The ``spack env deactivate`` command will remove the default view of
|
||||
the environment from the user's path.
|
||||
|
||||
|
||||
.. _env-generate-depfile:
|
||||
|
||||
|
||||
------------------------------------------
|
||||
Generating Depfiles from Environments
|
||||
------------------------------------------
|
||||
|
||||
Spack can generate ``Makefile``\s to make it easier to build multiple
|
||||
packages in an environment in parallel. Generated ``Makefile``\s expose
|
||||
targets that can be included in existing ``Makefile``\s, to allow
|
||||
other targets to depend on the environment installation.
|
||||
|
||||
A typical workflow is as follows:
|
||||
|
||||
.. code:: console
|
||||
|
||||
spack env create -d .
|
||||
spack -e . add perl
|
||||
spack -e . concretize
|
||||
spack -e . env depfile > Makefile
|
||||
make -j64
|
||||
|
||||
This generates a ``Makefile`` from a concretized environment in the
|
||||
current working directory, and ``make -j64`` installs the environment,
|
||||
exploiting parallelism across packages as much as possible. Spack
|
||||
respects the Make jobserver and forwards it to the build environment
|
||||
of packages, meaning that a single ``-j`` flag is enough to control the
|
||||
load, even when packages are built in parallel.
|
||||
|
||||
By default the following phony convenience targets are available:
|
||||
|
||||
- ``make all``: installs the environment (default target);
|
||||
- ``make fetch-all``: only fetch sources of all packages;
|
||||
- ``make clean``: cleans files used by make, but does not uninstall packages.
|
||||
|
||||
.. tip::
|
||||
|
||||
GNU Make version 4.3 and above have great support for output synchronization
|
||||
through the ``-O`` and ``--output-sync`` flags, which ensure that output is
|
||||
printed orderly per package install. To get synchronized output with colors,
|
||||
use ``make -j<N> SPACK_COLOR=always --output-sync=recurse``.
|
||||
|
||||
The following advanced example shows how generated targets can be used in a
|
||||
``Makefile``:
|
||||
|
||||
.. code:: Makefile
|
||||
|
||||
SPACK ?= spack
|
||||
|
||||
.PHONY: all clean fetch env
|
||||
|
||||
all: env
|
||||
|
||||
spack.lock: spack.yaml
|
||||
$(SPACK) -e . concretize -f
|
||||
|
||||
env.mk: spack.lock
|
||||
$(SPACK) -e . env depfile -o $@ --make-target-prefix spack
|
||||
|
||||
fetch: spack/fetch
|
||||
$(info Environment fetched!)
|
||||
|
||||
env: spack/env
|
||||
$(info Environment installed!)
|
||||
|
||||
clean:
|
||||
rm -rf spack.lock env.mk spack/
|
||||
|
||||
ifeq (,$(filter clean,$(MAKECMDGOALS)))
|
||||
include env.mk
|
||||
endif
|
||||
|
||||
When ``make`` is invoked, it first "remakes" the missing include ``env.mk``
|
||||
from its rule, which triggers concretization. When done, the generated targets
|
||||
``spack/fetch`` and ``spack/env`` are available. In the above
|
||||
example, the ``env`` target uses the latter as a prerequisite, meaning
|
||||
that it can make use of the installed packages in its commands.
|
||||
|
||||
As it is typically undesirable to remake ``env.mk`` as part of ``make clean``,
|
||||
the include is conditional.
|
||||
|
||||
.. note::
|
||||
|
||||
When including generated ``Makefile``\s, it is important to use
|
||||
the ``--make-target-prefix`` flag and use the non-phony targets
|
||||
``<target-prefix>/env`` and ``<target-prefix>/fetch`` as
|
||||
prerequisites, instead of the phony targets ``<target-prefix>/all``
|
||||
and ``<target-prefix>/fetch-all`` respectively.
|
||||
@@ -5323,7 +5323,7 @@ would be quite complicated to do using regex only. Employing the
|
||||
.. code-block:: python
|
||||
|
||||
class Gcc(Package):
|
||||
executables = [r'g\+\+']
|
||||
executables = ['g++']
|
||||
|
||||
def filter_detected_exes(cls, prefix, exes_in_prefix):
|
||||
return [x for x in exes_in_prefix if 'clang' not in x]
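For completeness, here is a hedged sketch of how this hook usually sits next
to ``determine_version``, the companion hook Spack calls on each executable
that survives the filter (the version-parsing details are illustrative, not
taken from the real ``gcc`` package):

.. code-block:: python

   import re

   from spack.util.executable import Executable


   class Gcc(Package):
       executables = ['g++']

       @classmethod
       def filter_detected_exes(cls, prefix, exes_in_prefix):
           # Drop g++ binaries that are really Clang in disguise (e.g. on macOS).
           return [x for x in exes_in_prefix if 'clang' not in x]

       @classmethod
       def determine_version(cls, exe):
           # Illustrative only: parse "g++ (GCC) X.Y.Z" from the tool's output.
           output = Executable(exe)('--version', output=str, error=str)
           match = re.search(r'\(GCC\) ([\d.]+)', output)
           return match.group(1) if match else None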
|
||||
|
||||
@@ -115,7 +115,8 @@ And here's the spack environment built by the pipeline represented as a
|
||||
|
||||
spack:
|
||||
view: false
|
||||
concretization: separately
|
||||
concretizer:
|
||||
unify: false
|
||||
|
||||
definitions:
|
||||
- pkgs:
|
||||
|
||||
@@ -61,7 +61,7 @@ You can see the packages we added earlier in the ``specs:`` section. If you
|
||||
ever want to add more packages, you can either use ``spack add`` or manually
|
||||
edit this file.
|
||||
|
||||
We also need to change the ``concretization:`` option. By default, Spack
|
||||
We also need to change the ``concretizer:unify`` option. By default, Spack
|
||||
concretizes each spec *separately*, allowing multiple versions of the same
|
||||
package to coexist. Since we want a single consistent environment, we want to
|
||||
concretize all of the specs *together*.
|
||||
@@ -78,7 +78,8 @@ Here is what your ``spack.yaml`` looks like with this new setting:
|
||||
# add package specs to the `specs` list
|
||||
specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
|
||||
view: true
|
||||
concretization: together
|
||||
concretizer:
|
||||
unify: true
|
||||
|
||||
^^^^^^^^^^^^^^^^
|
||||
Symlink location
|
||||
|
||||
@@ -25,4 +25,5 @@ spack:
|
||||
- subversion
|
||||
# Plotting
|
||||
- graphviz
|
||||
concretization: together
|
||||
concretizer:
|
||||
unify: true
|
||||
|
||||
lib/spack/env/cc
@@ -1,4 +1,4 @@
|
||||
#!/bin/sh
|
||||
#!/bin/sh -f
|
||||
# shellcheck disable=SC2034 # evals in this script fool shellcheck
|
||||
#
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
@@ -241,14 +241,14 @@ case "$command" in
|
||||
mode=cpp
|
||||
debug_flags="-g"
|
||||
;;
|
||||
cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe|emcc)
|
||||
cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe)
|
||||
command="$SPACK_CC"
|
||||
language="C"
|
||||
comp="CC"
|
||||
lang_flags=C
|
||||
debug_flags="-g"
|
||||
;;
|
||||
c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|em++)
|
||||
c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++)
|
||||
command="$SPACK_CXX"
|
||||
language="C++"
|
||||
comp="CXX"
|
||||
@@ -401,7 +401,8 @@ input_command="$*"
|
||||
# command line and recombine them with Spack arguments later. We
|
||||
# parse these out so that we can make sure that system paths come
|
||||
# last, that package arguments come first, and that Spack arguments
|
||||
# are injected properly.
|
||||
# are injected properly. Based on configuration, we also strip -Werror
|
||||
# arguments.
|
||||
#
|
||||
# All other arguments, including -l arguments, are treated as
|
||||
# 'other_args' and left in their original order. This ensures that
|
||||
@@ -440,6 +441,29 @@ while [ $# -ne 0 ]; do
|
||||
continue
|
||||
fi
|
||||
|
||||
if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
|
||||
# NOTE: the eval is required to allow `|` alternatives inside the variable
|
||||
eval "\
|
||||
case '$1' in
|
||||
$SPACK_COMPILER_FLAGS_KEEP)
|
||||
append other_args_list "$1"
|
||||
shift
|
||||
continue
|
||||
;;
|
||||
esac
|
||||
"
|
||||
fi
|
||||
if [ -n "${SPACK_COMPILER_FLAGS_REMOVE}" ] ; then
|
||||
eval "\
|
||||
case '$1' in
|
||||
$SPACK_COMPILER_FLAGS_REMOVE)
|
||||
shift
|
||||
continue
|
||||
;;
|
||||
esac
|
||||
"
|
||||
fi
|
||||
|
||||
case "$1" in
|
||||
-isystem*)
|
||||
arg="${1#-isystem}"
|
||||
@@ -768,7 +792,9 @@ if [ "$SPACK_DEBUG" = TRUE ]; then
|
||||
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
|
||||
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
|
||||
echo "[$mode] $command $input_command" >> "$input_log"
|
||||
echo "[$mode] ${full_command_list}" >> "$output_log"
|
||||
IFS="$lsep"
|
||||
echo "[$mode] "$full_command_list >> "$output_log"
|
||||
unset IFS
|
||||
fi
|
||||
|
||||
# Execute the full command, preserving spaces with IFS set
|
||||
|
||||
lib/spack/env/emscripten/em++
@@ -1 +0,0 @@
|
||||
../cc
|
||||
lib/spack/env/emscripten/emcc
@@ -1 +0,0 @@
|
||||
../cc
|
||||
lib/spack/external/__init__.py
@@ -18,7 +18,7 @@
|
||||
|
||||
* Homepage: https://pypi.python.org/pypi/archspec
|
||||
* Usage: Labeling, comparison and detection of microarchitectures
|
||||
* Version: 0.1.2 (commit 85757b6666422fca86aa882a769bf78b0f992f54)
|
||||
* Version: 0.1.4 (commit 53fc4ac91e9b4c5e4079f15772503a80bece72ad)
|
||||
|
||||
argparse
|
||||
--------
|
||||
|
||||
lib/spack/external/archspec/cpu/detect.py
@@ -61,7 +61,7 @@ def proc_cpuinfo():
|
||||
``/proc/cpuinfo``
|
||||
"""
|
||||
info = {}
|
||||
with open("/proc/cpuinfo") as file:
|
||||
with open("/proc/cpuinfo") as file: # pylint: disable=unspecified-encoding
|
||||
for line in file:
|
||||
key, separator, value = line.partition(":")
|
||||
|
||||
@@ -80,26 +80,46 @@ def proc_cpuinfo():
|
||||
|
||||
|
||||
def _check_output(args, env):
|
||||
output = subprocess.Popen(args, stdout=subprocess.PIPE, env=env).communicate()[0]
|
||||
output = subprocess.Popen( # pylint: disable=consider-using-with
|
||||
args, stdout=subprocess.PIPE, env=env
|
||||
).communicate()[0]
|
||||
return six.text_type(output.decode("utf-8"))
|
||||
|
||||
|
||||
def _machine():
|
||||
""" "Return the machine architecture we are on"""
|
||||
operating_system = platform.system()
|
||||
|
||||
# If we are not on Darwin, trust what Python tells us
|
||||
if operating_system != "Darwin":
|
||||
return platform.machine()
|
||||
|
||||
# On Darwin it might happen that we are on M1, but using an interpreter
|
||||
# built for x86_64. In that case "platform.machine() == 'x86_64'", so we
|
||||
# need to fix that.
|
||||
#
|
||||
# See: https://bugs.python.org/issue42704
|
||||
output = _check_output(
|
||||
["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
|
||||
).strip()
|
||||
|
||||
if "Apple" in output:
|
||||
# Note that a native Python interpreter on Apple M1 would return
|
||||
# "arm64" instead of "aarch64". Here we normalize to the latter.
|
||||
return "aarch64"
|
||||
|
||||
return "x86_64"
|
||||
|
||||
|
||||
@info_dict(operating_system="Darwin")
|
||||
def sysctl_info_dict():
|
||||
"""Returns a raw info dictionary parsing the output of sysctl."""
|
||||
# Make sure that /sbin and /usr/sbin are in PATH as sysctl is
|
||||
# usually found there
|
||||
child_environment = dict(os.environ.items())
|
||||
search_paths = child_environment.get("PATH", "").split(os.pathsep)
|
||||
for additional_path in ("/sbin", "/usr/sbin"):
|
||||
if additional_path not in search_paths:
|
||||
search_paths.append(additional_path)
|
||||
child_environment["PATH"] = os.pathsep.join(search_paths)
|
||||
child_environment = _ensure_bin_usrbin_in_path()
|
||||
|
||||
def sysctl(*args):
|
||||
return _check_output(["sysctl"] + list(args), env=child_environment).strip()
|
||||
|
||||
if platform.machine() == "x86_64":
|
||||
if _machine() == "x86_64":
|
||||
flags = (
|
||||
sysctl("-n", "machdep.cpu.features").lower()
|
||||
+ " "
|
||||
@@ -125,6 +145,18 @@ def sysctl(*args):
|
||||
return info
|
||||
|
||||
|
||||
def _ensure_bin_usrbin_in_path():
|
||||
# Make sure that /sbin and /usr/sbin are in PATH as sysctl is
|
||||
# usually found there
|
||||
child_environment = dict(os.environ.items())
|
||||
search_paths = child_environment.get("PATH", "").split(os.pathsep)
|
||||
for additional_path in ("/sbin", "/usr/sbin"):
|
||||
if additional_path not in search_paths:
|
||||
search_paths.append(additional_path)
|
||||
child_environment["PATH"] = os.pathsep.join(search_paths)
|
||||
return child_environment
|
||||
|
||||
|
||||
def adjust_raw_flags(info):
|
||||
"""Adjust the flags detected on the system to homogenize
|
||||
slightly different representations.
|
||||
@@ -184,12 +216,7 @@ def compatible_microarchitectures(info):
|
||||
Args:
|
||||
info (dict): dictionary containing information on the host cpu
|
||||
"""
|
||||
architecture_family = platform.machine()
|
||||
# On Apple M1 platform.machine() returns "arm64" instead of "aarch64"
|
||||
# so we should normalize the name here
|
||||
if architecture_family == "arm64":
|
||||
architecture_family = "aarch64"
|
||||
|
||||
architecture_family = _machine()
|
||||
# If a tester is not registered, be conservative and assume no known
|
||||
# target is compatible with the host
|
||||
tester = COMPATIBILITY_CHECKS.get(architecture_family, lambda x, y: False)
|
||||
@@ -244,12 +271,7 @@ def compatibility_check(architecture_family):
|
||||
architecture_family = (architecture_family,)
|
||||
|
||||
def decorator(func):
|
||||
# pylint: disable=fixme
|
||||
# TODO: on removal of Python 2.6 support this can be re-written as
|
||||
# TODO: an update + a dict comprehension
|
||||
for arch_family in architecture_family:
|
||||
COMPATIBILITY_CHECKS[arch_family] = func
|
||||
|
||||
COMPATIBILITY_CHECKS.update({family: func for family in architecture_family})
|
||||
return func
|
||||
|
||||
return decorator
|
||||
@@ -288,7 +310,7 @@ def compatibility_check_for_x86_64(info, target):
|
||||
arch_root = TARGETS[basename]
|
||||
return (
|
||||
(target == arch_root or arch_root in target.ancestors)
|
||||
and (target.vendor == vendor or target.vendor == "generic")
|
||||
and target.vendor in (vendor, "generic")
|
||||
and target.features.issubset(features)
|
||||
)
|
||||
|
||||
@@ -303,8 +325,9 @@ def compatibility_check_for_aarch64(info, target):
|
||||
arch_root = TARGETS[basename]
|
||||
return (
|
||||
(target == arch_root or arch_root in target.ancestors)
|
||||
and (target.vendor == vendor or target.vendor == "generic")
|
||||
and target.features.issubset(features)
|
||||
and target.vendor in (vendor, "generic")
|
||||
# On macOS it seems impossible to get all the CPU features with syctl info
|
||||
and (target.features.issubset(features) or platform.system() == "Darwin")
|
||||
)
|
||||
|
||||
|
||||
|
||||
lib/spack/external/archspec/cpu/schema.py
@@ -11,7 +11,7 @@
|
||||
try:
|
||||
from collections.abc import MutableMapping # novm
|
||||
except ImportError:
|
||||
from collections import MutableMapping
|
||||
from collections import MutableMapping # pylint: disable=deprecated-class
|
||||
|
||||
|
||||
class LazyDictionary(MutableMapping):
|
||||
@@ -56,7 +56,7 @@ def _load_json_file(json_file):
|
||||
|
||||
def _factory():
|
||||
filename = os.path.join(json_dir, json_file)
|
||||
with open(filename, "r") as file:
|
||||
with open(filename, "r") as file: # pylint: disable=unspecified-encoding
|
||||
return json.load(file)
|
||||
|
||||
return _factory
|
||||
|
||||
@@ -88,6 +88,20 @@
|
||||
"name": "pentium4",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "pentium4",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "pentium4",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -291,6 +305,20 @@
|
||||
"name": "pentium4",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "pentium4",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "pentium4",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -333,6 +361,18 @@
|
||||
"versions": "16.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -384,6 +424,20 @@
|
||||
"name": "corei7",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "corei7",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "corei7",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -432,6 +486,20 @@
|
||||
"name": "corei7",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "corei7",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "corei7",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -490,6 +558,18 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -550,6 +630,18 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -615,6 +707,18 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -672,6 +776,18 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -732,6 +848,18 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -798,6 +926,20 @@
|
||||
"name": "knl",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "knl",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "knl",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -868,6 +1010,20 @@
|
||||
"name": "skylake-avx512",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "skylake-avx512",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "skylake-avx512",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -937,6 +1093,18 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1004,6 +1172,18 @@
|
||||
"versions": "19.0.1:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1098,6 +1278,20 @@
|
||||
"name": "icelake-client",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "icelake-client",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "icelake-client",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1142,6 +1336,20 @@
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse2"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse2"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse2"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1192,6 +1400,20 @@
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1246,6 +1468,20 @@
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1301,6 +1537,20 @@
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse4.2"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse4.2"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse4.2"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1360,6 +1610,22 @@
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1422,6 +1688,22 @@
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1485,6 +1767,22 @@
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1543,6 +1841,30 @@
|
||||
"name": "znver3",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"intel": [
|
||||
{
|
||||
"versions": "16.0:",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1788,7 +2110,6 @@
|
||||
"fp",
|
||||
"asimd",
|
||||
"evtstrm",
|
||||
"aes",
|
||||
"pmull",
|
||||
"sha1",
|
||||
"sha2",
|
||||
@@ -1821,18 +2142,26 @@
|
||||
"flags": "-march=armv8.2-a+crc+crypto+fp16"
|
||||
},
|
||||
{
|
||||
"versions": "8:",
|
||||
"flags": "-march=armv8.2-a+crc+aes+sha2+fp16+sve -msve-vector-bits=512"
|
||||
"versions": "8:10.2",
|
||||
"flags": "-march=armv8.2-a+crc+sha2+fp16+sve -msve-vector-bits=512"
|
||||
},
|
||||
{
|
||||
"versions": "10.3:",
|
||||
"flags": "-mcpu=a64fx -msve-vector-bits=512"
|
||||
}
|
||||
],
|
||||
"clang": [
|
||||
{
|
||||
"versions": "3.9:4.9",
|
||||
"flags": "-march=armv8.2-a+crc+crypto+fp16"
|
||||
"flags": "-march=armv8.2-a+crc+sha2+fp16"
|
||||
},
|
||||
{
|
||||
"versions": "5:",
|
||||
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
|
||||
"versions": "5:10",
|
||||
"flags": "-march=armv8.2-a+crc+sha2+fp16+sve"
|
||||
},
|
||||
{
|
||||
"versions": "11:",
|
||||
"flags": "-mcpu=a64fx"
|
||||
}
|
||||
],
|
||||
"arm": [
|
||||
@@ -1954,7 +2283,40 @@
|
||||
"m1": {
|
||||
"from": ["aarch64"],
|
||||
"vendor": "Apple",
|
||||
"features": [],
|
||||
"features": [
|
||||
"fp",
|
||||
"asimd",
|
||||
"evtstrm",
|
||||
"aes",
|
||||
"pmull",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"crc32",
|
||||
"atomics",
|
||||
"fphp",
|
||||
"asimdhp",
|
||||
"cpuid",
|
||||
"asimdrdm",
|
||||
"jscvt",
|
||||
"fcma",
|
||||
"lrcpc",
|
||||
"dcpop",
|
||||
"sha3",
|
||||
"asimddp",
|
||||
"sha512",
|
||||
"asimdfhm",
|
||||
"dit",
|
||||
"uscat",
|
||||
"ilrcpc",
|
||||
"flagm",
|
||||
"ssbs",
|
||||
"sb",
|
||||
"paca",
|
||||
"pacg",
|
||||
"dcpodp",
|
||||
"flagm2",
|
||||
"frint"
|
||||
],
|
||||
"compilers": {
|
||||
"gcc": [
|
||||
{
|
||||
@@ -1964,14 +2326,22 @@
|
||||
],
|
||||
"clang" : [
|
||||
{
|
||||
"versions": "9.0:",
|
||||
"versions": "9.0:12.0",
|
||||
"flags" : "-march=armv8.4-a"
|
||||
},
|
||||
{
|
||||
"versions": "13.0:",
|
||||
"flags" : "-mcpu=apple-m1"
|
||||
}
|
||||
],
|
||||
"apple-clang": [
|
||||
{
|
||||
"versions": "11.0:",
|
||||
"versions": "11.0:12.5",
|
||||
"flags" : "-march=armv8.4-a"
|
||||
},
|
||||
{
|
||||
"versions": "13.0:",
|
||||
"flags" : "-mcpu=apple-m1"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -64,6 +64,7 @@
|
||||
'is_exe',
|
||||
'join_path',
|
||||
'last_modification_time_recursive',
|
||||
'library_extensions',
|
||||
'mkdirp',
|
||||
'partition_path',
|
||||
'prefixes',
|
||||
@@ -109,12 +110,15 @@ def path_contains_subdirectory(path, root):
|
||||
return norm_path.startswith(norm_root)
|
||||
|
||||
|
||||
#: This generates the library filenames that may appear on any OS.
|
||||
library_extensions = ['a', 'la', 'so', 'tbd', 'dylib']
|
||||
|
||||
|
||||
def possible_library_filenames(library_names):
|
||||
"""Given a collection of library names like 'libfoo', generate the set of
|
||||
library filenames that may be found on the system (e.g. libfoo.so). This
|
||||
generates the library filenames that may appear on any OS.
|
||||
library filenames that may be found on the system (e.g. libfoo.so).
|
||||
"""
|
||||
lib_extensions = ['a', 'la', 'so', 'tbd', 'dylib']
|
||||
lib_extensions = library_extensions
|
||||
return set(
|
||||
'.'.join((lib, extension)) for lib, extension in
|
||||
itertools.product(library_names, lib_extensions))
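With the shared ``library_extensions`` list, the helper simply takes the
cross product of names and extensions; a doctest-style sketch of the result:

.. code-block:: python

   >>> sorted(possible_library_filenames(['libfoo', 'libbar']))
   ['libbar.a', 'libbar.dylib', 'libbar.la', 'libbar.so', 'libbar.tbd',
    'libfoo.a', 'libfoo.dylib', 'libfoo.la', 'libfoo.so', 'libfoo.tbd']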
|
||||
@@ -363,7 +367,7 @@ def group_ids(uid=None):
|
||||
|
||||
|
||||
@system_path_filter(arg_slice=slice(1))
|
||||
def chgrp(path, group):
|
||||
def chgrp(path, group, follow_symlinks=True):
|
||||
"""Implement the bash chgrp function on a single path"""
|
||||
if is_windows:
|
||||
raise OSError("Function 'chgrp' is not supported on Windows")
|
||||
@@ -372,7 +376,10 @@ def chgrp(path, group):
|
||||
gid = grp.getgrnam(group).gr_gid
|
||||
else:
|
||||
gid = group
|
||||
os.chown(path, -1, gid)
|
||||
if follow_symlinks:
|
||||
os.chown(path, -1, gid)
|
||||
else:
|
||||
os.lchown(path, -1, gid)
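The new keyword only changes which inode is modified; a short usage sketch
(the path and group name are illustrative):

.. code-block:: python

   # Follow the symlink and change the group of its target (default behavior):
   chgrp('/opt/view/bin/python', 'spack')

   # Change the group of the symlink itself and leave the target untouched:
   chgrp('/opt/view/bin/python', 'spack', follow_symlinks=False)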
|
||||
|
||||
|
||||
@system_path_filter(arg_slice=slice(1))
|
||||
@@ -764,39 +771,36 @@ def __init__(self, inner_exception, outer_exception):
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def replace_directory_transaction(directory_name, tmp_root=None):
|
||||
"""Moves a directory to a temporary space. If the operations executed
|
||||
within the context manager don't raise an exception, the directory is
|
||||
deleted. If there is an exception, the move is undone.
|
||||
def replace_directory_transaction(directory_name):
|
||||
"""Temporarily renames a directory in the same parent dir. If the operations
|
||||
executed within the context manager don't raise an exception, the renamed directory
|
||||
is deleted. If there is an exception, the move is undone.
|
||||
|
||||
Args:
|
||||
directory_name (path): absolute path of the directory name
|
||||
tmp_root (path): absolute path of the parent directory where to create
|
||||
the temporary
|
||||
|
||||
Returns:
|
||||
temporary directory where ``directory_name`` has been moved
|
||||
"""
|
||||
# Check the input is indeed a directory with absolute path.
|
||||
# Raise before anything is done to avoid moving the wrong directory
|
||||
assert os.path.isdir(directory_name), \
|
||||
'Invalid directory: ' + directory_name
|
||||
assert os.path.isabs(directory_name), \
|
||||
'"directory_name" must contain an absolute path: ' + directory_name
|
||||
directory_name = os.path.abspath(directory_name)
|
||||
assert os.path.isdir(directory_name), 'Not a directory: ' + directory_name
|
||||
|
||||
directory_basename = os.path.basename(directory_name)
|
||||
# Note: directory_name is normalized here, meaning the trailing slash is dropped,
|
||||
# so dirname is the directory's parent not the directory itself.
|
||||
tmpdir = tempfile.mkdtemp(
|
||||
dir=os.path.dirname(directory_name),
|
||||
prefix='.backup')
|
||||
|
||||
if tmp_root is not None:
|
||||
assert os.path.isabs(tmp_root)
|
||||
|
||||
tmp_dir = tempfile.mkdtemp(dir=tmp_root)
|
||||
tty.debug('Temporary directory created [{0}]'.format(tmp_dir))
|
||||
|
||||
shutil.move(src=directory_name, dst=tmp_dir)
|
||||
tty.debug('Directory moved [src={0}, dest={1}]'.format(directory_name, tmp_dir))
|
||||
# We have to jump through hoops to support Windows, since
|
||||
# os.rename(directory_name, tmpdir) errors there.
|
||||
backup_dir = os.path.join(tmpdir, 'backup')
|
||||
os.rename(directory_name, backup_dir)
|
||||
tty.debug('Directory moved [src={0}, dest={1}]'.format(directory_name, backup_dir))
|
||||
|
||||
try:
|
||||
yield tmp_dir
|
||||
yield backup_dir
|
||||
except (Exception, KeyboardInterrupt, SystemExit) as inner_exception:
|
||||
# Try to recover the original directory, if this fails, raise a
|
||||
# composite exception.
|
||||
@@ -804,10 +808,7 @@ def replace_directory_transaction(directory_name, tmp_root=None):
|
||||
# Delete what was there, before copying back the original content
|
||||
if os.path.exists(directory_name):
|
||||
shutil.rmtree(directory_name)
|
||||
shutil.move(
|
||||
src=os.path.join(tmp_dir, directory_basename),
|
||||
dst=os.path.dirname(directory_name)
|
||||
)
|
||||
os.rename(backup_dir, directory_name)
|
||||
except Exception as outer_exception:
|
||||
raise CouldNotRestoreDirectoryBackup(inner_exception, outer_exception)
|
||||
|
||||
@@ -815,8 +816,8 @@ def replace_directory_transaction(directory_name, tmp_root=None):
|
||||
raise
|
||||
else:
|
||||
# Otherwise delete the temporary directory
|
||||
shutil.rmtree(tmp_dir, ignore_errors=True)
|
||||
tty.debug('Temporary directory deleted [{0}]'.format(tmp_dir))
|
||||
shutil.rmtree(tmpdir, ignore_errors=True)
|
||||
tty.debug('Temporary directory deleted [{0}]'.format(tmpdir))
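A minimal usage sketch of the reworked context manager (the prefix path and
the ``repopulate_prefix`` helper are illustrative):

.. code-block:: python

   prefix = '/opt/spack/opt/linux-x86_64/gcc-9.4.0/zlib-1.2.12-abcdef'

   with replace_directory_transaction(prefix) as backup_dir:
       # The old content now lives in backup_dir, inside a sibling '.backup*'
       # temporary directory. If anything in this block raises, the original
       # directory is restored; otherwise the backup is deleted on exit.
       repopulate_prefix(prefix)  # illustrative helper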
|
||||
|
||||
|
||||
@system_path_filter
|
||||
@@ -1097,7 +1098,32 @@ def visit_directory_tree(root, visitor, rel_path='', depth=0):
|
||||
for f in dir_entries:
|
||||
if sys.version_info >= (3, 5, 0):
|
||||
rel_child = os.path.join(rel_path, f.name)
|
||||
islink, isdir = f.is_symlink(), f.is_dir()
|
||||
islink = f.is_symlink()
|
||||
# On Windows, symlinks to directories are distinct from
|
||||
# symlinks to files, and it is possible to create a
|
||||
# broken symlink to a directory (e.g. using os.symlink
|
||||
# without `target_is_directory=True`), invoking `isdir`
|
||||
# on a symlink on Windows that is broken in this manner
|
||||
# will result in an error. In this case we can work around
|
||||
# the issue by reading the target and resolving the
|
||||
# directory ourselves
|
||||
try:
|
||||
isdir = f.is_dir()
|
||||
except OSError as e:
|
||||
if is_windows and hasattr(e, 'winerror')\
|
||||
and e.winerror == 5 and islink:
|
||||
# if path is a symlink, determine destination and
|
||||
# evaluate file vs directory
|
||||
link_target = resolve_link_target_relative_to_the_link(f)
|
||||
# link_target might be relative but
|
||||
# resolve_link_target_relative_to_the_link
|
||||
# will ensure that if so, that it is relative
|
||||
# to the CWD and therefore
|
||||
# makes sense
|
||||
isdir = os.path.isdir(link_target)
|
||||
else:
|
||||
raise e
|
||||
|
||||
else:
|
||||
rel_child = os.path.join(rel_path, f)
|
||||
lexists, islink, isdir = lexists_islink_isdir(os.path.join(dir, f))
|
||||
@@ -1105,7 +1131,7 @@ def visit_directory_tree(root, visitor, rel_path='', depth=0):
|
||||
continue
|
||||
|
||||
if not isdir:
|
||||
# Handle files
|
||||
# handle files
|
||||
visitor.visit_file(root, rel_child, depth)
|
||||
elif not islink and visitor.before_visit_dir(root, rel_child, depth):
|
||||
# Handle ordinary directories
|
||||
@@ -1180,6 +1206,35 @@ def remove_if_dead_link(path):
|
||||
os.unlink(path)
|
||||
|
||||
|
||||
def readonly_file_handler(ignore_errors=False):
|
||||
# TODO: generate stages etc. with write permissions wherever
|
||||
# so this callback is no-longer required
|
||||
"""
|
||||
Generate callback for shutil.rmtree to handle permissions errors on
|
||||
Windows. Some files may unexpectedly lack write permissions even
|
||||
though they were generated by Spack on behalf of the user (e.g. the
|
||||
stage), so this callback will detect such cases and modify the
|
||||
permissions if that is the issue. For other errors, the fallback
|
||||
is either to raise (if ignore_errors is False) or ignore (if
|
||||
ignore_errors is True). This is only intended for Windows systems
|
||||
and will raise a separate error if it is ever invoked (by accident)
|
||||
on a non-Windows system.
|
||||
"""
|
||||
def error_remove_readonly(func, path, exc):
|
||||
if not is_windows:
|
||||
raise RuntimeError("This method should only be invoked on Windows")
|
||||
excvalue = exc[1]
|
||||
if is_windows and func in (os.rmdir, os.remove, os.unlink) and\
|
||||
excvalue.errno == errno.EACCES:
|
||||
# change the file to be readable,writable,executable: 0777
|
||||
os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
|
||||
# retry
|
||||
func(path)
|
||||
elif not ignore_errors:
|
||||
raise
|
||||
return error_remove_readonly
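As the ``remove_linked_tree`` change below shows, the returned callback plugs
directly into ``shutil.rmtree``; a minimal sketch (``stage_path`` is
illustrative):

.. code-block:: python

   import shutil

   # Retry removal of read-only files on Windows instead of failing outright.
   shutil.rmtree(stage_path, onerror=readonly_file_handler(ignore_errors=True))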
|
||||
|
||||
|
||||
@system_path_filter
|
||||
def remove_linked_tree(path):
|
||||
"""Removes a directory and its contents.
|
||||
@@ -1187,23 +1242,18 @@ def remove_linked_tree(path):
|
||||
If the directory is a symlink, follows the link and removes the real
|
||||
directory before removing the link.
|
||||
|
||||
This method will force-delete files on Windows
|
||||
|
||||
Parameters:
|
||||
path (str): Directory to be removed
|
||||
"""
|
||||
# On windows, cleaning a Git stage can be an issue
|
||||
# as git leaves readonly files that Python handles
|
||||
# poorly on Windows. Remove readonly status and try again
|
||||
def onerror(func, path, exe_info):
|
||||
os.chmod(path, stat.S_IWUSR)
|
||||
try:
|
||||
func(path)
|
||||
except Exception as e:
|
||||
tty.warn(e)
|
||||
pass
|
||||
|
||||
kwargs = {'ignore_errors': True}
|
||||
|
||||
# Windows readonly files cannot be removed by Python
|
||||
# directly.
|
||||
if is_windows:
|
||||
kwargs = {'onerror': onerror}
|
||||
kwargs['ignore_errors'] = False
|
||||
kwargs['onerror'] = readonly_file_handler(ignore_errors=True)
|
||||
|
||||
if os.path.exists(path):
|
||||
if os.path.islink(path):
|
||||
|
||||
@@ -11,7 +11,9 @@
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import traceback
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Tuple
|
||||
|
||||
import six
|
||||
from six import string_types
|
||||
@@ -1009,3 +1011,64 @@ def __repr__(self):
|
||||
|
||||
def __str__(self):
|
||||
return str(self.data)
|
||||
|
||||
|
||||
class GroupedExceptionHandler(object):
|
||||
"""A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""
|
||||
|
||||
def __init__(self):
|
||||
self.exceptions = [] # type: List[Tuple[str, Exception, List[str]]]
|
||||
|
||||
def __bool__(self):
|
||||
"""Whether any exceptions were handled."""
|
||||
return bool(self.exceptions)
|
||||
|
||||
def forward(self, context):
|
||||
# type: (str) -> GroupedExceptionForwarder
|
||||
"""Return a contextmanager which extracts tracebacks and prefixes a message."""
|
||||
return GroupedExceptionForwarder(context, self)
|
||||
|
||||
def _receive_forwarded(self, context, exc, tb):
|
||||
# type: (str, Exception, List[str]) -> None
|
||||
self.exceptions.append((context, exc, tb))
|
||||
|
||||
def grouped_message(self, with_tracebacks=True):
|
||||
# type: (bool) -> str
|
||||
"""Print out an error message coalescing all the forwarded errors."""
|
||||
each_exception_message = [
|
||||
'{0} raised {1}: {2}{3}'.format(
|
||||
context,
|
||||
exc.__class__.__name__,
|
||||
exc,
|
||||
'\n{0}'.format(''.join(tb)) if with_tracebacks else '',
|
||||
)
|
||||
for context, exc, tb in self.exceptions
|
||||
]
|
||||
return 'due to the following failures:\n{0}'.format(
|
||||
'\n'.join(each_exception_message)
|
||||
)
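Since the class is easiest to understand from the call site, here is a usage
sketch (the step names and the ``run_bootstrap_step`` helper are illustrative,
not part of this changeset):

.. code-block:: python

   errors = GroupedExceptionHandler()

   for step in ('clingo', 'gnupg', 'patchelf'):
       # Each failure is recorded with its context and traceback,
       # instead of aborting the whole loop.
       with errors.forward('bootstrapping ' + step):
           run_bootstrap_step(step)  # illustrative helper; may raise

   if errors:  # True when at least one step failed
       raise RuntimeError('cannot bootstrap ' + errors.grouped_message())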
|
||||
|
||||
|
||||
class GroupedExceptionForwarder(object):
|
||||
"""A contextmanager to capture exceptions and forward them to a
|
||||
GroupedExceptionHandler."""
|
||||
|
||||
def __init__(self, context, handler):
|
||||
# type: (str, GroupedExceptionHandler) -> None
|
||||
self._context = context
|
||||
self._handler = handler
|
||||
|
||||
def __enter__(self):
|
||||
return None
|
||||
|
||||
def __exit__(self, exc_type, exc_value, tb):
|
||||
if exc_value is not None:
|
||||
self._handler._receive_forwarded(
|
||||
self._context,
|
||||
exc_value,
|
||||
traceback.format_tb(tb),
|
||||
)
|
||||
|
||||
# Suppress any exception from being re-raised:
|
||||
# https://docs.python.org/3/reference/datamodel.html#object.__exit__.
|
||||
return True
|
||||
|
||||
@@ -809,19 +809,23 @@ def __enter__(self):
|
||||
def background_reader(reader, echo_writer, _kill):
|
||||
# for each line printed to logfile, read it
|
||||
# if echo: write line to user
|
||||
while True:
|
||||
is_killed = _kill.wait(.1)
|
||||
self.stderr.flush()
|
||||
self.stdout.flush()
|
||||
line = reader.readline()
|
||||
while line:
|
||||
if self.echo:
|
||||
self.echo_writer.write('{0}'.format(line.decode()))
|
||||
self.echo_writer.flush()
|
||||
line = reader.readline()
|
||||
try:
|
||||
while True:
|
||||
is_killed = _kill.wait(.1)
|
||||
# Flush buffered build output to file
|
||||
# stdout/err fds refer to log file
|
||||
self.stderr.flush()
|
||||
self.stdout.flush()
|
||||
|
||||
if is_killed:
|
||||
break
|
||||
line = reader.readline()
|
||||
if self.echo and line:
|
||||
echo_writer.write('{0}'.format(line.decode()))
|
||||
echo_writer.flush()
|
||||
|
||||
if is_killed:
|
||||
break
|
||||
finally:
|
||||
reader.close()
|
||||
|
||||
self._active = True
|
||||
with replace_environment(self.env):
|
||||
@@ -837,7 +841,6 @@ def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self._ioflag = False
|
||||
else:
|
||||
self.writer.close()
|
||||
self.reader.close()
|
||||
self.echo_writer.flush()
|
||||
self.stdout.flush()
|
||||
self.stderr.flush()
|
||||
@@ -853,10 +856,7 @@ def force_echo(self):
|
||||
if not self._active:
|
||||
raise RuntimeError(
|
||||
"Can't call force_echo() outside log_output region!")
|
||||
try:
|
||||
yield self
|
||||
finally:
|
||||
pass
|
||||
yield
|
||||
|
||||
|
||||
def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
|
||||
|
||||
@@ -276,6 +276,24 @@ def _search_duplicate_specs_in_externals(error_cls):
|
||||
)
|
||||
|
||||
|
||||
@package_directives
|
||||
def _check_build_test_callbacks(pkgs, error_cls):
|
||||
"""Ensure stand-alone test method is not included in build-time callbacks"""
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg = spack.repo.get(pkg_name)
|
||||
test_callbacks = pkg.build_time_test_callbacks
|
||||
|
||||
if test_callbacks and 'test' in test_callbacks:
|
||||
msg = ('{0} package contains "test" method in '
|
||||
'build_time_test_callbacks')
|
||||
instr = ('Remove "test" from: [{0}]'
|
||||
.format(', '.join(test_callbacks)))
|
||||
errors.append(error_cls(msg.format(pkg.name), [instr]))
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
@package_directives
|
||||
def _check_patch_urls(pkgs, error_cls):
|
||||
"""Ensure that patches fetched from GitHub have stable sha256 hashes."""
|
||||
|
||||
@@ -27,7 +27,6 @@
|
||||
import spack.config as config
|
||||
import spack.database as spack_db
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.hash_types as ht
|
||||
import spack.hooks
|
||||
import spack.hooks.sbang
|
||||
import spack.mirror
|
||||
@@ -182,7 +181,6 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
|
||||
|
||||
for indexed_spec in spec_list:
|
||||
dag_hash = indexed_spec.dag_hash()
|
||||
full_hash = indexed_spec._full_hash
|
||||
|
||||
if dag_hash not in self._mirrors_for_spec:
|
||||
self._mirrors_for_spec[dag_hash] = []
|
||||
@@ -190,11 +188,8 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
|
||||
for entry in self._mirrors_for_spec[dag_hash]:
|
||||
# A binary mirror can only have one spec per DAG hash, so
|
||||
# if we already have an entry under this DAG hash for this
|
||||
# mirror url, we may need to replace the spec associated
|
||||
# with it (but only if it has a different full_hash).
|
||||
# mirror url, we're done.
|
||||
if entry['mirror_url'] == mirror_url:
|
||||
if full_hash and full_hash != entry['spec']._full_hash:
|
||||
entry['spec'] = indexed_spec
|
||||
break
|
||||
else:
|
||||
self._mirrors_for_spec[dag_hash].append({
|
||||
@@ -403,6 +398,11 @@ def _fetch_and_cache_index(self, mirror_url, expect_hash=None):
|
||||
hash_fetch_url = url_util.join(
|
||||
mirror_url, _build_cache_relative_path, 'index.json.hash')
|
||||
|
||||
if not web_util.url_exists(index_fetch_url):
|
||||
# A binary mirror is not required to have an index, so avoid
|
||||
# raising FetchCacheError in that case.
|
||||
return False
|
||||
|
||||
old_cache_key = None
|
||||
fetched_hash = None
|
||||
|
||||
@@ -762,6 +762,62 @@ def sign_tarball(key, force, specfile_path):
|
||||
spack.util.gpg.sign(key, specfile_path, '%s.asc' % specfile_path)
|
||||
|
||||
|
||||
def _fetch_spec_from_mirror(spec_url):
|
||||
s = None
|
||||
tty.debug('fetching {0}'.format(spec_url))
|
||||
_, _, spec_file = web_util.read_from_url(spec_url)
|
||||
spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
|
||||
# Need full spec.json name or this gets confused with index.json.
|
||||
if spec_url.endswith('.json'):
|
||||
s = Spec.from_json(spec_file_contents)
|
||||
elif spec_url.endswith('.yaml'):
|
||||
s = Spec.from_yaml(spec_file_contents)
|
||||
return s
|
||||
|
||||
|
||||
def _read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir):
|
||||
for file_path in file_list:
|
||||
try:
|
||||
s = _fetch_spec_from_mirror(url_util.join(cache_prefix, file_path))
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
tty.error('Error reading specfile: {0}'.format(file_path))
|
||||
tty.error(url_err)
|
||||
|
||||
if s:
|
||||
db.add(s, None)
|
||||
db.mark(s, 'in_buildcache', True)
|
||||
|
||||
# Now generate the index, compute its hash, and push the two files to
|
||||
# the mirror.
|
||||
index_json_path = os.path.join(db_root_dir, 'index.json')
|
||||
with open(index_json_path, 'w') as f:
|
||||
db._write_to_file(f)
|
||||
|
||||
# Read the index back in and compute its hash
|
||||
with open(index_json_path) as f:
|
||||
index_string = f.read()
|
||||
index_hash = compute_hash(index_string)
|
||||
|
||||
# Write the hash out to a local file
|
||||
index_hash_path = os.path.join(db_root_dir, 'index.json.hash')
|
||||
with open(index_hash_path, 'w') as f:
|
||||
f.write(index_hash)
|
||||
|
||||
# Push the index itself
|
||||
web_util.push_to_url(
|
||||
index_json_path,
|
||||
url_util.join(cache_prefix, 'index.json'),
|
||||
keep_original=False,
|
||||
extra_args={'ContentType': 'application/json'})
|
||||
|
||||
# Push the hash
|
||||
web_util.push_to_url(
|
||||
index_hash_path,
|
||||
url_util.join(cache_prefix, 'index.json.hash'),
|
||||
keep_original=False,
|
||||
extra_args={'ContentType': 'text/plain'})
|
||||
|
||||
|
||||
def generate_package_index(cache_prefix):
|
||||
"""Create the build cache index page.
|
||||
|
||||
@@ -790,35 +846,6 @@ def generate_package_index(cache_prefix):
|
||||
tty.debug('Retrieving spec descriptor files from {0} to build index'.format(
|
||||
cache_prefix))
|
||||
|
||||
all_mirror_specs = {}
|
||||
|
||||
for file_path in file_list:
|
||||
try:
|
||||
spec_url = url_util.join(cache_prefix, file_path)
|
||||
tty.debug('fetching {0}'.format(spec_url))
|
||||
_, _, spec_file = web_util.read_from_url(spec_url)
|
||||
spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
|
||||
# Need full spec.json name or this gets confused with index.json.
|
||||
if spec_url.endswith('.json'):
|
||||
spec_dict = sjson.load(spec_file_contents)
|
||||
s = Spec.from_json(spec_file_contents)
|
||||
elif spec_url.endswith('.yaml'):
|
||||
spec_dict = syaml.load(spec_file_contents)
|
||||
s = Spec.from_yaml(spec_file_contents)
|
||||
all_mirror_specs[s.dag_hash()] = {
|
||||
'spec_url': spec_url,
|
||||
'spec': s,
|
||||
'num_deps': len(list(s.traverse(root=False))),
|
||||
'binary_cache_checksum': spec_dict['binary_cache_checksum'],
|
||||
'buildinfo': spec_dict['buildinfo'],
|
||||
}
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
tty.error('Error reading specfile: {0}'.format(file_path))
|
||||
tty.error(url_err)
|
||||
|
||||
sorted_specs = sorted(all_mirror_specs.keys(),
|
||||
key=lambda k: all_mirror_specs[k]['num_deps'])
|
||||
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
db_root_dir = os.path.join(tmpdir, 'db_root')
|
||||
db = spack_db.Database(None, db_dir=db_root_dir,
|
||||
@@ -826,85 +853,7 @@ def generate_package_index(cache_prefix):
|
||||
record_fields=['spec', 'ref_count', 'in_buildcache'])
|
||||
|
||||
try:
|
||||
tty.debug('Specs sorted by number of dependencies:')
|
||||
for dag_hash in sorted_specs:
|
||||
spec_record = all_mirror_specs[dag_hash]
|
||||
s = spec_record['spec']
|
||||
num_deps = spec_record['num_deps']
|
||||
tty.debug(' {0}/{1} -> {2}'.format(
|
||||
s.name, dag_hash[:7], num_deps))
|
||||
if num_deps > 0:
|
||||
# Check each of this spec's dependencies (which we have already
|
||||
# processed), as they are the source of truth for their own
|
||||
# full hash. If the full hash we have for any deps does not
|
||||
# match what those deps have themselves, then we need to splice
|
||||
# this spec with those deps, and push this spliced spec
|
||||
# (spec.json file) back to the mirror, as well as update the
|
||||
# all_mirror_specs dictionary with this spliced spec.
|
||||
to_splice = []
|
||||
for dep in s.dependencies():
|
||||
dep_dag_hash = dep.dag_hash()
|
||||
if dep_dag_hash in all_mirror_specs:
|
||||
true_dep = all_mirror_specs[dep_dag_hash]['spec']
|
||||
if true_dep.full_hash() != dep.full_hash():
|
||||
to_splice.append(true_dep)
|
||||
|
||||
if to_splice:
|
||||
tty.debug(' needs the following deps spliced:')
|
||||
for true_dep in to_splice:
|
||||
tty.debug(' {0}/{1}'.format(
|
||||
true_dep.name, true_dep.dag_hash()[:7]))
|
||||
s = s.splice(true_dep, True)
|
||||
|
||||
# Push this spliced spec back to the mirror
|
||||
spliced_spec_dict = s.to_dict(hash=ht.full_hash)
|
||||
for key in ['binary_cache_checksum', 'buildinfo']:
|
||||
spliced_spec_dict[key] = spec_record[key]
|
||||
|
||||
temp_json_path = os.path.join(tmpdir, 'spliced.spec.json')
|
||||
with open(temp_json_path, 'w') as fd:
|
||||
fd.write(sjson.dump(spliced_spec_dict))
|
||||
|
||||
spliced_spec_url = spec_record['spec_url']
|
||||
web_util.push_to_url(
|
||||
temp_json_path, spliced_spec_url, keep_original=False)
|
||||
tty.debug(' spliced and wrote {0}'.format(
|
||||
spliced_spec_url))
|
||||
spec_record['spec'] = s
|
||||
|
||||
db.add(s, None)
|
||||
db.mark(s, 'in_buildcache', True)

# Now that we have fixed any old specfiles that might have had the wrong
# full hash for their dependencies, we can generate the index, compute
# the hash, and push those files to the mirror.
index_json_path = os.path.join(db_root_dir, 'index.json')
|
||||
with open(index_json_path, 'w') as f:
|
||||
db._write_to_file(f)
|
||||
|
||||
# Read the index back in and compute its hash
|
||||
with open(index_json_path) as f:
|
||||
index_string = f.read()
|
||||
index_hash = compute_hash(index_string)
|
||||
|
||||
# Write the hash out to a local file
|
||||
index_hash_path = os.path.join(db_root_dir, 'index.json.hash')
|
||||
with open(index_hash_path, 'w') as f:
|
||||
f.write(index_hash)
|
||||
|
||||
# Push the index itself
|
||||
web_util.push_to_url(
|
||||
index_json_path,
|
||||
url_util.join(cache_prefix, 'index.json'),
|
||||
keep_original=False,
|
||||
extra_args={'ContentType': 'application/json'})
|
||||
|
||||
# Push the hash
|
||||
web_util.push_to_url(
|
||||
index_hash_path,
|
||||
url_util.join(cache_prefix, 'index.json.hash'),
|
||||
keep_original=False,
|
||||
extra_args={'ContentType': 'text/plain'})
|
||||
_read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir)
|
||||
except Exception as err:
|
||||
msg = 'Encountered problem pushing package index to {0}: {1}'.format(
|
||||
cache_prefix, err)
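For context on the index.json.hash pushed above: the hash file just stores a checksum of the serialized database so remote clients can detect a stale index without downloading it. A minimal standalone sketch of that idea, assuming compute_hash boils down to a sha256 hexdigest (the function name below is illustrative, not Spack's API):

import hashlib

def write_index_and_hash(index_string, index_path, hash_path):
    # Write the index itself, then store the sha256 of the exact string written,
    # so a client can fetch the small .hash file first and skip re-downloading
    # an unchanged index.json.
    with open(index_path, 'w') as f:
        f.write(index_string)
    digest = hashlib.sha256(index_string.encode('utf-8')).hexdigest()
    with open(hash_path, 'w') as f:
        f.write(digest)
    return digest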
@@ -1568,12 +1517,11 @@ def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None
|
||||
sha256 (str): optional sha256 of the binary package, to be checked
|
||||
before installation
|
||||
"""
|
||||
package = spack.repo.get(spec)
|
||||
# Early termination
|
||||
if spec.external or spec.virtual:
|
||||
warnings.warn("Skipping external or virtual package {0}".format(spec.format()))
|
||||
return
|
||||
elif spec.concrete and package.installed and not force:
|
||||
elif spec.concrete and spec.installed and not force:
|
||||
warnings.warn("Package for spec {0} already installed.".format(spec.format()))
|
||||
return
|
||||
|
||||
@@ -1611,16 +1559,14 @@ def install_single_spec(spec, allow_root=False, unsigned=False, force=False):
|
||||
install_root_node(node, allow_root=allow_root, unsigned=unsigned, force=force)
|
||||
|
||||
|
||||
def try_direct_fetch(spec, full_hash_match=False, mirrors=None):
|
||||
def try_direct_fetch(spec, mirrors=None):
|
||||
"""
|
||||
Try to find the spec directly on the configured mirrors
|
||||
"""
|
||||
deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_name = tarball_name(spec, '.spec.json')
|
||||
specfile_is_json = True
|
||||
lenient = not full_hash_match
|
||||
found_specs = []
|
||||
spec_full_hash = spec.full_hash()
|
||||
|
||||
for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
|
||||
buildcache_fetch_url_yaml = url_util.join(
|
||||
@@ -1650,29 +1596,21 @@ def try_direct_fetch(spec, full_hash_match=False, mirrors=None):
|
||||
fetched_spec = Spec.from_yaml(specfile_contents)
|
||||
fetched_spec._mark_concrete()
|
||||
|
||||
# Do not recompute the full hash for the fetched spec, instead just
|
||||
# read the property.
|
||||
if lenient or fetched_spec._full_hash == spec_full_hash:
|
||||
found_specs.append({
|
||||
'mirror_url': mirror.fetch_url,
|
||||
'spec': fetched_spec,
|
||||
})
|
||||
found_specs.append({
|
||||
'mirror_url': mirror.fetch_url,
|
||||
'spec': fetched_spec,
|
||||
})
|
||||
|
||||
return found_specs
|
||||
|
||||
|
||||
def get_mirrors_for_spec(spec=None, full_hash_match=False,
|
||||
mirrors_to_check=None, index_only=False):
|
||||
def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
|
||||
"""
|
||||
Check if concrete spec exists on mirrors and return a list
|
||||
indicating the mirrors on which it can be found
|
||||
|
||||
Args:
|
||||
spec (spack.spec.Spec): The spec to look for in binary mirrors
|
||||
full_hash_match (bool): If True, only includes mirrors where the spec
|
||||
full hash matches the locally computed full hash of the ``spec``
|
||||
argument. If False, any mirror which has a matching DAG hash
|
||||
is included in the results.
|
||||
mirrors_to_check (dict): Optionally override the configured mirrors
|
||||
with the mirrors in this dictionary.
|
||||
index_only (bool): Do not attempt direct fetching of ``spec.json``
|
||||
@@ -1689,29 +1627,14 @@ def get_mirrors_for_spec(spec=None, full_hash_match=False,
|
||||
tty.debug("No Spack mirrors are currently configured")
|
||||
return {}
|
||||
|
||||
results = []
|
||||
lenient = not full_hash_match
|
||||
spec_full_hash = spec.full_hash()
|
||||
|
||||
def filter_candidates(candidate_list):
|
||||
filtered_candidates = []
|
||||
for candidate in candidate_list:
|
||||
candidate_full_hash = candidate['spec']._full_hash
|
||||
if lenient or spec_full_hash == candidate_full_hash:
|
||||
filtered_candidates.append(candidate)
|
||||
return filtered_candidates
|
||||
|
||||
candidates = binary_index.find_built_spec(spec)
|
||||
if candidates:
|
||||
results = filter_candidates(candidates)
|
||||
results = binary_index.find_built_spec(spec)
|
||||
|
||||
# Maybe we just didn't have the latest information from the mirror, so
|
||||
# try to fetch directly, unless we are only considering the indices.
|
||||
if not results and not index_only:
|
||||
results = try_direct_fetch(spec,
|
||||
full_hash_match=full_hash_match,
|
||||
mirrors=mirrors_to_check)
|
||||
|
||||
results = try_direct_fetch(spec, mirrors=mirrors_to_check)
|
||||
# We found a spec by the direct fetch approach, we might as well
|
||||
# add it to our mapping.
|
||||
if results:
|
||||
binary_index.update_spec(spec, results)
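With the full-hash filtering removed, a caller's view of this lookup becomes simpler. A hedged sketch of such a caller (helper name is made up; it assumes each result keeps the {'mirror_url': ..., 'spec': ...} shape built in try_direct_fetch above, and uses the bindist alias seen elsewhere in this codebase):

import spack.binary_distribution as bindist

def mirrors_with_binary(spec):
    # Return the fetch URLs of mirrors that already hold a binary for this spec.
    results = bindist.get_mirrors_for_spec(spec=spec, index_only=False)
    return [entry['mirror_url'] for entry in results]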
@@ -1861,124 +1784,35 @@ def push_keys(*mirrors, **kwargs):
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
|
||||
def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
|
||||
def needs_rebuild(spec, mirror_url):
|
||||
if not spec.concrete:
|
||||
raise ValueError('spec must be concrete to check against mirror')
|
||||
|
||||
pkg_name = spec.name
|
||||
pkg_version = spec.version
|
||||
|
||||
pkg_hash = spec.dag_hash()
|
||||
pkg_full_hash = spec.full_hash()
|
||||
|
||||
tty.debug('Checking {0}-{1}, dag_hash = {2}, full_hash = {3}'.format(
|
||||
pkg_name, pkg_version, pkg_hash, pkg_full_hash))
|
||||
tty.debug('Checking {0}-{1}, dag_hash = {2}'.format(
|
||||
pkg_name, pkg_version, pkg_hash))
|
||||
tty.debug(spec.tree())
|
||||
|
||||
# Try to retrieve the specfile directly, based on the known
|
||||
# format of the name, in order to determine if the package
|
||||
# needs to be rebuilt.
|
||||
cache_prefix = build_cache_prefix(mirror_url)
|
||||
specfile_is_json = True
|
||||
specfile_name = tarball_name(spec, '.spec.json')
|
||||
deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_path = os.path.join(cache_prefix, specfile_name)
|
||||
deprecated_specfile_path = os.path.join(cache_prefix,
|
||||
deprecated_specfile_name)
|
||||
|
||||
result_of_error = 'Package ({0}) will {1}be rebuilt'.format(
|
||||
spec.short_spec, '' if rebuild_on_errors else 'not ')
|
||||
|
||||
try:
|
||||
_, _, spec_file = web_util.read_from_url(specfile_path)
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
try:
|
||||
_, _, spec_file = web_util.read_from_url(deprecated_specfile_path)
|
||||
specfile_is_json = False
|
||||
except (URLError, web_util.SpackWebError) as url_err_y:
|
||||
err_msg = [
|
||||
'Unable to determine whether {0} needs rebuilding,',
|
||||
' caught exception attempting to read from {1} or {2}.',
|
||||
]
|
||||
tty.error(''.join(err_msg).format(
|
||||
spec.short_spec,
|
||||
specfile_path,
|
||||
deprecated_specfile_path))
|
||||
tty.debug(url_err)
|
||||
tty.debug(url_err_y)
|
||||
tty.warn(result_of_error)
|
||||
return rebuild_on_errors
|
||||
|
||||
spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
|
||||
if not spec_file_contents:
|
||||
tty.error('Reading {0} returned nothing'.format(
|
||||
specfile_path if specfile_is_json else deprecated_specfile_path))
|
||||
tty.warn(result_of_error)
|
||||
return rebuild_on_errors
|
||||
|
||||
spec_dict = (sjson.load(spec_file_contents)
|
||||
if specfile_is_json else syaml.load(spec_file_contents))
|
||||
|
||||
try:
|
||||
nodes = spec_dict['spec']['nodes']
|
||||
except KeyError:
|
||||
# Prior node dict format omitted 'nodes' key
|
||||
nodes = spec_dict['spec']
|
||||
name = spec.name
|
||||
|
||||
# In the old format:
|
||||
# The "spec" key represents a list of objects, each with a single
|
||||
# key that is the package name. While the list usually just contains
|
||||
# a single object, we iterate over the list looking for the object
|
||||
# with the name of this concrete spec as a key, out of an abundance
|
||||
# of caution.
|
||||
# In format version 2:
|
||||
# ['spec']['nodes'] is still a list of objects, but with a
|
||||
# multitude of keys. The list will commonly contain many objects, and in the
|
||||
# case of build specs, it is highly likely that the same name will occur
|
||||
# once as the actual package, and then again as the build provenance of that
|
||||
# same package. Hence format version 2 matches on the dag hash, not name.
|
||||
if nodes and 'name' not in nodes[0]:
|
||||
# old style
|
||||
cached_pkg_specs = [item[name] for item in nodes if name in item]
|
||||
elif nodes and spec_dict['spec']['_meta']['version'] == 2:
|
||||
cached_pkg_specs = [item for item in nodes
|
||||
if item[ht.dag_hash.name] == spec.dag_hash()]
|
||||
cached_target = cached_pkg_specs[0] if cached_pkg_specs else None
|
||||
|
||||
# If either the full_hash didn't exist in the specfile, or it
|
||||
# did, but didn't match the one we computed locally, then we should
|
||||
# just rebuild. This can be simplified once the dag_hash and the
|
||||
# full_hash become the same thing.
|
||||
rebuild = False
|
||||
|
||||
if not cached_target:
|
||||
reason = 'did not find spec in specfile contents'
|
||||
rebuild = True
|
||||
elif ht.full_hash.name not in cached_target:
|
||||
reason = 'full_hash was missing from remote specfile'
|
||||
rebuild = True
|
||||
else:
|
||||
full_hash = cached_target[ht.full_hash.name]
|
||||
if full_hash != pkg_full_hash:
|
||||
reason = 'hash mismatch, remote = {0}, local = {1}'.format(
|
||||
full_hash, pkg_full_hash)
|
||||
rebuild = True
|
||||
|
||||
if rebuild:
|
||||
tty.msg('Rebuilding {0}, reason: {1}'.format(
|
||||
spec.short_spec, reason))
|
||||
tty.msg(spec.tree())
|
||||
|
||||
return rebuild
|
||||
# Only check for the presence of the json version of the spec. If the
|
||||
# mirror only has the yaml version, or doesn't have the spec at all, we
|
||||
# need to rebuild.
|
||||
return not web_util.url_exists(specfile_path)
|
||||
|
||||
|
||||
def check_specs_against_mirrors(mirrors, specs, output_file=None,
|
||||
rebuild_on_errors=False):
|
||||
def check_specs_against_mirrors(mirrors, specs, output_file=None):
|
||||
"""Check all the given specs against buildcaches on the given mirrors and
|
||||
determine if any of the specs need to be rebuilt. Reasons for needing to
|
||||
rebuild include binary cache for spec isn't present on a mirror, or it is
|
||||
present but the full_hash has changed since last time spec was built.
|
||||
determine if any of the specs need to be rebuilt. Specs need to be rebuilt
|
||||
when their hash doesn't exist in the mirror.
|
||||
|
||||
Arguments:
|
||||
mirrors (dict): Mirrors to check against
|
||||
@@ -1986,8 +1820,6 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
|
||||
output_file (str): Path to output file to be written. If provided,
|
||||
mirrors with missing or out-of-date specs will be formatted as a
|
||||
JSON object and written to this file.
|
||||
rebuild_on_errors (bool): Treat any errors encountered while
|
||||
checking specs as a signal to rebuild package.
|
||||
|
||||
Returns: 1 if any spec was out-of-date on any mirror, 0 otherwise.
|
||||
|
||||
@@ -1999,7 +1831,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
|
||||
rebuild_list = []
|
||||
|
||||
for spec in specs:
|
||||
if needs_rebuild(spec, mirror.fetch_url, rebuild_on_errors):
|
||||
if needs_rebuild(spec, mirror.fetch_url):
|
||||
rebuild_list.append({
|
||||
'short_spec': spec.short_spec,
|
||||
'hash': spec.dag_hash()
|
||||
|
||||
@@ -21,6 +21,7 @@
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import GroupedExceptionHandler
|
||||
|
||||
import spack.binary_distribution
|
||||
import spack.config
|
||||
@@ -417,11 +418,10 @@ def _make_bootstrapper(conf):
|
||||
return _bootstrap_methods[btype](conf)
|
||||
|
||||
|
||||
def _source_is_trusted(conf):
|
||||
def _validate_source_is_trusted(conf):
|
||||
trusted, name = spack.config.get('bootstrap:trusted'), conf['name']
|
||||
if name not in trusted:
|
||||
return False
|
||||
return trusted[name]
|
||||
raise ValueError('source is not trusted')
|
||||
|
||||
|
||||
def spec_for_current_python():
|
||||
@@ -488,34 +488,25 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):
|
||||
abstract_spec = abstract_spec or module
|
||||
source_configs = spack.config.get('bootstrap:sources', [])
|
||||
|
||||
errors = {}
|
||||
h = GroupedExceptionHandler()
|
||||
|
||||
for current_config in source_configs:
|
||||
if not _source_is_trusted(current_config):
|
||||
msg = ('[BOOTSTRAP MODULE {0}] Skipping source "{1}" since it is '
|
||||
'not trusted').format(module, current_config['name'])
|
||||
tty.debug(msg)
|
||||
continue
|
||||
with h.forward(current_config['name']):
|
||||
_validate_source_is_trusted(current_config)
|
||||
|
||||
b = _make_bootstrapper(current_config)
|
||||
try:
|
||||
b = _make_bootstrapper(current_config)
|
||||
if b.try_import(module, abstract_spec):
|
||||
return
|
||||
except Exception as e:
|
||||
msg = '[BOOTSTRAP MODULE {0}] Unexpected error "{1}"'
|
||||
tty.debug(msg.format(module, str(e)))
|
||||
errors[current_config['name']] = e
|
||||
|
||||
# We couldn't import in any way, so raise an import error
|
||||
msg = 'cannot bootstrap the "{0}" Python module'.format(module)
|
||||
assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(module) # noqa: E501
|
||||
msg = 'cannot bootstrap the "{0}" Python module '.format(module)
|
||||
if abstract_spec:
|
||||
msg += ' from spec "{0}"'.format(abstract_spec)
|
||||
msg += ' due to the following failures:\n'
|
||||
for method in errors:
|
||||
err = errors[method]
|
||||
msg += " '{0}' raised {1}: {2}\n".format(
|
||||
method, err.__class__.__name__, str(err))
|
||||
msg += ' Please run `spack -d spec zlib` for more verbose error messages'
|
||||
msg += 'from spec "{0}" '.format(abstract_spec)
|
||||
if tty.is_debug():
|
||||
msg += h.grouped_message(with_tracebacks=True)
|
||||
else:
|
||||
msg += h.grouped_message(with_tracebacks=False)
|
||||
msg += '\nRun `spack --debug ...` for more detailed errors'
|
||||
raise ImportError(msg)
|
||||
|
||||
|
||||
@@ -539,15 +530,14 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
|
||||
|
||||
executables_str = ', '.join(executables)
|
||||
source_configs = spack.config.get('bootstrap:sources', [])
|
||||
for current_config in source_configs:
|
||||
if not _source_is_trusted(current_config):
|
||||
msg = ('[BOOTSTRAP EXECUTABLES {0}] Skipping source "{1}" since it is '
|
||||
'not trusted').format(executables_str, current_config['name'])
|
||||
tty.debug(msg)
|
||||
continue
|
||||
|
||||
b = _make_bootstrapper(current_config)
|
||||
try:
|
||||
h = GroupedExceptionHandler()
|
||||
|
||||
for current_config in source_configs:
|
||||
with h.forward(current_config['name']):
|
||||
_validate_source_is_trusted(current_config)
|
||||
|
||||
b = _make_bootstrapper(current_config)
|
||||
if b.try_search_path(executables, abstract_spec):
|
||||
# Additional environment variables needed
|
||||
concrete_spec, cmd = b.last_search['spec'], b.last_search['command']
|
||||
@@ -562,14 +552,16 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
|
||||
)
|
||||
cmd.add_default_envmod(env_mods)
|
||||
return cmd
|
||||
except Exception as e:
|
||||
msg = '[BOOTSTRAP EXECUTABLES {0}] Unexpected error "{1}"'
|
||||
tty.debug(msg.format(executables_str, str(e)))
|
||||
|
||||
# We couldn't import in any way, so raise an import error
|
||||
msg = 'cannot bootstrap any of the {0} executables'.format(executables_str)
|
||||
assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(executables_str) # noqa: E501
|
||||
msg = 'cannot bootstrap any of the {0} executables '.format(executables_str)
|
||||
if abstract_spec:
|
||||
msg += ' from spec "{0}"'.format(abstract_spec)
|
||||
msg += 'from spec "{0}" '.format(abstract_spec)
|
||||
if tty.is_debug():
|
||||
msg += h.grouped_message(with_tracebacks=True)
|
||||
else:
|
||||
msg += h.grouped_message(with_tracebacks=False)
|
||||
msg += '\nRun `spack --debug ...` for more detailed errors'
|
||||
raise RuntimeError(msg)
|
||||
|
||||
|
||||
|
||||
@@ -111,6 +111,20 @@
|
||||
dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so'
|
||||
|
||||
|
||||
def should_set_parallel_jobs(jobserver_support=False):
    """Returns true in general, except when:
    - The env variable SPACK_NO_PARALLEL_MAKE=1 is set
    - jobserver_support is enabled, and a jobserver was found.
    """
    if (
        jobserver_support and
        'MAKEFLAGS' in os.environ and
        '--jobserver' in os.environ['MAKEFLAGS']
    ):
        return False
    return not env_flag(SPACK_NO_PARALLEL_MAKE)
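As a concrete illustration of the condition above: the jobserver branch only triggers when GNU make itself launched Spack and exported its jobserver through MAKEFLAGS. The values below are made up, and the example assumes should_set_parallel_jobs from above is in scope:

import os

# Pretend GNU make launched us with an active jobserver.
os.environ['MAKEFLAGS'] = '-j8 --jobserver-auth=3,4'
print(should_set_parallel_jobs(jobserver_support=True))   # False: defer to make's jobserver
print(should_set_parallel_jobs(jobserver_support=False))  # True, unless SPACK_NO_PARALLEL_MAKE=1 is set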
|
||||
|
||||
|
||||
class MakeExecutable(Executable):
|
||||
"""Special callable executable object for make so the user can specify
|
||||
parallelism options on a per-invocation basis. Specifying
|
||||
@@ -120,9 +134,6 @@ class MakeExecutable(Executable):
|
||||
call will name an environment variable which will be set to the
|
||||
parallelism level (without affecting the normal invocation with
|
||||
-j).
|
||||
|
||||
Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides
|
||||
everything.
|
||||
"""
|
||||
|
||||
def __init__(self, name, jobs):
|
||||
@@ -133,9 +144,8 @@ def __call__(self, *args, **kwargs):
|
||||
"""parallel, and jobs_env from kwargs are swallowed and used here;
|
||||
remaining arguments are passed through to the superclass.
|
||||
"""
|
||||
|
||||
disable = env_flag(SPACK_NO_PARALLEL_MAKE)
|
||||
parallel = (not disable) and kwargs.pop('parallel', self.jobs > 1)
|
||||
parallel = should_set_parallel_jobs(jobserver_support=True) and \
|
||||
kwargs.pop('parallel', self.jobs > 1)
|
||||
|
||||
if parallel:
|
||||
args = ('-j{0}'.format(self.jobs),) + args
|
||||
@@ -181,7 +191,7 @@ def clean_environment():
|
||||
env.unset('PYTHONPATH')
|
||||
|
||||
# Affects GNU make, can e.g. indirectly inhibit enabling parallel build
|
||||
env.unset('MAKEFLAGS')
|
||||
# env.unset('MAKEFLAGS')
|
||||
|
||||
# Avoid that libraries of build dependencies get hijacked.
|
||||
env.unset('LD_PRELOAD')
|
||||
@@ -232,6 +242,17 @@ def clean_environment():
|
||||
# show useful matches.
|
||||
env.set('LC_ALL', build_lang)
|
||||
|
||||
remove_flags = set()
|
||||
keep_flags = set()
|
||||
if spack.config.get('config:flags:keep_werror') == 'all':
|
||||
keep_flags.add('-Werror*')
|
||||
else:
|
||||
if spack.config.get('config:flags:keep_werror') == 'specific':
|
||||
keep_flags.add('-Werror=*')
|
||||
remove_flags.add('-Werror*')
|
||||
env.set('SPACK_COMPILER_FLAGS_KEEP', '|'.join(keep_flags))
|
||||
env.set('SPACK_COMPILER_FLAGS_REMOVE', '|'.join(remove_flags))
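The two environment variables set here carry glob-style patterns that are consumed elsewhere (in Spack's compiler wrapper, not in this module). Purely as an illustration of how such keep/remove patterns interact, a small standalone sketch with made-up flag lists:

import fnmatch

def filter_compiler_flags(flags, keep_patterns, remove_patterns):
    # Illustrative only: drop flags matching a remove pattern unless they also
    # match a keep pattern.
    kept = []
    for flag in flags:
        keep = any(fnmatch.fnmatch(flag, p) for p in keep_patterns)
        remove = any(fnmatch.fnmatch(flag, p) for p in remove_patterns)
        if remove and not keep:
            continue
        kept.append(flag)
    return kept

# keep_werror == 'specific': keep -Werror=<diag> flags but drop bare -Werror.
print(filter_compiler_flags(
    ['-O2', '-Werror', '-Werror=format-security'],
    keep_patterns=['-Werror=*'],
    remove_patterns=['-Werror*'],
))  # ['-O2', '-Werror=format-security']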
|
||||
|
||||
# Remove any macports installs from the PATH. The macports ld can
|
||||
# cause conflicts with the built-in linker on el capitan. Solves
|
||||
# assembler issues, e.g.:
|
||||
@@ -512,14 +533,7 @@ def _set_variables_for_single_module(pkg, module):
|
||||
m.make_jobs = jobs
|
||||
|
||||
# TODO: make these build deps that can be installed if not found.
|
||||
# FIXME: !!!!!
|
||||
m.make = MakeExecutable('make', jobs)
|
||||
m.emmake = MakeExecutable('emmake', jobs)
|
||||
m.emmake.add_default_arg('make')
|
||||
m.emmake.add_default_arg('AR=emar')
|
||||
m.emmake.add_default_arg('RANLIB=emranlib')
|
||||
m.emmake.add_default_arg('NM=emnm')
|
||||
|
||||
m.gmake = MakeExecutable('gmake', jobs)
|
||||
m.scons = MakeExecutable('scons', jobs)
|
||||
m.ninja = MakeExecutable('ninja', jobs)
|
||||
@@ -530,15 +544,9 @@ def _set_variables_for_single_module(pkg, module):
|
||||
# Find the configure script in the archive path
|
||||
# Don't use which for this; we want to find it in the current dir.
|
||||
m.configure = Executable('./configure')
|
||||
m.emconfigure = Executable('emconfigure')
|
||||
m.emconfigure.add_default_arg('./configure')
|
||||
|
||||
m.meson = Executable('meson')
|
||||
|
||||
m.cmake = Executable('cmake')
|
||||
m.emcmake = Executable('emcmake')
|
||||
m.emcmake.add_default_arg('cmake')
|
||||
|
||||
m.ctest = MakeExecutable('ctest', jobs)
|
||||
|
||||
if sys.platform == 'win32':
|
||||
@@ -842,7 +850,7 @@ def setup_package(pkg, dirty, context='build'):
|
||||
# PrgEnv modules on cray platform. Module unload does no damage when
|
||||
# unnecessary
|
||||
on_cray, _ = _on_cray()
|
||||
if on_cray:
|
||||
if on_cray and not dirty:
|
||||
for mod in ['cray-mpich', 'cray-libsci']:
|
||||
module('unload', mod)
|
||||
|
||||
@@ -1041,7 +1049,7 @@ def get_cmake_prefix_path(pkg):
|
||||
|
||||
|
||||
def _setup_pkg_and_run(serialized_pkg, function, kwargs, child_pipe,
|
||||
input_multiprocess_fd):
|
||||
input_multiprocess_fd, jsfd1, jsfd2):
|
||||
|
||||
context = kwargs.get('context', 'build')
|
||||
|
||||
@@ -1148,6 +1156,8 @@ def child_fun():
|
||||
"""
|
||||
parent_pipe, child_pipe = multiprocessing.Pipe()
|
||||
input_multiprocess_fd = None
|
||||
jobserver_fd1 = None
|
||||
jobserver_fd2 = None
|
||||
|
||||
serialized_pkg = spack.subprocess_context.PackageInstallContext(pkg)
|
||||
|
||||
@@ -1157,11 +1167,17 @@ def child_fun():
|
||||
'fileno'):
|
||||
input_fd = os.dup(sys.stdin.fileno())
|
||||
input_multiprocess_fd = MultiProcessFd(input_fd)
mflags = os.environ.get('MAKEFLAGS', False)
if mflags:
    m = re.search(r'--jobserver-[^=]*=(\d),(\d)', mflags)
    if m:
        jobserver_fd1 = MultiProcessFd(int(m.group(1)))
        jobserver_fd2 = MultiProcessFd(int(m.group(2)))
|
||||
|
||||
p = multiprocessing.Process(
|
||||
target=_setup_pkg_and_run,
|
||||
args=(serialized_pkg, function, kwargs, child_pipe,
|
||||
input_multiprocess_fd))
|
||||
input_multiprocess_fd, jobserver_fd1, jobserver_fd2))
|
||||
|
||||
p.start()
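For reference, the MAKEFLAGS value parsed a few lines up looks like '-j8 --jobserver-auth=3,4' (older makes use --jobserver-fds). A small sketch of the same extraction, with an illustrative helper name:

import re

def parse_jobserver_fds(makeflags):
    # Mirror of the regex above: pull out the read/write file descriptors that
    # GNU make passes to sub-makes. Returns None when no jobserver is advertised.
    m = re.search(r'--jobserver-[^=]*=(\d),(\d)', makeflags or '')
    return (int(m.group(1)), int(m.group(2))) if m else None

print(parse_jobserver_fds('-j8 --jobserver-auth=3,4'))  # (3, 4)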
@@ -419,10 +419,7 @@ def configure(self, spec, prefix):
|
||||
options += self.configure_args()
|
||||
|
||||
with working_dir(self.build_directory, create=True):
|
||||
if self.spec.satisfies('%emscripten'):
|
||||
inspect.getmodule(self).emconfigure(*options)
|
||||
else:
|
||||
inspect.getmodule(self).configure(*options)
|
||||
inspect.getmodule(self).configure(*options)
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
if (self.spec.platform == 'darwin'
|
||||
|
||||
@@ -210,6 +210,10 @@ def std_initconfig_entries(self):
|
||||
"#------------------{0}\n".format("-" * 60),
|
||||
]
|
||||
|
||||
def initconfig_package_entries(self):
|
||||
"""This method is to be overwritten by the package"""
|
||||
return []
|
||||
|
||||
def initconfig(self, spec, prefix):
|
||||
cache_entries = (self.std_initconfig_entries() +
|
||||
self.initconfig_compiler_entries() +
|
||||
|
||||
@@ -94,13 +94,11 @@ class CMakePackage(PackageBase):
|
||||
#: See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
|
||||
#: for more information.
|
||||
|
||||
# generator = "Unix Makefiles"
|
||||
generator = "Ninja"
|
||||
depends_on('ninja', type='build')
|
||||
generator = "Unix Makefiles"
|
||||
|
||||
if sys.platform == 'win32':
|
||||
generator = "Ninja"
|
||||
depends_on('ninja', type='build')
|
||||
depends_on('ninja')
|
||||
|
||||
# https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
|
||||
variant('build_type', default='RelWithDebInfo',
|
||||
@@ -178,6 +176,7 @@ def _std_args(pkg):
|
||||
'-G', generator,
|
||||
define('CMAKE_INSTALL_PREFIX', convert_to_posix_path(pkg.prefix)),
|
||||
define('CMAKE_BUILD_TYPE', build_type),
|
||||
define('BUILD_TESTING', pkg.run_tests),
|
||||
]
|
||||
|
||||
# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
|
||||
@@ -363,6 +362,7 @@ def cmake_args(self):
|
||||
|
||||
* CMAKE_INSTALL_PREFIX
|
||||
* CMAKE_BUILD_TYPE
|
||||
* BUILD_TESTING
|
||||
|
||||
which will be set automatically.
|
||||
|
||||
@@ -376,10 +376,7 @@ def cmake(self, spec, prefix):
|
||||
options += self.cmake_args()
|
||||
options.append(os.path.abspath(self.root_cmakelists_dir))
|
||||
with working_dir(self.build_directory, create=True):
|
||||
if self.spec.satisfies('%emscripten'):
|
||||
inspect.getmodule(self).emcmake(*options)
|
||||
else:
|
||||
inspect.getmodule(self).cmake(*options)
|
||||
inspect.getmodule(self).cmake(*options)
|
||||
|
||||
def build(self, spec, prefix):
|
||||
"""Make the build targets"""
|
||||
|
||||
@@ -107,10 +107,10 @@ def cuda_flags(arch_list):
|
||||
# each release of a new cuda minor version.
|
||||
conflicts('%gcc@10:', when='+cuda ^cuda@:11.0')
|
||||
conflicts('%gcc@11:', when='+cuda ^cuda@:11.4.0')
|
||||
conflicts('%gcc@12:', when='+cuda ^cuda@:11.6')
|
||||
conflicts('%gcc@12:', when='+cuda ^cuda@:11.7')
|
||||
conflicts('%clang@12:', when='+cuda ^cuda@:11.4.0')
|
||||
conflicts('%clang@13:', when='+cuda ^cuda@:11.5')
|
||||
conflicts('%clang@14:', when='+cuda ^cuda@:11.6')
|
||||
conflicts('%clang@14:', when='+cuda ^cuda@:11.7')
|
||||
|
||||
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
|
||||
conflicts('%gcc@10', when='+cuda ^cuda@:11.4.0')
|
||||
|
||||
102
lib/spack/spack/build_systems/lua.py
Normal file
102
lib/spack/spack/build_systems/lua.py
Normal file
@@ -0,0 +1,102 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
|
||||
import os
|
||||
|
||||
from llnl.util.filesystem import find
|
||||
|
||||
from spack.directives import depends_on, extends
|
||||
from spack.multimethod import when
|
||||
from spack.package import PackageBase
|
||||
from spack.util.executable import Executable
|
||||
|
||||
|
||||
class LuaPackage(PackageBase):
|
||||
"""Specialized class for lua packages"""
|
||||
|
||||
phases = ['unpack', 'generate_luarocks_config', 'preprocess', 'install']
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
#: system base class
|
||||
build_system_class = 'LuaPackage'
|
||||
|
||||
list_depth = 1 # LuaRocks requires at least one level of spidering to find versions
|
||||
depends_on('lua-lang')
|
||||
extends('lua', when='^lua')
|
||||
with when('^lua-luajit'):
|
||||
extends('lua-luajit')
|
||||
depends_on('luajit')
|
||||
depends_on('lua-luajit+lualinks')
|
||||
with when('^lua-luajit-openresty'):
|
||||
extends('lua-luajit-openresty')
|
||||
depends_on('luajit')
|
||||
depends_on('lua-luajit-openresty+lualinks')
|
||||
|
||||
def unpack(self, spec, prefix):
|
||||
if os.path.splitext(self.stage.archive_file)[1] == '.rock':
|
||||
directory = self.luarocks('unpack', self.stage.archive_file, output=str)
|
||||
dirlines = directory.split('\n')
|
||||
# TODO: figure out how to scope this better
|
||||
os.chdir(dirlines[2])
|
||||
|
||||
def _generate_tree_line(self, name, prefix):
|
||||
return """{{ name = "{name}", root = "{prefix}" }};""".format(
|
||||
name=name,
|
||||
prefix=prefix,
|
||||
)
|
||||
|
||||
def _luarocks_config_path(self):
|
||||
return os.path.join(self.stage.source_path, 'spack_luarocks.lua')
|
||||
|
||||
def generate_luarocks_config(self, spec, prefix):
|
||||
spec = self.spec
|
||||
table_entries = []
|
||||
for d in spec.traverse(
|
||||
deptypes=("build", "run"), deptype_query="run"
|
||||
):
|
||||
if d.package.extends(self.extendee_spec):
|
||||
table_entries.append(self._generate_tree_line(d.name, d.prefix))
|
||||
|
||||
path = self._luarocks_config_path()
|
||||
with open(path, 'w') as config:
|
||||
config.write(
|
||||
"""
|
||||
deps_mode="all"
|
||||
rocks_trees={{
|
||||
{}
|
||||
}}
|
||||
""".format(
|
||||
"\n".join(table_entries)
|
||||
)
|
||||
)
|
||||
return path
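The file written here is a small Lua table of rocks trees. Roughly what it ends up containing can be seen from this sketch (package names and prefixes are hypothetical):

# Illustrative rendering of the luarocks config written above.
def tree_line(name, prefix):
    return '{{ name = "{name}", root = "{prefix}" }};'.format(name=name, prefix=prefix)

entries = [
    tree_line('lua-luafilesystem', '/opt/spack/lua-luafilesystem-1.8.0'),
    tree_line('lua-penlight', '/opt/spack/lua-penlight-1.12.0'),
]
print('deps_mode="all"\nrocks_trees={{\n{0}\n}}'.format('\n'.join(entries)))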
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
env.set('LUAROCKS_CONFIG', self._luarocks_config_path())
|
||||
|
||||
def preprocess(self, spec, prefix):
|
||||
"""Override this to preprocess source before building with luarocks"""
|
||||
pass
|
||||
|
||||
@property
|
||||
def lua(self):
|
||||
return Executable(self.spec['lua-lang'].prefix.bin.lua)
|
||||
|
||||
@property
|
||||
def luarocks(self):
|
||||
lr = Executable(self.spec['lua-lang'].prefix.bin.luarocks)
|
||||
return lr
|
||||
|
||||
def luarocks_args(self):
|
||||
return []
|
||||
|
||||
def install(self, spec, prefix):
|
||||
rock = '.'
|
||||
specs = find('.', '*.rockspec', recursive=False)
|
||||
if specs:
|
||||
rock = specs[0]
|
||||
rocks_args = self.luarocks_args()
|
||||
rocks_args.append(rock)
|
||||
self.luarocks('--tree=' + prefix, 'make', *rocks_args)
|
||||
@@ -82,20 +82,14 @@ def build(self, spec, prefix):
|
||||
as targets.
|
||||
"""
|
||||
with working_dir(self.build_directory):
|
||||
if self.spec.satisfies('%emscripten'):
|
||||
inspect.getmodule(self).emmake(*self.build_targets)
|
||||
else:
|
||||
inspect.getmodule(self).make(*self.build_targets)
|
||||
inspect.getmodule(self).make(*self.build_targets)
|
||||
|
||||
def install(self, spec, prefix):
|
||||
"""Calls make, passing :py:attr:`~.MakefilePackage.install_targets`
|
||||
as targets.
|
||||
"""
|
||||
with working_dir(self.build_directory):
|
||||
if self.spec.satisfies('%emscripten'):
|
||||
inspect.getmodule(self).emmake(*self.install_targets)
|
||||
else:
|
||||
inspect.getmodule(self).make(*self.install_targets)
|
||||
inspect.getmodule(self).make(*self.install_targets)
|
||||
|
||||
run_after('build')(PackageBase._run_default_build_time_test_callbacks)
|
||||
|
||||
|
||||
@@ -30,6 +30,15 @@ class IntelOneApiPackage(Package):
|
||||
# organization (e.g. University/Company).
|
||||
redistribute_source = False
|
||||
|
||||
@staticmethod
|
||||
def update_description(cls):
|
||||
"""Updates oneapi package descriptions with common text."""
|
||||
|
||||
text = """ LICENSE INFORMATION: By downloading and using this software, you agree to the terms
|
||||
and conditions of the software license agreements at https://intel.ly/393CijO."""
|
||||
cls.__doc__ = cls.__doc__ + text
|
||||
return cls
|
||||
|
||||
@property
|
||||
def component_dir(self):
|
||||
"""Subdirectory for this component in the install prefix."""
|
||||
|
||||
@@ -90,8 +90,8 @@ def _create_buildgroup(opener, headers, url, project, group_name, group_type):
|
||||
return build_group_id
|
||||
|
||||
|
||||
def populate_buildgroup(job_names, group_name, project, site,
|
||||
credentials, cdash_url):
|
||||
def _populate_buildgroup(job_names, group_name, project, site,
|
||||
credentials, cdash_url):
|
||||
url = "{0}/api/v1/buildgroup.php".format(cdash_url)
|
||||
|
||||
headers = {
|
||||
@@ -132,16 +132,30 @@ def populate_buildgroup(job_names, group_name, project, site,
|
||||
response_code = response.getcode()
|
||||
|
||||
if response_code != 200:
|
||||
msg = 'Error response code ({0}) in populate_buildgroup'.format(
|
||||
msg = 'Error response code ({0}) in _populate_buildgroup'.format(
|
||||
response_code)
|
||||
tty.warn(msg)
|
||||
|
||||
|
||||
def is_main_phase(phase_name):
|
||||
def _is_main_phase(phase_name):
|
||||
return True if phase_name == 'specs' else False
|
||||
|
||||
|
||||
def get_job_name(phase, strip_compiler, spec, osarch, build_group):
|
||||
""" Given the necessary parts, format the gitlab job name
|
||||
|
||||
Arguments:
|
||||
phase (str): Either 'specs' for the main phase, or the name of a
|
||||
bootstrapping phase
|
||||
strip_compiler (bool): Should compiler be stripped from job name
|
||||
spec (spack.spec.Spec): Spec job will build
|
||||
osarch: Architecture TODO: (this is a spack.spec.ArchSpec,
|
||||
but sphinx doesn't recognize the type and fails).
|
||||
build_group (str): Name of build group this job belongs to (a CDash
|
||||
notion)
|
||||
|
||||
Returns: The job name
|
||||
"""
|
||||
item_idx = 0
|
||||
format_str = ''
|
||||
format_args = []
|
||||
@@ -163,7 +177,7 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
|
||||
format_args.append(spec.version)
|
||||
item_idx += 1
|
||||
|
||||
if is_main_phase(phase) is True or strip_compiler is False:
|
||||
if _is_main_phase(phase) is True or strip_compiler is False:
|
||||
format_str += ' {{{0}}}'.format(item_idx)
|
||||
format_args.append(spec.compiler)
|
||||
item_idx += 1
|
||||
@@ -180,12 +194,12 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
|
||||
return format_str.format(*format_args)
|
||||
|
||||
|
||||
def get_cdash_build_name(spec, build_group):
|
||||
def _get_cdash_build_name(spec, build_group):
|
||||
return '{0}@{1}%{2} arch={3} ({4})'.format(
|
||||
spec.name, spec.version, spec.compiler, spec.architecture, build_group)
|
||||
|
||||
|
||||
def get_spec_string(spec):
|
||||
def _get_spec_string(spec):
|
||||
format_elements = [
|
||||
'{name}{@version}',
|
||||
'{%compiler}',
|
||||
@@ -197,15 +211,15 @@ def get_spec_string(spec):
|
||||
return spec.format(''.join(format_elements))
|
||||
|
||||
|
||||
def format_root_spec(spec, main_phase, strip_compiler):
|
||||
def _format_root_spec(spec, main_phase, strip_compiler):
|
||||
if main_phase is False and strip_compiler is True:
|
||||
return '{0}@{1} arch={2}'.format(
|
||||
spec.name, spec.version, spec.architecture)
|
||||
else:
|
||||
return spec.build_hash()
|
||||
return spec.dag_hash()
|
||||
|
||||
|
||||
def spec_deps_key(s):
|
||||
def _spec_deps_key(s):
|
||||
return '{0}/{1}'.format(s.name, s.dag_hash(7))
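These labels are what later ties jobs to their dependencies, and _pkg_name_from_spec_label (below) recovers the package name by splitting at the slash. A tiny illustration of the key shape (the hash value is invented):

def spec_deps_key_example(name, dag_hash):
    return '{0}/{1}'.format(name, dag_hash[:7])

label = spec_deps_key_example('zlib', 'abcdef1234567890')
print(label)                     # zlib/abcdef1
print(label[:label.index('/')])  # zlib -- what _pkg_name_from_spec_label recovers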
@@ -217,8 +231,8 @@ def _add_dependency(spec_label, dep_label, deps):
|
||||
deps[spec_label].add(dep_label)
|
||||
|
||||
|
||||
def get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
|
||||
spec_deps_obj = compute_spec_deps(specs, check_index_only=check_index_only)
|
||||
def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
|
||||
spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only)
|
||||
|
||||
if spec_deps_obj:
|
||||
dependencies = spec_deps_obj['dependencies']
|
||||
@@ -266,11 +280,11 @@ def stage_spec_jobs(specs, check_index_only=False):
|
||||
|
||||
"""
|
||||
|
||||
# The convenience method below, "remove_satisfied_deps()", does not modify
|
||||
# The convenience method below, "_remove_satisfied_deps()", does not modify
|
||||
# the "deps" parameter. Instead, it returns a new dictionary where only
|
||||
# dependencies which have not yet been satisfied are included in the
|
||||
# return value.
|
||||
def remove_satisfied_deps(deps, satisfied_list):
|
||||
def _remove_satisfied_deps(deps, satisfied_list):
|
||||
new_deps = {}
|
||||
|
||||
for key, value in iteritems(deps):
|
||||
@@ -283,7 +297,7 @@ def remove_satisfied_deps(deps, satisfied_list):
|
||||
deps = {}
|
||||
spec_labels = {}
|
||||
|
||||
get_spec_dependencies(
|
||||
_get_spec_dependencies(
|
||||
specs, deps, spec_labels, check_index_only=check_index_only)
|
||||
|
||||
# Save the original deps, as we need to return them at the end of the
|
||||
@@ -302,7 +316,7 @@ def remove_satisfied_deps(deps, satisfied_list):
|
||||
# Note that "dependencies" is a dictionary mapping each dependent
|
||||
# package to the set of not-yet-handled dependencies. The final step
|
||||
# below removes all the dependencies that are handled by this stage.
|
||||
dependencies = remove_satisfied_deps(dependencies, next_stage)
|
||||
dependencies = _remove_satisfied_deps(dependencies, next_stage)
|
||||
|
||||
if unstaged:
|
||||
stages.append(unstaged.copy())
|
||||
@@ -310,13 +324,12 @@ def remove_satisfied_deps(deps, satisfied_list):
|
||||
return spec_labels, deps, stages
|
||||
|
||||
|
||||
def print_staging_summary(spec_labels, dependencies, stages):
|
||||
def _print_staging_summary(spec_labels, dependencies, stages):
|
||||
if not stages:
|
||||
return
|
||||
|
||||
tty.msg(' Staging summary:')
|
||||
stage_index = 0
|
||||
for stage in stages:
|
||||
tty.msg(' Staging summary ([x] means a job needs rebuilding):')
|
||||
for stage_index, stage in enumerate(stages):
|
||||
tty.msg(' stage {0} ({1} jobs):'.format(stage_index, len(stage)))
|
||||
|
||||
for job in sorted(stage):
|
||||
@@ -324,12 +337,10 @@ def print_staging_summary(spec_labels, dependencies, stages):
|
||||
tty.msg(' [{1}] {0} -> {2}'.format(
|
||||
job,
|
||||
'x' if spec_labels[job]['needs_rebuild'] else ' ',
|
||||
get_spec_string(s)))
|
||||
|
||||
stage_index += 1
|
||||
_get_spec_string(s)))
|
||||
|
||||
|
||||
def compute_spec_deps(spec_list, check_index_only=False):
|
||||
def _compute_spec_deps(spec_list, check_index_only=False):
|
||||
"""
|
||||
Computes all the dependencies for the spec(s) and generates a JSON
|
||||
object which provides both a list of unique spec names as well as a
|
||||
@@ -402,17 +413,17 @@ def append_dep(s, d):
|
||||
continue
|
||||
|
||||
up_to_date_mirrors = bindist.get_mirrors_for_spec(
|
||||
spec=s, full_hash_match=True, index_only=check_index_only)
|
||||
spec=s, index_only=check_index_only)
|
||||
|
||||
skey = spec_deps_key(s)
|
||||
skey = _spec_deps_key(s)
|
||||
spec_labels[skey] = {
|
||||
'spec': get_spec_string(s),
|
||||
'spec': _get_spec_string(s),
|
||||
'root': root_spec,
|
||||
'needs_rebuild': not up_to_date_mirrors,
|
||||
}
|
||||
|
||||
for d in s.dependencies(deptype=all):
|
||||
dkey = spec_deps_key(d)
|
||||
dkey = _spec_deps_key(d)
|
||||
if d.external:
|
||||
tty.msg('Will not stage external dep: {0}'.format(d))
|
||||
continue
|
||||
@@ -435,11 +446,11 @@ def append_dep(s, d):
|
||||
return deps_json_obj
|
||||
|
||||
|
||||
def spec_matches(spec, match_string):
|
||||
def _spec_matches(spec, match_string):
|
||||
return spec.satisfies(match_string)
|
||||
|
||||
|
||||
def copy_attributes(attrs_list, src_dict, dest_dict):
|
||||
def _copy_attributes(attrs_list, src_dict, dest_dict):
|
||||
for runner_attr in attrs_list:
|
||||
if runner_attr in src_dict:
|
||||
if runner_attr in dest_dict and runner_attr == 'tags':
|
||||
@@ -460,7 +471,7 @@ def copy_attributes(attrs_list, src_dict, dest_dict):
|
||||
dest_dict[runner_attr] = copy.deepcopy(src_dict[runner_attr])
|
||||
|
||||
|
||||
def find_matching_config(spec, gitlab_ci):
|
||||
def _find_matching_config(spec, gitlab_ci):
|
||||
runner_attributes = {}
|
||||
overridable_attrs = [
|
||||
'image',
|
||||
@@ -471,16 +482,16 @@ def find_matching_config(spec, gitlab_ci):
|
||||
'after_script',
|
||||
]
|
||||
|
||||
copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
|
||||
_copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
|
||||
|
||||
ci_mappings = gitlab_ci['mappings']
|
||||
for ci_mapping in ci_mappings:
|
||||
for match_string in ci_mapping['match']:
|
||||
if spec_matches(spec, match_string):
|
||||
if _spec_matches(spec, match_string):
|
||||
if 'runner-attributes' in ci_mapping:
|
||||
copy_attributes(overridable_attrs,
|
||||
ci_mapping['runner-attributes'],
|
||||
runner_attributes)
|
||||
_copy_attributes(overridable_attrs,
|
||||
ci_mapping['runner-attributes'],
|
||||
runner_attributes)
|
||||
return runner_attributes
|
||||
else:
|
||||
return None
|
||||
@@ -488,16 +499,16 @@ def find_matching_config(spec, gitlab_ci):
|
||||
return runner_attributes
|
||||
|
||||
|
||||
def pkg_name_from_spec_label(spec_label):
|
||||
def _pkg_name_from_spec_label(spec_label):
|
||||
return spec_label[:spec_label.index('/')]
|
||||
|
||||
|
||||
def format_job_needs(phase_name, strip_compilers, dep_jobs,
|
||||
osname, build_group, prune_dag, stage_spec_dict,
|
||||
enable_artifacts_buildcache):
|
||||
def _format_job_needs(phase_name, strip_compilers, dep_jobs,
|
||||
osname, build_group, prune_dag, stage_spec_dict,
|
||||
enable_artifacts_buildcache):
|
||||
needs_list = []
|
||||
for dep_job in dep_jobs:
|
||||
dep_spec_key = spec_deps_key(dep_job)
|
||||
dep_spec_key = _spec_deps_key(dep_job)
|
||||
dep_spec_info = stage_spec_dict[dep_spec_key]
|
||||
|
||||
if not prune_dag or dep_spec_info['needs_rebuild']:
|
||||
@@ -592,6 +603,33 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
prune_dag=False, check_index_only=False,
|
||||
run_optimizer=False, use_dependencies=False,
|
||||
artifacts_root=None):
|
||||
""" Generate a gitlab yaml file to run a dynamic chile pipeline from
|
||||
the spec matrix in the active environment.
|
||||
|
||||
Arguments:
|
||||
env (spack.environment.Environment): Activated environment object
|
||||
which must contain a gitlab-ci section describing how to map
|
||||
specs to runners
|
||||
print_summary (bool): Should we print a summary of all the jobs in
|
||||
the stages in which they were placed.
|
||||
output_file (str): File path where generated file should be written
|
||||
prune_dag (bool): If True, do not generate jobs for specs that are
already built on the mirror.
|
||||
check_index_only (bool): If True, attempt to fetch the mirror index
|
||||
and only use that to determine whether built specs on the mirror
|
||||
are up-to-date (this mode results in faster yaml generation time). Otherwise, also
|
||||
check each spec directly by url (useful if there is no index or it
|
||||
might be out of date).
|
||||
run_optimizer (bool): If True, post-process the generated yaml to try
|
||||
to reduce the size (attempts to collect repeated configuration
and replace with definitions).
|
||||
use_dependencies (bool): If true, use "dependencies" rather than "needs"
|
||||
("needs" allows DAG scheduling). Useful if gitlab instance cannot
|
||||
be configured to handle more than a few "needs" per job.
|
||||
artifacts_root (str): Path where artifacts like logs, environment
|
||||
files (spack.yaml, spack.lock), etc should be written. GitLab
|
||||
requires this to be within the project directory.
|
||||
"""
|
||||
with spack.concretize.disable_compiler_existence_check():
|
||||
with env.write_transaction():
|
||||
env.concretize()
|
||||
@@ -804,7 +842,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
max_needs_job = ''
|
||||
|
||||
# If this is configured, spack will fail "spack ci generate" if it
|
||||
# generates any full hash which exists under the broken specs url.
|
||||
# generates any hash which exists under the broken specs url.
|
||||
broken_spec_urls = None
|
||||
if broken_specs_url:
|
||||
if broken_specs_url.startswith('http'):
|
||||
@@ -819,7 +857,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
phase_name = phase['name']
|
||||
strip_compilers = phase['strip-compilers']
|
||||
|
||||
main_phase = is_main_phase(phase_name)
|
||||
main_phase = _is_main_phase(phase_name)
|
||||
spec_labels, dependencies, stages = staged_phases[phase_name]
|
||||
|
||||
for stage_jobs in stages:
|
||||
@@ -830,11 +868,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
for spec_label in stage_jobs:
|
||||
spec_record = spec_labels[spec_label]
|
||||
root_spec = spec_record['rootSpec']
|
||||
pkg_name = pkg_name_from_spec_label(spec_label)
|
||||
pkg_name = _pkg_name_from_spec_label(spec_label)
|
||||
release_spec = root_spec[pkg_name]
|
||||
release_spec_full_hash = release_spec.full_hash()
|
||||
release_spec_dag_hash = release_spec.dag_hash()
|
||||
release_spec_build_hash = release_spec.build_hash()
|
||||
|
||||
if prune_untouched_packages:
|
||||
if release_spec not in affected_specs:
|
||||
@@ -843,7 +879,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
spec_record['needs_rebuild'] = False
|
||||
continue
|
||||
|
||||
runner_attribs = find_matching_config(
|
||||
runner_attribs = _find_matching_config(
|
||||
release_spec, gitlab_ci)
|
||||
|
||||
if not runner_attribs:
|
||||
@@ -897,15 +933,13 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
compiler_action = 'NONE'
|
||||
if len(phases) > 1:
|
||||
compiler_action = 'FIND_ANY'
|
||||
if is_main_phase(phase_name):
|
||||
if _is_main_phase(phase_name):
|
||||
compiler_action = 'INSTALL_MISSING'
|
||||
|
||||
job_vars = {
|
||||
'SPACK_ROOT_SPEC': format_root_spec(
|
||||
'SPACK_ROOT_SPEC': _format_root_spec(
|
||||
root_spec, main_phase, strip_compilers),
|
||||
'SPACK_JOB_SPEC_DAG_HASH': release_spec_dag_hash,
|
||||
'SPACK_JOB_SPEC_BUILD_HASH': release_spec_build_hash,
|
||||
'SPACK_JOB_SPEC_FULL_HASH': release_spec_full_hash,
|
||||
'SPACK_JOB_SPEC_PKG_NAME': release_spec.name,
|
||||
'SPACK_COMPILER_ACTION': compiler_action
|
||||
}
|
||||
@@ -924,15 +958,15 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
# purposes, so we only get the direct dependencies.
|
||||
dep_jobs = []
|
||||
for dep_label in dependencies[spec_label]:
|
||||
dep_pkg = pkg_name_from_spec_label(dep_label)
|
||||
dep_pkg = _pkg_name_from_spec_label(dep_label)
|
||||
dep_root = spec_labels[dep_label]['rootSpec']
|
||||
dep_jobs.append(dep_root[dep_pkg])
|
||||
|
||||
job_dependencies.extend(
|
||||
format_job_needs(phase_name, strip_compilers,
|
||||
dep_jobs, osname, build_group,
|
||||
prune_dag, spec_labels,
|
||||
enable_artifacts_buildcache))
|
||||
_format_job_needs(phase_name, strip_compilers,
|
||||
dep_jobs, osname, build_group,
|
||||
prune_dag, spec_labels,
|
||||
enable_artifacts_buildcache))
|
||||
|
||||
rebuild_spec = spec_record['needs_rebuild']
|
||||
|
||||
@@ -943,7 +977,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
# compiler we are supposed to use is listed in any of the
|
||||
# bootstrap spec lists, then we will add more dependencies to
|
||||
# the job (that compiler and maybe it's dependencies as well).
|
||||
if is_main_phase(phase_name):
|
||||
if _is_main_phase(phase_name):
|
||||
spec_arch_family = (release_spec.architecture
|
||||
.target
|
||||
.microarchitecture
|
||||
@@ -971,7 +1005,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
# be rebuilt if the compiler targeted to build it
|
||||
# needs to be rebuilt.
|
||||
bs_specs, _, _ = staged_phases[bs['phase-name']]
|
||||
c_spec_key = spec_deps_key(c_spec)
|
||||
c_spec_key = _spec_deps_key(c_spec)
|
||||
rbld_comp = bs_specs[c_spec_key]['needs_rebuild']
|
||||
rebuild_spec = rebuild_spec or rbld_comp
|
||||
# Also update record so dependents do not fail to
|
||||
@@ -985,14 +1019,14 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
]
|
||||
|
||||
job_dependencies.extend(
|
||||
format_job_needs(bs['phase-name'],
|
||||
bs['strip-compilers'],
|
||||
dep_jobs,
|
||||
str(bs_arch),
|
||||
build_group,
|
||||
prune_dag,
|
||||
bs_specs,
|
||||
enable_artifacts_buildcache))
|
||||
_format_job_needs(bs['phase-name'],
|
||||
bs['strip-compilers'],
|
||||
dep_jobs,
|
||||
str(bs_arch),
|
||||
build_group,
|
||||
prune_dag,
|
||||
bs_specs,
|
||||
enable_artifacts_buildcache))
|
||||
else:
|
||||
debug_msg = ''.join([
|
||||
'Considered compiler {0} for spec ',
|
||||
@@ -1009,9 +1043,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
continue
|
||||
|
||||
if (broken_spec_urls is not None and
|
||||
release_spec_full_hash in broken_spec_urls):
|
||||
release_spec_dag_hash in broken_spec_urls):
|
||||
known_broken_specs_encountered.append('{0} ({1})'.format(
|
||||
release_spec, release_spec_full_hash))
|
||||
release_spec, release_spec_dag_hash))
|
||||
|
||||
if artifacts_root:
|
||||
job_dependencies.append({
|
||||
@@ -1022,7 +1056,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
job_vars['SPACK_SPEC_NEEDS_REBUILD'] = str(rebuild_spec)
|
||||
|
||||
if enable_cdash_reporting:
|
||||
cdash_build_name = get_cdash_build_name(
|
||||
cdash_build_name = _get_cdash_build_name(
|
||||
release_spec, build_group)
|
||||
all_job_names.append(cdash_build_name)
|
||||
job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
|
||||
@@ -1087,7 +1121,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
phase_name = phase['name']
|
||||
tty.msg('Stages for phase "{0}"'.format(phase_name))
|
||||
phase_stages = staged_phases[phase_name]
|
||||
print_staging_summary(*phase_stages)
|
||||
_print_staging_summary(*phase_stages)
|
||||
|
||||
tty.debug('{0} build jobs generated in {1} stages'.format(
|
||||
job_id, stage_id))
|
||||
@@ -1099,8 +1133,8 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
# Use "all_job_names" to populate the build group for this set
|
||||
if enable_cdash_reporting and cdash_auth_token:
|
||||
try:
|
||||
populate_buildgroup(all_job_names, build_group, cdash_project,
|
||||
cdash_site, cdash_auth_token, cdash_url)
|
||||
_populate_buildgroup(all_job_names, build_group, cdash_project,
|
||||
cdash_site, cdash_auth_token, cdash_url)
|
||||
except (SpackError, HTTPError, URLError) as err:
|
||||
tty.warn('Problem populating buildgroup: {0}'.format(err))
|
||||
else:
|
||||
@@ -1136,9 +1170,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
cleanup_job = {}
|
||||
|
||||
if service_job_config:
|
||||
copy_attributes(default_attrs,
|
||||
service_job_config,
|
||||
cleanup_job)
|
||||
_copy_attributes(default_attrs,
|
||||
service_job_config,
|
||||
cleanup_job)
|
||||
|
||||
cleanup_job['stage'] = 'cleanup-temp-storage'
|
||||
cleanup_job['script'] = [
|
||||
@@ -1156,9 +1190,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
final_job = {}
|
||||
|
||||
if service_job_config:
|
||||
copy_attributes(default_attrs,
|
||||
service_job_config,
|
||||
final_job)
|
||||
_copy_attributes(default_attrs,
|
||||
service_job_config,
|
||||
final_job)
|
||||
|
||||
index_target_mirror = mirror_urls[0]
|
||||
if is_pr_pipeline:
|
||||
@@ -1229,9 +1263,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
noop_job = {}
|
||||
|
||||
if service_job_config:
|
||||
copy_attributes(default_attrs,
|
||||
service_job_config,
|
||||
noop_job)
|
||||
_copy_attributes(default_attrs,
|
||||
service_job_config,
|
||||
noop_job)
|
||||
|
||||
if 'script' not in noop_job:
|
||||
noop_job['script'] = [
|
||||
@@ -1254,7 +1288,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
outf.write(syaml.dump_config(sorted_output, default_flow_style=True))
|
||||
|
||||
|
||||
def url_encode_string(input_string):
|
||||
def _url_encode_string(input_string):
|
||||
encoded_keyval = urlencode({'donotcare': input_string})
|
||||
eq_idx = encoded_keyval.find('=') + 1
|
||||
encoded_value = encoded_keyval[eq_idx:]
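The helper percent-encodes an arbitrary string by round-tripping it through urlencode with a throwaway key and stripping that key back off. A standalone sketch of the same trick using the Python 3 standard library (it assumes the function simply returns encoded_value):

from urllib.parse import urlencode

def url_encode_string_sketch(input_string):
    # Percent-encode by round-tripping through a dummy query parameter,
    # then strip the leading 'donotcare=' key.
    encoded_keyval = urlencode({'donotcare': input_string})
    return encoded_keyval[encoded_keyval.find('=') + 1:]

print(url_encode_string_sketch('gcc@11.2.0 +pic'))  # '@' -> %40, space -> '+', '+' -> %2B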
@@ -1262,6 +1296,17 @@ def url_encode_string(input_string):
|
||||
|
||||
|
||||
def import_signing_key(base64_signing_key):
|
||||
""" Given Base64-encoded gpg key, decode and import it to use for
|
||||
signing packages.
|
||||
|
||||
Arguments:
|
||||
base64_signing_key (str): A gpg key including the secret key,
|
||||
armor-exported and base64 encoded, so it can be stored in a
|
||||
gitlab CI variable. For an example of how to generate such
|
||||
a key, see:
|
||||
|
||||
https://github.com/spack/spack-infrastructure/blob/main/gitlab-docker/files/gen-key
|
||||
"""
|
||||
if not base64_signing_key:
|
||||
tty.warn('No key found for signing/verifying packages')
|
||||
return
|
||||
@@ -1299,14 +1344,34 @@ def import_signing_key(base64_signing_key):
|
||||
|
||||
|
||||
def can_sign_binaries():
|
||||
""" Utility method to determine if this spack instance is capable of
|
||||
signing binary packages. This is currently only possible if the
|
||||
spack gpg keystore contains exactly one secret key."""
|
||||
return len(gpg_util.signing_keys()) == 1
|
||||
|
||||
|
||||
def can_verify_binaries():
|
||||
""" Utility method to determin if this spack instance is capable (at
|
||||
least in theory) of verifying signed binaries."""
|
||||
return len(gpg_util.public_keys()) >= 1
|
||||
|
||||
|
||||
def configure_compilers(compiler_action, scope=None):
|
||||
""" Depending on the compiler_action parameter, either turn on the
|
||||
install_missing_compilers config option, or find spack compilers,
|
||||
or do nothing. This is used from rebuild jobs in bootstrapping
|
||||
pipelines, where in the bootstrapping phase we would pass
|
||||
FIND_ANY in case of compiler-agnostic bootstrapping, while in the
|
||||
spec building phase we would pass INSTALL_MISSING in order to get
|
||||
spack to use the compiler which was built in the previous phase and
|
||||
is now sitting in the binary mirror.
|
||||
|
||||
Arguments:
|
||||
compiler_action (str): 'FIND_ANY', 'INSTALL_MISSING' have meanings
|
||||
described above. Any other value essentially results in a no-op.
|
||||
scope (spack.config.ConfigScope): Optional. The scope in which to look for
|
||||
compilers, in case 'FIND_ANY' was provided.
|
||||
"""
|
||||
if compiler_action == 'INSTALL_MISSING':
|
||||
tty.debug('Make sure bootstrapped compiler will be installed')
|
||||
config = cfg.get('config')
|
||||
@@ -1330,6 +1395,35 @@ def configure_compilers(compiler_action, scope=None):
|
||||
|
||||
|
||||
def get_concrete_specs(env, root_spec, job_name, compiler_action):
|
||||
""" Build a dictionary of concrete specs relevant to a particular
|
||||
rebuild job. This includes the root spec and the spec to be
|
||||
rebuilt (which could be the same).
|
||||
|
||||
Arguments:
|
||||
|
||||
env (spack.environment.Environment): Activated spack environment
|
||||
used to get concrete root spec by hash in case compiler_action
|
||||
is anything other than FIND_ANY.
|
||||
root_spec (str): If compiler_action is FIND_ANY root_spec is
|
||||
a string representation which can be turned directly into
|
||||
a spec, otherwise, it's a hash used to index the activated
|
||||
spack environment.
|
||||
job_name (str): Name of package to be built, used to index the
|
||||
concrete root spec and produce the concrete spec to be
|
||||
built.
|
||||
compiler_action (str): Determines how to interpret the root_spec
|
||||
parameter, either as a string representation or as a hash.
|
||||
|
||||
Returns:
|
||||
|
||||
.. code-block:: JSON
|
||||
|
||||
{
|
||||
"root": "<spec>",
|
||||
"<job-pkg-name>": "<spec>",
|
||||
}
|
||||
|
||||
"""
|
||||
spec_map = {
|
||||
'root': None,
|
||||
}
|
||||
@@ -1376,6 +1470,19 @@ def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
|
||||
|
||||
|
||||
def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
|
||||
""" Push one or more binary packages to the mirror.
|
||||
|
||||
Arguments:
|
||||
|
||||
env (spack.environment.Environment): Optional environment. If
|
||||
provided, it is used to make sure binary package to push
|
||||
exists in the environment.
|
||||
specfile_path (str): Path to spec.json corresponding to built pkg
|
||||
to push.
|
||||
mirror_url (str): Base url of target mirror
|
||||
sign_binaries (bool): If True, spack will attempt to sign binary
|
||||
package before pushing.
|
||||
"""
|
||||
try:
|
||||
_push_mirror_contents(env, specfile_path, sign_binaries, mirror_url)
|
||||
except Exception as inst:
|
||||
@@ -1400,6 +1507,15 @@ def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
|
||||
|
||||
|
||||
def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
|
||||
""" Looks for spack-build-out.txt in the stage directory of the given
|
||||
job_spec, and attempts to copy the file into the directory given
|
||||
by job_log_dir.
|
||||
|
||||
Arguments:
|
||||
|
||||
job_spec (spack.spec.Spec): Spec associated with spack install log
|
||||
job_log_dir (str): Path into which build log should be copied
|
||||
"""
|
||||
try:
|
||||
job_pkg = spack.repo.get(job_spec)
|
||||
tty.debug('job package: {0}'.format(job_pkg))
|
||||
@@ -1418,6 +1534,14 @@ def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
|
||||
|
||||
|
||||
def download_and_extract_artifacts(url, work_dir):
|
||||
""" Look for gitlab artifacts.zip at the given url, and attempt to download
|
||||
and extract the contents into the given work_dir
|
||||
|
||||
Arguments:
|
||||
|
||||
url (str): Complete url to artifacts.zip file
|
||||
work_dir (str): Path to destination where artifacts should be extracted
|
||||
"""
|
||||
tty.msg('Fetching artifacts from: {0}\n'.format(url))
|
||||
|
||||
headers = {
|
||||
@@ -1457,6 +1581,8 @@ def download_and_extract_artifacts(url, work_dir):
|
||||
|
||||
|
||||
def get_spack_info():
|
||||
""" If spack is running from a git repo, return the most recent git log
|
||||
entry, otherwise, return a string containing the spack version. """
|
||||
git_path = os.path.join(spack.paths.prefix, ".git")
|
||||
if os.path.exists(git_path):
|
||||
git = exe.which("git")
|
||||
@@ -1472,6 +1598,23 @@ def get_spack_info():
def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
    """ Look in the local spack clone to find the checkout_commit, and if
    provided, the merge_commit given as arguments.  If those commits can
    be found locally, then clone spack and attempt to recreate a merge
    commit with the same parent commits as tested in gitlab.  This looks
    something like 1) git clone repo && cd repo 2) git checkout
    <checkout_commit> 3) git merge <merge_commit>.  If there is no
    merge_commit provided, then skip step (3).

    Arguments:

        repro_dir (str): Location where spack should be cloned
        checkout_commit (str): SHA of PR branch commit
        merge_commit (str): SHA of target branch parent

    Returns: True if git repo state was successfully recreated, or False
        otherwise.
    """
|
||||
# figure out the path to the spack git version being used for the
|
||||
# reproduction
|
||||
print('checkout_commit: {0}'.format(checkout_commit))
|
||||
@@ -1513,7 +1656,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
|
||||
fail_on_error=False)
|
||||
|
||||
if git.returncode != 0:
|
||||
tty.error('Unable to clone your local spac repo:')
|
||||
tty.error('Unable to clone your local spack repo:')
|
||||
tty.msg(clone_out)
|
||||
return False
|
||||
|
||||
@@ -1546,6 +1689,18 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
def reproduce_ci_job(url, work_dir):
    """ Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
    attempt to set up an environment in which the failure can be reproduced
    locally.  This entails the following:

    First download and extract artifacts.  Then look through those artifacts
    to glean some information needed for the reproducer (e.g. one of the
    artifacts contains information about the version of spack tested by
    gitlab, another is the generated pipeline yaml containing details
    of the job like the docker image used to run it).  The output of this
    function is a set of printed instructions for running docker and then
    commands to run to reproduce the build once inside the container.
    """
|
||||
download_and_extract_artifacts(url, work_dir)
|
||||
|
||||
lock_file = fs.find(work_dir, 'spack.lock')[0]
|
||||
|
||||
@@ -155,31 +155,17 @@ def parse_specs(args, **kwargs):
|
||||
normalize = kwargs.get('normalize', False)
|
||||
tests = kwargs.get('tests', False)
|
||||
|
||||
try:
|
||||
sargs = args
|
||||
if not isinstance(args, six.string_types):
|
||||
sargs = ' '.join(spack.util.string.quote(args))
|
||||
specs = spack.spec.parse(sargs)
|
||||
for spec in specs:
|
||||
if concretize:
|
||||
spec.concretize(tests=tests) # implies normalize
|
||||
elif normalize:
|
||||
spec.normalize(tests=tests)
|
||||
sargs = args
|
||||
if not isinstance(args, six.string_types):
|
||||
sargs = ' '.join(spack.util.string.quote(args))
|
||||
specs = spack.spec.parse(sargs)
|
||||
for spec in specs:
|
||||
if concretize:
|
||||
spec.concretize(tests=tests) # implies normalize
|
||||
elif normalize:
|
||||
spec.normalize(tests=tests)
|
||||
|
||||
return specs
|
||||
|
||||
except spack.spec.SpecParseError as e:
|
||||
msg = e.message + "\n" + str(e.string) + "\n"
|
||||
msg += (e.pos + 2) * " " + "^"
|
||||
raise spack.error.SpackError(msg)
|
||||
|
||||
except spack.error.SpecError as e:
|
||||
|
||||
msg = e.message
|
||||
if e.long_message:
|
||||
msg += e.long_message
|
||||
|
||||
raise spack.error.SpackError(msg)
|
||||
return specs
|
||||
|
||||
|
||||
def matching_spec_from_env(spec):
|
||||
|
||||
@@ -161,11 +161,6 @@ def setup_parser(subparser):
|
||||
help=('Check single spec from json or yaml file instead of release ' +
|
||||
'specs file'))
|
||||
|
||||
check.add_argument(
|
||||
'--rebuild-on-error', default=False, action='store_true',
|
||||
help="Default to rebuilding packages if errors are encountered " +
|
||||
"during the process of checking whether rebuilding is needed")
|
||||
|
||||
check.set_defaults(func=check_fn)
|
||||
|
||||
# Download tarball and specfile
|
||||
@@ -361,7 +356,7 @@ def list_fn(args):
|
||||
try:
|
||||
specs = bindist.update_cache_and_get_specs()
|
||||
except bindist.FetchCacheError as e:
|
||||
tty.error(e)
|
||||
tty.die(e)
|
||||
|
||||
if not args.allarch:
|
||||
arch = spack.spec.Spec.default_arch()
|
||||
@@ -430,7 +425,7 @@ def check_fn(args):
|
||||
sys.exit(0)
|
||||
|
||||
sys.exit(bindist.check_specs_against_mirrors(
|
||||
configured_mirrors, specs, args.output_file, args.rebuild_on_error))
|
||||
configured_mirrors, specs, args.output_file))
|
||||
|
||||
|
||||
def download_fn(args):
|
||||
@@ -483,11 +478,12 @@ def save_specfile_fn(args):
|
||||
if args.root_specfile:
|
||||
with open(args.root_specfile) as fd:
|
||||
root_spec_as_json = fd.read()
|
||||
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
|
||||
else:
|
||||
root_spec = Spec(args.root_spec)
|
||||
root_spec.concretize()
|
||||
root_spec_as_json = root_spec.to_json(hash=ht.build_hash)
|
||||
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
|
||||
root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
|
||||
spec_format = 'json'
|
||||
save_dependency_specfiles(
|
||||
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)
|
||||
|
||||
@@ -701,7 +697,7 @@ def update_index(mirror_url, update_keys=False):
|
||||
|
||||
def update_index_fn(args):
|
||||
"""Update a buildcache index."""
|
||||
outdir = '.'
|
||||
outdir = 'file://.'
|
||||
if args.mirror_url:
|
||||
outdir = args.mirror_url
|
||||
|
||||
|
||||
@@ -167,8 +167,7 @@ def ci_reindex(args):
|
||||
|
||||
def ci_rebuild(args):
|
||||
"""Check a single spec against the remote mirror, and rebuild it from
|
||||
source if the mirror does not contain the full hash match of the spec
|
||||
as computed locally. """
|
||||
source if the mirror does not contain the hash. """
|
||||
env = spack.cmd.require_active_env(cmd_name='ci rebuild')
|
||||
|
||||
# Make sure the environment is "gitlab-enabled", or else there's nothing
|
||||
@@ -280,8 +279,8 @@ def ci_rebuild(args):
|
||||
env, root_spec, job_spec_pkg_name, compiler_action)
|
||||
job_spec = spec_map[job_spec_pkg_name]
|
||||
|
||||
job_spec_yaml_file = '{0}.yaml'.format(job_spec_pkg_name)
|
||||
job_spec_yaml_path = os.path.join(repro_dir, job_spec_yaml_file)
|
||||
job_spec_json_file = '{0}.json'.format(job_spec_pkg_name)
|
||||
job_spec_json_path = os.path.join(repro_dir, job_spec_json_file)
|
||||
|
||||
# To provide logs, cdash reports, etc for developer download/perusal,
|
||||
# these things have to be put into artifacts. This means downstream
|
||||
@@ -335,23 +334,23 @@ def ci_rebuild(args):
|
||||
# using a compiler already installed on the target system).
|
||||
spack_ci.configure_compilers(compiler_action)
|
||||
|
||||
# Write this job's spec yaml into the reproduction directory, and it will
|
||||
# Write this job's spec json into the reproduction directory, and it will
|
||||
# also be used in the generated "spack install" command to install the spec
|
||||
tty.debug('job concrete spec path: {0}'.format(job_spec_yaml_path))
|
||||
with open(job_spec_yaml_path, 'w') as fd:
|
||||
fd.write(job_spec.to_yaml(hash=ht.build_hash))
|
||||
tty.debug('job concrete spec path: {0}'.format(job_spec_json_path))
|
||||
with open(job_spec_json_path, 'w') as fd:
|
||||
fd.write(job_spec.to_json(hash=ht.dag_hash))
|
||||
|
||||
# Write the concrete root spec yaml into the reproduction directory
|
||||
root_spec_yaml_path = os.path.join(repro_dir, 'root.yaml')
|
||||
with open(root_spec_yaml_path, 'w') as fd:
|
||||
fd.write(spec_map['root'].to_yaml(hash=ht.build_hash))
|
||||
# Write the concrete root spec json into the reproduction directory
|
||||
root_spec_json_path = os.path.join(repro_dir, 'root.json')
|
||||
with open(root_spec_json_path, 'w') as fd:
|
||||
fd.write(spec_map['root'].to_json(hash=ht.dag_hash))
|
||||
|
||||
# Write some other details to aid in reproduction into an artifact
|
||||
repro_file = os.path.join(repro_dir, 'repro.json')
|
||||
repro_details = {
|
||||
'job_name': ci_job_name,
|
||||
'job_spec_yaml': job_spec_yaml_file,
|
||||
'root_spec_yaml': 'root.yaml',
|
||||
'job_spec_json': job_spec_json_file,
|
||||
'root_spec_json': 'root.json',
|
||||
'ci_project_dir': ci_project_dir
|
||||
}
|
||||
with open(repro_file, 'w') as fd:
|
||||
@@ -366,25 +365,24 @@ def ci_rebuild(args):
|
||||
fd.write(b'\n')
|
||||
|
||||
# If we decided there should be a temporary storage mechanism, add that
|
||||
# mirror now so it's used when we check for a full hash match already
|
||||
# mirror now so it's used when we check for a hash match already
|
||||
# built for this spec.
|
||||
if pipeline_mirror_url:
|
||||
spack.mirror.add(spack_ci.TEMP_STORAGE_MIRROR_NAME,
|
||||
pipeline_mirror_url,
|
||||
cfg.default_modify_scope())
|
||||
|
||||
# Check configured mirrors for a built spec with a matching full hash
|
||||
matches = bindist.get_mirrors_for_spec(
|
||||
job_spec, full_hash_match=True, index_only=False)
|
||||
# Check configured mirrors for a built spec with a matching hash
|
||||
matches = bindist.get_mirrors_for_spec(job_spec, index_only=False)
|
||||
|
||||
if matches:
|
||||
# Got a full hash match on at least one configured mirror. All
|
||||
# Got a hash match on at least one configured mirror. All
|
||||
# matches represent the fully up-to-date spec, so should all be
|
||||
# equivalent. If artifacts mirror is enabled, we just pick one
|
||||
# of the matches and download the buildcache files from there to
|
||||
# the artifacts, so they're available to be used by dependent
|
||||
# jobs in subsequent stages.
|
||||
tty.msg('No need to rebuild {0}, found full hash match at: '.format(
|
||||
tty.msg('No need to rebuild {0}, found hash match at: '.format(
|
||||
job_spec_pkg_name))
|
||||
for match in matches:
|
||||
tty.msg(' {0}'.format(match['mirror_url']))
|
||||
@@ -403,7 +401,7 @@ def ci_rebuild(args):
|
||||
# Now we are done and successful
|
||||
sys.exit(0)
|
||||
|
||||
# No full hash match anywhere means we need to rebuild spec
|
||||
# No hash match anywhere means we need to rebuild spec
|
||||
|
||||
# Start with spack arguments
|
||||
install_args = [base_arg for base_arg in CI_REBUILD_INSTALL_BASE_ARGS]
|
||||
@@ -415,7 +413,6 @@ def ci_rebuild(args):
|
||||
install_args.extend([
|
||||
'install',
|
||||
'--keep-stage',
|
||||
'--require-full-hash-match',
|
||||
])
|
||||
|
||||
can_verify = spack_ci.can_verify_binaries()
|
||||
@@ -443,8 +440,8 @@ def ci_rebuild(args):
|
||||
|
||||
# TODO: once we have the concrete spec registry, use the DAG hash
|
||||
# to identify the spec to install, rather than the concrete spec
|
||||
# yaml file.
|
||||
install_args.extend(['-f', job_spec_yaml_path])
|
||||
# json file.
|
||||
install_args.extend(['-f', job_spec_json_path])
|
||||
|
||||
tty.debug('Installing {0} from source'.format(job_spec.name))
|
||||
tty.debug('spack install arguments: {0}'.format(
|
||||
@@ -477,13 +474,13 @@ def ci_rebuild(args):
|
||||
tty.debug('spack install exited {0}'.format(install_exit_code))
|
||||
|
||||
# If a spec fails to build in a spack develop pipeline, we add it to a
|
||||
# list of known broken full hashes. This allows spack PR pipelines to
|
||||
# list of known broken hashes. This allows spack PR pipelines to
|
||||
# avoid wasting compute cycles attempting to build those hashes.
|
||||
if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
|
||||
tty.debug('Install failed on develop')
|
||||
if 'broken-specs-url' in gitlab_ci:
|
||||
broken_specs_url = gitlab_ci['broken-specs-url']
|
||||
dev_fail_hash = job_spec.full_hash()
|
||||
dev_fail_hash = job_spec.dag_hash()
|
||||
broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash)
|
||||
tty.msg('Reporting broken develop build as: {0}'.format(
|
||||
broken_spec_path))
|
||||
@@ -494,7 +491,7 @@ def ci_rebuild(args):
|
||||
'broken-spec': {
|
||||
'job-url': get_env_var('CI_JOB_URL'),
|
||||
'pipeline-url': get_env_var('CI_PIPELINE_URL'),
|
||||
'concrete-spec-yaml': job_spec.to_dict(hash=ht.full_hash)
|
||||
'concrete-spec-dict': job_spec.to_dict(hash=ht.dag_hash)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -539,7 +536,7 @@ def ci_rebuild(args):
|
||||
# per-PR mirror, if this is a PR pipeline
|
||||
if buildcache_mirror_url:
|
||||
spack_ci.push_mirror_contents(
|
||||
env, job_spec_yaml_path, buildcache_mirror_url, sign_binaries
|
||||
env, job_spec_json_path, buildcache_mirror_url, sign_binaries
|
||||
)
|
||||
|
||||
# Create another copy of that buildcache in the per-pipeline
|
||||
@@ -548,14 +545,14 @@ def ci_rebuild(args):
|
||||
# prefix is set)
|
||||
if pipeline_mirror_url:
|
||||
spack_ci.push_mirror_contents(
|
||||
env, job_spec_yaml_path, pipeline_mirror_url, sign_binaries
|
||||
env, job_spec_json_path, pipeline_mirror_url, sign_binaries
|
||||
)
|
||||
|
||||
# If this is a develop pipeline, check if the spec that we just built is
|
||||
# on the broken-specs list. If so, remove it.
|
||||
if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:
|
||||
broken_specs_url = gitlab_ci['broken-specs-url']
|
||||
just_built_hash = job_spec.full_hash()
|
||||
just_built_hash = job_spec.dag_hash()
|
||||
broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
|
||||
if web_util.url_exists(broken_spec_path):
|
||||
tty.msg('Removing {0} from the list of broken specs'.format(
|
||||
|
||||
@@ -380,6 +380,11 @@ def add_concretizer_args(subparser):
|
||||
const=False, default=None,
|
||||
help='do not reuse installed deps; build newest configuration'
|
||||
)
|
||||
subgroup.add_argument(
|
||||
'--minimal', action=ConfigSetAction, dest="concretizer:minimal",
|
||||
const=True, default=None,
|
||||
help='minimize builds (disables default variants, may choose older versions)'
|
||||
)
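A hedged sketch of the effect of the new flag, assuming ConfigSetAction stores the constant under the dest path shown above:

    # Roughly what --minimal amounts to in configuration terms (assumption):
    spack.config.set('concretizer:minimal', True)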
|
||||
subgroup.add_argument(
|
||||
'--reuse', action=ConfigSetAction, dest="concretizer:reuse",
|
||||
const=True, default=None,
|
||||
|
||||
@@ -18,6 +18,8 @@
|
||||
|
||||
def setup_parser(subparser):
|
||||
arguments.add_common_arguments(subparser, ['clean', 'dirty'])
|
||||
arguments.add_concretizer_args(subparser)
|
||||
|
||||
subparser.add_argument(
|
||||
'--dump', metavar="FILE",
|
||||
help="dump a source-able environment to FILE"
|
||||
|
||||
@@ -22,6 +22,9 @@ def setup_parser(subparser):
|
||||
help="""Concretize with test dependencies. When 'root' is chosen, test
|
||||
dependencies are only added for the environment's root specs. When 'all' is
|
||||
chosen, test dependencies are enabled for all packages in the environment.""")
|
||||
subparser.add_argument(
|
||||
'-q', '--quiet', action='store_true',
|
||||
help="Don't print concretized specs")
|
||||
|
||||
spack.cmd.common.arguments.add_concretizer_args(subparser)
|
||||
|
||||
@@ -38,5 +41,6 @@ def concretize(parser, args):
|
||||
|
||||
with env.write_transaction():
|
||||
concretized_specs = env.concretize(force=args.force, tests=tests)
|
||||
ev.display_specs(concretized_specs)
|
||||
if not args.quiet:
|
||||
ev.display_specs(concretized_specs)
|
||||
env.write()
|
||||
|
||||
@@ -187,6 +187,27 @@ def cmake_args(self):
|
||||
return args"""
|
||||
|
||||
|
||||
class LuaPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for LuaRocks-based packages"""
|
||||
|
||||
base_class_name = 'LuaPackage'
|
||||
|
||||
body_def = """\
|
||||
def luarocks_args(self):
|
||||
# FIXME: Add arguments to `luarocks make` other than rockspec path
|
||||
# FIXME: If not needed delete this function
|
||||
args = []
|
||||
return args"""
|
||||
|
||||
def __init__(self, name, url, *args, **kwargs):
|
||||
# If the user provided `--name lua-lpeg`, don't rename it lua-lua-lpeg
|
||||
if not name.startswith('lua-'):
|
||||
# Make it more obvious that we are renaming the package
|
||||
tty.msg("Changing package name from {0} to lua-{0}".format(name))
|
||||
name = 'lua-{0}'.format(name)
|
||||
super(LuaPackageTemplate, self).__init__(name, url, *args, **kwargs)
|
||||
|
||||
|
||||
class MesonPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for meson-based packages"""
|
||||
|
||||
@@ -580,6 +601,7 @@ def __init__(self, name, *args, **kwargs):
|
||||
'makefile': MakefilePackageTemplate,
|
||||
'intel': IntelPackageTemplate,
|
||||
'meson': MesonPackageTemplate,
|
||||
'lua': LuaPackageTemplate,
|
||||
'sip': SIPPackageTemplate,
|
||||
'generic': PackageTemplate,
|
||||
}
|
||||
@@ -644,6 +666,9 @@ def __call__(self, stage, url):
|
||||
if url.endswith('.whl') or '.whl#' in url:
|
||||
self.build_system = 'python'
|
||||
return
|
||||
if url.endswith('.rock'):
|
||||
self.build_system = 'lua'
|
||||
return
|
||||
|
||||
# A list of clues that give us an idea of the build system a package
|
||||
# uses. If the regular expression matches a file contained in the
|
||||
@@ -668,6 +693,7 @@ def __call__(self, stage, url):
|
||||
(r'/Rakefile$', 'ruby'),
|
||||
(r'/setup\.rb$', 'ruby'),
|
||||
(r'/.*\.pro$', 'qmake'),
|
||||
(r'/.*\.rockspec$', 'lua'),
|
||||
(r'/(GNU)?[Mm]akefile$', 'makefile'),
|
||||
(r'/DESCRIPTION$', 'octave'),
|
||||
(r'/meson\.build$', 'meson'),
|
||||
|
||||
@@ -91,8 +91,8 @@ def dev_build(self, args):
|
||||
spec.concretize()
|
||||
package = spack.repo.get(spec)
|
||||
|
||||
if package.installed:
|
||||
tty.error("Already installed in %s" % package.prefix)
|
||||
if spec.installed:
|
||||
tty.error("Already installed in %s" % spec.prefix)
|
||||
tty.msg("Uninstall or try adding a version suffix for this dev build.")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
@@ -68,8 +68,14 @@ def compare_specs(a, b, to_string=False, color=None):
|
||||
# Prepare a solver setup to parse differences
|
||||
setup = asp.SpackSolverSetup()
|
||||
|
||||
a_facts = set(t for t in setup.spec_clauses(a, body=True, expand_hashes=True))
|
||||
b_facts = set(t for t in setup.spec_clauses(b, body=True, expand_hashes=True))
|
||||
# get facts for specs, making sure to include build dependencies of concrete
|
||||
# specs and to descend into dependency hashes so we include all facts.
|
||||
a_facts = set(t for t in setup.spec_clauses(
|
||||
a, body=True, expand_hashes=True, concrete_build_deps=True,
|
||||
))
|
||||
b_facts = set(t for t in setup.spec_clauses(
|
||||
b, body=True, expand_hashes=True, concrete_build_deps=True,
|
||||
))
|
||||
|
||||
# We want to present them to the user as simple key: values
|
||||
intersect = sorted(a_facts.intersection(b_facts))
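A hedged usage sketch of the comparison entry point above; the spec names are placeholders:

    import spack.spec

    a = spack.spec.Spec('zlib@1.2.11').concretized()
    b = spack.spec.Spec('zlib@1.2.12').concretized()
    result = compare_specs(a, b, to_string=True)  # now includes concrete build deps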
|
||||
|
||||
@@ -8,6 +8,8 @@
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
import six
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.tty.colify import colify
|
||||
@@ -41,7 +43,8 @@
|
||||
'loads',
|
||||
'view',
|
||||
'update',
|
||||
'revert'
|
||||
'revert',
|
||||
'depfile'
|
||||
]
|
||||
|
||||
|
||||
@@ -523,6 +526,154 @@ def env_revert(args):
|
||||
tty.msg(msg.format(manifest_file))
|
||||
|
||||
|
||||
def env_depfile_setup_parser(subparser):
|
||||
"""generate a depfile from the concrete environment specs"""
|
||||
subparser.add_argument(
|
||||
'--make-target-prefix', default=None, metavar='TARGET',
|
||||
help='prefix Makefile targets with <TARGET>/<name>. By default the absolute '
|
||||
'path to the directory makedeps under the environment metadata dir is '
|
||||
'used. Can be set to an empty string --make-target-prefix \'\'.')
|
||||
subparser.add_argument(
|
||||
'--make-disable-jobserver', default=True, action='store_false',
|
||||
dest='jobserver', help='disable POSIX jobserver support.')
|
||||
subparser.add_argument(
|
||||
'-o', '--output', default=None, metavar='FILE',
|
||||
help='write the depfile to FILE rather than to stdout')
|
||||
subparser.add_argument(
|
||||
'-G', '--generator', default='make', choices=('make',),
|
||||
help='specify the depfile type. Currently only make is supported.')
|
||||
|
||||
|
||||
def env_depfile(args):
|
||||
# Currently only make is supported.
|
||||
spack.cmd.require_active_env(cmd_name='env depfile')
|
||||
env = ev.active_environment()
|
||||
|
||||
# Maps each hash in the environment to a string of install prereqs
|
||||
hash_to_prereqs = {}
|
||||
hash_to_spec = {}
|
||||
|
||||
if args.make_target_prefix is None:
|
||||
target_prefix = os.path.join(env.env_subdir_path, 'makedeps')
|
||||
else:
|
||||
target_prefix = args.make_target_prefix
|
||||
|
||||
def get_target(name):
|
||||
# The `all`, `fetch` and `clean` targets are phony. It doesn't make sense to
|
||||
# have /abs/path/to/env/metadir/{all,clean} targets. But it *does* make
|
||||
# sense to have a prefix like `env/all`, `env/fetch`, `env/clean` when they are
|
||||
# supposed to be included
|
||||
if name in ('all', 'fetch-all', 'clean') and os.path.isabs(target_prefix):
|
||||
return name
|
||||
else:
|
||||
return os.path.join(target_prefix, name)
|
||||
|
||||
def get_install_target(name):
|
||||
return os.path.join(target_prefix, '.install', name)
|
||||
|
||||
def get_fetch_target(name):
|
||||
return os.path.join(target_prefix, '.fetch', name)
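A hedged illustration of the naming scheme implemented by the helpers above, assuming a relative, hypothetical target_prefix of 'makedeps' on a POSIX system:

    assert get_target('all') == 'makedeps/all'
    assert get_install_target('abc1234') == 'makedeps/.install/abc1234'
    assert get_fetch_target('abc1234') == 'makedeps/.fetch/abc1234'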
|
||||
|
||||
for _, spec in env.concretized_specs():
|
||||
for s in spec.traverse(root=True):
|
||||
hash_to_spec[s.dag_hash()] = s
|
||||
hash_to_prereqs[s.dag_hash()] = [
|
||||
get_install_target(dep.dag_hash()) for dep in s.dependencies()]
|
||||
|
||||
root_dags = [s.dag_hash() for _, s in env.concretized_specs()]
|
||||
|
||||
# Root specs without deps are the prereqs for the environment target
|
||||
root_install_targets = [get_install_target(h) for h in root_dags]
|
||||
|
||||
# All package install targets, not just roots.
|
||||
all_install_targets = [get_install_target(h) for h in hash_to_spec.keys()]
|
||||
|
||||
# Fetch targets for all packages in the environment, not just roots.
|
||||
all_fetch_targets = [get_fetch_target(h) for h in hash_to_spec.keys()]
|
||||
|
||||
buf = six.StringIO()
|
||||
|
||||
buf.write("""SPACK ?= spack
|
||||
|
||||
.PHONY: {} {} {}
|
||||
|
||||
{}: {}
|
||||
|
||||
{}: {}
|
||||
|
||||
{}: {}
|
||||
\t@touch $@
|
||||
|
||||
{}: {}
|
||||
\t@touch $@
|
||||
|
||||
{}:
|
||||
\t@mkdir -p {} {}
|
||||
|
||||
{}: | {}
|
||||
\t$(info Fetching $(SPEC))
|
||||
\t$(SPACK) -e '{}' fetch $(SPACK_FETCH_FLAGS) /$(notdir $@) && touch $@
|
||||
|
||||
{}: {}
|
||||
\t$(info Installing $(SPEC))
|
||||
\t{}$(SPACK) -e '{}' install $(SPACK_INSTALL_FLAGS) --only-concrete --only=package \
|
||||
--no-add /$(notdir $@) && touch $@
|
||||
|
||||
""".format(get_target('all'), get_target('fetch-all'), get_target('clean'),
|
||||
get_target('all'), get_target('env'),
|
||||
get_target('fetch-all'), get_target('fetch'),
|
||||
get_target('env'), ' '.join(root_install_targets),
|
||||
get_target('fetch'), ' '.join(all_fetch_targets),
|
||||
get_target('dirs'), get_target('.fetch'), get_target('.install'),
|
||||
get_target('.fetch/%'), get_target('dirs'),
|
||||
env.path,
|
||||
get_target('.install/%'), get_target('.fetch/%'),
|
||||
'+' if args.jobserver else '', env.path))
|
||||
|
||||
# Targets are of the form <prefix>/<name>: [<prefix>/<depname>]...,
|
||||
# The prefix can be an empty string, in that case we don't add the `/`.
|
||||
# The name is currently the dag hash of the spec. In principle it
|
||||
# could be the package name in case of `concretization: together` so
|
||||
# it can be more easily referred to, but for now we don't special case
|
||||
# this.
|
||||
fmt = '{name}{@version}{%compiler}{variants}{arch=architecture}'
|
||||
|
||||
# Set SPEC for each hash
|
||||
buf.write('# Set the human-readable spec for each target\n')
|
||||
for dag_hash in hash_to_prereqs.keys():
|
||||
formatted_spec = hash_to_spec[dag_hash].format(fmt)
|
||||
buf.write("{}: SPEC = {}\n".format(get_target('%/' + dag_hash), formatted_spec))
|
||||
buf.write('\n')
|
||||
|
||||
# Set install dependencies
|
||||
buf.write('# Install dependencies\n')
|
||||
for parent, children in hash_to_prereqs.items():
|
||||
if not children:
|
||||
continue
|
||||
buf.write('{}: {}\n'.format(get_install_target(parent), ' '.join(children)))
|
||||
buf.write('\n')
|
||||
|
||||
# Clean target: remove target files but not their folders, cause
|
||||
# --make-target-prefix can be any existing directory we do not control,
|
||||
# including empty string (which means deleting the containing folder
|
||||
# would delete the folder with the Makefile)
|
||||
buf.write("{}:\n\trm -f -- {} {} {} {}\n".format(
|
||||
get_target('clean'),
|
||||
get_target('env'),
|
||||
get_target('fetch'),
|
||||
' '.join(all_fetch_targets),
|
||||
' '.join(all_install_targets)))
|
||||
|
||||
makefile = buf.getvalue()
|
||||
|
||||
# Finally write to stdout/file.
|
||||
if args.output:
|
||||
with open(args.output, 'w') as f:
|
||||
f.write(makefile)
|
||||
else:
|
||||
sys.stdout.write(makefile)
|
||||
|
||||
|
||||
#: Dictionary mapping subcommand names and aliases to functions
|
||||
subcommand_functions = {}
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
import llnl.util.tty as tty
|
||||
@@ -13,6 +14,7 @@
|
||||
import spack
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments
|
||||
import spack.cray_manifest as cray_manifest
|
||||
import spack.detection
|
||||
import spack.error
|
||||
import spack.util.environment
|
||||
@@ -35,6 +37,9 @@ def setup_parser(subparser):
|
||||
find_parser.add_argument(
|
||||
'--not-buildable', action='store_true', default=False,
|
||||
help="packages with detected externals won't be built with Spack")
|
||||
find_parser.add_argument(
|
||||
'-p', '--path', default=None, action='append',
|
||||
help="Alternative search paths for finding externals. May be repeated")
|
||||
find_parser.add_argument(
|
||||
'--scope', choices=scopes, metavar=scopes_metavar,
|
||||
default=spack.config.default_modify_scope('packages'),
|
||||
@@ -55,8 +60,40 @@ def setup_parser(subparser):
|
||||
'list', help='list detectable packages, by repository and name'
|
||||
)
|
||||
|
||||
read_cray_manifest = sp.add_parser(
|
||||
'read-cray-manifest', help=(
|
||||
"consume a Spack-compatible description of externally-installed "
|
||||
"packages, including dependency relationships"
|
||||
)
|
||||
)
|
||||
read_cray_manifest.add_argument(
|
||||
'--file', default=None,
|
||||
help="specify a location other than the default")
|
||||
read_cray_manifest.add_argument(
|
||||
'--directory', default=None,
|
||||
help="specify a directory storing a group of manifest files")
|
||||
read_cray_manifest.add_argument(
|
||||
'--dry-run', action='store_true', default=False,
|
||||
help="don't modify DB with files that are read")
|
||||
read_cray_manifest.add_argument(
|
||||
'--fail-on-error', action='store_true',
|
||||
help=("if a manifest file cannot be parsed, fail and report the "
|
||||
"full stack trace")
|
||||
)
|
||||
|
||||
|
||||
def external_find(args):
|
||||
if args.all or not (args.tags or args.packages):
|
||||
# If the user calls 'spack external find' with no arguments, and
|
||||
# this system has a description of installed packages, then we should
|
||||
# consume it automatically.
|
||||
try:
|
||||
_collect_and_consume_cray_manifest_files()
|
||||
except NoManifestFileError:
|
||||
# It's fine to not find any manifest file if we are doing the
|
||||
# search implicitly (i.e. as part of 'spack external find')
|
||||
pass
|
||||
|
||||
# If the user didn't specify anything, search for build tools by default
|
||||
if not args.tags and not args.all and not args.packages:
|
||||
args.tags = ['core-packages', 'build-tools']
|
||||
@@ -90,8 +127,10 @@ def external_find(args):
|
||||
if not args.tags and not packages_to_check:
|
||||
packages_to_check = spack.repo.path.all_packages()
|
||||
|
||||
detected_packages = spack.detection.by_executable(packages_to_check)
|
||||
detected_packages.update(spack.detection.by_library(packages_to_check))
|
||||
detected_packages = spack.detection.by_executable(
|
||||
packages_to_check, path_hints=args.path)
|
||||
detected_packages.update(spack.detection.by_library(
|
||||
packages_to_check, path_hints=args.path))
|
||||
|
||||
new_entries = spack.detection.update_configuration(
|
||||
detected_packages, scope=args.scope, buildable=not args.not_buildable
|
||||
@@ -106,6 +145,56 @@ def external_find(args):
|
||||
tty.msg('No new external packages detected')
|
||||
|
||||
|
||||
def external_read_cray_manifest(args):
|
||||
_collect_and_consume_cray_manifest_files(
|
||||
manifest_file=args.file,
|
||||
manifest_directory=args.directory,
|
||||
dry_run=args.dry_run,
|
||||
fail_on_error=args.fail_on_error
|
||||
)
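A hedged sketch of calling the collection helper directly for a dry run; the file name is a placeholder under the default manifest directory used elsewhere in this change:

    _collect_and_consume_cray_manifest_files(
        manifest_file='/opt/cray/pe/cpe-descriptive-manifest/example.json',
        dry_run=True)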
|
||||
|
||||
|
||||
def _collect_and_consume_cray_manifest_files(
|
||||
manifest_file=None, manifest_directory=None, dry_run=False,
|
||||
fail_on_error=False):
|
||||
|
||||
manifest_files = []
|
||||
if manifest_file:
|
||||
manifest_files.append(manifest_file)
|
||||
|
||||
manifest_dirs = []
|
||||
if manifest_directory:
|
||||
manifest_dirs.append(manifest_directory)
|
||||
|
||||
if os.path.isdir(cray_manifest.default_path):
|
||||
tty.debug(
|
||||
"Cray manifest path {0} exists: collecting all files to read."
|
||||
.format(cray_manifest.default_path))
|
||||
manifest_dirs.append(cray_manifest.default_path)
|
||||
else:
|
||||
tty.debug("Default Cray manifest directory {0} does not exist."
|
||||
.format(cray_manifest.default_path))
|
||||
|
||||
for directory in manifest_dirs:
|
||||
for fname in os.listdir(directory):
|
||||
manifest_files.append(os.path.join(directory, fname))
|
||||
|
||||
if not manifest_files:
|
||||
raise NoManifestFileError(
|
||||
"--file/--directory not specified, and no manifest found at {0}"
|
||||
.format(cray_manifest.default_path))
|
||||
|
||||
for path in manifest_files:
|
||||
try:
|
||||
cray_manifest.read(path, not dry_run)
|
||||
except (spack.compilers.UnknownCompilerError, spack.error.SpackError) as e:
|
||||
if fail_on_error:
|
||||
raise
|
||||
else:
|
||||
tty.warn("Failure reading manifest file: {0}"
|
||||
"\n\t{1}".format(path, str(e)))
|
||||
|
||||
|
||||
def external_list(args):
|
||||
# Trigger a read of all packages, might take a long time.
|
||||
list(spack.repo.path.all_packages())
|
||||
@@ -117,5 +206,10 @@ def external_list(args):
|
||||
|
||||
|
||||
def external(parser, args):
|
||||
action = {'find': external_find, 'list': external_list}
|
||||
action = {'find': external_find, 'list': external_list,
|
||||
'read-cray-manifest': external_read_cray_manifest}
|
||||
action[args.external_command](args)
|
||||
|
||||
|
||||
class NoManifestFileError(spack.error.SpackError):
|
||||
pass
|
||||
|
||||
@@ -69,14 +69,10 @@ def fetch(parser, args):
|
||||
|
||||
for spec in specs:
|
||||
if args.missing or args.dependencies:
|
||||
for s in spec.traverse():
|
||||
package = spack.repo.get(s)
|
||||
|
||||
for s in spec.traverse(root=False):
|
||||
# Skip already-installed packages with --missing
|
||||
if args.missing and package.installed:
|
||||
if args.missing and s.installed:
|
||||
continue
|
||||
|
||||
package.do_fetch()
|
||||
|
||||
package = spack.repo.get(spec)
|
||||
package.do_fetch()
|
||||
s.package.do_fetch()
|
||||
spec.package.do_fetch()
|
||||
|
||||
@@ -184,8 +184,9 @@ def print_detectable(pkg):
|
||||
color.cprint('')
|
||||
color.cprint(section_title('Externally Detectable: '))
|
||||
|
||||
# If the package has an 'executables' field, it can detect an installation
|
||||
if hasattr(pkg, 'executables'):
|
||||
# If the package has an 'executables' or 'libraries' field, it
|
||||
# can detect an installation
|
||||
if hasattr(pkg, 'executables') or hasattr(pkg, 'libraries'):
|
||||
find_attributes = []
|
||||
if hasattr(pkg, 'determine_version'):
|
||||
find_attributes.append('version')
|
||||
|
||||
@@ -47,7 +47,6 @@ def update_kwargs_from_args(args, kwargs):
|
||||
'explicit': True, # Always true for install command
|
||||
'stop_at': args.until,
|
||||
'unsigned': args.unsigned,
|
||||
'full_hash_match': args.full_hash_match,
|
||||
})
|
||||
|
||||
kwargs.update({
|
||||
@@ -117,11 +116,6 @@ def setup_parser(subparser):
|
||||
'--no-check-signature', action='store_true',
|
||||
dest='unsigned', default=False,
|
||||
help="do not check signatures of binary packages")
|
||||
subparser.add_argument(
|
||||
'--require-full-hash-match', action='store_true',
|
||||
dest='full_hash_match', default=False, help="""when installing from
|
||||
binary mirrors, do not install binary package unless the full hash of the
|
||||
remote spec matches that of the local spec""")
|
||||
subparser.add_argument(
|
||||
'--show-log-on-error', action='store_true',
|
||||
help="print full build log to stderr if build fails")
|
||||
@@ -159,10 +153,6 @@ def setup_parser(subparser):
|
||||
if 'all' is chosen, run package tests during installation for all
|
||||
packages. If neither are chosen, don't run tests for any packages."""
|
||||
)
|
||||
testing.add_argument(
|
||||
'--run-tests', action='store_true',
|
||||
help='run package tests during installation (same as --test=all)'
|
||||
)
|
||||
subparser.add_argument(
|
||||
'--log-format',
|
||||
default=None,
|
||||
@@ -316,11 +306,8 @@ def install(parser, args, **kwargs):
|
||||
if args.log_file:
|
||||
reporter.filename = args.log_file
|
||||
|
||||
if args.run_tests:
|
||||
tty.warn("Deprecated option: --run-tests: use --test=all instead")
|
||||
|
||||
def get_tests(specs):
|
||||
if args.test == 'all' or args.run_tests:
|
||||
if args.test == 'all':
|
||||
return True
|
||||
elif args.test == 'root':
|
||||
return [spec.name for spec in specs]
|
||||
@@ -477,7 +464,7 @@ def get_tests(specs):
|
||||
})
|
||||
|
||||
# If we are using the monitor, we send configs. and create build
|
||||
# The full_hash is the main package id, the build_hash for others
|
||||
# The dag_hash is the main package id
|
||||
if args.use_monitor and specs:
|
||||
monitor.new_configuration(specs)
|
||||
install_specs(args, kwargs, zip(abstract_specs, specs))
|
||||
|
||||
@@ -273,7 +273,7 @@ def refresh(module_type, specs, args):
|
||||
return
|
||||
|
||||
if not args.upstream_modules:
|
||||
specs = list(s for s in specs if not s.package.installed_upstream)
|
||||
specs = list(s for s in specs if not s.installed_upstream)
|
||||
|
||||
if not args.yes_to_all:
|
||||
msg = 'You are about to regenerate {types} module files for:\n'
|
||||
|
||||
@@ -136,13 +136,13 @@ def solve(parser, args):
|
||||
)
|
||||
|
||||
fmt = " @K{%%-8d} %%-%ds%%9s %%7s" % maxlen
|
||||
for i, (idx, build_idx, name) in enumerate(result.criteria, 1):
|
||||
for i, (installed_cost, build_cost, name) in enumerate(result.criteria, 1):
|
||||
color.cprint(
|
||||
fmt % (
|
||||
i,
|
||||
name,
|
||||
"-" if build_idx is None else opt[idx],
|
||||
opt[idx] if build_idx is None else opt[build_idx],
|
||||
"-" if build_cost is None else installed_cost,
|
||||
installed_cost if build_cost is None else build_cost,
|
||||
)
|
||||
)
|
||||
print()
|
||||
@@ -151,9 +151,9 @@ def solve(parser, args):
|
||||
# With -y, just print YAML to output.
|
||||
if args.format == 'yaml':
|
||||
# use write because to_yaml already has a newline.
|
||||
sys.stdout.write(spec.to_yaml(hash=ht.build_hash))
|
||||
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
|
||||
elif args.format == 'json':
|
||||
sys.stdout.write(spec.to_json(hash=ht.build_hash))
|
||||
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
|
||||
else:
|
||||
sys.stdout.write(
|
||||
spec.tree(color=sys.stdout.isatty(), **kwargs))
|
||||
|
||||
@@ -34,12 +34,16 @@ def setup_parser(subparser):
|
||||
arguments.add_common_arguments(
|
||||
subparser, ['long', 'very_long', 'install_status']
|
||||
)
|
||||
subparser.add_argument(
|
||||
format_group = subparser.add_mutually_exclusive_group()
|
||||
format_group.add_argument(
|
||||
'-y', '--yaml', action='store_const', dest='format', default=None,
|
||||
const='yaml', help='print concrete spec as YAML')
|
||||
subparser.add_argument(
|
||||
format_group.add_argument(
|
||||
'-j', '--json', action='store_const', dest='format', default=None,
|
||||
const='json', help='print concrete spec as JSON')
|
||||
format_group.add_argument(
|
||||
'--format', action='store', default=None,
|
||||
help='print concrete spec with the specified format string')
|
||||
subparser.add_argument(
|
||||
'-c', '--cover', action='store',
|
||||
default='nodes', choices=['nodes', 'edges', 'paths'],
|
||||
@@ -47,10 +51,6 @@ def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-N', '--namespaces', action='store_true', default=False,
|
||||
help='show fully qualified package names')
|
||||
subparser.add_argument(
|
||||
'--hash-type', default="build_hash",
|
||||
choices=['build_hash', 'full_hash', 'dag_hash'],
|
||||
help='generate spec with a particular hash type.')
|
||||
subparser.add_argument(
|
||||
'-t', '--types', action='store_true', default=False,
|
||||
help='show dependency types')
|
||||
@@ -92,14 +92,13 @@ def spec(parser, args):
|
||||
for (input, output) in specs:
|
||||
# With -y, just print YAML to output.
|
||||
if args.format:
|
||||
# The user can specify the hash type to use
|
||||
hash_type = getattr(ht, args.hash_type)
|
||||
|
||||
if args.format == 'yaml':
|
||||
# use write because to_yaml already has a newline.
|
||||
sys.stdout.write(output.to_yaml(hash=hash_type))
|
||||
sys.stdout.write(output.to_yaml(hash=ht.dag_hash))
|
||||
elif args.format == 'json':
|
||||
print(output.to_json(hash=ht.dag_hash))
|
||||
else:
|
||||
print(output.to_json(hash=hash_type))
|
||||
print(output.format(args.format))
|
||||
continue
|
||||
|
||||
with tree_context():
|
||||
|
||||
@@ -27,12 +27,6 @@ def setup_parser(subparser):
|
||||
|
||||
|
||||
def stage(parser, args):
|
||||
# We temporarily modify the working directory when setting up a stage, so we need to
|
||||
# convert this to an absolute path here in order for it to remain valid later.
|
||||
custom_path = os.path.abspath(args.path) if args.path else None
|
||||
if custom_path:
|
||||
spack.stage.create_stage_root(custom_path)
|
||||
|
||||
if not args.specs:
|
||||
env = ev.active_environment()
|
||||
if env:
|
||||
@@ -54,6 +48,10 @@ def stage(parser, args):
|
||||
|
||||
specs = spack.cmd.parse_specs(args.specs, concretize=False)
|
||||
|
||||
# We temporarily modify the working directory when setting up a stage, so we need to
|
||||
# convert this to an absolute path here in order for it to remain valid later.
|
||||
custom_path = os.path.abspath(args.path) if args.path else None
|
||||
|
||||
# prevent multiple specs from extracting in the same folder
|
||||
if len(specs) > 1 and custom_path:
|
||||
tty.die("`--path` requires a single spec, but multiple were provided")
|
||||
|
||||
@@ -337,6 +337,8 @@ def _report_suite_results(test_suite, args, constraints):
|
||||
pkg_id, status = line.split()
|
||||
results[pkg_id] = status
|
||||
|
||||
tty.msg('test specs:')
|
||||
|
||||
failed, skipped, untested = 0, 0, 0
|
||||
for pkg_id in test_specs:
|
||||
if pkg_id in results:
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
|
||||
|
||||
# tutorial configuration parameters
|
||||
tutorial_branch = "releases/v%d.%d" % spack.spack_version_info[:2]
|
||||
tutorial_branch = "releases/v0.17"
|
||||
tutorial_mirror = "file:///mirror"
|
||||
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
|
||||
|
||||
|
||||
@@ -62,9 +62,14 @@ def setup_parser(subparser):
|
||||
'-a', '--all', action='store_true', dest='all',
|
||||
help="remove ALL installed packages that match each supplied spec"
|
||||
)
|
||||
subparser.add_argument(
|
||||
'--origin', dest='origin',
|
||||
help="only remove DB records with the specified origin"
|
||||
)
|
||||
|
||||
|
||||
def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
|
||||
def find_matching_specs(env, specs, allow_multiple_matches=False, force=False,
|
||||
origin=None):
|
||||
"""Returns a list of specs matching the not necessarily
|
||||
concretized specs given from cli
|
||||
|
||||
@@ -85,8 +90,8 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
|
||||
has_errors = False
|
||||
for spec in specs:
|
||||
install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
|
||||
matching = spack.store.db.query_local(spec, hashes=hashes,
|
||||
installed=install_query)
|
||||
matching = spack.store.db.query_local(
|
||||
spec, hashes=hashes, installed=install_query, origin=origin)
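As a hedged aside, the new origin filter makes queries like the following possible ('external-db' is the origin value recorded by the Cray manifest reader added later in this change):

    # Only DB records that came from an external (Cray manifest) source.
    matching = spack.store.db.query_local(spec, origin='external-db')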
|
||||
# For each spec provided, make sure it refers to only one package.
|
||||
# Fail and ask user to be unambiguous if it doesn't
|
||||
if not allow_multiple_matches and len(matching) > 1:
|
||||
@@ -220,15 +225,25 @@ def do_uninstall(env, specs, force):
|
||||
|
||||
# A package is ready to be uninstalled when nothing else references it,
|
||||
# unless we are requested to force uninstall it.
|
||||
is_ready = lambda x: not spack.store.db.query_by_spec_hash(x)[1].ref_count
|
||||
if force:
|
||||
is_ready = lambda x: True
|
||||
def is_ready(dag_hash):
|
||||
if force:
|
||||
return True
|
||||
|
||||
_, record = spack.store.db.query_by_spec_hash(dag_hash)
|
||||
if not record.ref_count:
|
||||
return True
|
||||
|
||||
# If this spec is only used as a build dependency, we can uninstall
|
||||
return all(
|
||||
dspec.deptypes == ("build",) or not dspec.parent.installed
|
||||
for dspec in record.spec.edges_from_dependents()
|
||||
)
|
||||
|
||||
while packages:
|
||||
ready = [x for x in packages if is_ready(x.spec.dag_hash())]
|
||||
if not ready:
|
||||
msg = 'unexpected error [cannot proceed uninstalling specs with' \
|
||||
' remaining dependents {0}]'
|
||||
' remaining link or run dependents {0}]'
|
||||
msg = msg.format(', '.join(x.name for x in packages))
|
||||
raise spack.error.SpackError(msg)
|
||||
|
||||
@@ -240,7 +255,8 @@ def do_uninstall(env, specs, force):
|
||||
def get_uninstall_list(args, specs, env):
|
||||
# Gets the list of installed specs that match the ones give via cli
|
||||
# args.all takes care of the case where '-a' is given in the cli
|
||||
uninstall_list = find_matching_specs(env, specs, args.all, args.force)
|
||||
uninstall_list = find_matching_specs(env, specs, args.all, args.force,
|
||||
args.origin)
|
||||
|
||||
# Takes care of '-R'
|
||||
active_dpts, inactive_dpts = installed_dependents(uninstall_list, env)
|
||||
|
||||
@@ -495,7 +495,8 @@ def get_compiler_duplicates(compiler_spec, arch_spec):
|
||||
@llnl.util.lang.memoized
|
||||
def class_for_compiler_name(compiler_name):
|
||||
"""Given a compiler module name, get the corresponding Compiler class."""
|
||||
assert supported(compiler_name)
|
||||
if not supported(compiler_name):
|
||||
raise UnknownCompilerError(compiler_name)
|
||||
|
||||
# Hack to be able to call the compiler `apple-clang` while still
|
||||
# using a valid python name for the module
|
||||
@@ -765,7 +766,8 @@ def name_matches(name, name_list):
|
||||
toolchains.add(compiler_cls.__name__)
|
||||
|
||||
if len(toolchains) > 1:
|
||||
if toolchains == set(['Clang', 'AppleClang', 'Aocc']):
|
||||
if toolchains == set(['Clang', 'AppleClang', 'Aocc']) or \
|
||||
toolchains == set(['Dpcpp', 'Oneapi']):
|
||||
return False
|
||||
tty.debug("[TOOLCHAINS] {0}".format(toolchains))
|
||||
return True
|
||||
@@ -788,6 +790,13 @@ def __init__(self):
|
||||
"Spack could not find any compilers!")
|
||||
|
||||
|
||||
class UnknownCompilerError(spack.error.SpackError):
|
||||
def __init__(self, compiler_name):
|
||||
super(UnknownCompilerError, self).__init__(
|
||||
"Spack doesn't support the requested compiler: {0}"
|
||||
.format(compiler_name))
|
||||
|
||||
|
||||
class NoCompilerForSpecError(spack.error.SpackError):
|
||||
def __init__(self, compiler_spec, target):
|
||||
super(NoCompilerForSpecError, self).__init__(
|
||||
|
||||
@@ -1,37 +0,0 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from spack.compiler import Compiler
|
||||
from spack.version import ver
|
||||
|
||||
|
||||
class Emscripten(Compiler):
|
||||
cc_names = ['emcc']
|
||||
cxx_names = ['em++']
|
||||
|
||||
# Named wrapper links within build_env_path
|
||||
link_paths = {'cc': os.path.join('emscripten', 'emcc'),
|
||||
'cxx': os.path.join('emscripten', 'em++'),
|
||||
'f77': '',
|
||||
'fc': ''}
|
||||
|
||||
@property
|
||||
def verbose_flag(self):
|
||||
return "-v"
|
||||
|
||||
@property
|
||||
def debug_flags(self):
|
||||
return ['-g', '-gsource-map', '-gseparate-dwarf', '-g0', '-g1', '-g2', '-g3']
|
||||
|
||||
@property
|
||||
def opt_flags(self):
|
||||
return ['-O0', '-O1', '-O2', '-O3', '-Os', '-Oz']
|
||||
|
||||
@property
|
||||
def disable_new_dtags(self):
|
||||
return ''
|
||||
@@ -78,10 +78,8 @@ def cxx14_flag(self):
|
||||
self, "the C++14 standard", "cxx14_flag", "< 4.8")
|
||||
elif self.real_version < ver('4.9'):
|
||||
return "-std=c++1y"
|
||||
elif self.real_version < ver('6.0'):
|
||||
return "-std=c++14"
|
||||
else:
|
||||
return ""
|
||||
return "-std=c++14"
|
||||
|
||||
@property
|
||||
def cxx17_flag(self):
|
||||
|
||||
@@ -88,7 +88,7 @@
|
||||
|
||||
#: Path to the default configuration
|
||||
configuration_defaults_path = (
|
||||
'defaults', os.path.join(spack.paths.etc_path, 'spack', 'defaults')
|
||||
'defaults', os.path.join(spack.paths.etc_path, 'defaults')
|
||||
)
|
||||
|
||||
#: Hard-coded default values for some key configuration options.
|
||||
@@ -104,6 +104,10 @@
|
||||
'build_jobs': min(16, cpus_available()),
|
||||
'build_stage': '$tempdir/spack-stage',
|
||||
'concretizer': 'clingo',
|
||||
'license_dir': spack.paths.default_license_dir,
|
||||
'flags': {
|
||||
'keep_werror': 'none',
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -815,7 +819,7 @@ def _config():
|
||||
# Site configuration is per spack instance, for sites or projects
|
||||
# No site-level configs should be checked into spack by default.
|
||||
configuration_paths.append(
|
||||
('site', os.path.join(spack.paths.etc_path, 'spack')),
|
||||
('site', os.path.join(spack.paths.etc_path)),
|
||||
)
|
||||
|
||||
# User configuration can override both spack defaults and site config
|
||||
|
||||
lib/spack/spack/cray_manifest.py (new file, 193 lines)
@@ -0,0 +1,193 @@
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import json
|
||||
|
||||
import jsonschema
|
||||
import six
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.cmd
|
||||
import spack.hash_types as hash_types
|
||||
from spack.schema.cray_manifest import schema as manifest_schema
|
||||
|
||||
#: Cray systems can store a Spack-compatible description of system
|
||||
#: packages here.
|
||||
default_path = '/opt/cray/pe/cpe-descriptive-manifest/'
|
||||
|
||||
compiler_name_translation = {
|
||||
'nvidia': 'nvhpc',
|
||||
}
|
||||
|
||||
|
||||
def translated_compiler_name(manifest_compiler_name):
|
||||
"""
|
||||
When creating a Compiler object, Spack expects a name matching
|
||||
one of the classes in `spack.compilers`. Names in the Cray manifest
|
||||
may differ; for cases where we know the name refers to a compiler in
|
||||
Spack, this function translates it automatically.
|
||||
|
||||
This function will raise an error if there is no recorded translation
|
||||
and the name doesn't match a known compiler name.
|
||||
"""
|
||||
if manifest_compiler_name in compiler_name_translation:
|
||||
return compiler_name_translation[manifest_compiler_name]
|
||||
elif manifest_compiler_name in spack.compilers.supported_compilers():
|
||||
return manifest_compiler_name
|
||||
else:
|
||||
# Try to fail quickly. This can occur in two cases: (1) the compiler
|
||||
# definition (2) a spec can specify a compiler that doesn't exist; the
|
||||
# first will be caught when creating compiler definition. The second
|
||||
# will result in Specs with associated undefined compilers.
|
||||
raise spack.compilers.UnknownCompilerError(
|
||||
"Manifest parsing - unknown compiler: {0}"
|
||||
.format(manifest_compiler_name))
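A hedged illustration of the translation above ('gcc' assumed to be among the supported compiler names):

    assert translated_compiler_name('nvidia') == 'nvhpc'
    assert translated_compiler_name('gcc') == 'gcc'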
|
||||
|
||||
|
||||
def compiler_from_entry(entry):
|
||||
compiler_name = translated_compiler_name(entry['name'])
|
||||
paths = entry['executables']
|
||||
version = entry['version']
|
||||
arch = entry['arch']
|
||||
operating_system = arch['os']
|
||||
target = arch['target']
|
||||
|
||||
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
|
||||
spec = spack.spec.CompilerSpec(compiler_cls.name, version)
|
||||
paths = [paths.get(x, None) for x in ('cc', 'cxx', 'f77', 'fc')]
|
||||
return compiler_cls(
|
||||
spec, operating_system, target, paths
|
||||
)
|
||||
|
||||
|
||||
def spec_from_entry(entry):
|
||||
arch_str = ""
|
||||
if 'arch' in entry:
|
||||
arch_format = "arch={platform}-{os}-{target}"
|
||||
arch_str = arch_format.format(
|
||||
platform=entry['arch']['platform'],
|
||||
os=entry['arch']['platform_os'],
|
||||
target=entry['arch']['target']['name']
|
||||
)
|
||||
|
||||
compiler_str = ""
|
||||
if 'compiler' in entry:
|
||||
compiler_format = "%{name}@{version}"
|
||||
compiler_str = compiler_format.format(
|
||||
name=translated_compiler_name(entry['compiler']['name']),
|
||||
version=entry['compiler']['version']
|
||||
)
|
||||
|
||||
spec_format = "{name}@{version} {compiler} {arch}"
|
||||
spec_str = spec_format.format(
|
||||
name=entry['name'],
|
||||
version=entry['version'],
|
||||
compiler=compiler_str,
|
||||
arch=arch_str
|
||||
)
|
||||
|
||||
package = spack.repo.get(entry['name'])
|
||||
|
||||
if 'parameters' in entry:
|
||||
variant_strs = list()
|
||||
for name, value in entry['parameters'].items():
|
||||
# TODO: also ensure that the variant value is valid?
|
||||
if not (name in package.variants):
|
||||
tty.debug("Omitting variant {0} for entry {1}/{2}"
|
||||
.format(name, entry['name'], entry['hash'][:7]))
|
||||
continue
|
||||
|
||||
# Value could be a list (of strings), boolean, or string
|
||||
if isinstance(value, six.string_types):
|
||||
variant_strs.append('{0}={1}'.format(name, value))
|
||||
else:
|
||||
try:
|
||||
iter(value)
|
||||
variant_strs.append(
|
||||
'{0}={1}'.format(name, ','.join(value)))
|
||||
continue
|
||||
except TypeError:
|
||||
# Not an iterable
|
||||
pass
|
||||
# At this point not a string or collection, check for boolean
|
||||
if value in [True, False]:
|
||||
bool_symbol = '+' if value else '~'
|
||||
variant_strs.append('{0}{1}'.format(bool_symbol, name))
|
||||
else:
|
||||
raise ValueError(
|
||||
"Unexpected value for {0} ({1}): {2}".format(
|
||||
name, str(type(value)), str(value)
|
||||
)
|
||||
)
|
||||
spec_str += ' ' + ' '.join(variant_strs)
|
||||
|
||||
spec, = spack.cmd.parse_specs(spec_str.split())
|
||||
|
||||
for ht in [hash_types.dag_hash, hash_types.build_hash,
|
||||
hash_types.full_hash]:
|
||||
setattr(spec, ht.attr, entry['hash'])
|
||||
|
||||
spec._concrete = True
|
||||
spec._hashes_final = True
|
||||
spec.external_path = entry['prefix']
|
||||
spec.origin = 'external-db'
|
||||
spack.spec.Spec.ensure_valid_variants(spec)
|
||||
|
||||
return spec
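A hedged illustration of the entry shape this function consumes; every value below is made up for the example:

    entry = {
        'name': 'zlib',
        'version': '1.2.11',
        'hash': 'abcdef1234567890abcdef1234567890',
        'prefix': '/opt/cray/pe/zlib/1.2.11',
        'arch': {'platform': 'cray', 'platform_os': 'sles15',
                 'target': {'name': 'zen2'}},
        'compiler': {'name': 'nvidia', 'version': '21.3'},
        'parameters': {'shared': True},
    }
    spec = spec_from_entry(entry)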
|
||||
|
||||
|
||||
def entries_to_specs(entries):
|
||||
spec_dict = {}
|
||||
for entry in entries:
|
||||
try:
|
||||
spec = spec_from_entry(entry)
|
||||
spec_dict[spec._hash] = spec
|
||||
except spack.repo.UnknownPackageError:
|
||||
tty.debug("Omitting package {0}: no corresponding repo package"
|
||||
.format(entry['name']))
|
||||
except spack.error.SpackError:
|
||||
raise
|
||||
except Exception:
|
||||
tty.warn("Could not parse entry: " + str(entry))
|
||||
|
||||
for entry in filter(lambda x: 'dependencies' in x, entries):
|
||||
dependencies = entry['dependencies']
|
||||
for name, properties in dependencies.items():
|
||||
dep_hash = properties['hash']
|
||||
deptypes = properties['type']
|
||||
if dep_hash in spec_dict:
|
||||
if entry['hash'] not in spec_dict:
|
||||
continue
|
||||
parent_spec = spec_dict[entry['hash']]
|
||||
dep_spec = spec_dict[dep_hash]
|
||||
parent_spec._add_dependency(dep_spec, deptypes)
|
||||
|
||||
return spec_dict
|
||||
|
||||
|
||||
def read(path, apply_updates):
|
||||
with open(path, 'r') as json_file:
|
||||
json_data = json.load(json_file)
|
||||
|
||||
jsonschema.validate(json_data, manifest_schema)
|
||||
|
||||
specs = entries_to_specs(json_data['specs'])
|
||||
tty.debug("{0}: {1} specs read from manifest".format(
|
||||
path,
|
||||
str(len(specs))))
|
||||
compilers = list()
|
||||
if 'compilers' in json_data:
|
||||
compilers.extend(compiler_from_entry(x)
|
||||
for x in json_data['compilers'])
|
||||
tty.debug("{0}: {1} compilers read from manifest".format(
|
||||
path,
|
||||
str(len(compilers))))
|
||||
if apply_updates and compilers:
|
||||
spack.compilers.add_compilers_to_config(
|
||||
compilers, init_config=False)
|
||||
if apply_updates:
|
||||
for spec in specs.values():
|
||||
spack.store.db.add(spec, directory_layout=None)
|
||||
@@ -91,7 +91,8 @@
|
||||
_pkg_lock_timeout = None
|
||||
|
||||
# Types of dependencies tracked by the database
|
||||
_tracked_deps = ('link', 'run')
|
||||
# We store by DAG hash, so we track the dependencies that the DAG hash includes.
|
||||
_tracked_deps = ht.dag_hash.deptype
|
||||
|
||||
# Default list of fields written for each install record
|
||||
default_install_record_fields = [
|
||||
@@ -187,6 +188,7 @@ def __init__(
|
||||
installation_time=None,
|
||||
deprecated_for=None,
|
||||
in_buildcache=False,
|
||||
origin=None
|
||||
):
|
||||
self.spec = spec
|
||||
self.path = str(path) if path else None
|
||||
@@ -196,6 +198,7 @@ def __init__(
|
||||
self.installation_time = installation_time or _now()
|
||||
self.deprecated_for = deprecated_for
|
||||
self.in_buildcache = in_buildcache
|
||||
self.origin = origin
|
||||
|
||||
def install_type_matches(self, installed):
|
||||
installed = InstallStatuses.canonicalize(installed)
|
||||
@@ -217,6 +220,9 @@ def to_dict(self, include_fields=default_install_record_fields):
|
||||
else:
|
||||
rec_dict.update({field_name: getattr(self, field_name)})
|
||||
|
||||
if self.origin:
|
||||
rec_dict['origin'] = self.origin
|
||||
|
||||
return rec_dict
|
||||
|
||||
@classmethod
|
||||
@@ -350,10 +356,10 @@ def __init__(self, root, db_dir=None, upstream_dbs=None,
|
||||
self.prefix_fail_path = os.path.join(self._db_dir, 'prefix_failures')
|
||||
|
||||
# Create needed directories and files
|
||||
if not os.path.exists(self._db_dir):
|
||||
if not is_upstream and not os.path.exists(self._db_dir):
|
||||
fs.mkdirp(self._db_dir)
|
||||
|
||||
if not os.path.exists(self._failure_dir) and not is_upstream:
|
||||
if not is_upstream and not os.path.exists(self._failure_dir):
|
||||
fs.mkdirp(self._failure_dir)
|
||||
|
||||
self.is_upstream = is_upstream
|
||||
@@ -428,7 +434,7 @@ def _failed_spec_path(self, spec):
|
||||
.format(spec.name))
|
||||
|
||||
return os.path.join(self._failure_dir,
|
||||
'{0}-{1}'.format(spec.name, spec.full_hash()))
|
||||
'{0}-{1}'.format(spec.name, spec.dag_hash()))
|
||||
|
||||
def clear_all_failures(self):
|
||||
"""Force remove install failure tracking files."""
|
||||
@@ -640,8 +646,12 @@ def _write_to_file(self, stream):
|
||||
# TODO: fix this before we support multiple install locations.
|
||||
database = {
|
||||
'database': {
|
||||
# TODO: move this to a top-level _meta section if we ever
|
||||
# TODO: bump the DB version to 7
|
||||
'version': str(_db_version),
|
||||
|
||||
# dictionary of installation records, keyed by DAG hash
|
||||
'installs': installs,
|
||||
'version': str(_db_version)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -681,6 +691,13 @@ def db_for_spec_hash(self, hash_key):
|
||||
return db
|
||||
|
||||
def query_by_spec_hash(self, hash_key, data=None):
|
||||
"""Get a spec for hash, and whether it's installed upstream.
|
||||
|
||||
Return:
|
||||
(tuple): (bool, optional InstallRecord): bool tells us whether
|
||||
the spec is installed upstream. Its InstallRecord is also
|
||||
returned if it's installed at all; otherwise None.
|
||||
"""
|
||||
if data and hash_key in data:
|
||||
return False, data[hash_key]
|
||||
if not data:
|
||||
@@ -1047,9 +1064,7 @@ def _read(self):
|
||||
self._state_is_inconsistent = False
|
||||
return
|
||||
elif self.is_upstream:
|
||||
raise UpstreamDatabaseLockingError(
|
||||
"No database index file is present, and upstream"
|
||||
" databases cannot generate an index file")
|
||||
tty.warn('upstream not found: {0}'.format(self._index_path))
|
||||
|
||||
def _add(
|
||||
self,
|
||||
@@ -1087,6 +1102,7 @@ def _add(
|
||||
"Specs added to DB must be concrete.")
|
||||
|
||||
key = spec.dag_hash()
|
||||
spec_pkg_hash = spec._package_hash
|
||||
upstream, record = self.query_by_spec_hash(key)
|
||||
if upstream:
|
||||
return
|
||||
@@ -1131,6 +1147,10 @@ def _add(
|
||||
'explicit': explicit,
|
||||
'installation_time': installation_time
|
||||
}
|
||||
# Commands other than 'spack install' may add specs to the DB,
|
||||
# we can record the source of an installed Spec with 'origin'
|
||||
if hasattr(spec, 'origin'):
|
||||
extra_args['origin'] = spec.origin
|
||||
self._data[key] = InstallRecord(
|
||||
new_spec, path, installed, ref_count=0, **extra_args
|
||||
)
|
||||
@@ -1144,10 +1164,10 @@ def _add(
|
||||
record.ref_count += 1
|
||||
|
||||
# Mark concrete once everything is built, and preserve
|
||||
# the original hash of concrete specs.
|
||||
# the original hashes of concrete specs.
|
||||
new_spec._mark_concrete()
|
||||
new_spec._hash = key
|
||||
new_spec._full_hash = spec._full_hash
|
||||
new_spec._package_hash = spec_pkg_hash
|
||||
|
||||
else:
|
||||
# It is already in the database
|
||||
@@ -1462,6 +1482,7 @@ def _query(
|
||||
end_date=None,
|
||||
hashes=None,
|
||||
in_buildcache=any,
|
||||
origin=None
|
||||
):
|
||||
"""Run a query on the database."""
|
||||
|
||||
@@ -1490,6 +1511,9 @@ def _query(
|
||||
if hashes is not None and rec.spec.dag_hash() not in hashes:
|
||||
continue
|
||||
|
||||
if origin and not (origin == rec.origin):
|
||||
continue
|
||||
|
||||
if not rec.install_type_matches(installed):
|
||||
continue
|
||||
|
||||
@@ -1583,11 +1607,12 @@ def unused_specs(self):
|
||||
needed, visited = set(), set()
|
||||
with self.read_transaction():
|
||||
for key, rec in self._data.items():
|
||||
if rec.explicit:
|
||||
# recycle `visited` across calls to avoid
|
||||
# redundantly traversing
|
||||
for spec in rec.spec.traverse(visited=visited):
|
||||
needed.add(spec.dag_hash())
|
||||
if not rec.explicit:
|
||||
continue
|
||||
|
||||
# recycle `visited` across calls to avoid redundantly traversing
|
||||
for spec in rec.spec.traverse(visited=visited, deptype=("link", "run")):
|
||||
needed.add(spec.dag_hash())
|
||||
|
||||
unused = [rec.spec for key, rec in self._data.items()
|
||||
if key not in needed and rec.installed]
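# The rewrite above is a small mark-and-sweep: traverse the link/run
# dependencies of every explicit record, mark everything reachable, and report
# installed records that were never marked. A generic sketch with plain
# dictionaries (hypothetical data, not Spack's database objects):
def unused_records(records, deps):
    """records: {key: (explicit, installed)}; deps: {key: [child keys]}."""
    needed = set()

    def mark(key):
        if key in needed:
            return
        needed.add(key)
        for child in deps.get(key, []):
            mark(child)

    for key, (explicit, _installed) in records.items():
        if explicit:
            mark(key)
    return [key for key, (_explicit, installed) in records.items()
            if installed and key not in needed]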
|
||||
|
||||
@@ -74,7 +74,8 @@ def executables_in_path(path_hints=None):
|
||||
|
||||
|
||||
def libraries_in_ld_library_path(path_hints=None):
|
||||
"""Get the paths of all libraries available from LD_LIBRARY_PATH.
|
||||
"""Get the paths of all libraries available from LD_LIBRARY_PATH,
|
||||
LIBRARY_PATH, DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH.
|
||||
|
||||
For convenience, this is constructed as a dictionary where the keys are
|
||||
the library paths and the values are the names of the libraries
|
||||
@@ -85,9 +86,15 @@ def libraries_in_ld_library_path(path_hints=None):
|
||||
|
||||
Args:
|
||||
path_hints (list): list of paths to be searched. If None the list will be
|
||||
constructed based on the LD_LIBRARY_PATH environment variable.
|
||||
constructed based on the set of LD_LIBRARY_PATH, LIBRARY_PATH,
|
||||
DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment
|
||||
variables.
|
||||
"""
|
||||
path_hints = path_hints or spack.util.environment.get_path('LD_LIBRARY_PATH')
|
||||
path_hints = path_hints or \
|
||||
spack.util.environment.get_path('LIBRARY_PATH') + \
|
||||
spack.util.environment.get_path('LD_LIBRARY_PATH') + \
|
||||
spack.util.environment.get_path('DYLD_LIBRARY_PATH') + \
|
||||
spack.util.environment.get_path('DYLD_FALLBACK_LIBRARY_PATH')
|
||||
search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
|
||||
|
||||
path_to_lib = {}
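# Illustration only (a hypothetical standalone sketch, not Spack's
# implementation): the default path_hints described above are just the values
# of several search-path variables, split on os.pathsep and concatenated in
# the order used in the change above.
import os

def default_library_path_hints(environ=os.environ):
    """Collect candidate library directories from common search-path variables."""
    variables = ('LIBRARY_PATH', 'LD_LIBRARY_PATH',
                 'DYLD_LIBRARY_PATH', 'DYLD_FALLBACK_LIBRARY_PATH')
    hints = []
    for var in variables:
        hints.extend(p for p in environ.get(var, '').split(os.pathsep) if p)
    return hints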
|
||||
@@ -247,12 +254,6 @@ def by_executable(packages_to_check, path_hints=None):
|
||||
continue
|
||||
|
||||
for prefix, exes_in_prefix in sorted(_group_by_prefix(exes)):
|
||||
pkg_prefix = executable_prefix(prefix)
|
||||
if not pkg_prefix:
|
||||
msg = "no bin/ dir found in {0}. Cannot add it as a Spack package"
|
||||
llnl.util.tty.debug(msg.format(prefix))
|
||||
continue
|
||||
|
||||
# TODO: multiple instances of a package can live in the same
|
||||
# prefix, and a package implementation can return multiple specs
|
||||
# for one prefix, but without additional details (e.g. about the
|
||||
@@ -276,6 +277,13 @@ def by_executable(packages_to_check, path_hints=None):
|
||||
)
|
||||
|
||||
for spec in specs:
|
||||
pkg_prefix = executable_prefix(prefix)
|
||||
|
||||
if not pkg_prefix:
|
||||
msg = "no bin/ dir found in {0}. Cannot add it as a Spack package"
|
||||
llnl.util.tty.debug(msg.format(prefix))
|
||||
continue
|
||||
|
||||
if spec in resolved_specs:
|
||||
prior_prefix = ', '.join(
|
||||
_convert_to_iterable(resolved_specs[spec]))
|
||||
|
||||
@@ -48,13 +48,13 @@ class OpenMpi(Package):
|
||||
from spack.resource import Resource
|
||||
from spack.version import Version, VersionChecksumError
|
||||
|
||||
__all__ = ['DirectiveError', 'DirectiveMeta']
|
||||
__all__ = ['DirectiveError', 'DirectiveMeta', 'version', 'conflicts', 'depends_on',
|
||||
'extends', 'provides', 'patch', 'variant', 'resource']
|
||||
|
||||
#: These are variant names used by Spack internally; packages can't use them
|
||||
reserved_names = ['patches', 'dev_path']
|
||||
|
||||
#: Names of possible directives. This list is populated elsewhere in the file and then
|
||||
#: added to `__all__` at the bottom.
|
||||
#: Names of possible directives. This list is populated elsewhere in the file.
|
||||
directive_names = []
|
||||
|
||||
_patch_order_index = 0
|
||||
@@ -731,7 +731,3 @@ class DependencyPatchError(DirectiveError):
|
||||
|
||||
class UnsupportedPackageDirective(DirectiveError):
|
||||
"""Raised when an invalid or unsupported package directive is specified."""
|
||||
|
||||
|
||||
#: add all directive names to __all__
|
||||
__all__.extend(directive_names)
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
import posixpath
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
from contextlib import contextmanager
|
||||
|
||||
@@ -24,6 +25,7 @@
|
||||
import spack.util.spack_json as sjson
|
||||
from spack.error import SpackError
|
||||
|
||||
is_windows = sys.platform == 'win32'
|
||||
# Note: posixpath is used here as opposed to
# os.path.join due to spack.spec.Spec.format
# requiring forward slash path separators at this stage
||||
@@ -108,13 +110,9 @@ def write_spec(self, spec, path):
|
||||
"""Write a spec out to a file."""
|
||||
_check_concrete(spec)
|
||||
with open(path, 'w') as f:
|
||||
# The hash the the projection is the DAG hash but we write out the
|
||||
# full provenance by full hash so it's availabe if we want it later
|
||||
# extension = os.path.splitext(path)[-1].lower()
|
||||
# if 'json' in extension:
|
||||
spec.to_json(f, hash=ht.full_hash)
|
||||
# elif 'yaml' in extension:
|
||||
# spec.to_yaml(f, hash=ht.full_hash)
|
||||
# The hash of the projection is the DAG hash, which contains
# the full provenance, so it's available if we want it later
spec.to_json(f, hash=ht.dag_hash)
||||
|
||||
def write_host_environment(self, spec):
|
||||
"""The host environment is a json file with os, kernel, and spack
|
||||
@@ -240,10 +238,10 @@ def create_install_directory(self, spec):
|
||||
|
||||
def ensure_installed(self, spec):
|
||||
"""
|
||||
Throws DirectoryLayoutError if:
|
||||
Throws InconsistentInstallDirectoryError if:
|
||||
1. spec prefix does not exist
|
||||
2. spec prefix does not contain a spec file
|
||||
3. the spec file does not correspond to the spec
|
||||
2. spec prefix does not contain a spec file, or
|
||||
3. We read a spec with the wrong DAG hash out of an existing install directory.
|
||||
"""
|
||||
_check_concrete(spec)
|
||||
path = self.path_for_spec(spec)
|
||||
@@ -259,25 +257,7 @@ def ensure_installed(self, spec):
|
||||
" " + path)
|
||||
|
||||
installed_spec = self.read_spec(spec_file_path)
|
||||
if installed_spec == spec:
|
||||
return
|
||||
|
||||
# DAG hashes currently do not include build dependencies.
|
||||
#
|
||||
# TODO: remove this when we do better concretization and don't
|
||||
# ignore build-only deps in hashes.
|
||||
elif (installed_spec.copy(deps=('link', 'run')) ==
|
||||
spec.copy(deps=('link', 'run'))):
|
||||
# The directory layout prefix is based on the dag hash, so among
|
||||
# specs with differing full-hash but matching dag-hash, only one
|
||||
# may be installed. This means for example that for two instances
|
||||
# that differ only in CMake version used to build, only one will
|
||||
# be installed.
|
||||
return
|
||||
|
||||
if spec.dag_hash() == installed_spec.dag_hash():
|
||||
raise SpecHashCollisionError(spec, installed_spec)
|
||||
else:
|
||||
if installed_spec.dag_hash() != spec.dag_hash():
|
||||
raise InconsistentInstallDirectoryError(
|
||||
'Spec file in %s does not match hash!' % spec_file_path)
|
||||
|
||||
@@ -349,6 +329,14 @@ def remove_install_directory(self, spec, deprecated=False):
|
||||
path = self.path_for_spec(spec)
|
||||
assert(path.startswith(self.root))
|
||||
|
||||
# Windows readonly files cannot be removed by Python
|
||||
# directly, change permissions before attempting to remove
|
||||
if is_windows:
|
||||
kwargs = {'ignore_errors': False,
|
||||
'onerror': fs.readonly_file_handler(ignore_errors=False)}
|
||||
else:
|
||||
kwargs = {} # the default value for ignore_errors is false
|
||||
|
||||
if deprecated:
|
||||
if os.path.exists(path):
|
||||
try:
|
||||
@@ -357,10 +345,9 @@ def remove_install_directory(self, spec, deprecated=False):
|
||||
os.remove(metapath)
|
||||
except OSError as e:
|
||||
raise six.raise_from(RemoveFailedError(spec, path, e), e)
|
||||
|
||||
elif os.path.exists(path):
|
||||
try:
|
||||
shutil.rmtree(path)
|
||||
shutil.rmtree(path, **kwargs)
|
||||
except OSError as e:
|
||||
raise six.raise_from(RemoveFailedError(spec, path, e), e)
|
||||
|
||||
@@ -458,8 +445,8 @@ def add_extension(self, spec, ext_spec):
|
||||
def check_extension_conflict(self, spec, ext_spec):
|
||||
exts = self._extension_map(spec)
|
||||
if ext_spec.name in exts:
|
||||
installed_spec = exts[ext_spec.name].copy(deps=('link', 'run'))
|
||||
if ext_spec.copy(deps=('link', 'run')) == installed_spec:
|
||||
installed_spec = exts[ext_spec.name]
|
||||
if ext_spec.dag_hash() == installed_spec.dag_hash():
|
||||
raise ExtensionAlreadyInstalledError(spec, ext_spec)
|
||||
else:
|
||||
raise ExtensionConflictError(spec, ext_spec, installed_spec)
|
||||
@@ -579,15 +566,6 @@ def __init__(self, message, long_msg=None):
|
||||
super(DirectoryLayoutError, self).__init__(message, long_msg)
|
||||
|
||||
|
||||
class SpecHashCollisionError(DirectoryLayoutError):
|
||||
"""Raised when there is a hash collision in an install layout."""
|
||||
|
||||
def __init__(self, installed_spec, new_spec):
|
||||
super(SpecHashCollisionError, self).__init__(
|
||||
'Specs %s and %s have the same SHA-1 prefix!'
|
||||
% (installed_spec, new_spec))
|
||||
|
||||
|
||||
class RemoveFailedError(DirectoryLayoutError):
|
||||
"""Raised when a DirectoryLayout cannot remove an install prefix."""
|
||||
|
||||
|
||||
@@ -1,7 +1,334 @@
# -*- coding: utf-8 -*-
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This package implements Spack environments.

.. _lockfile-format:

`spack.lock` format
===================

Spack environments have existed since Spack ``v0.12.0``, and there have been 4 different
``spack.lock`` formats since then. The formats are documented here.

The high-level format of a Spack lockfile hasn't changed much between versions, but the
contents have. Lockfiles are JSON-formatted and their top-level sections are:

1. ``_meta`` (object): this contains details about the file format, including:
   * ``file-type``: always ``"spack-lockfile"``
   * ``lockfile-version``: an integer representing the lockfile format version
   * ``specfile-version``: an integer representing the spec format version (since
     ``v0.17``)

2. ``roots`` (list): an ordered list of records representing the roots of the Spack
   environment. Each has two fields:
   * ``hash``: a Spack spec hash uniquely identifying the concrete root spec
   * ``spec``: a string representation of the abstract spec that was concretized

3. ``concrete_specs``: a dictionary containing the specs in the environment.

Compatibility
-------------

New versions of Spack can (so far) read all old lockfile formats -- they are
backward-compatible. Old versions cannot read new lockfile formats, and you'll need to
upgrade Spack to use them.

.. list-table:: Lockfile version compatibility across Spack versions
   :header-rows: 1

   * - Spack version
     - ``v1``
     - ``v2``
     - ``v3``
     - ``v4``
   * - ``v0.12:0.14``
     - ✅
     -
     -
     -
   * - ``v0.15:0.16``
     - ✅
     - ✅
     -
     -
   * - ``v0.17``
     - ✅
     - ✅
     - ✅
     -
   * - ``v0.18:``
     - ✅
     - ✅
     - ✅
     - ✅

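For illustration only (``can_read_lockfile`` below is a hypothetical helper, not part
of Spack's API), the compatibility rule amounts to a simple check of the
``lockfile-version`` recorded in ``_meta``:

.. code-block:: python

    import json

    SUPPORTED_LOCKFILE_VERSIONS = (1, 2, 3, 4)  # assumed: versions this Spack can read

    def can_read_lockfile(path):
        """Return True if this Spack can read the given spack.lock file."""
        with open(path) as f:
            data = json.load(f)
        return data["_meta"]["lockfile-version"] in SUPPORTED_LOCKFILE_VERSIONS
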
Version 1
---------

When lockfiles were first created, there was only one hash in Spack: the DAG hash. This
DAG hash (we'll call it the old DAG hash) did *not* include build dependencies -- it
only included transitive link and run dependencies.

The spec format at this time was keyed by name. Each spec started with a key for its
name, whose value was a dictionary of other spec attributes. The lockfile put these
name-keyed specs into dictionaries keyed by their DAG hash, and the spec records did not
actually have a "hash" field in the lockfile -- you have to associate the hash from the
key with the spec record after the fact.

Dependencies in original lockfiles were keyed by ``"hash"``, i.e. the old DAG hash.

.. code-block:: json

    {
        "_meta": {
            "file-type": "spack-lockfile",
            "lockfile-version": 1
        },
        "roots": [
            {
                "hash": "<old_dag_hash 1>",
                "spec": "<abstract spec 1>"
            },
            {
                "hash": "<old_dag_hash 2>",
                "spec": "<abstract spec 2>"
            }
        ],
        "concrete_specs": {
            "<old_dag_hash 1>": {
                "... <spec dict attributes> ...": { },
                "dependencies": {
                    "depname_1": {
                        "hash": "<old_dag_hash for depname_1>",
                        "type": ["build", "link"]
                    },
                    "depname_2": {
                        "hash": "<old_dag_hash for depname_2>",
                        "type": ["build", "link"]
                    }
                },
                "hash": "<old_dag_hash 1>"
            },
            "<old_dag_hash 2>": {
                "... <spec dict attributes> ...": { },
                "dependencies": {
                    "depname_3": {
                        "hash": "<old_dag_hash for depname_3>",
                        "type": ["build", "link"]
                    },
                    "depname_4": {
                        "hash": "<old_dag_hash for depname_4>",
                        "type": ["build", "link"]
                    }
                },
                "hash": "<old_dag_hash 2>"
            }
        }
    }

Version 2
---------

Version 2 changes one thing: specs in the lockfile are now keyed by ``build_hash``
instead of the old ``dag_hash``. Specs have a ``hash`` attribute with their real DAG
hash, so you can't go by the dictionary key anymore to identify a spec -- you have to
read it in and look at ``"hash"``. Dependencies are still keyed by old DAG hash.

Even though we key lockfiles by ``build_hash``, specs in Spack were still deployed with
the old, coarser DAG hash. This means that in v2 and v3 lockfiles (which are keyed by
build hash), there may be multiple versions of the same spec with different build
dependencies, which means they will have different build hashes but the same DAG hash.
Spack would only have been able to actually install one of these.

.. code-block:: json

    {
        "_meta": {
            "file-type": "spack-lockfile",
            "lockfile-version": 2
        },
        "roots": [
            {
                "hash": "<build_hash 1>",
                "spec": "<abstract spec 1>"
            },
            {
                "hash": "<build_hash 2>",
                "spec": "<abstract spec 2>"
            }
        ],
        "concrete_specs": {
            "<build_hash 1>": {
                "... <spec dict attributes> ...": { },
                "dependencies": {
                    "depname_1": {
                        "hash": "<old_dag_hash for depname_1>",
                        "type": ["build", "link"]
                    },
                    "depname_2": {
                        "hash": "<old_dag_hash for depname_2>",
                        "type": ["build", "link"]
                    }
                },
                "hash": "<old_dag_hash 1>"
            },
            "<build_hash 2>": {
                "... <spec dict attributes> ...": { },
                "dependencies": {
                    "depname_3": {
                        "hash": "<old_dag_hash for depname_3>",
                        "type": ["build", "link"]
                    },
                    "depname_4": {
                        "hash": "<old_dag_hash for depname_4>",
                        "type": ["build", "link"]
                    }
                },
                "hash": "<old_dag_hash 2>"
            }
        }
    }

Version 3
---------

Version 3 doesn't change the top-level lockfile format, but this was when we changed the
specfile format. Specs in ``concrete_specs`` are now keyed by the build hash, with no
inner dictionary keyed by their package name. The package name is in a ``name`` field
inside each spec dictionary. The ``dependencies`` field in the specs is a list instead
of a dictionary, and each element of the list is a record with the name, dependency
types, and hash of the dependency. Instead of a key called ``hash``, dependencies are
keyed by ``build_hash``. Each spec still has a ``hash`` attribute.

Version 3 adds the ``specfile_version`` field to ``_meta`` and uses the new JSON spec
format.

.. code-block:: json

    {
        "_meta": {
            "file-type": "spack-lockfile",
            "lockfile-version": 3,
            "specfile-version": 2
        },
        "roots": [
            {
                "hash": "<build_hash 1>",
                "spec": "<abstract spec 1>"
            },
            {
                "hash": "<build_hash 2>",
                "spec": "<abstract spec 2>"
            }
        ],
        "concrete_specs": {
            "<build_hash 1>": {
                "... <spec dict attributes> ...": { },
                "dependencies": [
                    {
                        "name": "depname_1",
                        "build_hash": "<build_hash for depname_1>",
                        "type": ["build", "link"]
                    },
                    {
                        "name": "depname_2",
                        "build_hash": "<build_hash for depname_2>",
                        "type": ["build", "link"]
                    }
                ],
                "hash": "<old_dag_hash 1>"
            },
            "<build_hash 2>": {
                "... <spec dict attributes> ...": { },
                "dependencies": [
                    {
                        "name": "depname_3",
                        "build_hash": "<build_hash for depname_3>",
                        "type": ["build", "link"]
                    },
                    {
                        "name": "depname_4",
                        "build_hash": "<build_hash for depname_4>",
                        "type": ["build", "link"]
                    }
                ],
                "hash": "<old_dag_hash 2>"
            }
        }
    }

Version 4
---------

Version 4 removes build hashes and is keyed by the new DAG hash (``hash``). The ``hash``
now includes build dependencies and a canonical hash of the ``package.py`` file.
Dependencies are keyed by ``hash`` (DAG hash) as well. There are no more ``build_hash``
fields in the specs, and there are no more issues with lockfiles being able to store
multiple specs with the same DAG hash (because the DAG hash is now finer-grained).

.. code-block:: json

    {
        "_meta": {
            "file-type": "spack-lockfile",
            "lockfile-version": 4,
            "specfile-version": 2
        },
        "roots": [
            {
                "hash": "<dag_hash 1>",
                "spec": "<abstract spec 1>"
            },
            {
                "hash": "<dag_hash 2>",
                "spec": "<abstract spec 2>"
            }
        ],
        "concrete_specs": {
            "<dag_hash 1>": {
                "... <spec dict attributes> ...": { },
                "dependencies": [
                    {
                        "name": "depname_1",
                        "hash": "<dag_hash for depname_1>",
                        "type": ["build", "link"]
                    },
                    {
                        "name": "depname_2",
                        "hash": "<dag_hash for depname_2>",
                        "type": ["build", "link"]
                    }
                ],
                "hash": "<dag_hash 1>"
            },
            "<dag_hash 2>": {
                "... <spec dict attributes> ...": { },
                "dependencies": [
                    {
                        "name": "depname_3",
                        "hash": "<dag_hash for depname_3>",
                        "type": ["build", "link"]
                    },
                    {
                        "name": "depname_4",
                        "hash": "<dag_hash for depname_4>",
                        "type": ["build", "link"]
                    }
                ],
                "hash": "<dag_hash 2>"
            }
        }
    }

"""

from .environment import (
|
||||
Environment,
|
||||
SpackEnvironmentError,
|
||||
|
||||
@@ -79,8 +79,9 @@
|
||||
env_subdir_name = '.spack-env'
|
||||
|
||||
|
||||
#: default spack.yaml file to put in new environments
|
||||
default_manifest_yaml = """\
|
||||
def default_manifest_yaml():
|
||||
"""default spack.yaml file to put in new environments"""
|
||||
return """\
|
||||
# This is a Spack Environment file.
|
||||
#
|
||||
# It describes a set of packages to be installed, along with
|
||||
@@ -89,12 +90,16 @@
|
||||
# add package specs to the `specs` list
|
||||
specs: []
|
||||
view: true
|
||||
"""
|
||||
concretizer:
|
||||
unify: {}
|
||||
""".format('true' if spack.config.get('concretizer:unify') else 'false')
|
||||
|
||||
|
||||
#: regex for validating environment names
valid_environment_name_re = r'^\w[\w-]*$'
||||
|
||||
#: version of the lockfile format. Must increase monotonically.
|
||||
lockfile_format_version = 3
|
||||
lockfile_format_version = 4
|
||||
|
||||
# Magic names
|
||||
# The name of the standalone spec list in the manifest yaml
|
||||
@@ -302,7 +307,7 @@ def _is_dev_spec_and_has_changed(spec):
|
||||
return False
|
||||
|
||||
# Now we can check whether the code changed since the last installation
|
||||
if not spec.package.installed:
|
||||
if not spec.installed:
|
||||
# Not installed -> nothing to compare against
|
||||
return False
|
||||
|
||||
@@ -315,7 +320,7 @@ def _spec_needs_overwrite(spec, changed_dev_specs):
|
||||
"""Check whether the current spec needs to be overwritten because either it has
|
||||
changed itself or one of its dependencies have changed"""
|
||||
# if it's not installed, we don't need to overwrite it
|
||||
if not spec.package.installed:
|
||||
if not spec.installed:
|
||||
return False
|
||||
|
||||
# If the spec itself has changed this is a trivial decision
|
||||
@@ -330,7 +335,7 @@ def _spec_needs_overwrite(spec, changed_dev_specs):
|
||||
# If any dep needs overwrite, or any dep is missing and is a dev build then
|
||||
# overwrite this package
|
||||
if any(
|
||||
((not dep.package.installed) and dep.satisfies('dev_path=*')) or
|
||||
((not dep.installed) and dep.satisfies('dev_path=*')) or
|
||||
_spec_needs_overwrite(dep, changed_dev_specs)
|
||||
for dep in spec.traverse(root=False)
|
||||
):
|
||||
@@ -439,7 +444,7 @@ def _next_root(self, specs):
|
||||
def content_hash(self, specs):
|
||||
d = syaml.syaml_dict([
|
||||
('descriptor', self.to_dict()),
|
||||
('specs', [(spec.full_hash(), spec.prefix) for spec in sorted(specs)])
|
||||
('specs', [(spec.dag_hash(), spec.prefix) for spec in sorted(specs)])
|
||||
])
|
||||
contents = sjson.dump(d)
|
||||
return spack.util.hash.b32_hash(contents)
|
||||
@@ -518,7 +523,7 @@ def specs_for_view(self, concretized_root_specs):
|
||||
|
||||
# Filter selected, installed specs
|
||||
with spack.store.db.read_transaction():
|
||||
specs = [s for s in specs if s in self and s.package.installed]
|
||||
specs = [s for s in specs if s in self and s.installed]
|
||||
|
||||
return specs
|
||||
|
||||
@@ -632,11 +637,11 @@ def __init__(self, path, init_file=None, with_view=None, keep_relative=False):
|
||||
# the init file.
|
||||
with fs.open_if_filename(init_file) as f:
|
||||
if hasattr(f, 'name') and f.name.endswith('.lock'):
|
||||
self._read_manifest(default_manifest_yaml)
|
||||
self._read_manifest(default_manifest_yaml())
|
||||
self._read_lockfile(f)
|
||||
self._set_user_specs_from_lockfile()
|
||||
else:
|
||||
self._read_manifest(f, raw_yaml=default_manifest_yaml)
|
||||
self._read_manifest(f, raw_yaml=default_manifest_yaml())
|
||||
|
||||
# Rewrite relative develop paths when initializing a new
|
||||
# environment in a different location from the spack.yaml file.
|
||||
@@ -700,7 +705,7 @@ def _read(self):
|
||||
default_manifest = not os.path.exists(self.manifest_path)
|
||||
if default_manifest:
|
||||
# No manifest, use default yaml
|
||||
self._read_manifest(default_manifest_yaml)
|
||||
self._read_manifest(default_manifest_yaml())
|
||||
else:
|
||||
with open(self.manifest_path) as f:
|
||||
self._read_manifest(f)
|
||||
@@ -766,8 +771,11 @@ def _read_manifest(self, f, raw_yaml=None):
|
||||
self.views = {}
|
||||
# Retrieve the current concretization strategy
|
||||
configuration = config_dict(self.yaml)
|
||||
# default concretization to separately
|
||||
self.concretization = configuration.get('concretization', 'separately')
|
||||
|
||||
# Let `concretization` overrule `concretize:unify` config for now.
|
||||
unify = spack.config.get('concretizer:unify')
|
||||
self.concretization = configuration.get(
|
||||
'concretization', 'together' if unify else 'separately')
|
||||
|
||||
# Retrieve dev-build packages:
|
||||
self.dev_specs = configuration.get('develop', {})
|
||||
@@ -1010,14 +1018,9 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
|
||||
|
||||
if not matches:
|
||||
# concrete specs match against concrete specs in the env
|
||||
# by *dag hash*, not build hash.
|
||||
dag_hashes_in_order = [
|
||||
self.specs_by_hash[build_hash].dag_hash()
|
||||
for build_hash in self.concretized_order
|
||||
]
|
||||
|
||||
# by dag hash.
|
||||
specs_hashes = zip(
|
||||
self.concretized_user_specs, dag_hashes_in_order
|
||||
self.concretized_user_specs, self.concretized_order
|
||||
)
|
||||
|
||||
matches = [
|
||||
@@ -1274,7 +1277,7 @@ def _concretize_separately(self, tests=False):
|
||||
by_hash = {}
|
||||
for abstract, concrete in zip(root_specs, concretized_root_specs):
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
by_hash[concrete.build_hash()] = concrete
|
||||
by_hash[concrete.dag_hash()] = concrete
|
||||
|
||||
# Unify the specs objects, so we get correct references to all parents
|
||||
self._read_lockfile_dict(self._to_lockfile_dict())
|
||||
@@ -1331,7 +1334,7 @@ def concretize_and_add(self, user_spec, concrete_spec=None, tests=False):
|
||||
spec = next(
|
||||
s for s in self.user_specs if s.satisfies(user_spec)
|
||||
)
|
||||
concrete = self.specs_by_hash.get(spec.build_hash())
|
||||
concrete = self.specs_by_hash.get(spec.dag_hash())
|
||||
if not concrete:
|
||||
concrete = spec.concretized(tests=tests)
|
||||
self._add_concrete_spec(spec, concrete)
|
||||
@@ -1380,9 +1383,10 @@ def check_views(self):
|
||||
# default view if they are installed.
|
||||
for view_name, view in self.views.items():
|
||||
for _, spec in self.concretized_specs():
|
||||
if spec in view and spec.package.installed:
|
||||
tty.debug(
|
||||
'Spec %s in view %s' % (spec.name, view_name))
|
||||
if spec in view and spec.package and spec.installed:
|
||||
msg = '{0} in view "{1}"'
|
||||
tty.debug(msg.format(spec.name, view_name))
|
||||
|
||||
except (spack.repo.UnknownPackageError,
|
||||
spack.repo.UnknownNamespaceError) as e:
|
||||
tty.warn(e)
|
||||
@@ -1398,7 +1402,8 @@ def _env_modifications_for_default_view(self, reverse=False):
|
||||
|
||||
errors = []
|
||||
for _, root_spec in self.concretized_specs():
|
||||
if root_spec in self.default_view and root_spec.package.installed:
|
||||
if (root_spec in self.default_view and
|
||||
root_spec.installed and root_spec.package):
|
||||
for spec in root_spec.traverse(deptype='run', root=True):
|
||||
if spec.name in visited:
|
||||
# It is expected that only one instance of the package
|
||||
@@ -1497,7 +1502,7 @@ def _add_concrete_spec(self, spec, concrete, new=True):
|
||||
# update internal lists of specs
|
||||
self.concretized_user_specs.append(spec)
|
||||
|
||||
h = concrete.build_hash()
|
||||
h = concrete.dag_hash()
|
||||
self.concretized_order.append(h)
|
||||
self.specs_by_hash[h] = concrete
|
||||
|
||||
@@ -1537,7 +1542,7 @@ def uninstalled_specs(self):
|
||||
with spack.store.db.read_transaction():
|
||||
for concretized_hash in self.concretized_order:
|
||||
spec = self.specs_by_hash[concretized_hash]
|
||||
if not spec.package.installed or (
|
||||
if not spec.installed or (
|
||||
spec.satisfies('dev_path=*') or
|
||||
spec.satisfies('^dev_path=*')
|
||||
):
|
||||
@@ -1572,7 +1577,7 @@ def install_specs(self, specs=None, **install_args):
|
||||
|
||||
# ensure specs already installed are marked explicit
|
||||
all_specs = specs or [cs for _, cs in self.concretized_specs()]
|
||||
specs_installed = [s for s in all_specs if s.package.installed]
|
||||
specs_installed = [s for s in all_specs if s.installed]
|
||||
with spack.store.db.write_transaction(): # do all in one transaction
|
||||
for spec in specs_installed:
|
||||
spack.store.db.update_explicit(spec, True)
|
||||
@@ -1599,7 +1604,7 @@ def install_specs(self, specs=None, **install_args):
|
||||
finally:
|
||||
# Ensure links are set appropriately
|
||||
for spec in specs_to_install:
|
||||
if spec.package.installed:
|
||||
if spec.installed:
|
||||
self.new_installs.append(spec)
|
||||
try:
|
||||
self._install_log_links(spec)
|
||||
@@ -1614,14 +1619,19 @@ def all_specs(self):
|
||||
"""Return all specs, even those a user spec would shadow."""
|
||||
all_specs = set()
|
||||
for h in self.concretized_order:
|
||||
all_specs.update(self.specs_by_hash[h].traverse())
|
||||
try:
|
||||
spec = self.specs_by_hash[h]
|
||||
except KeyError:
|
||||
tty.warn(
|
||||
'Environment %s appears to be corrupt: missing spec '
|
||||
'"%s"' % (self.name, h))
|
||||
continue
|
||||
all_specs.update(spec.traverse())
|
||||
|
||||
return sorted(all_specs)
|
||||
|
||||
def all_hashes(self):
|
||||
"""Return hashes of all specs.
|
||||
|
||||
Note these hashes exclude build dependencies."""
|
||||
"""Return hashes of all specs."""
|
||||
return list(set(s.dag_hash() for s in self.all_specs()))
|
||||
|
||||
def roots(self):
|
||||
@@ -1649,7 +1659,7 @@ def added_specs(self):
|
||||
concrete = concretized.get(spec)
|
||||
if not concrete:
|
||||
yield spec
|
||||
elif not concrete.package.installed:
|
||||
elif not concrete.installed:
|
||||
yield concrete
|
||||
|
||||
def concretized_specs(self):
|
||||
@@ -1657,6 +1667,15 @@ def concretized_specs(self):
|
||||
for s, h in zip(self.concretized_user_specs, self.concretized_order):
|
||||
yield (s, self.specs_by_hash[h])
|
||||
|
||||
def get_by_hash(self, dag_hash):
|
||||
matches = {}
|
||||
for _, root in self.concretized_specs():
|
||||
for spec in root.traverse(root=True):
|
||||
dep_hash = spec.dag_hash()
|
||||
if dep_hash.startswith(dag_hash):
|
||||
matches[dep_hash] = spec
|
||||
return list(matches.values())
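# get_by_hash() above matches on a *prefix* of the DAG hash, so abbreviated
# hashes resolve to full specs. A standalone sketch of the same idea
# (hypothetical hashes and names, not Spack objects):
def match_by_prefix(specs_by_hash, prefix):
    """Return all values whose key starts with the given hash prefix."""
    return [spec for full_hash, spec in specs_by_hash.items()
            if full_hash.startswith(prefix)]

assert match_by_prefix({'abc123': 'zlib', 'abd999': 'cmake'}, 'abc') == ['zlib']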
|
||||
|
||||
def matching_spec(self, spec):
|
||||
"""
|
||||
Given a spec (likely not concretized), find a matching concretized
|
||||
@@ -1684,13 +1703,7 @@ def matching_spec(self, spec):
|
||||
for user_spec, concretized_user_spec in self.concretized_specs():
|
||||
# Deal with concrete specs differently
|
||||
if spec.concrete:
|
||||
# Matching a concrete spec is more restrictive
|
||||
# than just matching the dag hash
|
||||
is_match = (
|
||||
spec in concretized_user_spec and
|
||||
concretized_user_spec[spec.name].build_hash() == spec.build_hash()
|
||||
)
|
||||
if is_match:
|
||||
if spec in concretized_user_spec:
|
||||
matches[spec] = spec
|
||||
continue
|
||||
|
||||
@@ -1770,12 +1783,12 @@ def _to_lockfile_dict(self):
|
||||
concrete_specs = {}
|
||||
for spec in self.specs_by_hash.values():
|
||||
for s in spec.traverse():
|
||||
build_hash = s.build_hash()
|
||||
if build_hash not in concrete_specs:
|
||||
spec_dict = s.to_node_dict(hash=ht.build_hash)
|
||||
dag_hash = s.dag_hash()
|
||||
if dag_hash not in concrete_specs:
|
||||
spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash)
|
||||
# Assumes no legacy formats, since this was just created.
|
||||
spec_dict[ht.dag_hash.name] = s.dag_hash()
|
||||
concrete_specs[build_hash] = spec_dict
|
||||
concrete_specs[dag_hash] = spec_dict
|
||||
|
||||
hash_spec_list = zip(
|
||||
self.concretized_order, self.concretized_user_specs)
|
||||
@@ -1809,47 +1822,56 @@ def _read_lockfile(self, file_or_json):
|
||||
|
||||
def _read_lockfile_dict(self, d):
|
||||
"""Read a lockfile dictionary into this environment."""
|
||||
self.specs_by_hash = {}
|
||||
|
||||
roots = d['roots']
|
||||
self.concretized_user_specs = [Spec(r['spec']) for r in roots]
|
||||
self.concretized_order = [r['hash'] for r in roots]
|
||||
|
||||
json_specs_by_hash = d['concrete_specs']
|
||||
root_hashes = set(self.concretized_order)
|
||||
|
||||
# Track specs by their lockfile key. Currently spack uses the finest
|
||||
# grained hash as the lockfile key, while older formats used the build
|
||||
# hash or a previous incarnation of the DAG hash (one that did not
|
||||
# include build deps or package hash).
|
||||
specs_by_hash = {}
|
||||
for build_hash, node_dict in json_specs_by_hash.items():
|
||||
spec = Spec.from_node_dict(node_dict)
|
||||
if d['_meta']['lockfile-version'] > 1:
|
||||
# Build hash is stored as a key, but not as part of the node dict
|
||||
# To ensure build hashes are not recomputed, we reattach here
|
||||
setattr(spec, ht.build_hash.attr, build_hash)
|
||||
specs_by_hash[build_hash] = spec
|
||||
|
||||
for build_hash, node_dict in json_specs_by_hash.items():
|
||||
# Track specs by their DAG hash, allows handling DAG hash collisions
|
||||
first_seen = {}
|
||||
|
||||
# First pass: Put each spec in the map ignoring dependencies
|
||||
for lockfile_key, node_dict in json_specs_by_hash.items():
|
||||
spec = Spec.from_node_dict(node_dict)
|
||||
if not spec._hash:
|
||||
# in v1 lockfiles, the hash only occurs as a key
|
||||
spec._hash = lockfile_key
|
||||
specs_by_hash[lockfile_key] = spec
|
||||
|
||||
# Second pass: For each spec, get its dependencies from the node dict
|
||||
# and add them to the spec
|
||||
for lockfile_key, node_dict in json_specs_by_hash.items():
|
||||
for _, dep_hash, deptypes, _ in (
|
||||
Spec.dependencies_from_node_dict(node_dict)):
|
||||
specs_by_hash[build_hash]._add_dependency(
|
||||
specs_by_hash[lockfile_key]._add_dependency(
|
||||
specs_by_hash[dep_hash], deptypes)
|
||||
|
||||
# If we are reading an older lockfile format (which uses dag hashes
|
||||
# that exclude build deps), we use this to convert the old
|
||||
# concretized_order to the full hashes (preserving the order)
|
||||
old_hash_to_new = {}
|
||||
self.specs_by_hash = {}
|
||||
for _, spec in specs_by_hash.items():
|
||||
dag_hash = spec.dag_hash()
|
||||
build_hash = spec.build_hash()
|
||||
if dag_hash in root_hashes:
|
||||
old_hash_to_new[dag_hash] = build_hash
|
||||
# Traverse the root specs one at a time in the order they appear.
|
||||
# The first time we see each DAG hash, that's the one we want to
|
||||
# keep. This is only required as long as we support older lockfile
|
||||
# formats where the mapping from DAG hash to lockfile key is possibly
|
||||
# one-to-many.
|
||||
for lockfile_key in self.concretized_order:
|
||||
for s in specs_by_hash[lockfile_key].traverse():
|
||||
if s.dag_hash() not in first_seen:
|
||||
first_seen[s.dag_hash()] = s
|
||||
|
||||
if (dag_hash in root_hashes or build_hash in root_hashes):
|
||||
self.specs_by_hash[build_hash] = spec
|
||||
# Now make sure concretized_order and our internal specs dict
|
||||
# contains the keys used by modern spack (i.e. the dag_hash
|
||||
# that includes build deps and package hash).
|
||||
self.concretized_order = [specs_by_hash[h_key].dag_hash()
|
||||
for h_key in self.concretized_order]
|
||||
|
||||
if old_hash_to_new:
|
||||
# Replace any older hashes in concretized_order with hashes
|
||||
# that include build deps
|
||||
self.concretized_order = [
|
||||
old_hash_to_new.get(h, h) for h in self.concretized_order]
|
||||
for spec_dag_hash in self.concretized_order:
|
||||
self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
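# "First seen wins" in isolation (hypothetical data, not Spack objects): when
# two lockfile keys resolve to specs with the same DAG hash, only the first one
# encountered in root/traversal order is kept.
def dedupe_by_hash(records):
    """records: iterable of (dag_hash, payload) pairs in traversal order."""
    first_seen = {}
    for dag_hash, payload in records:
        if dag_hash not in first_seen:
            first_seen[dag_hash] = payload
    return first_seen

assert dedupe_by_hash([('abc', 'first'), ('abc', 'second'), ('def', 'x')]) == \
    {'abc': 'first', 'def': 'x'}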
|
||||
|
||||
def write(self, regenerate=True):
|
||||
"""Writes an in-memory environment to its location on disk.
|
||||
@@ -1862,17 +1884,15 @@ def write(self, regenerate=True):
|
||||
regenerate (bool): regenerate views and run post-write hooks as
|
||||
well as writing if True.
|
||||
"""
|
||||
# Intercept environment not using the latest schema format and prevent
|
||||
# them from being modified
|
||||
manifest_exists = os.path.exists(self.manifest_path)
|
||||
if manifest_exists and not is_latest_format(self.manifest_path):
|
||||
msg = ('The environment "{0}" needs to be written to disk, but '
|
||||
'is currently using a deprecated format. Please update it '
|
||||
'using:\n\n'
|
||||
'\tspack env update {0}\n\n'
|
||||
'Note that previous versions of Spack will not be able to '
|
||||
# Warn that environments are not in the latest format.
|
||||
if not is_latest_format(self.manifest_path):
|
||||
ver = '.'.join(str(s) for s in spack.spack_version_info[:2])
|
||||
msg = ('The environment "{}" is written to disk in a deprecated format. '
|
||||
'Please update it using:\n\n'
|
||||
'\tspack env update {}\n\n'
|
||||
'Note that versions of Spack older than {} may not be able to '
|
||||
'use the updated configuration.')
|
||||
raise RuntimeError(msg.format(self.name))
|
||||
tty.warn(msg.format(self.name, self.name, ver))
|
||||
|
||||
# ensure path in var/spack/environments
|
||||
fs.mkdirp(self.path)
|
||||
@@ -2224,14 +2244,16 @@ def _top_level_key(data):
|
||||
|
||||
|
||||
def is_latest_format(manifest):
|
||||
"""Return True if the manifest file is at the latest schema format,
|
||||
False otherwise.
|
||||
"""Return False if the manifest file exists and is not in the latest schema format.
|
||||
|
||||
Args:
|
||||
manifest (str): manifest file to be analyzed
|
||||
"""
|
||||
with open(manifest) as f:
|
||||
data = syaml.load(f)
|
||||
try:
|
||||
with open(manifest) as f:
|
||||
data = syaml.load(f)
|
||||
except (OSError, IOError):
|
||||
return True
|
||||
top_level_key = _top_level_key(data)
|
||||
changed = spack.schema.env.update(data[top_level_key])
|
||||
return not changed
|
||||
|
||||
@@ -10,9 +10,9 @@
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
#: whether we should write stack traces or short error messages
|
||||
#: at what level we should write stack traces or short error messages
|
||||
#: this is module-scoped because it needs to be set very early
|
||||
debug = False
|
||||
debug = 0
|
||||
|
||||
|
||||
class SpackError(Exception):
|
||||
|
||||
@@ -406,12 +406,12 @@ def write(self, spec, color=None, out=None):
|
||||
# Colors associated with each node in the DAG.
|
||||
# Edges are colored by the node they point to.
|
||||
self._name_to_color = {
|
||||
spec.full_hash(): self.colors[i % len(self.colors)]
|
||||
spec.dag_hash(): self.colors[i % len(self.colors)]
|
||||
for i, spec in enumerate(nodes_in_topological_order)
|
||||
}
|
||||
|
||||
# Frontier tracks open edges of the graph as it's written out.
|
||||
self._frontier = [[spec.full_hash()]]
|
||||
self._frontier = [[spec.dag_hash()]]
|
||||
while self._frontier:
|
||||
# Find an unexpanded part of frontier
|
||||
i = find(self._frontier, lambda f: len(f) > 1)
|
||||
@@ -488,14 +488,16 @@ def write(self, spec, color=None, out=None):
|
||||
node = nodes_in_topological_order.pop()
|
||||
|
||||
# Find the named node in the frontier and draw it.
|
||||
i = find(self._frontier, lambda f: node.full_hash() in f)
|
||||
i = find(self._frontier, lambda f: node.dag_hash() in f)
|
||||
self._node_line(i, node)
|
||||
|
||||
# Replace node with its dependencies
|
||||
self._frontier.pop(i)
|
||||
deps = node.dependencies(deptype=self.deptype)
|
||||
if deps:
|
||||
deps = sorted((d.full_hash() for d in deps), reverse=True)
|
||||
edges = sorted(
|
||||
node.edges_to_dependencies(deptype=self.deptype), reverse=True
|
||||
)
|
||||
if edges:
|
||||
deps = [e.spec.dag_hash() for e in edges]
|
||||
self._connect_deps(i, deps, "new-deps") # anywhere.
|
||||
|
||||
elif self._frontier:
|
||||
|
||||
@@ -33,15 +33,14 @@ def attr(self):
|
||||
"""Private attribute stored on spec"""
|
||||
return '_' + self.name
|
||||
|
||||
def __call__(self, spec):
|
||||
"""Run this hash on the provided spec."""
|
||||
return spec.spec_hash(self)
|
||||
|
||||
#: Default Hash descriptor, used by Spec.dag_hash() and stored in the DB.
|
||||
|
||||
#: Spack's deployment hash. Includes all inputs that can affect how a package is built.
|
||||
dag_hash = SpecHashDescriptor(
|
||||
deptype=('link', 'run'), package_hash=False, name='hash')
|
||||
|
||||
|
||||
#: Hash descriptor that includes build dependencies.
|
||||
build_hash = SpecHashDescriptor(
|
||||
deptype=('build', 'link', 'run'), package_hash=False, name='build_hash')
|
||||
deptype=('build', 'link', 'run'), package_hash=True, name='hash')
|
||||
|
||||
|
||||
#: Hash descriptor used only to transfer a DAG, as is, across processes
|
||||
@@ -51,12 +50,19 @@ def attr(self):
|
||||
name='process_hash'
|
||||
)
|
||||
|
||||
#: Full hash used in build pipelines to determine when to rebuild packages.
|
||||
|
||||
#: Package hash used as part of dag hash
|
||||
package_hash = SpecHashDescriptor(
|
||||
deptype=(), package_hash=True, name='package_hash',
|
||||
override=lambda s: s.package.content_hash())
|
||||
|
||||
|
||||
# Deprecated hash types, no longer used, but needed to understand old serialized
|
||||
# spec formats
|
||||
|
||||
full_hash = SpecHashDescriptor(
|
||||
deptype=('build', 'link', 'run'), package_hash=True, name='full_hash')
|
||||
|
||||
|
||||
#: Package hash used as part of full hash
|
||||
package_hash = SpecHashDescriptor(
|
||||
deptype=(), package_hash=True, name='package_hash',
|
||||
override=lambda s: s.package.content_hash())
|
||||
build_hash = SpecHashDescriptor(
|
||||
deptype=('build', 'link', 'run'), package_hash=False, name='build_hash')
|
||||
|
||||
@@ -140,7 +140,7 @@ def _handle_external_and_upstream(pkg, explicit):
|
||||
.format(pkg.prefix, package_id(pkg)))
|
||||
return True
|
||||
|
||||
if pkg.installed_upstream:
|
||||
if pkg.spec.installed_upstream:
|
||||
tty.verbose('{0} is installed in an upstream Spack instance at {1}'
|
||||
.format(package_id(pkg), pkg.spec.prefix))
|
||||
_print_installed_pkg(pkg.prefix)
|
||||
@@ -260,8 +260,7 @@ def _hms(seconds):
|
||||
return ' '.join(parts)
|
||||
|
||||
|
||||
def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
|
||||
full_hash_match=False):
|
||||
def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
|
||||
"""
|
||||
Extract the package from binary cache
|
||||
|
||||
@@ -278,7 +277,7 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
|
||||
``False`` otherwise
|
||||
"""
|
||||
installed_from_cache = _try_install_from_binary_cache(
|
||||
pkg, explicit, unsigned=unsigned, full_hash_match=full_hash_match)
|
||||
pkg, explicit, unsigned=unsigned)
|
||||
pkg_id = package_id(pkg)
|
||||
if not installed_from_cache:
|
||||
pre = 'No binary for {0} found'.format(pkg_id)
|
||||
@@ -390,8 +389,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
|
||||
return True
|
||||
|
||||
|
||||
def _try_install_from_binary_cache(pkg, explicit, unsigned=False,
|
||||
full_hash_match=False):
|
||||
def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
|
||||
"""
|
||||
Try to extract the package from binary cache.
|
||||
|
||||
@@ -403,8 +401,7 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False,
|
||||
"""
|
||||
pkg_id = package_id(pkg)
|
||||
tty.debug('Searching for binary cache of {0}'.format(pkg_id))
|
||||
matches = binary_distribution.get_mirrors_for_spec(
|
||||
pkg.spec, full_hash_match=full_hash_match)
|
||||
matches = binary_distribution.get_mirrors_for_spec(pkg.spec)
|
||||
|
||||
if not matches:
|
||||
return False
|
||||
@@ -561,6 +558,10 @@ def log(pkg):
|
||||
# Archive the environment modifications for the build.
|
||||
fs.install(pkg.env_mods_path, pkg.install_env_path)
|
||||
|
||||
# Archive the install-phase test log, if present
|
||||
if pkg.test_install_log_path and os.path.exists(pkg.test_install_log_path):
|
||||
fs.install(pkg.test_install_log_path, pkg.install_test_install_log_path)
|
||||
|
||||
if os.path.exists(pkg.configure_args_path):
|
||||
# Archive the args used for the build
|
||||
fs.install(pkg.configure_args_path, pkg.install_configure_args_path)
|
||||
@@ -853,7 +854,7 @@ def _check_deps_status(self, request):
|
||||
raise InstallError(err.format(request.pkg_id, msg))
|
||||
|
||||
# Flag external and upstream packages as being installed
|
||||
if dep_pkg.spec.external or dep_pkg.installed_upstream:
|
||||
if dep_pkg.spec.external or dep_pkg.spec.installed_upstream:
|
||||
self._flag_installed(dep_pkg)
|
||||
continue
|
||||
|
||||
@@ -995,7 +996,7 @@ def _ensure_install_ready(self, pkg):
|
||||
raise ExternalPackageError('{0} {1}'.format(pre, 'is external'))
|
||||
|
||||
# Upstream packages cannot be installed locally.
|
||||
if pkg.installed_upstream:
|
||||
if pkg.spec.installed_upstream:
|
||||
raise UpstreamPackageError('{0} {1}'.format(pre, 'is upstream'))
|
||||
|
||||
# The package must have a prefix lock at this stage.
|
||||
@@ -1200,7 +1201,6 @@ def _install_task(self, task):
|
||||
install_args = task.request.install_args
|
||||
cache_only = install_args.get('cache_only')
|
||||
explicit = task.explicit
|
||||
full_hash_match = install_args.get('full_hash_match')
|
||||
tests = install_args.get('tests')
|
||||
unsigned = install_args.get('unsigned')
|
||||
use_cache = install_args.get('use_cache')
|
||||
@@ -1213,8 +1213,7 @@ def _install_task(self, task):
|
||||
|
||||
# Use the binary cache if requested
|
||||
if use_cache and \
|
||||
_install_from_cache(pkg, cache_only, explicit, unsigned,
|
||||
full_hash_match):
|
||||
_install_from_cache(pkg, cache_only, explicit, unsigned):
|
||||
self._update_installed(task)
|
||||
if task.compiler:
|
||||
spack.compilers.add_compilers_to_config(
|
||||
@@ -2018,11 +2017,10 @@ def build_process(pkg, install_args):
|
||||
|
||||
|
||||
class OverwriteInstall(object):
|
||||
def __init__(self, installer, database, task, tmp_root=None):
|
||||
def __init__(self, installer, database, task):
|
||||
self.installer = installer
|
||||
self.database = database
|
||||
self.task = task
|
||||
self.tmp_root = tmp_root
|
||||
|
||||
def install(self):
|
||||
"""
|
||||
@@ -2032,7 +2030,7 @@ def install(self):
|
||||
install error if installation fails.
|
||||
"""
|
||||
try:
|
||||
with fs.replace_directory_transaction(self.task.pkg.prefix, self.tmp_root):
|
||||
with fs.replace_directory_transaction(self.task.pkg.prefix):
|
||||
self.installer._install_task(self.task)
|
||||
except fs.CouldNotRestoreDirectoryBackup as e:
|
||||
self.database.remove(self.task.pkg.spec)
|
||||
@@ -2303,7 +2301,6 @@ def _add_default_args(self):
|
||||
('dirty', False),
|
||||
('fail_fast', False),
|
||||
('fake', False),
|
||||
('full_hash_match', False),
|
||||
('install_deps', True),
|
||||
('install_package', True),
|
||||
('install_source', False),
|
||||
|
||||
@@ -30,7 +30,7 @@
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.tty.colify
|
||||
import llnl.util.tty.color as color
|
||||
from llnl.util.tty.log import log_output, winlog
|
||||
from llnl.util.tty.log import log_output
|
||||
|
||||
import spack
|
||||
import spack.cmd
|
||||
@@ -375,13 +375,6 @@ def make_argument_parser(**kwargs):
|
||||
# stat names in groups of 7, for nice wrapping.
|
||||
stat_lines = list(zip(*(iter(stat_names),) * 7))
|
||||
|
||||
# help message for --show-cores
|
||||
show_cores_help = 'provide additional information on concretization failures\n'
|
||||
show_cores_help += 'off (default): show only the violated rule\n'
|
||||
show_cores_help += 'full: show raw unsat cores from clingo\n'
|
||||
show_cores_help += 'minimized: show subset-minimal unsat cores '
|
||||
show_cores_help += '(Warning: this may take hours for some specs)'
|
||||
|
||||
parser.add_argument(
|
||||
'-h', '--help',
|
||||
dest='help', action='store_const', const='short', default=None,
|
||||
@@ -405,9 +398,6 @@ def make_argument_parser(**kwargs):
|
||||
'-d', '--debug', action='count', default=0,
|
||||
help="write out debug messages "
|
||||
"(more d's for more verbosity: -d, -dd, -ddd, etc.)")
|
||||
parser.add_argument(
|
||||
'--show-cores', choices=["off", "full", "minimized"], default="off",
|
||||
help=show_cores_help)
|
||||
parser.add_argument(
|
||||
'--timestamp', action='store_true',
|
||||
help="Add a timestamp to tty output")
|
||||
@@ -490,18 +480,11 @@ def setup_main_options(args):
|
||||
# errors raised by spack.config.
|
||||
|
||||
if args.debug:
|
||||
spack.error.debug = True
|
||||
spack.error.debug = args.debug
|
||||
spack.util.debug.register_interrupt_handler()
|
||||
spack.config.set('config:debug', True, scope='command_line')
|
||||
spack.util.environment.tracing_enabled = True
|
||||
|
||||
if args.show_cores != "off":
|
||||
# minimize_cores defaults to true, turn it off if we're showing full core
|
||||
# but don't want to wait to minimize it.
|
||||
spack.solver.asp.full_cores = True
|
||||
if args.show_cores == 'full':
|
||||
spack.solver.asp.minimize_cores = False
|
||||
|
||||
if args.timestamp:
|
||||
tty.set_timestamp(True)
|
||||
|
||||
@@ -605,14 +588,9 @@ def __call__(self, *argv, **kwargs):
|
||||
|
||||
out = StringIO()
|
||||
try:
|
||||
if sys.platform == 'win32':
|
||||
with winlog(out):
|
||||
self.returncode = _invoke_command(
|
||||
self.command, self.parser, args, unknown)
|
||||
else:
|
||||
with log_output(out):
|
||||
self.returncode = _invoke_command(
|
||||
self.command, self.parser, args, unknown)
|
||||
with log_output(out):
|
||||
self.returncode = _invoke_command(
|
||||
self.command, self.parser, args, unknown)
|
||||
|
||||
except SystemExit as e:
|
||||
self.returncode = e.code
|
||||
|
||||
@@ -184,19 +184,38 @@ def _filter_compiler_wrappers_impl(self):
|
||||
|
||||
x = llnl.util.filesystem.FileFilter(*abs_files)
|
||||
|
||||
replacements = [
|
||||
compiler_vars = [
|
||||
('CC', self.compiler.cc),
|
||||
('CXX', self.compiler.cxx),
|
||||
('F77', self.compiler.f77),
|
||||
('FC', self.compiler.fc)
|
||||
]
|
||||
for env_var, compiler_path in replacements:
|
||||
|
||||
# Some paths to the compiler wrappers might be substrings of the others.
|
||||
# For example:
|
||||
# CC=/path/to/spack/lib/spack/env/cc (realpath to the wrapper)
|
||||
# FC=/path/to/spack/lib/spack/env/cce/ftn
|
||||
# Therefore, we perform the filtering in the reversed sorted order of
|
||||
# the substituted strings. If, however, the strings are identical (e.g.
|
||||
# both CC and FC are set using realpath), the filtering is done
|
||||
# according to the order in compiler_vars. To achieve that, we populate
|
||||
# the following array with tuples of three elements: path to the
|
||||
# wrapper, negated index of the variable in compiler_vars, path to the
|
||||
# real compiler. This way, the reversed sorted order of the resulting
|
||||
# array is the order of replacements that we need.
|
||||
replacements = []
|
||||
|
||||
for idx, (env_var, compiler_path) in enumerate(compiler_vars):
|
||||
if env_var in os.environ:
|
||||
# filter spack wrapper and links to spack wrapper in case
|
||||
# build system runs realpath
|
||||
wrapper = os.environ[env_var]
|
||||
for wrapper_path in (wrapper, os.path.realpath(wrapper)):
|
||||
x.filter(wrapper_path, compiler_path, **filter_kwargs)
|
||||
replacements.append((wrapper_path, -idx, compiler_path))
|
||||
|
||||
for wrapper_path, _, compiler_path in sorted(replacements,
|
||||
reverse=True):
|
||||
x.filter(wrapper_path, compiler_path, **filter_kwargs)
|
||||
|
||||
# Remove this linking flag if present (it turns RPATH into RUNPATH)
|
||||
x.filter('{0}--enable-new-dtags'.format(self.compiler.linker_arg), '',
|
||||
|
||||
@@ -370,7 +370,7 @@ def get_module(
|
||||
available.
|
||||
"""
|
||||
try:
|
||||
upstream = spec.package.installed_upstream
|
||||
upstream = spec.installed_upstream
|
||||
except spack.repo.UnknownPackageError:
|
||||
upstream, record = spack.store.db.query_by_spec_hash(spec.dag_hash())
|
||||
if upstream:
|
||||
|
||||
@@ -132,7 +132,7 @@ def __init__(self, host=None, prefix="ms1", allow_fail=False, tags=None,
|
||||
self.tags = tags
|
||||
self.save_local = save_local
|
||||
|
||||
# We keey lookup of build_id by full_hash
|
||||
# We key lookup of build_id by dag_hash
|
||||
self.build_ids = {}
|
||||
self.setup_save()
|
||||
|
||||
@@ -412,6 +412,8 @@ def new_configuration(self, specs):
|
||||
spec.concretize()
|
||||
|
||||
# Remove extra level of nesting
|
||||
# This is the only place in Spack we still use full_hash, as `spack monitor`
|
||||
# requires specs with full_hash-keyed dependencies.
|
||||
as_dict = {"spec": spec.to_dict(hash=ht.full_hash)['spec'],
|
||||
"spack_version": self.spack_version}
|
||||
|
||||
@@ -437,8 +439,7 @@ def failed_concretization(self, specs):
|
||||
meta = spec.to_dict()['spec']
|
||||
nodes = []
|
||||
for node in meta.get("nodes", []):
|
||||
for hashtype in ["build_hash", "full_hash"]:
|
||||
node[hashtype] = "FAILED_CONCRETIZATION"
|
||||
node["full_hash"] = "FAILED_CONCRETIZATION"
|
||||
nodes.append(node)
|
||||
meta['nodes'] = nodes
|
||||
|
||||
@@ -470,13 +471,13 @@ def get_build_id(self, spec, return_response=False, spec_exists=True):
|
||||
"""
|
||||
Retrieve a build id, either in the local cache, or query the server.
|
||||
"""
|
||||
full_hash = spec.full_hash()
|
||||
if full_hash in self.build_ids:
|
||||
return self.build_ids[full_hash]
|
||||
dag_hash = spec.dag_hash()
|
||||
if dag_hash in self.build_ids:
|
||||
return self.build_ids[dag_hash]
|
||||
|
||||
# Prepare build environment data (including spack version)
|
||||
data = self.build_environment.copy()
|
||||
data['full_hash'] = full_hash
|
||||
data['full_hash'] = dag_hash
|
||||
|
||||
# If the build should be tagged, add it
|
||||
if self.tags:
|
||||
@@ -494,10 +495,10 @@ def get_build_id(self, spec, return_response=False, spec_exists=True):
|
||||
data['spec'] = syaml.load(read_file(spec_file))
|
||||
|
||||
if self.save_local:
|
||||
return self.get_local_build_id(data, full_hash, return_response)
|
||||
return self.get_server_build_id(data, full_hash, return_response)
|
||||
return self.get_local_build_id(data, dag_hash, return_response)
|
||||
return self.get_server_build_id(data, dag_hash, return_response)
|
||||
|
||||
def get_local_build_id(self, data, full_hash, return_response):
|
||||
def get_local_build_id(self, data, dag_hash, return_response):
|
||||
"""
|
||||
Generate a local build id based on hashing the expected data
|
||||
"""
|
||||
@@ -510,15 +511,15 @@ def get_local_build_id(self, data, full_hash, return_response):
|
||||
return response
|
||||
return bid
|
||||
|
||||
def get_server_build_id(self, data, full_hash, return_response=False):
|
||||
def get_server_build_id(self, data, dag_hash, return_response=False):
|
||||
"""
|
||||
Retrieve a build id from the spack monitor server
|
||||
"""
|
||||
response = self.do_request("builds/new/", data=sjson.dump(data))
|
||||
|
||||
# Add the build id to the lookup
|
||||
bid = self.build_ids[full_hash] = response['data']['build']['build_id']
|
||||
self.build_ids[full_hash] = bid
|
||||
bid = self.build_ids[dag_hash] = response['data']['build']['build_id']
|
||||
self.build_ids[dag_hash] = bid
|
||||
|
||||
# If the function is called directly, the user might want output
|
||||
if return_response:
|
||||
|
||||
@@ -26,13 +26,14 @@
|
||||
import time
|
||||
import traceback
|
||||
import types
|
||||
import warnings
|
||||
from typing import Any, Callable, Dict, List, Optional # novm
|
||||
|
||||
import six
|
||||
|
||||
import llnl.util.filesystem as fsys
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import memoized
|
||||
from llnl.util.lang import memoized, nullcontext
|
||||
from llnl.util.link_tree import LinkTree
|
||||
|
||||
import spack.compilers
|
||||
@@ -52,6 +53,7 @@
|
||||
import spack.store
|
||||
import spack.url
|
||||
import spack.util.environment
|
||||
import spack.util.path
|
||||
import spack.util.web
|
||||
from spack.filesystem_view import YamlFilesystemView
|
||||
from spack.install_test import TestFailure, TestSuite
|
||||
@@ -59,7 +61,6 @@
|
||||
from spack.stage import ResourceStage, Stage, StageComposite, stage_prefix
|
||||
from spack.util.executable import ProcessError, which
|
||||
from spack.util.package_hash import package_hash
|
||||
from spack.util.path import win_exe_ext
|
||||
from spack.util.prefix import Prefix
|
||||
from spack.version import Version
|
||||
|
||||
@@ -76,6 +77,9 @@
|
||||
# Filename for the Spack build/install environment modifications file.
|
||||
_spack_build_envmodsfile = 'spack-build-env-mods.txt'
|
||||
|
||||
# Filename for the Spack install phase-time test log.
|
||||
_spack_install_test_log = 'install-time-test-log.txt'
|
||||
|
||||
# Filename of json with total build and phase times (seconds)
|
||||
_spack_times_log = 'install_times.json'
|
||||
|
||||
@@ -196,9 +200,9 @@ def __init__(cls, name, bases, attr_dict):
|
||||
def platform_executables(self):
|
||||
def to_windows_exe(exe):
|
||||
if exe.endswith('$'):
|
||||
exe = exe.replace('$', '%s$' % win_exe_ext())
|
||||
exe = exe.replace('$', '%s$' % spack.util.path.win_exe_ext())
|
||||
else:
|
||||
exe += win_exe_ext()
|
||||
exe += spack.util.path.win_exe_ext()
|
||||
return exe
|
||||
plat_exe = []
|
||||
if hasattr(self, 'executables'):
|
||||
@@ -434,6 +438,11 @@ def name(self):
|
||||
self._name = self._name[self._name.rindex('.') + 1:]
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def global_license_dir(self):
|
||||
"""Returns the directory where license files for all packages are stored."""
|
||||
return spack.util.path.canonicalize_path(spack.config.get('config:license_dir'))
|
||||
|
||||
|
||||
def run_before(*phases):
|
||||
"""Registers a method of a package to be run before a given phase"""
|
||||
@@ -790,15 +799,6 @@ def __init__(self, spec):
|
||||
|
||||
super(PackageBase, self).__init__()
|
||||
|
||||
@property
|
||||
def installed_upstream(self):
|
||||
if not hasattr(self, '_installed_upstream'):
|
||||
upstream, record = spack.store.db.query_by_spec_hash(
|
||||
self.spec.dag_hash())
|
||||
self._installed_upstream = upstream
|
||||
|
||||
return self._installed_upstream
|
||||
|
||||
@classmethod
|
||||
def possible_dependencies(
|
||||
cls, transitive=True, expand_virtuals=True, deptype='all',
|
||||
@@ -943,9 +943,8 @@ def name(self):
|
||||
|
||||
@property
|
||||
def global_license_dir(self):
|
||||
"""Returns the directory where global license files for all
|
||||
packages are stored."""
|
||||
return os.path.join(spack.paths.prefix, 'etc', 'spack', 'licenses')
|
||||
"""Returns the directory where global license files are stored."""
|
||||
return type(self).global_license_dir
|
||||
|
||||
@property
|
||||
def global_license_file(self):
|
||||
@@ -1252,6 +1251,16 @@ def configure_args_path(self):
|
||||
"""Return the configure args file path associated with staging."""
|
||||
return os.path.join(self.stage.path, _spack_configure_argsfile)
|
||||
|
||||
@property
|
||||
def test_install_log_path(self):
|
||||
"""Return the install phase-time test log file path, if set."""
|
||||
return getattr(self, 'test_log_file', None)
|
||||
|
||||
@property
|
||||
def install_test_install_log_path(self):
|
||||
"""Return the install location for the install phase-time test log."""
|
||||
return fsys.join_path(self.metadata_dir, _spack_install_test_log)
|
||||
|
||||
@property
|
||||
def times_log_path(self):
|
||||
"""Return the times log json file."""
|
||||
@@ -1267,6 +1276,20 @@ def install_test_root(self):
|
||||
"""Return the install test root directory."""
|
||||
return os.path.join(self.metadata_dir, 'test')
|
||||
|
||||
@property
|
||||
def installed(self):
|
||||
msg = ('the "PackageBase.installed" property is deprecated and will be '
|
||||
'removed in Spack v0.19, use "Spec.installed" instead')
|
||||
warnings.warn(msg)
|
||||
return self.spec.installed
|
||||
|
||||
@property
|
||||
def installed_upstream(self):
|
||||
msg = ('the "PackageBase.installed_upstream" property is deprecated and will '
|
||||
'be removed in Spack v0.19, use "Spec.installed_upstream" instead')
|
||||
warnings.warn(msg)
|
||||
return self.spec.installed_upstream
|
||||
|
||||
def _make_fetcher(self):
|
||||
# Construct a composite fetcher that always contains at least
|
||||
# one element (the root package). In case there are resources
|
||||
@@ -1380,7 +1403,7 @@ def is_activated(self, view):
|
||||
if not self.is_extension:
|
||||
raise ValueError(
|
||||
"is_activated called on package that is not an extension.")
|
||||
if self.extendee_spec.package.installed_upstream:
|
||||
if self.extendee_spec.installed_upstream:
|
||||
# If this extends an upstream package, it cannot be activated for
|
||||
# it. This bypasses construction of the extension map, which can
|
||||
# can fail when run in the context of a downstream Spack instance
|
||||
@@ -1406,22 +1429,6 @@ def virtuals_provided(self):
|
||||
return [vspec for vspec, constraints in self.provided.items()
|
||||
if any(self.spec.satisfies(c) for c in constraints)]
|
||||
|
||||
@property
|
||||
def installed(self):
|
||||
"""Installation status of a package.
|
||||
|
||||
Returns:
|
||||
True if the package has been installed, False otherwise.
|
||||
"""
|
||||
try:
|
||||
# If the spec is in the DB, check the installed
|
||||
# attribute of the record
|
||||
return spack.store.db.get_record(self.spec).installed
|
||||
except KeyError:
|
||||
# If the spec is not in the DB, the method
|
||||
# above raises a Key error
|
||||
return False
|
||||
|
||||
@property
|
||||
def prefix(self):
|
||||
"""Get the prefix into which this package should be installed."""
|
||||
@@ -1670,39 +1677,65 @@ def all_patches(cls):
|
||||
return patches
|
||||
|
||||
def content_hash(self, content=None):
|
||||
"""Create a hash based on the sources and logic used to build the
|
||||
package. This includes the contents of all applied patches and the
|
||||
contents of applicable functions in the package subclass."""
|
||||
if not self.spec.concrete:
|
||||
err_msg = ("Cannot invoke content_hash on a package"
|
||||
" if the associated spec is not concrete")
|
||||
raise spack.error.SpackError(err_msg)
|
||||
"""Create a hash based on the artifacts and patches used to build this package.
|
||||
|
||||
hash_content = list()
|
||||
try:
|
||||
source_id = fs.for_package_version(self, self.version).source_id()
|
||||
except fs.ExtrapolationError:
|
||||
source_id = None
|
||||
if not source_id:
|
||||
# TODO? in cases where a digest or source_id isn't available,
|
||||
# should this attempt to download the source and set one? This
|
||||
# probably only happens for source repositories which are
|
||||
# referenced by branch name rather than tag or commit ID.
|
||||
env = spack.environment.active_environment()
|
||||
from_local_sources = env and env.is_develop(self.spec)
|
||||
if not self.spec.external and not from_local_sources:
|
||||
message = 'Missing a source id for {s.name}@{s.version}'
|
||||
tty.warn(message.format(s=self))
|
||||
hash_content.append(''.encode('utf-8'))
|
||||
else:
|
||||
hash_content.append(source_id.encode('utf-8'))
|
||||
hash_content.extend(':'.join((p.sha256, str(p.level))).encode('utf-8')
|
||||
for p in self.spec.patches)
|
||||
This includes:
|
||||
* source artifacts (tarballs, repositories) used to build;
|
||||
* content hashes (``sha256``'s) of all patches applied by Spack; and
|
||||
* canonicalized contents the ``package.py`` recipe used to build.
|
||||
|
||||
This hash is only included in Spack's DAG hash for concrete specs, but if it
|
||||
happens to be called on a package with an abstract spec, only applicable (i.e.,
|
||||
determinable) portions of the hash will be included.
|
||||
|
||||
"""
|
||||
# list of components to make up the hash
|
||||
hash_content = []
|
||||
|
||||
# source artifacts/repositories
|
||||
# TODO: resources
|
||||
if self.spec.versions.concrete:
|
||||
try:
|
||||
source_id = fs.for_package_version(self, self.version).source_id()
|
||||
except (fs.ExtrapolationError, fs.InvalidArgsError):
|
||||
# ExtrapolationError happens if the package has no fetchers defined.
|
||||
# InvalidArgsError happens when there are version directives with args,
|
||||
# but none of them identifies an actual fetcher.
|
||||
source_id = None
|
||||
|
||||
if not source_id:
|
||||
# TODO? in cases where a digest or source_id isn't available,
|
||||
# should this attempt to download the source and set one? This
|
||||
# probably only happens for source repositories which are
|
||||
# referenced by branch name rather than tag or commit ID.
|
||||
env = spack.environment.active_environment()
|
||||
from_local_sources = env and env.is_develop(self.spec)
|
||||
if not self.spec.external and not from_local_sources:
|
||||
message = 'Missing a source id for {s.name}@{s.version}'
|
||||
tty.warn(message.format(s=self))
|
||||
hash_content.append(''.encode('utf-8'))
|
||||
else:
|
||||
hash_content.append(source_id.encode('utf-8'))
|
||||
|
||||
# patch sha256's
|
||||
# Only include these if they've been assigned by the concretizer.
|
||||
# We check spec._patches_assigned instead of spec.concrete because
|
||||
# we have to call package_hash *before* marking specs concrete
|
||||
if self.spec._patches_assigned():
|
||||
hash_content.extend(
|
||||
':'.join((p.sha256, str(p.level))).encode('utf-8')
|
||||
for p in self.spec.patches
|
||||
)
|
||||
|
||||
# package.py contents
|
||||
hash_content.append(package_hash(self.spec, source=content).encode('utf-8'))
|
||||
|
||||
# put it all together and encode as base32
|
||||
b32_hash = base64.b32encode(
|
||||
hashlib.sha256(bytes().join(
|
||||
sorted(hash_content))).digest()).lower()
|
||||
hashlib.sha256(
|
||||
bytes().join(sorted(hash_content))
|
||||
).digest()
|
||||
).lower()
|
||||
|
||||
# convert from bytes if running python 3
|
||||
if sys.version_info[0] >= 3:
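For readers following this hunk, a minimal standalone sketch of the final combine-and-encode step: the byte components below are placeholders for the source id, the patch sha256:level strings, and the package.py hash collected above.

import base64
import hashlib

def combine_hash_components(hash_content):
    # sort the byte components, sha256 the concatenation, and base32-encode it lowercase
    digest = hashlib.sha256(b''.join(sorted(hash_content))).digest()
    return base64.b32encode(digest).lower().decode('utf-8')

print(combine_hash_components([b'source-id', b'patch-sha256:1', b'package-py-hash']))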
|
||||
@@ -1775,10 +1808,7 @@ def _if_make_target_execute(self, target, *args, **kwargs):
|
||||
"""
|
||||
if self._has_make_target(target):
|
||||
# Execute target
|
||||
if self.spec.satisfies('%emscripten'):
|
||||
inspect.getmodule(self).emmake(target, *args, **kwargs)
|
||||
else:
|
||||
inspect.getmodule(self).make(target, *args, **kwargs)
|
||||
inspect.getmodule(self).make(target, *args, **kwargs)
|
||||
|
||||
def _has_ninja_target(self, target):
|
||||
"""Checks to see if 'target' is a valid target in a Ninja build script.
|
||||
@@ -1929,6 +1959,33 @@ def cache_extra_test_sources(self, srcs):
|
||||
fsys.mkdirp(os.path.dirname(dest_path))
|
||||
fsys.copy(src_path, dest_path)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _setup_test(self, verbose, externals):
|
||||
self.test_failures = []
|
||||
if self.test_suite:
|
||||
self.test_log_file = self.test_suite.log_file_for_spec(self.spec)
|
||||
self.tested_file = self.test_suite.tested_file_for_spec(self.spec)
|
||||
pkg_id = self.test_suite.test_pkg_id(self.spec)
|
||||
else:
|
||||
self.test_log_file = fsys.join_path(
|
||||
self.stage.path, _spack_install_test_log)
|
||||
pkg_id = self.spec.format('{name}-{version}-{hash:7}')
|
||||
fsys.touch(self.test_log_file) # Otherwise log_parse complains
|
||||
|
||||
with tty.log.log_output(self.test_log_file, verbose) as logger:
|
||||
with logger.force_echo():
|
||||
tty.msg('Testing package {0}'.format(pkg_id))
|
||||
|
||||
# use debug print levels for log file to record commands
|
||||
old_debug = tty.is_debug()
|
||||
tty.set_debug(True)
|
||||
|
||||
try:
|
||||
yield logger
|
||||
finally:
|
||||
# reset debug level
|
||||
tty.set_debug(old_debug)
|
||||
|
||||
def do_test(self, dirty=False, externals=False):
|
||||
if self.test_requires_compiler:
|
||||
compilers = spack.compilers.compilers_for_spec(
|
||||
@@ -1940,19 +1997,14 @@ def do_test(self, dirty=False, externals=False):
|
||||
self.spec.compiler)
|
||||
return
|
||||
|
||||
# Clear test failures
|
||||
self.test_failures = []
|
||||
self.test_log_file = self.test_suite.log_file_for_spec(self.spec)
|
||||
self.tested_file = self.test_suite.tested_file_for_spec(self.spec)
|
||||
fsys.touch(self.test_log_file) # Otherwise log_parse complains
|
||||
|
||||
kwargs = {
|
||||
'dirty': dirty, 'fake': False, 'context': 'test',
|
||||
'externals': externals
|
||||
}
|
||||
if tty.is_verbose():
|
||||
kwargs['verbose'] = True
|
||||
spack.build_environment.start_build_process(self, test_process, kwargs)
|
||||
spack.build_environment.start_build_process(
|
||||
self, test_process, kwargs)
|
||||
|
||||
def test(self):
|
||||
# Defer tests to virtual and concrete packages
|
||||
@@ -2146,21 +2198,21 @@ def build_log_path(self):
|
||||
to the staging build file until the software is successfully installed,
|
||||
when it points to the file in the installation directory.
|
||||
"""
|
||||
return self.install_log_path if self.installed else self.log_path
|
||||
return self.install_log_path if self.spec.installed else self.log_path
|
||||
|
||||
@classmethod
|
||||
def inject_flags(cls, name, flags):
|
||||
"""
|
||||
flag_handler that injects all flags through the compiler wrapper.
|
||||
"""
|
||||
return (flags, None, None)
|
||||
return flags, None, None
|
||||
|
||||
@classmethod
|
||||
def env_flags(cls, name, flags):
|
||||
"""
|
||||
flag_handler that adds all flags to canonical environment variables.
|
||||
"""
|
||||
return (None, flags, None)
|
||||
return None, flags, None
|
||||
|
||||
@classmethod
|
||||
def build_system_flags(cls, name, flags):
|
||||
@@ -2171,7 +2223,7 @@ def build_system_flags(cls, name, flags):
|
||||
implements it. Currently, AutotoolsPackage and CMakePackage
|
||||
implement it.
|
||||
"""
|
||||
return (None, None, flags)
|
||||
return None, None, flags
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
"""Sets up the build environment for a package.
|
||||
@@ -2326,7 +2378,11 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
|
||||
|
||||
if not force:
|
||||
dependents = spack.store.db.installed_relatives(
|
||||
spec, 'parents', True)
|
||||
spec,
|
||||
direction='parents',
|
||||
transitive=True,
|
||||
deptype=("link", "run"),
|
||||
)
|
||||
if dependents:
|
||||
raise PackageStillNeededError(spec, dependents)
|
||||
|
||||
@@ -2468,10 +2524,10 @@ def _sanity_check_extension(self):
|
||||
extendee_package = self.extendee_spec.package
|
||||
extendee_package._check_extendable()
|
||||
|
||||
if not extendee_package.installed:
|
||||
if not self.extendee_spec.installed:
|
||||
raise ActivationError(
|
||||
"Can only (de)activate extensions for installed packages.")
|
||||
if not self.installed:
|
||||
if not self.spec.installed:
|
||||
raise ActivationError("Extensions must first be installed.")
|
||||
if self.extendee_spec.name not in self.extendees:
|
||||
raise ActivationError("%s does not extend %s!" %
|
||||
@@ -2697,45 +2753,54 @@ def rpath_args(self):
|
||||
"""
|
||||
return " ".join("-Wl,-rpath,%s" % p for p in self.rpath)
|
||||
|
||||
def _run_test_callbacks(self, method_names, callback_type='install'):
|
||||
"""Tries to call all of the listed methods, returning immediately
|
||||
if the list is None."""
|
||||
if method_names is None:
|
||||
return
|
||||
|
||||
fail_fast = spack.config.get('config:fail_fast', False)
|
||||
|
||||
with self._setup_test(verbose=False, externals=False) as logger:
|
||||
# Report running each of the methods in the build log
|
||||
print_test_message(
|
||||
logger, 'Running {0}-time tests'.format(callback_type), True)
|
||||
|
||||
for name in method_names:
|
||||
try:
|
||||
fn = getattr(self, name)
|
||||
|
||||
msg = 'RUN-TESTS: {0}-time tests [{1}]' \
|
||||
.format(callback_type, name),
|
||||
print_test_message(logger, msg, True)
|
||||
|
||||
fn()
|
||||
except AttributeError as e:
|
||||
msg = 'RUN-TESTS: method not implemented [{0}]' \
|
||||
.format(name),
|
||||
print_test_message(logger, msg, True)
|
||||
|
||||
self.test_failures.append((e, msg))
|
||||
if fail_fast:
|
||||
break
|
||||
|
||||
# Raise any collected failures here
|
||||
if self.test_failures:
|
||||
raise TestFailure(self.test_failures)
|
||||
|
||||
@on_package_attributes(run_tests=True)
|
||||
def _run_default_build_time_test_callbacks(self):
|
||||
"""Tries to call all the methods that are listed in the attribute
|
||||
``build_time_test_callbacks`` if ``self.run_tests is True``.
|
||||
|
||||
If ``build_time_test_callbacks is None`` returns immediately.
|
||||
"""
|
||||
if self.build_time_test_callbacks is None:
|
||||
return
|
||||
|
||||
for name in self.build_time_test_callbacks:
|
||||
try:
|
||||
fn = getattr(self, name)
|
||||
except AttributeError:
|
||||
msg = 'RUN-TESTS: method not implemented [{0}]'
|
||||
tty.warn(msg.format(name))
|
||||
else:
|
||||
tty.msg('RUN-TESTS: build-time tests [{0}]'.format(name))
|
||||
fn()
|
||||
self._run_test_callbacks(self.build_time_test_callbacks, 'build')
|
||||
|
||||
@on_package_attributes(run_tests=True)
|
||||
def _run_default_install_time_test_callbacks(self):
|
||||
"""Tries to call all the methods that are listed in the attribute
|
||||
``install_time_test_callbacks`` if ``self.run_tests is True``.
|
||||
|
||||
If ``install_time_test_callbacks is None`` returns immediately.
|
||||
"""
|
||||
if self.install_time_test_callbacks is None:
|
||||
return
|
||||
|
||||
for name in self.install_time_test_callbacks:
|
||||
try:
|
||||
fn = getattr(self, name)
|
||||
except AttributeError:
|
||||
msg = 'RUN-TESTS: method not implemented [{0}]'
|
||||
tty.warn(msg.format(name))
|
||||
else:
|
||||
tty.msg('RUN-TESTS: install-time tests [{0}]'.format(name))
|
||||
fn()
|
||||
self._run_test_callbacks(self.install_time_test_callbacks, 'install')
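A condensed, standalone sketch of the shared callback runner introduced above; the object, the method names, and the RuntimeError used for the batched failure are illustrative only.

def run_test_callbacks(obj, method_names, fail_fast=False):
    failures = []
    for name in method_names or []:
        try:
            fn = getattr(obj, name)
            print('RUN-TESTS: [{0}]'.format(name))
            fn()
        except AttributeError as e:
            # missing method: record the failure instead of aborting the whole run
            failures.append((e, 'method not implemented [{0}]'.format(name)))
            if fail_fast:
                break
    if failures:
        raise RuntimeError(failures)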
|
||||
|
||||
|
||||
def has_test_method(pkg):
|
||||
@@ -2760,27 +2825,21 @@ def has_test_method(pkg):
|
||||
def print_test_message(logger, msg, verbose):
|
||||
if verbose:
|
||||
with logger.force_echo():
|
||||
print(msg)
|
||||
tty.msg(msg)
|
||||
else:
|
||||
print(msg)
|
||||
tty.msg(msg)
|
||||
|
||||
|
||||
def test_process(pkg, kwargs):
|
||||
verbose = kwargs.get('verbose', False)
|
||||
externals = kwargs.get('externals', False)
|
||||
with tty.log.log_output(pkg.test_log_file, verbose) as logger:
|
||||
with logger.force_echo():
|
||||
tty.msg('Testing package {0}'
|
||||
.format(pkg.test_suite.test_pkg_id(pkg.spec)))
|
||||
|
||||
with pkg._setup_test(verbose, externals) as logger:
|
||||
if pkg.spec.external and not externals:
|
||||
print_test_message(logger, 'Skipped external package', verbose)
|
||||
print_test_message(
|
||||
logger, 'Skipped tests for external package', verbose)
|
||||
return
|
||||
|
||||
# use debug print levels for log file to record commands
|
||||
old_debug = tty.is_debug()
|
||||
tty.set_debug(True)
|
||||
|
||||
# run test methods from the package and all virtuals it
|
||||
# provides virtuals have to be deduped by name
|
||||
v_names = list(set([vspec.name
|
||||
@@ -2799,8 +2858,7 @@ def test_process(pkg, kwargs):
|
||||
|
||||
ran_actual_test_function = False
|
||||
try:
|
||||
with fsys.working_dir(
|
||||
pkg.test_suite.test_dir_for_spec(pkg.spec)):
|
||||
with fsys.working_dir(pkg.test_suite.test_dir_for_spec(pkg.spec)):
|
||||
for spec in test_specs:
|
||||
pkg.test_suite.current_test_spec = spec
|
||||
# Fail gracefully if a virtual has no package/tests
|
||||
@@ -2842,7 +2900,9 @@ def test_process(pkg, kwargs):
|
||||
|
||||
# Run the tests
|
||||
ran_actual_test_function = True
|
||||
test_fn(pkg)
|
||||
context = logger.force_echo if verbose else nullcontext
|
||||
with context():
|
||||
test_fn(pkg)
|
||||
|
||||
# If fail-fast was on, we error out above
|
||||
# If we collect errors, raise them in batch here
|
||||
@@ -2850,15 +2910,12 @@ def test_process(pkg, kwargs):
|
||||
raise TestFailure(pkg.test_failures)
|
||||
|
||||
finally:
|
||||
# reset debug level
|
||||
tty.set_debug(old_debug)
|
||||
|
||||
# flag the package as having been tested (i.e., ran one or more
|
||||
# non-pass-only methods
|
||||
if ran_actual_test_function:
|
||||
fsys.touch(pkg.tested_file)
|
||||
else:
|
||||
print_test_message(logger, 'No tests to run', verbose)
|
||||
print_test_message(logger, 'No tests to run', verbose)
|
||||
|
||||
|
||||
inject_flags = PackageBase.inject_flags
|
||||
|
||||
@@ -123,11 +123,11 @@ def accept(self, id):
|
||||
|
||||
def next_token_error(self, message):
|
||||
"""Raise an error about the next token in the stream."""
|
||||
raise ParseError(message, self.text, self.token.end)
|
||||
raise ParseError(message, self.text[0], self.token.end)
|
||||
|
||||
def last_token_error(self, message):
|
||||
"""Raise an error about the previous token in the stream."""
|
||||
raise ParseError(message, self.text, self.token.start)
|
||||
raise ParseError(message, self.text[0], self.token.start)
|
||||
|
||||
def unexpected_token(self):
|
||||
self.next_token_error("Unexpected token: '%s'" % self.next.value)
|
||||
|
||||
@@ -43,8 +43,12 @@
|
||||
hooks_path = os.path.join(module_path, "hooks")
|
||||
opt_path = os.path.join(prefix, "opt")
|
||||
share_path = os.path.join(prefix, "share", "spack")
|
||||
etc_path = os.path.join(prefix, "etc")
|
||||
etc_path = os.path.join(prefix, "etc", "spack")
|
||||
|
||||
#
|
||||
# Things in $spack/etc/spack
|
||||
#
|
||||
default_license_dir = os.path.join(etc_path, "licenses")
|
||||
|
||||
#
|
||||
# Things in $spack/var/spack
|
||||
|
||||
@@ -25,6 +25,7 @@
|
||||
from spack.build_systems.cuda import CudaPackage
|
||||
from spack.build_systems.gnu import GNUMirrorPackage
|
||||
from spack.build_systems.intel import IntelPackage
|
||||
from spack.build_systems.lua import LuaPackage
|
||||
from spack.build_systems.makefile import MakefilePackage
|
||||
from spack.build_systems.maven import MavenPackage
|
||||
from spack.build_systems.meson import MesonPackage
|
||||
|
||||
@@ -355,9 +355,17 @@ def list_packages(rev):
|
||||
ref = rev.replace('...', '')
|
||||
rev = git('merge-base', ref, 'HEAD', output=str).strip()
|
||||
|
||||
output = git('ls-tree', '--name-only', rev, output=str)
|
||||
return sorted(line for line in output.split('\n')
|
||||
if line and not line.startswith('.'))
|
||||
output = git('ls-tree', '-r', '--name-only', rev, output=str)
|
||||
|
||||
# recursively list the packages directory
|
||||
package_paths = [
|
||||
line.split(os.sep) for line in output.split("\n") if line.endswith("package.py")
|
||||
]
|
||||
|
||||
# take the directory names with one-level-deep package files
|
||||
package_names = sorted(set([line[0] for line in package_paths if len(line) == 2]))
|
||||
|
||||
return package_names
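To make the one-level-deep filter concrete, a small standalone example of the parsing done above; the listing is made up and assumes a POSIX os.sep, as in git's output.

import os

output = "\n".join([
    "zlib/package.py",
    "zlib/fix-cmake.patch",
    "py-numpy/package.py",
    "broken/nested/package.py",
])

package_paths = [
    line.split(os.sep) for line in output.split("\n") if line.endswith("package.py")
]
package_names = sorted(set([line[0] for line in package_paths if len(line) == 2]))
print(package_names)   # ['py-numpy', 'zlib']; the nested package.py is filtered out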
|
||||
|
||||
|
||||
def diff_packages(rev1, rev2):
|
||||
|
||||
@@ -112,8 +112,7 @@ def __enter__(self):
|
||||
# Check which specs are already installed and mark them as skipped
|
||||
# only for install_task
|
||||
if self.do_fn == '_install_task':
|
||||
for dep in filter(lambda x: x.package.installed,
|
||||
input_spec.traverse()):
|
||||
for dep in filter(lambda x: x.installed, input_spec.traverse()):
|
||||
package = {
|
||||
'name': dep.name,
|
||||
'id': dep.dag_hash(),
|
||||
@@ -140,7 +139,7 @@ def wrapper(instance, *args, **kwargs):
|
||||
raise Exception
|
||||
|
||||
# We accounted before for what is already installed
|
||||
installed_already = pkg.installed
|
||||
installed_already = pkg.spec.installed
|
||||
|
||||
package = {
|
||||
'name': pkg.name,
|
||||
|
||||
@@ -38,13 +38,13 @@ def rewire(spliced_spec):
|
||||
nodes in the DAG of that spec."""
|
||||
assert spliced_spec.spliced
|
||||
for spec in spliced_spec.traverse(order='post', root=True):
|
||||
if not spec.build_spec.package.installed:
|
||||
if not spec.build_spec.installed:
|
||||
# TODO: May want to change this at least for the root spec...
|
||||
# spec.build_spec.package.do_install(force=True)
|
||||
raise PackageNotInstalledError(spliced_spec,
|
||||
spec.build_spec,
|
||||
spec)
|
||||
if spec.build_spec is not spec and not spec.package.installed:
|
||||
if spec.build_spec is not spec and not spec.installed:
|
||||
explicit = spec is spliced_spec
|
||||
rewire_node(spec, explicit)
|
||||
|
||||
@@ -95,7 +95,8 @@ def rewire_node(spec, explicit):
|
||||
spec.prefix)
|
||||
relocate.relocate_text_bin(binaries=bins_to_relocate,
|
||||
prefixes=prefix_to_prefix)
|
||||
# copy package into place (shutil.copytree)
|
||||
# Copy package into place, except for spec.json (because spec.json
|
||||
# describes the old spec and not the new spliced spec).
|
||||
shutil.copytree(os.path.join(tempdir, spec.dag_hash()), spec.prefix,
|
||||
ignore=shutil.ignore_patterns('spec.json',
|
||||
'install_manifest.json'))
|
||||
@@ -104,7 +105,10 @@ def rewire_node(spec, explicit):
|
||||
spec.build_spec.prefix,
|
||||
spec.prefix)
|
||||
shutil.rmtree(tempdir)
|
||||
# handle all metadata changes; don't copy over spec.json file in .spack/
|
||||
# Above, we did not copy spec.json: instead, here we write the new
|
||||
# (spliced) spec into spec.json, without this, Database.add would fail on
|
||||
# the next line (because it checks the spec.json in the prefix against the
|
||||
# spec being added to look for mismatches)
|
||||
spack.store.layout.write_spec(spec, spack.store.layout.spec_file_path(spec))
|
||||
# add to database, not sure about explicit
|
||||
spack.store.db.add(spec, spack.store.layout, explicit=explicit)
|
||||
|
||||
@@ -4,6 +4,8 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""This module contains jsonschema files for all of Spack's YAML formats."""
|
||||
|
||||
import warnings
|
||||
|
||||
import six
|
||||
|
||||
import llnl.util.lang
|
||||
@@ -49,10 +51,12 @@ def _deprecated_properties(validator, deprecated, instance, schema):
|
||||
msg = msg_str_or_func.format(properties=deprecated_properties)
|
||||
else:
|
||||
msg = msg_str_or_func(instance, deprecated_properties)
|
||||
if msg is None:
|
||||
return
|
||||
|
||||
is_error = deprecated['error']
|
||||
if not is_error:
|
||||
llnl.util.tty.warn(msg)
|
||||
warnings.warn(msg)
|
||||
else:
|
||||
import jsonschema
|
||||
yield jsonschema.ValidationError(msg)
|
||||
|
||||
@@ -15,6 +15,25 @@
|
||||
'additionalProperties': False,
|
||||
'properties': {
|
||||
'reuse': {'type': 'boolean'},
|
||||
'minimal': {'type': 'boolean'},
|
||||
'targets': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'host_compatible': {'type': 'boolean'},
|
||||
'granularity': {
|
||||
'type': 'string',
|
||||
'enum': ['generic', 'microarchitectures']
|
||||
}
|
||||
}
|
||||
},
|
||||
'unify': {
|
||||
'type': 'boolean'
|
||||
# Todo: add when_possible.
|
||||
# 'oneOf': [
|
||||
# {'type': 'boolean'},
|
||||
# {'type': 'string', 'enum': ['when_possible']}
|
||||
# ]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
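Illustrative only: a configuration fragment this schema would accept, written as the dict Spack's YAML loader would produce. The values are examples, not defaults, and the 'minimal' option is specific to this branch.

concretizer_config = {
    'concretizer': {
        'reuse': True,
        'minimal': False,
        'unify': True,
        'targets': {
            'host_compatible': True,
            'granularity': 'generic',
        },
    }
}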
|
||||
|
||||
@@ -56,6 +56,7 @@
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'}
|
||||
},
|
||||
'license_dir': {'type': 'string'},
|
||||
'source_cache': {'type': 'string'},
|
||||
'misc_cache': {'type': 'string'},
|
||||
'connect_timeout': {'type': 'integer', 'minimum': 0},
|
||||
@@ -90,7 +91,16 @@
|
||||
'additional_external_search_paths': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'}
|
||||
}
|
||||
},
|
||||
'flags': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'keep_werror': {
|
||||
'type': 'string',
|
||||
'enum': ['all', 'specific', 'none'],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
'deprecatedProperties': {
|
||||
'properties': ['module_roots'],
|
||||
|
||||
lib/spack/spack/schema/cray_manifest.py (new file, 131 lines)
@@ -0,0 +1,131 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
"""Schema for Cray descriptive manifest: this describes a set of
|
||||
installed packages on the system and also specifies dependency
|
||||
relationships between them (so this provides more information than
|
||||
external entries in packages configuration).
|
||||
|
||||
This does not specify a configuration - it is an input format
|
||||
that is consumed and transformed into Spack DB records.
|
||||
"""
|
||||
|
||||
schema = {
|
||||
"$schema": "http://json-schema.org/schema#",
|
||||
"title": "CPE manifest schema",
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"_meta": {
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"file-type": {"type": "string", "minLength": 1},
|
||||
"cpe-version": {"type": "string", "minLength": 1},
|
||||
"system-type": {"type": "string", "minLength": 1},
|
||||
"schema-version": {"type": "string", "minLength": 1},
|
||||
}
|
||||
},
|
||||
"compilers": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"name": {"type": "string", "minLength": 1},
|
||||
"version": {"type": "string", "minLength": 1},
|
||||
"prefix": {"type": "string", "minLength": 1},
|
||||
"executables": {
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"cc": {"type": "string", "minLength": 1},
|
||||
"cxx": {"type": "string", "minLength": 1},
|
||||
"fc": {"type": "string", "minLength": 1}
|
||||
}
|
||||
},
|
||||
"arch": {
|
||||
"type": "object",
|
||||
"required": ["os", "target"],
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"os": {"type": "string", "minLength": 1},
|
||||
"target": {"type": "string", "minLength": 1}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"specs": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name",
|
||||
"version",
|
||||
"arch",
|
||||
"compiler",
|
||||
"prefix",
|
||||
"hash"],
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"name": {"type": "string", "minLength": 1},
|
||||
"version": {"type": "string", "minLength": 1},
|
||||
"arch": {
|
||||
"type": "object",
|
||||
"required": ["platform", "platform_os", "target"],
|
||||
"additioanlProperties": False,
|
||||
"properties": {
|
||||
"platform": {"type": "string", "minLength": 1},
|
||||
"platform_os": {"type": "string", "minLength": 1},
|
||||
"target": {
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"required": ["name"],
|
||||
"properties": {
|
||||
"name": {"type": "string", "minLength": 1}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"type": "object",
|
||||
"required": ["name", "version"],
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"name": {"type": "string", "minLength": 1},
|
||||
"version": {"type": "string", "minLength": 1}
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"\\w[\\w-]*": {
|
||||
"type": "object",
|
||||
"required": ["hash"],
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"hash": {"type": "string", "minLength": 1},
|
||||
"type": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string", "minLength": 1}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"prefix": {
|
||||
"type": "string", "minLength": 1},
|
||||
"rpm": {"type": "string", "minLength": 1},
|
||||
"hash": {"type": "string", "minLength": 1},
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
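A minimal document that should validate against this schema; every value is a placeholder, and the imports assume the new module added in this change plus the third-party jsonschema package.

import jsonschema

import spack.schema.cray_manifest as cray_manifest

manifest = {
    "_meta": {
        "file-type": "cray-pe-json",
        "cpe-version": "22.06",
        "system-type": "EX",
        "schema-version": "1.3",
    },
    "compilers": [],
    "specs": [
        {
            "name": "zlib",
            "version": "1.2.11",
            "arch": {
                "platform": "cray",
                "platform_os": "sles15",
                "target": {"name": "zen2"},
            },
            "compiler": {"name": "gcc", "version": "10.3.0"},
            "prefix": "/opt/cray/pe/zlib/1.2.11",
            "hash": "deadbeefdeadbeefdeadbeef",
        }
    ],
}

jsonschema.validate(instance=manifest, schema=cray_manifest.schema)   # raises on mismatch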
|
||||
@@ -16,6 +16,24 @@
|
||||
import spack.schema.packages
|
||||
import spack.schema.projections
|
||||
|
||||
warned_about_concretization = False
|
||||
|
||||
|
||||
def deprecate_concretization(instance, props):
|
||||
global warned_about_concretization
|
||||
if warned_about_concretization:
|
||||
return None
|
||||
# Deprecate `spack:concretization` in favor of `spack:concretizer:unify`.
|
||||
concretization_to_unify = {'together': 'true', 'separately': 'false'}
|
||||
concretization = instance['concretization']
|
||||
unify = concretization_to_unify[concretization]
|
||||
|
||||
return (
|
||||
'concretization:{} is deprecated and will be removed in Spack 0.19 in favor of '
|
||||
'the new concretizer:unify:{} config option.'.format(concretization, unify)
|
||||
)
|
||||
|
||||
|
||||
#: legal first keys in the schema
|
||||
keys = ('spack', 'env')
|
||||
|
||||
@@ -61,6 +79,11 @@
|
||||
'type': 'object',
|
||||
'default': {},
|
||||
'additionalProperties': False,
|
||||
'deprecatedProperties': {
|
||||
'properties': ['concretization'],
|
||||
'message': deprecate_concretization,
|
||||
'error': False
|
||||
},
|
||||
'properties': union_dicts(
|
||||
# merged configuration scope schemas
|
||||
spack.schema.merged.properties,
|
||||
@@ -169,11 +192,33 @@ def update(data):
|
||||
Returns:
|
||||
True if data was changed, False otherwise
|
||||
"""
|
||||
updated = False
|
||||
if 'include' in data:
|
||||
msg = ("included configuration files should be updated manually"
|
||||
" [files={0}]")
|
||||
warnings.warn(msg.format(', '.join(data['include'])))
|
||||
|
||||
if 'packages' in data:
|
||||
return spack.schema.packages.update(data['packages'])
|
||||
return False
|
||||
updated |= spack.schema.packages.update(data['packages'])
|
||||
|
||||
# Spack 0.19 drops support for `spack:concretization` in favor of
|
||||
# `spack:concretizer:unify`. Here we provide an upgrade path that changes the former
|
||||
# into the latter, or warns when there's an ambiguity. Note that Spack 0.17 is not
|
||||
# forward compatible with `spack:concretizer:unify`.
|
||||
if 'concretization' in data:
|
||||
has_unify = 'unify' in data.get('concretizer', {})
|
||||
to_unify = {'together': True, 'separately': False}
|
||||
unify = to_unify[data['concretization']]
|
||||
|
||||
if has_unify and data['concretizer']['unify'] != unify:
|
||||
warnings.warn(
|
||||
'The following configuration conflicts: '
|
||||
'`spack:concretization:{}` and `spack:concretizer:unify:{}`'
|
||||
'. Please update manually.'.format(
|
||||
data['concretization'], data['concretizer']['unify']))
|
||||
else:
|
||||
data.update({'concretizer': {'unify': unify}})
|
||||
data.pop('concretization')
|
||||
updated = True
|
||||
|
||||
return updated
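A standalone sketch of the concretization-to-unify upgrade path above, applied to a toy environment dict; the packages update and the include-files warning are omitted.

import warnings

def upgrade_concretization(data):
    updated = False
    if 'concretization' in data:
        to_unify = {'together': True, 'separately': False}
        unify = to_unify[data['concretization']]
        has_unify = 'unify' in data.get('concretizer', {})
        if has_unify and data['concretizer']['unify'] != unify:
            warnings.warn('conflicting concretization and concretizer:unify settings')
        else:
            data.update({'concretizer': {'unify': unify}})
            data.pop('concretization')
            updated = True
    return updated

env = {'concretization': 'together'}
print(upgrade_concretization(env), env)   # True {'concretizer': {'unify': True}}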
|
||||
|
||||
@@ -13,6 +13,7 @@
|
||||
import spack.schema.bootstrap
|
||||
import spack.schema.cdash
|
||||
import spack.schema.compilers
|
||||
import spack.schema.concretizer
|
||||
import spack.schema.config
|
||||
import spack.schema.container
|
||||
import spack.schema.gitlab_ci
|
||||
@@ -27,6 +28,7 @@
|
||||
spack.schema.bootstrap.properties,
|
||||
spack.schema.cdash.properties,
|
||||
spack.schema.compilers.properties,
|
||||
spack.schema.concretizer.properties,
|
||||
spack.schema.config.properties,
|
||||
spack.schema.container.properties,
|
||||
spack.schema.gitlab_ci.properties,
|
||||
|
||||
@@ -110,9 +110,12 @@
|
||||
'properties': {
|
||||
'name': {'type': 'string'},
|
||||
'hash': {'type': 'string'},
|
||||
'package_hash': {'type': 'string'},
|
||||
|
||||
# these hashes were used on some specs prior to 0.18
|
||||
'full_hash': {'type': 'string'},
|
||||
'build_hash': {'type': 'string'},
|
||||
'package_hash': {'type': 'string'},
|
||||
|
||||
'version': {
|
||||
'oneOf': [
|
||||
{'type': 'string'},
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
import itertools
|
||||
import os
|
||||
import pprint
|
||||
import re
|
||||
import types
|
||||
import warnings
|
||||
|
||||
@@ -55,14 +56,6 @@
|
||||
parse_files = None
|
||||
|
||||
|
||||
#: whether we should write ASP unsat cores quickly in debug mode when the cores
|
||||
#: may be very large or take the time (sometimes hours) to minimize them
|
||||
minimize_cores = True
|
||||
|
||||
#: whether we should include all facts in the unsat cores or only error messages
|
||||
full_cores = False
|
||||
|
||||
|
||||
# backward compatibility functions for clingo ASTs
|
||||
def ast_getter(*names):
|
||||
def getter(node):
|
||||
@@ -77,20 +70,25 @@ def getter(node):
|
||||
ast_type = ast_getter("ast_type", "type")
|
||||
ast_sym = ast_getter("symbol", "term")
|
||||
|
||||
#: Order of precedence for version origins. Topmost types are preferred.
|
||||
version_origin_fields = [
|
||||
'spec',
|
||||
'external',
|
||||
'packages_yaml',
|
||||
'package_py',
|
||||
'installed',
|
||||
]
|
||||
|
||||
#: Look up version precedence strings by enum id
|
||||
version_origin_str = {
|
||||
i: name for i, name in enumerate(version_origin_fields)
|
||||
}
|
||||
|
||||
#: Enumeration like object to mark version provenance
|
||||
version_provenance = collections.namedtuple( # type: ignore
|
||||
'VersionProvenance', ['external', 'packages_yaml', 'package_py', 'spec']
|
||||
)(spec=0, external=1, packages_yaml=2, package_py=3)
|
||||
|
||||
#: String representation of version origins, to emit legible
|
||||
# facts for the ASP solver
|
||||
version_origin_str = {
|
||||
0: 'spec',
|
||||
1: 'external',
|
||||
2: 'packages_yaml',
|
||||
3: 'package_py'
|
||||
}
|
||||
'VersionProvenance',
|
||||
version_origin_fields,
|
||||
)(**{name: i for i, name in enumerate(version_origin_fields)})
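As a quick standalone check of the construction just above, the enumeration and the id-to-name lookup come out like this:

import collections

version_origin_fields = ['spec', 'external', 'packages_yaml', 'package_py', 'installed']
version_origin_str = {i: name for i, name in enumerate(version_origin_fields)}
version_provenance = collections.namedtuple(
    'VersionProvenance', version_origin_fields
)(**{name: i for i, name in enumerate(version_origin_fields)})

assert version_provenance.spec == 0 and version_provenance.installed == 4
assert version_origin_str[version_provenance.installed] == 'installed'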
|
||||
|
||||
#: Named tuple to contain information on declared versions
|
||||
DeclaredVersion = collections.namedtuple(
|
||||
@@ -109,7 +107,7 @@ def getter(node):
|
||||
|
||||
|
||||
def build_criteria_names(costs, tuples):
|
||||
"""Construct an ordered mapping from criteria names to indices in the cost list."""
|
||||
"""Construct an ordered mapping from criteria names to costs."""
|
||||
# pull optimization criteria names out of the solution
|
||||
priorities_names = []
|
||||
|
||||
@@ -136,7 +134,10 @@ def build_criteria_names(costs, tuples):
|
||||
# sort the criteria by priority
|
||||
priorities_names = sorted(priorities_names, reverse=True)
|
||||
|
||||
assert len(priorities_names) == len(costs), "Wrong number of optimization criteria!"
|
||||
# We only have opt-criterion values for non-error types
|
||||
# error type criteria are excluded (they come first)
|
||||
error_criteria = len(costs) - len(priorities_names)
|
||||
costs = costs[error_criteria:]
|
||||
|
||||
# split list into three parts: build criteria, fixed criteria, non-build criteria
|
||||
num_criteria = len(priorities_names)
|
||||
@@ -149,12 +150,12 @@ def build_criteria_names(costs, tuples):
|
||||
# mapping from priority to index in cost list
|
||||
indices = dict((p, i) for i, (p, n) in enumerate(priorities_names))
|
||||
|
||||
# make a list that has each name with its build and non-build priority
|
||||
# make a list that has each name with its build and non-build costs
|
||||
criteria = [
|
||||
(p - fixed_priority_offset + num_build, None, name) for p, name in fixed
|
||||
(costs[p - fixed_priority_offset + num_build], None, name) for p, name in fixed
|
||||
]
|
||||
for (i, name), (b, _) in zip(installed, build):
|
||||
criteria.append((indices[i], indices[b], name))
|
||||
criteria.append((costs[indices[i]], costs[indices[b]], name))
|
||||
|
||||
return criteria
|
||||
|
||||
@@ -326,9 +327,6 @@ def format_core(self, core):
|
||||
core_symbols = []
|
||||
for atom in core:
|
||||
sym = symbols[atom]
|
||||
if sym.name in ("rule", "error"):
|
||||
# these are special symbols we use to get messages in the core
|
||||
sym = sym.arguments[0].string
|
||||
core_symbols.append(sym)
|
||||
|
||||
return sorted(str(symbol) for symbol in core_symbols)
|
||||
@@ -387,7 +385,7 @@ def raise_if_unsat(self):
|
||||
"""
|
||||
Raise an appropriate error if the result is unsatisfiable.
|
||||
|
||||
The error is a UnsatisfiableSpecError, and includes the minimized cores
|
||||
The error is an InternalConcretizerError, and includes the minimized cores
|
||||
resulting from the solve, formatted to be human readable.
|
||||
"""
|
||||
if self.satisfiable:
|
||||
@@ -397,12 +395,8 @@ def raise_if_unsat(self):
|
||||
if len(constraints) == 1:
|
||||
constraints = constraints[0]
|
||||
|
||||
if minimize_cores:
|
||||
conflicts = self.format_minimal_cores()
|
||||
else:
|
||||
conflicts = self.format_cores()
|
||||
|
||||
raise UnsatisfiableSpecError(constraints, conflicts=conflicts)
|
||||
conflicts = self.format_minimal_cores()
|
||||
raise InternalConcretizerError(constraints, conflicts=conflicts)
|
||||
|
||||
@property
|
||||
def specs(self):
|
||||
@@ -502,13 +496,11 @@ def h2(self, name):
|
||||
def newline(self):
|
||||
self.out.write('\n')
|
||||
|
||||
def fact(self, head, assumption=False):
|
||||
def fact(self, head):
|
||||
"""ASP fact (a rule without a body).
|
||||
|
||||
Arguments:
|
||||
head (AspFunction): ASP function to generate as fact
|
||||
assumption (bool): If True and using cores, use this fact as a
|
||||
choice point in ASP and include it in unsatisfiable cores
|
||||
"""
|
||||
symbol = head.symbol() if hasattr(head, 'symbol') else head
|
||||
|
||||
@@ -516,10 +508,9 @@ def fact(self, head, assumption=False):
|
||||
|
||||
atom = self.backend.add_atom(symbol)
|
||||
|
||||
# with `--show-cores=full or --show-cores=minimized, make all facts
|
||||
# choices/assumptions, otherwise only if assumption=True
|
||||
choice = self.cores and (full_cores or assumption)
|
||||
|
||||
# Only functions relevant for constructing bug reports for bad error messages
|
||||
# are assumptions, and only when using cores.
|
||||
choice = self.cores and symbol.name == 'internal_error'
|
||||
self.backend.add_rule([atom], [], choice=choice)
|
||||
if choice:
|
||||
self.assumptions.append(atom)
|
||||
@@ -577,9 +568,10 @@ def visit(node):
|
||||
for term in node.body:
|
||||
if ast_type(term) == ASTType.Literal:
|
||||
if ast_type(term.atom) == ASTType.SymbolicAtom:
|
||||
if ast_sym(term.atom).name == "error":
|
||||
name = ast_sym(term.atom).name
|
||||
if name == 'internal_error':
|
||||
arg = ast_sym(ast_sym(term.atom).arguments[0])
|
||||
self.fact(fn.error(arg.string), assumption=True)
|
||||
self.fact(AspFunction(name)(arg.string))
|
||||
|
||||
path = os.path.join(parent_dir, 'concretize.lp')
|
||||
parse_files([path], visit)
|
||||
@@ -662,7 +654,7 @@ def stringify(x):
|
||||
class SpackSolverSetup(object):
|
||||
"""Class to set up and run a Spack concretization solve."""
|
||||
|
||||
def __init__(self, reuse=False, tests=False):
|
||||
def __init__(self, reuse=None, minimal=None, tests=False):
|
||||
self.gen = None # set by setup()
|
||||
|
||||
self.declared_versions = {}
|
||||
@@ -687,24 +679,22 @@ def __init__(self, reuse=False, tests=False):
|
||||
# Caches to optimize the setup phase of the solver
|
||||
self.target_specs_cache = None
|
||||
|
||||
# whether to add installed/binary hashes to the solve
|
||||
self.reuse = reuse
|
||||
|
||||
# whether to add installed/binary hashes to the solve
|
||||
# Solver paramters that affect setup -- see Solver documentation
|
||||
self.reuse = spack.config.get(
|
||||
"concretizer:reuse", False) if reuse is None else reuse
|
||||
self.minimal = spack.config.get(
|
||||
"concretizer:minimal", False) if minimal is None else minimal
|
||||
self.tests = tests
|
||||
|
||||
def pkg_version_rules(self, pkg):
|
||||
"""Output declared versions of a package.
|
||||
|
||||
This uses self.possible_versions so that we include any versions
|
||||
This uses self.declared_versions so that we include any versions
|
||||
that arise from a spec.
|
||||
"""
|
||||
def key_fn(version):
|
||||
# Origins are sorted by order of importance:
|
||||
# 1. Spec from command line
|
||||
# 2. Externals
|
||||
# 3. Package preferences
|
||||
# 4. Directives in package.py
|
||||
# Origins are sorted by precedence defined in `version_origin_str`,
|
||||
# then by order added.
|
||||
return version.origin, version.idx
|
||||
|
||||
pkg = packagize(pkg)
|
||||
@@ -735,7 +725,7 @@ def spec_versions(self, spec):
|
||||
|
||||
# record all version constraints for later
|
||||
self.version_constraints.add((spec.name, spec.versions))
|
||||
return [fn.version_satisfies(spec.name, spec.versions)]
|
||||
return [fn.node_version_satisfies(spec.name, spec.versions)]
|
||||
|
||||
def target_ranges(self, spec, single_target_fn):
|
||||
target = spec.architecture.target
|
||||
@@ -748,13 +738,24 @@ def target_ranges(self, spec, single_target_fn):
|
||||
return [fn.node_target_satisfies(spec.name, target)]
|
||||
|
||||
def conflict_rules(self, pkg):
|
||||
default_msg = "{0} '{1}' conflicts with '{2}'"
|
||||
no_constraint_msg = "{0} conflicts with '{1}'"
|
||||
for trigger, constraints in pkg.conflicts.items():
|
||||
trigger_id = self.condition(spack.spec.Spec(trigger), name=pkg.name)
|
||||
self.gen.fact(fn.conflict_trigger(trigger_id))
|
||||
trigger_msg = "conflict trigger %s" % str(trigger)
|
||||
trigger_id = self.condition(
|
||||
spack.spec.Spec(trigger), name=pkg.name, msg=trigger_msg)
|
||||
|
||||
for constraint, _ in constraints:
|
||||
constraint_id = self.condition(constraint, name=pkg.name)
|
||||
self.gen.fact(fn.conflict(pkg.name, trigger_id, constraint_id))
|
||||
for constraint, conflict_msg in constraints:
|
||||
if conflict_msg is None:
|
||||
if constraint == spack.spec.Spec():
|
||||
conflict_msg = no_constraint_msg.format(pkg.name, trigger)
|
||||
else:
|
||||
conflict_msg = default_msg.format(pkg.name, trigger, constraint)
|
||||
constraint_msg = "conflict constraint %s" % str(constraint)
|
||||
constraint_id = self.condition(
|
||||
constraint, name=pkg.name, msg=constraint_msg)
|
||||
self.gen.fact(
|
||||
fn.conflict(pkg.name, trigger_id, constraint_id, conflict_msg))
|
||||
self.gen.newline()
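The default messages built above, evaluated on made-up package and constraint names:

default_msg = "{0} '{1}' conflicts with '{2}'"
no_constraint_msg = "{0} conflicts with '{1}'"
print(default_msg.format('hdf5', '+mpi', '%gcc@:4'))    # hdf5 '+mpi' conflicts with '%gcc@:4'
print(no_constraint_msg.format('openssl', '%nvhpc'))    # openssl conflicts with '%nvhpc'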
|
||||
|
||||
def available_compilers(self):
|
||||
@@ -827,7 +828,7 @@ def package_compiler_defaults(self, pkg):
|
||||
pkg.name, cspec.name, cspec.version, -i * 100
|
||||
))
|
||||
|
||||
def pkg_rules(self, pkg, tests):
|
||||
def pkg_rules(self, pkg):
|
||||
pkg = packagize(pkg)
|
||||
|
||||
# versions
|
||||
@@ -838,9 +839,18 @@ def pkg_rules(self, pkg, tests):
|
||||
for name, entry in sorted(pkg.variants.items()):
|
||||
variant, when = entry
|
||||
|
||||
for w in when:
|
||||
cond_id = self.condition(w, name=pkg.name)
|
||||
self.gen.fact(fn.variant_condition(cond_id, pkg.name, name))
|
||||
if spack.spec.Spec() in when:
|
||||
# unconditional variant
|
||||
self.gen.fact(fn.variant(pkg.name, name))
|
||||
else:
|
||||
# conditional variant
|
||||
for w in when:
|
||||
msg = "%s has variant %s" % (pkg.name, name)
|
||||
if str(w):
|
||||
msg += " when %s" % w
|
||||
|
||||
cond_id = self.condition(w, name=pkg.name, msg=msg)
|
||||
self.gen.fact(fn.variant_condition(cond_id, pkg.name, name))
|
||||
|
||||
single_value = not variant.multi
|
||||
if single_value:
|
||||
@@ -883,7 +893,9 @@ def pkg_rules(self, pkg, tests):
|
||||
imposed = spack.spec.Spec(value.when)
|
||||
imposed.name = pkg.name
|
||||
self.condition(
|
||||
required_spec=required, imposed_spec=imposed, name=pkg.name
|
||||
required_spec=required, imposed_spec=imposed, name=pkg.name,
|
||||
msg="%s variant %s value %s when %s" % (
|
||||
pkg.name, name, value, when)
|
||||
)
|
||||
|
||||
if variant.sticky:
|
||||
@@ -911,7 +923,7 @@ def pkg_rules(self, pkg, tests):
|
||||
)
|
||||
)
|
||||
|
||||
def condition(self, required_spec, imposed_spec=None, name=None):
|
||||
def condition(self, required_spec, imposed_spec=None, name=None, msg=None):
|
||||
"""Generate facts for a dependency or virtual provider condition.
|
||||
|
||||
Arguments:
|
||||
@@ -920,7 +932,7 @@ def condition(self, required_spec, imposed_spec=None, name=None):
|
||||
are imposed when this condition is triggered
|
||||
name (str or None): name for `required_spec` (required if
|
||||
required_spec is anonymous, ignored if not)
|
||||
|
||||
msg (str or None): description of the condition
|
||||
Returns:
|
||||
int: id of the condition created by this function
|
||||
"""
|
||||
@@ -929,7 +941,7 @@ def condition(self, required_spec, imposed_spec=None, name=None):
|
||||
assert named_cond.name, "must provide name for anonymous condtions!"
|
||||
|
||||
condition_id = next(self._condition_id_counter)
|
||||
self.gen.fact(fn.condition(condition_id))
|
||||
self.gen.fact(fn.condition(condition_id, msg))
|
||||
|
||||
# requirements trigger the condition
|
||||
requirements = self.spec_clauses(
|
||||
@@ -961,7 +973,8 @@ def package_provider_rules(self, pkg):
|
||||
|
||||
for provided, whens in pkg.provided.items():
|
||||
for when in whens:
|
||||
condition_id = self.condition(when, provided, pkg.name)
|
||||
msg = '%s provides %s when %s' % (pkg.name, provided, when)
|
||||
condition_id = self.condition(when, provided, pkg.name, msg)
|
||||
self.gen.fact(fn.provider_condition(
|
||||
condition_id, when.name, provided.name
|
||||
))
|
||||
@@ -985,7 +998,11 @@ def package_dependencies_rules(self, pkg):
|
||||
if not deptypes:
|
||||
continue
|
||||
|
||||
condition_id = self.condition(cond, dep.spec, pkg.name)
|
||||
msg = '%s depends on %s' % (pkg.name, dep.spec.name)
|
||||
if cond != spack.spec.Spec():
|
||||
msg += ' when %s' % cond
|
||||
|
||||
condition_id = self.condition(cond, dep.spec, pkg.name, msg)
|
||||
self.gen.fact(fn.dependency_condition(
|
||||
condition_id, pkg.name, dep.spec.name
|
||||
))
|
||||
@@ -1065,7 +1082,8 @@ def external_packages(self):
|
||||
|
||||
# Declare external conditions with a local index into packages.yaml
|
||||
for local_idx, spec in enumerate(external_specs):
|
||||
condition_id = self.condition(spec)
|
||||
msg = '%s available as external when satisfying %s' % (spec.name, spec)
|
||||
condition_id = self.condition(spec, msg=msg)
|
||||
self.gen.fact(
|
||||
fn.possible_external(condition_id, pkg_name, local_idx)
|
||||
)
|
||||
@@ -1147,7 +1165,14 @@ def spec_clauses(self, *args, **kwargs):
|
||||
raise RuntimeError(msg)
|
||||
return clauses
|
||||
|
||||
def _spec_clauses(self, spec, body=False, transitive=True, expand_hashes=False):
|
||||
def _spec_clauses(
|
||||
self,
|
||||
spec,
|
||||
body=False,
|
||||
transitive=True,
|
||||
expand_hashes=False,
|
||||
concrete_build_deps=False,
|
||||
):
|
||||
"""Return a list of clauses for a spec mandates are true.
|
||||
|
||||
Arguments:
|
||||
@@ -1158,6 +1183,8 @@ def _spec_clauses(self, spec, body=False, transitive=True, expand_hashes=False):
|
||||
dependencies (default True)
|
||||
expand_hashes (bool): if True, descend into hashes of concrete specs
|
||||
(default False)
|
||||
concrete_build_deps (bool): if False, do not include pure build deps
|
||||
of concrete specs (as they have no effect on runtime constraints)
|
||||
|
||||
Normally, if called with ``transitive=True``, ``spec_clauses()`` just generates
|
||||
hashes for the dependency requirements of concrete specs. If ``expand_hashes``
|
||||
@@ -1265,18 +1292,34 @@ class Body(object):
|
||||
|
||||
# add all clauses from dependencies
|
||||
if transitive:
|
||||
if spec.concrete:
|
||||
# TODO: We need to distinguish 2 specs from the same package later
|
||||
for edge in spec.edges_to_dependencies():
|
||||
for dtype in edge.deptypes:
|
||||
clauses.append(fn.depends_on(spec.name, edge.spec.name, dtype))
|
||||
# TODO: Eventually distinguish 2 deps on the same pkg (build and link)
|
||||
for dspec in spec.edges_to_dependencies():
|
||||
dep = dspec.spec
|
||||
|
||||
for dep in spec.traverse(root=False):
|
||||
if spec.concrete:
|
||||
clauses.append(fn.hash(dep.name, dep.dag_hash()))
|
||||
# We know dependencies are real for concrete specs. For abstract
|
||||
# specs they just mean the dep is somehow in the DAG.
|
||||
for dtype in dspec.deptypes:
|
||||
# skip build dependencies of already-installed specs
|
||||
if concrete_build_deps or dtype != "build":
|
||||
clauses.append(fn.depends_on(spec.name, dep.name, dtype))
|
||||
|
||||
# imposing hash constraints for all but pure build deps of
|
||||
# already-installed concrete specs.
|
||||
if concrete_build_deps or dspec.deptypes != ("build",):
|
||||
clauses.append(fn.hash(dep.name, dep.dag_hash()))
|
||||
|
||||
# if the spec is abstract, descend into dependencies.
|
||||
# if it's concrete, then the hashes above take care of dependency
|
||||
# constraints, but expand the hashes if asked for.
|
||||
if not spec.concrete or expand_hashes:
|
||||
clauses.extend(
|
||||
self._spec_clauses(dep, body, transitive=False)
|
||||
self._spec_clauses(
|
||||
dep,
|
||||
body=body,
|
||||
expand_hashes=expand_hashes,
|
||||
concrete_build_deps=concrete_build_deps,
|
||||
)
|
||||
)
|
||||
|
||||
return clauses
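A toy illustration of the pure-build-dependency filter described in the comments above; the edge data is made up.

edges = [
    ('cmake', ('build',)),            # pure build dep of an already-installed spec
    ('zlib', ('build', 'link')),
    ('mpich', ('link', 'run')),
]
concrete_build_deps = False

hash_constrained = [
    name for name, deptypes in edges
    if concrete_build_deps or deptypes != ('build',)
]
print(hash_constrained)   # ['zlib', 'mpich']: cmake imposes no hash constraint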
|
||||
@@ -1411,23 +1454,48 @@ def target_defaults(self, specs):
|
||||
|
||||
self.gen.h2('Target compatibility')
|
||||
|
||||
compatible_targets = [uarch] + uarch.ancestors
|
||||
additional_targets_in_family = sorted([
|
||||
t for t in archspec.cpu.TARGETS.values()
|
||||
if (t.family.name == uarch.family.name and
|
||||
t not in compatible_targets)
|
||||
], key=lambda x: len(x.ancestors), reverse=True)
|
||||
compatible_targets += additional_targets_in_family
|
||||
# Construct the list of targets which are compatible with the host
|
||||
candidate_targets = [uarch] + uarch.ancestors
|
||||
|
||||
# Get configuration options
|
||||
granularity = spack.config.get('concretizer:targets:granularity')
|
||||
host_compatible = spack.config.get('concretizer:targets:host_compatible')
|
||||
|
||||
# Add targets which are not compatible with the current host
|
||||
if not host_compatible:
|
||||
additional_targets_in_family = sorted([
|
||||
t for t in archspec.cpu.TARGETS.values()
|
||||
if (t.family.name == uarch.family.name and
|
||||
t not in candidate_targets)
|
||||
], key=lambda x: len(x.ancestors), reverse=True)
|
||||
candidate_targets += additional_targets_in_family
|
||||
|
||||
# Check if we want only generic architecture
|
||||
if granularity == 'generic':
|
||||
candidate_targets = [t for t in candidate_targets if t.vendor == 'generic']
|
||||
|
||||
compilers = self.possible_compilers
|
||||
|
||||
# this loop can be used to limit the number of targets
|
||||
# considered. Right now we consider them all, but it seems that
|
||||
# many targets can make things slow.
|
||||
# TODO: investigate this.
|
||||
# Add targets explicitly requested from specs
|
||||
for spec in specs:
|
||||
if not spec.architecture or not spec.architecture.target:
|
||||
continue
|
||||
|
||||
target = archspec.cpu.TARGETS.get(spec.target.name)
|
||||
if not target:
|
||||
self.target_ranges(spec, None)
|
||||
continue
|
||||
|
||||
if target not in candidate_targets and not host_compatible:
|
||||
candidate_targets.append(target)
|
||||
for ancestor in target.ancestors:
|
||||
if ancestor not in candidate_targets:
|
||||
candidate_targets.append(ancestor)
|
||||
|
||||
best_targets = set([uarch.family.name])
|
||||
for compiler in sorted(compilers):
|
||||
supported = self._supported_targets(
|
||||
compiler.name, compiler.version, compatible_targets
|
||||
compiler.name, compiler.version, candidate_targets
|
||||
)
|
||||
|
||||
# If we can't find supported targets it may be due to custom
|
||||
@@ -1440,7 +1508,7 @@ def target_defaults(self, specs):
|
||||
supported = self._supported_targets(
|
||||
compiler.name,
|
||||
compiler_obj.real_version,
|
||||
compatible_targets
|
||||
candidate_targets
|
||||
)
|
||||
|
||||
if not supported:
|
||||
@@ -1456,21 +1524,8 @@ def target_defaults(self, specs):
|
||||
compiler.name, compiler.version, uarch.family.name
|
||||
))
|
||||
|
||||
# add any targets explicitly mentioned in specs
|
||||
for spec in specs:
|
||||
if not spec.architecture or not spec.architecture.target:
|
||||
continue
|
||||
|
||||
target = archspec.cpu.TARGETS.get(spec.target.name)
|
||||
if not target:
|
||||
self.target_ranges(spec, None)
|
||||
continue
|
||||
|
||||
if target not in compatible_targets:
|
||||
compatible_targets.append(target)
|
||||
|
||||
i = 0
|
||||
for target in compatible_targets:
|
||||
for target in candidate_targets:
|
||||
self.gen.fact(fn.target(target.name))
|
||||
self.gen.fact(fn.target_family(target.name, target.family.name))
|
||||
for parent in sorted(target.parents):
|
||||
@@ -1512,9 +1567,12 @@ def generate_possible_compilers(self, specs):
|
||||
continue
|
||||
|
||||
if strict and s.compiler not in cspecs:
|
||||
raise spack.concretize.UnavailableCompilerVersionError(
|
||||
s.compiler
|
||||
)
|
||||
if not s.concrete:
|
||||
raise spack.concretize.UnavailableCompilerVersionError(
|
||||
s.compiler
|
||||
)
|
||||
# Allow unknown compilers to exist if the associated spec
|
||||
# is already built
|
||||
else:
|
||||
cspecs.add(s.compiler)
|
||||
self.gen.fact(fn.allow_compiler(
|
||||
@@ -1645,6 +1703,12 @@ def _facts_from_concrete_spec(self, spec, possible):
|
||||
# be dependencies (don't tell it about the others)
|
||||
h = spec.dag_hash()
|
||||
if spec.name in possible and h not in self.seen_hashes:
|
||||
try:
|
||||
# Only consider installed packages for repo we know
|
||||
spack.repo.path.get(spec)
|
||||
except (spack.repo.UnknownNamespaceError, spack.repo.UnknownPackageError):
|
||||
return
|
||||
|
||||
# this indicates that there is a spec like this installed
|
||||
self.gen.fact(fn.installed_hash(spec.name, h))
|
||||
|
||||
@@ -1652,8 +1716,16 @@ def _facts_from_concrete_spec(self, spec, possible):
|
||||
self.impose(h, spec, body=True)
|
||||
self.gen.newline()
|
||||
|
||||
# add OS to possible OS's
|
||||
# Declare as possible parts of specs that are not in package.py
|
||||
# - Add versions to possible versions
|
||||
# - Add OS to possible OS's
|
||||
for dep in spec.traverse():
|
||||
self.possible_versions[dep.name].add(dep.version)
|
||||
self.declared_versions[dep.name].append(DeclaredVersion(
|
||||
version=dep.version,
|
||||
idx=0,
|
||||
origin=version_provenance.installed
|
||||
))
|
||||
self.possible_oses.add(dep.os)
|
||||
|
||||
# add the hash to the one seen so far
|
||||
@@ -1718,7 +1790,7 @@ def setup(self, driver, specs):
|
||||
|
||||
# Fail if we already know an unreachable node is requested
|
||||
for spec in specs:
|
||||
missing_deps = [d for d in spec.traverse()
|
||||
missing_deps = [str(d) for d in spec.traverse()
|
||||
if d.name not in possible and not d.virtual]
|
||||
if missing_deps:
|
||||
raise spack.spec.InvalidDependencyError(spec.name, missing_deps)
|
||||
@@ -1738,10 +1810,14 @@ def setup(self, driver, specs):
|
||||
self.gen.h1("Concrete input spec definitions")
|
||||
self.define_concrete_input_specs(specs, possible)
|
||||
|
||||
self.gen.h1("Concretizer options")
|
||||
if self.reuse:
|
||||
self.gen.fact(fn.optimize_for_reuse())
|
||||
if self.minimal:
|
||||
self.gen.fact(fn.minimal_installs())
|
||||
|
||||
if self.reuse:
|
||||
self.gen.h1("Installed packages")
|
||||
self.gen.fact(fn.optimize_for_reuse())
|
||||
self.gen.newline()
|
||||
self.define_installed_packages(specs, possible)
|
||||
|
||||
self.gen.h1('General Constraints')
|
||||
@@ -1762,7 +1838,7 @@ def setup(self, driver, specs):
|
||||
self.gen.h1('Package Constraints')
|
||||
for pkg in sorted(pkgs):
|
||||
self.gen.h2('Package rules: %s' % pkg)
|
||||
self.pkg_rules(pkg, tests=self.tests)
|
||||
self.pkg_rules(pkg)
|
||||
self.gen.h2('Package preferences: %s' % pkg)
|
||||
self.preferred_variants(pkg)
|
||||
self.preferred_targets(pkg)
|
||||
@@ -1781,12 +1857,14 @@ def setup(self, driver, specs):
|
||||
fn.virtual_root(spec.name) if spec.virtual
|
||||
else fn.root(spec.name)
|
||||
)
|
||||
|
||||
for clause in self.spec_clauses(spec):
|
||||
self.gen.fact(clause)
|
||||
if clause.name == 'variant_set':
|
||||
self.gen.fact(fn.variant_default_value_from_cli(
|
||||
*clause.args
|
||||
))
|
||||
self.gen.fact(
|
||||
fn.variant_default_value_from_cli(*clause.args)
|
||||
)
|
||||
|
||||
self.gen.h1("Variant Values defined in specs")
|
||||
self.define_variant_values()
|
||||
|
||||
@@ -1809,6 +1887,7 @@ class SpecBuilder(object):
|
||||
ignored_attributes = ["opt_criterion"]
|
||||
|
||||
def __init__(self, specs):
|
||||
self._specs = {}
|
||||
self._result = None
|
||||
self._command_line_specs = specs
|
||||
self._flag_sources = collections.defaultdict(lambda: set())
|
||||
@@ -1861,6 +1940,17 @@ def node_os(self, pkg, os):
|
||||
def node_target(self, pkg, target):
|
||||
self._arch(pkg).target = target
|
||||
|
||||
def error(self, priority, msg, *args):
|
||||
msg = msg.format(*args)
|
||||
|
||||
# For variant formatting, we sometimes have to construct specs
|
||||
# to format values properly. Find/replace all occurrences of
|
||||
# Spec(...) with the string representation of the spec mentioned
|
||||
specs_to_construct = re.findall(r'Spec\(([^)]*)\)', msg)
|
||||
for spec_str in specs_to_construct:
|
||||
msg = msg.replace('Spec(%s)' % spec_str, str(spack.spec.Spec(spec_str)))
|
||||
raise UnsatisfiableSpecError(msg)
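The `Spec(...)` substitution above is plain string surgery and is easy to exercise on its own; the sketch below keeps just the regular-expression part, with a hypothetical `fmt()` helper standing in for `str(spack.spec.Spec(...))`.

```python
# Illustration only: replace each Spec(<text>) occurrence in an error message
# with a formatted rendering of <text>, as SpecBuilder.error() does above.
import re

def fmt(spec_str):
    # stand-in for str(spack.spec.Spec(spec_str)); here it just trims whitespace
    return " ".join(spec_str.split())

msg = "'mpich' required values 'Spec( device=ch3 )' and 'Spec( device=ch4 )'"
for spec_str in re.findall(r"Spec\(([^)]*)\)", msg):
    msg = msg.replace("Spec(%s)" % spec_str, fmt(spec_str))

print(msg)  # 'mpich' required values 'device=ch3' and 'device=ch4'
```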
|
||||
|
||||
def variant_value(self, pkg, name, value):
|
||||
# FIXME: is there a way not to special case 'dev_path' everywhere?
|
||||
if name == 'dev_path':
|
||||
@@ -1983,15 +2073,27 @@ def deprecated(self, pkg, version):
|
||||
msg = 'using "{0}@{1}" which is a deprecated version'
|
||||
tty.warn(msg.format(pkg, version))
|
||||
|
||||
@staticmethod
|
||||
def sort_fn(function_tuple):
|
||||
name = function_tuple[0]
|
||||
if name == 'error':
|
||||
priority = function_tuple[1][0]
|
||||
return (-4, priority)
|
||||
elif name == 'hash':
|
||||
return (-3, 0)
|
||||
elif name == 'node':
|
||||
return (-2, 0)
|
||||
elif name == 'node_compiler':
|
||||
return (-1, 0)
|
||||
else:
|
||||
return (0, 0)
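A quick toy run shows the ordering this key produces: `error` tuples first (sorted by their own priority), then `hash`, `node`, and `node_compiler`, and everything else afterwards, so the directives that create spec objects run before the ones that decorate them. The call names below are illustrative, not real solver output.

```python
# Toy demonstration of the ordering enforced by sort_fn() above.
def sort_fn(function_tuple):
    name, args = function_tuple
    if name == "error":
        return (-4, args[0])  # errors first, ordered by their priority
    return ({"hash": -3, "node": -2, "node_compiler": -1}.get(name, 0), 0)

calls = [
    ("node_os", ("zlib", "ubuntu20.04")),
    ("node", ("zlib",)),
    ("hash", ("zlib", "abc123")),
    ("error", (2, "example message")),
    ("node_compiler", ("zlib", "gcc")),
]
print([name for name, _ in sorted(calls, key=sort_fn)])
# ['error', 'hash', 'node', 'node_compiler', 'node_os']
```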
|
||||
|
||||
def build_specs(self, function_tuples):
|
||||
# Functions don't seem to be in particular order in output. Sort
|
||||
# them here so that directives that build objects (like node and
|
||||
# node_compiler) are called in the right order.
|
||||
function_tuples.sort(key=lambda f: {
|
||||
"hash": -3,
|
||||
"node": -2,
|
||||
"node_compiler": -1,
|
||||
}.get(f[0], 0))
|
||||
self.function_tuples = function_tuples
|
||||
self.function_tuples.sort(key=self.sort_fn)
|
||||
|
||||
self._specs = {}
|
||||
for name, args in function_tuples:
|
||||
@@ -1999,7 +2101,6 @@ def build_specs(self, function_tuples):
|
||||
continue
|
||||
|
||||
action = getattr(self, name, None)
|
||||
|
||||
# print out unknown actions so we can display them for debugging
|
||||
if not action:
|
||||
msg = "%s(%s)" % (name, ", ".join(str(a) for a in args))
|
||||
@@ -2009,22 +2110,26 @@ def build_specs(self, function_tuples):
|
||||
assert action and callable(action)
|
||||
|
||||
# ignore predicates on virtual packages, as they're used for
|
||||
# solving but don't construct anything
|
||||
pkg = args[0]
|
||||
if spack.repo.path.is_virtual(pkg):
|
||||
continue
|
||||
# solving but don't construct anything. Do not ignore error
|
||||
# predicates on virtual packages.
|
||||
if name != 'error':
|
||||
pkg = args[0]
|
||||
if spack.repo.path.is_virtual(pkg):
|
||||
continue
|
||||
|
||||
# if we've already gotten a concrete spec for this pkg,
|
||||
# do not bother calling actions on it.
|
||||
spec = self._specs.get(pkg)
|
||||
if spec and spec.concrete:
|
||||
continue
|
||||
# if we've already gotten a concrete spec for this pkg,
|
||||
# do not bother calling actions on it.
|
||||
spec = self._specs.get(pkg)
|
||||
if spec and spec.concrete:
|
||||
continue
|
||||
|
||||
action(*args)
|
||||
|
||||
# namespace assignment is done after the fact, as it is not
|
||||
# currently part of the solve
|
||||
for spec in self._specs.values():
|
||||
if spec.namespace:
|
||||
continue
|
||||
repo = spack.repo.path.repo_for_pkg(spec)
|
||||
spec.namespace = repo.namespace
|
||||
|
||||
@@ -2034,7 +2139,7 @@ def build_specs(self, function_tuples):
|
||||
# inject patches -- note that we can't use set() to unique the
|
||||
# roots here, because the specs aren't complete, and the hash
|
||||
# function will loop forever.
|
||||
roots = [spec.root for spec in self._specs.values()]
|
||||
roots = [spec.root for spec in self._specs.values() if not spec.root.installed]
|
||||
roots = dict((id(r), r) for r in roots)
|
||||
for root in roots.values():
|
||||
spack.spec.Spec.inject_patches_variant(root)
|
||||
@@ -2046,8 +2151,9 @@ def build_specs(self, function_tuples):
|
||||
for s in self._specs.values():
|
||||
_develop_specs_from_env(s, ev.active_environment())
|
||||
|
||||
for s in self._specs.values():
|
||||
s._mark_concrete()
|
||||
# mark concrete and assign hashes to all specs in the solve
|
||||
for root in roots.values():
|
||||
root._finalize_concretization()
|
||||
|
||||
for s in self._specs.values():
|
||||
spack.spec.Spec.ensure_no_deprecated(s)
|
||||
@@ -2090,6 +2196,10 @@ class Solver(object):
|
||||
``reuse (bool)``
|
||||
Whether to try to reuse existing installs/binaries
|
||||
|
||||
``minimal (bool)``
|
||||
If ``True``, make minimizing nodes the top priority, even higher
|
||||
than defaults from packages and preferences.
|
||||
|
||||
"""
|
||||
def __init__(self):
|
||||
self.driver = PyclingoDriver()
|
||||
@@ -2097,6 +2207,7 @@ def __init__(self):
|
||||
# These properties are settable via spack configuration, and overridable
|
||||
# by setting them directly as properties.
|
||||
self.reuse = spack.config.get("concretizer:reuse", False)
|
||||
self.minimal = spack.config.get("concretizer:minimal", False)
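Both values can also be overridden on the object after construction, since they are exposed as plain attributes; the snippet below is a hypothetical usage sketch based only on the names introduced in this diff, not on a documented API.

```python
# Hypothetical usage sketch: override the config-derived defaults per solve.
import spack.solver.asp as asp

solver = asp.Solver()
solver.reuse = True     # same effect as setting concretizer:reuse in config
solver.minimal = True   # put "fewest builds" above per-package defaults
# result = solver.solve(specs)   # specs: a list of spack.spec.Spec objects
```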
|
||||
|
||||
def solve(
|
||||
self,
|
||||
@@ -2127,7 +2238,7 @@ def solve(
|
||||
continue
|
||||
spack.spec.Spec.ensure_valid_variants(s)
|
||||
|
||||
setup = SpackSolverSetup(reuse=self.reuse, tests=tests)
|
||||
setup = SpackSolverSetup(reuse=self.reuse, minimal=self.minimal, tests=tests)
|
||||
return self.driver.solve(
|
||||
setup,
|
||||
specs,
|
||||
@@ -2140,25 +2251,27 @@ def solve(
|
||||
|
||||
|
||||
class UnsatisfiableSpecError(spack.error.UnsatisfiableSpecError):
|
||||
"""
|
||||
Subclass for new constructor signature for new concretizer
|
||||
"""
|
||||
def __init__(self, msg):
|
||||
super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
|
||||
self.provided = None
|
||||
self.required = None
|
||||
self.constraint_type = None
|
||||
|
||||
|
||||
class InternalConcretizerError(spack.error.UnsatisfiableSpecError):
|
||||
"""
|
||||
Subclass for new constructor signature for new concretizer
|
||||
"""
|
||||
def __init__(self, provided, conflicts):
|
||||
indented = [' %s\n' % conflict for conflict in conflicts]
|
||||
conflict_msg = ''.join(indented)
|
||||
issue = 'conflicts' if full_cores else 'errors'
|
||||
msg = '%s is unsatisfiable, %s are:\n%s' % (provided, issue, conflict_msg)
|
||||
|
||||
newline_indent = '\n '
|
||||
if not full_cores:
|
||||
msg += newline_indent + 'To see full clingo unsat cores, '
|
||||
msg += 're-run with `spack --show-cores=full`'
|
||||
if not minimize_cores or not full_cores:
|
||||
# not solver.minimalize_cores and not solver.full_cores impossible
|
||||
msg += newline_indent + 'For full, subset-minimal unsat cores, '
|
||||
msg += 're-run with `spack --show-cores=minimized'
|
||||
msg += newline_indent
|
||||
msg += 'Warning: This may take (up to) hours for some specs'
|
||||
error_msg = ''.join(indented)
|
||||
msg = 'Spack concretizer internal error. Please submit a bug report'
|
||||
msg += '\n Please include the command, environment if applicable,'
|
||||
msg += '\n and the following error message.'
|
||||
msg = '\n %s is unsatisfiable, errors are:\n%s' % (provided, error_msg)
|
||||
|
||||
super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
|
||||
|
||||
|
||||
@@ -7,22 +7,6 @@
|
||||
% This logic program implements Spack's concretizer
|
||||
%=============================================================================
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
% Generic constraints on nodes
|
||||
%-----------------------------------------------------------------------------
|
||||
|
||||
% each node must have a single version
|
||||
:- not 1 { version(Package, _) } 1, node(Package).
|
||||
|
||||
% each node must have a single platform, os and target
|
||||
:- not 1 { node_platform(Package, _) } 1, node(Package), error("A node must have exactly one platform").
|
||||
:- not 1 { node_os(Package, _) } 1, node(Package).
|
||||
:- not 1 { node_target(Package, _) } 1, node(Package).
|
||||
|
||||
% each node has a single compiler associated with it
|
||||
:- not 1 { node_compiler(Package, _) } 1, node(Package).
|
||||
:- not 1 { node_compiler_version(Package, _, _) } 1, node(Package).
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
% Version semantics
|
||||
%-----------------------------------------------------------------------------
|
||||
@@ -35,18 +19,40 @@ version_declared(Package, Version, Weight) :- version_declared(Package, Version,
|
||||
:- version_declared(Package, Version, Weight, Origin1),
|
||||
version_declared(Package, Version, Weight, Origin2),
|
||||
Origin1 < Origin2,
|
||||
error("Internal error: two versions with identical weights").
|
||||
internal_error("Two versions with identical weights").
|
||||
|
||||
% We cannot use a version declared for an installed package if we end up building it
|
||||
:- version_declared(Package, Version, Weight, "installed"),
|
||||
version(Package, Version),
|
||||
version_weight(Package, Weight),
|
||||
not hash(Package, _).
|
||||
|
||||
% versions are declared w/priority -- declared with priority implies declared
|
||||
version_declared(Package, Version) :- version_declared(Package, Version, _).
|
||||
|
||||
% If something is a package, it has only one version and that must be a
|
||||
% declared version.
|
||||
1 { version(Package, Version) : version_declared(Package, Version) } 1
|
||||
:- node(Package), error("Each node must have exactly one version").
|
||||
% We allow clingo to choose any version(s), and infer an error if there
|
||||
% is not precisely one version chosen. Error facts are heavily optimized
|
||||
% against to ensure they cannot be inferred when a non-error solution is
|
||||
% possible
|
||||
{ version(Package, Version) : version_declared(Package, Version) }
|
||||
:- node(Package).
|
||||
error(2, "No version for '{0}' satisfies '@{1}' and '@{2}'", Package, Version1, Version2)
|
||||
:- node(Package),
|
||||
version(Package, Version1),
|
||||
version(Package, Version2),
|
||||
Version1 < Version2. % see[1]
|
||||
|
||||
% A virtual package may have or not a version, but never has more than one
|
||||
:- virtual_node(Package), 2 { version(Package, _) }.
|
||||
error(2, "No versions available for package '{0}'", Package)
|
||||
:- node(Package), not version(Package, _).
|
||||
|
||||
% A virtual package may or may not have a version, but never has more than one
|
||||
error(2, "No version for '{0}' satisfies '@{1}' and '@{2}'", Virtual, Version1, Version2)
|
||||
:- virtual_node(Virtual),
|
||||
version(Virtual, Version1),
|
||||
version(Virtual, Version2),
|
||||
Version1 < Version2. % see[1]
|
||||
|
||||
% If we select a deprecated version, mark the package as deprecated
|
||||
deprecated(Package, Version) :- version(Package, Version), deprecated_version(Package, Version).
|
||||
@@ -55,14 +61,27 @@ possible_version_weight(Package, Weight)
|
||||
:- version(Package, Version),
|
||||
version_declared(Package, Version, Weight).
|
||||
|
||||
1 { version_weight(Package, Weight) : possible_version_weight(Package, Weight) } 1 :- node(Package), error("Internal error: Package version must have a unique weight").
|
||||
version_weight(Package, Weight)
|
||||
:- version(Package, Version),
|
||||
node(Package),
|
||||
Weight = #min{W : version_declared(Package, Version, W)}.
|
||||
|
||||
% version_satisfies implies that exactly one of the satisfying versions
|
||||
% node_version_satisfies implies that exactly one of the satisfying versions
|
||||
% is the package's version, and vice versa.
|
||||
1 { version(Package, Version) : version_satisfies(Package, Constraint, Version) } 1
|
||||
:- version_satisfies(Package, Constraint),
|
||||
error("no version satisfies the given constraints").
|
||||
version_satisfies(Package, Constraint)
|
||||
% While this choice rule appears redundant with the initial choice rule for
|
||||
% versions, virtual nodes with version constraints require this rule to be
|
||||
% able to choose versions
|
||||
{ version(Package, Version) : version_satisfies(Package, Constraint, Version) }
|
||||
:- node_version_satisfies(Package, Constraint).
|
||||
|
||||
% More specific error message if the version cannot satisfy some constraint
|
||||
% Otherwise covered by `no_version_error` and `versions_conflict_error`.
|
||||
error(1, "No valid version for '{0}' satisfies '@{1}'", Package, Constraint)
|
||||
:- node_version_satisfies(Package, Constraint),
|
||||
C = #count{ Version : version(Package, Version), version_satisfies(Package, Constraint, Version)},
|
||||
C < 1.
|
||||
|
||||
node_version_satisfies(Package, Constraint)
|
||||
:- version(Package, Version), version_satisfies(Package, Constraint, Version).
|
||||
|
||||
#defined version_satisfies/3.
|
||||
@@ -81,7 +100,7 @@ version_satisfies(Package, Constraint)
|
||||
% conditions are specified with `condition_requirement` and hold when
|
||||
% corresponding spec attributes hold.
|
||||
condition_holds(ID) :-
|
||||
condition(ID);
|
||||
condition(ID, _);
|
||||
attr(Name, A1) : condition_requirement(ID, Name, A1);
|
||||
attr(Name, A1, A2) : condition_requirement(ID, Name, A1, A2);
|
||||
attr(Name, A1, A2, A3) : condition_requirement(ID, Name, A1, A2, A3).
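Stripped of ASP syntax, this rule says a condition holds exactly when every requirement tuple recorded for its ID appears among the attributes derived for the spec. A small Python analogy with made-up data:

```python
# Python analogy for condition_holds/1 (hypothetical data, not the real grounding):
# a condition holds when all of its requirement tuples are among the spec's attrs.
requirements = {
    7: {("node", "hdf5"), ("variant_value", "hdf5", "mpi", "True")},
}
spec_attrs = {
    ("node", "hdf5"),
    ("variant_value", "hdf5", "mpi", "True"),
    ("node", "mpich"),
}

def condition_holds(condition_id):
    return requirements[condition_id] <= spec_attrs  # subset test

print(condition_holds(7))  # True: every requirement of condition 7 is present
```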
|
||||
@@ -100,7 +119,12 @@ attr(Name, A1, A2, A3) :- impose(ID), imposed_constraint(ID, Name, A1, A2, A3).
|
||||
variant_value(Package, Variant, Value),
|
||||
not imposed_constraint(Hash, "variant_value", Package, Variant, Value).
|
||||
|
||||
#defined condition/1.
|
||||
% we cannot have additional flag values when we are working with concrete specs
|
||||
:- node(Package), hash(Package, Hash),
|
||||
node_flag(Package, FlagType, Flag),
|
||||
not imposed_constraint(Hash, "node_flag", Package, FlagType, Flag).
|
||||
|
||||
#defined condition/2.
|
||||
#defined condition_requirement/3.
|
||||
#defined condition_requirement/4.
|
||||
#defined condition_requirement/5.
|
||||
@@ -127,9 +151,9 @@ depends_on(Package, Dependency) :- depends_on(Package, Dependency, _).
|
||||
dependency_holds(Package, Dependency, Type) :-
|
||||
dependency_condition(ID, Package, Dependency),
|
||||
dependency_type(ID, Type),
|
||||
condition_holds(ID),
|
||||
build(Package),
|
||||
not external(Package).
|
||||
not external(Package),
|
||||
condition_holds(ID).
|
||||
|
||||
% We cut off dependencies of externals (as we don't really know them).
|
||||
% Don't impose constraints on dependencies that don't exist.
|
||||
@@ -155,17 +179,18 @@ node(Dependency) :- node(Package), depends_on(Package, Dependency).
|
||||
% dependencies) and get a two-node unconnected graph
|
||||
needed(Package) :- root(Package).
|
||||
needed(Dependency) :- needed(Package), depends_on(Package, Dependency).
|
||||
:- node(Package), not needed(Package),
|
||||
error("All dependencies must be reachable from root").
|
||||
error(1, "'{0}' is not a valid dependency for any package in the DAG", Package)
|
||||
:- node(Package),
|
||||
not needed(Package).
|
||||
|
||||
% Avoid cycles in the DAG
|
||||
% some combinations of conditional dependencies can result in cycles;
|
||||
% this ensures that we solve around them
|
||||
path(Parent, Child) :- depends_on(Parent, Child).
|
||||
path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
|
||||
:- path(A, B), path(B, A), error("Cyclic dependencies are not allowed").
|
||||
|
||||
#defined error/1.
|
||||
error(2, "Cyclic dependency detected between '{0}' and '{1}'\n Consider changing variants to avoid the cycle", A, B)
|
||||
:- path(A, B),
|
||||
path(B, A).
|
||||
|
||||
#defined dependency_type/2.
|
||||
#defined dependency_condition/3.
|
||||
@@ -173,14 +198,13 @@ path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
|
||||
%-----------------------------------------------------------------------------
|
||||
% Conflicts
|
||||
%-----------------------------------------------------------------------------
|
||||
:- node(Package),
|
||||
conflict(Package, TriggerID, ConstraintID),
|
||||
error(0, Msg) :- node(Package),
|
||||
conflict(Package, TriggerID, ConstraintID, Msg),
|
||||
condition_holds(TriggerID),
|
||||
condition_holds(ConstraintID),
|
||||
not external(Package), % ignore conflicts for externals
|
||||
error("A conflict was triggered").
|
||||
not external(Package). % ignore conflicts for externals
|
||||
|
||||
#defined conflict/3.
|
||||
#defined conflict/4.
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
% Virtual dependencies
|
||||
@@ -200,8 +224,17 @@ virtual_node(Virtual)
|
||||
|
||||
% If there's a virtual node, we must select one and only one provider.
|
||||
% The provider must be selected among the possible providers.
|
||||
1 { provider(Package, Virtual) : possible_provider(Package, Virtual) } 1
|
||||
:- virtual_node(Virtual), error("Virtual packages must be satisfied by a unique provider").
|
||||
{ provider(Package, Virtual) : possible_provider(Package, Virtual) }
|
||||
:- virtual_node(Virtual).
|
||||
error(2, "Cannot find valid provider for virtual {0}", Virtual)
|
||||
:- virtual_node(Virtual),
|
||||
P = #count{ Package : provider(Package, Virtual)},
|
||||
P < 1.
|
||||
error(2, "Spec cannot include multiple providers for virtual '{0}'\n Requested '{1}' and '{2}'", Virtual, P1, P2)
|
||||
:- virtual_node(Virtual),
|
||||
provider(P1, Virtual),
|
||||
provider(P2, Virtual),
|
||||
P1 < P2.
|
||||
|
||||
% virtual roots imply virtual nodes, and that one provider is a root
|
||||
virtual_node(Virtual) :- virtual_root(Virtual).
|
||||
@@ -226,7 +259,7 @@ virtual_condition_holds(Provider, Virtual) :-
|
||||
% A package cannot be the actual provider for a virtual if it does not
|
||||
% fulfill the conditions to provide that virtual
|
||||
:- provider(Package, Virtual), not virtual_condition_holds(Package, Virtual),
|
||||
error("Internal error: virtual when provides not respected").
|
||||
internal_error("Virtual when provides not respected").
|
||||
|
||||
#defined possible_provider/2.
|
||||
|
||||
@@ -239,7 +272,7 @@ virtual_condition_holds(Provider, Virtual) :-
|
||||
% we select the weight, among the possible ones, that minimizes the overall objective function.
|
||||
1 { provider_weight(Dependency, Virtual, Weight, Reason) :
|
||||
possible_provider_weight(Dependency, Virtual, Weight, Reason) } 1
|
||||
:- provider(Dependency, Virtual), error("Internal error: package provider weights must be unique").
|
||||
:- provider(Dependency, Virtual), internal_error("Package provider weights must be unique").
|
||||
|
||||
% Get rid of the reason for enabling the possible weight (useful for debugging)
|
||||
provider_weight(Dependency, Virtual, Weight) :- provider_weight(Dependency, Virtual, Weight, _).
|
||||
@@ -282,9 +315,10 @@ possible_provider_weight(Dependency, Virtual, 100, "fallback") :- provider(Depen
|
||||
% These allow us to easily define conditional dependency and conflict rules
|
||||
% without enumerating all spec attributes every time.
|
||||
node(Package) :- attr("node", Package).
|
||||
virtual_node(Virtual) :- attr("virtual_node", Virtual).
|
||||
hash(Package, Hash) :- attr("hash", Package, Hash).
|
||||
version(Package, Version) :- attr("version", Package, Version).
|
||||
version_satisfies(Package, Constraint) :- attr("version_satisfies", Package, Constraint).
|
||||
node_version_satisfies(Package, Constraint) :- attr("node_version_satisfies", Package, Constraint).
|
||||
node_platform(Package, Platform) :- attr("node_platform", Package, Platform).
|
||||
node_os(Package, OS) :- attr("node_os", Package, OS).
|
||||
node_target(Package, Target) :- attr("node_target", Package, Target).
|
||||
@@ -300,9 +334,10 @@ node_compiler_version_satisfies(Package, Compiler, Version)
|
||||
:- attr("node_compiler_version_satisfies", Package, Compiler, Version).
|
||||
|
||||
attr("node", Package) :- node(Package).
|
||||
attr("virtual_node", Virtual) :- virtual_node(Virtual).
|
||||
attr("hash", Package, Hash) :- hash(Package, Hash).
|
||||
attr("version", Package, Version) :- version(Package, Version).
|
||||
attr("version_satisfies", Package, Constraint) :- version_satisfies(Package, Constraint).
|
||||
attr("node_version_satisfies", Package, Constraint) :- node_version_satisfies(Package, Constraint).
|
||||
attr("node_platform", Package, Platform) :- node_platform(Package, Platform).
|
||||
attr("node_os", Package, OS) :- node_os(Package, OS).
|
||||
attr("node_target", Package, Target) :- node_target(Package, Target).
|
||||
@@ -330,7 +365,7 @@ attr("node_compiler_version_satisfies", Package, Compiler, Version)
|
||||
#defined external_only/1.
|
||||
#defined pkg_provider_preference/4.
|
||||
#defined default_provider_preference/3.
|
||||
#defined version_satisfies/2.
|
||||
#defined node_version_satisfies/2.
|
||||
#defined node_compiler_version_satisfies/3.
|
||||
#defined root/1.
|
||||
|
||||
@@ -339,9 +374,17 @@ attr("node_compiler_version_satisfies", Package, Compiler, Version)
|
||||
%-----------------------------------------------------------------------------
|
||||
|
||||
% if a package is external its version must be one of the external versions
|
||||
1 { external_version(Package, Version, Weight):
|
||||
version_declared(Package, Version, Weight, "external") } 1
|
||||
:- external(Package), error("External package version does not satisfy external spec").
|
||||
{ external_version(Package, Version, Weight):
|
||||
version_declared(Package, Version, Weight, "external") }
|
||||
:- external(Package).
|
||||
error(2, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
|
||||
:- external(Package),
|
||||
not external_version(Package, _, _).
|
||||
error(2, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
|
||||
:- external(Package),
|
||||
external_version(Package, Version1, Weight1),
|
||||
external_version(Package, Version2, Weight2),
|
||||
(Version1, Weight1) < (Version2, Weight2). % see[1]
|
||||
|
||||
version_weight(Package, Weight) :- external_version(Package, Version, Weight).
|
||||
version(Package, Version) :- external_version(Package, Version, Weight).
|
||||
@@ -361,7 +404,7 @@ external(Package) :- external_spec_selected(Package, _).
|
||||
version_weight(Package, Weight),
|
||||
version_declared(Package, Version, Weight, "external"),
|
||||
not external(Package),
|
||||
error("Internal error: external weight used for internal spec").
|
||||
internal_error("External weight used for internal spec").
|
||||
|
||||
% determine if an external spec has been selected
|
||||
external_spec_selected(Package, LocalIndex) :-
|
||||
@@ -373,8 +416,9 @@ external_conditions_hold(Package, LocalIndex) :-
|
||||
|
||||
% it cannot happen that a spec is external, but none of the external specs
|
||||
% conditions hold.
|
||||
:- external(Package), not external_conditions_hold(Package, _),
|
||||
error("External package does not satisfy external spec").
|
||||
error(2, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
|
||||
:- external(Package),
|
||||
not external_conditions_hold(Package, _).
|
||||
|
||||
#defined possible_external/3.
|
||||
#defined external_spec_index/3.
|
||||
@@ -391,16 +435,16 @@ variant(Package, Variant) :- variant_condition(ID, Package, Variant),
|
||||
condition_holds(ID).
|
||||
|
||||
% a variant cannot be set if it is not a variant on the package
|
||||
:- variant_set(Package, Variant),
|
||||
not variant(Package, Variant),
|
||||
build(Package),
|
||||
error("Unsatisfied conditional variants cannot be set").
|
||||
error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Package, Variant)
|
||||
:- variant_set(Package, Variant),
|
||||
not variant(Package, Variant),
|
||||
build(Package).
|
||||
|
||||
% a variant cannot take on a value if it is not a variant of the package
|
||||
:- variant_value(Package, Variant, _),
|
||||
not variant(Package, Variant),
|
||||
build(Package),
|
||||
error("Unsatisfied conditional variants cannot take on a variant value").
|
||||
error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Package, Variant)
|
||||
:- variant_value(Package, Variant, _),
|
||||
not variant(Package, Variant),
|
||||
build(Package).
|
||||
|
||||
% if a variant is sticky and not set its value is the default value
|
||||
variant_value(Package, Variant, Value) :-
|
||||
@@ -410,27 +454,30 @@ variant_value(Package, Variant, Value) :-
|
||||
variant_default_value(Package, Variant, Value),
|
||||
build(Package).
|
||||
|
||||
% one variant value for single-valued variants.
|
||||
1 {
|
||||
% at most one variant value for single-valued variants.
|
||||
{
|
||||
variant_value(Package, Variant, Value)
|
||||
: variant_possible_value(Package, Variant, Value)
|
||||
} 1
|
||||
:- node(Package),
|
||||
variant(Package, Variant),
|
||||
variant_single_value(Package, Variant),
|
||||
build(Package),
|
||||
error("Single valued variants must have a single value").
|
||||
|
||||
% at least one variant value for multi-valued variants.
|
||||
1 {
|
||||
variant_value(Package, Variant, Value)
|
||||
: variant_possible_value(Package, Variant, Value)
|
||||
}
|
||||
:- node(Package),
|
||||
variant(Package, Variant),
|
||||
not variant_single_value(Package, Variant),
|
||||
build(Package),
|
||||
error("Internal error: All variants must have a value").
|
||||
build(Package).
|
||||
|
||||
|
||||
error(2, "'{0}' required multiple values for single-valued variant '{1}'\n Requested 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2)
|
||||
:- node(Package),
|
||||
variant(Package, Variant),
|
||||
variant_single_value(Package, Variant),
|
||||
build(Package),
|
||||
variant_value(Package, Variant, Value1),
|
||||
variant_value(Package, Variant, Value2),
|
||||
Value1 < Value2. % see[1]
|
||||
error(2, "No valid value for variant '{1}' of package '{0}'", Package, Variant)
|
||||
:- node(Package),
|
||||
variant(Package, Variant),
|
||||
build(Package),
|
||||
C = #count{ Value : variant_value(Package, Variant, Value) },
|
||||
C < 1.
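Together, the two error rules above state that a built package must end up with exactly one value for a single-valued variant and at least one value for any variant it has. The same checks, written out in Python over made-up data:

```python
# Illustration of the two error rules above (hypothetical data, not Spack code).
def variant_errors(chosen_values, single_valued):
    errors = []
    for (pkg, variant), values in chosen_values.items():
        if (pkg, variant) in single_valued and len(values) > 1:
            errors.append("'%s' required multiple values for single-valued "
                          "variant '%s'" % (pkg, variant))
        if len(values) < 1:
            errors.append("No valid value for variant '%s' of package '%s'"
                          % (variant, pkg))
    return errors

chosen = {("mpich", "device"): {"ch3", "ch4"}, ("zlib", "optimize"): set()}
print(variant_errors(chosen, single_valued={("mpich", "device")}))
```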
|
||||
|
||||
% if a variant is set to anything, it is considered 'set'.
|
||||
variant_set(Package, Variant) :- variant_set(Package, Variant, _).
|
||||
@@ -438,21 +485,21 @@ variant_set(Package, Variant) :- variant_set(Package, Variant, _).
|
||||
% A variant cannot have a value that is not also a possible value
|
||||
% This only applies to packages we need to build -- concrete packages may
|
||||
% have been built w/different variants from older/different package versions.
|
||||
:- variant_value(Package, Variant, Value),
|
||||
not variant_possible_value(Package, Variant, Value),
|
||||
build(Package),
|
||||
error("Variant set to invalid value").
|
||||
error(1, "'Spec({1}={2})' is not a valid value for '{0}' variant '{1}'", Package, Variant, Value)
|
||||
:- variant_value(Package, Variant, Value),
|
||||
not variant_possible_value(Package, Variant, Value),
|
||||
build(Package).
|
||||
|
||||
% Some multi valued variants accept multiple values from disjoint sets.
|
||||
% Ensure that we respect that constraint and we don't pick values from more
|
||||
% than one set at once
|
||||
:- variant_value(Package, Variant, Value1),
|
||||
variant_value(Package, Variant, Value2),
|
||||
variant_value_from_disjoint_sets(Package, Variant, Value1, Set1),
|
||||
variant_value_from_disjoint_sets(Package, Variant, Value2, Set2),
|
||||
Set1 < Set2,
|
||||
build(Package),
|
||||
error("Variant values selected from multiple disjoint sets").
|
||||
error(2, "{0} variant '{1}' cannot have values '{2}' and '{3}' as they come from disjoing value sets", Package, Variant, Value1, Value2)
|
||||
:- variant_value(Package, Variant, Value1),
|
||||
variant_value(Package, Variant, Value2),
|
||||
variant_value_from_disjoint_sets(Package, Variant, Value1, Set1),
|
||||
variant_value_from_disjoint_sets(Package, Variant, Value2, Set2),
|
||||
Set1 < Set2, % see[1]
|
||||
build(Package).
|
||||
|
||||
% variant_set is an explicitly set variant value. If it's not 'set',
|
||||
% we revert to the default value. If it is set, we force the set value
|
||||
@@ -510,12 +557,11 @@ variant_default_value(Package, Variant, Value) :- variant_default_value_from_cli
|
||||
|
||||
% Treat 'none' in a special way - it cannot be combined with other
|
||||
% values even if the variant is multi-valued
|
||||
:- 2 {
|
||||
variant_value(Package, Variant, Value) : variant_possible_value(Package, Variant, Value)
|
||||
},
|
||||
variant_value(Package, Variant, "none"),
|
||||
build(Package),
|
||||
error("Variant value 'none' cannot be combined with any other value").
|
||||
error(2, "{0} variant '{1}' cannot have values '{2}' and 'none'", Package, Variant, Value)
|
||||
:- variant_value(Package, Variant, Value),
|
||||
variant_value(Package, Variant, "none"),
|
||||
Value != "none",
|
||||
build(Package).
|
||||
|
||||
% patches and dev_path are special variants -- they don't have to be
|
||||
% declared in the package, so we just allow them to spring into existence
|
||||
@@ -559,6 +605,18 @@ node_platform(Package, Platform)
|
||||
% platform is set if set to anything
|
||||
node_platform_set(Package) :- node_platform_set(Package, _).
|
||||
|
||||
% each node must have a single platform
|
||||
error(2, "No valid platform found for {0}", Package)
|
||||
:- node(Package),
|
||||
C = #count{ Platform : node_platform(Package, Platform)},
|
||||
C < 1.
|
||||
|
||||
error(2, "Cannot concretize {0} with multiple platforms\n Requested 'platform={1}' and 'platform={2}'", Package, Platform1, Platform2)
|
||||
:- node(Package),
|
||||
node_platform(Package, Platform1),
|
||||
node_platform(Package, Platform2),
|
||||
Platform1 < Platform2. % see[1]
|
||||
|
||||
#defined node_platform_set/2. % avoid warnings
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
@@ -568,20 +626,32 @@ node_platform_set(Package) :- node_platform_set(Package, _).
|
||||
os(OS) :- os(OS, _).
|
||||
|
||||
% one os per node
|
||||
1 { node_os(Package, OS) : os(OS) } 1 :-
|
||||
node(Package), error("Each node must have exactly one OS").
|
||||
{ node_os(Package, OS) : os(OS) } :- node(Package).
|
||||
|
||||
error(2, "Cannot find valid operating system for '{0}'", Package)
|
||||
:- node(Package),
|
||||
C = #count{ OS : node_os(Package, OS)},
|
||||
C < 1.
|
||||
|
||||
error(2, "Cannot concretize {0} with multiple operating systems\n Requested 'os={1}' and 'os={2}'", Package, OS1, OS2)
|
||||
:- node(Package),
|
||||
node_os(Package, OS1),
|
||||
node_os(Package, OS2),
|
||||
OS1 < OS2. %see [1]
|
||||
|
||||
% can't have a non-buildable OS on a node we need to build
|
||||
:- build(Package), node_os(Package, OS), not buildable_os(OS),
|
||||
error("No available OS can be built for").
|
||||
error(2, "Cannot concretize '{0} os={1}'. Operating system '{1}' is not buildable", Package, OS)
|
||||
:- build(Package),
|
||||
node_os(Package, OS),
|
||||
not buildable_os(OS).
|
||||
|
||||
% can't have dependencies on incompatible OS's
|
||||
:- depends_on(Package, Dependency),
|
||||
node_os(Package, PackageOS),
|
||||
node_os(Dependency, DependencyOS),
|
||||
not os_compatible(PackageOS, DependencyOS),
|
||||
build(Package),
|
||||
error("Dependencies must have compatible OS's with their dependents").
|
||||
error(2, "{0} and dependency {1} have incompatible operating systems 'os={2}' and 'os={3}'", Package, Dependency, PackageOS, DependencyOS)
|
||||
:- depends_on(Package, Dependency),
|
||||
node_os(Package, PackageOS),
|
||||
node_os(Dependency, DependencyOS),
|
||||
not os_compatible(PackageOS, DependencyOS),
|
||||
build(Package).
|
||||
|
||||
% give OS choice weights according to os declarations
|
||||
node_os_weight(Package, Weight)
|
||||
@@ -613,14 +683,24 @@ node_os(Package, OS) :- node_os_set(Package, OS), node(Package).
|
||||
%-----------------------------------------------------------------------------
|
||||
|
||||
% Each node has only one target chosen among the known targets
|
||||
1 { node_target(Package, Target) : target(Target) } 1 :- node(Package), error("Each node must have exactly one target").
|
||||
{ node_target(Package, Target) : target(Target) } :- node(Package).
|
||||
|
||||
error(2, "Cannot find valid target for '{0}'", Package)
|
||||
:- node(Package),
|
||||
C = #count{Target : node_target(Package, Target)},
|
||||
C < 1.
|
||||
|
||||
error(2, "Cannot concretize '{0}' with multiple targets\n Requested 'target={1}' and 'target={2}'", Package, Target1, Target2)
|
||||
:- node(Package),
|
||||
node_target(Package, Target1),
|
||||
node_target(Package, Target2),
|
||||
Target1 < Target2. % see[1]
|
||||
|
||||
% If a node must satisfy a target constraint, enforce it
|
||||
:- node_target(Package, Target),
|
||||
node_target_satisfies(Package, Constraint),
|
||||
not target_satisfies(Constraint, Target),
|
||||
error("Node targets must satisfy node target constraints").
|
||||
|
||||
error(1, "'{0} target={1}' cannot satisfy constraint 'target={2}'", Package, Target, Constraint)
|
||||
:- node_target(Package, Target),
|
||||
node_target_satisfies(Package, Constraint),
|
||||
not target_satisfies(Constraint, Target).
|
||||
|
||||
% If a node has a target and the target satisfies a constraint, then the target
|
||||
% associated with the node satisfies the same constraint
|
||||
@@ -628,10 +708,10 @@ node_target_satisfies(Package, Constraint)
|
||||
:- node_target(Package, Target), target_satisfies(Constraint, Target).
|
||||
|
||||
% If a node has a target, all of its dependencies must be compatible with that target
|
||||
:- depends_on(Package, Dependency),
|
||||
node_target(Package, Target),
|
||||
not node_target_compatible(Dependency, Target),
|
||||
error("Dependency node targets must be compatible with dependent targets").
|
||||
error(2, "Cannot find compatible targets for {0} and {1}", Package, Dependency)
|
||||
:- depends_on(Package, Dependency),
|
||||
node_target(Package, Target),
|
||||
not node_target_compatible(Dependency, Target).
|
||||
|
||||
% Intermediate step for performance reasons
|
||||
% When the integrity constraint above was formulated including this logic
|
||||
@@ -672,12 +752,12 @@ target_weight(Target, Package, Weight)
|
||||
:- package_target_weight(Target, Package, Weight).
|
||||
|
||||
% can't use targets on node if the compiler for the node doesn't support them
|
||||
:- node_target(Package, Target),
|
||||
not compiler_supports_target(Compiler, Version, Target),
|
||||
node_compiler(Package, Compiler),
|
||||
node_compiler_version(Package, Compiler, Version),
|
||||
build(Package),
|
||||
error("No satisfying compiler available is compatible with a satisfying target").
|
||||
error(2, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Target, Compiler, Version)
|
||||
:- node_target(Package, Target),
|
||||
not compiler_supports_target(Compiler, Version, Target),
|
||||
node_compiler(Package, Compiler),
|
||||
node_compiler_version(Package, Compiler, Version),
|
||||
build(Package).
|
||||
|
||||
% if a target is set explicitly, respect it
|
||||
node_target(Package, Target)
|
||||
@@ -704,8 +784,10 @@ node_target_mismatch(Parent, Dependency)
|
||||
not node_target_match(Parent, Dependency).
|
||||
|
||||
% disallow reusing concrete specs that don't have a compatible target
|
||||
:- node(Package), node_target(Package, Target), not target(Target),
|
||||
error("No satisfying package's target is compatible with this machine").
|
||||
error(2, "'{0} target={1}' is not compatible with this machine", Package, Target)
|
||||
:- node(Package),
|
||||
node_target(Package, Target),
|
||||
not target(Target).
|
||||
|
||||
#defined node_target_set/2.
|
||||
#defined package_target_weight/3.
|
||||
@@ -717,10 +799,19 @@ compiler(Compiler) :- compiler_version(Compiler, _).
|
||||
|
||||
% There must be only one compiler set per built node. The compiler
|
||||
% is chosen among available versions.
|
||||
1 { node_compiler_version(Package, Compiler, Version) : compiler_version(Compiler, Version) } 1 :-
|
||||
{ node_compiler_version(Package, Compiler, Version) : compiler_version(Compiler, Version) } :-
|
||||
node(Package),
|
||||
build(Package),
|
||||
error("Each node must have exactly one compiler").
|
||||
build(Package).
|
||||
|
||||
error(2, "No valid compiler version found for '{0}'", Package)
|
||||
:- node(Package),
|
||||
C = #count{ Version : node_compiler_version(Package, _, Version)},
|
||||
C < 1.
|
||||
error(2, "'{0}' compiler constraints '%{1}@{2}' and '%{3}@{4}' are incompatible", Package, Compiler1, Version1, Compiler2, Version2)
|
||||
:- node(Package),
|
||||
node_compiler_version(Package, Compiler1, Version1),
|
||||
node_compiler_version(Package, Compiler2, Version2),
|
||||
(Compiler1, Version1) < (Compiler2, Version2). % see[1]
|
||||
|
||||
% Sometimes we just need to know the compiler and not the version
|
||||
node_compiler(Package, Compiler) :- node_compiler_version(Package, Compiler, _).
|
||||
@@ -729,14 +820,22 @@ node_compiler(Package, Compiler) :- node_compiler_version(Package, Compiler, _).
|
||||
:- node_compiler(Package, Compiler1),
|
||||
node_compiler_version(Package, Compiler2, _),
|
||||
Compiler1 != Compiler2,
|
||||
error("Internal error: mismatch between selected compiler and compiler version").
|
||||
internal_error("Mismatch between selected compiler and compiler version").
|
||||
|
||||
% If the compiler of a node cannot be satisfied, raise
|
||||
error(1, "No valid compiler for {0} satisfies '%{1}'", Package, Compiler)
|
||||
:- node(Package),
|
||||
node_compiler_version_satisfies(Package, Compiler, ":"),
|
||||
C = #count{ Version : node_compiler_version(Package, Compiler, Version), compiler_version_satisfies(Compiler, ":", Version) },
|
||||
C < 1.
|
||||
|
||||
% If the compiler of a node must satisfy a constraint, then its version
|
||||
% must be chosen among the ones that satisfy said constraint
|
||||
1 { node_compiler_version(Package, Compiler, Version)
|
||||
: compiler_version_satisfies(Compiler, Constraint, Version) } 1 :-
|
||||
node_compiler_version_satisfies(Package, Compiler, Constraint),
|
||||
error("Internal error: node compiler version mismatch").
|
||||
error(2, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
|
||||
:- node(Package),
|
||||
node_compiler_version_satisfies(Package, Compiler, Constraint),
|
||||
C = #count{ Version : node_compiler_version(Package, Compiler, Version), compiler_version_satisfies(Compiler, Constraint, Version) },
|
||||
C < 1.
|
||||
|
||||
% If the node is associated with a compiler and the compiler satisfy a constraint, then
|
||||
% the compiler associated with the node satisfy the same constraint
|
||||
@@ -754,11 +853,12 @@ node_compiler_version(Package, Compiler, Version) :- node_compiler_version_set(P
|
||||
% Cannot select a compiler if it is not supported on the OS
|
||||
% Compilers that are explicitly marked as allowed
|
||||
% are excluded from this check
|
||||
:- node_compiler_version(Package, Compiler, Version), node_os(Package, OS),
|
||||
not compiler_supports_os(Compiler, Version, OS),
|
||||
not allow_compiler(Compiler, Version),
|
||||
build(Package),
|
||||
error("No satisfying compiler available is compatible with a satisfying os").
|
||||
error(2, "{0} compiler '%{1}@{2}' incompatible with 'os={3}'", Package, Compiler, Version, OS)
|
||||
:- node_compiler_version(Package, Compiler, Version),
|
||||
node_os(Package, OS),
|
||||
not compiler_supports_os(Compiler, Version, OS),
|
||||
not allow_compiler(Compiler, Version),
|
||||
build(Package).
|
||||
|
||||
% If a package and one of its dependencies don't have the
|
||||
% same compiler there's a mismatch.
|
||||
@@ -851,7 +951,7 @@ no_flags(Package, FlagType)
|
||||
%-----------------------------------------------------------------------------
|
||||
% the solver is free to choose at most one installed hash for each package
|
||||
{ hash(Package, Hash) : installed_hash(Package, Hash) } 1
|
||||
:- node(Package), error("Internal error: package must resolve to at most one hash").
|
||||
:- node(Package), internal_error("Package must resolve to at most one hash").
|
||||
|
||||
% you can't choose an installed hash for a dev spec
|
||||
:- hash(Package, Hash), variant_value(Package, "dev_path", _).
|
||||
@@ -862,25 +962,72 @@ impose(Hash) :- hash(Package, Hash).
|
||||
% if we haven't selected a hash for a package, we'll be building it
|
||||
build(Package) :- not hash(Package, _), node(Package).
|
||||
|
||||
% Minimizing builds is tricky. We want a minimizing criterion
|
||||
|
||||
% because we want to reuse what is available, but
|
||||
% we also want things that are built to stick to *default preferences* from
|
||||
% the package and from the user. We therefore treat built specs differently and apply
|
||||
% a different set of optimization criteria to them. Spack's *first* priority is to
|
||||
% reuse what it *can*, but if it builds something, the built specs will respect
|
||||
% defaults and preferences. This is implemented by bumping the priority of optimization
|
||||
% criteria for built specs -- so that they take precedence over the otherwise
|
||||
% topmost-priority criterion to reuse what is installed.
|
||||
% Minimizing builds is tricky. We want a minimizing criterion because we want to reuse
|
||||
% what is available, but we also want things that are built to stick to *default
|
||||
% preferences* from the package and from the user. We therefore treat built specs
|
||||
% differently and apply a different set of optimization criteria to them. Spack's first
|
||||
% priority is to reuse what it can, but if it builds something, the built specs will
|
||||
% respect defaults and preferences.
|
||||
%
|
||||
% This is implemented by bumping the priority of optimization criteria for built specs
|
||||
% -- so that they take precedence over the otherwise topmost-priority criterion to reuse
|
||||
% what is installed.
|
||||
%
|
||||
% If the user explicitly asks for *minimal* installs, we don't differentiate between
|
||||
% built and reused specs - the top priority is just minimizing builds.
|
||||
%
|
||||
% The priority ranges are:
|
||||
% 200+ Shifted priorities for build nodes; correspond to priorities 0 - 99.
|
||||
% 100 - 199 Unshifted priorities. Currently only includes minimizing #builds.
|
||||
% 0 - 99 Priorities for non-built nodes.
|
||||
build_priority(Package, 200) :- build(Package), node(Package).
|
||||
build_priority(Package, 0) :- not build(Package), node(Package).
|
||||
build_priority(Package, 200) :- node(Package), build(Package), optimize_for_reuse(),
|
||||
not minimal_installs().
|
||||
build_priority(Package, 0) :- node(Package), not build(Package), optimize_for_reuse().
|
||||
|
||||
% Don't adjust build priorities if reusing, or if doing minimal installs
|
||||
% With minimal, minimizing builds is the TOP priority
|
||||
build_priority(Package, 0) :- node(Package), not optimize_for_reuse().
|
||||
build_priority(Package, 0) :- node(Package), minimal_installs().
|
||||
|
||||
% Minimize builds with both --reuse and with --minimal
|
||||
minimize_builds() :- optimize_for_reuse().
|
||||
minimize_builds() :- minimal_installs().
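The net effect of these `build_priority` rules is a simple shift of the per-criterion priorities; the helper below restates it in Python purely as an illustration (the function and its arguments are invented here, the ranges come from the comment above).

```python
# Illustrative restatement of the build_priority rules above.
def effective_priority(criterion_priority, is_build, reuse, minimal):
    """criterion_priority is the 0-99 value attached to an opt_criterion.

    With --reuse (and without --minimal) the criteria of built nodes are
    shifted by +200 so their defaults outrank the reuse criterion at 100;
    with --minimal (or without --reuse) nothing is shifted, so minimizing
    the number of builds at priority 100 stays on top.
    """
    if reuse and not minimal and is_build:
        return criterion_priority + 200
    return criterion_priority

print(effective_priority(55, is_build=True, reuse=True, minimal=False))  # 255
print(effective_priority(55, is_build=True, reuse=True, minimal=True))   # 55
```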
|
||||
|
||||
% don't assign versions from installed packages unless reuse is enabled
|
||||
% NOTE: that "installed" means the declared version was only included because
|
||||
% that package happens to be installed, NOT because it was asked for on the
|
||||
% command line. If the user specifies a hash, the origin will be "spec".
|
||||
%
|
||||
% TODO: There's a slight inconsistency with this: if the user concretizes
|
||||
% and installs `foo ^bar`, for some build dependency `bar`, and then later
|
||||
% does a `spack install --fresh foo ^bar/abcde` (i.e., the hash of `bar`), it
|
||||
% currently *won't* force versions for `bar`'s build dependencies -- `--fresh`
|
||||
% will instead build the latest bar. When we actually include transitive
|
||||
% build deps in the solve, consider using them as a preference to resolve this.
|
||||
:- version(Package, Version),
|
||||
version_weight(Package, Weight),
|
||||
version_declared(Package, Version, Weight, "installed"),
|
||||
not optimize_for_reuse().
|
||||
|
||||
#defined installed_hash/2.
|
||||
#defined minimal_installs/0.
|
||||
|
||||
%-----------------------------------------------------------------
|
||||
% Optimization to avoid errors
|
||||
%-----------------------------------------------------------------
|
||||
% Some errors are handled as rules instead of constraints because
|
||||
% it allows us to explain why something failed. Here we optimize
|
||||
% HEAVILY against the facts generated by those rules.
|
||||
#minimize{ 0@1000: #true}.
|
||||
#minimize{ 0@1001: #true}.
|
||||
#minimize{ 0@1002: #true}.
|
||||
|
||||
#minimize{ 1000@1000+Priority,Msg: error(Priority, Msg) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1: error(Priority, Msg, Arg1) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2: error(Priority, Msg, Arg1, Arg2) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3: error(Priority, Msg, Arg1, Arg2, Arg3) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3,Arg4: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3,Arg4,Arg5: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4, Arg5) }.
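The whole pattern, choose freely, derive `error/N` facts when the choice is malformed, and then weigh those facts so heavily that they only survive when no error-free model exists, can be reproduced in a few lines with the clingo Python API. The program below is a self-contained toy, not Spack's; only the shape of the choice rule, the error rules, and the `#minimize` directive is taken from the statements above.

```python
# Toy reproduction of the "optimize heavily against errors" pattern.
import clingo

PROGRAM = """
{ version(a, 1); version(a, 2) }.
error(2, "no version chosen")        :- not version(a, _).
error(2, "multiple versions chosen") :- version(a, V1), version(a, V2), V1 < V2.
#minimize { 1000@1002, M : error(2, M) }.
"""

ctl = clingo.Control()
ctl.add("base", [], PROGRAM)
ctl.ground([("base", [])])

def on_model(model):
    print("model:", [str(s) for s in model.symbols(atoms=True)], "cost:", model.cost)

ctl.solve(on_model=on_model)
# The optimal model picks exactly one version(a, _) and derives no error/2 atom,
# so its cost is 0; models that contain an error atom cost 1000 at priority 1002.
```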
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
% How to optimize the spec (high to low priority)
|
||||
@@ -893,7 +1040,7 @@ build_priority(Package, 0) :- not build(Package), node(Package).
|
||||
% Try hard to reuse installed packages (i.e., minimize the number built)
|
||||
opt_criterion(100, "number of packages to build (vs. reuse)").
|
||||
#minimize { 0@100: #true }.
|
||||
#minimize { 1@100,Package : build(Package), optimize_for_reuse() }.
|
||||
#minimize { 1@100,Package : build(Package), minimize_builds() }.
|
||||
#defined optimize_for_reuse/0.
|
||||
|
||||
% Minimize the number of deprecated versions being used
|
||||
@@ -1061,3 +1208,11 @@ opt_criterion(1, "non-preferred targets").
|
||||
#heuristic variant_value(Package, Variant, Value) : variant_default_value(Package, Variant, Value), node(Package). [10, true]
|
||||
#heuristic provider(Package, Virtual) : possible_provider_weight(Package, Virtual, 0, _), virtual_node(Virtual). [10, true]
|
||||
#heuristic node(Package) : possible_provider_weight(Package, Virtual, 0, _), virtual_node(Virtual). [10, true]
|
||||
|
||||
%-----------
|
||||
% Notes
|
||||
%-----------
|
||||
|
||||
% [1] Clingo ensures a total ordering among all atoms. We rely on that total ordering
|
||||
% to reduce symmetry in the solution by checking `<` instead of `!=` in symmetric
|
||||
% cases. These choices are made without loss of generality.
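A one-line illustration of note [1]: with `!=` each offending pair of values is derived twice (once per order), while `<` keeps a single representative, which is all the error rules need.

```python
# Tiny illustration of the symmetry-breaking note above (made-up values).
values = ["1.0", "1.2"]
print([(a, b) for a in values for b in values if a != b])  # both orders appear
print([(a, b) for a in values for b in values if a < b])   # one representative
```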
|
||||
|
||||
@@ -34,3 +34,13 @@
|
||||
|
||||
% deprecated packages
|
||||
#show deprecated/2.
|
||||
|
||||
% error types
|
||||
#show error/2.
|
||||
#show error/3.
|
||||
#show error/4.
|
||||
#show error/5.
|
||||
#show error/6.
|
||||
#show error/7.
|
||||
|
||||
% debug
|
||||
|
||||