Compare commits
Comparing v0.17.2.2- ... my_branch (424 commits)
Commit list: 424 commits (Author / SHA1 / Date).
.github/workflows/bootstrap.yml (vendored): 123 changed lines
@@ -31,14 +31,20 @@ jobs:
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison bison-devel libstdc++-static
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap clingo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
@@ -61,22 +67,20 @@ jobs:
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
cmake bison
|
||||
- name: Work around CVE-2022-24765
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
|
||||
# a breaking behavior. See:
|
||||
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
|
||||
# - https://github.com/actions/checkout/issues/760
|
||||
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd -m spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap clingo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
@@ -98,22 +102,20 @@ jobs:
|
||||
apt-get install -y \
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Work around CVE-2022-24765
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
|
||||
# a breaking behavior. See:
|
||||
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
|
||||
# - https://github.com/actions/checkout/issues/760
|
||||
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd -m spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap clingo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
@@ -121,7 +123,6 @@ jobs:
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
|
||||
opensuse-clingo-sources:
|
||||
runs-on: ubuntu-latest
|
||||
container: "opensuse/leap:latest"
|
||||
@@ -134,9 +135,12 @@ jobs:
|
||||
bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup repo
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
@@ -154,7 +158,8 @@ jobs:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
brew install cmake bison@2.7 tree
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -173,8 +178,9 @@ jobs:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
brew install tree
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Bootstrap clingo
|
||||
@@ -190,11 +196,12 @@ jobs:
|
||||
matrix:
|
||||
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Setup repo and non-root user
|
||||
- name: Setup repo
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
@@ -218,22 +225,20 @@ jobs:
|
||||
apt-get install -y \
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Work around CVE-2022-24765
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
|
||||
# a breaking behavior. See:
|
||||
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
|
||||
# - https://github.com/actions/checkout/issues/760
|
||||
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
|
||||
- name: Setup repo and non-root user
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd -m spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap GnuPG
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
@@ -255,22 +260,20 @@ jobs:
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
gawk
|
||||
- name: Work around CVE-2022-24765
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# Apparently Ubuntu patched git v2.25.1 with a security patch that introduces
|
||||
# a breaking behavior. See:
|
||||
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
|
||||
# - https://github.com/actions/checkout/issues/760
|
||||
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
|
||||
- name: Setup repo and non-root user
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
git fetch --unshallow
|
||||
. .github/workflows/setup_git.sh
|
||||
useradd -m spack-test
|
||||
chown -R spack-test .
|
||||
- name: Bootstrap GnuPG
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
@@ -288,7 +291,8 @@ jobs:
|
||||
brew install tree
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -304,7 +308,8 @@ jobs:
|
||||
brew install gawk tree
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
|
||||
- name: Checkout
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -312,3 +317,11 @@ jobs:
|
||||
spack bootstrap untrust github-actions-v0.2
|
||||
spack -d gpg list
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
|
||||
# [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
|
||||
# introduce breaking behaviorso we have to set `safe.directory` in gitconfig ourselves.
|
||||
# See:
|
||||
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
|
||||
# - https://github.com/actions/checkout/issues/760
|
||||
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
|
||||
|
||||
.github/workflows/build-containers.yml (vendored): 12 changed lines
@@ -45,7 +45,7 @@ jobs:
|
||||
name: Build ${{ matrix.dockerfile[0] }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
|
||||
- name: Set Container Tag Normal (Nightly)
|
||||
run: |
|
||||
@@ -81,13 +81,13 @@ jobs:
|
||||
path: dockerfiles
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # @v1
|
||||
uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # @v1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # @v1
|
||||
uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6 # @v1
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7 # @v1
|
||||
uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@@ -95,13 +95,13 @@ jobs:
|
||||
|
||||
- name: Log in to DockerHub
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7 # @v1
|
||||
uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
|
||||
uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a # @v2
|
||||
uses: docker/build-push-action@e551b19e49efd4e98792db7592c17c09b89db8d8 # @v2
|
||||
with:
|
||||
context: dockerfiles/${{ matrix.dockerfile[0] }}
|
||||
platforms: ${{ matrix.dockerfile[1] }}
|
||||
|
||||
.github/workflows/macos_python.yml (vendored): 6 changed lines
@@ -24,7 +24,7 @@ jobs:
|
||||
name: gcc with clang
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
@@ -39,7 +39,7 @@ jobs:
|
||||
runs-on: macos-latest
|
||||
timeout-minutes: 700
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
@@ -52,7 +52,7 @@ jobs:
|
||||
name: scipy, mpl, pd
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
|
||||
.github/workflows/unit_tests.yaml (vendored): 28 changed lines
@@ -15,7 +15,7 @@ jobs:
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
@@ -31,7 +31,7 @@ jobs:
|
||||
style:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
packages: ${{ steps.filter.outputs.packages }}
|
||||
with_coverage: ${{ steps.coverage.outputs.with_coverage }}
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
with:
|
||||
fetch-depth: 0
|
||||
@@ -106,7 +106,7 @@ jobs:
|
||||
- python-version: 3.9
|
||||
concretizer: original
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
@@ -162,7 +162,7 @@ jobs:
|
||||
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,${{ matrix.concretizer }}
|
||||
@@ -171,7 +171,7 @@ jobs:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
@@ -200,7 +200,7 @@ jobs:
|
||||
COVERAGE: true
|
||||
run: |
|
||||
share/spack/qa/run-shell-tests
|
||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: shelltests,linux
|
||||
@@ -218,7 +218,7 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -237,7 +237,7 @@ jobs:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
@@ -274,7 +274,7 @@ jobs:
|
||||
SPACK_TEST_SOLVER: clingo
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,clingo
|
||||
@@ -286,7 +286,7 @@ jobs:
|
||||
matrix:
|
||||
python-version: [3.8]
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
@@ -320,7 +320,7 @@ jobs:
|
||||
echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
|
||||
$(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
|
||||
fi
|
||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
files: ./coverage.xml
|
||||
@@ -331,7 +331,7 @@ jobs:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
|
||||
with:
|
||||
python-version: '3.10'
|
||||
@@ -350,7 +350,7 @@ jobs:
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
$(which spack) audit packages
|
||||
- uses: codecov/codecov-action@e3c560433a6cc60aec8812599b7844a7b4fa0d71 # @v2.1.0
|
||||
- uses: codecov/codecov-action@81cd2dc8148241f03f5839d295e000b8f761e378 # @v2.1.0
|
||||
if: ${{ needs.changes.outputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,linux,audits
|
||||
|
||||
.github/workflows/windows_python.yml (vendored): 12 changed lines
@@ -17,7 +17,7 @@ jobs:
|
||||
validate:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
with:
|
||||
python-version: 3.9
|
||||
@@ -33,7 +33,7 @@ jobs:
|
||||
style:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
@@ -55,7 +55,7 @@ jobs:
|
||||
needs: [ validate, style ]
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
@@ -75,7 +75,7 @@ jobs:
|
||||
needs: [ validate, style ]
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
@@ -95,7 +95,7 @@ jobs:
|
||||
needs: [ validate, style ]
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
@@ -120,7 +120,7 @@ jobs:
|
||||
git config --global core.symlinks false
|
||||
shell:
|
||||
powershell
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
|
||||
|
||||
@@ -14,4 +14,17 @@ concretizer:
  # concretizing specs. If `true`, we'll try to use as many installs/binaries
  # as possible, rather than building. If `false`, we'll always give you a fresh
  # concretization.
  reuse: false
  reuse: true
  # Options that tune which targets are considered for concretization. The
  # concretization process is very sensitive to the number targets, and the time
  # needed to reach a solution increases noticeably with the number of targets
  # considered.
  targets:
    # Determine whether we want to target specific or generic microarchitectures.
    # An example of the first kind might be for instance "skylake" or "bulldozer",
    # while generic microarchitectures are for instance "aarch64" or "x86_64_v4".
    granularity: microarchitectures
    # If "false" allow targets that are incompatible with the current host (for
    # instance concretize with target "icelake" while running on "haswell").
    # If "true" only allow targets that are compatible with the host.
    host_compatible: true

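For readers who want to inspect these settings programmatically, here is a minimal stand-alone sketch that parses a `concretizer.yaml`-style document with PyYAML and reports the fields shown in the hunk above. This is illustrative only; inside Spack itself you would query its configuration system (for example `spack config get concretizer`) rather than re-parse the file by hand.

```python
import yaml  # PyYAML

# Example document mirroring the defaults shown in the hunk above.
CONCRETIZER_YAML = """
concretizer:
  reuse: true
  targets:
    granularity: microarchitectures
    host_compatible: true
"""

settings = yaml.safe_load(CONCRETIZER_YAML)["concretizer"]
print("reuse:", settings["reuse"])
print("target granularity:", settings["targets"]["granularity"])
print("host compatible only:", settings["targets"]["host_compatible"])
```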
@@ -35,7 +35,8 @@ packages:
    jpeg: [libjpeg-turbo, libjpeg]
    lapack: [openblas, amdlibflame]
    libllvm: [llvm, llvm-amdgpu]
    lua-lang: [lua, lua-luajit]
    lua-lang: [lua, lua-luajit-openresty, lua-luajit]
    luajit: [lua-luajit-openresty, lua-luajit]
    mariadb-client: [mariadb-c-client, mariadb]
    mkl: [intel-mkl]
    mpe: [mpe2]

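Each entry above maps a virtual package (for example `lua-lang` or `lapack`) to the concrete packages that can provide it, with earlier entries generally preferred. A small stand-alone sketch of reading such a mapping with PyYAML; the snippet and helper name are illustrative, not Spack's own provider-resolution code, which goes through its configuration and concretizer machinery.

```python
import yaml  # PyYAML

# A fragment shaped like the defaults hunk above.
PACKAGES_YAML = """
packages:
  all:
    providers:
      lua-lang: [lua, lua-luajit-openresty, lua-luajit]
      lapack: [openblas, amdlibflame]
"""


def preferred_provider(config_text, virtual):
    """Return the first (most preferred) provider listed for a virtual package."""
    providers = yaml.safe_load(config_text)["packages"]["all"]["providers"]
    return providers[virtual][0]


print(preferred_provider(PACKAGES_YAML, "lua-lang"))  # -> lua
```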
@@ -192,32 +192,32 @@ you can use them to customize an installation in :ref:`sec-specs`.
Reusing installed dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. warning::

   By default, when you run ``spack install``, Spack tries hard to reuse existing installations
   as dependencies, either from a local store or from remote buildcaches if configured.
   This minimizes unwanted rebuilds of common dependencies, in particular if
   you update Spack frequently.

The ``--reuse`` option described here will become the default installation
method in the next Spack version, and you will be able to get the current
behavior by using ``spack install --fresh``.

By default, when you run ``spack install``, Spack tries to build a new
version of the package you asked for, along with updated versions of
its dependencies. This gets you the latest versions and configurations,
but it can result in unwanted rebuilds if you update Spack frequently.

If you want Spack to try hard to reuse existing installations as dependencies,
you can add the ``--reuse`` option:
In case you want the latest versions and configurations to be installed instead,
you can add the ``--fresh`` option:

.. code-block:: console

   $ spack install --reuse mpich
   $ spack install --fresh mpich

This will not do anything if ``mpich`` is already installed. If ``mpich``
is not installed, but dependencies like ``hwloc`` and ``libfabric`` are,
the ``mpich`` will be build with the installed versions, if possible.
You can use the :ref:`spack spec -I <cmd-spack-spec>` command to see what
Reusing installations in this mode is "accidental", and happening only if
there's a match between existing installations and what Spack would have installed
anyhow.

You can use the ``spack spec -I mpich`` command to see what
will be reused and what will be built before you install.

You can configure Spack to use the ``--reuse`` behavior by default in
``concretizer.yaml``.
You can configure Spack to use the ``--fresh`` behavior by default in
``concretizer.yaml``:

.. code-block:: yaml

   concretizer:
     reuse: false

.. _cmd-spack-uninstall:

@@ -219,33 +219,65 @@ Concretizer options
|
||||
but you can also use ``concretizer.yaml`` to customize aspects of the
|
||||
algorithm it uses to select the dependencies you install:
|
||||
|
||||
.. _code-block: yaml
|
||||
.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
|
||||
:language: yaml
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Reuse already installed packages
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
|
||||
whether it will do a "fresh" installation and prefer the latest settings from
|
||||
``package.py`` files and ``packages.yaml`` (``false``).
|
||||
You can use:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
% spack install --reuse <spec>
|
||||
|
||||
to enable reuse for a single installation, and you can use:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
spack install --fresh <spec>
|
||||
|
||||
to do a fresh install if ``reuse`` is enabled by default.
|
||||
``reuse: true`` is the default.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Selection of the target microarchitectures
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The options under the ``targets`` attribute control which targets are considered during a solve.
|
||||
Currently the options in this section are only configurable from the ``concretization.yaml`` file
|
||||
and there are no corresponding command line arguments to enable them for a single solve.
|
||||
|
||||
The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
|
||||
If set to:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
concretizer:
|
||||
# Whether to consider installed packages or packages from buildcaches when
|
||||
# concretizing specs. If `true`, we'll try to use as many installs/binaries
|
||||
# as possible, rather than building. If `false`, we'll always give you a fresh
|
||||
# concretization.
|
||||
reuse: false
|
||||
targets:
|
||||
granularity: microarchitectures
|
||||
|
||||
^^^^^^^^^^^^^^^^
|
||||
``reuse``
|
||||
^^^^^^^^^^^^^^^^
|
||||
Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
|
||||
compatibility. If instead the option is set to:
|
||||
|
||||
This controls whether Spack will prefer to use installed packages (``true``), or
|
||||
whether it will do a "fresh" installation and prefer the latest settings from
|
||||
``package.py`` files and ``packages.yaml`` (``false``). .
|
||||
.. code-block:: yaml
|
||||
|
||||
You can use ``spack install --reuse`` to enable reuse for a single installation,
|
||||
and you can use ``spack install --fresh`` to do a fresh install if ``reuse`` is
|
||||
enabled by default.
|
||||
concretizer:
|
||||
targets:
|
||||
granularity: generic
|
||||
|
||||
.. note::
|
||||
|
||||
``reuse: false`` is the current default, but ``reuse: true`` will be the default
|
||||
in the next Spack release. You will still be able to use ``spack install --fresh``
|
||||
to get the old behavior.
|
||||
Spack will consider only generic microarchitectures. For instance, when running on an
|
||||
Haswell node, Spack will consider ``haswell`` as the best target in the former case and
|
||||
``x86_64_v3`` as the best target in the latter case.
|
||||
|
||||
The ``host_compatible`` option is a Boolean option that determines whether or not the
|
||||
microarchitectures considered during the solve are constrained to be compatible with the
|
||||
host Spack is currently running on. For instance, if this option is set to ``true``, a
|
||||
user cannot concretize for ``target=icelake`` while running on an Haswell node.
|
||||
|
||||
.. _package-preferences:
|
||||
|
||||
|
||||
@@ -47,6 +47,7 @@ on these ideas for each distinct build system that Spack supports:
   :maxdepth: 1
   :caption: Language-specific

   build_systems/luapackage
   build_systems/octavepackage
   build_systems/perlpackage
   build_systems/pythonpackage

lib/spack/docs/build_systems/luapackage.rst (new file): 105 lines
@@ -0,0 +1,105 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _luapackage:

------------
LuaPackage
------------

LuaPackage is a helper for the common case of Lua packages that provide
a rockspec file. This is not meant to take a rock archive, but to build
a source archive or repository that provides a rockspec, which should cover
most lua packages. In the case a Lua package builds by Make rather than
luarocks, prefer MakefilePackage.

^^^^^^
Phases
^^^^^^

The ``LuaPackage`` base class comes with the following phases:

#. ``unpack`` - if using a rock, unpacks the rock and moves into the source directory
#. ``preprocess`` - adjust sources or rockspec to fix build
#. ``install`` - install the project

By default, these phases run:

.. code-block:: console

   # If the archive is a source rock
   $ luarocks unpack <archive>.src.rock
   $ # preprocess is a noop by default
   $ luarocks make <name>.rockspec


Any of these phases can be overridden in your package as necessary.

^^^^^^^^^^^^^^^
Important files
^^^^^^^^^^^^^^^

Packages that use the Lua/LuaRocks build system can be identified by the
presence of a ``*.rockspec`` file in their sourcetree, or can be fetched as
a source rock archive (``.src.rock``). This file declares things like build
instructions and dependencies, the ``.src.rock`` also contains all code.

It is common for the rockspec file to list the lua version required in
a dependency. The LuaPackage class adds appropriate dependencies on a Lua
implementation, but it is a good idea to specify the version required with
a ``depends_on`` statement. The block normally will be a table definition like
this:

.. code-block:: lua

   dependencies = {
      "lua >= 5.1",
   }

The LuaPackage class supports source repositories and archives containing
a rockspec and directly downloading source rock files. It *does not* support
downloading dependencies listed inside a rockspec, and thus does not support
directly downloading a rockspec as an archive.

^^^^^^^^^^^^^^^^^^^^^^^^^
Build system dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^

All base dependencies are added by the build system, but LuaRocks is run to
avoid downloading extra Lua dependencies during build. If the package needs
Lua libraries outside the standard set, they should be added as dependencies.

To specify a Lua version constraint but allow all lua implementations, prefer
to use ``depends_on("lua-lang@5.1:5.1.99")`` to express any 5.1 compatible
version. If the package requires LuaJit rather than Lua,
a ``depends_on("luajit")`` should be used to ensure a LuaJit distribution is
used instead of the Lua interpreter. Alternately, if only interpreted Lua will
work ``depends_on("lua")`` will express that.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to luarocks make
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If you need to pass any arguments to the ``luarocks make`` call, you can
override the ``luarocks_args`` method like so:

.. code-block:: python

   def luarocks_args(self):
       return ['flag1', 'flag2']

One common use of this is to override warnings or flags for newer compilers, as in:

.. code-block:: python

   def luarocks_args(self):
       return ["CFLAGS='-Wno-error=implicit-function-declaration'"]

^^^^^^^^^^^^^^^^^^^^^^
External documentation
^^^^^^^^^^^^^^^^^^^^^^

For more information on the LuaRocks build system, see:
https://luarocks.org/

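To make the recipe shape concrete, here is a hedged, hypothetical ``package.py`` following the LuaPackage conventions documented above. The package name, homepage, URL, version, and checksum are placeholders, not a real Spack recipe, and the import line reflects the recipe style of this Spack era.

```python
# Hypothetical example recipe; name, URL, version, and sha256 are placeholders.
from spack import *


class ExampleRock(LuaPackage):
    """Illustrative Lua package built from a source archive that ships a rockspec."""

    homepage = "https://example.org/example-rock"
    url = "https://example.org/example-rock/archive/v1.0.0.tar.gz"

    version('1.0.0', sha256='0000000000000000000000000000000000000000000000000000000000000000')

    # The rockspec asks for "lua >= 5.1"; mirror that with an explicit constraint
    # while still allowing any Lua implementation (interpreter or LuaJIT).
    depends_on('lua-lang@5.1:5.1.99')

    def luarocks_args(self):
        # Silence a warning that newer compilers promote to an error.
        return ["CFLAGS='-Wno-error=implicit-function-declaration'"]
```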
@@ -95,7 +95,7 @@ class of your package. For example, you can add it to your
# Set up the hip macros needed by the build
args.extend([
'-DENABLE_HIP=ON',
'-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix])
'-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)])
rocm_archs = spec.variants['amdgpu_target'].value
if 'none' not in rocm_archs:
args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'

@@ -23,7 +23,10 @@
import sys
from glob import glob

from docutils.statemachine import StringList
from sphinx.domains.python import PythonDomain
from sphinx.ext.apidoc import main as sphinx_apidoc
from sphinx.parsers import RSTParser

# -- Spack customizations -----------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
@@ -82,9 +85,6 @@
#
# Disable duplicate cross-reference warnings.
#
from sphinx.domains.python import PythonDomain


class PatchedPythonDomain(PythonDomain):
    def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
        if 'refspecific' in node:
@@ -92,8 +92,20 @@ def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
        return super(PatchedPythonDomain, self).resolve_xref(
            env, fromdocname, builder, typ, target, node, contnode)

#
# Disable tabs to space expansion in code blocks
# since Makefiles require tabs.
#
class NoTabExpansionRSTParser(RSTParser):
    def parse(self, inputstring, document):
        if isinstance(inputstring, str):
            lines = inputstring.splitlines()
            inputstring = StringList(lines, document.current_source)
        super().parse(inputstring, document)

def setup(sphinx):
    sphinx.add_domain(PatchedPythonDomain, override=True)
    sphinx.add_source_parser(NoTabExpansionRSTParser, override=True)

# -- General configuration -----------------------------------------------------

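The parser added above exists because the default reST parsing path normalizes whitespace, which silently breaks Makefile snippets quoted in the docs (Make requires recipe lines to begin with a literal tab). A small stand-alone sketch of the difference, in plain Python and independent of Sphinx; the example snippet is made up for illustration.

```python
# A Makefile recipe line must begin with a literal tab character.
snippet = "all:\n\tgcc -o demo demo.c\n"

# Roughly what tab expansion does to the snippet during a
# whitespace-normalizing parse: the tab becomes spaces.
expanded = snippet.expandtabs(8)

# Splitting into lines without expanding tabs keeps the recipe valid,
# which is what NoTabExpansionRSTParser relies on.
preserved = snippet.splitlines()

print(repr(expanded.splitlines()[1]))  # '        gcc -o demo demo.c'  (tab lost)
print(repr(preserved[1]))              # '\tgcc -o demo demo.c'        (tab kept)
```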
@@ -349,6 +349,24 @@ If the Environment has been concretized, Spack will install the
|
||||
concretized specs. Otherwise, ``spack install`` will first concretize
|
||||
the Environment and then install the concretized specs.
|
||||
|
||||
.. note::
|
||||
|
||||
Every ``spack install`` process builds one package at a time with multiple build
|
||||
jobs, controlled by the ``-j`` flag and the ``config:build_jobs`` option
|
||||
(see :ref:`build-jobs`). To speed up environment builds further, independent
|
||||
packages can be installed in parallel by launching more Spack instances. For
|
||||
example, the following will build at most four packages in parallel using
|
||||
three background jobs:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
[myenv]$ spack install & spack install & spack install & spack install
|
||||
|
||||
Another option is to generate a ``Makefile`` and run ``make -j<N>`` to control
|
||||
the number of parallel install processes. See :ref:`env-generate-depfile`
|
||||
for details.
|
||||
|
||||
|
||||
As it installs, ``spack install`` creates symbolic links in the
|
||||
``logs/`` directory in the Environment, allowing for easy inspection
|
||||
of build logs related to that environment. The ``spack install``
|
||||
@@ -910,3 +928,93 @@ environment.
|
||||
|
||||
The ``spack env deactivate`` command will remove the default view of
|
||||
the environment from the user's path.
|
||||
|
||||
|
||||
.. _env-generate-depfile:
|
||||
|
||||
|
||||
------------------------------------------
|
||||
Generating Depfiles from Environments
|
||||
------------------------------------------
|
||||
|
||||
Spack can generate ``Makefile``\s to make it easier to build multiple
|
||||
packages in an environment in parallel. Generated ``Makefile``\s expose
|
||||
targets that can be included in existing ``Makefile``\s, to allow
|
||||
other targets to depend on the environment installation.
|
||||
|
||||
A typical workflow is as follows:
|
||||
|
||||
.. code:: console
|
||||
|
||||
spack env create -d .
|
||||
spack -e . add perl
|
||||
spack -e . concretize
|
||||
spack -e . env depfile > Makefile
|
||||
make -j64
|
||||
|
||||
This generates a ``Makefile`` from a concretized environment in the
|
||||
current working directory, and ``make -j64`` installs the environment,
|
||||
exploiting parallelism across packages as much as possible. Spack
|
||||
respects the Make jobserver and forwards it to the build environment
|
||||
of packages, meaning that a single ``-j`` flag is enough to control the
|
||||
load, even when packages are built in parallel.
|
||||
|
||||
By default the following phony convenience targets are available:
|
||||
|
||||
- ``make all``: installs the environment (default target);
|
||||
- ``make fetch-all``: only fetch sources of all packages;
|
||||
- ``make clean``: cleans files used by make, but does not uninstall packages.
|
||||
|
||||
.. tip::
|
||||
|
||||
GNU Make version 4.3 and above have great support for output synchronization
|
||||
through the ``-O`` and ``--output-sync`` flags, which ensure that output is
|
||||
printed orderly per package install. To get synchronized output with colors,
|
||||
use ``make -j<N> SPACK_COLOR=always --output-sync=recurse``.
|
||||
|
||||
The following advanced example shows how generated targets can be used in a
|
||||
``Makefile``:
|
||||
|
||||
.. code:: Makefile
|
||||
|
||||
SPACK ?= spack
|
||||
|
||||
.PHONY: all clean fetch env
|
||||
|
||||
all: env
|
||||
|
||||
spack.lock: spack.yaml
|
||||
$(SPACK) -e . concretize -f
|
||||
|
||||
env.mk: spack.lock
|
||||
$(SPACK) -e . env depfile -o $@ --make-target-prefix spack
|
||||
|
||||
fetch: spack/fetch
|
||||
$(info Environment fetched!)
|
||||
|
||||
env: spack/env
|
||||
$(info Environment installed!)
|
||||
|
||||
clean:
|
||||
rm -rf spack.lock env.mk spack/
|
||||
|
||||
ifeq (,$(filter clean,$(MAKECMDGOALS)))
|
||||
include env.mk
|
||||
endif
|
||||
|
||||
When ``make`` is invoked, it first "remakes" the missing include ``env.mk``
|
||||
from its rule, which triggers concretization. When done, the generated targets
|
||||
``spack/fetch`` and ``spack/env`` are available. In the above
|
||||
example, the ``env`` target uses the latter as a prerequisite, meaning
|
||||
that it can make use of the installed packages in its commands.
|
||||
|
||||
As it is typically undesirable to remake ``env.mk`` as part of ``make clean``,
|
||||
the include is conditional.
|
||||
|
||||
.. note::
|
||||
|
||||
When including generated ``Makefile``\s, it is important to use
|
||||
the ``--make-target-prefix`` flag and use the non-phony targets
|
||||
``<target-prefix>/env`` and ``<target-prefix>/fetch`` as
|
||||
prerequisites, instead of the phony targets ``<target-prefix>/all``
|
||||
and ``<target-prefix>/fetch-all`` respectively.
|
||||
@@ -64,6 +64,7 @@
|
||||
'is_exe',
|
||||
'join_path',
|
||||
'last_modification_time_recursive',
|
||||
'library_extensions',
|
||||
'mkdirp',
|
||||
'partition_path',
|
||||
'prefixes',
|
||||
@@ -109,12 +110,15 @@ def path_contains_subdirectory(path, root):
|
||||
return norm_path.startswith(norm_root)
|
||||
|
||||
|
||||
#: This generates the library filenames that may appear on any OS.
|
||||
library_extensions = ['a', 'la', 'so', 'tbd', 'dylib']
|
||||
|
||||
|
||||
def possible_library_filenames(library_names):
|
||||
"""Given a collection of library names like 'libfoo', generate the set of
|
||||
library filenames that may be found on the system (e.g. libfoo.so). This
|
||||
generates the library filenames that may appear on any OS.
|
||||
library filenames that may be found on the system (e.g. libfoo.so).
|
||||
"""
|
||||
lib_extensions = ['a', 'la', 'so', 'tbd', 'dylib']
|
||||
lib_extensions = library_extensions
|
||||
return set(
|
||||
'.'.join((lib, extension)) for lib, extension in
|
||||
itertools.product(library_names, lib_extensions))
|
||||
@@ -764,39 +768,36 @@ def __init__(self, inner_exception, outer_exception):
|
||||
|
||||
@contextmanager
|
||||
@system_path_filter
|
||||
def replace_directory_transaction(directory_name, tmp_root=None):
|
||||
"""Moves a directory to a temporary space. If the operations executed
|
||||
within the context manager don't raise an exception, the directory is
|
||||
deleted. If there is an exception, the move is undone.
|
||||
def replace_directory_transaction(directory_name):
|
||||
"""Temporarily renames a directory in the same parent dir. If the operations
|
||||
executed within the context manager don't raise an exception, the renamed directory
|
||||
is deleted. If there is an exception, the move is undone.
|
||||
|
||||
Args:
|
||||
directory_name (path): absolute path of the directory name
|
||||
tmp_root (path): absolute path of the parent directory where to create
|
||||
the temporary
|
||||
|
||||
Returns:
|
||||
temporary directory where ``directory_name`` has been moved
|
||||
"""
|
||||
# Check the input is indeed a directory with absolute path.
|
||||
# Raise before anything is done to avoid moving the wrong directory
|
||||
assert os.path.isdir(directory_name), \
|
||||
'Invalid directory: ' + directory_name
|
||||
assert os.path.isabs(directory_name), \
|
||||
'"directory_name" must contain an absolute path: ' + directory_name
|
||||
directory_name = os.path.abspath(directory_name)
|
||||
assert os.path.isdir(directory_name), 'Not a directory: ' + directory_name
|
||||
|
||||
directory_basename = os.path.basename(directory_name)
|
||||
# Note: directory_name is normalized here, meaning the trailing slash is dropped,
|
||||
# so dirname is the directory's parent not the directory itself.
|
||||
tmpdir = tempfile.mkdtemp(
|
||||
dir=os.path.dirname(directory_name),
|
||||
prefix='.backup')
|
||||
|
||||
if tmp_root is not None:
|
||||
assert os.path.isabs(tmp_root)
|
||||
|
||||
tmp_dir = tempfile.mkdtemp(dir=tmp_root)
|
||||
tty.debug('Temporary directory created [{0}]'.format(tmp_dir))
|
||||
|
||||
shutil.move(src=directory_name, dst=tmp_dir)
|
||||
tty.debug('Directory moved [src={0}, dest={1}]'.format(directory_name, tmp_dir))
|
||||
# We have to jump through hoops to support Windows, since
|
||||
# os.rename(directory_name, tmpdir) errors there.
|
||||
backup_dir = os.path.join(tmpdir, 'backup')
|
||||
os.rename(directory_name, backup_dir)
|
||||
tty.debug('Directory moved [src={0}, dest={1}]'.format(directory_name, backup_dir))
|
||||
|
||||
try:
|
||||
yield tmp_dir
|
||||
yield backup_dir
|
||||
except (Exception, KeyboardInterrupt, SystemExit) as inner_exception:
|
||||
# Try to recover the original directory, if this fails, raise a
|
||||
# composite exception.
|
||||
@@ -804,10 +805,7 @@ def replace_directory_transaction(directory_name, tmp_root=None):
|
||||
# Delete what was there, before copying back the original content
|
||||
if os.path.exists(directory_name):
|
||||
shutil.rmtree(directory_name)
|
||||
shutil.move(
|
||||
src=os.path.join(tmp_dir, directory_basename),
|
||||
dst=os.path.dirname(directory_name)
|
||||
)
|
||||
os.rename(backup_dir, directory_name)
|
||||
except Exception as outer_exception:
|
||||
raise CouldNotRestoreDirectoryBackup(inner_exception, outer_exception)
|
||||
|
||||
@@ -815,8 +813,8 @@ def replace_directory_transaction(directory_name, tmp_root=None):
|
||||
raise
|
||||
else:
|
||||
# Otherwise delete the temporary directory
|
||||
shutil.rmtree(tmp_dir, ignore_errors=True)
|
||||
tty.debug('Temporary directory deleted [{0}]'.format(tmp_dir))
|
||||
shutil.rmtree(tmpdir, ignore_errors=True)
|
||||
tty.debug('Temporary directory deleted [{0}]'.format(tmpdir))
|
||||
|
||||
|
||||
@system_path_filter
|
||||
@@ -1097,7 +1095,32 @@ def visit_directory_tree(root, visitor, rel_path='', depth=0):
|
||||
for f in dir_entries:
|
||||
if sys.version_info >= (3, 5, 0):
|
||||
rel_child = os.path.join(rel_path, f.name)
|
||||
islink, isdir = f.is_symlink(), f.is_dir()
|
||||
islink = f.is_symlink()
|
||||
# On Windows, symlinks to directories are distinct from
|
||||
# symlinks to files, and it is possible to create a
|
||||
# broken symlink to a directory (e.g. using os.symlink
|
||||
# without `target_is_directory=True`), invoking `isdir`
|
||||
# on a symlink on Windows that is broken in this manner
|
||||
# will result in an error. In this case we can work around
|
||||
# the issue by reading the target and resolving the
|
||||
# directory ourselves
|
||||
try:
|
||||
isdir = f.is_dir()
|
||||
except OSError as e:
|
||||
if is_windows and hasattr(e, 'winerror')\
|
||||
and e.winerror == 5 and islink:
|
||||
# if path is a symlink, determine destination and
|
||||
# evaluate file vs directory
|
||||
link_target = resolve_link_target_relative_to_the_link(f)
|
||||
# link_target might be relative but
|
||||
# resolve_link_target_relative_to_the_link
|
||||
# will ensure that if so, that it is relative
|
||||
# to the CWD and therefore
|
||||
# makes sense
|
||||
isdir = os.path.isdir(link_target)
|
||||
else:
|
||||
raise e
|
||||
|
||||
else:
|
||||
rel_child = os.path.join(rel_path, f)
|
||||
lexists, islink, isdir = lexists_islink_isdir(os.path.join(dir, f))
|
||||
@@ -1105,7 +1128,7 @@ def visit_directory_tree(root, visitor, rel_path='', depth=0):
|
||||
continue
|
||||
|
||||
if not isdir:
|
||||
# Handle files
|
||||
# handle files
|
||||
visitor.visit_file(root, rel_child, depth)
|
||||
elif not islink and visitor.before_visit_dir(root, rel_child, depth):
|
||||
# Handle ordinary directories
|
||||
@@ -1180,6 +1203,35 @@ def remove_if_dead_link(path):
|
||||
os.unlink(path)
|
||||
|
||||
|
||||
def readonly_file_handler(ignore_errors=False):
|
||||
# TODO: generate stages etc. with write permissions wherever
|
||||
# so this callback is no-longer required
|
||||
"""
|
||||
Generate callback for shutil.rmtree to handle permissions errors on
|
||||
Windows. Some files may unexpectedly lack write permissions even
|
||||
though they were generated by Spack on behalf of the user (e.g. the
|
||||
stage), so this callback will detect such cases and modify the
|
||||
permissions if that is the issue. For other errors, the fallback
|
||||
is either to raise (if ignore_errors is False) or ignore (if
|
||||
ignore_errors is True). This is only intended for Windows systems
|
||||
and will raise a separate error if it is ever invoked (by accident)
|
||||
on a non-Windows system.
|
||||
"""
|
||||
def error_remove_readonly(func, path, exc):
|
||||
if not is_windows:
|
||||
raise RuntimeError("This method should only be invoked on Windows")
|
||||
excvalue = exc[1]
|
||||
if is_windows and func in (os.rmdir, os.remove, os.unlink) and\
|
||||
excvalue.errno == errno.EACCES:
|
||||
# change the file to be readable,writable,executable: 0777
|
||||
os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
|
||||
# retry
|
||||
func(path)
|
||||
elif not ignore_errors:
|
||||
raise
|
||||
return error_remove_readonly
|
||||
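A minimal usage sketch for the callback above (the stage path is a made-up example):

import shutil

# Retry removal of read-only files on Windows by passing the generated
# handler as rmtree's onerror callback.
shutil.rmtree('C:\\spack-stage\\example-pkg',
              onerror=readonly_file_handler(ignore_errors=True))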
@system_path_filter
|
||||
def remove_linked_tree(path):
|
||||
"""Removes a directory and its contents.
|
||||
@@ -1187,23 +1239,18 @@ def remove_linked_tree(path):
|
||||
If the directory is a symlink, follows the link and removes the real
|
||||
directory before removing the link.
|
||||
|
||||
This method will force-delete files on Windows
|
||||
|
||||
Parameters:
|
||||
path (str): Directory to be removed
|
||||
"""
|
||||
# On windows, cleaning a Git stage can be an issue
|
||||
# as git leaves readonly files that Python handles
|
||||
# poorly on Windows. Remove readonly status and try again
|
||||
def onerror(func, path, exe_info):
|
||||
os.chmod(path, stat.S_IWUSR)
|
||||
try:
|
||||
func(path)
|
||||
except Exception as e:
|
||||
tty.warn(e)
|
||||
pass
|
||||
|
||||
kwargs = {'ignore_errors': True}
|
||||
|
||||
# Windows readonly files cannot be removed by Python
|
||||
# directly.
|
||||
if is_windows:
|
||||
kwargs = {'onerror': onerror}
|
||||
kwargs['ignore_errors'] = False
|
||||
kwargs['onerror'] = readonly_file_handler(ignore_errors=True)
|
||||
|
||||
if os.path.exists(path):
|
||||
if os.path.islink(path):
|
||||
|
||||
@@ -809,19 +809,23 @@ def __enter__(self):
|
||||
def background_reader(reader, echo_writer, _kill):
|
||||
# for each line printed to logfile, read it
|
||||
# if echo: write line to user
|
||||
while True:
|
||||
is_killed = _kill.wait(.1)
|
||||
self.stderr.flush()
|
||||
self.stdout.flush()
|
||||
line = reader.readline()
|
||||
while line:
|
||||
if self.echo:
|
||||
self.echo_writer.write('{0}'.format(line.decode()))
|
||||
self.echo_writer.flush()
|
||||
line = reader.readline()
|
||||
try:
|
||||
while True:
|
||||
is_killed = _kill.wait(.1)
|
||||
# Flush buffered build output to file
|
||||
# stdout/err fds refer to log file
|
||||
self.stderr.flush()
|
||||
self.stdout.flush()
|
||||
|
||||
if is_killed:
|
||||
break
|
||||
line = reader.readline()
|
||||
if self.echo and line:
|
||||
echo_writer.write('{0}'.format(line.decode()))
|
||||
echo_writer.flush()
|
||||
|
||||
if is_killed:
|
||||
break
|
||||
finally:
|
||||
reader.close()
|
||||
|
||||
self._active = True
|
||||
with replace_environment(self.env):
|
||||
@@ -837,7 +841,6 @@ def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
self._ioflag = False
|
||||
else:
|
||||
self.writer.close()
|
||||
self.reader.close()
|
||||
self.echo_writer.flush()
|
||||
self.stdout.flush()
|
||||
self.stderr.flush()
|
||||
@@ -853,10 +856,7 @@ def force_echo(self):
|
||||
if not self._active:
|
||||
raise RuntimeError(
|
||||
"Can't call force_echo() outside log_output region!")
|
||||
try:
|
||||
yield self
|
||||
finally:
|
||||
pass
|
||||
yield
|
||||
|
||||
|
||||
def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
|
||||
|
||||
@@ -276,6 +276,24 @@ def _search_duplicate_specs_in_externals(error_cls):
|
||||
)
|
||||
|
||||
|
||||
@package_directives
|
||||
def _check_build_test_callbacks(pkgs, error_cls):
|
||||
"""Ensure stand-alone test method is not included in build-time callbacks"""
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg = spack.repo.get(pkg_name)
|
||||
test_callbacks = pkg.build_time_test_callbacks
|
||||
|
||||
if test_callbacks and 'test' in test_callbacks:
|
||||
msg = ('{0} package contains "test" method in '
|
||||
'build_time_test_callbacks')
|
||||
instr = ('Remove "test" from: [{0}]'
|
||||
.format(', '.join(test_callbacks)))
|
||||
errors.append(error_cls(msg.format(pkg.name), [instr]))
|
||||
|
||||
return errors
|
||||
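For illustration, a package definition that this audit would flag; the package name is hypothetical and the import path may vary by Spack version:

from spack.package import MakefilePackage  # assumed import path

class Libfoo(MakefilePackage):
    """Hypothetical package."""
    # 'test' is reserved for stand-alone tests, so listing it here would be
    # reported by _check_build_test_callbacks.
    build_time_test_callbacks = ['check', 'test']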
|
||||
|
||||
@package_directives
|
||||
def _check_patch_urls(pkgs, error_cls):
|
||||
"""Ensure that patches fetched from GitHub have stable sha256 hashes."""
|
||||
|
||||
@@ -27,7 +27,6 @@
|
||||
import spack.config as config
|
||||
import spack.database as spack_db
|
||||
import spack.fetch_strategy as fs
|
||||
import spack.hash_types as ht
|
||||
import spack.hooks
|
||||
import spack.hooks.sbang
|
||||
import spack.mirror
|
||||
@@ -182,7 +181,6 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
|
||||
|
||||
for indexed_spec in spec_list:
|
||||
dag_hash = indexed_spec.dag_hash()
|
||||
full_hash = indexed_spec._full_hash
|
||||
|
||||
if dag_hash not in self._mirrors_for_spec:
|
||||
self._mirrors_for_spec[dag_hash] = []
|
||||
@@ -190,11 +188,8 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
|
||||
for entry in self._mirrors_for_spec[dag_hash]:
|
||||
# A binary mirror can only have one spec per DAG hash, so
|
||||
# if we already have an entry under this DAG hash for this
|
||||
# mirror url, we may need to replace the spec associated
|
||||
# with it (but only if it has a different full_hash).
|
||||
# mirror url, we're done.
|
||||
if entry['mirror_url'] == mirror_url:
|
||||
if full_hash and full_hash != entry['spec']._full_hash:
|
||||
entry['spec'] = indexed_spec
|
||||
break
|
||||
else:
|
||||
self._mirrors_for_spec[dag_hash].append({
|
||||
@@ -403,6 +398,11 @@ def _fetch_and_cache_index(self, mirror_url, expect_hash=None):
|
||||
hash_fetch_url = url_util.join(
|
||||
mirror_url, _build_cache_relative_path, 'index.json.hash')
|
||||
|
||||
if not web_util.url_exists(index_fetch_url):
|
||||
# A binary mirror is not required to have an index, so avoid
|
||||
# raising FetchCacheError in that case.
|
||||
return False
|
||||
|
||||
old_cache_key = None
|
||||
fetched_hash = None
|
||||
|
||||
@@ -762,6 +762,62 @@ def sign_tarball(key, force, specfile_path):
|
||||
spack.util.gpg.sign(key, specfile_path, '%s.asc' % specfile_path)
|
||||
|
||||
|
||||
def _fetch_spec_from_mirror(spec_url):
|
||||
s = None
|
||||
tty.debug('fetching {0}'.format(spec_url))
|
||||
_, _, spec_file = web_util.read_from_url(spec_url)
|
||||
spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
|
||||
# Need full spec.json name or this gets confused with index.json.
|
||||
if spec_url.endswith('.json'):
|
||||
s = Spec.from_json(spec_file_contents)
|
||||
elif spec_url.endswith('.yaml'):
|
||||
s = Spec.from_yaml(spec_file_contents)
|
||||
return s
|
||||
|
||||
|
||||
def _read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir):
|
||||
for file_path in file_list:
|
||||
try:
|
||||
s = _fetch_spec_from_mirror(url_util.join(cache_prefix, file_path))
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
tty.error('Error reading specfile: {0}'.format(file_path))
|
||||
tty.error(url_err)
|
||||
|
||||
if s:
|
||||
db.add(s, None)
|
||||
db.mark(s, 'in_buildcache', True)
|
||||
|
||||
# Now generate the index, compute its hash, and push the two files to
|
||||
# the mirror.
|
||||
index_json_path = os.path.join(db_root_dir, 'index.json')
|
||||
with open(index_json_path, 'w') as f:
|
||||
db._write_to_file(f)
|
||||
|
||||
# Read the index back in and compute its hash
|
||||
with open(index_json_path) as f:
|
||||
index_string = f.read()
|
||||
index_hash = compute_hash(index_string)
|
||||
|
||||
# Write the hash out to a local file
|
||||
index_hash_path = os.path.join(db_root_dir, 'index.json.hash')
|
||||
with open(index_hash_path, 'w') as f:
|
||||
f.write(index_hash)
|
||||
|
||||
# Push the index itself
|
||||
web_util.push_to_url(
|
||||
index_json_path,
|
||||
url_util.join(cache_prefix, 'index.json'),
|
||||
keep_original=False,
|
||||
extra_args={'ContentType': 'application/json'})
|
||||
|
||||
# Push the hash
|
||||
web_util.push_to_url(
|
||||
index_hash_path,
|
||||
url_util.join(cache_prefix, 'index.json.hash'),
|
||||
keep_original=False,
|
||||
extra_args={'ContentType': 'text/plain'})
|
||||
|
||||
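The index.json / index.json.hash pairing pushed above can be sketched as follows; compute_hash is assumed here to be a sha256 hex digest of the index contents (an assumption for illustration):

import hashlib

def compute_hash_sketch(data):
    # Assumed behavior of compute_hash: sha256 hex digest of the text.
    return hashlib.sha256(data.encode('utf-8')).hexdigest()

index_string = '{"database": {"installs": {}}}'  # made-up index contents
index_hash = compute_hash_sketch(index_string)
# index.json and index.json.hash are then pushed side by side, so clients
# can detect changes without downloading the full index.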
|
||||
def generate_package_index(cache_prefix):
|
||||
"""Create the build cache index page.
|
||||
|
||||
@@ -790,35 +846,6 @@ def generate_package_index(cache_prefix):
|
||||
tty.debug('Retrieving spec descriptor files from {0} to build index'.format(
|
||||
cache_prefix))
|
||||
|
||||
all_mirror_specs = {}
|
||||
|
||||
for file_path in file_list:
|
||||
try:
|
||||
spec_url = url_util.join(cache_prefix, file_path)
|
||||
tty.debug('fetching {0}'.format(spec_url))
|
||||
_, _, spec_file = web_util.read_from_url(spec_url)
|
||||
spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
|
||||
# Need full spec.json name or this gets confused with index.json.
|
||||
if spec_url.endswith('.json'):
|
||||
spec_dict = sjson.load(spec_file_contents)
|
||||
s = Spec.from_json(spec_file_contents)
|
||||
elif spec_url.endswith('.yaml'):
|
||||
spec_dict = syaml.load(spec_file_contents)
|
||||
s = Spec.from_yaml(spec_file_contents)
|
||||
all_mirror_specs[s.dag_hash()] = {
|
||||
'spec_url': spec_url,
|
||||
'spec': s,
|
||||
'num_deps': len(list(s.traverse(root=False))),
|
||||
'binary_cache_checksum': spec_dict['binary_cache_checksum'],
|
||||
'buildinfo': spec_dict['buildinfo'],
|
||||
}
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
tty.error('Error reading specfile: {0}'.format(file_path))
|
||||
tty.error(url_err)
|
||||
|
||||
sorted_specs = sorted(all_mirror_specs.keys(),
|
||||
key=lambda k: all_mirror_specs[k]['num_deps'])
|
||||
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
db_root_dir = os.path.join(tmpdir, 'db_root')
|
||||
db = spack_db.Database(None, db_dir=db_root_dir,
|
||||
@@ -826,85 +853,7 @@ def generate_package_index(cache_prefix):
|
||||
record_fields=['spec', 'ref_count', 'in_buildcache'])
|
||||
|
||||
try:
|
||||
tty.debug('Specs sorted by number of dependencies:')
|
||||
for dag_hash in sorted_specs:
|
||||
spec_record = all_mirror_specs[dag_hash]
|
||||
s = spec_record['spec']
|
||||
num_deps = spec_record['num_deps']
|
||||
tty.debug(' {0}/{1} -> {2}'.format(
|
||||
s.name, dag_hash[:7], num_deps))
|
||||
if num_deps > 0:
|
||||
# Check each of this spec's dependencies (which we have already
|
||||
# processed), as they are the source of truth for their own
|
||||
# full hash. If the full hash we have for any deps does not
|
||||
# match what those deps have themselves, then we need to splice
|
||||
# this spec with those deps, and push this spliced spec
|
||||
# (spec.json file) back to the mirror, as well as update the
|
||||
# all_mirror_specs dictionary with this spliced spec.
|
||||
to_splice = []
|
||||
for dep in s.dependencies():
|
||||
dep_dag_hash = dep.dag_hash()
|
||||
if dep_dag_hash in all_mirror_specs:
|
||||
true_dep = all_mirror_specs[dep_dag_hash]['spec']
|
||||
if true_dep.full_hash() != dep.full_hash():
|
||||
to_splice.append(true_dep)
|
||||
|
||||
if to_splice:
|
||||
tty.debug(' needs the following deps spliced:')
|
||||
for true_dep in to_splice:
|
||||
tty.debug(' {0}/{1}'.format(
|
||||
true_dep.name, true_dep.dag_hash()[:7]))
|
||||
s = s.splice(true_dep, True)
|
||||
|
||||
# Push this spliced spec back to the mirror
|
||||
spliced_spec_dict = s.to_dict(hash=ht.full_hash)
|
||||
for key in ['binary_cache_checksum', 'buildinfo']:
|
||||
spliced_spec_dict[key] = spec_record[key]
|
||||
|
||||
temp_json_path = os.path.join(tmpdir, 'spliced.spec.json')
|
||||
with open(temp_json_path, 'w') as fd:
|
||||
fd.write(sjson.dump(spliced_spec_dict))
|
||||
|
||||
spliced_spec_url = spec_record['spec_url']
|
||||
web_util.push_to_url(
|
||||
temp_json_path, spliced_spec_url, keep_original=False)
|
||||
tty.debug(' spliced and wrote {0}'.format(
|
||||
spliced_spec_url))
|
||||
spec_record['spec'] = s
|
||||
|
||||
db.add(s, None)
|
||||
db.mark(s, 'in_buildcache', True)
|
||||
|
||||
# Now that we have fixed any old specfiles that might have had the wrong
|
||||
# full hash for their dependencies, we can generate the index, compute
|
||||
# the hash, and push those files to the mirror.
|
||||
index_json_path = os.path.join(db_root_dir, 'index.json')
|
||||
with open(index_json_path, 'w') as f:
|
||||
db._write_to_file(f)
|
||||
|
||||
# Read the index back in and compute it's hash
|
||||
with open(index_json_path) as f:
|
||||
index_string = f.read()
|
||||
index_hash = compute_hash(index_string)
|
||||
|
||||
# Write the hash out to a local file
|
||||
index_hash_path = os.path.join(db_root_dir, 'index.json.hash')
|
||||
with open(index_hash_path, 'w') as f:
|
||||
f.write(index_hash)
|
||||
|
||||
# Push the index itself
|
||||
web_util.push_to_url(
|
||||
index_json_path,
|
||||
url_util.join(cache_prefix, 'index.json'),
|
||||
keep_original=False,
|
||||
extra_args={'ContentType': 'application/json'})
|
||||
|
||||
# Push the hash
|
||||
web_util.push_to_url(
|
||||
index_hash_path,
|
||||
url_util.join(cache_prefix, 'index.json.hash'),
|
||||
keep_original=False,
|
||||
extra_args={'ContentType': 'text/plain'})
|
||||
_read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir)
|
||||
except Exception as err:
|
||||
msg = 'Encountered problem pushing package index to {0}: {1}'.format(
|
||||
cache_prefix, err)
|
||||
@@ -1568,12 +1517,11 @@ def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None
|
||||
sha256 (str): optional sha256 of the binary package, to be checked
|
||||
before installation
|
||||
"""
|
||||
package = spack.repo.get(spec)
|
||||
# Early termination
|
||||
if spec.external or spec.virtual:
|
||||
warnings.warn("Skipping external or virtual package {0}".format(spec.format()))
|
||||
return
|
||||
elif spec.concrete and package.installed and not force:
|
||||
elif spec.concrete and spec.installed and not force:
|
||||
warnings.warn("Package for spec {0} already installed.".format(spec.format()))
|
||||
return
|
||||
|
||||
@@ -1611,16 +1559,14 @@ def install_single_spec(spec, allow_root=False, unsigned=False, force=False):
|
||||
install_root_node(node, allow_root=allow_root, unsigned=unsigned, force=force)
|
||||
|
||||
|
||||
def try_direct_fetch(spec, full_hash_match=False, mirrors=None):
|
||||
def try_direct_fetch(spec, mirrors=None):
|
||||
"""
|
||||
Try to find the spec directly on the configured mirrors
|
||||
"""
|
||||
deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_name = tarball_name(spec, '.spec.json')
|
||||
specfile_is_json = True
|
||||
lenient = not full_hash_match
|
||||
found_specs = []
|
||||
spec_full_hash = spec.full_hash()
|
||||
|
||||
for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
|
||||
buildcache_fetch_url_yaml = url_util.join(
|
||||
@@ -1650,29 +1596,21 @@ def try_direct_fetch(spec, full_hash_match=False, mirrors=None):
|
||||
fetched_spec = Spec.from_yaml(specfile_contents)
|
||||
fetched_spec._mark_concrete()
|
||||
|
||||
# Do not recompute the full hash for the fetched spec, instead just
|
||||
# read the property.
|
||||
if lenient or fetched_spec._full_hash == spec_full_hash:
|
||||
found_specs.append({
|
||||
'mirror_url': mirror.fetch_url,
|
||||
'spec': fetched_spec,
|
||||
})
|
||||
found_specs.append({
|
||||
'mirror_url': mirror.fetch_url,
|
||||
'spec': fetched_spec,
|
||||
})
|
||||
|
||||
return found_specs
|
||||
|
||||
|
||||
def get_mirrors_for_spec(spec=None, full_hash_match=False,
|
||||
mirrors_to_check=None, index_only=False):
|
||||
def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
|
||||
"""
|
||||
Check if concrete spec exists on mirrors and return a list
|
||||
indicating the mirrors on which it can be found
|
||||
|
||||
Args:
|
||||
spec (spack.spec.Spec): The spec to look for in binary mirrors
|
||||
full_hash_match (bool): If True, only includes mirrors where the spec
|
||||
full hash matches the locally computed full hash of the ``spec``
|
||||
argument. If False, any mirror which has a matching DAG hash
|
||||
is included in the results.
|
||||
mirrors_to_check (dict): Optionally override the configured mirrors
|
||||
with the mirrors in this dictionary.
|
||||
index_only (bool): Do not attempt direct fetching of ``spec.json``
|
||||
@@ -1689,29 +1627,14 @@ def get_mirrors_for_spec(spec=None, full_hash_match=False,
|
||||
tty.debug("No Spack mirrors are currently configured")
|
||||
return {}
|
||||
|
||||
results = []
|
||||
lenient = not full_hash_match
|
||||
spec_full_hash = spec.full_hash()
|
||||
|
||||
def filter_candidates(candidate_list):
|
||||
filtered_candidates = []
|
||||
for candidate in candidate_list:
|
||||
candidate_full_hash = candidate['spec']._full_hash
|
||||
if lenient or spec_full_hash == candidate_full_hash:
|
||||
filtered_candidates.append(candidate)
|
||||
return filtered_candidates
|
||||
|
||||
candidates = binary_index.find_built_spec(spec)
|
||||
if candidates:
|
||||
results = filter_candidates(candidates)
|
||||
results = binary_index.find_built_spec(spec)
|
||||
|
||||
# Maybe we just didn't have the latest information from the mirror, so
|
||||
# try to fetch directly, unless we are only considering the indices.
|
||||
if not results and not index_only:
|
||||
results = try_direct_fetch(spec,
|
||||
full_hash_match=full_hash_match,
|
||||
mirrors=mirrors_to_check)
|
||||
|
||||
results = try_direct_fetch(spec, mirrors=mirrors_to_check)
|
||||
# We found a spec by the direct fetch approach, we might as well
|
||||
# add it to our mapping.
|
||||
if results:
|
||||
binary_index.update_spec(spec, results)
|
||||
|
||||
@@ -1861,124 +1784,35 @@ def push_keys(*mirrors, **kwargs):
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
|
||||
def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
|
||||
def needs_rebuild(spec, mirror_url):
|
||||
if not spec.concrete:
|
||||
raise ValueError('spec must be concrete to check against mirror')
|
||||
|
||||
pkg_name = spec.name
|
||||
pkg_version = spec.version
|
||||
|
||||
pkg_hash = spec.dag_hash()
|
||||
pkg_full_hash = spec.full_hash()
|
||||
|
||||
tty.debug('Checking {0}-{1}, dag_hash = {2}, full_hash = {3}'.format(
|
||||
pkg_name, pkg_version, pkg_hash, pkg_full_hash))
|
||||
tty.debug('Checking {0}-{1}, dag_hash = {2}'.format(
|
||||
pkg_name, pkg_version, pkg_hash))
|
||||
tty.debug(spec.tree())
|
||||
|
||||
# Try to retrieve the specfile directly, based on the known
|
||||
# format of the name, in order to determine if the package
|
||||
# needs to be rebuilt.
|
||||
cache_prefix = build_cache_prefix(mirror_url)
|
||||
specfile_is_json = True
|
||||
specfile_name = tarball_name(spec, '.spec.json')
|
||||
deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
|
||||
specfile_path = os.path.join(cache_prefix, specfile_name)
|
||||
deprecated_specfile_path = os.path.join(cache_prefix,
|
||||
deprecated_specfile_name)
|
||||
|
||||
result_of_error = 'Package ({0}) will {1}be rebuilt'.format(
|
||||
spec.short_spec, '' if rebuild_on_errors else 'not ')
|
||||
|
||||
try:
|
||||
_, _, spec_file = web_util.read_from_url(specfile_path)
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
try:
|
||||
_, _, spec_file = web_util.read_from_url(deprecated_specfile_path)
|
||||
specfile_is_json = False
|
||||
except (URLError, web_util.SpackWebError) as url_err_y:
|
||||
err_msg = [
|
||||
'Unable to determine whether {0} needs rebuilding,',
|
||||
' caught exception attempting to read from {1} or {2}.',
|
||||
]
|
||||
tty.error(''.join(err_msg).format(
|
||||
spec.short_spec,
|
||||
specfile_path,
|
||||
deprecated_specfile_path))
|
||||
tty.debug(url_err)
|
||||
tty.debug(url_err_y)
|
||||
tty.warn(result_of_error)
|
||||
return rebuild_on_errors
|
||||
|
||||
spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
|
||||
if not spec_file_contents:
|
||||
tty.error('Reading {0} returned nothing'.format(
|
||||
specfile_path if specfile_is_json else deprecated_specfile_path))
|
||||
tty.warn(result_of_error)
|
||||
return rebuild_on_errors
|
||||
|
||||
spec_dict = (sjson.load(spec_file_contents)
|
||||
if specfile_is_json else syaml.load(spec_file_contents))
|
||||
|
||||
try:
|
||||
nodes = spec_dict['spec']['nodes']
|
||||
except KeyError:
|
||||
# Prior node dict format omitted 'nodes' key
|
||||
nodes = spec_dict['spec']
|
||||
name = spec.name
|
||||
|
||||
# In the old format:
|
||||
# The "spec" key represents a list of objects, each with a single
|
||||
# key that is the package name. While the list usually just contains
|
||||
# a single object, we iterate over the list looking for the object
|
||||
# with the name of this concrete spec as a key, out of an abundance
|
||||
# of caution.
|
||||
# In format version 2:
|
||||
# ['spec']['nodes'] is still a list of objects, but with a
|
||||
# multitude of keys. The list will commonly contain many objects, and in the
|
||||
# case of build specs, it is highly likely that the same name will occur
|
||||
# once as the actual package, and then again as the build provenance of that
|
||||
# same package. Hence format version 2 matches on the dag hash, not name.
|
||||
if nodes and 'name' not in nodes[0]:
|
||||
# old style
|
||||
cached_pkg_specs = [item[name] for item in nodes if name in item]
|
||||
elif nodes and spec_dict['spec']['_meta']['version'] == 2:
|
||||
cached_pkg_specs = [item for item in nodes
|
||||
if item[ht.dag_hash.name] == spec.dag_hash()]
|
||||
cached_target = cached_pkg_specs[0] if cached_pkg_specs else None
|
||||
|
||||
# If either the full_hash didn't exist in the specfile, or it
|
||||
# did, but didn't match the one we computed locally, then we should
|
||||
# just rebuild. This can be simplified once the dag_hash and the
|
||||
# full_hash become the same thing.
|
||||
rebuild = False
|
||||
|
||||
if not cached_target:
|
||||
reason = 'did not find spec in specfile contents'
|
||||
rebuild = True
|
||||
elif ht.full_hash.name not in cached_target:
|
||||
reason = 'full_hash was missing from remote specfile'
|
||||
rebuild = True
|
||||
else:
|
||||
full_hash = cached_target[ht.full_hash.name]
|
||||
if full_hash != pkg_full_hash:
|
||||
reason = 'hash mismatch, remote = {0}, local = {1}'.format(
|
||||
full_hash, pkg_full_hash)
|
||||
rebuild = True
|
||||
|
||||
if rebuild:
|
||||
tty.msg('Rebuilding {0}, reason: {1}'.format(
|
||||
spec.short_spec, reason))
|
||||
tty.msg(spec.tree())
|
||||
|
||||
return rebuild
|
||||
# Only check for the presence of the json version of the spec. If the
|
||||
# mirror only has the yaml version, or doesn't have the spec at all, we
|
||||
# need to rebuild.
|
||||
return not web_util.url_exists(specfile_path)
|
||||
|
||||
|
||||
def check_specs_against_mirrors(mirrors, specs, output_file=None,
|
||||
rebuild_on_errors=False):
|
||||
def check_specs_against_mirrors(mirrors, specs, output_file=None):
|
||||
"""Check all the given specs against buildcaches on the given mirrors and
|
||||
determine if any of the specs need to be rebuilt. Reasons for needing to
|
||||
rebuild include binary cache for spec isn't present on a mirror, or it is
|
||||
present but the full_hash has changed since last time spec was built.
|
||||
determine if any of the specs need to be rebuilt. Specs need to be rebuilt
|
||||
when their hash doesn't exist in the mirror.
|
||||
|
||||
Arguments:
|
||||
mirrors (dict): Mirrors to check against
|
||||
@@ -1986,8 +1820,6 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
|
||||
output_file (str): Path to output file to be written. If provided,
|
||||
mirrors with missing or out-of-date specs will be formatted as a
|
||||
JSON object and written to this file.
|
||||
rebuild_on_errors (bool): Treat any errors encountered while
|
||||
checking specs as a signal to rebuild package.
|
||||
|
||||
Returns: 1 if any spec was out-of-date on any mirror, 0 otherwise.
|
||||
|
||||
@@ -1999,7 +1831,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
|
||||
rebuild_list = []
|
||||
|
||||
for spec in specs:
|
||||
if needs_rebuild(spec, mirror.fetch_url, rebuild_on_errors):
|
||||
if needs_rebuild(spec, mirror.fetch_url):
|
||||
rebuild_list.append({
|
||||
'short_spec': spec.short_spec,
|
||||
'hash': spec.dag_hash()
|
||||
|
||||
@@ -111,6 +111,20 @@
|
||||
dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so'
|
||||
|
||||
|
||||
def should_set_parallel_jobs(jobserver_support=False):
|
||||
"""Returns true in general, except when:
|
||||
- The env variable SPACK_NO_PARALLEL_MAKE=1 is set
|
||||
- jobserver_support is enabled, and a jobserver was found.
|
||||
"""
|
||||
if (
|
||||
jobserver_support and
|
||||
'MAKEFLAGS' in os.environ and
|
||||
'--jobserver' in os.environ['MAKEFLAGS']
|
||||
):
|
||||
return False
|
||||
return not env_flag(SPACK_NO_PARALLEL_MAKE)
|
||||
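A short sketch of how this helper is meant to be consulted before adding -j flags (illustrative only, not the MakeExecutable code below):

jobs = 8  # assumed job count
if should_set_parallel_jobs(jobserver_support=True):
    make_args = ['-j{0}'.format(jobs)]
else:
    make_args = []  # serial, or let an inherited jobserver drive parallelism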
|
||||
|
||||
class MakeExecutable(Executable):
|
||||
"""Special callable executable object for make so the user can specify
|
||||
parallelism options on a per-invocation basis. Specifying
|
||||
@@ -120,9 +134,6 @@ class MakeExecutable(Executable):
|
||||
call will name an environment variable which will be set to the
|
||||
parallelism level (without affecting the normal invocation with
|
||||
-j).
|
||||
|
||||
Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides
|
||||
everything.
|
||||
"""
|
||||
|
||||
def __init__(self, name, jobs):
|
||||
@@ -133,9 +144,8 @@ def __call__(self, *args, **kwargs):
|
||||
"""parallel, and jobs_env from kwargs are swallowed and used here;
|
||||
remaining arguments are passed through to the superclass.
|
||||
"""
|
||||
|
||||
disable = env_flag(SPACK_NO_PARALLEL_MAKE)
|
||||
parallel = (not disable) and kwargs.pop('parallel', self.jobs > 1)
|
||||
parallel = should_set_parallel_jobs(jobserver_support=True) and \
|
||||
kwargs.pop('parallel', self.jobs > 1)
|
||||
|
||||
if parallel:
|
||||
args = ('-j{0}'.format(self.jobs),) + args
|
||||
@@ -181,7 +191,7 @@ def clean_environment():
|
||||
env.unset('PYTHONPATH')
|
||||
|
||||
# Affects GNU make, can e.g. indirectly inhibit enabling parallel build
|
||||
env.unset('MAKEFLAGS')
|
||||
# env.unset('MAKEFLAGS')
|
||||
|
||||
# Avoid that libraries of build dependencies get hijacked.
|
||||
env.unset('LD_PRELOAD')
|
||||
@@ -829,7 +839,7 @@ def setup_package(pkg, dirty, context='build'):
|
||||
# PrgEnv modules on cray platform. Module unload does no damage when
|
||||
# unnecessary
|
||||
on_cray, _ = _on_cray()
|
||||
if on_cray:
|
||||
if on_cray and not dirty:
|
||||
for mod in ['cray-mpich', 'cray-libsci']:
|
||||
module('unload', mod)
|
||||
|
||||
@@ -1028,7 +1038,7 @@ def get_cmake_prefix_path(pkg):
|
||||
|
||||
|
||||
def _setup_pkg_and_run(serialized_pkg, function, kwargs, child_pipe,
|
||||
input_multiprocess_fd):
|
||||
input_multiprocess_fd, jsfd1, jsfd2):
|
||||
|
||||
context = kwargs.get('context', 'build')
|
||||
|
||||
@@ -1135,6 +1145,8 @@ def child_fun():
|
||||
"""
|
||||
parent_pipe, child_pipe = multiprocessing.Pipe()
|
||||
input_multiprocess_fd = None
|
||||
jobserver_fd1 = None
|
||||
jobserver_fd2 = None
|
||||
|
||||
serialized_pkg = spack.subprocess_context.PackageInstallContext(pkg)
|
||||
|
||||
@@ -1144,11 +1156,17 @@ def child_fun():
|
||||
'fileno'):
|
||||
input_fd = os.dup(sys.stdin.fileno())
|
||||
input_multiprocess_fd = MultiProcessFd(input_fd)
|
||||
mflags = os.environ.get('MAKEFLAGS', False)
|
||||
if mflags:
|
||||
m = re.search(r'--jobserver-[^=]*=(\d),(\d)', mflags)
|
||||
if m:
|
||||
jobserver_fd1 = MultiProcessFd(int(m.group(1)))
|
||||
jobserver_fd2 = MultiProcessFd(int(m.group(2)))
|
||||
|
||||
p = multiprocessing.Process(
|
||||
target=_setup_pkg_and_run,
|
||||
args=(serialized_pkg, function, kwargs, child_pipe,
|
||||
input_multiprocess_fd))
|
||||
input_multiprocess_fd, jobserver_fd1, jobserver_fd2))
|
||||
|
||||
p.start()
|
||||
|
||||
|
||||
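The MAKEFLAGS parsing above can be illustrated with a small self-contained example; the MAKEFLAGS value is made up:

import re

# A sub-make running under a GNU make jobserver typically sees something like:
mflags = ' -j8 --jobserver-auth=3,4'
m = re.search(r'--jobserver-[^=]*=(\d),(\d)', mflags)
if m:
    read_fd, write_fd = int(m.group(1)), int(m.group(2))
    # read_fd == 3, write_fd == 4: the jobserver pipe file descriptors that
    # get wrapped in MultiProcessFd objects above.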
@@ -176,6 +176,7 @@ def _std_args(pkg):
|
||||
'-G', generator,
|
||||
define('CMAKE_INSTALL_PREFIX', convert_to_posix_path(pkg.prefix)),
|
||||
define('CMAKE_BUILD_TYPE', build_type),
|
||||
define('BUILD_TESTING', pkg.run_tests),
|
||||
]
|
||||
|
||||
# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
|
||||
@@ -361,6 +362,7 @@ def cmake_args(self):
|
||||
|
||||
* CMAKE_INSTALL_PREFIX
|
||||
* CMAKE_BUILD_TYPE
|
||||
* BUILD_TESTING
|
||||
|
||||
which will be set automatically.
|
||||
|
||||
|
||||
@@ -107,10 +107,10 @@ def cuda_flags(arch_list):
|
||||
# each release of a new cuda minor version.
|
||||
conflicts('%gcc@10:', when='+cuda ^cuda@:11.0')
|
||||
conflicts('%gcc@11:', when='+cuda ^cuda@:11.4.0')
|
||||
conflicts('%gcc@12:', when='+cuda ^cuda@:11.6')
|
||||
conflicts('%gcc@12:', when='+cuda ^cuda@:11.7')
|
||||
conflicts('%clang@12:', when='+cuda ^cuda@:11.4.0')
|
||||
conflicts('%clang@13:', when='+cuda ^cuda@:11.5')
|
||||
conflicts('%clang@14:', when='+cuda ^cuda@:11.6')
|
||||
conflicts('%clang@14:', when='+cuda ^cuda@:11.7')
|
||||
|
||||
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
|
||||
conflicts('%gcc@10', when='+cuda ^cuda@:11.4.0')
|
||||
|
||||
102
lib/spack/spack/build_systems/lua.py
Normal file
@@ -0,0 +1,102 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
|
||||
import os
|
||||
|
||||
from llnl.util.filesystem import find
|
||||
|
||||
from spack.directives import depends_on, extends
|
||||
from spack.multimethod import when
|
||||
from spack.package import PackageBase
|
||||
from spack.util.executable import Executable
|
||||
|
||||
|
||||
class LuaPackage(PackageBase):
|
||||
"""Specialized class for lua packages"""
|
||||
|
||||
phases = ['unpack', 'generate_luarocks_config', 'preprocess', 'install']
|
||||
#: This attribute is used in UI queries that need to know the build
|
||||
#: system base class
|
||||
build_system_class = 'LuaPackage'
|
||||
|
||||
list_depth = 1 # LuaRocks requires at least one level of spidering to find versions
|
||||
depends_on('lua-lang')
|
||||
extends('lua', when='^lua')
|
||||
with when('^lua-luajit'):
|
||||
extends('lua-luajit')
|
||||
depends_on('luajit')
|
||||
depends_on('lua-luajit+lualinks')
|
||||
with when('^lua-luajit-openresty'):
|
||||
extends('lua-luajit-openresty')
|
||||
depends_on('luajit')
|
||||
depends_on('lua-luajit-openresty+lualinks')
|
||||
|
||||
def unpack(self, spec, prefix):
|
||||
if os.path.splitext(self.stage.archive_file)[1] == '.rock':
|
||||
directory = self.luarocks('unpack', self.stage.archive_file, output=str)
|
||||
dirlines = directory.split('\n')
|
||||
# TODO: figure out how to scope this better
|
||||
os.chdir(dirlines[2])
|
||||
|
||||
def _generate_tree_line(self, name, prefix):
|
||||
return """{{ name = "{name}", root = "{prefix}" }};""".format(
|
||||
name=name,
|
||||
prefix=prefix,
|
||||
)
|
||||
|
||||
def _luarocks_config_path(self):
|
||||
return os.path.join(self.stage.source_path, 'spack_luarocks.lua')
|
||||
|
||||
def generate_luarocks_config(self, spec, prefix):
|
||||
spec = self.spec
|
||||
table_entries = []
|
||||
for d in spec.traverse(
|
||||
deptypes=("build", "run"), deptype_query="run"
|
||||
):
|
||||
if d.package.extends(self.extendee_spec):
|
||||
table_entries.append(self._generate_tree_line(d.name, d.prefix))
|
||||
|
||||
path = self._luarocks_config_path()
|
||||
with open(path, 'w') as config:
|
||||
config.write(
|
||||
"""
|
||||
deps_mode="all"
|
||||
rocks_trees={{
|
||||
{}
|
||||
}}
|
||||
""".format(
|
||||
"\n".join(table_entries)
|
||||
)
|
||||
)
|
||||
return path
|
||||
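For a sense of the entries written into the luarocks config, here is the tree-line format rendered with a made-up name and prefix:

# Illustration of _generate_tree_line's output (values are made up):
entry = '{{ name = "{name}", root = "{prefix}" }};'.format(
    name='lua-lpeg', prefix='/opt/spack/prefix/lua-lpeg')
# -> { name = "lua-lpeg", root = "/opt/spack/prefix/lua-lpeg" };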
|
||||
def setup_build_environment(self, env):
|
||||
env.set('LUAROCKS_CONFIG', self._luarocks_config_path())
|
||||
|
||||
def preprocess(self, spec, prefix):
|
||||
"""Override this to preprocess source before building with luarocks"""
|
||||
pass
|
||||
|
||||
@property
|
||||
def lua(self):
|
||||
return Executable(self.spec['lua-lang'].prefix.bin.lua)
|
||||
|
||||
@property
|
||||
def luarocks(self):
|
||||
lr = Executable(self.spec['lua-lang'].prefix.bin.luarocks)
|
||||
return lr
|
||||
|
||||
def luarocks_args(self):
|
||||
return []
|
||||
|
||||
def install(self, spec, prefix):
|
||||
rock = '.'
|
||||
specs = find('.', '*.rockspec', recursive=False)
|
||||
if specs:
|
||||
rock = specs[0]
|
||||
rocks_args = self.luarocks_args()
|
||||
rocks_args.append(rock)
|
||||
self.luarocks('--tree=' + prefix, 'make', *rocks_args)
|
||||
@@ -30,6 +30,15 @@ class IntelOneApiPackage(Package):
|
||||
# organization (e.g. University/Company).
|
||||
redistribute_source = False
|
||||
|
||||
@staticmethod
|
||||
def update_description(cls):
|
||||
"""Updates oneapi package descriptions with common text."""
|
||||
|
||||
text = """ LICENSE INFORMATION: By downloading and using this software, you agree to the terms
|
||||
and conditions of the software license agreements at https://intel.ly/393CijO."""
|
||||
cls.__doc__ = cls.__doc__ + text
|
||||
return cls
|
||||
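A hedged usage sketch: update_description is written to be applied as a class decorator to concrete oneAPI packages (the package below is hypothetical):

@IntelOneApiPackage.update_description
class IntelOneapiExample(IntelOneApiPackage):
    """Hypothetical oneAPI component package."""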
|
||||
@property
|
||||
def component_dir(self):
|
||||
"""Subdirectory for this component in the install prefix."""
|
||||
|
||||
@@ -90,8 +90,8 @@ def _create_buildgroup(opener, headers, url, project, group_name, group_type):
|
||||
return build_group_id
|
||||
|
||||
|
||||
def populate_buildgroup(job_names, group_name, project, site,
|
||||
credentials, cdash_url):
|
||||
def _populate_buildgroup(job_names, group_name, project, site,
|
||||
credentials, cdash_url):
|
||||
url = "{0}/api/v1/buildgroup.php".format(cdash_url)
|
||||
|
||||
headers = {
|
||||
@@ -132,16 +132,30 @@ def populate_buildgroup(job_names, group_name, project, site,
|
||||
response_code = response.getcode()
|
||||
|
||||
if response_code != 200:
|
||||
msg = 'Error response code ({0}) in populate_buildgroup'.format(
|
||||
msg = 'Error response code ({0}) in _populate_buildgroup'.format(
|
||||
response_code)
|
||||
tty.warn(msg)
|
||||
|
||||
|
||||
def is_main_phase(phase_name):
|
||||
def _is_main_phase(phase_name):
|
||||
return True if phase_name == 'specs' else False
|
||||
|
||||
|
||||
def get_job_name(phase, strip_compiler, spec, osarch, build_group):
|
||||
""" Given the necessary parts, format the gitlab job name
|
||||
|
||||
Arguments:
|
||||
phase (str): Either 'specs' for the main phase, or the name of a
|
||||
bootstrapping phase
|
||||
strip_compiler (bool): Should compiler be stripped from job name
|
||||
spec (spack.spec.Spec): Spec job will build
|
||||
osarch: Architecture (TODO: this is a spack.spec.ArchSpec,
but sphinx doesn't recognize the type and fails).
|
||||
build_group (str): Name of build group this job belongs to (a CDash
|
||||
notion)
|
||||
|
||||
Returns: The job name
|
||||
"""
|
||||
item_idx = 0
|
||||
format_str = ''
|
||||
format_args = []
|
||||
@@ -163,7 +177,7 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
|
||||
format_args.append(spec.version)
|
||||
item_idx += 1
|
||||
|
||||
if is_main_phase(phase) is True or strip_compiler is False:
|
||||
if _is_main_phase(phase) is True or strip_compiler is False:
|
||||
format_str += ' {{{0}}}'.format(item_idx)
|
||||
format_args.append(spec.compiler)
|
||||
item_idx += 1
|
||||
@@ -180,12 +194,12 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
|
||||
return format_str.format(*format_args)
|
||||
|
||||
|
||||
def get_cdash_build_name(spec, build_group):
|
||||
def _get_cdash_build_name(spec, build_group):
|
||||
return '{0}@{1}%{2} arch={3} ({4})'.format(
|
||||
spec.name, spec.version, spec.compiler, spec.architecture, build_group)
|
||||
|
||||
|
||||
def get_spec_string(spec):
|
||||
def _get_spec_string(spec):
|
||||
format_elements = [
|
||||
'{name}{@version}',
|
||||
'{%compiler}',
|
||||
@@ -197,15 +211,15 @@ def get_spec_string(spec):
|
||||
return spec.format(''.join(format_elements))
|
||||
|
||||
|
||||
def format_root_spec(spec, main_phase, strip_compiler):
|
||||
def _format_root_spec(spec, main_phase, strip_compiler):
|
||||
if main_phase is False and strip_compiler is True:
|
||||
return '{0}@{1} arch={2}'.format(
|
||||
spec.name, spec.version, spec.architecture)
|
||||
else:
|
||||
return spec.build_hash()
|
||||
return spec.dag_hash()
|
||||
|
||||
|
||||
def spec_deps_key(s):
|
||||
def _spec_deps_key(s):
|
||||
return '{0}/{1}'.format(s.name, s.dag_hash(7))
|
||||
|
||||
|
||||
@@ -217,8 +231,8 @@ def _add_dependency(spec_label, dep_label, deps):
|
||||
deps[spec_label].add(dep_label)
|
||||
|
||||
|
||||
def get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
|
||||
spec_deps_obj = compute_spec_deps(specs, check_index_only=check_index_only)
|
||||
def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
|
||||
spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only)
|
||||
|
||||
if spec_deps_obj:
|
||||
dependencies = spec_deps_obj['dependencies']
|
||||
@@ -266,11 +280,11 @@ def stage_spec_jobs(specs, check_index_only=False):
|
||||
|
||||
"""
|
||||
|
||||
# The convenience method below, "remove_satisfied_deps()", does not modify
|
||||
# The convenience method below, "_remove_satisfied_deps()", does not modify
|
||||
# the "deps" parameter. Instead, it returns a new dictionary where only
|
||||
# dependencies which have not yet been satisfied are included in the
|
||||
# return value.
|
||||
def remove_satisfied_deps(deps, satisfied_list):
|
||||
def _remove_satisfied_deps(deps, satisfied_list):
|
||||
new_deps = {}
|
||||
|
||||
for key, value in iteritems(deps):
|
||||
@@ -283,7 +297,7 @@ def remove_satisfied_deps(deps, satisfied_list):
|
||||
deps = {}
|
||||
spec_labels = {}
|
||||
|
||||
get_spec_dependencies(
|
||||
_get_spec_dependencies(
|
||||
specs, deps, spec_labels, check_index_only=check_index_only)
|
||||
|
||||
# Save the original deps, as we need to return them at the end of the
|
||||
@@ -302,7 +316,7 @@ def remove_satisfied_deps(deps, satisfied_list):
|
||||
# Note that "dependencies" is a dictionary mapping each dependent
|
||||
# package to the set of not-yet-handled dependencies. The final step
|
||||
# below removes all the dependencies that are handled by this stage.
|
||||
dependencies = remove_satisfied_deps(dependencies, next_stage)
|
||||
dependencies = _remove_satisfied_deps(dependencies, next_stage)
|
||||
|
||||
if unstaged:
|
||||
stages.append(unstaged.copy())
|
||||
@@ -310,13 +324,12 @@ def remove_satisfied_deps(deps, satisfied_list):
|
||||
return spec_labels, deps, stages
|
||||
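A minimal, self-contained sketch of the idea behind _remove_satisfied_deps described above (labels and hashes are made up; this is not Spack's exact code):

def remove_satisfied_sketch(deps, satisfied_list):
    # Return a new dict keeping only entries that still have unmet deps.
    new_deps = {}
    for key, value in deps.items():
        remaining = set(value) - set(satisfied_list)
        if remaining:
            new_deps[key] = remaining
    return new_deps

deps = {'b/1111111': {'a/0000000'}, 'c/2222222': {'a/0000000', 'b/1111111'}}
print(remove_satisfied_sketch(deps, ['a/0000000']))
# -> {'c/2222222': {'b/1111111'}}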
|
||||
|
||||
def print_staging_summary(spec_labels, dependencies, stages):
|
||||
def _print_staging_summary(spec_labels, dependencies, stages):
|
||||
if not stages:
|
||||
return
|
||||
|
||||
tty.msg(' Staging summary:')
|
||||
stage_index = 0
|
||||
for stage in stages:
|
||||
tty.msg(' Staging summary ([x] means a job needs rebuilding):')
|
||||
for stage_index, stage in enumerate(stages):
|
||||
tty.msg(' stage {0} ({1} jobs):'.format(stage_index, len(stage)))
|
||||
|
||||
for job in sorted(stage):
|
||||
@@ -324,12 +337,10 @@ def print_staging_summary(spec_labels, dependencies, stages):
|
||||
tty.msg(' [{1}] {0} -> {2}'.format(
|
||||
job,
|
||||
'x' if spec_labels[job]['needs_rebuild'] else ' ',
|
||||
get_spec_string(s)))
|
||||
|
||||
stage_index += 1
|
||||
_get_spec_string(s)))
|
||||
|
||||
|
||||
def compute_spec_deps(spec_list, check_index_only=False):
|
||||
def _compute_spec_deps(spec_list, check_index_only=False):
|
||||
"""
|
||||
Computes all the dependencies for the spec(s) and generates a JSON
|
||||
object which provides both a list of unique spec names as well as a
|
||||
@@ -402,17 +413,17 @@ def append_dep(s, d):
|
||||
continue
|
||||
|
||||
up_to_date_mirrors = bindist.get_mirrors_for_spec(
|
||||
spec=s, full_hash_match=True, index_only=check_index_only)
|
||||
spec=s, index_only=check_index_only)
|
||||
|
||||
skey = spec_deps_key(s)
|
||||
skey = _spec_deps_key(s)
|
||||
spec_labels[skey] = {
|
||||
'spec': get_spec_string(s),
|
||||
'spec': _get_spec_string(s),
|
||||
'root': root_spec,
|
||||
'needs_rebuild': not up_to_date_mirrors,
|
||||
}
|
||||
|
||||
for d in s.dependencies(deptype=all):
|
||||
dkey = spec_deps_key(d)
|
||||
dkey = _spec_deps_key(d)
|
||||
if d.external:
|
||||
tty.msg('Will not stage external dep: {0}'.format(d))
|
||||
continue
|
||||
@@ -435,11 +446,11 @@ def append_dep(s, d):
|
||||
return deps_json_obj
|
||||
|
||||
|
||||
def spec_matches(spec, match_string):
|
||||
def _spec_matches(spec, match_string):
|
||||
return spec.satisfies(match_string)
|
||||
|
||||
|
||||
def copy_attributes(attrs_list, src_dict, dest_dict):
|
||||
def _copy_attributes(attrs_list, src_dict, dest_dict):
|
||||
for runner_attr in attrs_list:
|
||||
if runner_attr in src_dict:
|
||||
if runner_attr in dest_dict and runner_attr == 'tags':
|
||||
@@ -460,7 +471,7 @@ def copy_attributes(attrs_list, src_dict, dest_dict):
|
||||
dest_dict[runner_attr] = copy.deepcopy(src_dict[runner_attr])
|
||||
|
||||
|
||||
def find_matching_config(spec, gitlab_ci):
|
||||
def _find_matching_config(spec, gitlab_ci):
|
||||
runner_attributes = {}
|
||||
overridable_attrs = [
|
||||
'image',
|
||||
@@ -471,16 +482,16 @@ def find_matching_config(spec, gitlab_ci):
|
||||
'after_script',
|
||||
]
|
||||
|
||||
copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
|
||||
_copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
|
||||
|
||||
ci_mappings = gitlab_ci['mappings']
|
||||
for ci_mapping in ci_mappings:
|
||||
for match_string in ci_mapping['match']:
|
||||
if spec_matches(spec, match_string):
|
||||
if _spec_matches(spec, match_string):
|
||||
if 'runner-attributes' in ci_mapping:
|
||||
copy_attributes(overridable_attrs,
|
||||
ci_mapping['runner-attributes'],
|
||||
runner_attributes)
|
||||
_copy_attributes(overridable_attrs,
|
||||
ci_mapping['runner-attributes'],
|
||||
runner_attributes)
|
||||
return runner_attributes
|
||||
else:
|
||||
return None
|
||||
@@ -488,16 +499,16 @@ def find_matching_config(spec, gitlab_ci):
|
||||
return runner_attributes
|
||||
|
||||
|
||||
def pkg_name_from_spec_label(spec_label):
|
||||
def _pkg_name_from_spec_label(spec_label):
|
||||
return spec_label[:spec_label.index('/')]
|
||||
|
||||
|
||||
def format_job_needs(phase_name, strip_compilers, dep_jobs,
|
||||
osname, build_group, prune_dag, stage_spec_dict,
|
||||
enable_artifacts_buildcache):
|
||||
def _format_job_needs(phase_name, strip_compilers, dep_jobs,
|
||||
osname, build_group, prune_dag, stage_spec_dict,
|
||||
enable_artifacts_buildcache):
|
||||
needs_list = []
|
||||
for dep_job in dep_jobs:
|
||||
dep_spec_key = spec_deps_key(dep_job)
|
||||
dep_spec_key = _spec_deps_key(dep_job)
|
||||
dep_spec_info = stage_spec_dict[dep_spec_key]
|
||||
|
||||
if not prune_dag or dep_spec_info['needs_rebuild']:
|
||||
@@ -592,6 +603,33 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
prune_dag=False, check_index_only=False,
|
||||
run_optimizer=False, use_dependencies=False,
|
||||
artifacts_root=None):
|
||||
""" Generate a gitlab yaml file to run a dynamic chile pipeline from
|
||||
the spec matrix in the active environment.
|
||||
|
||||
Arguments:
|
||||
env (spack.environment.Environment): Activated environment object
|
||||
which must contain a gitlab-ci section describing how to map
|
||||
specs to runners
|
||||
print_summary (bool): Should we print a summary of all the jobs in
|
||||
the stages in which they were placed.
|
||||
output_file (str): File path where generated file should be written
|
||||
prune_dag (bool): If True, do not generate jobs for specs that already
exist built on the mirror.
|
||||
check_index_only (bool): If True, attempt to fetch the mirror index
and only use that to determine whether built specs on the mirror
are up to date (this mode results in faster yaml generation time).
Otherwise, also check each spec directly by url (useful if there is
no index or it might be out of date).
|
||||
run_optimizer (bool): If True, post-process the generated yaml to try
to reduce the size (attempts to collect repeated configuration and
replace it with definitions).
|
||||
use_dependencies (bool): If true, use "dependencies" rather than "needs"
|
||||
("needs" allows DAG scheduling). Useful if gitlab instance cannot
|
||||
be configured to handle more than a few "needs" per job.
|
||||
artifacts_root (str): Path where artifacts like logs, environment
|
||||
files (spack.yaml, spack.lock), etc should be written. GitLab
|
||||
requires this to be within the project directory.
|
||||
"""
|
||||
with spack.concretize.disable_compiler_existence_check():
|
||||
with env.write_transaction():
|
||||
env.concretize()
|
||||
@@ -804,7 +842,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
max_needs_job = ''
|
||||
|
||||
# If this is configured, spack will fail "spack ci generate" if it
|
||||
# generates any full hash which exists under the broken specs url.
|
||||
# generates any hash which exists under the broken specs url.
|
||||
broken_spec_urls = None
|
||||
if broken_specs_url:
|
||||
if broken_specs_url.startswith('http'):
|
||||
@@ -819,7 +857,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
phase_name = phase['name']
|
||||
strip_compilers = phase['strip-compilers']
|
||||
|
||||
main_phase = is_main_phase(phase_name)
|
||||
main_phase = _is_main_phase(phase_name)
|
||||
spec_labels, dependencies, stages = staged_phases[phase_name]
|
||||
|
||||
for stage_jobs in stages:
|
||||
@@ -830,11 +868,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
for spec_label in stage_jobs:
|
||||
spec_record = spec_labels[spec_label]
|
||||
root_spec = spec_record['rootSpec']
|
||||
pkg_name = pkg_name_from_spec_label(spec_label)
|
||||
pkg_name = _pkg_name_from_spec_label(spec_label)
|
||||
release_spec = root_spec[pkg_name]
|
||||
release_spec_full_hash = release_spec.full_hash()
|
||||
release_spec_dag_hash = release_spec.dag_hash()
|
||||
release_spec_build_hash = release_spec.build_hash()
|
||||
|
||||
if prune_untouched_packages:
|
||||
if release_spec not in affected_specs:
|
||||
@@ -843,7 +879,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
spec_record['needs_rebuild'] = False
|
||||
continue
|
||||
|
||||
runner_attribs = find_matching_config(
|
||||
runner_attribs = _find_matching_config(
|
||||
release_spec, gitlab_ci)
|
||||
|
||||
if not runner_attribs:
|
||||
@@ -897,15 +933,13 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
compiler_action = 'NONE'
|
||||
if len(phases) > 1:
|
||||
compiler_action = 'FIND_ANY'
|
||||
if is_main_phase(phase_name):
|
||||
if _is_main_phase(phase_name):
|
||||
compiler_action = 'INSTALL_MISSING'
|
||||
|
||||
job_vars = {
|
||||
'SPACK_ROOT_SPEC': format_root_spec(
|
||||
'SPACK_ROOT_SPEC': _format_root_spec(
|
||||
root_spec, main_phase, strip_compilers),
|
||||
'SPACK_JOB_SPEC_DAG_HASH': release_spec_dag_hash,
|
||||
'SPACK_JOB_SPEC_BUILD_HASH': release_spec_build_hash,
|
||||
'SPACK_JOB_SPEC_FULL_HASH': release_spec_full_hash,
|
||||
'SPACK_JOB_SPEC_PKG_NAME': release_spec.name,
|
||||
'SPACK_COMPILER_ACTION': compiler_action
|
||||
}
|
||||
@@ -924,15 +958,15 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
# purposes, so we only get the direct dependencies.
|
||||
dep_jobs = []
|
||||
for dep_label in dependencies[spec_label]:
|
||||
dep_pkg = pkg_name_from_spec_label(dep_label)
|
||||
dep_pkg = _pkg_name_from_spec_label(dep_label)
|
||||
dep_root = spec_labels[dep_label]['rootSpec']
|
||||
dep_jobs.append(dep_root[dep_pkg])
|
||||
|
||||
job_dependencies.extend(
|
||||
format_job_needs(phase_name, strip_compilers,
|
||||
dep_jobs, osname, build_group,
|
||||
prune_dag, spec_labels,
|
||||
enable_artifacts_buildcache))
|
||||
_format_job_needs(phase_name, strip_compilers,
|
||||
dep_jobs, osname, build_group,
|
||||
prune_dag, spec_labels,
|
||||
enable_artifacts_buildcache))
|
||||
|
||||
rebuild_spec = spec_record['needs_rebuild']
|
||||
|
||||
@@ -943,7 +977,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
# compiler we are supposed to use is listed in any of the
|
||||
# bootstrap spec lists, then we will add more dependencies to
|
||||
# the job (that compiler and maybe it's dependencies as well).
|
||||
if is_main_phase(phase_name):
|
||||
if _is_main_phase(phase_name):
|
||||
spec_arch_family = (release_spec.architecture
|
||||
.target
|
||||
.microarchitecture
|
||||
@@ -971,7 +1005,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
# be rebuilt if the compiler targeted to build it
|
||||
# needs to be rebuilt.
|
||||
bs_specs, _, _ = staged_phases[bs['phase-name']]
|
||||
c_spec_key = spec_deps_key(c_spec)
|
||||
c_spec_key = _spec_deps_key(c_spec)
|
||||
rbld_comp = bs_specs[c_spec_key]['needs_rebuild']
|
||||
rebuild_spec = rebuild_spec or rbld_comp
|
||||
# Also update record so dependents do not fail to
|
||||
@@ -985,14 +1019,14 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
]
|
||||
|
||||
job_dependencies.extend(
|
||||
format_job_needs(bs['phase-name'],
|
||||
bs['strip-compilers'],
|
||||
dep_jobs,
|
||||
str(bs_arch),
|
||||
build_group,
|
||||
prune_dag,
|
||||
bs_specs,
|
||||
enable_artifacts_buildcache))
|
||||
_format_job_needs(bs['phase-name'],
|
||||
bs['strip-compilers'],
|
||||
dep_jobs,
|
||||
str(bs_arch),
|
||||
build_group,
|
||||
prune_dag,
|
||||
bs_specs,
|
||||
enable_artifacts_buildcache))
|
||||
else:
|
||||
debug_msg = ''.join([
|
||||
'Considered compiler {0} for spec ',
|
||||
@@ -1009,9 +1043,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
continue
|
||||
|
||||
if (broken_spec_urls is not None and
|
||||
release_spec_full_hash in broken_spec_urls):
|
||||
release_spec_dag_hash in broken_spec_urls):
|
||||
known_broken_specs_encountered.append('{0} ({1})'.format(
|
||||
release_spec, release_spec_full_hash))
|
||||
release_spec, release_spec_dag_hash))
|
||||
|
||||
if artifacts_root:
|
||||
job_dependencies.append({
|
||||
@@ -1022,7 +1056,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
job_vars['SPACK_SPEC_NEEDS_REBUILD'] = str(rebuild_spec)
|
||||
|
||||
if enable_cdash_reporting:
|
||||
cdash_build_name = get_cdash_build_name(
|
||||
cdash_build_name = _get_cdash_build_name(
|
||||
release_spec, build_group)
|
||||
all_job_names.append(cdash_build_name)
|
||||
job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
|
||||
@@ -1087,7 +1121,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
phase_name = phase['name']
|
||||
tty.msg('Stages for phase "{0}"'.format(phase_name))
|
||||
phase_stages = staged_phases[phase_name]
|
||||
print_staging_summary(*phase_stages)
|
||||
_print_staging_summary(*phase_stages)
|
||||
|
||||
tty.debug('{0} build jobs generated in {1} stages'.format(
|
||||
job_id, stage_id))
|
||||
@@ -1099,8 +1133,8 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
# Use "all_job_names" to populate the build group for this set
|
||||
if enable_cdash_reporting and cdash_auth_token:
|
||||
try:
|
||||
populate_buildgroup(all_job_names, build_group, cdash_project,
|
||||
cdash_site, cdash_auth_token, cdash_url)
|
||||
_populate_buildgroup(all_job_names, build_group, cdash_project,
|
||||
cdash_site, cdash_auth_token, cdash_url)
|
||||
except (SpackError, HTTPError, URLError) as err:
|
||||
tty.warn('Problem populating buildgroup: {0}'.format(err))
|
||||
else:
|
||||
@@ -1136,9 +1170,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
cleanup_job = {}
|
||||
|
||||
if service_job_config:
|
||||
copy_attributes(default_attrs,
|
||||
service_job_config,
|
||||
cleanup_job)
|
||||
_copy_attributes(default_attrs,
|
||||
service_job_config,
|
||||
cleanup_job)
|
||||
|
||||
cleanup_job['stage'] = 'cleanup-temp-storage'
|
||||
cleanup_job['script'] = [
|
||||
@@ -1156,9 +1190,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
final_job = {}
|
||||
|
||||
if service_job_config:
|
||||
copy_attributes(default_attrs,
|
||||
service_job_config,
|
||||
final_job)
|
||||
_copy_attributes(default_attrs,
|
||||
service_job_config,
|
||||
final_job)
|
||||
|
||||
index_target_mirror = mirror_urls[0]
|
||||
if is_pr_pipeline:
|
||||
@@ -1229,9 +1263,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
noop_job = {}
|
||||
|
||||
if service_job_config:
|
||||
copy_attributes(default_attrs,
|
||||
service_job_config,
|
||||
noop_job)
|
||||
_copy_attributes(default_attrs,
|
||||
service_job_config,
|
||||
noop_job)
|
||||
|
||||
if 'script' not in noop_job:
|
||||
noop_job['script'] = [
|
||||
@@ -1254,7 +1288,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
outf.write(syaml.dump_config(sorted_output, default_flow_style=True))
|
||||
|
||||
|
||||
def url_encode_string(input_string):
|
||||
def _url_encode_string(input_string):
|
||||
encoded_keyval = urlencode({'donotcare': input_string})
|
||||
eq_idx = encoded_keyval.find('=') + 1
|
||||
encoded_value = encoded_keyval[eq_idx:]
|
||||
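A small illustration of the encoding trick above (Python 3 urllib assumed; the input string is made up):

from urllib.parse import urlencode

encoded_keyval = urlencode({'donotcare': 'zlib@1.2.12 %gcc'})
eq_idx = encoded_keyval.find('=') + 1
print(encoded_keyval[eq_idx:])  # zlib%401.2.12+%25gcc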
@@ -1262,6 +1296,17 @@ def url_encode_string(input_string):
|
||||
|
||||
|
||||
def import_signing_key(base64_signing_key):
|
||||
""" Given Base64-encoded gpg key, decode and import it to use for
|
||||
signing packages.
|
||||
|
||||
Arguments:
|
||||
base64_signing_key (str): A gpg key including the secret key,
|
||||
armor-exported and base64 encoded, so it can be stored in a
|
||||
gitlab CI variable. For an example of how to generate such
|
||||
a key, see:
|
||||
|
||||
https://github.com/spack/spack-infrastructure/blob/main/gitlab-docker/files/gen-key
|
||||
"""
|
||||
if not base64_signing_key:
|
||||
tty.warn('No key found for signing/verifying packages')
|
||||
return
|
||||
@@ -1299,14 +1344,34 @@ def import_signing_key(base64_signing_key):
|
||||
|
||||
|
||||
def can_sign_binaries():
|
||||
""" Utility method to determine if this spack instance is capable of
|
||||
signing binary packages. This is currently only possible if the
|
||||
spack gpg keystore contains exactly one secret key."""
|
||||
return len(gpg_util.signing_keys()) == 1
|
||||
|
||||
|
||||
def can_verify_binaries():
|
||||
""" Utility method to determin if this spack instance is capable (at
|
||||
least in theory) of verifying signed binaries."""
|
||||
return len(gpg_util.public_keys()) >= 1
|
||||
|
||||
|
||||
def configure_compilers(compiler_action, scope=None):
|
||||
""" Depending on the compiler_action parameter, either turn on the
|
||||
install_missing_compilers config option, or find spack compilers,
|
||||
or do nothing. This is used from rebuild jobs in bootstrapping
|
||||
pipelines, where in the bootstrapping phase we would pass
|
||||
FIND_ANY in case of compiler-agnostic bootstrapping, while in the
|
||||
spec building phase we would pass INSTALL_MISSING in order to get
|
||||
spack to use the compiler which was built in the previous phase and
|
||||
is now sitting in the binary mirror.
|
||||
|
||||
Arguments:
|
||||
compiler_action (str): 'FIND_ANY', 'INSTALL_MISSING' have meanings
|
||||
described above. Any other value essentially results in a no-op.
|
||||
scope (spack.config.ConfigScope): Optional. The scope in which to look for
|
||||
compilers, in case 'FIND_ANY' was provided.
|
||||
"""
|
||||
if compiler_action == 'INSTALL_MISSING':
|
||||
tty.debug('Make sure bootstrapped compiler will be installed')
|
||||
config = cfg.get('config')
|
||||
@@ -1330,6 +1395,35 @@ def configure_compilers(compiler_action, scope=None):
|
||||
|
||||
|
||||
def get_concrete_specs(env, root_spec, job_name, compiler_action):
|
||||
""" Build a dictionary of concrete specs relevant to a particular
|
||||
rebuild job. This includes the root spec and the spec to be
|
||||
rebuilt (which could be the same).
|
||||
|
||||
Arguments:
|
||||
|
||||
env (spack.environment.Environment): Activated spack environment
|
||||
used to get concrete root spec by hash in case compiler_action
|
||||
is anything other than FIND_ANY.
|
||||
root_spec (str): If compiler_action is FIND_ANY root_spec is
|
||||
a string representation which can be turned directly into
|
||||
a spec, otherwise, it's a hash used to index the activated
|
||||
spack environment.
|
||||
job_name (str): Name of package to be built, used to index the
|
||||
concrete root spec and produce the concrete spec to be
|
||||
built.
|
||||
compiler_action (str): Determines how to interpret the root_spec
|
||||
parameter, either as a string representation or as a hash.
|
||||
|
||||
Returns:
|
||||
|
||||
.. code-block:: JSON
|
||||
|
||||
{
|
||||
"root": "<spec>",
|
||||
"<job-pkg-name>": "<spec>",
|
||||
}
|
||||
|
||||
"""
|
||||
spec_map = {
|
||||
'root': None,
|
||||
}
|
||||
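The mapping documented above is what ci_rebuild later indexes by package name (job_spec = spec_map[job_spec_pkg_name], as shown further down in this diff). A rough sketch of that contract follows; it is not the function's real body, and env_lookup_by_hash is a hypothetical helper standing in for the environment query the real code performs.

    from spack.spec import Spec

    def get_concrete_specs_sketch(env, root_spec, job_name, compiler_action):
        # Sketch of the documented contract only.
        spec_map = {'root': None}
        if compiler_action == 'FIND_ANY':
            # root_spec is a plain spec string that can be parsed directly.
            concrete_root = Spec(root_spec).concretized()
        else:
            # root_spec is a hash used to index the activated environment.
            concrete_root = env_lookup_by_hash(env, root_spec)  # hypothetical
        spec_map['root'] = concrete_root
        # The spec to rebuild is found by package name within the root spec.
        spec_map[job_name] = concrete_root[job_name]
        return spec_map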
@@ -1376,6 +1470,19 @@ def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):


def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
    """ Push one or more binary packages to the mirror.

    Arguments:

        env (spack.environment.Environment): Optional environment. If
            provided, it is used to make sure the binary package to push
            exists in the environment.
        specfile_path (str): Path to the spec.json corresponding to the built
            pkg to push.
        mirror_url (str): Base url of target mirror
        sign_binaries (bool): If True, spack will attempt to sign the binary
            package before pushing.
    """
    try:
        _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url)
    except Exception as inst:
@@ -1400,6 +1507,15 @@ def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):


def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
    """ Looks for spack-build-out.txt in the stage directory of the given
        job_spec, and attempts to copy the file into the directory given
        by job_log_dir.

    Arguments:

        job_spec (spack.spec.Spec): Spec associated with spack install log
        job_log_dir (str): Path into which build log should be copied
    """
    try:
        job_pkg = spack.repo.get(job_spec)
        tty.debug('job package: {0}'.format(job_pkg))
@@ -1418,6 +1534,14 @@ def copy_stage_logs_to_artifacts(job_spec, job_log_dir):


def download_and_extract_artifacts(url, work_dir):
    """ Look for gitlab artifacts.zip at the given url, and attempt to download
        and extract the contents into the given work_dir.

    Arguments:

        url (str): Complete url to artifacts.zip file
        work_dir (str): Path to destination where artifacts should be extracted
    """
    tty.msg('Fetching artifacts from: {0}\n'.format(url))

    headers = {
@@ -1457,6 +1581,8 @@ def download_and_extract_artifacts(url, work_dir):


def get_spack_info():
    """ If spack is running from a git repo, return the most recent git log
        entry, otherwise, return a string containing the spack version. """
    git_path = os.path.join(spack.paths.prefix, ".git")
    if os.path.exists(git_path):
        git = exe.which("git")
@@ -1472,6 +1598,23 @@ def get_spack_info():


def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
    """ Look in the local spack clone to find the checkout_commit, and if
        provided, the merge_commit given as arguments. If those commits can
        be found locally, then clone spack and attempt to recreate a merge
        commit with the same parent commits as tested in gitlab. This looks
        something like 1) git clone repo && cd repo 2) git checkout
        <checkout_commit> 3) git merge <merge_commit>. If there is no
        merge_commit provided, then skip step (3).

    Arguments:

        repro_dir (str): Location where spack should be cloned
        checkout_commit (str): SHA of PR branch commit
        merge_commit (str): SHA of target branch parent

    Returns: True if git repo state was successfully recreated, or False
        otherwise.
    """
    # figure out the path to the spack git version being used for the
    # reproduction
    print('checkout_commit: {0}'.format(checkout_commit))

@@ -1513,7 +1656,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
                         fail_on_error=False)

    if git.returncode != 0:
        tty.error('Unable to clone your local spac repo:')
        tty.error('Unable to clone your local spack repo:')
        tty.msg(clone_out)
        return False
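The three git steps enumerated in the docstring (clone, checkout, optional merge) can be sketched as follows. This illustration uses the standard library subprocess module rather than Spack's executable wrapper, and omits the returncode handling shown in the hunk above.

    import subprocess

    def recreate_tested_merge(local_spack_repo, repro_dir,
                              checkout_commit, merge_commit=None):
        # 1) git clone <local spack repo> <repro_dir>
        subprocess.check_call(['git', 'clone', local_spack_repo, repro_dir])
        # 2) git checkout <checkout_commit> (the PR branch commit)
        subprocess.check_call(['git', 'checkout', checkout_commit], cwd=repro_dir)
        # 3) git merge <merge_commit>; skipped when no merge commit was tested
        if merge_commit:
            subprocess.check_call(['git', 'merge', merge_commit], cwd=repro_dir)
        return True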
@@ -1546,6 +1689,18 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):


def reproduce_ci_job(url, work_dir):
    """ Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
        attempt to set up an environment in which the failure can be reproduced
        locally. This entails the following:

        First download and extract artifacts. Then look through those artifacts
        to glean some information needed for the reproducer (e.g. one of the
        artifacts contains information about the version of spack tested by
        gitlab, another is the generated pipeline yaml containing details
        of the job like the docker image used to run it). The output of this
        function is a set of printed instructions for running docker and then
        commands to run to reproduce the build once inside the container.
    """
    download_and_extract_artifacts(url, work_dir)

    lock_file = fs.find(work_dir, 'spack.lock')[0]
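A hypothetical invocation of reproduce_ci_job from Python; the artifacts URL below is a placeholder, and the printed docker/spack instructions are the function's only output besides the extracted files.

    import tempfile

    work_dir = tempfile.mkdtemp()
    reproduce_ci_job(
        'https://gitlab.example.com/group/project/-/jobs/12345/artifacts/download',
        work_dir)
    # Prints the docker command to start the job's image and the commands to
    # run inside the container to reproduce the failed build.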
@@ -161,11 +161,6 @@ def setup_parser(subparser):
|
||||
help=('Check single spec from json or yaml file instead of release ' +
|
||||
'specs file'))
|
||||
|
||||
check.add_argument(
|
||||
'--rebuild-on-error', default=False, action='store_true',
|
||||
help="Default to rebuilding packages if errors are encountered " +
|
||||
"during the process of checking whether rebuilding is needed")
|
||||
|
||||
check.set_defaults(func=check_fn)
|
||||
|
||||
# Download tarball and specfile
|
||||
@@ -361,7 +356,7 @@ def list_fn(args):
|
||||
try:
|
||||
specs = bindist.update_cache_and_get_specs()
|
||||
except bindist.FetchCacheError as e:
|
||||
tty.error(e)
|
||||
tty.die(e)
|
||||
|
||||
if not args.allarch:
|
||||
arch = spack.spec.Spec.default_arch()
|
||||
@@ -430,7 +425,7 @@ def check_fn(args):
|
||||
sys.exit(0)
|
||||
|
||||
sys.exit(bindist.check_specs_against_mirrors(
|
||||
configured_mirrors, specs, args.output_file, args.rebuild_on_error))
|
||||
configured_mirrors, specs, args.output_file))
|
||||
|
||||
|
||||
def download_fn(args):
|
||||
@@ -486,7 +481,7 @@ def save_specfile_fn(args):
|
||||
else:
|
||||
root_spec = Spec(args.root_spec)
|
||||
root_spec.concretize()
|
||||
root_spec_as_json = root_spec.to_json(hash=ht.build_hash)
|
||||
root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
|
||||
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
|
||||
save_dependency_specfiles(
|
||||
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)
|
||||
@@ -701,7 +696,7 @@ def update_index(mirror_url, update_keys=False):
|
||||
|
||||
def update_index_fn(args):
|
||||
"""Update a buildcache index."""
|
||||
outdir = '.'
|
||||
outdir = 'file://.'
|
||||
if args.mirror_url:
|
||||
outdir = args.mirror_url
|
||||
|
||||
|
||||
@@ -167,8 +167,7 @@ def ci_reindex(args):
|
||||
|
||||
def ci_rebuild(args):
|
||||
"""Check a single spec against the remote mirror, and rebuild it from
|
||||
source if the mirror does not contain the full hash match of the spec
|
||||
as computed locally. """
|
||||
source if the mirror does not contain the hash. """
|
||||
env = spack.cmd.require_active_env(cmd_name='ci rebuild')
|
||||
|
||||
# Make sure the environment is "gitlab-enabled", or else there's nothing
|
||||
@@ -280,8 +279,8 @@ def ci_rebuild(args):
|
||||
env, root_spec, job_spec_pkg_name, compiler_action)
|
||||
job_spec = spec_map[job_spec_pkg_name]
|
||||
|
||||
job_spec_yaml_file = '{0}.yaml'.format(job_spec_pkg_name)
|
||||
job_spec_yaml_path = os.path.join(repro_dir, job_spec_yaml_file)
|
||||
job_spec_json_file = '{0}.json'.format(job_spec_pkg_name)
|
||||
job_spec_json_path = os.path.join(repro_dir, job_spec_json_file)
|
||||
|
||||
# To provide logs, cdash reports, etc for developer download/perusal,
|
||||
# these things have to be put into artifacts. This means downstream
|
||||
@@ -335,23 +334,23 @@ def ci_rebuild(args):
|
||||
# using a compiler already installed on the target system).
|
||||
spack_ci.configure_compilers(compiler_action)
|
||||
|
||||
# Write this job's spec yaml into the reproduction directory, and it will
|
||||
# Write this job's spec json into the reproduction directory, and it will
|
||||
# also be used in the generated "spack install" command to install the spec
|
||||
tty.debug('job concrete spec path: {0}'.format(job_spec_yaml_path))
|
||||
with open(job_spec_yaml_path, 'w') as fd:
|
||||
fd.write(job_spec.to_yaml(hash=ht.build_hash))
|
||||
tty.debug('job concrete spec path: {0}'.format(job_spec_json_path))
|
||||
with open(job_spec_json_path, 'w') as fd:
|
||||
fd.write(job_spec.to_json(hash=ht.dag_hash))
|
||||
|
||||
# Write the concrete root spec yaml into the reproduction directory
|
||||
root_spec_yaml_path = os.path.join(repro_dir, 'root.yaml')
|
||||
with open(root_spec_yaml_path, 'w') as fd:
|
||||
fd.write(spec_map['root'].to_yaml(hash=ht.build_hash))
|
||||
# Write the concrete root spec json into the reproduction directory
|
||||
root_spec_json_path = os.path.join(repro_dir, 'root.json')
|
||||
with open(root_spec_json_path, 'w') as fd:
|
||||
fd.write(spec_map['root'].to_json(hash=ht.dag_hash))
|
||||
|
||||
# Write some other details to aid in reproduction into an artifact
|
||||
repro_file = os.path.join(repro_dir, 'repro.json')
|
||||
repro_details = {
|
||||
'job_name': ci_job_name,
|
||||
'job_spec_yaml': job_spec_yaml_file,
|
||||
'root_spec_yaml': 'root.yaml',
|
||||
'job_spec_json': job_spec_json_file,
|
||||
'root_spec_json': 'root.json',
|
||||
'ci_project_dir': ci_project_dir
|
||||
}
|
||||
with open(repro_file, 'w') as fd:
|
||||
@@ -366,25 +365,24 @@ def ci_rebuild(args):
|
||||
fd.write(b'\n')
|
||||
|
||||
# If we decided there should be a temporary storage mechanism, add that
|
||||
# mirror now so it's used when we check for a full hash match already
|
||||
# mirror now so it's used when we check for a hash match already
|
||||
# built for this spec.
|
||||
if pipeline_mirror_url:
|
||||
spack.mirror.add(spack_ci.TEMP_STORAGE_MIRROR_NAME,
|
||||
pipeline_mirror_url,
|
||||
cfg.default_modify_scope())
|
||||
|
||||
# Check configured mirrors for a built spec with a matching full hash
|
||||
matches = bindist.get_mirrors_for_spec(
|
||||
job_spec, full_hash_match=True, index_only=False)
|
||||
# Check configured mirrors for a built spec with a matching hash
|
||||
matches = bindist.get_mirrors_for_spec(job_spec, index_only=False)
|
||||
|
||||
if matches:
|
||||
# Got a full hash match on at least one configured mirror. All
|
||||
# Got a hash match on at least one configured mirror. All
|
||||
# matches represent the fully up-to-date spec, so should all be
|
||||
# equivalent. If artifacts mirror is enabled, we just pick one
|
||||
# of the matches and download the buildcache files from there to
|
||||
# the artifacts, so they're available to be used by dependent
|
||||
# jobs in subsequent stages.
|
||||
tty.msg('No need to rebuild {0}, found full hash match at: '.format(
|
||||
tty.msg('No need to rebuild {0}, found hash match at: '.format(
|
||||
job_spec_pkg_name))
|
||||
for match in matches:
|
||||
tty.msg(' {0}'.format(match['mirror_url']))
|
||||
@@ -403,7 +401,7 @@ def ci_rebuild(args):
|
||||
# Now we are done and successful
|
||||
sys.exit(0)
|
||||
|
||||
# No full hash match anywhere means we need to rebuild spec
|
||||
# No hash match anywhere means we need to rebuild spec
|
||||
|
||||
# Start with spack arguments
|
||||
install_args = [base_arg for base_arg in CI_REBUILD_INSTALL_BASE_ARGS]
|
||||
@@ -415,7 +413,6 @@ def ci_rebuild(args):
|
||||
install_args.extend([
|
||||
'install',
|
||||
'--keep-stage',
|
||||
'--require-full-hash-match',
|
||||
])
|
||||
|
||||
can_verify = spack_ci.can_verify_binaries()
|
||||
@@ -443,8 +440,8 @@ def ci_rebuild(args):
|
||||
|
||||
# TODO: once we have the concrete spec registry, use the DAG hash
|
||||
# to identify the spec to install, rather than the concrete spec
|
||||
# yaml file.
|
||||
install_args.extend(['-f', job_spec_yaml_path])
|
||||
# json file.
|
||||
install_args.extend(['-f', job_spec_json_path])
|
||||
|
||||
tty.debug('Installing {0} from source'.format(job_spec.name))
|
||||
tty.debug('spack install arguments: {0}'.format(
|
||||
@@ -477,13 +474,13 @@ def ci_rebuild(args):
|
||||
tty.debug('spack install exited {0}'.format(install_exit_code))
|
||||
|
||||
# If a spec fails to build in a spack develop pipeline, we add it to a
|
||||
# list of known broken full hashes. This allows spack PR pipelines to
|
||||
# list of known broken hashes. This allows spack PR pipelines to
|
||||
# avoid wasting compute cycles attempting to build those hashes.
|
||||
if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
|
||||
tty.debug('Install failed on develop')
|
||||
if 'broken-specs-url' in gitlab_ci:
|
||||
broken_specs_url = gitlab_ci['broken-specs-url']
|
||||
dev_fail_hash = job_spec.full_hash()
|
||||
dev_fail_hash = job_spec.dag_hash()
|
||||
broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash)
|
||||
tty.msg('Reporting broken develop build as: {0}'.format(
|
||||
broken_spec_path))
|
||||
@@ -494,7 +491,7 @@ def ci_rebuild(args):
|
||||
'broken-spec': {
|
||||
'job-url': get_env_var('CI_JOB_URL'),
|
||||
'pipeline-url': get_env_var('CI_PIPELINE_URL'),
|
||||
'concrete-spec-yaml': job_spec.to_dict(hash=ht.full_hash)
|
||||
'concrete-spec-dict': job_spec.to_dict(hash=ht.dag_hash)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -539,7 +536,7 @@ def ci_rebuild(args):
|
||||
# per-PR mirror, if this is a PR pipeline
|
||||
if buildcache_mirror_url:
|
||||
spack_ci.push_mirror_contents(
|
||||
env, job_spec_yaml_path, buildcache_mirror_url, sign_binaries
|
||||
env, job_spec_json_path, buildcache_mirror_url, sign_binaries
|
||||
)
|
||||
|
||||
# Create another copy of that buildcache in the per-pipeline
|
||||
@@ -548,14 +545,14 @@ def ci_rebuild(args):
|
||||
# prefix is set)
|
||||
if pipeline_mirror_url:
|
||||
spack_ci.push_mirror_contents(
|
||||
env, job_spec_yaml_path, pipeline_mirror_url, sign_binaries
|
||||
env, job_spec_json_path, pipeline_mirror_url, sign_binaries
|
||||
)
|
||||
|
||||
# If this is a develop pipeline, check if the spec that we just built is
|
||||
# on the broken-specs list. If so, remove it.
|
||||
if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:
|
||||
broken_specs_url = gitlab_ci['broken-specs-url']
|
||||
just_built_hash = job_spec.full_hash()
|
||||
just_built_hash = job_spec.dag_hash()
|
||||
broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
|
||||
if web_util.url_exists(broken_spec_path):
|
||||
tty.msg('Removing {0} from the list of broken specs'.format(
|
||||
|
||||
@@ -18,6 +18,8 @@
|
||||
|
||||
def setup_parser(subparser):
|
||||
arguments.add_common_arguments(subparser, ['clean', 'dirty'])
|
||||
arguments.add_concretizer_args(subparser)
|
||||
|
||||
subparser.add_argument(
|
||||
'--dump', metavar="FILE",
|
||||
help="dump a source-able environment to FILE"
|
||||
|
||||
@@ -22,6 +22,9 @@ def setup_parser(subparser):
|
||||
help="""Concretize with test dependencies. When 'root' is chosen, test
|
||||
dependencies are only added for the environment's root specs. When 'all' is
|
||||
chosen, test dependencies are enabled for all packages in the environment.""")
|
||||
subparser.add_argument(
|
||||
'-q', '--quiet', action='store_true',
|
||||
help="Don't print concretized specs")
|
||||
|
||||
spack.cmd.common.arguments.add_concretizer_args(subparser)
|
||||
|
||||
@@ -38,5 +41,6 @@ def concretize(parser, args):
|
||||
|
||||
with env.write_transaction():
|
||||
concretized_specs = env.concretize(force=args.force, tests=tests)
|
||||
ev.display_specs(concretized_specs)
|
||||
if not args.quiet:
|
||||
ev.display_specs(concretized_specs)
|
||||
env.write()
|
||||
|
||||
@@ -187,6 +187,27 @@ def cmake_args(self):
|
||||
return args"""
|
||||
|
||||
|
||||
class LuaPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for LuaRocks-based packages"""
|
||||
|
||||
base_class_name = 'LuaPackage'
|
||||
|
||||
body_def = """\
|
||||
def luarocks_args(self):
|
||||
# FIXME: Add arguments to `luarocks make` other than rockspec path
|
||||
# FIXME: If not needed delete this function
|
||||
args = []
|
||||
return args"""
|
||||
|
||||
def __init__(self, name, url, *args, **kwargs):
|
||||
# If the user provided `--name lua-lpeg`, don't rename it lua-lua-lpeg
|
||||
if not name.startswith('lua-'):
|
||||
# Make it more obvious that we are renaming the package
|
||||
tty.msg("Changing package name from {0} to lua-{0}".format(name))
|
||||
name = 'lua-{0}'.format(name)
|
||||
super(LuaPackageTemplate, self).__init__(name, url, *args, **kwargs)
|
||||
|
||||
|
||||
class MesonPackageTemplate(PackageTemplate):
|
||||
"""Provides appropriate overrides for meson-based packages"""
|
||||
|
||||
@@ -580,6 +601,7 @@ def __init__(self, name, *args, **kwargs):
|
||||
'makefile': MakefilePackageTemplate,
|
||||
'intel': IntelPackageTemplate,
|
||||
'meson': MesonPackageTemplate,
|
||||
'lua': LuaPackageTemplate,
|
||||
'sip': SIPPackageTemplate,
|
||||
'generic': PackageTemplate,
|
||||
}
|
||||
@@ -644,6 +666,9 @@ def __call__(self, stage, url):
|
||||
if url.endswith('.whl') or '.whl#' in url:
|
||||
self.build_system = 'python'
|
||||
return
|
||||
if url.endswith('.rock'):
|
||||
self.build_system = 'lua'
|
||||
return
|
||||
|
||||
# A list of clues that give us an idea of the build system a package
|
||||
# uses. If the regular expression matches a file contained in the
|
||||
@@ -668,6 +693,7 @@ def __call__(self, stage, url):
|
||||
(r'/Rakefile$', 'ruby'),
|
||||
(r'/setup\.rb$', 'ruby'),
|
||||
(r'/.*\.pro$', 'qmake'),
|
||||
(r'/.*\.rockspec$', 'lua'),
|
||||
(r'/(GNU)?[Mm]akefile$', 'makefile'),
|
||||
(r'/DESCRIPTION$', 'octave'),
|
||||
(r'/meson\.build$', 'meson'),
|
||||
|
||||
@@ -91,8 +91,8 @@ def dev_build(self, args):
|
||||
spec.concretize()
|
||||
package = spack.repo.get(spec)
|
||||
|
||||
if package.installed:
|
||||
tty.error("Already installed in %s" % package.prefix)
|
||||
if spec.installed:
|
||||
tty.error("Already installed in %s" % spec.prefix)
|
||||
tty.msg("Uninstall or try adding a version suffix for this dev build.")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
@@ -68,8 +68,14 @@ def compare_specs(a, b, to_string=False, color=None):
|
||||
# Prepare a solver setup to parse differences
|
||||
setup = asp.SpackSolverSetup()
|
||||
|
||||
a_facts = set(t for t in setup.spec_clauses(a, body=True, expand_hashes=True))
|
||||
b_facts = set(t for t in setup.spec_clauses(b, body=True, expand_hashes=True))
|
||||
# get facts for specs, making sure to include build dependencies of concrete
|
||||
# specs and to descend into dependency hashes so we include all facts.
|
||||
a_facts = set(t for t in setup.spec_clauses(
|
||||
a, body=True, expand_hashes=True, concrete_build_deps=True,
|
||||
))
|
||||
b_facts = set(t for t in setup.spec_clauses(
|
||||
b, body=True, expand_hashes=True, concrete_build_deps=True,
|
||||
))
|
||||
|
||||
# We want to present them to the user as simple key: values
|
||||
intersect = sorted(a_facts.intersection(b_facts))
|
||||
|
||||
@@ -8,6 +8,8 @@
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
import six
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.tty.colify import colify
|
||||
@@ -41,7 +43,8 @@
|
||||
'loads',
|
||||
'view',
|
||||
'update',
|
||||
'revert'
|
||||
'revert',
|
||||
'depfile'
|
||||
]
|
||||
|
||||
|
||||
@@ -523,6 +526,154 @@ def env_revert(args):
|
||||
tty.msg(msg.format(manifest_file))
|
||||
|
||||
|
||||
def env_depfile_setup_parser(subparser):
|
||||
"""generate a depfile from the concrete environment specs"""
|
||||
subparser.add_argument(
|
||||
'--make-target-prefix', default=None, metavar='TARGET',
|
||||
help='prefix Makefile targets with <TARGET>/<name>. By default the absolute '
|
||||
'path to the directory makedeps under the environment metadata dir is '
|
||||
'used. Can be set to an empty string --make-target-prefix \'\'.')
|
||||
subparser.add_argument(
|
||||
'--make-disable-jobserver', default=True, action='store_false',
|
||||
dest='jobserver', help='disable POSIX jobserver support.')
|
||||
subparser.add_argument(
|
||||
'-o', '--output', default=None, metavar='FILE',
|
||||
help='write the depfile to FILE rather than to stdout')
|
||||
subparser.add_argument(
|
||||
'-G', '--generator', default='make', choices=('make',),
|
||||
help='specify the depfile type. Currently only make is supported.')
|
||||
|
||||
|
||||
def env_depfile(args):
|
||||
# Currently only make is supported.
|
||||
spack.cmd.require_active_env(cmd_name='env depfile')
|
||||
env = ev.active_environment()
|
||||
|
||||
# Maps each hash in the environment to a string of install prereqs
|
||||
hash_to_prereqs = {}
|
||||
hash_to_spec = {}
|
||||
|
||||
if args.make_target_prefix is None:
|
||||
target_prefix = os.path.join(env.env_subdir_path, 'makedeps')
|
||||
else:
|
||||
target_prefix = args.make_target_prefix
|
||||
|
||||
def get_target(name):
|
||||
# The `all`, `fetch` and `clean` targets are phony. It doesn't make sense to
|
||||
# have /abs/path/to/env/metadir/{all,clean} targets. But it *does* make
|
||||
# sense to have a prefix like `env/all`, `env/fetch`, `env/clean` when they are
|
||||
# supposed to be included
|
||||
if name in ('all', 'fetch-all', 'clean') and os.path.isabs(target_prefix):
|
||||
return name
|
||||
else:
|
||||
return os.path.join(target_prefix, name)
|
||||
|
||||
def get_install_target(name):
|
||||
return os.path.join(target_prefix, '.install', name)
|
||||
|
||||
def get_fetch_target(name):
|
||||
return os.path.join(target_prefix, '.fetch', name)
|
||||
|
||||
for _, spec in env.concretized_specs():
|
||||
for s in spec.traverse(root=True):
|
||||
hash_to_spec[s.dag_hash()] = s
|
||||
hash_to_prereqs[s.dag_hash()] = [
|
||||
get_install_target(dep.dag_hash()) for dep in s.dependencies()]
|
||||
|
||||
root_dags = [s.dag_hash() for _, s in env.concretized_specs()]
|
||||
|
||||
# Root specs without deps are the prereqs for the environment target
|
||||
root_install_targets = [get_install_target(h) for h in root_dags]
|
||||
|
||||
# All package install targets, not just roots.
|
||||
all_install_targets = [get_install_target(h) for h in hash_to_spec.keys()]
|
||||
|
||||
# Fetch targets for all packages in the environment, not just roots.
|
||||
all_fetch_targets = [get_fetch_target(h) for h in hash_to_spec.keys()]
|
||||
|
||||
buf = six.StringIO()
|
||||
|
||||
buf.write("""SPACK ?= spack
|
||||
|
||||
.PHONY: {} {} {}
|
||||
|
||||
{}: {}
|
||||
|
||||
{}: {}
|
||||
|
||||
{}: {}
|
||||
\t@touch $@
|
||||
|
||||
{}: {}
|
||||
\t@touch $@
|
||||
|
||||
{}:
|
||||
\t@mkdir -p {} {}
|
||||
|
||||
{}: | {}
|
||||
\t$(info Fetching $(SPEC))
|
||||
\t$(SPACK) -e '{}' fetch $(SPACK_FETCH_FLAGS) /$(notdir $@) && touch $@
|
||||
|
||||
{}: {}
|
||||
\t$(info Installing $(SPEC))
|
||||
\t{}$(SPACK) -e '{}' install $(SPACK_INSTALL_FLAGS) --only-concrete --only=package \
|
||||
--no-add /$(notdir $@) && touch $@
|
||||
|
||||
""".format(get_target('all'), get_target('fetch-all'), get_target('clean'),
|
||||
get_target('all'), get_target('env'),
|
||||
get_target('fetch-all'), get_target('fetch'),
|
||||
get_target('env'), ' '.join(root_install_targets),
|
||||
get_target('fetch'), ' '.join(all_fetch_targets),
|
||||
get_target('dirs'), get_target('.fetch'), get_target('.install'),
|
||||
get_target('.fetch/%'), get_target('dirs'),
|
||||
env.path,
|
||||
get_target('.install/%'), get_target('.fetch/%'),
|
||||
'+' if args.jobserver else '', env.path))
|
||||
|
||||
# Targets are of the form <prefix>/<name>: [<prefix>/<depname>]...,
|
||||
# The prefix can be an empty string, in that case we don't add the `/`.
|
||||
# The name is currently the dag hash of the spec. In principle it
|
||||
# could be the package name in case of `concretization: together` so
|
||||
# it can be more easily referred to, but for now we don't special case
|
||||
# this.
|
||||
fmt = '{name}{@version}{%compiler}{variants}{arch=architecture}'
|
||||
|
||||
# Set SPEC for each hash
|
||||
buf.write('# Set the human-readable spec for each target\n')
|
||||
for dag_hash in hash_to_prereqs.keys():
|
||||
formatted_spec = hash_to_spec[dag_hash].format(fmt)
|
||||
buf.write("{}: SPEC = {}\n".format(get_target('%/' + dag_hash), formatted_spec))
|
||||
buf.write('\n')
|
||||
|
||||
# Set install dependencies
|
||||
buf.write('# Install dependencies\n')
|
||||
for parent, children in hash_to_prereqs.items():
|
||||
if not children:
|
||||
continue
|
||||
buf.write('{}: {}\n'.format(get_install_target(parent), ' '.join(children)))
|
||||
buf.write('\n')
|
||||
|
||||
# Clean target: remove target files but not their folders, cause
|
||||
# --make-target-prefix can be any existing directory we do not control,
|
||||
# including empty string (which means deleting the containing folder
|
||||
# would delete the folder with the Makefile)
|
||||
buf.write("{}:\n\trm -f -- {} {} {} {}\n".format(
|
||||
get_target('clean'),
|
||||
get_target('env'),
|
||||
get_target('fetch'),
|
||||
' '.join(all_fetch_targets),
|
||||
' '.join(all_install_targets)))
|
||||
|
||||
makefile = buf.getvalue()
|
||||
|
||||
# Finally write to stdout/file.
|
||||
if args.output:
|
||||
with open(args.output, 'w') as f:
|
||||
f.write(makefile)
|
||||
else:
|
||||
sys.stdout.write(makefile)
|
||||
|
||||
|
||||
#: Dictionary mapping subcommand names and aliases to functions
|
||||
subcommand_functions = {}
|
||||
|
||||
|
||||
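The target-naming rules in env_depfile are easiest to see with concrete values. The standalone sketch below mirrors get_target; the example paths in the comments are illustrative rather than output from a real environment. To consume the result one would typically generate the file with 'spack -e <env> env depfile -o Makefile' and then run make against it.

    import os

    def get_target_sketch(target_prefix, name):
        # Phony targets keep their bare names when the prefix is an absolute
        # path (the default, <env metadata dir>/makedeps), so a plain
        # `make all` still works; with a relative prefix such as 'env' they
        # become 'env/all', 'env/fetch-all', 'env/clean'.
        if name in ('all', 'fetch-all', 'clean') and os.path.isabs(target_prefix):
            return name
        return os.path.join(target_prefix, name)

    # get_target_sketch('/path/to/.spack-env/makedeps', 'all')  -> 'all'
    # get_target_sketch('env', 'all')                            -> 'env/all'
    # get_target_sketch('env', '.install/abcdef0')               -> 'env/.install/abcdef0'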
@@ -5,6 +5,7 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
import llnl.util.tty as tty
|
||||
@@ -13,6 +14,7 @@
|
||||
import spack
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments
|
||||
import spack.cray_manifest as cray_manifest
|
||||
import spack.detection
|
||||
import spack.error
|
||||
import spack.util.environment
|
||||
@@ -35,6 +37,9 @@ def setup_parser(subparser):
|
||||
find_parser.add_argument(
|
||||
'--not-buildable', action='store_true', default=False,
|
||||
help="packages with detected externals won't be built with Spack")
|
||||
find_parser.add_argument(
|
||||
'-p', '--path', default=None, action='append',
|
||||
help="Alternative search paths for finding externals. May be repeated")
|
||||
find_parser.add_argument(
|
||||
'--scope', choices=scopes, metavar=scopes_metavar,
|
||||
default=spack.config.default_modify_scope('packages'),
|
||||
@@ -55,8 +60,40 @@ def setup_parser(subparser):
|
||||
'list', help='list detectable packages, by repository and name'
|
||||
)
|
||||
|
||||
read_cray_manifest = sp.add_parser(
|
||||
'read-cray-manifest', help=(
|
||||
"consume a Spack-compatible description of externally-installed "
|
||||
"packages, including dependency relationships"
|
||||
)
|
||||
)
|
||||
read_cray_manifest.add_argument(
|
||||
'--file', default=None,
|
||||
help="specify a location other than the default")
|
||||
read_cray_manifest.add_argument(
|
||||
'--directory', default=None,
|
||||
help="specify a directory storing a group of manifest files")
|
||||
read_cray_manifest.add_argument(
|
||||
'--dry-run', action='store_true', default=False,
|
||||
help="don't modify DB with files that are read")
|
||||
read_cray_manifest.add_argument(
|
||||
'--fail-on-error', action='store_true',
|
||||
help=("if a manifest file cannot be parsed, fail and report the "
|
||||
"full stack trace")
|
||||
)
|
||||
|
||||
|
||||
def external_find(args):
|
||||
if args.all or not (args.tags or args.packages):
|
||||
# If the user calls 'spack external find' with no arguments, and
|
||||
# this system has a description of installed packages, then we should
|
||||
# consume it automatically.
|
||||
try:
|
||||
_collect_and_consume_cray_manifest_files()
|
||||
except NoManifestFileError:
|
||||
# It's fine to not find any manifest file if we are doing the
|
||||
# search implicitly (i.e. as part of 'spack external find')
|
||||
pass
|
||||
|
||||
# If the user didn't specify anything, search for build tools by default
|
||||
if not args.tags and not args.all and not args.packages:
|
||||
args.tags = ['core-packages', 'build-tools']
|
||||
@@ -90,8 +127,10 @@ def external_find(args):
|
||||
if not args.tags and not packages_to_check:
|
||||
packages_to_check = spack.repo.path.all_packages()
|
||||
|
||||
detected_packages = spack.detection.by_executable(packages_to_check)
|
||||
detected_packages.update(spack.detection.by_library(packages_to_check))
|
||||
detected_packages = spack.detection.by_executable(
|
||||
packages_to_check, path_hints=args.path)
|
||||
detected_packages.update(spack.detection.by_library(
|
||||
packages_to_check, path_hints=args.path))
|
||||
|
||||
new_entries = spack.detection.update_configuration(
|
||||
detected_packages, scope=args.scope, buildable=not args.not_buildable
|
||||
@@ -106,6 +145,56 @@ def external_find(args):
|
||||
tty.msg('No new external packages detected')
|
||||
|
||||
|
||||
def external_read_cray_manifest(args):
|
||||
_collect_and_consume_cray_manifest_files(
|
||||
manifest_file=args.file,
|
||||
manifest_directory=args.directory,
|
||||
dry_run=args.dry_run,
|
||||
fail_on_error=args.fail_on_error
|
||||
)
|
||||
|
||||
|
||||
def _collect_and_consume_cray_manifest_files(
|
||||
manifest_file=None, manifest_directory=None, dry_run=False,
|
||||
fail_on_error=False):
|
||||
|
||||
manifest_files = []
|
||||
if manifest_file:
|
||||
manifest_files.append(manifest_file)
|
||||
|
||||
manifest_dirs = []
|
||||
if manifest_directory:
|
||||
manifest_dirs.append(manifest_directory)
|
||||
|
||||
if os.path.isdir(cray_manifest.default_path):
|
||||
tty.debug(
|
||||
"Cray manifest path {0} exists: collecting all files to read."
|
||||
.format(cray_manifest.default_path))
|
||||
manifest_dirs.append(cray_manifest.default_path)
|
||||
else:
|
||||
tty.debug("Default Cray manifest directory {0} does not exist."
|
||||
.format(cray_manifest.default_path))
|
||||
|
||||
for directory in manifest_dirs:
|
||||
for fname in os.listdir(directory):
|
||||
manifest_files.append(os.path.join(directory, fname))
|
||||
|
||||
if not manifest_files:
|
||||
raise NoManifestFileError(
|
||||
"--file/--directory not specified, and no manifest found at {0}"
|
||||
.format(cray_manifest.default_path))
|
||||
|
||||
for path in manifest_files:
|
||||
try:
|
||||
cray_manifest.read(path, not dry_run)
|
||||
except (spack.compilers.UnknownCompilerError, spack.error.SpackError) as e:
|
||||
if fail_on_error:
|
||||
raise
|
||||
else:
|
||||
tty.warn("Failure reading manifest file: {0}"
|
||||
"\n\t{1}".format(path, str(e)))
|
||||
|
||||
|
||||
def external_list(args):
|
||||
# Trigger a read of all packages, might take a long time.
|
||||
list(spack.repo.path.all_packages())
|
||||
@@ -117,5 +206,10 @@ def external_list(args):
|
||||
|
||||
|
||||
def external(parser, args):
|
||||
action = {'find': external_find, 'list': external_list}
|
||||
action = {'find': external_find, 'list': external_list,
|
||||
'read-cray-manifest': external_read_cray_manifest}
|
||||
action[args.external_command](args)
|
||||
|
||||
|
||||
class NoManifestFileError(spack.error.SpackError):
|
||||
pass
|
||||
|
||||
@@ -69,14 +69,10 @@ def fetch(parser, args):
|
||||
|
||||
for spec in specs:
|
||||
if args.missing or args.dependencies:
|
||||
for s in spec.traverse():
|
||||
package = spack.repo.get(s)
|
||||
|
||||
for s in spec.traverse(root=False):
|
||||
# Skip already-installed packages with --missing
|
||||
if args.missing and package.installed:
|
||||
if args.missing and s.installed:
|
||||
continue
|
||||
|
||||
package.do_fetch()
|
||||
|
||||
package = spack.repo.get(spec)
|
||||
package.do_fetch()
|
||||
s.package.do_fetch()
|
||||
spec.package.do_fetch()
|
||||
|
||||
@@ -184,8 +184,9 @@ def print_detectable(pkg):
|
||||
color.cprint('')
|
||||
color.cprint(section_title('Externally Detectable: '))
|
||||
|
||||
# If the package has an 'executables' field, it can detect an installation
|
||||
if hasattr(pkg, 'executables'):
|
||||
# If the package has an 'executables' or 'libraries' field, it
|
||||
# can detect an installation
|
||||
if hasattr(pkg, 'executables') or hasattr(pkg, 'libraries'):
|
||||
find_attributes = []
|
||||
if hasattr(pkg, 'determine_version'):
|
||||
find_attributes.append('version')
|
||||
|
||||
@@ -47,7 +47,6 @@ def update_kwargs_from_args(args, kwargs):
|
||||
'explicit': True, # Always true for install command
|
||||
'stop_at': args.until,
|
||||
'unsigned': args.unsigned,
|
||||
'full_hash_match': args.full_hash_match,
|
||||
})
|
||||
|
||||
kwargs.update({
|
||||
@@ -117,11 +116,6 @@ def setup_parser(subparser):
|
||||
'--no-check-signature', action='store_true',
|
||||
dest='unsigned', default=False,
|
||||
help="do not check signatures of binary packages")
|
||||
subparser.add_argument(
|
||||
'--require-full-hash-match', action='store_true',
|
||||
dest='full_hash_match', default=False, help="""when installing from
|
||||
binary mirrors, do not install binary package unless the full hash of the
|
||||
remote spec matches that of the local spec""")
|
||||
subparser.add_argument(
|
||||
'--show-log-on-error', action='store_true',
|
||||
help="print full build log to stderr if build fails")
|
||||
@@ -159,10 +153,6 @@ def setup_parser(subparser):
|
||||
if 'all' is chosen, run package tests during installation for all
|
||||
packages. If neither are chosen, don't run tests for any packages."""
|
||||
)
|
||||
testing.add_argument(
|
||||
'--run-tests', action='store_true',
|
||||
help='run package tests during installation (same as --test=all)'
|
||||
)
|
||||
subparser.add_argument(
|
||||
'--log-format',
|
||||
default=None,
|
||||
@@ -316,11 +306,8 @@ def install(parser, args, **kwargs):
|
||||
if args.log_file:
|
||||
reporter.filename = args.log_file
|
||||
|
||||
if args.run_tests:
|
||||
tty.warn("Deprecated option: --run-tests: use --test=all instead")
|
||||
|
||||
def get_tests(specs):
|
||||
if args.test == 'all' or args.run_tests:
|
||||
if args.test == 'all':
|
||||
return True
|
||||
elif args.test == 'root':
|
||||
return [spec.name for spec in specs]
|
||||
@@ -477,7 +464,7 @@ def get_tests(specs):
|
||||
})
|
||||
|
||||
# If we are using the monitor, we send configs. and create build
|
||||
# The full_hash is the main package id, the build_hash for others
|
||||
# The dag_hash is the main package id
|
||||
if args.use_monitor and specs:
|
||||
monitor.new_configuration(specs)
|
||||
install_specs(args, kwargs, zip(abstract_specs, specs))
|
||||
|
||||
@@ -273,7 +273,7 @@ def refresh(module_type, specs, args):
|
||||
return
|
||||
|
||||
if not args.upstream_modules:
|
||||
specs = list(s for s in specs if not s.package.installed_upstream)
|
||||
specs = list(s for s in specs if not s.installed_upstream)
|
||||
|
||||
if not args.yes_to_all:
|
||||
msg = 'You are about to regenerate {types} module files for:\n'
|
||||
|
||||
@@ -136,13 +136,13 @@ def solve(parser, args):
|
||||
)
|
||||
|
||||
fmt = " @K{%%-8d} %%-%ds%%9s %%7s" % maxlen
|
||||
for i, (idx, build_idx, name) in enumerate(result.criteria, 1):
|
||||
for i, (installed_cost, build_cost, name) in enumerate(result.criteria, 1):
|
||||
color.cprint(
|
||||
fmt % (
|
||||
i,
|
||||
name,
|
||||
"-" if build_idx is None else opt[idx],
|
||||
opt[idx] if build_idx is None else opt[build_idx],
|
||||
"-" if build_cost is None else installed_cost,
|
||||
installed_cost if build_cost is None else build_cost,
|
||||
)
|
||||
)
|
||||
print()
|
||||
@@ -151,9 +151,9 @@ def solve(parser, args):
|
||||
# With -y, just print YAML to output.
|
||||
if args.format == 'yaml':
|
||||
# use write because to_yaml already has a newline.
|
||||
sys.stdout.write(spec.to_yaml(hash=ht.build_hash))
|
||||
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
|
||||
elif args.format == 'json':
|
||||
sys.stdout.write(spec.to_json(hash=ht.build_hash))
|
||||
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
|
||||
else:
|
||||
sys.stdout.write(
|
||||
spec.tree(color=sys.stdout.isatty(), **kwargs))
|
||||
|
||||
@@ -34,12 +34,16 @@ def setup_parser(subparser):
|
||||
arguments.add_common_arguments(
|
||||
subparser, ['long', 'very_long', 'install_status']
|
||||
)
|
||||
subparser.add_argument(
|
||||
format_group = subparser.add_mutually_exclusive_group()
|
||||
format_group.add_argument(
|
||||
'-y', '--yaml', action='store_const', dest='format', default=None,
|
||||
const='yaml', help='print concrete spec as YAML')
|
||||
subparser.add_argument(
|
||||
format_group.add_argument(
|
||||
'-j', '--json', action='store_const', dest='format', default=None,
|
||||
const='json', help='print concrete spec as JSON')
|
||||
format_group.add_argument(
|
||||
'--format', action='store', default=None,
|
||||
help='print concrete spec with the specified format string')
|
||||
subparser.add_argument(
|
||||
'-c', '--cover', action='store',
|
||||
default='nodes', choices=['nodes', 'edges', 'paths'],
|
||||
@@ -47,10 +51,6 @@ def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-N', '--namespaces', action='store_true', default=False,
|
||||
help='show fully qualified package names')
|
||||
subparser.add_argument(
|
||||
'--hash-type', default="build_hash",
|
||||
choices=['build_hash', 'full_hash', 'dag_hash'],
|
||||
help='generate spec with a particular hash type.')
|
||||
subparser.add_argument(
|
||||
'-t', '--types', action='store_true', default=False,
|
||||
help='show dependency types')
|
||||
@@ -92,14 +92,13 @@ def spec(parser, args):
|
||||
for (input, output) in specs:
|
||||
# With -y, just print YAML to output.
|
||||
if args.format:
|
||||
# The user can specify the hash type to use
|
||||
hash_type = getattr(ht, args.hash_type)
|
||||
|
||||
if args.format == 'yaml':
|
||||
# use write because to_yaml already has a newline.
|
||||
sys.stdout.write(output.to_yaml(hash=hash_type))
|
||||
sys.stdout.write(output.to_yaml(hash=ht.dag_hash))
|
||||
elif args.format == 'json':
|
||||
print(output.to_json(hash=ht.dag_hash))
|
||||
else:
|
||||
print(output.to_json(hash=hash_type))
|
||||
print(output.format(args.format))
|
||||
continue
|
||||
|
||||
with tree_context():
|
||||
|
||||
@@ -337,6 +337,8 @@ def _report_suite_results(test_suite, args, constraints):
|
||||
pkg_id, status = line.split()
|
||||
results[pkg_id] = status
|
||||
|
||||
tty.msg('test specs:')
|
||||
|
||||
failed, skipped, untested = 0, 0, 0
|
||||
for pkg_id in test_specs:
|
||||
if pkg_id in results:
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
|
||||
|
||||
# tutorial configuration parameters
|
||||
tutorial_branch = "releases/v%d.%d" % spack.spack_version_info[:2]
|
||||
tutorial_branch = "releases/v0.17"
|
||||
tutorial_mirror = "file:///mirror"
|
||||
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
|
||||
|
||||
|
||||
@@ -62,9 +62,14 @@ def setup_parser(subparser):
|
||||
'-a', '--all', action='store_true', dest='all',
|
||||
help="remove ALL installed packages that match each supplied spec"
|
||||
)
|
||||
subparser.add_argument(
|
||||
'--origin', dest='origin',
|
||||
help="only remove DB records with the specified origin"
|
||||
)
|
||||
|
||||
|
||||
def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
|
||||
def find_matching_specs(env, specs, allow_multiple_matches=False, force=False,
|
||||
origin=None):
|
||||
"""Returns a list of specs matching the not necessarily
|
||||
concretized specs given from cli
|
||||
|
||||
@@ -85,8 +90,8 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
|
||||
has_errors = False
|
||||
for spec in specs:
|
||||
install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
|
||||
matching = spack.store.db.query_local(spec, hashes=hashes,
|
||||
installed=install_query)
|
||||
matching = spack.store.db.query_local(
|
||||
spec, hashes=hashes, installed=install_query, origin=origin)
|
||||
# For each spec provided, make sure it refers to only one package.
|
||||
# Fail and ask user to be unambiguous if it doesn't
|
||||
if not allow_multiple_matches and len(matching) > 1:
|
||||
@@ -220,15 +225,25 @@ def do_uninstall(env, specs, force):
|
||||
|
||||
# A package is ready to be uninstalled when nothing else references it,
|
||||
# unless we are requested to force uninstall it.
|
||||
is_ready = lambda x: not spack.store.db.query_by_spec_hash(x)[1].ref_count
|
||||
if force:
|
||||
is_ready = lambda x: True
|
||||
def is_ready(dag_hash):
|
||||
if force:
|
||||
return True
|
||||
|
||||
_, record = spack.store.db.query_by_spec_hash(dag_hash)
|
||||
if not record.ref_count:
|
||||
return True
|
||||
|
||||
# If this spec is only used as a build dependency, we can uninstall
|
||||
return all(
|
||||
dspec.deptypes == ("build",)
|
||||
for dspec in record.spec.edges_from_dependents()
|
||||
)
|
||||
|
||||
while packages:
|
||||
ready = [x for x in packages if is_ready(x.spec.dag_hash())]
|
||||
if not ready:
|
||||
msg = 'unexpected error [cannot proceed uninstalling specs with' \
|
||||
' remaining dependents {0}]'
|
||||
' remaining link or run dependents {0}]'
|
||||
msg = msg.format(', '.join(x.name for x in packages))
|
||||
raise spack.error.SpackError(msg)
|
||||
|
||||
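The new is_ready predicate above keeps a spec eligible for uninstall when its only remaining dependents reach it through build-only edges. A small standalone illustration of that rule, using stand-in record and edge types instead of Spack's database objects:

    from collections import namedtuple

    Edge = namedtuple('Edge', ['deptypes'])
    Record = namedtuple('Record', ['ref_count', 'edges'])

    def is_ready_sketch(record, force=False):
        if force:
            return True
        if not record.ref_count:
            return True
        # Referenced only through build edges -> still safe to uninstall.
        return all(edge.deptypes == ("build",) for edge in record.edges)

    # Only a build-time consumer remains: can be uninstalled.
    print(is_ready_sketch(Record(ref_count=1, edges=[Edge(deptypes=("build",))])))  # True
    # A link-time dependent still needs it: must stay installed.
    print(is_ready_sketch(Record(ref_count=1, edges=[Edge(deptypes=("link",))])))   # False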
@@ -240,7 +255,8 @@ def do_uninstall(env, specs, force):
|
||||
def get_uninstall_list(args, specs, env):
|
||||
# Gets the list of installed specs that match the ones give via cli
|
||||
# args.all takes care of the case where '-a' is given in the cli
|
||||
uninstall_list = find_matching_specs(env, specs, args.all, args.force)
|
||||
uninstall_list = find_matching_specs(env, specs, args.all, args.force,
|
||||
args.origin)
|
||||
|
||||
# Takes care of '-R'
|
||||
active_dpts, inactive_dpts = installed_dependents(uninstall_list, env)
|
||||
|
||||
@@ -495,7 +495,8 @@ def get_compiler_duplicates(compiler_spec, arch_spec):
|
||||
@llnl.util.lang.memoized
|
||||
def class_for_compiler_name(compiler_name):
|
||||
"""Given a compiler module name, get the corresponding Compiler class."""
|
||||
assert supported(compiler_name)
|
||||
if not supported(compiler_name):
|
||||
raise UnknownCompilerError(compiler_name)
|
||||
|
||||
# Hack to be able to call the compiler `apple-clang` while still
|
||||
# using a valid python name for the module
|
||||
@@ -788,6 +789,13 @@ def __init__(self):
|
||||
"Spack could not find any compilers!")
|
||||
|
||||
|
||||
class UnknownCompilerError(spack.error.SpackError):
|
||||
def __init__(self, compiler_name):
|
||||
super(UnknownCompilerError, self).__init__(
|
||||
"Spack doesn't support the requested compiler: {0}"
|
||||
.format(compiler_name))
|
||||
|
||||
|
||||
class NoCompilerForSpecError(spack.error.SpackError):
|
||||
def __init__(self, compiler_spec, target):
|
||||
super(NoCompilerForSpecError, self).__init__(
|
||||
|
||||
@@ -78,10 +78,8 @@ def cxx14_flag(self):
|
||||
self, "the C++14 standard", "cxx14_flag", "< 4.8")
|
||||
elif self.real_version < ver('4.9'):
|
||||
return "-std=c++1y"
|
||||
elif self.real_version < ver('6.0'):
|
||||
return "-std=c++14"
|
||||
else:
|
||||
return ""
|
||||
return "-std=c++14"
|
||||
|
||||
@property
|
||||
def cxx17_flag(self):
|
||||
|
||||
lib/spack/spack/cray_manifest.py (new file, 193 lines)
@@ -0,0 +1,193 @@
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import json
|
||||
|
||||
import jsonschema
|
||||
import six
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.cmd
|
||||
import spack.hash_types as hash_types
|
||||
from spack.schema.cray_manifest import schema as manifest_schema
|
||||
|
||||
#: Cray systems can store a Spack-compatible description of system
|
||||
#: packages here.
|
||||
default_path = '/opt/cray/pe/cpe-descriptive-manifest/'
|
||||
|
||||
compiler_name_translation = {
|
||||
'nvidia': 'nvhpc',
|
||||
}
|
||||
|
||||
|
||||
def translated_compiler_name(manifest_compiler_name):
|
||||
"""
|
||||
When creating a Compiler object, Spack expects a name matching
|
||||
one of the classes in `spack.compilers`. Names in the Cray manifest
|
||||
may differ; for cases where we know the name refers to a compiler in
|
||||
Spack, this function translates it automatically.
|
||||
|
||||
This function will raise an error if there is no recorded translation
|
||||
and the name doesn't match a known compiler name.
|
||||
"""
|
||||
if manifest_compiler_name in compiler_name_translation:
|
||||
return compiler_name_translation[manifest_compiler_name]
|
||||
elif manifest_compiler_name in spack.compilers.supported_compilers():
|
||||
return manifest_compiler_name
|
||||
else:
|
||||
# Try to fail quickly. This can occur in two cases: (1) a compiler
# definition names an unknown compiler, or (2) a spec specifies a compiler
# that doesn't exist. The first is caught when the compiler definition is
# created; the second results in Specs with associated undefined compilers.
|
||||
raise spack.compilers.UnknownCompilerError(
|
||||
"Manifest parsing - unknown compiler: {0}"
|
||||
.format(manifest_compiler_name))
|
||||
|
||||
|
||||
def compiler_from_entry(entry):
|
||||
compiler_name = translated_compiler_name(entry['name'])
|
||||
paths = entry['executables']
|
||||
version = entry['version']
|
||||
arch = entry['arch']
|
||||
operating_system = arch['os']
|
||||
target = arch['target']
|
||||
|
||||
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
|
||||
spec = spack.spec.CompilerSpec(compiler_cls.name, version)
|
||||
paths = [paths.get(x, None) for x in ('cc', 'cxx', 'f77', 'fc')]
|
||||
return compiler_cls(
|
||||
spec, operating_system, target, paths
|
||||
)
|
||||
|
||||
|
||||
def spec_from_entry(entry):
|
||||
arch_str = ""
|
||||
if 'arch' in entry:
|
||||
arch_format = "arch={platform}-{os}-{target}"
|
||||
arch_str = arch_format.format(
|
||||
platform=entry['arch']['platform'],
|
||||
os=entry['arch']['platform_os'],
|
||||
target=entry['arch']['target']['name']
|
||||
)
|
||||
|
||||
compiler_str = ""
|
||||
if 'compiler' in entry:
|
||||
compiler_format = "%{name}@{version}"
|
||||
compiler_str = compiler_format.format(
|
||||
name=translated_compiler_name(entry['compiler']['name']),
|
||||
version=entry['compiler']['version']
|
||||
)
|
||||
|
||||
spec_format = "{name}@{version} {compiler} {arch}"
|
||||
spec_str = spec_format.format(
|
||||
name=entry['name'],
|
||||
version=entry['version'],
|
||||
compiler=compiler_str,
|
||||
arch=arch_str
|
||||
)
|
||||
|
||||
package = spack.repo.get(entry['name'])
|
||||
|
||||
if 'parameters' in entry:
|
||||
variant_strs = list()
|
||||
for name, value in entry['parameters'].items():
|
||||
# TODO: also ensure that the variant value is valid?
|
||||
if not (name in package.variants):
|
||||
tty.debug("Omitting variant {0} for entry {1}/{2}"
|
||||
.format(name, entry['name'], entry['hash'][:7]))
|
||||
continue
|
||||
|
||||
# Value could be a list (of strings), boolean, or string
|
||||
if isinstance(value, six.string_types):
|
||||
variant_strs.append('{0}={1}'.format(name, value))
|
||||
else:
|
||||
try:
|
||||
iter(value)
|
||||
variant_strs.append(
|
||||
'{0}={1}'.format(name, ','.join(value)))
|
||||
continue
|
||||
except TypeError:
|
||||
# Not an iterable
|
||||
pass
|
||||
# At this point not a string or collection, check for boolean
|
||||
if value in [True, False]:
|
||||
bool_symbol = '+' if value else '~'
|
||||
variant_strs.append('{0}{1}'.format(bool_symbol, name))
|
||||
else:
|
||||
raise ValueError(
|
||||
"Unexpected value for {0} ({1}): {2}".format(
|
||||
name, str(type(value)), str(value)
|
||||
)
|
||||
)
|
||||
spec_str += ' ' + ' '.join(variant_strs)
|
||||
|
||||
spec, = spack.cmd.parse_specs(spec_str.split())
|
||||
|
||||
for ht in [hash_types.dag_hash, hash_types.build_hash,
|
||||
hash_types.full_hash]:
|
||||
setattr(spec, ht.attr, entry['hash'])
|
||||
|
||||
spec._concrete = True
|
||||
spec._hashes_final = True
|
||||
spec.external_path = entry['prefix']
|
||||
spec.origin = 'external-db'
|
||||
spack.spec.Spec.ensure_valid_variants(spec)
|
||||
|
||||
return spec
|
||||
|
||||
|
||||
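For orientation, here is the shape of a manifest entry that spec_from_entry above can consume, with every field taken from the accesses the function makes; the concrete values (package, hash, prefix) are illustrative only.

    example_entry = {
        'name': 'zlib',
        'version': '1.2.11',
        'hash': 'abcdef0123456789abcdef0123456789abcdef01',
        'prefix': '/opt/cray/pe/example/zlib-1.2.11',
        'arch': {
            'platform': 'cray',
            'platform_os': 'sles15',
            'target': {'name': 'zen2'},
        },
        'compiler': {'name': 'nvidia', 'version': '21.3'},  # translated to nvhpc
        'parameters': {'shared': True, 'pic': True},
    }

    # spec_from_entry(example_entry) builds the spec string
    #   "zlib@1.2.11 %nvhpc@21.3 arch=cray-sles15-zen2 +shared +pic",
    # parses it, marks it concrete, records the external prefix, and tags it
    # with origin 'external-db' so the database can track where it came from.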
def entries_to_specs(entries):
|
||||
spec_dict = {}
|
||||
for entry in entries:
|
||||
try:
|
||||
spec = spec_from_entry(entry)
|
||||
spec_dict[spec._hash] = spec
|
||||
except spack.repo.UnknownPackageError:
|
||||
tty.debug("Omitting package {0}: no corresponding repo package"
|
||||
.format(entry['name']))
|
||||
except spack.error.SpackError:
|
||||
raise
|
||||
except Exception:
|
||||
tty.warn("Could not parse entry: " + str(entry))
|
||||
|
||||
for entry in filter(lambda x: 'dependencies' in x, entries):
|
||||
dependencies = entry['dependencies']
|
||||
for name, properties in dependencies.items():
|
||||
dep_hash = properties['hash']
|
||||
deptypes = properties['type']
|
||||
if dep_hash in spec_dict:
|
||||
if entry['hash'] not in spec_dict:
|
||||
continue
|
||||
parent_spec = spec_dict[entry['hash']]
|
||||
dep_spec = spec_dict[dep_hash]
|
||||
parent_spec._add_dependency(dep_spec, deptypes)
|
||||
|
||||
return spec_dict
|
||||
|
||||
|
||||
def read(path, apply_updates):
|
||||
with open(path, 'r') as json_file:
|
||||
json_data = json.load(json_file)
|
||||
|
||||
jsonschema.validate(json_data, manifest_schema)
|
||||
|
||||
specs = entries_to_specs(json_data['specs'])
|
||||
tty.debug("{0}: {1} specs read from manifest".format(
|
||||
path,
|
||||
str(len(specs))))
|
||||
compilers = list()
|
||||
if 'compilers' in json_data:
|
||||
compilers.extend(compiler_from_entry(x)
|
||||
for x in json_data['compilers'])
|
||||
tty.debug("{0}: {1} compilers read from manifest".format(
|
||||
path,
|
||||
str(len(compilers))))
|
||||
if apply_updates and compilers:
|
||||
spack.compilers.add_compilers_to_config(
|
||||
compilers, init_config=False)
|
||||
if apply_updates:
|
||||
for spec in specs.values():
|
||||
spack.store.db.add(spec, directory_layout=None)
|
||||
@@ -91,7 +91,8 @@
|
||||
_pkg_lock_timeout = None
|
||||
|
||||
# Types of dependencies tracked by the database
|
||||
_tracked_deps = ('link', 'run')
|
||||
# We store by DAG hash, so we track the dependencies that the DAG hash includes.
|
||||
_tracked_deps = ht.dag_hash.deptype
|
||||
|
||||
# Default list of fields written for each install record
|
||||
default_install_record_fields = [
|
||||
@@ -187,6 +188,7 @@ def __init__(
|
||||
installation_time=None,
|
||||
deprecated_for=None,
|
||||
in_buildcache=False,
|
||||
origin=None
|
||||
):
|
||||
self.spec = spec
|
||||
self.path = str(path) if path else None
|
||||
@@ -196,6 +198,7 @@ def __init__(
|
||||
self.installation_time = installation_time or _now()
|
||||
self.deprecated_for = deprecated_for
|
||||
self.in_buildcache = in_buildcache
|
||||
self.origin = origin
|
||||
|
||||
def install_type_matches(self, installed):
|
||||
installed = InstallStatuses.canonicalize(installed)
|
||||
@@ -217,6 +220,9 @@ def to_dict(self, include_fields=default_install_record_fields):
|
||||
else:
|
||||
rec_dict.update({field_name: getattr(self, field_name)})
|
||||
|
||||
if self.origin:
|
||||
rec_dict['origin'] = self.origin
|
||||
|
||||
return rec_dict
|
||||
|
||||
@classmethod
|
||||
@@ -428,7 +434,7 @@ def _failed_spec_path(self, spec):
|
||||
.format(spec.name))
|
||||
|
||||
return os.path.join(self._failure_dir,
|
||||
'{0}-{1}'.format(spec.name, spec.full_hash()))
|
||||
'{0}-{1}'.format(spec.name, spec.dag_hash()))
|
||||
|
||||
def clear_all_failures(self):
|
||||
"""Force remove install failure tracking files."""
|
||||
@@ -640,8 +646,12 @@ def _write_to_file(self, stream):
|
||||
# TODO: fix this before we support multiple install locations.
|
||||
database = {
|
||||
'database': {
|
||||
# TODO: move this to a top-level _meta section if we ever
|
||||
# TODO: bump the DB version to 7
|
||||
'version': str(_db_version),
|
||||
|
||||
# dictionary of installation records, keyed by DAG hash
|
||||
'installs': installs,
|
||||
'version': str(_db_version)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -681,6 +691,13 @@ def db_for_spec_hash(self, hash_key):
|
||||
return db
|
||||
|
||||
def query_by_spec_hash(self, hash_key, data=None):
|
||||
"""Get a spec for hash, and whether it's installed upstream.
|
||||
|
||||
Return:
|
||||
(tuple): (bool, optional InstallRecord): bool tells us whether
|
||||
the spec is installed upstream. Its InstallRecord is also
|
||||
returned if it's installed at all; otherwise None.
|
||||
"""
|
||||
if data and hash_key in data:
|
||||
return False, data[hash_key]
|
||||
if not data:
|
||||
@@ -1087,6 +1104,7 @@ def _add(
|
||||
"Specs added to DB must be concrete.")
|
||||
|
||||
key = spec.dag_hash()
|
||||
spec_pkg_hash = spec._package_hash
|
||||
upstream, record = self.query_by_spec_hash(key)
|
||||
if upstream:
|
||||
return
|
||||
@@ -1131,6 +1149,10 @@ def _add(
|
||||
'explicit': explicit,
|
||||
'installation_time': installation_time
|
||||
}
|
||||
# Commands other than 'spack install' may add specs to the DB,
|
||||
# we can record the source of an installed Spec with 'origin'
|
||||
if hasattr(spec, 'origin'):
|
||||
extra_args['origin'] = spec.origin
|
||||
self._data[key] = InstallRecord(
|
||||
new_spec, path, installed, ref_count=0, **extra_args
|
||||
)
|
||||
@@ -1144,10 +1166,10 @@ def _add(
|
||||
record.ref_count += 1
|
||||
|
||||
# Mark concrete once everything is built, and preserve
|
||||
# the original hash of concrete specs.
|
||||
# the original hashes of concrete specs.
|
||||
new_spec._mark_concrete()
|
||||
new_spec._hash = key
|
||||
new_spec._full_hash = spec._full_hash
|
||||
new_spec._package_hash = spec_pkg_hash
|
||||
|
||||
else:
|
||||
# It is already in the database
|
||||
@@ -1462,6 +1484,7 @@ def _query(
|
||||
end_date=None,
|
||||
hashes=None,
|
||||
in_buildcache=any,
|
||||
origin=None
|
||||
):
|
||||
"""Run a query on the database."""
|
||||
|
||||
@@ -1490,6 +1513,9 @@ def _query(
|
||||
if hashes is not None and rec.spec.dag_hash() not in hashes:
|
||||
continue
|
||||
|
||||
if origin and not (origin == rec.origin):
|
||||
continue
|
||||
|
||||
if not rec.install_type_matches(installed):
|
||||
continue
|
||||
|
||||
@@ -1583,11 +1609,12 @@ def unused_specs(self):
|
||||
needed, visited = set(), set()
|
||||
with self.read_transaction():
|
||||
for key, rec in self._data.items():
|
||||
if rec.explicit:
|
||||
# recycle `visited` across calls to avoid
|
||||
# redundantly traversing
|
||||
for spec in rec.spec.traverse(visited=visited):
|
||||
needed.add(spec.dag_hash())
|
||||
if not rec.explicit:
|
||||
continue
|
||||
|
||||
# recycle `visited` across calls to avoid redundantly traversing
|
||||
for spec in rec.spec.traverse(visited=visited, deptype=("link", "run")):
|
||||
needed.add(spec.dag_hash())
|
||||
|
||||
unused = [rec.spec for key, rec in self._data.items()
|
||||
if key not in needed and rec.installed]
|
||||
|
||||
@@ -74,7 +74,8 @@ def executables_in_path(path_hints=None):
|
||||
|
||||
|
||||
def libraries_in_ld_library_path(path_hints=None):
|
||||
"""Get the paths of all libraries available from LD_LIBRARY_PATH.
|
||||
"""Get the paths of all libraries available from LD_LIBRARY_PATH,
|
||||
LIBRARY_PATH, DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH.
|
||||
|
||||
For convenience, this is constructed as a dictionary where the keys are
|
||||
the library paths and the values are the names of the libraries
|
||||
@@ -85,9 +86,15 @@ def libraries_in_ld_library_path(path_hints=None):
|
||||
|
||||
Args:
|
||||
path_hints (list): list of paths to be searched. If None the list will be
|
||||
constructed based on the LD_LIBRARY_PATH environment variable.
|
||||
constructed based on the set of LD_LIBRARY_PATH, LIBRARY_PATH,
|
||||
DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment
|
||||
variables.
|
||||
"""
|
||||
path_hints = path_hints or spack.util.environment.get_path('LD_LIBRARY_PATH')
|
||||
path_hints = path_hints or \
|
||||
spack.util.environment.get_path('LIBRARY_PATH') + \
|
||||
spack.util.environment.get_path('LD_LIBRARY_PATH') + \
|
||||
spack.util.environment.get_path('DYLD_LIBRARY_PATH') + \
|
||||
spack.util.environment.get_path('DYLD_FALLBACK_LIBRARY_PATH')
|
||||
search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
|
||||
|
||||
path_to_lib = {}
|
||||
|
||||
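The expanded default above concatenates the entries of four environment variables in order. A rough standalone equivalent, using os.environ directly instead of spack.util.environment.get_path, would be:

    import os

    def default_library_path_hints():
        hints = []
        # Same order as the code above: LIBRARY_PATH, LD_LIBRARY_PATH,
        # DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH.
        for var in ('LIBRARY_PATH', 'LD_LIBRARY_PATH',
                    'DYLD_LIBRARY_PATH', 'DYLD_FALLBACK_LIBRARY_PATH'):
            value = os.environ.get(var, '')
            hints.extend(p for p in value.split(os.pathsep) if p)
        return hints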
@@ -48,13 +48,13 @@ class OpenMpi(Package):
|
||||
from spack.resource import Resource
|
||||
from spack.version import Version, VersionChecksumError
|
||||
|
||||
__all__ = ['DirectiveError', 'DirectiveMeta']
|
||||
__all__ = ['DirectiveError', 'DirectiveMeta', 'version', 'conflicts', 'depends_on',
|
||||
'extends', 'provides', 'patch', 'variant', 'resource']
|
||||
|
||||
#: These are variant names used by Spack internally; packages can't use them
|
||||
reserved_names = ['patches', 'dev_path']
|
||||
|
||||
#: Names of possible directives. This list is populated elsewhere in the file and then
|
||||
#: added to `__all__` at the bottom.
|
||||
#: Names of possible directives. This list is populated elsewhere in the file.
|
||||
directive_names = []
|
||||
|
||||
_patch_order_index = 0
|
||||
@@ -731,7 +731,3 @@ class DependencyPatchError(DirectiveError):
|
||||
|
||||
class UnsupportedPackageDirective(DirectiveError):
|
||||
"""Raised when an invalid or unsupported package directive is specified."""
|
||||
|
||||
|
||||
#: add all directive names to __all__
|
||||
__all__.extend(directive_names)
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
import posixpath
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
from contextlib import contextmanager
|
||||
|
||||
@@ -24,6 +25,7 @@
|
||||
import spack.util.spack_json as sjson
|
||||
from spack.error import SpackError
|
||||
|
||||
is_windows = sys.platform == 'win32'
|
||||
# Note: Posixpath is used here as opposed to
|
||||
# os.path.join due to spack.spec.Spec.format
|
||||
# requiring forward slash path separators at this stage
@@ -108,13 +110,9 @@ def write_spec(self, spec, path):
"""Write a spec out to a file."""
_check_concrete(spec)
with open(path, 'w') as f:
# The hash the the projection is the DAG hash but we write out the
# full provenance by full hash so it's availabe if we want it later
# extension = os.path.splitext(path)[-1].lower()
# if 'json' in extension:
spec.to_json(f, hash=ht.full_hash)
# elif 'yaml' in extension:
# spec.to_yaml(f, hash=ht.full_hash)
# The hash of the projection is the DAG hash which contains
# the full provenance, so it's available if we want it later
spec.to_json(f, hash=ht.dag_hash)

def write_host_environment(self, spec):
"""The host environment is a json file with os, kernel, and spack
@@ -240,10 +238,10 @@ def create_install_directory(self, spec):
|
||||
|
||||
def ensure_installed(self, spec):
|
||||
"""
|
||||
Throws DirectoryLayoutError if:
|
||||
Throws InconsistentInstallDirectoryError if:
|
||||
1. spec prefix does not exist
|
||||
2. spec prefix does not contain a spec file
|
||||
3. the spec file does not correspond to the spec
|
||||
2. spec prefix does not contain a spec file, or
|
||||
3. We read a spec with the wrong DAG hash out of an existing install directory.
|
||||
"""
|
||||
_check_concrete(spec)
|
||||
path = self.path_for_spec(spec)
|
||||
@@ -259,25 +257,7 @@ def ensure_installed(self, spec):
|
||||
" " + path)
|
||||
|
||||
installed_spec = self.read_spec(spec_file_path)
|
||||
if installed_spec == spec:
|
||||
return
|
||||
|
||||
# DAG hashes currently do not include build dependencies.
|
||||
#
|
||||
# TODO: remove this when we do better concretization and don't
|
||||
# ignore build-only deps in hashes.
|
||||
elif (installed_spec.copy(deps=('link', 'run')) ==
|
||||
spec.copy(deps=('link', 'run'))):
|
||||
# The directory layout prefix is based on the dag hash, so among
|
||||
# specs with differing full-hash but matching dag-hash, only one
|
||||
# may be installed. This means for example that for two instances
|
||||
# that differ only in CMake version used to build, only one will
|
||||
# be installed.
|
||||
return
|
||||
|
||||
if spec.dag_hash() == installed_spec.dag_hash():
|
||||
raise SpecHashCollisionError(spec, installed_spec)
|
||||
else:
|
||||
if installed_spec.dag_hash() != spec.dag_hash():
|
||||
raise InconsistentInstallDirectoryError(
|
||||
'Spec file in %s does not match hash!' % spec_file_path)
|
||||
|
||||
@@ -349,6 +329,14 @@ def remove_install_directory(self, spec, deprecated=False):
|
||||
path = self.path_for_spec(spec)
|
||||
assert(path.startswith(self.root))
|
||||
|
||||
# Windows readonly files cannot be removed by Python
|
||||
# directly, change permissions before attempting to remove
|
||||
if is_windows:
|
||||
kwargs = {'ignore_errors': False,
|
||||
'onerror': fs.readonly_file_handler(ignore_errors=False)}
|
||||
else:
|
||||
kwargs = {} # the default value for ignore_errors is false
|
||||
|
||||
if deprecated:
|
||||
if os.path.exists(path):
|
||||
try:
|
||||
@@ -357,10 +345,9 @@ def remove_install_directory(self, spec, deprecated=False):
|
||||
os.remove(metapath)
|
||||
except OSError as e:
|
||||
raise six.raise_from(RemoveFailedError(spec, path, e), e)
|
||||
|
||||
elif os.path.exists(path):
|
||||
try:
|
||||
shutil.rmtree(path)
|
||||
shutil.rmtree(path, **kwargs)
|
||||
except OSError as e:
|
||||
raise six.raise_from(RemoveFailedError(spec, path, e), e)
|
||||
|
||||
@@ -458,8 +445,8 @@ def add_extension(self, spec, ext_spec):
|
||||
def check_extension_conflict(self, spec, ext_spec):
|
||||
exts = self._extension_map(spec)
|
||||
if ext_spec.name in exts:
|
||||
installed_spec = exts[ext_spec.name].copy(deps=('link', 'run'))
|
||||
if ext_spec.copy(deps=('link', 'run')) == installed_spec:
|
||||
installed_spec = exts[ext_spec.name]
|
||||
if ext_spec.dag_hash() == installed_spec.dag_hash():
|
||||
raise ExtensionAlreadyInstalledError(spec, ext_spec)
|
||||
else:
|
||||
raise ExtensionConflictError(spec, ext_spec, installed_spec)
|
||||
@@ -579,15 +566,6 @@ def __init__(self, message, long_msg=None):
|
||||
super(DirectoryLayoutError, self).__init__(message, long_msg)
|
||||
|
||||
|
||||
class SpecHashCollisionError(DirectoryLayoutError):
|
||||
"""Raised when there is a hash collision in an install layout."""
|
||||
|
||||
def __init__(self, installed_spec, new_spec):
|
||||
super(SpecHashCollisionError, self).__init__(
|
||||
'Specs %s and %s have the same SHA-1 prefix!'
|
||||
% (installed_spec, new_spec))
|
||||
|
||||
|
||||
class RemoveFailedError(DirectoryLayoutError):
|
||||
"""Raised when a DirectoryLayout cannot remove an install prefix."""
|
||||
|
||||
|
||||
@@ -1,7 +1,334 @@
# -*- coding: utf-8 -*-
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This package implements Spack environments.

.. _lockfile-format:

`spack.lock` format
===================

Spack environments have existed since Spack ``v0.12.0``, and there have been 4 different
``spack.lock`` formats since then. The formats are documented here.

The high-level format of a Spack lockfile hasn't changed much between versions, but the
contents have. Lockfiles are JSON-formatted and their top-level sections are:

1. ``_meta`` (object): this contains details about the file format, including:
* ``file-type``: always ``"spack-lockfile"``
* ``lockfile-version``: an integer representing the lockfile format version
* ``specfile-version``: an integer representing the spec format version (since
``v0.17``)

2. ``roots`` (list): an ordered list of records representing the roots of the Spack
environment. Each has two fields:
* ``hash``: a Spack spec hash uniquely identifying the concrete root spec
* ``spec``: a string representation of the abstract spec that was concretized

3. ``concrete_specs``: a dictionary containing the specs in the environment.

Compatibility
-------------

New versions of Spack can (so far) read all old lockfile formats -- they are
backward-compatible. Old versions cannot read new lockfile formats, and you'll need to
upgrade Spack to use them.

.. list-table:: Lockfile version compatibility across Spack versions
:header-rows: 1

* - Spack version
- ``v1``
- ``v2``
- ``v3``
- ``v4``
* - ``v0.12:0.14``
- ✅
-
-
-
* - ``v0.15:0.16``
- ✅
- ✅
-
-
* - ``v0.17``
- ✅
- ✅
- ✅
-
* - ``v0.18:``
- ✅
- ✅
- ✅
- ✅

Version 1
---------

When lockfiles were first created, there was only one hash in Spack: the DAG hash. This
DAG hash (we'll call it the old DAG hash) did *not* include build dependencies -- it
only included transitive link and run dependencies.

The spec format at this time was keyed by name. Each spec started with a key for its
name, whose value was a dictionary of other spec attributes. The lockfile put these
name-keyed specs into dictionaries keyed by their DAG hash, and the spec records did not
actually have a "hash" field in the lockfile -- you have to associate the hash from the
key with the spec record after the fact.

Dependencies in original lockfiles were keyed by ``"hash"``, i.e. the old DAG hash.

.. code-block:: json

{
"_meta": {
"file-type": "spack-lockfile",
"lockfile-version": 1
},
"roots": [
{
"hash": "<old_dag_hash 1>",
"spec": "<abstract spec 1>"
},
{
"hash": "<old_dag_hash 2>",
"spec": "<abstract spec 2>"
}
],
"concrete_specs": {
"<old_dag_hash 1>": {
"... <spec dict attributes> ...": { },
"dependencies": {
"depname_1": {
"hash": "<old_dag_hash for depname_1>",
"type": ["build", "link"]
},
"depname_2": {
"hash": "<old_dag_hash for depname_3>",
"type": ["build", "link"]
}
},
"hash": "<old_dag_hash 1>"
},
"<old_dag_hash 2>": {
"... <spec dict attributes> ...": { },
"dependencies": {
"depname_3": {
"hash": "<old_dag_hash for depname_3>",
"type": ["build", "link"]
},
"depname_4": {
"hash": "<old_dag_hash for depname_4>",
"type": ["build", "link"]
},
},
"hash": "<old_dag_hash 2>"
},
}
}


Version 2
---------

Version 2 changes one thing: specs in the lockfile are now keyed by ``build_hash``
instead of the old ``dag_hash``. Specs have a ``hash`` attribute with their real DAG
hash, so you can't go by the dictionary key anymore to identify a spec -- you have to
read it in and look at ``"hash"``. Dependencies are still keyed by old DAG hash.

Even though we key lockfiles by ``build_hash``, specs in Spack were still deployed with
the old, coarser DAG hash. This means that in v2 and v3 lockfiles (which are keyed by
build hash), there may be multiple versions of the same spec with different build
dependencies, which means they will have different build hashes but the same DAG hash.
Spack would only have been able to actually install one of these.

.. code-block:: json

{
"_meta": {
"file-type": "spack-lockfile",
"lockfile-version": 2
},
"roots": [
{
"hash": "<build_hash 1>",
"spec": "<abstract spec 1>"
},
{
"hash": "<build_hash 2>",
"spec": "<abstract spec 2>"
}
],
"concrete_specs": {
"<build_hash 1>": {
"... <spec dict attributes> ...": { },
"dependencies": {
"depname_1": {
"hash": "<old_dag_hash for depname_1>",
"type": ["build", "link"]
},
"depname_2": {
"hash": "<old_dag_hash for depname_3>",
"type": ["build", "link"]
}
},
"hash": "<old_dag_hash 1>",
},
"<build_hash 2>": {
"... <spec dict attributes> ...": { },
"dependencies": {
"depname_3": {
"hash": "<old_dag_hash for depname_3>",
"type": ["build", "link"]
},
"depname_4": {
"hash": "<old_dag_hash for depname_4>",
"type": ["build", "link"]
}
},
"hash": "<old_dag_hash 2>"
}
}
}


Version 3
---------

Version 3 doesn't change the top-level lockfile format, but this was when we changed the
specfile format. Specs in ``concrete_specs`` are now keyed by the build hash, with no
inner dictionary keyed by their package name. The package name is in a ``name`` field
inside each spec dictionary. The ``dependencies`` field in the specs is a list instead
of a dictionary, and each element of the list is a record with the name, dependency
types, and hash of the dependency. Instead of a key called ``hash``, dependencies are
keyed by ``build_hash``. Each spec still has a ``hash`` attribute.

Version 3 adds the ``specfile_version`` field to ``_meta`` and uses the new JSON spec
format.

.. code-block:: json

{
"_meta": {
"file-type": "spack-lockfile",
"lockfile-version": 3,
"specfile-version": 2
},
"roots": [
{
"hash": "<build_hash 1>",
"spec": "<abstract spec 1>"
},
{
"hash": "<build_hash 2>",
"spec": "<abstract spec 2>"
},
],
"concrete_specs": {
"<build_hash 1>": {
"... <spec dict attributes> ...": { },
"dependencies": [
{
"name": "depname_1",
"build_hash": "<build_hash for depname_1>",
"type": ["build", "link"]
},
{
"name": "depname_2",
"build_hash": "<build_hash for depname_2>",
"type": ["build", "link"]
},
],
"hash": "<old_dag_hash 1>",
},
"<build_hash 2>": {
"... <spec dict attributes> ...": { },
"dependencies": [
{
"name": "depname_3",
"build_hash": "<build_hash for depname_3>",
"type": ["build", "link"]
},
{
"name": "depname_4",
"build_hash": "<build_hash for depname_4>",
"type": ["build", "link"]
},
],
"hash": "<old_dag_hash 2>"
}
}
}


Version 4
---------

Version 4 removes build hashes and is keyed by the new DAG hash (``hash``). The ``hash``
now includes build dependencies and a canonical hash of the ``package.py`` file.
Dependencies are keyed by ``hash`` (DAG hash) as well. There are no more ``build_hash``
fields in the specs, and there are no more issues with lockfiles being able to store
multiple specs with the same DAG hash (because the DAG hash is now finer-grained).


.. code-block:: json

{
"_meta": {
"file-type": "spack-lockfile",
"lockfile-version": 4,
"specfile-version": 2
},
"roots": [
{
"hash": "<dag_hash 1>",
"spec": "<abstract spec 1>"
},
{
"hash": "<dag_hash 2>",
"spec": "<abstract spec 2>"
}
],
"concrete_specs": {
"<dag_hash 1>": {
"... <spec dict attributes> ...": { },
"dependencies": [
{
"name": "depname_1",
"hash": "<dag_hash for depname_1>",
"type": ["build", "link"]
},
{
"name": "depname_2",
"hash": "<dag_hash for depname_2>",
"type": ["build", "link"]
}
],
"hash": "<dag_hash 1>",
},
"<dag_hash 2>": {
"... <spec dict attributes> ...": { },
"dependencies": [
{
"name": "depname_3",
"hash": "<dag_hash for depname_3>",
"type": ["build", "link"]
},
{
"name": "depname_4",
"hash": "<dag_hash for depname_4>",
"type": ["build", "link"]
}
],
"hash": "<dag_hash 2>"
}
}
}

"""
from .environment import (
|
||||
Environment,
|
||||
SpackEnvironmentError,
|
||||
|
||||
@@ -94,7 +94,7 @@
|
||||
valid_environment_name_re = r'^\w[\w-]*$'
|
||||
|
||||
#: version of the lockfile format. Must increase monotonically.
|
||||
lockfile_format_version = 3
|
||||
lockfile_format_version = 4
|
||||
|
||||
# Magic names
|
||||
# The name of the standalone spec list in the manifest yaml
|
||||
@@ -302,7 +302,7 @@ def _is_dev_spec_and_has_changed(spec):
|
||||
return False
|
||||
|
||||
# Now we can check whether the code changed since the last installation
|
||||
if not spec.package.installed:
|
||||
if not spec.installed:
|
||||
# Not installed -> nothing to compare against
|
||||
return False
|
||||
|
||||
@@ -315,7 +315,7 @@ def _spec_needs_overwrite(spec, changed_dev_specs):
|
||||
"""Check whether the current spec needs to be overwritten because either it has
|
||||
changed itself or one of its dependencies have changed"""
|
||||
# if it's not installed, we don't need to overwrite it
|
||||
if not spec.package.installed:
|
||||
if not spec.installed:
|
||||
return False
|
||||
|
||||
# If the spec itself has changed this is a trivial decision
|
||||
@@ -330,7 +330,7 @@ def _spec_needs_overwrite(spec, changed_dev_specs):
|
||||
# If any dep needs overwrite, or any dep is missing and is a dev build then
|
||||
# overwrite this package
|
||||
if any(
|
||||
((not dep.package.installed) and dep.satisfies('dev_path=*')) or
|
||||
((not dep.installed) and dep.satisfies('dev_path=*')) or
|
||||
_spec_needs_overwrite(dep, changed_dev_specs)
|
||||
for dep in spec.traverse(root=False)
|
||||
):
|
||||
@@ -439,7 +439,7 @@ def _next_root(self, specs):
|
||||
def content_hash(self, specs):
|
||||
d = syaml.syaml_dict([
|
||||
('descriptor', self.to_dict()),
|
||||
('specs', [(spec.full_hash(), spec.prefix) for spec in sorted(specs)])
|
||||
('specs', [(spec.dag_hash(), spec.prefix) for spec in sorted(specs)])
|
||||
])
|
||||
contents = sjson.dump(d)
|
||||
return spack.util.hash.b32_hash(contents)
|
||||
@@ -518,7 +518,7 @@ def specs_for_view(self, concretized_root_specs):
|
||||
|
||||
# Filter selected, installed specs
|
||||
with spack.store.db.read_transaction():
|
||||
specs = [s for s in specs if s in self and s.package.installed]
|
||||
specs = [s for s in specs if s in self and s.installed]
|
||||
|
||||
return specs
|
||||
|
||||
@@ -1010,14 +1010,9 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
|
||||
|
||||
if not matches:
|
||||
# concrete specs match against concrete specs in the env
|
||||
# by *dag hash*, not build hash.
|
||||
dag_hashes_in_order = [
|
||||
self.specs_by_hash[build_hash].dag_hash()
|
||||
for build_hash in self.concretized_order
|
||||
]
|
||||
|
||||
# by dag hash.
|
||||
specs_hashes = zip(
|
||||
self.concretized_user_specs, dag_hashes_in_order
|
||||
self.concretized_user_specs, self.concretized_order
|
||||
)
|
||||
|
||||
matches = [
|
||||
@@ -1274,7 +1269,7 @@ def _concretize_separately(self, tests=False):
|
||||
by_hash = {}
|
||||
for abstract, concrete in zip(root_specs, concretized_root_specs):
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
by_hash[concrete.build_hash()] = concrete
|
||||
by_hash[concrete.dag_hash()] = concrete
|
||||
|
||||
# Unify the specs objects, so we get correct references to all parents
|
||||
self._read_lockfile_dict(self._to_lockfile_dict())
|
||||
@@ -1331,7 +1326,7 @@ def concretize_and_add(self, user_spec, concrete_spec=None, tests=False):
|
||||
spec = next(
|
||||
s for s in self.user_specs if s.satisfies(user_spec)
|
||||
)
|
||||
concrete = self.specs_by_hash.get(spec.build_hash())
|
||||
concrete = self.specs_by_hash.get(spec.dag_hash())
|
||||
if not concrete:
|
||||
concrete = spec.concretized(tests=tests)
|
||||
self._add_concrete_spec(spec, concrete)
|
||||
@@ -1380,9 +1375,10 @@ def check_views(self):
|
||||
# default view if they are installed.
|
||||
for view_name, view in self.views.items():
|
||||
for _, spec in self.concretized_specs():
|
||||
if spec in view and spec.package.installed:
|
||||
tty.debug(
|
||||
'Spec %s in view %s' % (spec.name, view_name))
|
||||
if spec in view and spec.package and spec.installed:
|
||||
msg = '{0} in view "{1}"'
|
||||
tty.debug(msg.format(spec.name, view_name))
|
||||
|
||||
except (spack.repo.UnknownPackageError,
|
||||
spack.repo.UnknownNamespaceError) as e:
|
||||
tty.warn(e)
|
||||
@@ -1398,7 +1394,8 @@ def _env_modifications_for_default_view(self, reverse=False):
|
||||
|
||||
errors = []
|
||||
for _, root_spec in self.concretized_specs():
|
||||
if root_spec in self.default_view and root_spec.package.installed:
|
||||
if (root_spec in self.default_view and
|
||||
root_spec.installed and root_spec.package):
|
||||
for spec in root_spec.traverse(deptype='run', root=True):
|
||||
if spec.name in visited:
|
||||
# It is expected that only one instance of the package
|
||||
@@ -1497,7 +1494,7 @@ def _add_concrete_spec(self, spec, concrete, new=True):
|
||||
# update internal lists of specs
|
||||
self.concretized_user_specs.append(spec)
|
||||
|
||||
h = concrete.build_hash()
|
||||
h = concrete.dag_hash()
|
||||
self.concretized_order.append(h)
|
||||
self.specs_by_hash[h] = concrete
|
||||
|
||||
@@ -1537,7 +1534,7 @@ def uninstalled_specs(self):
|
||||
with spack.store.db.read_transaction():
|
||||
for concretized_hash in self.concretized_order:
|
||||
spec = self.specs_by_hash[concretized_hash]
|
||||
if not spec.package.installed or (
|
||||
if not spec.installed or (
|
||||
spec.satisfies('dev_path=*') or
|
||||
spec.satisfies('^dev_path=*')
|
||||
):
|
||||
@@ -1572,7 +1569,7 @@ def install_specs(self, specs=None, **install_args):
|
||||
|
||||
# ensure specs already installed are marked explicit
|
||||
all_specs = specs or [cs for _, cs in self.concretized_specs()]
|
||||
specs_installed = [s for s in all_specs if s.package.installed]
|
||||
specs_installed = [s for s in all_specs if s.installed]
|
||||
with spack.store.db.write_transaction(): # do all in one transaction
|
||||
for spec in specs_installed:
|
||||
spack.store.db.update_explicit(spec, True)
|
||||
@@ -1599,7 +1596,7 @@ def install_specs(self, specs=None, **install_args):
|
||||
finally:
|
||||
# Ensure links are set appropriately
|
||||
for spec in specs_to_install:
|
||||
if spec.package.installed:
|
||||
if spec.installed:
|
||||
self.new_installs.append(spec)
|
||||
try:
|
||||
self._install_log_links(spec)
|
||||
@@ -1619,9 +1616,7 @@ def all_specs(self):
|
||||
return sorted(all_specs)
|
||||
|
||||
def all_hashes(self):
|
||||
"""Return hashes of all specs.
|
||||
|
||||
Note these hashes exclude build dependencies."""
|
||||
"""Return hashes of all specs."""
|
||||
return list(set(s.dag_hash() for s in self.all_specs()))
|
||||
|
||||
def roots(self):
|
||||
@@ -1649,7 +1644,7 @@ def added_specs(self):
|
||||
concrete = concretized.get(spec)
|
||||
if not concrete:
|
||||
yield spec
|
||||
elif not concrete.package.installed:
|
||||
elif not concrete.installed:
|
||||
yield concrete
|
||||
|
||||
def concretized_specs(self):
|
||||
@@ -1657,6 +1652,15 @@ def concretized_specs(self):
|
||||
for s, h in zip(self.concretized_user_specs, self.concretized_order):
|
||||
yield (s, self.specs_by_hash[h])
|
||||
|
||||
def get_by_hash(self, dag_hash):
|
||||
matches = {}
|
||||
for _, root in self.concretized_specs():
|
||||
for spec in root.traverse(root=True):
|
||||
dep_hash = spec.dag_hash()
|
||||
if dep_hash.startswith(dag_hash):
|
||||
matches[dep_hash] = spec
|
||||
return list(matches.values())
|
||||
|
||||
def matching_spec(self, spec):
|
||||
"""
|
||||
Given a spec (likely not concretized), find a matching concretized
|
||||
@@ -1684,13 +1688,7 @@ def matching_spec(self, spec):
|
||||
for user_spec, concretized_user_spec in self.concretized_specs():
|
||||
# Deal with concrete specs differently
|
||||
if spec.concrete:
|
||||
# Matching a concrete spec is more restrictive
|
||||
# than just matching the dag hash
|
||||
is_match = (
|
||||
spec in concretized_user_spec and
|
||||
concretized_user_spec[spec.name].build_hash() == spec.build_hash()
|
||||
)
|
||||
if is_match:
|
||||
if spec in concretized_user_spec:
|
||||
matches[spec] = spec
|
||||
continue
|
||||
|
||||
@@ -1770,12 +1768,12 @@ def _to_lockfile_dict(self):
|
||||
concrete_specs = {}
|
||||
for spec in self.specs_by_hash.values():
|
||||
for s in spec.traverse():
|
||||
build_hash = s.build_hash()
|
||||
if build_hash not in concrete_specs:
|
||||
spec_dict = s.to_node_dict(hash=ht.build_hash)
|
||||
dag_hash = s.dag_hash()
|
||||
if dag_hash not in concrete_specs:
|
||||
spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash)
|
||||
# Assumes no legacy formats, since this was just created.
|
||||
spec_dict[ht.dag_hash.name] = s.dag_hash()
|
||||
concrete_specs[build_hash] = spec_dict
|
||||
concrete_specs[dag_hash] = spec_dict
|
||||
|
||||
hash_spec_list = zip(
|
||||
self.concretized_order, self.concretized_user_specs)
|
||||
@@ -1809,47 +1807,56 @@ def _read_lockfile(self, file_or_json):
|
||||
|
||||
def _read_lockfile_dict(self, d):
|
||||
"""Read a lockfile dictionary into this environment."""
|
||||
self.specs_by_hash = {}
|
||||
|
||||
roots = d['roots']
|
||||
self.concretized_user_specs = [Spec(r['spec']) for r in roots]
|
||||
self.concretized_order = [r['hash'] for r in roots]
|
||||
|
||||
json_specs_by_hash = d['concrete_specs']
|
||||
root_hashes = set(self.concretized_order)
|
||||
|
||||
# Track specs by their lockfile key. Currently spack uses the finest
|
||||
# grained hash as the lockfile key, while older formats used the build
|
||||
# hash or a previous incarnation of the DAG hash (one that did not
|
||||
# include build deps or package hash).
|
||||
specs_by_hash = {}
|
||||
for build_hash, node_dict in json_specs_by_hash.items():
|
||||
spec = Spec.from_node_dict(node_dict)
|
||||
if d['_meta']['lockfile-version'] > 1:
|
||||
# Build hash is stored as a key, but not as part of the node dict
|
||||
# To ensure build hashes are not recomputed, we reattach here
|
||||
setattr(spec, ht.build_hash.attr, build_hash)
|
||||
specs_by_hash[build_hash] = spec
|
||||
|
||||
for build_hash, node_dict in json_specs_by_hash.items():
|
||||
# Track specs by their DAG hash, allows handling DAG hash collisions
|
||||
first_seen = {}
|
||||
|
||||
# First pass: Put each spec in the map ignoring dependencies
|
||||
for lockfile_key, node_dict in json_specs_by_hash.items():
|
||||
spec = Spec.from_node_dict(node_dict)
|
||||
if not spec._hash:
|
||||
# in v1 lockfiles, the hash only occurs as a key
|
||||
spec._hash = lockfile_key
|
||||
specs_by_hash[lockfile_key] = spec
|
||||
|
||||
# Second pass: For each spec, get its dependencies from the node dict
|
||||
# and add them to the spec
|
||||
for lockfile_key, node_dict in json_specs_by_hash.items():
|
||||
for _, dep_hash, deptypes, _ in (
|
||||
Spec.dependencies_from_node_dict(node_dict)):
|
||||
specs_by_hash[build_hash]._add_dependency(
|
||||
specs_by_hash[lockfile_key]._add_dependency(
|
||||
specs_by_hash[dep_hash], deptypes)
|
||||
|
||||
# If we are reading an older lockfile format (which uses dag hashes
|
||||
# that exclude build deps), we use this to convert the old
|
||||
# concretized_order to the full hashes (preserving the order)
|
||||
old_hash_to_new = {}
|
||||
self.specs_by_hash = {}
|
||||
for _, spec in specs_by_hash.items():
|
||||
dag_hash = spec.dag_hash()
|
||||
build_hash = spec.build_hash()
|
||||
if dag_hash in root_hashes:
|
||||
old_hash_to_new[dag_hash] = build_hash
|
||||
# Traverse the root specs one at a time in the order they appear.
|
||||
# The first time we see each DAG hash, that's the one we want to
|
||||
# keep. This is only required as long as we support older lockfile
|
||||
# formats where the mapping from DAG hash to lockfile key is possibly
|
||||
# one-to-many.
|
||||
for lockfile_key in self.concretized_order:
|
||||
for s in specs_by_hash[lockfile_key].traverse():
|
||||
if s.dag_hash() not in first_seen:
|
||||
first_seen[s.dag_hash()] = s
|
||||
|
||||
if (dag_hash in root_hashes or build_hash in root_hashes):
|
||||
self.specs_by_hash[build_hash] = spec
|
||||
# Now make sure concretized_order and our internal specs dict
|
||||
# contains the keys used by modern spack (i.e. the dag_hash
|
||||
# that includes build deps and package hash).
|
||||
self.concretized_order = [specs_by_hash[h_key].dag_hash()
|
||||
for h_key in self.concretized_order]
|
||||
|
||||
if old_hash_to_new:
|
||||
# Replace any older hashes in concretized_order with hashes
|
||||
# that include build deps
|
||||
self.concretized_order = [
|
||||
old_hash_to_new.get(h, h) for h in self.concretized_order]
|
||||
for spec_dag_hash in self.concretized_order:
|
||||
self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
|
||||
|
||||
def write(self, regenerate=True):
|
||||
"""Writes an in-memory environment to its location on disk.
|
||||
|
||||
@@ -10,9 +10,9 @@
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
#: whether we should write stack traces or short error messages
|
||||
#: at what level we should write stack traces or short error messages
|
||||
#: this is module-scoped because it needs to be set very early
|
||||
debug = False
|
||||
debug = 0
|
||||
|
||||
|
||||
class SpackError(Exception):
|
||||
|
||||
@@ -406,12 +406,12 @@ def write(self, spec, color=None, out=None):
|
||||
# Colors associated with each node in the DAG.
|
||||
# Edges are colored by the node they point to.
|
||||
self._name_to_color = {
|
||||
spec.full_hash(): self.colors[i % len(self.colors)]
|
||||
spec.dag_hash(): self.colors[i % len(self.colors)]
|
||||
for i, spec in enumerate(nodes_in_topological_order)
|
||||
}
|
||||
|
||||
# Frontier tracks open edges of the graph as it's written out.
|
||||
self._frontier = [[spec.full_hash()]]
|
||||
self._frontier = [[spec.dag_hash()]]
|
||||
while self._frontier:
|
||||
# Find an unexpanded part of frontier
|
||||
i = find(self._frontier, lambda f: len(f) > 1)
|
||||
@@ -488,14 +488,14 @@ def write(self, spec, color=None, out=None):
|
||||
node = nodes_in_topological_order.pop()
|
||||
|
||||
# Find the named node in the frontier and draw it.
|
||||
i = find(self._frontier, lambda f: node.full_hash() in f)
|
||||
i = find(self._frontier, lambda f: node.dag_hash() in f)
|
||||
self._node_line(i, node)
|
||||
|
||||
# Replace node with its dependencies
|
||||
self._frontier.pop(i)
|
||||
deps = node.dependencies(deptype=self.deptype)
|
||||
if deps:
|
||||
deps = sorted((d.full_hash() for d in deps), reverse=True)
|
||||
deps = sorted((d.dag_hash() for d in deps), reverse=True)
|
||||
self._connect_deps(i, deps, "new-deps") # anywhere.
|
||||
|
||||
elif self._frontier:
|
||||
|
||||
@@ -33,15 +33,14 @@ def attr(self):
|
||||
"""Private attribute stored on spec"""
|
||||
return '_' + self.name
|
||||
|
||||
def __call__(self, spec):
|
||||
"""Run this hash on the provided spec."""
|
||||
return spec.spec_hash(self)
|
||||
|
||||
#: Default Hash descriptor, used by Spec.dag_hash() and stored in the DB.
|
||||
|
||||
#: Spack's deployment hash. Includes all inputs that can affect how a package is built.
|
||||
dag_hash = SpecHashDescriptor(
|
||||
deptype=('link', 'run'), package_hash=False, name='hash')
|
||||
|
||||
|
||||
#: Hash descriptor that includes build dependencies.
|
||||
build_hash = SpecHashDescriptor(
|
||||
deptype=('build', 'link', 'run'), package_hash=False, name='build_hash')
|
||||
deptype=('build', 'link', 'run'), package_hash=True, name='hash')
|
||||
|
||||
|
||||
#: Hash descriptor used only to transfer a DAG, as is, across processes
|
||||
@@ -51,12 +50,19 @@ def attr(self):
|
||||
name='process_hash'
|
||||
)
|
||||
|
||||
#: Full hash used in build pipelines to determine when to rebuild packages.
|
||||
|
||||
#: Package hash used as part of dag hash
|
||||
package_hash = SpecHashDescriptor(
|
||||
deptype=(), package_hash=True, name='package_hash',
|
||||
override=lambda s: s.package.content_hash())
|
||||
|
||||
|
||||
# Deprecated hash types, no longer used, but needed to understand old serialized
|
||||
# spec formats
|
||||
|
||||
full_hash = SpecHashDescriptor(
|
||||
deptype=('build', 'link', 'run'), package_hash=True, name='full_hash')
|
||||
|
||||
|
||||
#: Package hash used as part of full hash
|
||||
package_hash = SpecHashDescriptor(
|
||||
deptype=(), package_hash=True, name='package_hash',
|
||||
override=lambda s: s.package.content_hash())
|
||||
build_hash = SpecHashDescriptor(
|
||||
deptype=('build', 'link', 'run'), package_hash=False, name='build_hash')
|
||||
|
||||
@@ -140,7 +140,7 @@ def _handle_external_and_upstream(pkg, explicit):
|
||||
.format(pkg.prefix, package_id(pkg)))
|
||||
return True
|
||||
|
||||
if pkg.installed_upstream:
|
||||
if pkg.spec.installed_upstream:
|
||||
tty.verbose('{0} is installed in an upstream Spack instance at {1}'
|
||||
.format(package_id(pkg), pkg.spec.prefix))
|
||||
_print_installed_pkg(pkg.prefix)
|
||||
@@ -260,8 +260,7 @@ def _hms(seconds):
|
||||
return ' '.join(parts)
|
||||
|
||||
|
||||
def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
|
||||
full_hash_match=False):
|
||||
def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
|
||||
"""
|
||||
Extract the package from binary cache
|
||||
|
||||
@@ -278,7 +277,7 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
|
||||
``False`` otherwise
|
||||
"""
|
||||
installed_from_cache = _try_install_from_binary_cache(
|
||||
pkg, explicit, unsigned=unsigned, full_hash_match=full_hash_match)
|
||||
pkg, explicit, unsigned=unsigned)
|
||||
pkg_id = package_id(pkg)
|
||||
if not installed_from_cache:
|
||||
pre = 'No binary for {0} found'.format(pkg_id)
|
||||
@@ -390,8 +389,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
|
||||
return True
|
||||
|
||||
|
||||
def _try_install_from_binary_cache(pkg, explicit, unsigned=False,
|
||||
full_hash_match=False):
|
||||
def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
|
||||
"""
|
||||
Try to extract the package from binary cache.
|
||||
|
||||
@@ -403,8 +401,7 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False,
|
||||
"""
|
||||
pkg_id = package_id(pkg)
|
||||
tty.debug('Searching for binary cache of {0}'.format(pkg_id))
|
||||
matches = binary_distribution.get_mirrors_for_spec(
|
||||
pkg.spec, full_hash_match=full_hash_match)
|
||||
matches = binary_distribution.get_mirrors_for_spec(pkg.spec)
|
||||
|
||||
if not matches:
|
||||
return False
|
||||
@@ -561,6 +558,10 @@ def log(pkg):
|
||||
# Archive the environment modifications for the build.
|
||||
fs.install(pkg.env_mods_path, pkg.install_env_path)
|
||||
|
||||
# Archive the install-phase test log, if present
|
||||
if pkg.test_install_log_path and os.path.exists(pkg.test_install_log_path):
|
||||
fs.install(pkg.test_install_log_path, pkg.install_test_install_log_path)
|
||||
|
||||
if os.path.exists(pkg.configure_args_path):
|
||||
# Archive the args used for the build
|
||||
fs.install(pkg.configure_args_path, pkg.install_configure_args_path)
|
||||
@@ -853,7 +854,7 @@ def _check_deps_status(self, request):
|
||||
raise InstallError(err.format(request.pkg_id, msg))
|
||||
|
||||
# Flag external and upstream packages as being installed
|
||||
if dep_pkg.spec.external or dep_pkg.installed_upstream:
|
||||
if dep_pkg.spec.external or dep_pkg.spec.installed_upstream:
|
||||
self._flag_installed(dep_pkg)
|
||||
continue
|
||||
|
||||
@@ -995,7 +996,7 @@ def _ensure_install_ready(self, pkg):
|
||||
raise ExternalPackageError('{0} {1}'.format(pre, 'is external'))
|
||||
|
||||
# Upstream packages cannot be installed locally.
|
||||
if pkg.installed_upstream:
|
||||
if pkg.spec.installed_upstream:
|
||||
raise UpstreamPackageError('{0} {1}'.format(pre, 'is upstream'))
|
||||
|
||||
# The package must have a prefix lock at this stage.
|
||||
@@ -1200,7 +1201,6 @@ def _install_task(self, task):
|
||||
install_args = task.request.install_args
|
||||
cache_only = install_args.get('cache_only')
|
||||
explicit = task.explicit
|
||||
full_hash_match = install_args.get('full_hash_match')
|
||||
tests = install_args.get('tests')
|
||||
unsigned = install_args.get('unsigned')
|
||||
use_cache = install_args.get('use_cache')
|
||||
@@ -1213,8 +1213,7 @@ def _install_task(self, task):
|
||||
|
||||
# Use the binary cache if requested
|
||||
if use_cache and \
|
||||
_install_from_cache(pkg, cache_only, explicit, unsigned,
|
||||
full_hash_match):
|
||||
_install_from_cache(pkg, cache_only, explicit, unsigned):
|
||||
self._update_installed(task)
|
||||
if task.compiler:
|
||||
spack.compilers.add_compilers_to_config(
|
||||
@@ -2018,11 +2017,10 @@ def build_process(pkg, install_args):
|
||||
|
||||
|
||||
class OverwriteInstall(object):
|
||||
def __init__(self, installer, database, task, tmp_root=None):
|
||||
def __init__(self, installer, database, task):
|
||||
self.installer = installer
|
||||
self.database = database
|
||||
self.task = task
|
||||
self.tmp_root = tmp_root
|
||||
|
||||
def install(self):
|
||||
"""
|
||||
@@ -2032,7 +2030,7 @@ def install(self):
|
||||
install error if installation fails.
|
||||
"""
|
||||
try:
|
||||
with fs.replace_directory_transaction(self.task.pkg.prefix, self.tmp_root):
|
||||
with fs.replace_directory_transaction(self.task.pkg.prefix):
|
||||
self.installer._install_task(self.task)
|
||||
except fs.CouldNotRestoreDirectoryBackup as e:
|
||||
self.database.remove(self.task.pkg.spec)
|
||||
@@ -2303,7 +2301,6 @@ def _add_default_args(self):
|
||||
('dirty', False),
|
||||
('fail_fast', False),
|
||||
('fake', False),
|
||||
('full_hash_match', False),
|
||||
('install_deps', True),
|
||||
('install_package', True),
|
||||
('install_source', False),
|
||||
|
||||
@@ -30,7 +30,7 @@
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.tty.colify
|
||||
import llnl.util.tty.color as color
|
||||
from llnl.util.tty.log import log_output, winlog
|
||||
from llnl.util.tty.log import log_output
|
||||
|
||||
import spack
|
||||
import spack.cmd
|
||||
@@ -375,13 +375,6 @@ def make_argument_parser(**kwargs):
|
||||
# stat names in groups of 7, for nice wrapping.
|
||||
stat_lines = list(zip(*(iter(stat_names),) * 7))
|
||||
|
||||
# help message for --show-cores
|
||||
show_cores_help = 'provide additional information on concretization failures\n'
|
||||
show_cores_help += 'off (default): show only the violated rule\n'
|
||||
show_cores_help += 'full: show raw unsat cores from clingo\n'
|
||||
show_cores_help += 'minimized: show subset-minimal unsat cores '
|
||||
show_cores_help += '(Warning: this may take hours for some specs)'
|
||||
|
||||
parser.add_argument(
|
||||
'-h', '--help',
|
||||
dest='help', action='store_const', const='short', default=None,
|
||||
@@ -405,9 +398,6 @@ def make_argument_parser(**kwargs):
|
||||
'-d', '--debug', action='count', default=0,
|
||||
help="write out debug messages "
|
||||
"(more d's for more verbosity: -d, -dd, -ddd, etc.)")
|
||||
parser.add_argument(
|
||||
'--show-cores', choices=["off", "full", "minimized"], default="off",
|
||||
help=show_cores_help)
|
||||
parser.add_argument(
|
||||
'--timestamp', action='store_true',
|
||||
help="Add a timestamp to tty output")
@@ -490,18 +480,11 @@ def setup_main_options(args):
|
||||
# errors raised by spack.config.
|
||||
|
||||
if args.debug:
|
||||
spack.error.debug = True
|
||||
spack.error.debug = args.debug
|
||||
spack.util.debug.register_interrupt_handler()
|
||||
spack.config.set('config:debug', True, scope='command_line')
|
||||
spack.util.environment.tracing_enabled = True
|
||||
|
||||
if args.show_cores != "off":
|
||||
# minimize_cores defaults to true, turn it off if we're showing full core
|
||||
# but don't want to wait to minimize it.
|
||||
spack.solver.asp.full_cores = True
|
||||
if args.show_cores == 'full':
|
||||
spack.solver.asp.minimize_cores = False
|
||||
|
||||
if args.timestamp:
|
||||
tty.set_timestamp(True)
|
||||
|
||||
@@ -605,14 +588,9 @@ def __call__(self, *argv, **kwargs):
|
||||
|
||||
out = StringIO()
|
||||
try:
|
||||
if sys.platform == 'win32':
|
||||
with winlog(out):
|
||||
self.returncode = _invoke_command(
|
||||
self.command, self.parser, args, unknown)
|
||||
else:
|
||||
with log_output(out):
|
||||
self.returncode = _invoke_command(
|
||||
self.command, self.parser, args, unknown)
|
||||
with log_output(out):
|
||||
self.returncode = _invoke_command(
|
||||
self.command, self.parser, args, unknown)
|
||||
|
||||
except SystemExit as e:
|
||||
self.returncode = e.code
|
||||
|
||||
@@ -184,19 +184,38 @@ def _filter_compiler_wrappers_impl(self):
|
||||
|
||||
x = llnl.util.filesystem.FileFilter(*abs_files)
|
||||
|
||||
replacements = [
|
||||
compiler_vars = [
|
||||
('CC', self.compiler.cc),
|
||||
('CXX', self.compiler.cxx),
|
||||
('F77', self.compiler.f77),
|
||||
('FC', self.compiler.fc)
|
||||
]
|
||||
for env_var, compiler_path in replacements:
|
||||
|
||||
# Some paths to the compiler wrappers might be substrings of the others.
|
||||
# For example:
|
||||
# CC=/path/to/spack/lib/spack/env/cc (realpath to the wrapper)
|
||||
# FC=/path/to/spack/lib/spack/env/cce/ftn
|
||||
# Therefore, we perform the filtering in the reversed sorted order of
|
||||
# the substituted strings. If, however, the strings are identical (e.g.
|
||||
# both CC and FC are set using realpath), the filtering is done
|
||||
# according to the order in compiler_vars. To achieve that, we populate
|
||||
# the following array with tuples of three elements: path to the
|
||||
# wrapper, negated index of the variable in compiler_vars, path to the
|
||||
# real compiler. This way, the reversed sorted order of the resulting
|
||||
# array is the order of replacements that we need.
|
||||
replacements = []
|
||||
|
||||
for idx, (env_var, compiler_path) in enumerate(compiler_vars):
|
||||
if env_var in os.environ:
|
||||
# filter spack wrapper and links to spack wrapper in case
|
||||
# build system runs realpath
|
||||
wrapper = os.environ[env_var]
|
||||
for wrapper_path in (wrapper, os.path.realpath(wrapper)):
|
||||
x.filter(wrapper_path, compiler_path, **filter_kwargs)
|
||||
replacements.append((wrapper_path, -idx, compiler_path))
|
||||
|
||||
for wrapper_path, _, compiler_path in sorted(replacements,
|
||||
reverse=True):
|
||||
x.filter(wrapper_path, compiler_path, **filter_kwargs)
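A toy demonstration of just the ordering rule described in the comment above: tuples of wrapper path and negated index are sorted in reverse, so longer wrapper paths are filtered before paths that are their prefixes, and identical paths keep the CC/CXX/F77/FC order. All paths below are made up for the sketch:

.. code-block:: python

    # Toy model of the ordering rule: sort (wrapper_path, -index) tuples in
    # reverse so a wrapper path that is a prefix of another is replaced after
    # the longer one, and identical paths fall back to the CC/CXX/F77/FC order.
    compiler_vars = [
        ('CC', '/usr/bin/gcc'),
        ('CXX', '/usr/bin/g++'),
        ('F77', '/usr/bin/gfortran'),
        ('FC', '/usr/bin/gfortran'),
    ]

    # hypothetical wrapper paths: FC resolves to the same wrapper as CC,
    # and F77's wrapper contains CC's wrapper as a prefix
    wrapper_paths = {
        'CC': '/spack/env/cc',
        'CXX': '/spack/env/c++',
        'F77': '/spack/env/cce/ftn',
        'FC': '/spack/env/cc',
    }

    replacements = []
    for idx, (env_var, compiler_path) in enumerate(compiler_vars):
        replacements.append((wrapper_paths[env_var], -idx, compiler_path))

    # longest/most specific paths come out first; CC wins the tie with FC
    for wrapper_path, _, compiler_path in sorted(replacements, reverse=True):
        print('replace {0:<20} -> {1}'.format(wrapper_path, compiler_path))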
# Remove this linking flag if present (it turns RPATH into RUNPATH)
|
||||
x.filter('{0}--enable-new-dtags'.format(self.compiler.linker_arg), '',
|
||||
|
||||
@@ -370,7 +370,7 @@ def get_module(
|
||||
available.
|
||||
"""
|
||||
try:
|
||||
upstream = spec.package.installed_upstream
|
||||
upstream = spec.installed_upstream
|
||||
except spack.repo.UnknownPackageError:
|
||||
upstream, record = spack.store.db.query_by_spec_hash(spec.dag_hash())
|
||||
if upstream:
|
||||
|
||||
@@ -132,7 +132,7 @@ def __init__(self, host=None, prefix="ms1", allow_fail=False, tags=None,
|
||||
self.tags = tags
|
||||
self.save_local = save_local
|
||||
|
||||
# We keey lookup of build_id by full_hash
|
||||
# We key lookup of build_id by dag_hash
|
||||
self.build_ids = {}
|
||||
self.setup_save()
|
||||
|
||||
@@ -412,6 +412,8 @@ def new_configuration(self, specs):
|
||||
spec.concretize()
|
||||
|
||||
# Remove extra level of nesting
|
||||
# This is the only place in Spack we still use full_hash, as `spack monitor`
|
||||
# requires specs with full_hash-keyed dependencies.
|
||||
as_dict = {"spec": spec.to_dict(hash=ht.full_hash)['spec'],
|
||||
"spack_version": self.spack_version}
|
||||
|
||||
@@ -437,8 +439,7 @@ def failed_concretization(self, specs):
|
||||
meta = spec.to_dict()['spec']
|
||||
nodes = []
|
||||
for node in meta.get("nodes", []):
|
||||
for hashtype in ["build_hash", "full_hash"]:
|
||||
node[hashtype] = "FAILED_CONCRETIZATION"
|
||||
node["full_hash"] = "FAILED_CONCRETIZATION"
|
||||
nodes.append(node)
|
||||
meta['nodes'] = nodes
|
||||
|
||||
@@ -470,13 +471,13 @@ def get_build_id(self, spec, return_response=False, spec_exists=True):
|
||||
"""
|
||||
Retrieve a build id, either in the local cache, or query the server.
|
||||
"""
|
||||
full_hash = spec.full_hash()
|
||||
if full_hash in self.build_ids:
|
||||
return self.build_ids[full_hash]
|
||||
dag_hash = spec.dag_hash()
|
||||
if dag_hash in self.build_ids:
|
||||
return self.build_ids[dag_hash]
|
||||
|
||||
# Prepare build environment data (including spack version)
|
||||
data = self.build_environment.copy()
|
||||
data['full_hash'] = full_hash
|
||||
data['full_hash'] = dag_hash
|
||||
|
||||
# If the build should be tagged, add it
|
||||
if self.tags:
|
||||
@@ -494,10 +495,10 @@ def get_build_id(self, spec, return_response=False, spec_exists=True):
|
||||
data['spec'] = syaml.load(read_file(spec_file))
|
||||
|
||||
if self.save_local:
|
||||
return self.get_local_build_id(data, full_hash, return_response)
|
||||
return self.get_server_build_id(data, full_hash, return_response)
|
||||
return self.get_local_build_id(data, dag_hash, return_response)
|
||||
return self.get_server_build_id(data, dag_hash, return_response)
|
||||
|
||||
def get_local_build_id(self, data, full_hash, return_response):
|
||||
def get_local_build_id(self, data, dag_hash, return_response):
|
||||
"""
|
||||
Generate a local build id based on hashing the expected data
|
||||
"""
|
||||
@@ -510,15 +511,15 @@ def get_local_build_id(self, data, full_hash, return_response):
|
||||
return response
|
||||
return bid
|
||||
|
||||
def get_server_build_id(self, data, full_hash, return_response=False):
|
||||
def get_server_build_id(self, data, dag_hash, return_response=False):
|
||||
"""
|
||||
Retrieve a build id from the spack monitor server
|
||||
"""
|
||||
response = self.do_request("builds/new/", data=sjson.dump(data))
|
||||
|
||||
# Add the build id to the lookup
|
||||
bid = self.build_ids[full_hash] = response['data']['build']['build_id']
|
||||
self.build_ids[full_hash] = bid
|
||||
bid = self.build_ids[dag_hash] = response['data']['build']['build_id']
|
||||
self.build_ids[dag_hash] = bid
|
||||
|
||||
# If the function is called directly, the user might want output
|
||||
if return_response:
|
||||
|
||||
@@ -26,13 +26,14 @@
|
||||
import time
|
||||
import traceback
|
||||
import types
|
||||
import warnings
|
||||
from typing import Any, Callable, Dict, List, Optional # novm
|
||||
|
||||
import six
|
||||
|
||||
import llnl.util.filesystem as fsys
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import memoized
|
||||
from llnl.util.lang import memoized, nullcontext
|
||||
from llnl.util.link_tree import LinkTree
|
||||
|
||||
import spack.compilers
|
||||
@@ -76,6 +77,9 @@
|
||||
# Filename for the Spack build/install environment modifications file.
|
||||
_spack_build_envmodsfile = 'spack-build-env-mods.txt'
|
||||
|
||||
# Filename for the Spack install phase-time test log.
|
||||
_spack_install_test_log = 'install-time-test-log.txt'
|
||||
|
||||
# Filename of json with total build and phase times (seconds)
|
||||
_spack_times_log = 'install_times.json'
|
||||
|
||||
@@ -790,15 +794,6 @@ def __init__(self, spec):
|
||||
|
||||
super(PackageBase, self).__init__()
|
||||
|
||||
@property
|
||||
def installed_upstream(self):
|
||||
if not hasattr(self, '_installed_upstream'):
|
||||
upstream, record = spack.store.db.query_by_spec_hash(
|
||||
self.spec.dag_hash())
|
||||
self._installed_upstream = upstream
|
||||
|
||||
return self._installed_upstream
|
||||
|
||||
@classmethod
|
||||
def possible_dependencies(
|
||||
cls, transitive=True, expand_virtuals=True, deptype='all',
|
||||
@@ -1252,6 +1247,16 @@ def configure_args_path(self):
|
||||
"""Return the configure args file path associated with staging."""
|
||||
return os.path.join(self.stage.path, _spack_configure_argsfile)
|
||||
|
||||
@property
|
||||
def test_install_log_path(self):
|
||||
"""Return the install phase-time test log file path, if set."""
|
||||
return getattr(self, 'test_log_file', None)
|
||||
|
||||
@property
|
||||
def install_test_install_log_path(self):
|
||||
"""Return the install location for the install phase-time test log."""
|
||||
return fsys.join_path(self.metadata_dir, _spack_install_test_log)
|
||||
|
||||
@property
|
||||
def times_log_path(self):
|
||||
"""Return the times log json file."""
|
||||
@@ -1267,6 +1272,20 @@ def install_test_root(self):
|
||||
"""Return the install test root directory."""
|
||||
return os.path.join(self.metadata_dir, 'test')
|
||||
|
||||
@property
|
||||
def installed(self):
|
||||
msg = ('the "PackageBase.installed" property is deprecated and will be '
|
||||
'removed in Spack v0.19, use "Spec.installed" instead')
|
||||
warnings.warn(msg)
|
||||
return self.spec.installed
|
||||
|
||||
@property
|
||||
def installed_upstream(self):
|
||||
msg = ('the "PackageBase.installed_upstream" property is deprecated and will '
|
||||
'be removed in Spack v0.19, use "Spec.installed_upstream" instead')
|
||||
warnings.warn(msg)
|
||||
return self.spec.installed_upstream
|
||||
|
||||
def _make_fetcher(self):
|
||||
# Construct a composite fetcher that always contains at least
|
||||
# one element (the root package). In case there are resources
|
||||
@@ -1380,7 +1399,7 @@ def is_activated(self, view):
|
||||
if not self.is_extension:
|
||||
raise ValueError(
|
||||
"is_activated called on package that is not an extension.")
|
||||
if self.extendee_spec.package.installed_upstream:
|
||||
if self.extendee_spec.installed_upstream:
|
||||
# If this extends an upstream package, it cannot be activated for
|
||||
# it. This bypasses construction of the extension map, which can
|
||||
# can fail when run in the context of a downstream Spack instance
|
||||
@@ -1406,22 +1425,6 @@ def virtuals_provided(self):
|
||||
return [vspec for vspec, constraints in self.provided.items()
|
||||
if any(self.spec.satisfies(c) for c in constraints)]
|
||||
|
||||
@property
|
||||
def installed(self):
|
||||
"""Installation status of a package.
|
||||
|
||||
Returns:
|
||||
True if the package has been installed, False otherwise.
|
||||
"""
|
||||
try:
|
||||
# If the spec is in the DB, check the installed
|
||||
# attribute of the record
|
||||
return spack.store.db.get_record(self.spec).installed
|
||||
except KeyError:
|
||||
# If the spec is not in the DB, the method
|
||||
# above raises a Key error
|
||||
return False
|
||||
|
||||
@property
|
||||
def prefix(self):
|
||||
"""Get the prefix into which this package should be installed."""
|
||||
@@ -1670,39 +1673,62 @@ def all_patches(cls):
|
||||
return patches
|
||||
|
||||
def content_hash(self, content=None):
|
||||
"""Create a hash based on the sources and logic used to build the
|
||||
package. This includes the contents of all applied patches and the
|
||||
contents of applicable functions in the package subclass."""
|
||||
if not self.spec.concrete:
|
||||
err_msg = ("Cannot invoke content_hash on a package"
|
||||
" if the associated spec is not concrete")
|
||||
raise spack.error.SpackError(err_msg)
|
||||
"""Create a hash based on the artifacts and patches used to build this package.
|
||||
|
||||
hash_content = list()
|
||||
try:
|
||||
source_id = fs.for_package_version(self, self.version).source_id()
|
||||
except fs.ExtrapolationError:
|
||||
source_id = None
|
||||
if not source_id:
|
||||
# TODO? in cases where a digest or source_id isn't available,
|
||||
# should this attempt to download the source and set one? This
|
||||
# probably only happens for source repositories which are
|
||||
# referenced by branch name rather than tag or commit ID.
|
||||
env = spack.environment.active_environment()
|
||||
from_local_sources = env and env.is_develop(self.spec)
|
||||
if not self.spec.external and not from_local_sources:
|
||||
message = 'Missing a source id for {s.name}@{s.version}'
|
||||
tty.warn(message.format(s=self))
|
||||
hash_content.append(''.encode('utf-8'))
|
||||
else:
|
||||
hash_content.append(source_id.encode('utf-8'))
|
||||
hash_content.extend(':'.join((p.sha256, str(p.level))).encode('utf-8')
|
||||
for p in self.spec.patches)
|
||||
This includes:
|
||||
* source artifacts (tarballs, repositories) used to build;
|
||||
* content hashes (``sha256``'s) of all patches applied by Spack; and
|
||||
* canonicalized contents the ``package.py`` recipe used to build.
|
||||
|
||||
This hash is only included in Spack's DAG hash for concrete specs, but if it
|
||||
happens to be called on a package with an abstract spec, only applicable (i.e.,
|
||||
determinable) portions of the hash will be included.
|
||||
|
||||
"""
|
||||
# list of components to make up the hash
|
||||
hash_content = []
|
||||
|
||||
# source artifacts/repositories
|
||||
# TODO: resources
|
||||
if self.spec.versions.concrete:
|
||||
try:
|
||||
source_id = fs.for_package_version(self, self.version).source_id()
|
||||
except (fs.ExtrapolationError, fs.InvalidArgsError):
|
||||
# ExtrapolationError happens if the package has no fetchers defined.
|
||||
# InvalidArgsError happens when there are version directives with args,
|
||||
# but none of them identifies an actual fetcher.
|
||||
source_id = None
|
||||
|
||||
if not source_id:
|
||||
# TODO? in cases where a digest or source_id isn't available,
|
||||
# should this attempt to download the source and set one? This
|
||||
# probably only happens for source repositories which are
|
||||
# referenced by branch name rather than tag or commit ID.
|
||||
env = spack.environment.active_environment()
|
||||
from_local_sources = env and env.is_develop(self.spec)
|
||||
if not self.spec.external and not from_local_sources:
|
||||
message = 'Missing a source id for {s.name}@{s.version}'
|
||||
tty.warn(message.format(s=self))
|
||||
hash_content.append(''.encode('utf-8'))
|
||||
else:
|
||||
hash_content.append(source_id.encode('utf-8'))
|
||||
|
||||
# patch sha256's
|
||||
if self.spec.concrete:
|
||||
hash_content.extend(
|
||||
':'.join((p.sha256, str(p.level))).encode('utf-8')
|
||||
for p in self.spec.patches
|
||||
)
|
||||
|
||||
# package.py contents
|
||||
hash_content.append(package_hash(self.spec, source=content).encode('utf-8'))
|
||||
|
||||
# put it all together and encode as base32
|
||||
b32_hash = base64.b32encode(
|
||||
hashlib.sha256(bytes().join(
|
||||
sorted(hash_content))).digest()).lower()
|
||||
hashlib.sha256(
|
||||
bytes().join(sorted(hash_content))
|
||||
).digest()
|
||||
).lower()
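The tail of this method boils down to: collect byte strings, sort them, SHA-256 the concatenation, and base32-encode the digest. A standalone illustration of that combination step (the component values are fabricated; only the mechanics mirror the code above):

.. code-block:: python

    import base64
    import hashlib

    # fabricated stand-ins for the real components (source id, patch
    # sha256:level pairs, canonicalized package.py contents)
    hash_content = [
        b"9d3f1e6c...source-archive-sha256",
        b"c0ffee00...:1",            # patch sha256 and level
        b"def install(self, spec, prefix): ...",
    ]

    # sort so the result does not depend on collection order, then
    # sha256 the concatenation and base32-encode the digest
    b32_hash = base64.b32encode(
        hashlib.sha256(b"".join(sorted(hash_content))).digest()
    ).lower()

    print(b32_hash.decode("utf-8"))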
# convert from bytes if running python 3
|
||||
if sys.version_info[0] >= 3:
|
||||
@@ -1926,6 +1952,33 @@ def cache_extra_test_sources(self, srcs):
|
||||
fsys.mkdirp(os.path.dirname(dest_path))
|
||||
fsys.copy(src_path, dest_path)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _setup_test(self, verbose, externals):
|
||||
self.test_failures = []
|
||||
if self.test_suite:
|
||||
self.test_log_file = self.test_suite.log_file_for_spec(self.spec)
|
||||
self.tested_file = self.test_suite.tested_file_for_spec(self.spec)
|
||||
pkg_id = self.test_suite.test_pkg_id(self.spec)
|
||||
else:
|
||||
self.test_log_file = fsys.join_path(
|
||||
self.stage.path, _spack_install_test_log)
|
||||
pkg_id = self.spec.format('{name}-{version}-{hash:7}')
|
||||
fsys.touch(self.test_log_file) # Otherwise log_parse complains
|
||||
|
||||
with tty.log.log_output(self.test_log_file, verbose) as logger:
|
||||
with logger.force_echo():
|
||||
tty.msg('Testing package {0}'.format(pkg_id))
|
||||
|
||||
# use debug print levels for log file to record commands
|
||||
old_debug = tty.is_debug()
|
||||
tty.set_debug(True)
|
||||
|
||||
try:
|
||||
yield logger
|
||||
finally:
|
||||
# reset debug level
|
||||
tty.set_debug(old_debug)
|
||||
|
||||
def do_test(self, dirty=False, externals=False):
|
||||
if self.test_requires_compiler:
|
||||
compilers = spack.compilers.compilers_for_spec(
|
||||
@@ -1937,19 +1990,14 @@ def do_test(self, dirty=False, externals=False):
|
||||
self.spec.compiler)
|
||||
return
|
||||
|
||||
# Clear test failures
|
||||
self.test_failures = []
|
||||
self.test_log_file = self.test_suite.log_file_for_spec(self.spec)
|
||||
self.tested_file = self.test_suite.tested_file_for_spec(self.spec)
|
||||
fsys.touch(self.test_log_file) # Otherwise log_parse complains
|
||||
|
||||
kwargs = {
|
||||
'dirty': dirty, 'fake': False, 'context': 'test',
|
||||
'externals': externals
|
||||
}
|
||||
if tty.is_verbose():
|
||||
kwargs['verbose'] = True
|
||||
spack.build_environment.start_build_process(self, test_process, kwargs)
|
||||
spack.build_environment.start_build_process(
|
||||
self, test_process, kwargs)
|
||||
|
||||
def test(self):
|
||||
# Defer tests to virtual and concrete packages
|
||||
@@ -2143,21 +2191,21 @@ def build_log_path(self):
|
||||
to the staging build file until the software is successfully installed,
|
||||
when it points to the file in the installation directory.
|
||||
"""
|
||||
return self.install_log_path if self.installed else self.log_path
|
||||
return self.install_log_path if self.spec.installed else self.log_path
|
||||
|
||||
@classmethod
|
||||
def inject_flags(cls, name, flags):
|
||||
"""
|
||||
flag_handler that injects all flags through the compiler wrapper.
|
||||
"""
|
||||
return (flags, None, None)
|
||||
return flags, None, None
|
||||
|
||||
@classmethod
|
||||
def env_flags(cls, name, flags):
|
||||
"""
|
||||
flag_handler that adds all flags to canonical environment variables.
|
||||
"""
|
||||
return (None, flags, None)
|
||||
return None, flags, None
|
||||
|
||||
@classmethod
|
||||
def build_system_flags(cls, name, flags):
|
||||
@@ -2168,7 +2216,7 @@ def build_system_flags(cls, name, flags):
|
||||
implements it. Currently, AutotoolsPackage and CMakePackage
|
||||
implement it.
|
||||
"""
|
||||
return (None, None, flags)
|
||||
return None, None, flags
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
"""Sets up the build environment for a package.
|
||||
@@ -2323,7 +2371,11 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
|
||||
|
||||
if not force:
|
||||
dependents = spack.store.db.installed_relatives(
|
||||
spec, 'parents', True)
|
||||
spec,
|
||||
direction='parents',
|
||||
transitive=True,
|
||||
deptype=("link", "run"),
|
||||
)
|
||||
if dependents:
|
||||
raise PackageStillNeededError(spec, dependents)
|
||||
|
||||
@@ -2465,10 +2517,10 @@ def _sanity_check_extension(self):
|
||||
extendee_package = self.extendee_spec.package
|
||||
extendee_package._check_extendable()
|
||||
|
||||
if not extendee_package.installed:
|
||||
if not self.extendee_spec.installed:
|
||||
raise ActivationError(
|
||||
"Can only (de)activate extensions for installed packages.")
|
||||
if not self.installed:
|
||||
if not self.spec.installed:
|
||||
raise ActivationError("Extensions must first be installed.")
|
||||
if self.extendee_spec.name not in self.extendees:
|
||||
raise ActivationError("%s does not extend %s!" %
|
||||
@@ -2694,45 +2746,54 @@ def rpath_args(self):
|
||||
"""
|
||||
return " ".join("-Wl,-rpath,%s" % p for p in self.rpath)
|
||||
|
||||
def _run_test_callbacks(self, method_names, callback_type='install'):
|
||||
"""Tries to call all of the listed methods, returning immediately
|
||||
if the list is None."""
|
||||
if method_names is None:
|
||||
return
|
||||
|
||||
fail_fast = spack.config.get('config:fail_fast', False)
|
||||
|
||||
with self._setup_test(verbose=False, externals=False) as logger:
|
||||
# Report running each of the methods in the build log
|
||||
print_test_message(
|
||||
logger, 'Running {0}-time tests'.format(callback_type), True)
|
||||
|
||||
for name in method_names:
|
||||
try:
|
||||
fn = getattr(self, name)
|
||||
|
||||
msg = 'RUN-TESTS: {0}-time tests [{1}]' \
|
||||
.format(callback_type, name)
|
||||
print_test_message(logger, msg, True)
|
||||
|
||||
fn()
|
||||
except AttributeError as e:
|
||||
msg = 'RUN-TESTS: method not implemented [{0}]' \
|
||||
.format(name)
|
||||
print_test_message(logger, msg, True)
|
||||
|
||||
self.test_failures.append((e, msg))
|
||||
if fail_fast:
|
||||
break
|
||||
|
||||
# Raise any collected failures here
|
||||
if self.test_failures:
|
||||
raise TestFailure(self.test_failures)
|
||||
|
||||
@on_package_attributes(run_tests=True)
|
||||
def _run_default_build_time_test_callbacks(self):
|
||||
"""Tries to call all the methods that are listed in the attribute
|
||||
``build_time_test_callbacks`` if ``self.run_tests is True``.
|
||||
|
||||
If ``build_time_test_callbacks is None`` returns immediately.
|
||||
"""
|
||||
if self.build_time_test_callbacks is None:
|
||||
return
|
||||
|
||||
for name in self.build_time_test_callbacks:
|
||||
try:
|
||||
fn = getattr(self, name)
|
||||
except AttributeError:
|
||||
msg = 'RUN-TESTS: method not implemented [{0}]'
|
||||
tty.warn(msg.format(name))
|
||||
else:
|
||||
tty.msg('RUN-TESTS: build-time tests [{0}]'.format(name))
|
||||
fn()
|
||||
self._run_test_callbacks(self.build_time_test_callbacks, 'build')
|
||||
|
||||
@on_package_attributes(run_tests=True)
|
||||
def _run_default_install_time_test_callbacks(self):
|
||||
"""Tries to call all the methods that are listed in the attribute
|
||||
``install_time_test_callbacks`` if ``self.run_tests is True``.
|
||||
|
||||
If ``install_time_test_callbacks is None`` returns immediately.
|
||||
"""
|
||||
if self.install_time_test_callbacks is None:
|
||||
return
|
||||
|
||||
for name in self.install_time_test_callbacks:
|
||||
try:
|
||||
fn = getattr(self, name)
|
||||
except AttributeError:
|
||||
msg = 'RUN-TESTS: method not implemented [{0}]'
|
||||
tty.warn(msg.format(name))
|
||||
else:
|
||||
tty.msg('RUN-TESTS: install-time tests [{0}]'.format(name))
|
||||
fn()
|
||||
self._run_test_callbacks(self.install_time_test_callbacks, 'install')
|
||||
|
||||
|
||||
def has_test_method(pkg):
|
||||
@@ -2757,27 +2818,21 @@ def has_test_method(pkg):
|
||||
def print_test_message(logger, msg, verbose):
|
||||
if verbose:
|
||||
with logger.force_echo():
|
||||
print(msg)
|
||||
tty.msg(msg)
|
||||
else:
|
||||
print(msg)
|
||||
tty.msg(msg)
|
||||
|
||||
|
||||
def test_process(pkg, kwargs):
|
||||
verbose = kwargs.get('verbose', False)
|
||||
externals = kwargs.get('externals', False)
|
||||
with tty.log.log_output(pkg.test_log_file, verbose) as logger:
|
||||
with logger.force_echo():
|
||||
tty.msg('Testing package {0}'
|
||||
.format(pkg.test_suite.test_pkg_id(pkg.spec)))
|
||||
|
||||
with pkg._setup_test(verbose, externals) as logger:
|
||||
if pkg.spec.external and not externals:
|
||||
print_test_message(logger, 'Skipped external package', verbose)
|
||||
print_test_message(
|
||||
logger, 'Skipped tests for external package', verbose)
|
||||
return
|
||||
|
||||
# use debug print levels for log file to record commands
|
||||
old_debug = tty.is_debug()
|
||||
tty.set_debug(True)
|
||||
|
||||
# run test methods from the package and all virtuals it
|
||||
# provides; virtuals have to be deduped by name
|
||||
v_names = list(set([vspec.name
|
||||
@@ -2796,8 +2851,7 @@ def test_process(pkg, kwargs):
|
||||
|
||||
ran_actual_test_function = False
|
||||
try:
|
||||
with fsys.working_dir(
|
||||
pkg.test_suite.test_dir_for_spec(pkg.spec)):
|
||||
with fsys.working_dir(pkg.test_suite.test_dir_for_spec(pkg.spec)):
|
||||
for spec in test_specs:
|
||||
pkg.test_suite.current_test_spec = spec
|
||||
# Fail gracefully if a virtual has no package/tests
|
||||
@@ -2839,7 +2893,9 @@ def test_process(pkg, kwargs):
|
||||
|
||||
# Run the tests
|
||||
ran_actual_test_function = True
|
||||
test_fn(pkg)
|
||||
context = logger.force_echo if verbose else nullcontext
|
||||
with context():
|
||||
test_fn(pkg)
|
||||
|
||||
# If fail-fast was on, we error out above
|
||||
# If we collect errors, raise them in batch here
|
||||
@@ -2847,15 +2903,12 @@ def test_process(pkg, kwargs):
|
||||
raise TestFailure(pkg.test_failures)
|
||||
|
||||
finally:
|
||||
# reset debug level
|
||||
tty.set_debug(old_debug)
|
||||
|
||||
# flag the package as having been tested (i.e., ran one or more
|
||||
# non-pass-only methods)
|
||||
if ran_actual_test_function:
|
||||
fsys.touch(pkg.tested_file)
|
||||
else:
|
||||
print_test_message(logger, 'No tests to run', verbose)
|
||||
print_test_message(logger, 'No tests to run', verbose)
|
||||
|
||||
|
||||
inject_flags = PackageBase.inject_flags
|
||||
|
||||
@@ -25,6 +25,7 @@
|
||||
from spack.build_systems.cuda import CudaPackage
|
||||
from spack.build_systems.gnu import GNUMirrorPackage
|
||||
from spack.build_systems.intel import IntelPackage
|
||||
from spack.build_systems.lua import LuaPackage
|
||||
from spack.build_systems.makefile import MakefilePackage
|
||||
from spack.build_systems.maven import MavenPackage
|
||||
from spack.build_systems.meson import MesonPackage
|
||||
|
||||
@@ -355,9 +355,17 @@ def list_packages(rev):
|
||||
ref = rev.replace('...', '')
|
||||
rev = git('merge-base', ref, 'HEAD', output=str).strip()
|
||||
|
||||
output = git('ls-tree', '--name-only', rev, output=str)
|
||||
return sorted(line for line in output.split('\n')
|
||||
if line and not line.startswith('.'))
|
||||
output = git('ls-tree', '-r', '--name-only', rev, output=str)
|
||||
|
||||
# recursively list the packages directory
|
||||
package_paths = [
|
||||
line.split(os.sep) for line in output.split("\n") if line.endswith("package.py")
|
||||
]
|
||||
|
||||
# take the directory names with one-level-deep package files
|
||||
package_names = sorted(set([line[0] for line in package_paths if len(line) == 2]))
|
||||
|
||||
return package_names
|
||||
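# Sketch of the transformation above with hypothetical `git ls-tree -r` output
# (illustration only, not part of this changeset):
#
#     output = "zlib/package.py\nzlib/patches/fix.patch\nabseil-cpp/package.py"
#     package_paths = [l.split(os.sep) for l in output.split("\n")
#                      if l.endswith("package.py")]
#     # -> [['zlib', 'package.py'], ['abseil-cpp', 'package.py']]
#     sorted(set(l[0] for l in package_paths if len(l) == 2))
#     # -> ['abseil-cpp', 'zlib']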
|
||||
|
||||
def diff_packages(rev1, rev2):
|
||||
|
||||
@@ -112,8 +112,7 @@ def __enter__(self):
|
||||
# Check which specs are already installed and mark them as skipped
|
||||
# only for install_task
|
||||
if self.do_fn == '_install_task':
|
||||
for dep in filter(lambda x: x.package.installed,
|
||||
input_spec.traverse()):
|
||||
for dep in filter(lambda x: x.installed, input_spec.traverse()):
|
||||
package = {
|
||||
'name': dep.name,
|
||||
'id': dep.dag_hash(),
|
||||
@@ -140,7 +139,7 @@ def wrapper(instance, *args, **kwargs):
|
||||
raise Exception
|
||||
|
||||
# We accounted before for what is already installed
|
||||
installed_already = pkg.installed
|
||||
installed_already = pkg.spec.installed
|
||||
|
||||
package = {
|
||||
'name': pkg.name,
|
||||
|
||||
@@ -38,13 +38,13 @@ def rewire(spliced_spec):
|
||||
nodes in the DAG of that spec."""
|
||||
assert spliced_spec.spliced
|
||||
for spec in spliced_spec.traverse(order='post', root=True):
|
||||
if not spec.build_spec.package.installed:
|
||||
if not spec.build_spec.installed:
|
||||
# TODO: May want to change this at least for the root spec...
|
||||
# spec.build_spec.package.do_install(force=True)
|
||||
raise PackageNotInstalledError(spliced_spec,
|
||||
spec.build_spec,
|
||||
spec)
|
||||
if spec.build_spec is not spec and not spec.package.installed:
|
||||
if spec.build_spec is not spec and not spec.installed:
|
||||
explicit = spec is spliced_spec
|
||||
rewire_node(spec, explicit)
|
||||
|
||||
@@ -95,7 +95,8 @@ def rewire_node(spec, explicit):
|
||||
spec.prefix)
|
||||
relocate.relocate_text_bin(binaries=bins_to_relocate,
|
||||
prefixes=prefix_to_prefix)
|
||||
# copy package into place (shutil.copytree)
|
||||
# Copy package into place, except for spec.json (because spec.json
|
||||
# describes the old spec and not the new spliced spec).
|
||||
shutil.copytree(os.path.join(tempdir, spec.dag_hash()), spec.prefix,
|
||||
ignore=shutil.ignore_patterns('spec.json',
|
||||
'install_manifest.json'))
|
||||
@@ -104,7 +105,10 @@ def rewire_node(spec, explicit):
|
||||
spec.build_spec.prefix,
|
||||
spec.prefix)
|
||||
shutil.rmtree(tempdir)
|
||||
# handle all metadata changes; don't copy over spec.json file in .spack/
|
||||
# Above, we did not copy spec.json: instead, here we write the new
|
||||
# (spliced) spec into spec.json, without this, Database.add would fail on
|
||||
# the next line (because it checks the spec.json in the prefix against the
|
||||
# spec being added to look for mismatches)
|
||||
spack.store.layout.write_spec(spec, spack.store.layout.spec_file_path(spec))
|
||||
# add to database, not sure about explicit
|
||||
spack.store.db.add(spec, spack.store.layout, explicit=explicit)
|
||||
|
||||
@@ -15,6 +15,16 @@
|
||||
'additionalProperties': False,
|
||||
'properties': {
|
||||
'reuse': {'type': 'boolean'},
|
||||
'targets': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'host_compatible': {'type': 'boolean'},
|
||||
'granularity': {
|
||||
'type': 'string',
|
||||
'enum': ['generic', 'microarchitectures']
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
131  lib/spack/spack/schema/cray_manifest.py  Normal file
@@ -0,0 +1,131 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
"""Schema for Cray descriptive manifest: this describes a set of
|
||||
installed packages on the system and also specifies dependency
|
||||
relationships between them (so this provides more information than
|
||||
external entries in packages configuration).
|
||||
|
||||
This does not specify a configuration - it is an input format
|
||||
that is consumed and transformed into Spack DB records.
|
||||
"""
|
||||
|
||||
schema = {
|
||||
"$schema": "http://json-schema.org/schema#",
|
||||
"title": "CPE manifest schema",
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"_meta": {
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"file-type": {"type": "string", "minLength": 1},
|
||||
"cpe-version": {"type": "string", "minLength": 1},
|
||||
"system-type": {"type": "string", "minLength": 1},
|
||||
"schema-version": {"type": "string", "minLength": 1},
|
||||
}
|
||||
},
|
||||
"compilers": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"name": {"type": "string", "minLength": 1},
|
||||
"version": {"type": "string", "minLength": 1},
|
||||
"prefix": {"type": "string", "minLength": 1},
|
||||
"executables": {
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"cc": {"type": "string", "minLength": 1},
|
||||
"cxx": {"type": "string", "minLength": 1},
|
||||
"fc": {"type": "string", "minLength": 1}
|
||||
}
|
||||
},
|
||||
"arch": {
|
||||
"type": "object",
|
||||
"required": ["os", "target"],
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"os": {"type": "string", "minLength": 1},
|
||||
"target": {"type": "string", "minLength": 1}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"specs": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "object",
|
||||
"required": [
|
||||
"name",
|
||||
"version",
|
||||
"arch",
|
||||
"compiler",
|
||||
"prefix",
|
||||
"hash"],
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"name": {"type": "string", "minLength": 1},
|
||||
"version": {"type": "string", "minLength": 1},
|
||||
"arch": {
|
||||
"type": "object",
|
||||
"required": ["platform", "platform_os", "target"],
|
||||
"additioanlProperties": False,
|
||||
"properties": {
|
||||
"platform": {"type": "string", "minLength": 1},
|
||||
"platform_os": {"type": "string", "minLength": 1},
|
||||
"target": {
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"required": ["name"],
|
||||
"properties": {
|
||||
"name": {"type": "string", "minLength": 1}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"type": "object",
|
||||
"required": ["name", "version"],
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"name": {"type": "string", "minLength": 1},
|
||||
"version": {"type": "string", "minLength": 1}
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
"\\w[\\w-]*": {
|
||||
"type": "object",
|
||||
"required": ["hash"],
|
||||
"additionalProperties": False,
|
||||
"properties": {
|
||||
"hash": {"type": "string", "minLength": 1},
|
||||
"type": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string", "minLength": 1}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"prefix": {
|
||||
"type": "string", "minLength": 1},
|
||||
"rpm": {"type": "string", "minLength": 1},
|
||||
"hash": {"type": "string", "minLength": 1},
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -13,6 +13,7 @@
|
||||
import spack.schema.bootstrap
|
||||
import spack.schema.cdash
|
||||
import spack.schema.compilers
|
||||
import spack.schema.concretizer
|
||||
import spack.schema.config
|
||||
import spack.schema.container
|
||||
import spack.schema.gitlab_ci
|
||||
@@ -27,6 +28,7 @@
|
||||
spack.schema.bootstrap.properties,
|
||||
spack.schema.cdash.properties,
|
||||
spack.schema.compilers.properties,
|
||||
spack.schema.concretizer.properties,
|
||||
spack.schema.config.properties,
|
||||
spack.schema.container.properties,
|
||||
spack.schema.gitlab_ci.properties,
|
||||
|
||||
@@ -110,9 +110,12 @@
|
||||
'properties': {
|
||||
'name': {'type': 'string'},
|
||||
'hash': {'type': 'string'},
|
||||
'package_hash': {'type': 'string'},
|
||||
|
||||
# these hashes were used on some specs prior to 0.18
|
||||
'full_hash': {'type': 'string'},
|
||||
'build_hash': {'type': 'string'},
|
||||
'package_hash': {'type': 'string'},
|
||||
|
||||
'version': {
|
||||
'oneOf': [
|
||||
{'type': 'string'},
|
||||
|
||||
@@ -9,6 +9,7 @@
|
||||
import itertools
|
||||
import os
|
||||
import pprint
|
||||
import re
|
||||
import types
|
||||
import warnings
|
||||
|
||||
@@ -55,14 +56,6 @@
|
||||
parse_files = None
|
||||
|
||||
|
||||
#: whether we should write ASP unsat cores quickly in debug mode when the cores
|
||||
#: may be very large or take the time (sometimes hours) to minimize them
|
||||
minimize_cores = True
|
||||
|
||||
#: whether we should include all facts in the unsat cores or only error messages
|
||||
full_cores = False
|
||||
|
||||
|
||||
# backward compatibility functions for clingo ASTs
|
||||
def ast_getter(*names):
|
||||
def getter(node):
|
||||
@@ -77,20 +70,25 @@ def getter(node):
|
||||
ast_type = ast_getter("ast_type", "type")
|
||||
ast_sym = ast_getter("symbol", "term")
|
||||
|
||||
#: Order of precedence for version origins. Topmost types are preferred.
|
||||
version_origin_fields = [
|
||||
'spec',
|
||||
'external',
|
||||
'packages_yaml',
|
||||
'package_py',
|
||||
'installed',
|
||||
]
|
||||
|
||||
#: Look up version precedence strings by enum id
|
||||
version_origin_str = {
|
||||
i: name for i, name in enumerate(version_origin_fields)
|
||||
}
|
||||
|
||||
#: Enumeration like object to mark version provenance
|
||||
version_provenance = collections.namedtuple( # type: ignore
|
||||
'VersionProvenance', ['external', 'packages_yaml', 'package_py', 'spec']
|
||||
)(spec=0, external=1, packages_yaml=2, package_py=3)
|
||||
|
||||
#: String representation of version origins, to emit legible
|
||||
# facts for the ASP solver
|
||||
version_origin_str = {
|
||||
0: 'spec',
|
||||
1: 'external',
|
||||
2: 'packages_yaml',
|
||||
3: 'package_py'
|
||||
}
|
||||
'VersionProvenance',
|
||||
version_origin_fields,
|
||||
)(**{name: i for i, name in enumerate(version_origin_fields)})
|
||||
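# Sketch of the objects the two definitions above produce (follows directly
# from version_origin_fields; shown for clarity, not part of the change):
#
#     version_origin_str  == {0: 'spec', 1: 'external', 2: 'packages_yaml',
#                             3: 'package_py', 4: 'installed'}
#     version_provenance.spec      == 0
#     version_provenance.installed == 4
#
# Lower values sort first, so origins listed earlier in version_origin_fields
# take precedence when declared versions are ordered by origin.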
|
||||
#: Named tuple to contain information on declared versions
|
||||
DeclaredVersion = collections.namedtuple(
|
||||
@@ -109,7 +107,7 @@ def getter(node):
|
||||
|
||||
|
||||
def build_criteria_names(costs, tuples):
|
||||
"""Construct an ordered mapping from criteria names to indices in the cost list."""
|
||||
"""Construct an ordered mapping from criteria names to costs."""
|
||||
# pull optimization criteria names out of the solution
|
||||
priorities_names = []
|
||||
|
||||
@@ -136,7 +134,10 @@ def build_criteria_names(costs, tuples):
|
||||
# sort the criteria by priority
|
||||
priorities_names = sorted(priorities_names, reverse=True)
|
||||
|
||||
assert len(priorities_names) == len(costs), "Wrong number of optimization criteria!"
|
||||
# We only have opt-criterion values for non-error types
|
||||
# error type criteria are excluded (they come first)
|
||||
error_criteria = len(costs) - len(priorities_names)
|
||||
costs = costs[error_criteria:]
|
||||
|
||||
# split list into three parts: build criteria, fixed criteria, non-build criteria
|
||||
num_criteria = len(priorities_names)
|
||||
@@ -149,12 +150,12 @@ def build_criteria_names(costs, tuples):
|
||||
# mapping from priority to index in cost list
|
||||
indices = dict((p, i) for i, (p, n) in enumerate(priorities_names))
|
||||
|
||||
# make a list that has each name with its build and non-build priority
|
||||
# make a list that has each name with its build and non-build costs
|
||||
criteria = [
|
||||
(p - fixed_priority_offset + num_build, None, name) for p, name in fixed
|
||||
(costs[p - fixed_priority_offset + num_build], None, name) for p, name in fixed
|
||||
]
|
||||
for (i, name), (b, _) in zip(installed, build):
|
||||
criteria.append((indices[i], indices[b], name))
|
||||
criteria.append((costs[indices[i]], costs[indices[b]], name))
|
||||
|
||||
return criteria
|
||||
|
||||
@@ -326,9 +327,6 @@ def format_core(self, core):
|
||||
core_symbols = []
|
||||
for atom in core:
|
||||
sym = symbols[atom]
|
||||
if sym.name in ("rule", "error"):
|
||||
# these are special symbols we use to get messages in the core
|
||||
sym = sym.arguments[0].string
|
||||
core_symbols.append(sym)
|
||||
|
||||
return sorted(str(symbol) for symbol in core_symbols)
|
||||
@@ -387,7 +385,7 @@ def raise_if_unsat(self):
|
||||
"""
|
||||
Raise an appropriate error if the result is unsatisfiable.
|
||||
|
||||
The error is a UnsatisfiableSpecError, and includes the minimized cores
|
||||
The error is an InternalConcretizerError, and includes the minimized cores
|
||||
resulting from the solve, formatted to be human readable.
|
||||
"""
|
||||
if self.satisfiable:
|
||||
@@ -397,12 +395,8 @@ def raise_if_unsat(self):
|
||||
if len(constraints) == 1:
|
||||
constraints = constraints[0]
|
||||
|
||||
if minimize_cores:
|
||||
conflicts = self.format_minimal_cores()
|
||||
else:
|
||||
conflicts = self.format_cores()
|
||||
|
||||
raise UnsatisfiableSpecError(constraints, conflicts=conflicts)
|
||||
conflicts = self.format_minimal_cores()
|
||||
raise InternalConcretizerError(constraints, conflicts=conflicts)
|
||||
|
||||
@property
|
||||
def specs(self):
|
||||
@@ -502,13 +496,11 @@ def h2(self, name):
|
||||
def newline(self):
|
||||
self.out.write('\n')
|
||||
|
||||
def fact(self, head, assumption=False):
|
||||
def fact(self, head):
|
||||
"""ASP fact (a rule without a body).
|
||||
|
||||
Arguments:
|
||||
head (AspFunction): ASP function to generate as fact
|
||||
assumption (bool): If True and using cores, use this fact as a
|
||||
choice point in ASP and include it in unsatisfiable cores
|
||||
"""
|
||||
symbol = head.symbol() if hasattr(head, 'symbol') else head
|
||||
|
||||
@@ -516,10 +508,9 @@ def fact(self, head, assumption=False):
|
||||
|
||||
atom = self.backend.add_atom(symbol)
|
||||
|
||||
# with `--show-cores=full or --show-cores=minimized, make all facts
|
||||
# choices/assumptions, otherwise only if assumption=True
|
||||
choice = self.cores and (full_cores or assumption)
|
||||
|
||||
# Only functions relevant for constructing bug reports for bad error messages
|
||||
# are assumptions, and only when using cores.
|
||||
choice = self.cores and symbol.name == 'internal_error'
|
||||
self.backend.add_rule([atom], [], choice=choice)
|
||||
if choice:
|
||||
self.assumptions.append(atom)
|
||||
@@ -577,9 +568,10 @@ def visit(node):
|
||||
for term in node.body:
|
||||
if ast_type(term) == ASTType.Literal:
|
||||
if ast_type(term.atom) == ASTType.SymbolicAtom:
|
||||
if ast_sym(term.atom).name == "error":
|
||||
name = ast_sym(term.atom).name
|
||||
if name == 'internal_error':
|
||||
arg = ast_sym(ast_sym(term.atom).arguments[0])
|
||||
self.fact(fn.error(arg.string), assumption=True)
|
||||
self.fact(AspFunction(name)(arg.string))
|
||||
|
||||
path = os.path.join(parent_dir, 'concretize.lp')
|
||||
parse_files([path], visit)
|
||||
@@ -696,15 +688,12 @@ def __init__(self, reuse=False, tests=False):
|
||||
def pkg_version_rules(self, pkg):
|
||||
"""Output declared versions of a package.
|
||||
|
||||
This uses self.possible_versions so that we include any versions
|
||||
This uses self.declared_versions so that we include any versions
|
||||
that arise from a spec.
|
||||
"""
|
||||
def key_fn(version):
|
||||
# Origins are sorted by order of importance:
|
||||
# 1. Spec from command line
|
||||
# 2. Externals
|
||||
# 3. Package preferences
|
||||
# 4. Directives in package.py
|
||||
# Origins are sorted by precedence defined in `version_origin_str`,
|
||||
# then by order added.
|
||||
return version.origin, version.idx
|
||||
|
||||
pkg = packagize(pkg)
|
||||
@@ -735,7 +724,7 @@ def spec_versions(self, spec):
|
||||
|
||||
# record all version constraints for later
|
||||
self.version_constraints.add((spec.name, spec.versions))
|
||||
return [fn.version_satisfies(spec.name, spec.versions)]
|
||||
return [fn.node_version_satisfies(spec.name, spec.versions)]
|
||||
|
||||
def target_ranges(self, spec, single_target_fn):
|
||||
target = spec.architecture.target
|
||||
@@ -748,13 +737,24 @@ def target_ranges(self, spec, single_target_fn):
|
||||
return [fn.node_target_satisfies(spec.name, target)]
|
||||
|
||||
def conflict_rules(self, pkg):
|
||||
default_msg = "{0} '{1}' conflicts with '{2}'"
|
||||
no_constraint_msg = "{0} conflicts with '{1}'"
|
||||
for trigger, constraints in pkg.conflicts.items():
|
||||
trigger_id = self.condition(spack.spec.Spec(trigger), name=pkg.name)
|
||||
self.gen.fact(fn.conflict_trigger(trigger_id))
|
||||
trigger_msg = "conflict trigger %s" % str(trigger)
|
||||
trigger_id = self.condition(
|
||||
spack.spec.Spec(trigger), name=pkg.name, msg=trigger_msg)
|
||||
|
||||
for constraint, _ in constraints:
|
||||
constraint_id = self.condition(constraint, name=pkg.name)
|
||||
self.gen.fact(fn.conflict(pkg.name, trigger_id, constraint_id))
|
||||
for constraint, conflict_msg in constraints:
|
||||
if conflict_msg is None:
|
||||
if constraint == spack.spec.Spec():
|
||||
conflict_msg = no_constraint_msg.format(pkg.name, trigger)
|
||||
else:
|
||||
conflict_msg = default_msg.format(pkg.name, trigger, constraint)
|
||||
constraint_msg = "conflict constraint %s" % str(constraint)
|
||||
constraint_id = self.condition(
|
||||
constraint, name=pkg.name, msg=constraint_msg)
|
||||
self.gen.fact(
|
||||
fn.conflict(pkg.name, trigger_id, constraint_id, conflict_msg))
|
||||
self.gen.newline()
|
||||
|
||||
def available_compilers(self):
|
||||
@@ -838,9 +838,18 @@ def pkg_rules(self, pkg, tests):
|
||||
for name, entry in sorted(pkg.variants.items()):
|
||||
variant, when = entry
|
||||
|
||||
for w in when:
|
||||
cond_id = self.condition(w, name=pkg.name)
|
||||
self.gen.fact(fn.variant_condition(cond_id, pkg.name, name))
|
||||
if spack.spec.Spec() in when:
|
||||
# unconditional variant
|
||||
self.gen.fact(fn.variant(pkg.name, name))
|
||||
else:
|
||||
# conditional variant
|
||||
for w in when:
|
||||
msg = "%s has variant %s" % (pkg.name, name)
|
||||
if str(w):
|
||||
msg += " when %s" % w
|
||||
|
||||
cond_id = self.condition(w, name=pkg.name, msg=msg)
|
||||
self.gen.fact(fn.variant_condition(cond_id, pkg.name, name))
|
||||
|
||||
single_value = not variant.multi
|
||||
if single_value:
|
||||
@@ -883,7 +892,9 @@ def pkg_rules(self, pkg, tests):
|
||||
imposed = spack.spec.Spec(value.when)
|
||||
imposed.name = pkg.name
|
||||
self.condition(
|
||||
required_spec=required, imposed_spec=imposed, name=pkg.name
|
||||
required_spec=required, imposed_spec=imposed, name=pkg.name,
|
||||
msg="%s variant %s value %s when %s" % (
|
||||
pkg.name, name, value, when)
|
||||
)
|
||||
|
||||
if variant.sticky:
|
||||
@@ -911,7 +922,7 @@ def pkg_rules(self, pkg, tests):
|
||||
)
|
||||
)
|
||||
|
||||
def condition(self, required_spec, imposed_spec=None, name=None):
|
||||
def condition(self, required_spec, imposed_spec=None, name=None, msg=None):
|
||||
"""Generate facts for a dependency or virtual provider condition.
|
||||
|
||||
Arguments:
|
||||
@@ -920,7 +931,7 @@ def condition(self, required_spec, imposed_spec=None, name=None):
|
||||
are imposed when this condition is triggered
|
||||
name (str or None): name for `required_spec` (required if
|
||||
required_spec is anonymous, ignored if not)
|
||||
|
||||
msg (str or None): description of the condition
|
||||
Returns:
|
||||
int: id of the condition created by this function
|
||||
"""
|
||||
@@ -929,7 +940,7 @@ def condition(self, required_spec, imposed_spec=None, name=None):
|
||||
assert named_cond.name, "must provide name for anonymous conditions!"
|
||||
|
||||
condition_id = next(self._condition_id_counter)
|
||||
self.gen.fact(fn.condition(condition_id))
|
||||
self.gen.fact(fn.condition(condition_id, msg))
|
||||
|
||||
# requirements trigger the condition
|
||||
requirements = self.spec_clauses(
|
||||
@@ -961,7 +972,8 @@ def package_provider_rules(self, pkg):
|
||||
|
||||
for provided, whens in pkg.provided.items():
|
||||
for when in whens:
|
||||
condition_id = self.condition(when, provided, pkg.name)
|
||||
msg = '%s provides %s when %s' % (pkg.name, provided, when)
|
||||
condition_id = self.condition(when, provided, pkg.name, msg)
|
||||
self.gen.fact(fn.provider_condition(
|
||||
condition_id, when.name, provided.name
|
||||
))
|
||||
@@ -985,7 +997,11 @@ def package_dependencies_rules(self, pkg):
|
||||
if not deptypes:
|
||||
continue
|
||||
|
||||
condition_id = self.condition(cond, dep.spec, pkg.name)
|
||||
msg = '%s depends on %s' % (pkg.name, dep.spec.name)
|
||||
if cond != spack.spec.Spec():
|
||||
msg += ' when %s' % cond
|
||||
|
||||
condition_id = self.condition(cond, dep.spec, pkg.name, msg)
|
||||
self.gen.fact(fn.dependency_condition(
|
||||
condition_id, pkg.name, dep.spec.name
|
||||
))
|
||||
@@ -1065,7 +1081,8 @@ def external_packages(self):
|
||||
|
||||
# Declare external conditions with a local index into packages.yaml
|
||||
for local_idx, spec in enumerate(external_specs):
|
||||
condition_id = self.condition(spec)
|
||||
msg = '%s available as external when satisfying %s' % (spec.name, spec)
|
||||
condition_id = self.condition(spec, msg=msg)
|
||||
self.gen.fact(
|
||||
fn.possible_external(condition_id, pkg_name, local_idx)
|
||||
)
|
||||
@@ -1147,7 +1164,14 @@ def spec_clauses(self, *args, **kwargs):
|
||||
raise RuntimeError(msg)
|
||||
return clauses
|
||||
|
||||
def _spec_clauses(self, spec, body=False, transitive=True, expand_hashes=False):
|
||||
def _spec_clauses(
|
||||
self,
|
||||
spec,
|
||||
body=False,
|
||||
transitive=True,
|
||||
expand_hashes=False,
|
||||
concrete_build_deps=False,
|
||||
):
|
||||
"""Return a list of clauses for a spec mandates are true.
|
||||
|
||||
Arguments:
|
||||
@@ -1158,6 +1182,8 @@ def _spec_clauses(self, spec, body=False, transitive=True, expand_hashes=False):
|
||||
dependencies (default True)
|
||||
expand_hashes (bool): if True, descend into hashes of concrete specs
|
||||
(default False)
|
||||
concrete_build_deps (bool): if False, do not include pure build deps
|
||||
of concrete specs (as they have no effect on runtime constraints)
|
||||
|
||||
Normally, if called with ``transitive=True``, ``spec_clauses()`` just generates
|
||||
hashes for the dependency requirements of concrete specs. If ``expand_hashes``
|
||||
@@ -1265,18 +1291,34 @@ class Body(object):
|
||||
|
||||
# add all clauses from dependencies
|
||||
if transitive:
|
||||
if spec.concrete:
|
||||
# TODO: We need to distinguish 2 specs from the same package later
|
||||
for edge in spec.edges_to_dependencies():
|
||||
for dtype in edge.deptypes:
|
||||
clauses.append(fn.depends_on(spec.name, edge.spec.name, dtype))
|
||||
# TODO: Eventually distinguish 2 deps on the same pkg (build and link)
|
||||
for dspec in spec.edges_to_dependencies():
|
||||
dep = dspec.spec
|
||||
|
||||
for dep in spec.traverse(root=False):
|
||||
if spec.concrete:
|
||||
clauses.append(fn.hash(dep.name, dep.dag_hash()))
|
||||
# We know dependencies are real for concrete specs. For abstract
|
||||
# specs they just mean the dep is somehow in the DAG.
|
||||
for dtype in dspec.deptypes:
|
||||
# skip build dependencies of already-installed specs
|
||||
if concrete_build_deps or dtype != "build":
|
||||
clauses.append(fn.depends_on(spec.name, dep.name, dtype))
|
||||
|
||||
# imposing hash constraints for all but pure build deps of
|
||||
# already-installed concrete specs.
|
||||
if concrete_build_deps or dspec.deptypes != ("build",):
|
||||
clauses.append(fn.hash(dep.name, dep.dag_hash()))
|
||||
|
||||
# if the spec is abstract, descend into dependencies.
|
||||
# if it's concrete, then the hashes above take care of dependency
|
||||
# constraints, but expand the hashes if asked for.
|
||||
if not spec.concrete or expand_hashes:
|
||||
clauses.extend(
|
||||
self._spec_clauses(dep, body, transitive=False)
|
||||
self._spec_clauses(
|
||||
dep,
|
||||
body=body,
|
||||
expand_hashes=expand_hashes,
|
||||
concrete_build_deps=concrete_build_deps,
|
||||
)
|
||||
)
|
||||
|
||||
return clauses
|
||||
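# Illustrative sketch (hypothetical specs, not from this changeset): for a
# concrete parent with a pure build dependency on 'ninja' and a link
# dependency on 'zlib', the loop above emits
#     depends_on(<parent>, "zlib", "link") and hash("zlib", <zlib dag hash>)
# but, with concrete_build_deps left at False, no depends_on or hash clause
# for 'ninja', since pure build deps of already-installed specs do not
# constrain the solve.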
@@ -1411,23 +1453,48 @@ def target_defaults(self, specs):
|
||||
|
||||
self.gen.h2('Target compatibility')
|
||||
|
||||
compatible_targets = [uarch] + uarch.ancestors
|
||||
additional_targets_in_family = sorted([
|
||||
t for t in archspec.cpu.TARGETS.values()
|
||||
if (t.family.name == uarch.family.name and
|
||||
t not in compatible_targets)
|
||||
], key=lambda x: len(x.ancestors), reverse=True)
|
||||
compatible_targets += additional_targets_in_family
|
||||
# Construct the list of targets which are compatible with the host
|
||||
candidate_targets = [uarch] + uarch.ancestors
|
||||
|
||||
# Get configuration options
|
||||
granularity = spack.config.get('concretizer:targets:granularity')
|
||||
host_compatible = spack.config.get('concretizer:targets:host_compatible')
|
||||
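# Illustrative sketch (hypothetical user settings, not part of this changeset):
# with a concretizer.yaml along the lines of
#
#     concretizer:
#       targets:
#         host_compatible: false
#         granularity: generic
#
# the two lookups above return 'generic' and False, so the code below first
# admits other microarchitectures of the host's family that the host itself
# cannot run, then trims the candidates down to generic family-level targets.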
|
||||
# Add targets which are not compatible with the current host
|
||||
if not host_compatible:
|
||||
additional_targets_in_family = sorted([
|
||||
t for t in archspec.cpu.TARGETS.values()
|
||||
if (t.family.name == uarch.family.name and
|
||||
t not in candidate_targets)
|
||||
], key=lambda x: len(x.ancestors), reverse=True)
|
||||
candidate_targets += additional_targets_in_family
|
||||
|
||||
# Check if we want only generic architecture
|
||||
if granularity == 'generic':
|
||||
candidate_targets = [t for t in candidate_targets if t.vendor == 'generic']
|
||||
|
||||
compilers = self.possible_compilers
|
||||
|
||||
# this loop can be used to limit the number of targets
|
||||
# considered. Right now we consider them all, but it seems that
|
||||
# many targets can make things slow.
|
||||
# TODO: investigate this.
|
||||
# Add targets explicitly requested from specs
|
||||
for spec in specs:
|
||||
if not spec.architecture or not spec.architecture.target:
|
||||
continue
|
||||
|
||||
target = archspec.cpu.TARGETS.get(spec.target.name)
|
||||
if not target:
|
||||
self.target_ranges(spec, None)
|
||||
continue
|
||||
|
||||
if target not in candidate_targets and not host_compatible:
|
||||
candidate_targets.append(target)
|
||||
for ancestor in target.ancestors:
|
||||
if ancestor not in candidate_targets:
|
||||
candidate_targets.append(ancestor)
|
||||
|
||||
best_targets = set([uarch.family.name])
|
||||
for compiler in sorted(compilers):
|
||||
supported = self._supported_targets(
|
||||
compiler.name, compiler.version, compatible_targets
|
||||
compiler.name, compiler.version, candidate_targets
|
||||
)
|
||||
|
||||
# If we can't find supported targets it may be due to custom
|
||||
@@ -1440,7 +1507,7 @@ def target_defaults(self, specs):
|
||||
supported = self._supported_targets(
|
||||
compiler.name,
|
||||
compiler_obj.real_version,
|
||||
compatible_targets
|
||||
candidate_targets
|
||||
)
|
||||
|
||||
if not supported:
|
||||
@@ -1456,21 +1523,8 @@ def target_defaults(self, specs):
|
||||
compiler.name, compiler.version, uarch.family.name
|
||||
))
|
||||
|
||||
# add any targets explicitly mentioned in specs
|
||||
for spec in specs:
|
||||
if not spec.architecture or not spec.architecture.target:
|
||||
continue
|
||||
|
||||
target = archspec.cpu.TARGETS.get(spec.target.name)
|
||||
if not target:
|
||||
self.target_ranges(spec, None)
|
||||
continue
|
||||
|
||||
if target not in compatible_targets:
|
||||
compatible_targets.append(target)
|
||||
|
||||
i = 0
|
||||
for target in compatible_targets:
|
||||
for target in candidate_targets:
|
||||
self.gen.fact(fn.target(target.name))
|
||||
self.gen.fact(fn.target_family(target.name, target.family.name))
|
||||
for parent in sorted(target.parents):
|
||||
@@ -1512,9 +1566,12 @@ def generate_possible_compilers(self, specs):
|
||||
continue
|
||||
|
||||
if strict and s.compiler not in cspecs:
|
||||
raise spack.concretize.UnavailableCompilerVersionError(
|
||||
s.compiler
|
||||
)
|
||||
if not s.concrete:
|
||||
raise spack.concretize.UnavailableCompilerVersionError(
|
||||
s.compiler
|
||||
)
|
||||
# Allow unknown compilers to exist if the associated spec
|
||||
# is already built
|
||||
else:
|
||||
cspecs.add(s.compiler)
|
||||
self.gen.fact(fn.allow_compiler(
|
||||
@@ -1645,6 +1702,12 @@ def _facts_from_concrete_spec(self, spec, possible):
|
||||
# be dependencies (don't tell it about the others)
|
||||
h = spec.dag_hash()
|
||||
if spec.name in possible and h not in self.seen_hashes:
|
||||
try:
|
||||
# Only consider installed packages for repo we know
|
||||
spack.repo.path.get(spec)
|
||||
except (spack.repo.UnknownNamespaceError, spack.repo.UnknownPackageError):
|
||||
return
|
||||
|
||||
# this indicates that there is a spec like this installed
|
||||
self.gen.fact(fn.installed_hash(spec.name, h))
|
||||
|
||||
@@ -1652,8 +1715,16 @@ def _facts_from_concrete_spec(self, spec, possible):
|
||||
self.impose(h, spec, body=True)
|
||||
self.gen.newline()
|
||||
|
||||
# add OS to possible OS's
|
||||
# Declare as possible parts of specs that are not in package.py
|
||||
# - Add versions to possible versions
|
||||
# - Add OS to possible OS's
|
||||
for dep in spec.traverse():
|
||||
self.possible_versions[dep.name].add(dep.version)
|
||||
self.declared_versions[dep.name].append(DeclaredVersion(
|
||||
version=dep.version,
|
||||
idx=0,
|
||||
origin=version_provenance.installed
|
||||
))
|
||||
self.possible_oses.add(dep.os)
|
||||
|
||||
# add the hash to the one seen so far
|
||||
@@ -1718,7 +1789,7 @@ def setup(self, driver, specs):
|
||||
|
||||
# Fail if we already know an unreachable node is requested
|
||||
for spec in specs:
|
||||
missing_deps = [d for d in spec.traverse()
|
||||
missing_deps = [str(d) for d in spec.traverse()
|
||||
if d.name not in possible and not d.virtual]
|
||||
if missing_deps:
|
||||
raise spack.spec.InvalidDependencyError(spec.name, missing_deps)
|
||||
@@ -1781,12 +1852,14 @@ def setup(self, driver, specs):
|
||||
fn.virtual_root(spec.name) if spec.virtual
|
||||
else fn.root(spec.name)
|
||||
)
|
||||
|
||||
for clause in self.spec_clauses(spec):
|
||||
self.gen.fact(clause)
|
||||
if clause.name == 'variant_set':
|
||||
self.gen.fact(fn.variant_default_value_from_cli(
|
||||
*clause.args
|
||||
))
|
||||
self.gen.fact(
|
||||
fn.variant_default_value_from_cli(*clause.args)
|
||||
)
|
||||
|
||||
self.gen.h1("Variant Values defined in specs")
|
||||
self.define_variant_values()
|
||||
|
||||
@@ -1809,6 +1882,7 @@ class SpecBuilder(object):
|
||||
ignored_attributes = ["opt_criterion"]
|
||||
|
||||
def __init__(self, specs):
|
||||
self._specs = {}
|
||||
self._result = None
|
||||
self._command_line_specs = specs
|
||||
self._flag_sources = collections.defaultdict(lambda: set())
|
||||
@@ -1861,6 +1935,17 @@ def node_os(self, pkg, os):
|
||||
def node_target(self, pkg, target):
|
||||
self._arch(pkg).target = target
|
||||
|
||||
def error(self, priority, msg, *args):
|
||||
msg = msg.format(*args)
|
||||
|
||||
# For variant formatting, we sometimes have to construct specs
|
||||
# to format values properly. Find/replace all occurrences of
|
||||
# Spec(...) with the string representation of the spec mentioned
|
||||
specs_to_construct = re.findall(r'Spec\(([^)]*)\)', msg)
|
||||
for spec_str in specs_to_construct:
|
||||
msg = msg.replace('Spec(%s)' % spec_str, str(spack.spec.Spec(spec_str)))
|
||||
raise UnsatisfiableSpecError(msg)
|
||||
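# Illustrative sketch (hypothetical message, not from this changeset): an ASP
# fact such as error(2, "cannot satisfy Spec({0})", "zlib@1.2.11 +shared")
# reaches this method as msg = 'cannot satisfy Spec(zlib@1.2.11 +shared)';
# the loop above rewrites it to the spec's canonical string form, roughly
# 'cannot satisfy zlib@1.2.11+shared', before the exception is raised.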
|
||||
def variant_value(self, pkg, name, value):
|
||||
# FIXME: is there a way not to special case 'dev_path' everywhere?
|
||||
if name == 'dev_path':
|
||||
@@ -1983,15 +2068,27 @@ def deprecated(self, pkg, version):
|
||||
msg = 'using "{0}@{1}" which is a deprecated version'
|
||||
tty.warn(msg.format(pkg, version))
|
||||
|
||||
@staticmethod
|
||||
def sort_fn(function_tuple):
|
||||
name = function_tuple[0]
|
||||
if name == 'error':
|
||||
priority = function_tuple[1][0]
|
||||
return (-4, priority)
|
||||
elif name == 'hash':
|
||||
return (-3, 0)
|
||||
elif name == 'node':
|
||||
return (-2, 0)
|
||||
elif name == 'node_compiler':
|
||||
return (-1, 0)
|
||||
else:
|
||||
return (0, 0)
|
||||
|
||||
def build_specs(self, function_tuples):
|
||||
# Functions don't seem to be in particular order in output. Sort
|
||||
# them here so that directives that build objects (like node and
|
||||
# node_compiler) are called in the right order.
|
||||
function_tuples.sort(key=lambda f: {
|
||||
"hash": -3,
|
||||
"node": -2,
|
||||
"node_compiler": -1,
|
||||
}.get(f[0], 0))
|
||||
self.function_tuples = function_tuples
|
||||
self.function_tuples.sort(key=self.sort_fn)
|
||||
|
||||
self._specs = {}
|
||||
for name, args in function_tuples:
|
||||
@@ -1999,7 +2096,6 @@ def build_specs(self, function_tuples):
|
||||
continue
|
||||
|
||||
action = getattr(self, name, None)
|
||||
|
||||
# print out unknown actions so we can display them for debugging
|
||||
if not action:
|
||||
msg = "%s(%s)" % (name, ", ".join(str(a) for a in args))
|
||||
@@ -2009,22 +2105,26 @@ def build_specs(self, function_tuples):
|
||||
assert action and callable(action)
|
||||
|
||||
# ignore predicates on virtual packages, as they're used for
|
||||
# solving but don't construct anything
|
||||
pkg = args[0]
|
||||
if spack.repo.path.is_virtual(pkg):
|
||||
continue
|
||||
# solving but don't construct anything. Do not ignore error
|
||||
# predicates on virtual packages.
|
||||
if name != 'error':
|
||||
pkg = args[0]
|
||||
if spack.repo.path.is_virtual(pkg):
|
||||
continue
|
||||
|
||||
# if we've already gotten a concrete spec for this pkg,
|
||||
# do not bother calling actions on it.
|
||||
spec = self._specs.get(pkg)
|
||||
if spec and spec.concrete:
|
||||
continue
|
||||
# if we've already gotten a concrete spec for this pkg,
|
||||
# do not bother calling actions on it.
|
||||
spec = self._specs.get(pkg)
|
||||
if spec and spec.concrete:
|
||||
continue
|
||||
|
||||
action(*args)
|
||||
|
||||
# namespace assignment is done after the fact, as it is not
|
||||
# currently part of the solve
|
||||
for spec in self._specs.values():
|
||||
if spec.namespace:
|
||||
continue
|
||||
repo = spack.repo.path.repo_for_pkg(spec)
|
||||
spec.namespace = repo.namespace
|
||||
|
||||
@@ -2034,7 +2134,7 @@ def build_specs(self, function_tuples):
|
||||
# inject patches -- note that we' can't use set() to unique the
|
||||
# roots here, because the specs aren't complete, and the hash
|
||||
# function will loop forever.
|
||||
roots = [spec.root for spec in self._specs.values()]
|
||||
roots = [spec.root for spec in self._specs.values() if not spec.root.installed]
|
||||
roots = dict((id(r), r) for r in roots)
|
||||
for root in roots.values():
|
||||
spack.spec.Spec.inject_patches_variant(root)
|
||||
@@ -2140,25 +2240,27 @@ def solve(
|
||||
|
||||
|
||||
class UnsatisfiableSpecError(spack.error.UnsatisfiableSpecError):
|
||||
"""
|
||||
Subclass for new constructor signature for new concretizer
|
||||
"""
|
||||
def __init__(self, msg):
|
||||
super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
|
||||
self.provided = None
|
||||
self.required = None
|
||||
self.constraint_type = None
|
||||
|
||||
|
||||
class InternalConcretizerError(spack.error.UnsatisfiableSpecError):
|
||||
"""
|
||||
Subclass for new constructor signature for new concretizer
|
||||
"""
|
||||
def __init__(self, provided, conflicts):
|
||||
indented = [' %s\n' % conflict for conflict in conflicts]
|
||||
conflict_msg = ''.join(indented)
|
||||
issue = 'conflicts' if full_cores else 'errors'
|
||||
msg = '%s is unsatisfiable, %s are:\n%s' % (provided, issue, conflict_msg)
|
||||
|
||||
newline_indent = '\n '
|
||||
if not full_cores:
|
||||
msg += newline_indent + 'To see full clingo unsat cores, '
|
||||
msg += 're-run with `spack --show-cores=full`'
|
||||
if not minimize_cores or not full_cores:
|
||||
# not solver.minimalize_cores and not solver.full_cores impossible
|
||||
msg += newline_indent + 'For full, subset-minimal unsat cores, '
|
||||
msg += 're-run with `spack --show-cores=minimized'
|
||||
msg += newline_indent
|
||||
msg += 'Warning: This may take (up to) hours for some specs'
|
||||
error_msg = ''.join(indented)
|
||||
msg = 'Spack concretizer internal error. Please submit a bug report'
|
||||
msg += '\n Please include the command, environment if applicable,'
|
||||
msg += '\n and the following error message.'
|
||||
msg = '\n %s is unsatisfiable, errors are:\n%s' % (provided, error_msg)
|
||||
|
||||
super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
|
||||
|
||||
|
||||
@@ -7,22 +7,6 @@
|
||||
% This logic program implements Spack's concretizer
|
||||
%=============================================================================
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
% Generic constraints on nodes
|
||||
%-----------------------------------------------------------------------------
|
||||
|
||||
% each node must have a single version
|
||||
:- not 1 { version(Package, _) } 1, node(Package).
|
||||
|
||||
% each node must have a single platform, os and target
|
||||
:- not 1 { node_platform(Package, _) } 1, node(Package), error("A node must have exactly one platform").
|
||||
:- not 1 { node_os(Package, _) } 1, node(Package).
|
||||
:- not 1 { node_target(Package, _) } 1, node(Package).
|
||||
|
||||
% each node has a single compiler associated with it
|
||||
:- not 1 { node_compiler(Package, _) } 1, node(Package).
|
||||
:- not 1 { node_compiler_version(Package, _, _) } 1, node(Package).
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
% Version semantics
|
||||
%-----------------------------------------------------------------------------
|
||||
@@ -35,18 +19,40 @@ version_declared(Package, Version, Weight) :- version_declared(Package, Version,
|
||||
:- version_declared(Package, Version, Weight, Origin1),
|
||||
version_declared(Package, Version, Weight, Origin2),
|
||||
Origin1 < Origin2,
|
||||
error("Internal error: two versions with identical weights").
|
||||
internal_error("Two versions with identical weights").
|
||||
|
||||
% We cannot use a version declared for an installed package if we end up building it
|
||||
:- version_declared(Package, Version, Weight, "installed"),
|
||||
version(Package, Version),
|
||||
version_weight(Package, Weight),
|
||||
not hash(Package, _).
|
||||
|
||||
% versions are declared w/priority -- declared with priority implies declared
|
||||
version_declared(Package, Version) :- version_declared(Package, Version, _).
|
||||
|
||||
% If something is a package, it has only one version and that must be a
|
||||
% declared version.
|
||||
1 { version(Package, Version) : version_declared(Package, Version) } 1
|
||||
:- node(Package), error("Each node must have exactly one version").
|
||||
% We allow clingo to choose any version(s), and infer an error if there
|
||||
% is not precisely one version chosen. Error facts are heavily optimized
|
||||
% against to ensure they cannot be inferred when a non-error solution is
|
||||
% possible
|
||||
{ version(Package, Version) : version_declared(Package, Version) }
|
||||
:- node(Package).
|
||||
error(2, "No version for '{0}' satisfies '@{1}' and '@{2}'", Package, Version1, Version2)
|
||||
:- node(Package),
|
||||
version(Package, Version1),
|
||||
version(Package, Version2),
|
||||
Version1 < Version2. % see[1]
|
||||
|
||||
% A virtual package may have or not a version, but never has more than one
|
||||
:- virtual_node(Package), 2 { version(Package, _) }.
|
||||
error(2, "No versions available for package '{0}'", Package)
|
||||
:- node(Package), not version(Package, _).
|
||||
|
||||
% A virtual package may or may not have a version, but never has more than one
|
||||
error(2, "No version for '{0}' satisfies '@{1}' and '@{2}'", Virtual, Version1, Version2)
|
||||
:- virtual_node(Virtual),
|
||||
version(Virtual, Version1),
|
||||
version(Virtual, Version2),
|
||||
Version1 < Version2. % see[1]
|
||||
|
||||
% If we select a deprecated version, mark the package as deprecated
|
||||
deprecated(Package, Version) :- version(Package, Version), deprecated_version(Package, Version).
|
||||
@@ -55,14 +61,27 @@ possible_version_weight(Package, Weight)
|
||||
:- version(Package, Version),
|
||||
version_declared(Package, Version, Weight).
|
||||
|
||||
1 { version_weight(Package, Weight) : possible_version_weight(Package, Weight) } 1 :- node(Package), error("Internal error: Package version must have a unique weight").
|
||||
version_weight(Package, Weight)
|
||||
:- version(Package, Version),
|
||||
node(Package),
|
||||
Weight = #min{W : version_declared(Package, Version, W)}.
|
||||
|
||||
% version_satisfies implies that exactly one of the satisfying versions
|
||||
% node_version_satisfies implies that exactly one of the satisfying versions
|
||||
% is the package's version, and vice versa.
|
||||
1 { version(Package, Version) : version_satisfies(Package, Constraint, Version) } 1
|
||||
:- version_satisfies(Package, Constraint),
|
||||
error("no version satisfies the given constraints").
|
||||
version_satisfies(Package, Constraint)
|
||||
% While this choice rule appears redundant with the initial choice rule for
|
||||
% versions, virtual nodes with version constraints require this rule to be
|
||||
% able to choose versions
|
||||
{ version(Package, Version) : version_satisfies(Package, Constraint, Version) }
|
||||
:- node_version_satisfies(Package, Constraint).
|
||||
|
||||
% More specific error message if the version cannot satisfy some constraint
|
||||
% Otherwise covered by `no_version_error` and `versions_conflict_error`.
|
||||
error(1, "No valid version for '{0}' satisfies '@{1}'", Package, Constraint)
|
||||
:- node_version_satisfies(Package, Constraint),
|
||||
C = #count{ Version : version(Package, Version), version_satisfies(Package, Constraint, Version)},
|
||||
C < 1.
|
||||
|
||||
node_version_satisfies(Package, Constraint)
|
||||
:- version(Package, Version), version_satisfies(Package, Constraint, Version).
|
||||
|
||||
#defined version_satisfies/3.
|
||||
@@ -81,7 +100,7 @@ version_satisfies(Package, Constraint)
|
||||
% conditions are specified with `condition_requirement` and hold when
|
||||
% corresponding spec attributes hold.
|
||||
condition_holds(ID) :-
|
||||
condition(ID);
|
||||
condition(ID, _);
|
||||
attr(Name, A1) : condition_requirement(ID, Name, A1);
|
||||
attr(Name, A1, A2) : condition_requirement(ID, Name, A1, A2);
|
||||
attr(Name, A1, A2, A3) : condition_requirement(ID, Name, A1, A2, A3).
|
||||
@@ -100,7 +119,7 @@ attr(Name, A1, A2, A3) :- impose(ID), imposed_constraint(ID, Name, A1, A2, A3).
|
||||
variant_value(Package, Variant, Value),
|
||||
not imposed_constraint(Hash, "variant_value", Package, Variant, Value).
|
||||
|
||||
#defined condition/1.
|
||||
#defined condition/2.
|
||||
#defined condition_requirement/3.
|
||||
#defined condition_requirement/4.
|
||||
#defined condition_requirement/5.
|
||||
@@ -127,9 +146,7 @@ depends_on(Package, Dependency) :- depends_on(Package, Dependency, _).
|
||||
dependency_holds(Package, Dependency, Type) :-
|
||||
dependency_condition(ID, Package, Dependency),
|
||||
dependency_type(ID, Type),
|
||||
condition_holds(ID),
|
||||
build(Package),
|
||||
not external(Package).
|
||||
condition_holds(ID).
|
||||
|
||||
% We cut off dependencies of externals (as we don't really know them).
|
||||
% Don't impose constraints on dependencies that don't exist.
|
||||
@@ -155,17 +172,18 @@ node(Dependency) :- node(Package), depends_on(Package, Dependency).
|
||||
% dependencies) and get a two-node unconnected graph
|
||||
needed(Package) :- root(Package).
|
||||
needed(Dependency) :- needed(Package), depends_on(Package, Dependency).
|
||||
:- node(Package), not needed(Package),
|
||||
error("All dependencies must be reachable from root").
|
||||
error(1, "'{0}' is not a valid dependency for any package in the DAG", Package)
|
||||
:- node(Package),
|
||||
not needed(Package).
|
||||
|
||||
% Avoid cycles in the DAG
|
||||
% some combinations of conditional dependencies can result in cycles;
|
||||
% this ensures that we solve around them
|
||||
path(Parent, Child) :- depends_on(Parent, Child).
|
||||
path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
|
||||
:- path(A, B), path(B, A), error("Cyclic dependencies are not allowed").
|
||||
|
||||
#defined error/1.
|
||||
error(2, "Cyclic dependency detected between '{0}' and '{1}'\n Consider changing variants to avoid the cycle", A, B)
|
||||
:- path(A, B),
|
||||
path(B, A).
|
||||
|
||||
#defined dependency_type/2.
|
||||
#defined dependency_condition/3.
|
||||
@@ -173,14 +191,13 @@ path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
|
||||
%-----------------------------------------------------------------------------
|
||||
% Conflicts
|
||||
%-----------------------------------------------------------------------------
|
||||
:- node(Package),
|
||||
conflict(Package, TriggerID, ConstraintID),
|
||||
error(0, Msg) :- node(Package),
|
||||
conflict(Package, TriggerID, ConstraintID, Msg),
|
||||
condition_holds(TriggerID),
|
||||
condition_holds(ConstraintID),
|
||||
not external(Package), % ignore conflicts for externals
|
||||
error("A conflict was triggered").
|
||||
not external(Package). % ignore conflicts for externals
|
||||
|
||||
#defined conflict/3.
|
||||
#defined conflict/4.
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
% Virtual dependencies
|
||||
@@ -200,8 +217,17 @@ virtual_node(Virtual)
|
||||
|
||||
% If there's a virtual node, we must select one and only one provider.
|
||||
% The provider must be selected among the possible providers.
|
||||
1 { provider(Package, Virtual) : possible_provider(Package, Virtual) } 1
|
||||
:- virtual_node(Virtual), error("Virtual packages must be satisfied by a unique provider").
|
||||
{ provider(Package, Virtual) : possible_provider(Package, Virtual) }
|
||||
:- virtual_node(Virtual).
|
||||
error(2, "Cannot find valid provider for virtual {0}", Virtual)
|
||||
:- virtual_node(Virtual),
|
||||
P = #count{ Package : provider(Package, Virtual)},
|
||||
P < 1.
|
||||
error(2, "Spec cannot include multiple providers for virtual '{0}'\n Requested '{1}' and '{2}'", Virtual, P1, P2)
|
||||
:- virtual_node(Virtual),
|
||||
provider(P1, Virtual),
|
||||
provider(P2, Virtual),
|
||||
P1 < P2.
|
||||
|
||||
% virtual roots imply virtual nodes, and that one provider is a root
|
||||
virtual_node(Virtual) :- virtual_root(Virtual).
|
||||
@@ -226,7 +252,7 @@ virtual_condition_holds(Provider, Virtual) :-
|
||||
% A package cannot be the actual provider for a virtual if it does not
|
||||
% fulfill the conditions to provide that virtual
|
||||
:- provider(Package, Virtual), not virtual_condition_holds(Package, Virtual),
|
||||
error("Internal error: virtual when provides not respected").
|
||||
internal_error("Virtual when provides not respected").
|
||||
|
||||
#defined possible_provider/2.
|
||||
|
||||
@@ -239,7 +265,7 @@ virtual_condition_holds(Provider, Virtual) :-
|
||||
% we select the weight, among the possible ones, that minimizes the overall objective function.
|
||||
1 { provider_weight(Dependency, Virtual, Weight, Reason) :
|
||||
possible_provider_weight(Dependency, Virtual, Weight, Reason) } 1
|
||||
:- provider(Dependency, Virtual), error("Internal error: package provider weights must be unique").
|
||||
:- provider(Dependency, Virtual), internal_error("Package provider weights must be unique").
|
||||
|
||||
% Get rid of the reason for enabling the possible weight (useful for debugging)
|
||||
provider_weight(Dependency, Virtual, Weight) :- provider_weight(Dependency, Virtual, Weight, _).
|
||||
@@ -282,9 +308,10 @@ possible_provider_weight(Dependency, Virtual, 100, "fallback") :- provider(Depen
|
||||
% These allow us to easily define conditional dependency and conflict rules
|
||||
% without enumerating all spec attributes every time.
|
||||
node(Package) :- attr("node", Package).
|
||||
virtual_node(Virtual) :- attr("virtual_node", Virtual).
|
||||
hash(Package, Hash) :- attr("hash", Package, Hash).
|
||||
version(Package, Version) :- attr("version", Package, Version).
|
||||
version_satisfies(Package, Constraint) :- attr("version_satisfies", Package, Constraint).
|
||||
node_version_satisfies(Package, Constraint) :- attr("node_version_satisfies", Package, Constraint).
|
||||
node_platform(Package, Platform) :- attr("node_platform", Package, Platform).
|
||||
node_os(Package, OS) :- attr("node_os", Package, OS).
|
||||
node_target(Package, Target) :- attr("node_target", Package, Target).
|
||||
@@ -300,9 +327,10 @@ node_compiler_version_satisfies(Package, Compiler, Version)
|
||||
:- attr("node_compiler_version_satisfies", Package, Compiler, Version).
|
||||
|
||||
attr("node", Package) :- node(Package).
|
||||
attr("virtual_node", Virtual) :- virtual_node(Virtual).
|
||||
attr("hash", Package, Hash) :- hash(Package, Hash).
|
||||
attr("version", Package, Version) :- version(Package, Version).
|
||||
attr("version_satisfies", Package, Constraint) :- version_satisfies(Package, Constraint).
|
||||
attr("node_version_satisfies", Package, Constraint) :- node_version_satisfies(Package, Constraint).
|
||||
attr("node_platform", Package, Platform) :- node_platform(Package, Platform).
|
||||
attr("node_os", Package, OS) :- node_os(Package, OS).
|
||||
attr("node_target", Package, Target) :- node_target(Package, Target).
|
||||
@@ -330,7 +358,7 @@ attr("node_compiler_version_satisfies", Package, Compiler, Version)
|
||||
#defined external_only/1.
|
||||
#defined pkg_provider_preference/4.
|
||||
#defined default_provider_preference/3.
|
||||
#defined version_satisfies/2.
|
||||
#defined node_version_satisfies/2.
|
||||
#defined node_compiler_version_satisfies/3.
|
||||
#defined root/1.
|
||||
|
||||
@@ -339,9 +367,17 @@ attr("node_compiler_version_satisfies", Package, Compiler, Version)
|
||||
%-----------------------------------------------------------------------------
|
||||
|
||||
% if a package is external its version must be one of the external versions
|
||||
1 { external_version(Package, Version, Weight):
|
||||
version_declared(Package, Version, Weight, "external") } 1
|
||||
:- external(Package), error("External package version does not satisfy external spec").
|
||||
{ external_version(Package, Version, Weight):
|
||||
version_declared(Package, Version, Weight, "external") }
|
||||
:- external(Package).
|
||||
error(2, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
|
||||
:- external(Package),
|
||||
not external_version(Package, _, _).
|
||||
error(2, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
|
||||
:- external(Package),
|
||||
external_version(Package, Version1, Weight1),
|
||||
external_version(Package, Version2, Weight2),
|
||||
(Version1, Weight1) < (Version2, Weight2). % see[1]
|
||||
|
||||
version_weight(Package, Weight) :- external_version(Package, Version, Weight).
|
||||
version(Package, Version) :- external_version(Package, Version, Weight).
|
||||
@@ -361,7 +397,7 @@ external(Package) :- external_spec_selected(Package, _).
|
||||
version_weight(Package, Weight),
|
||||
version_declared(Package, Version, Weight, "external"),
|
||||
not external(Package),
|
||||
error("Internal error: external weight used for internal spec").
|
||||
internal_error("External weight used for internal spec").
|
||||
|
||||
% determine if an external spec has been selected
|
||||
external_spec_selected(Package, LocalIndex) :-
|
||||
@@ -373,8 +409,9 @@ external_conditions_hold(Package, LocalIndex) :-
|
||||
|
||||
% it cannot happen that a spec is external, but none of the external specs
|
||||
% conditions hold.
|
||||
:- external(Package), not external_conditions_hold(Package, _),
|
||||
error("External package does not satisfy external spec").
|
||||
error(2, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
|
||||
:- external(Package),
|
||||
not external_conditions_hold(Package, _).
|
||||
|
||||
#defined possible_external/3.
|
||||
#defined external_spec_index/3.
|
||||
@@ -391,16 +428,16 @@ variant(Package, Variant) :- variant_condition(ID, Package, Variant),
|
||||
condition_holds(ID).
|
||||
|
||||
% a variant cannot be set if it is not a variant on the package
|
||||
:- variant_set(Package, Variant),
|
||||
not variant(Package, Variant),
|
||||
build(Package),
|
||||
error("Unsatisfied conditional variants cannot be set").
|
||||
error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Package, Variant)
|
||||
:- variant_set(Package, Variant),
|
||||
not variant(Package, Variant),
|
||||
build(Package).
|
||||
|
||||
% a variant cannot take on a value if it is not a variant of the package
|
||||
:- variant_value(Package, Variant, _),
|
||||
not variant(Package, Variant),
|
||||
build(Package),
|
||||
error("Unsatisfied conditional variants cannot take on a variant value").
|
||||
error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Package, Variant)
|
||||
:- variant_value(Package, Variant, _),
|
||||
not variant(Package, Variant),
|
||||
build(Package).
|
||||
|
||||
% if a variant is sticky and not set its value is the default value
|
||||
variant_value(Package, Variant, Value) :-
|
||||
@@ -410,27 +447,30 @@ variant_value(Package, Variant, Value) :-
|
||||
variant_default_value(Package, Variant, Value),
|
||||
build(Package).
|
||||
|
||||
% one variant value for single-valued variants.
|
||||
1 {
|
||||
% at most one variant value for single-valued variants.
|
||||
{
|
||||
variant_value(Package, Variant, Value)
|
||||
: variant_possible_value(Package, Variant, Value)
|
||||
} 1
|
||||
:- node(Package),
|
||||
variant(Package, Variant),
|
||||
variant_single_value(Package, Variant),
|
||||
build(Package),
|
||||
error("Single valued variants must have a single value").
|
||||
|
||||
% at least one variant value for multi-valued variants.
|
||||
1 {
|
||||
variant_value(Package, Variant, Value)
|
||||
: variant_possible_value(Package, Variant, Value)
|
||||
}
|
||||
:- node(Package),
|
||||
variant(Package, Variant),
|
||||
not variant_single_value(Package, Variant),
|
||||
build(Package),
|
||||
error("Internal error: All variants must have a value").
|
||||
build(Package).
|
||||
|
||||
|
||||
error(2, "'{0}' required multiple values for single-valued variant '{1}'\n Requested 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2)
|
||||
:- node(Package),
|
||||
variant(Package, Variant),
|
||||
variant_single_value(Package, Variant),
|
||||
build(Package),
|
||||
variant_value(Package, Variant, Value1),
|
||||
variant_value(Package, Variant, Value2),
|
||||
Value1 < Value2. % see[1]
|
||||
error(2, "No valid value for variant '{1}' of package '{0}'", Package, Variant)
|
||||
:- node(Package),
|
||||
variant(Package, Variant),
|
||||
build(Package),
|
||||
C = #count{ Value : variant_value(Package, Variant, Value) },
|
||||
C < 1.
|
||||
|
||||
% if a variant is set to anything, it is considered 'set'.
|
||||
variant_set(Package, Variant) :- variant_set(Package, Variant, _).
|
||||
@@ -438,21 +478,21 @@ variant_set(Package, Variant) :- variant_set(Package, Variant, _).
|
||||
% A variant cannot have a value that is not also a possible value
|
||||
% This only applies to packages we need to build -- concrete packages may
|
||||
% have been built w/different variants from older/different package versions.
|
||||
:- variant_value(Package, Variant, Value),
|
||||
not variant_possible_value(Package, Variant, Value),
|
||||
build(Package),
|
||||
error("Variant set to invalid value").
|
||||
error(1, "'Spec({1}={2})' is not a valid value for '{0}' variant '{1}'", Package, Variant, Value)
|
||||
:- variant_value(Package, Variant, Value),
|
||||
not variant_possible_value(Package, Variant, Value),
|
||||
build(Package).
|
||||
|
||||
% Some multi valued variants accept multiple values from disjoint sets.
|
||||
% Ensure that we respect that constraint and we don't pick values from more
|
||||
% than one set at once
|
||||
:- variant_value(Package, Variant, Value1),
|
||||
variant_value(Package, Variant, Value2),
|
||||
variant_value_from_disjoint_sets(Package, Variant, Value1, Set1),
|
||||
variant_value_from_disjoint_sets(Package, Variant, Value2, Set2),
|
||||
Set1 < Set2,
|
||||
build(Package),
|
||||
error("Variant values selected from multiple disjoint sets").
|
||||
error(2, "{0} variant '{1}' cannot have values '{2}' and '{3}' as they come from disjoing value sets", Package, Variant, Value1, Value2)
|
||||
:- variant_value(Package, Variant, Value1),
|
||||
variant_value(Package, Variant, Value2),
|
||||
variant_value_from_disjoint_sets(Package, Variant, Value1, Set1),
|
||||
variant_value_from_disjoint_sets(Package, Variant, Value2, Set2),
|
||||
Set1 < Set2, % see[1]
|
||||
build(Package).
|
||||
|
||||
% variant_set is an explicitly set variant value. If it's not 'set',
|
||||
% we revert to the default value. If it is set, we force the set value
|
||||
@@ -510,12 +550,11 @@ variant_default_value(Package, Variant, Value) :- variant_default_value_from_cli
|
||||
|
||||
% Treat 'none' in a special way - it cannot be combined with other
|
||||
% values even if the variant is multi-valued
|
||||
:- 2 {
|
||||
variant_value(Package, Variant, Value) : variant_possible_value(Package, Variant, Value)
|
||||
},
|
||||
variant_value(Package, Variant, "none"),
|
||||
build(Package),
|
||||
error("Variant value 'none' cannot be combined with any other value").
|
||||
error(2, "{0} variant '{1}' cannot have values '{2}' and 'none'", Package, Variant, Value)
|
||||
:- variant_value(Package, Variant, Value),
|
||||
variant_value(Package, Variant, "none"),
|
||||
Value != "none",
|
||||
build(Package).
|
||||
|
||||
% patches and dev_path are special variants -- they don't have to be
|
||||
% declared in the package, so we just allow them to spring into existence
|
||||
@@ -559,6 +598,18 @@ node_platform(Package, Platform)
|
||||
% platform is set if set to anything
|
||||
node_platform_set(Package) :- node_platform_set(Package, _).
|
||||
|
||||
% each node must have a single platform
|
||||
error(2, "No valid platform found for {0}", Package)
|
||||
:- node(Package),
|
||||
C = #count{ Platform : node_platform(Package, Platform)},
|
||||
C < 1.
|
||||
|
||||
error(2, "Cannot concretize {0} with multiple platforms\n Requested 'platform={1}' and 'platform={2}'", Package, Platform1, Platform2)
|
||||
:- node(Package),
|
||||
node_platform(Package, Platform1),
|
||||
node_platform(Package, Platform2),
|
||||
Platform1 < Platform2. % see[1]
|
||||
|
||||
#defined node_platform_set/2. % avoid warnings
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
@@ -568,20 +619,32 @@ node_platform_set(Package) :- node_platform_set(Package, _).
|
||||
os(OS) :- os(OS, _).
|
||||
|
||||
% one os per node
|
||||
1 { node_os(Package, OS) : os(OS) } 1 :-
|
||||
node(Package), error("Each node must have exactly one OS").
|
||||
{ node_os(Package, OS) : os(OS) } :- node(Package).
|
||||
|
||||
error(2, "Cannot find valid operating system for '{0}'", Package)
|
||||
:- node(Package),
|
||||
C = #count{ OS : node_os(Package, OS)},
|
||||
C < 1.
|
||||
|
||||
error(2, "Cannot concretize {0} with multiple operating systems\n Requested 'os={1}' and 'os={2}'", Package, OS1, OS2)
|
||||
:- node(Package),
|
||||
node_os(Package, OS1),
|
||||
node_os(Package, OS2),
|
||||
OS1 < OS2. % see[1]
|
||||
|
||||
% can't have a non-buildable OS on a node we need to build
|
||||
:- build(Package), node_os(Package, OS), not buildable_os(OS),
|
||||
error("No available OS can be built for").
|
||||
error(2, "Cannot concretize '{0} os={1}'. Operating system '{1}' is not buildable", Package, OS)
|
||||
:- build(Package),
|
||||
node_os(Package, OS),
|
||||
not buildable_os(OS).
|
||||
|
||||
% can't have dependencies on incompatible OS's
|
||||
:- depends_on(Package, Dependency),
|
||||
node_os(Package, PackageOS),
|
||||
node_os(Dependency, DependencyOS),
|
||||
not os_compatible(PackageOS, DependencyOS),
|
||||
build(Package),
|
||||
error("Dependencies must have compatible OS's with their dependents").
|
||||
error(2, "{0} and dependency {1} have incompatible operating systems 'os={2}' and 'os={3}'", Package, Dependency, PackageOS, DependencyOS)
|
||||
:- depends_on(Package, Dependency),
|
||||
node_os(Package, PackageOS),
|
||||
node_os(Dependency, DependencyOS),
|
||||
not os_compatible(PackageOS, DependencyOS),
|
||||
build(Package).
|
||||
|
||||
% give OS choice weights according to os declarations
|
||||
node_os_weight(Package, Weight)
|
||||
@@ -613,14 +676,24 @@ node_os(Package, OS) :- node_os_set(Package, OS), node(Package).
|
||||
%-----------------------------------------------------------------------------
|
||||
|
||||
% Each node has only one target chosen among the known targets
|
||||
1 { node_target(Package, Target) : target(Target) } 1 :- node(Package), error("Each node must have exactly one target").
|
||||
{ node_target(Package, Target) : target(Target) } :- node(Package).
|
||||
|
||||
error(2, "Cannot find valid target for '{0}'", Package)
|
||||
:- node(Package),
|
||||
C = #count{Target : node_target(Package, Target)},
|
||||
C < 1.
|
||||
|
||||
error(2, "Cannot concretize '{0}' with multiple targets\n Requested 'target={1}' and 'target={2}'", Package, Target1, Target2)
|
||||
:- node(Package),
|
||||
node_target(Package, Target1),
|
||||
node_target(Package, Target2),
|
||||
Target1 < Target2. % see[1]
|
||||
|
||||
% If a node must satisfy a target constraint, enforce it
|
||||
:- node_target(Package, Target),
|
||||
node_target_satisfies(Package, Constraint),
|
||||
not target_satisfies(Constraint, Target),
|
||||
error("Node targets must satisfy node target constraints").
|
||||
|
||||
error(1, "'{0} target={1}' cannot satisfy constraint 'target={2}'", Package, Target, Constraint)
|
||||
:- node_target(Package, Target),
|
||||
node_target_satisfies(Package, Constraint),
|
||||
not target_satisfies(Constraint, Target).
|
||||
|
||||
% If a node has a target and the target satisfies a constraint, then the target
|
||||
% associated with the node satisfies the same constraint
|
||||
@@ -628,10 +701,10 @@ node_target_satisfies(Package, Constraint)
|
||||
:- node_target(Package, Target), target_satisfies(Constraint, Target).
|
||||
|
||||
% If a node has a target, all of its dependencies must be compatible with that target
|
||||
:- depends_on(Package, Dependency),
|
||||
node_target(Package, Target),
|
||||
not node_target_compatible(Dependency, Target),
|
||||
error("Dependency node targets must be compatible with dependent targets").
|
||||
error(2, "Cannot find compatible targets for {0} and {1}", Package, Dependency)
|
||||
:- depends_on(Package, Dependency),
|
||||
node_target(Package, Target),
|
||||
not node_target_compatible(Dependency, Target).
|
||||
|
||||
% Intermediate step for performance reasons
|
||||
% When the integrity constraint above was formulated including this logic
|
||||
@@ -672,12 +745,12 @@ target_weight(Target, Package, Weight)
|
||||
:- package_target_weight(Target, Package, Weight).
|
||||
|
||||
% can't use targets on node if the compiler for the node doesn't support them
|
||||
:- node_target(Package, Target),
|
||||
not compiler_supports_target(Compiler, Version, Target),
|
||||
node_compiler(Package, Compiler),
|
||||
node_compiler_version(Package, Compiler, Version),
|
||||
build(Package),
|
||||
error("No satisfying compiler available is compatible with a satisfying target").
|
||||
error(2, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Target, Compiler, Version)
|
||||
:- node_target(Package, Target),
|
||||
not compiler_supports_target(Compiler, Version, Target),
|
||||
node_compiler(Package, Compiler),
|
||||
node_compiler_version(Package, Compiler, Version),
|
||||
build(Package).
|
||||
|
||||
% if a target is set explicitly, respect it
|
||||
node_target(Package, Target)
|
||||
@@ -704,8 +777,10 @@ node_target_mismatch(Parent, Dependency)
|
||||
not node_target_match(Parent, Dependency).
|
||||
|
||||
% disallow reusing concrete specs that don't have a compatible target
|
||||
:- node(Package), node_target(Package, Target), not target(Target),
|
||||
error("No satisfying package's target is compatible with this machine").
|
||||
error(2, "'{0} target={1}' is not compatible with this machine", Package, Target)
|
||||
:- node(Package),
|
||||
node_target(Package, Target),
|
||||
not target(Target).
|
||||
|
||||
#defined node_target_set/2.
|
||||
#defined package_target_weight/3.
|
||||
@@ -717,10 +792,19 @@ compiler(Compiler) :- compiler_version(Compiler, _).
|
||||
|
||||
% There must be only one compiler set per built node. The compiler
|
||||
% is chosen among available versions.
|
||||
1 { node_compiler_version(Package, Compiler, Version) : compiler_version(Compiler, Version) } 1 :-
|
||||
{ node_compiler_version(Package, Compiler, Version) : compiler_version(Compiler, Version) } :-
|
||||
node(Package),
|
||||
build(Package),
|
||||
error("Each node must have exactly one compiler").
|
||||
build(Package).
|
||||
|
||||
error(2, "No valid compiler version found for '{0}'", Package)
|
||||
:- node(Package),
|
||||
C = #count{ Version : node_compiler_version(Package, _, Version)},
|
||||
C < 1.
|
||||
error(2, "'{0}' compiler constraints '%{1}@{2}' and '%{3}@{4}' are incompatible", Package, Compiler1, Version1, Compiler2, Version2)
|
||||
:- node(Package),
|
||||
node_compiler_version(Package, Compiler1, Version1),
|
||||
node_compiler_version(Package, Compiler2, Version2),
|
||||
(Compiler1, Version1) < (Compiler2, Version2). % see[1]
|
||||
|
||||
% Sometimes we just need to know the compiler and not the version
|
||||
node_compiler(Package, Compiler) :- node_compiler_version(Package, Compiler, _).
|
||||
@@ -729,14 +813,22 @@ node_compiler(Package, Compiler) :- node_compiler_version(Package, Compiler, _).
|
||||
:- node_compiler(Package, Compiler1),
|
||||
node_compiler_version(Package, Compiler2, _),
|
||||
Compiler1 != Compiler2,
|
||||
error("Internal error: mismatch between selected compiler and compiler version").
|
||||
internal_error("Mismatch between selected compiler and compiler version").
|
||||
|
||||
% If the compiler of a node cannot be satisfied, raise
|
||||
error(1, "No valid compiler for {0} satisfies '%{1}'", Package, Compiler)
|
||||
:- node(Package),
|
||||
node_compiler_version_satisfies(Package, Compiler, ":"),
|
||||
C = #count{ Version : node_compiler_version(Package, Compiler, Version), compiler_version_satisfies(Compiler, ":", Version) },
|
||||
C < 1.
|
||||
|
||||
% If the compiler of a node must satisfy a constraint, then its version
|
||||
% must be chosen among the ones that satisfy said constraint
|
||||
1 { node_compiler_version(Package, Compiler, Version)
|
||||
: compiler_version_satisfies(Compiler, Constraint, Version) } 1 :-
|
||||
node_compiler_version_satisfies(Package, Compiler, Constraint),
|
||||
error("Internal error: node compiler version mismatch").
|
||||
error(2, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
|
||||
:- node(Package),
|
||||
node_compiler_version_satisfies(Package, Compiler, Constraint),
|
||||
C = #count{ Version : node_compiler_version(Package, Compiler, Version), compiler_version_satisfies(Compiler, Constraint, Version) },
|
||||
C < 1.
|
||||
|
||||
% If the node is associated with a compiler and the compiler satisfies a constraint, then
|
||||
% the compiler associated with the node satisfies the same constraint
|
||||
@@ -754,11 +846,12 @@ node_compiler_version(Package, Compiler, Version) :- node_compiler_version_set(P
|
||||
% Cannot select a compiler if it is not supported on the OS
|
||||
% Compilers that are explicitly marked as allowed
|
||||
% are excluded from this check
|
||||
:- node_compiler_version(Package, Compiler, Version), node_os(Package, OS),
|
||||
not compiler_supports_os(Compiler, Version, OS),
|
||||
not allow_compiler(Compiler, Version),
|
||||
build(Package),
|
||||
error("No satisfying compiler available is compatible with a satisfying os").
|
||||
error(2, "{0} compiler '%{1}@{2}' incompatible with 'os={3}'", Package, Compiler, Version, OS)
|
||||
:- node_compiler_version(Package, Compiler, Version),
|
||||
node_os(Package, OS),
|
||||
not compiler_supports_os(Compiler, Version, OS),
|
||||
not allow_compiler(Compiler, Version),
|
||||
build(Package).
|
||||
|
||||
% If a package and one of its dependencies don't have the
|
||||
% same compiler there's a mismatch.
|
||||
@@ -851,7 +944,7 @@ no_flags(Package, FlagType)
|
||||
%-----------------------------------------------------------------------------
|
||||
% the solver is free to choose at most one installed hash for each package
|
||||
{ hash(Package, Hash) : installed_hash(Package, Hash) } 1
|
||||
:- node(Package), error("Internal error: package must resolve to at most one hash").
|
||||
:- node(Package), internal_error("Package must resolve to at most one hash").
|
||||
|
||||
% you can't choose an installed hash for a dev spec
|
||||
:- hash(Package, Hash), variant_value(Package, "dev_path", _).
|
||||
@@ -877,11 +970,47 @@ build(Package) :- not hash(Package, _), node(Package).
|
||||
% 200+ Shifted priorities for build nodes; correspond to priorities 0 - 99.
|
||||
% 100 - 199 Unshifted priorities. Currently only includes minimizing #builds.
|
||||
% 0 - 99 Priorities for non-built nodes.
|
||||
build_priority(Package, 200) :- build(Package), node(Package).
|
||||
build_priority(Package, 0) :- not build(Package), node(Package).
|
||||
build_priority(Package, 200) :- build(Package), node(Package), optimize_for_reuse().
|
||||
build_priority(Package, 0) :- not build(Package), node(Package), optimize_for_reuse().
|
||||
|
||||
% don't adjust build priorities if reuse is not enabled
|
||||
build_priority(Package, 0) :- node(Package), not optimize_for_reuse().
|
||||
|
||||
% don't assign versions from installed packages unless reuse is enabled
|
||||
% NOTE: that "installed" means the declared version was only included because
|
||||
% that package happens to be installed, NOT because it was asked for on the
|
||||
% command line. If the user specifies a hash, the origin will be "spec".
|
||||
%
|
||||
% TODO: There's a slight inconsistency with this: if the user concretizes
|
||||
% and installs `foo ^bar`, for some build dependency `bar`, and then later
|
||||
% does a `spack install --fresh foo ^bar/abcde` (i.e., the hash of `bar`), it
|
||||
% currently *won't* force versions for `bar`'s build dependencies -- `--fresh`
|
||||
% will instead build the latest bar. When we actually include transitive
|
||||
% build deps in the solve, consider using them as a preference to resolve this.
|
||||
:- version(Package, Version),
|
||||
version_weight(Package, Weight),
|
||||
version_declared(Package, Version, Weight, "installed"),
|
||||
not optimize_for_reuse().
|
||||
|
||||
#defined installed_hash/2.
|
||||
|
||||
%-----------------------------------------------------------------
|
||||
% Optimization to avoid errors
|
||||
%-----------------------------------------------------------------
|
||||
% Some errors are handled as rules instead of constraints because
|
||||
% it allows us to explain why something failed. Here we optimize
|
||||
% HEAVILY against the facts generated by those rules.
|
||||
#minimize{ 0@1000: #true}.
|
||||
#minimize{ 0@1001: #true}.
|
||||
#minimize{ 0@1002: #true}.
|
||||
|
||||
#minimize{ 1000@1000+Priority,Msg: error(Priority, Msg) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1: error(Priority, Msg, Arg1) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2: error(Priority, Msg, Arg1, Arg2) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3: error(Priority, Msg, Arg1, Arg2, Arg3) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3,Arg4: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4) }.
|
||||
#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3,Arg4,Arg5: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4, Arg5) }.
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
% How to optimize the spec (high to low priority)
|
||||
%-----------------------------------------------------------------------------
|
||||
@@ -1061,3 +1190,11 @@ opt_criterion(1, "non-preferred targets").
|
||||
#heuristic variant_value(Package, Variant, Value) : variant_default_value(Package, Variant, Value), node(Package). [10, true]
|
||||
#heuristic provider(Package, Virtual) : possible_provider_weight(Package, Virtual, 0, _), virtual_node(Virtual). [10, true]
|
||||
#heuristic node(Package) : possible_provider_weight(Package, Virtual, 0, _), virtual_node(Virtual). [10, true]
|
||||
|
||||
%-----------
|
||||
% Notes
|
||||
%-----------
|
||||
|
||||
% [1] Clingo ensures a total ordering among all atoms. We rely on that total ordering
|
||||
% to reduce symmetry in the solution by checking `<` instead of `!=` in symmetric
|
||||
% cases. These choices are made without loss of generality.
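A small Python analogue of note [1], illustrative only and not Spack code: comparing with `<` under a total order keeps one representative per unordered pair, while `!=` would flag each symmetric pair twice.

```python
values = ["debug", "shared", "static"]

# '!=' reports every symmetric pair twice ...
pairs_neq = [(a, b) for a in values for b in values if a != b]

# ... while '<' keeps a single representative per pair, which is exactly
# what the `Value1 < Value2` / `OS1 < OS2` checks in the rules above do.
pairs_lt = [(a, b) for a in values for b in values if a < b]

assert len(pairs_neq) == 2 * len(pairs_lt)
print(pairs_lt)  # [('debug', 'shared'), ('debug', 'static'), ('shared', 'static')]
```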
|
||||
|
||||
@@ -34,3 +34,13 @@
|
||||
|
||||
% deprecated packages
|
||||
#show deprecated/2.
|
||||
|
||||
% error types
|
||||
#show error/2.
|
||||
#show error/3.
|
||||
#show error/4.
|
||||
#show error/5.
|
||||
#show error/6.
|
||||
#show error/7.
|
||||
|
||||
% debug
|
||||
|
||||
@@ -184,7 +184,7 @@
|
||||
default_format += '{variants}{arch=architecture}'
|
||||
|
||||
#: specfile format version. Must increase monotonically
|
||||
specfile_format_version = 2
|
||||
specfile_format_version = 3
|
||||
|
||||
|
||||
def colorize_spec(spec):
|
||||
@@ -1191,11 +1191,14 @@ def __init__(self, spec_like=None, normal=False,
|
||||
self._dependencies = _EdgeMap(store_by=EdgeDirection.child)
|
||||
self.namespace = None
|
||||
|
||||
self._hash = None
|
||||
self._build_hash = None
|
||||
self._full_hash = None
|
||||
self._package_hash = None
|
||||
# initial values for all spec hash types
|
||||
for h in ht.hashes:
|
||||
setattr(self, h.attr, None)
|
||||
|
||||
# Python __hash__ is handled separately from the cached spec hashes
|
||||
self._dunder_hash = None
|
||||
|
||||
# cache of package for this spec
|
||||
self._package = None
|
||||
|
||||
# Most of these are internal implementation details that can be
|
||||
@@ -1210,14 +1213,6 @@ def __init__(self, spec_like=None, normal=False,
|
||||
self.external_path = external_path
|
||||
self.external_modules = Spec._format_module_list(external_modules)
|
||||
|
||||
# Older spack versions did not compute full_hash or build_hash,
|
||||
# and we may not have the necessary information to recompute them
|
||||
# if we read in old specs. Old concrete specs are marked "final"
|
||||
# when read in to indicate that we shouldn't recompute full_hash
|
||||
# or build_hash. New specs are not final; we can lazily compute
|
||||
# their hashes.
|
||||
self._hashes_final = False
|
||||
|
||||
# This attribute is used to store custom information for
|
||||
# external specs. None signal that it was not set yet.
|
||||
self.extra_attributes = None
|
||||
@@ -1554,6 +1549,38 @@ def spliced(self):
|
||||
"""
|
||||
return any(s.build_spec is not s for s in self.traverse(root=True))
|
||||
|
||||
@property
|
||||
def installed(self):
|
||||
"""Installation status of a package.
|
||||
|
||||
Returns:
|
||||
True if the package has been installed, False otherwise.
|
||||
"""
|
||||
if not self.concrete:
|
||||
return False
|
||||
|
||||
try:
|
||||
# If the spec is in the DB, check the installed
|
||||
# attribute of the record
|
||||
return spack.store.db.get_record(self).installed
|
||||
except KeyError:
|
||||
# If the spec is not in the DB, the method
|
||||
# above raises a KeyError
|
||||
return False
|
||||
|
||||
@property
|
||||
def installed_upstream(self):
|
||||
"""Whether the spec is installed in an upstream repository.
|
||||
|
||||
Returns:
|
||||
True if the package is installed in an upstream, False otherwise.
|
||||
"""
|
||||
if not self.concrete:
|
||||
return False
|
||||
|
||||
upstream, _ = spack.store.db.query_by_spec_hash(self.dag_hash())
|
||||
return upstream
|
||||
|
||||
def traverse(self, **kwargs):
|
||||
direction = kwargs.get('direction', 'children')
|
||||
depth = kwargs.get('depth', False)
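The new `installed` and `installed_upstream` properties in the hunk above move the database lookup onto `Spec` itself, so call sites no longer need to go through `spec.package`. A hedged usage sketch, assuming an active Spack session with a populated database; `zlib` is only an example spec:

```python
import spack.spec

spec = spack.spec.Spec("zlib").concretized()
if spec.installed:
    # Concrete and present in the database; also note whether it is upstream.
    where = "upstream" if spec.installed_upstream else "locally"
    print("already installed {0}: {1}".format(where, spec.dag_hash()))
else:
    print("needs a build:", spec.name)
```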
|
||||
@@ -1572,7 +1599,15 @@ def traverse(self, **kwargs):
|
||||
def traverse_edges(self, visited=None, d=0, deptype='all',
|
||||
dep_spec=None, **kwargs):
|
||||
"""Generic traversal of the DAG represented by this spec.
|
||||
This will yield each node in the spec. Options:
|
||||
|
||||
This yields ``DependencySpec`` objects as they are traversed.
|
||||
|
||||
When traversing top-down, an imaginary incoming edge to the root
|
||||
is yielded first as ``DependencySpec(None, root, ())``. When
|
||||
traversing bottom-up, imaginary edges to leaves are yielded first
|
||||
as ``DependencySpec(left, None, ())`` objects.
|
||||
|
||||
Options:
|
||||
|
||||
order [=pre|post]
|
||||
Order to traverse spec nodes. Defaults to preorder traversal.
|
||||
@@ -1727,7 +1762,7 @@ def prefix(self):
|
||||
def prefix(self, value):
|
||||
self._prefix = spack.util.prefix.Prefix(pth.convert_to_platform_path(value))
|
||||
|
||||
def _spec_hash(self, hash):
|
||||
def spec_hash(self, hash):
|
||||
"""Utility method for computing different types of Spec hashes.
|
||||
|
||||
Arguments:
|
||||
@@ -1744,7 +1779,7 @@ def _spec_hash(self, hash):
|
||||
def _cached_hash(self, hash, length=None):
|
||||
"""Helper function for storing a cached hash on the spec.
|
||||
|
||||
This will run _spec_hash() with the deptype and package_hash
|
||||
This will run spec_hash() with the deptype and package_hash
|
||||
parameters, and if this spec is concrete, it will store the value
|
||||
in the supplied attribute on this spec.
|
||||
|
||||
@@ -1752,13 +1787,13 @@ def _cached_hash(self, hash, length=None):
|
||||
hash (spack.hash_types.SpecHashDescriptor): type of hash to generate.
|
||||
"""
|
||||
if not hash.attr:
|
||||
return self._spec_hash(hash)[:length]
|
||||
return self.spec_hash(hash)[:length]
|
||||
|
||||
hash_string = getattr(self, hash.attr, None)
|
||||
if hash_string:
|
||||
return hash_string[:length]
|
||||
else:
|
||||
hash_string = self._spec_hash(hash)
|
||||
hash_string = self.spec_hash(hash)
|
||||
if self.concrete:
|
||||
setattr(self, hash.attr, hash_string)
|
||||
|
||||
@@ -1771,41 +1806,30 @@ def package_hash(self):
|
||||
def dag_hash(self, length=None):
|
||||
"""This is Spack's default hash, used to identify installations.
|
||||
|
||||
At the moment, it excludes build dependencies to avoid rebuilding
|
||||
packages whenever build dependency versions change. We will
|
||||
revise this to include more detailed provenance when the
|
||||
concretizer can more aggressively reuse installed dependencies.
|
||||
Same as the full hash (includes package hash and build/link/run deps).
|
||||
Tells us when package files and any dependencies have changed.
|
||||
|
||||
NOTE: Versions of Spack prior to 0.18 only included link and run deps.
|
||||
|
||||
"""
|
||||
return self._cached_hash(ht.dag_hash, length)
|
||||
|
||||
def build_hash(self, length=None):
|
||||
"""Hash used to store specs in environments.
|
||||
|
||||
This hash includes build dependencies, and we need to preserve
|
||||
them to be able to rebuild an entire environment for a user.
|
||||
"""
|
||||
return self._cached_hash(ht.build_hash, length)
|
||||
|
||||
def process_hash(self, length=None):
|
||||
"""Hash used to store specs in environments.
|
||||
"""Hash used to transfer specs among processes.
|
||||
|
||||
This hash includes build and test dependencies and is only used to
|
||||
serialize a spec and pass it around among processes.
|
||||
"""
|
||||
return self._cached_hash(ht.process_hash, length)
|
||||
|
||||
def full_hash(self, length=None):
|
||||
"""Hash to determine when to rebuild packages in the build pipeline.
|
||||
|
||||
This hash includes the package hash, so that we know when package
|
||||
files have changed between builds.
|
||||
"""
|
||||
return self._cached_hash(ht.full_hash, length)
|
||||
|
||||
def dag_hash_bit_prefix(self, bits):
|
||||
"""Get the first <bits> bits of the DAG hash as an integer type."""
|
||||
return spack.util.hash.base32_prefix_bits(self.dag_hash(), bits)
|
||||
|
||||
def process_hash_bit_prefix(self, bits):
|
||||
"""Get the first <bits> bits of the DAG hash as an integer type."""
|
||||
return spack.util.hash.base32_prefix_bits(self.process_hash(), bits)
|
||||
|
||||
def to_node_dict(self, hash=ht.dag_hash):
|
||||
"""Create a dictionary representing the state of this Spec.
|
||||
|
||||
@@ -1899,7 +1923,7 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
if hasattr(variant, '_patches_in_order_of_appearance'):
|
||||
d['patches'] = variant._patches_in_order_of_appearance
|
||||
|
||||
if hash.package_hash:
|
||||
if self._concrete and hash.package_hash:
|
||||
package_hash = self.package_hash()
|
||||
|
||||
# Full hashes are in bytes
|
||||
@@ -1971,7 +1995,7 @@ def to_dict(self, hash=ht.dag_hash):
|
||||
"dependencies": [
|
||||
{
|
||||
"name": "readline",
|
||||
"build_hash": "4f47cggum7p4qmp3xna4hi547o66unva",
|
||||
"hash": "4f47cggum7p4qmp3xna4hi547o66unva",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
@@ -1979,16 +2003,14 @@ def to_dict(self, hash=ht.dag_hash):
|
||||
},
|
||||
{
|
||||
"name": "zlib",
|
||||
"build_hash": "uvgh6p7rhll4kexqnr47bvqxb3t33jtq",
|
||||
"hash": "uvgh6p7rhll4kexqnr47bvqxb3t33jtq",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
}
|
||||
],
|
||||
"hash": "d2yzqp2highd7sn4nr5ndkw3ydcrlhtk",
|
||||
"full_hash": "tve45xfqkfgmzwcyfetze2z6syrg7eaf",
|
||||
"build_hash": "tsjnz7lgob7bu2wd4sqzzjenxewc2zha"
|
||||
"hash": "tve45xfqkfgmzwcyfetze2z6syrg7eaf",
|
||||
},
|
||||
# ... more node dicts for readline and its dependencies ...
|
||||
]
|
||||
@@ -2017,10 +2039,12 @@ def to_dict(self, hash=ht.dag_hash):
|
||||
node_list = [] # Using a list to preserve preorder traversal for hash.
|
||||
hash_set = set()
|
||||
for s in self.traverse(order='pre', deptype=hash.deptype):
|
||||
spec_hash = s.node_dict_with_hashes(hash)[hash.name]
|
||||
spec_hash = s._cached_hash(hash)
|
||||
|
||||
if spec_hash not in hash_set:
|
||||
node_list.append(s.node_dict_with_hashes(hash))
|
||||
hash_set.add(spec_hash)
|
||||
|
||||
if s.build_spec is not s:
|
||||
build_spec_list = s.build_spec.to_dict(hash)['spec']['nodes']
|
||||
for node in build_spec_list:
|
||||
@@ -2028,6 +2052,7 @@ def to_dict(self, hash=ht.dag_hash):
|
||||
if node_hash not in hash_set:
|
||||
node_list.append(node)
|
||||
hash_set.add(node_hash)
|
||||
|
||||
meta_dict = syaml.syaml_dict([('version', specfile_format_version)])
|
||||
inner_dict = syaml.syaml_dict([('_meta', meta_dict), ('nodes', node_list)])
|
||||
spec_dict = syaml.syaml_dict([('spec', inner_dict)])
|
||||
@@ -2040,38 +2065,19 @@ def node_dict_with_hashes(self, hash=ht.dag_hash):
|
||||
node = self.to_node_dict(hash)
|
||||
node[ht.dag_hash.name] = self.dag_hash()
|
||||
|
||||
# full_hash and build_hash are lazily computed -- but if we write
|
||||
# a spec out, we want them to be included. This is effectively
|
||||
# the last chance we get to compute them accurately.
|
||||
# dag_hash is lazily computed -- but if we write a spec out, we want it
|
||||
# to be included. This is effectively the last chance we get to compute
|
||||
# it accurately.
|
||||
if self.concrete:
|
||||
# build and full hashes can be written out if:
|
||||
# 1. they're precomputed (i.e. we read them from somewhere
|
||||
# and they were already on the spec
|
||||
# 2. we can still compute them lazily (i.e. we just made them and
|
||||
# have the full dependency graph on-hand)
|
||||
#
|
||||
# we want to avoid recomputing either hash for specs we read
|
||||
# in from the DB or elsewhere, as we may not have the info
|
||||
# (like patches, package versions, etc.) that we need to
|
||||
# compute them. Unknown hashes are better than wrong hashes.
|
||||
write_full_hash = (
|
||||
self._hashes_final and self._full_hash or # cached and final
|
||||
not self._hashes_final) # lazily compute
|
||||
if write_full_hash:
|
||||
node[ht.full_hash.name] = self.full_hash()
|
||||
# all specs have at least a DAG hash
|
||||
node[ht.dag_hash.name] = self.dag_hash()
|
||||
|
||||
write_build_hash = 'build' in hash.deptype and (
|
||||
self._hashes_final and self._build_hash or # cached and final
|
||||
not self._hashes_final) # lazily compute
|
||||
if write_build_hash:
|
||||
node[ht.build_hash.name] = self.build_hash()
|
||||
else:
|
||||
node['concrete'] = False
|
||||
|
||||
if hash.name == 'build_hash':
|
||||
node[hash.name] = self.build_hash()
|
||||
elif hash.name == 'process_hash':
|
||||
node[hash.name] = self.process_hash()
|
||||
# we can also give them other hash types if we want
|
||||
if hash.name != ht.dag_hash.name:
|
||||
node[hash.name] = self._cached_hash(hash)
|
||||
|
||||
return node
|
||||
|
||||
@@ -2152,11 +2158,6 @@ def from_node_dict(node):
|
||||
# specs read in are concrete unless marked abstract
|
||||
spec._concrete = node.get('concrete', True)
|
||||
|
||||
# this spec may have been built with older packages than we have
|
||||
# on-hand, and we may not have the build dependencies, so mark it
|
||||
# so we don't recompute full_hash and build_hash.
|
||||
spec._hashes_final = spec._concrete
|
||||
|
||||
if 'patches' in node:
|
||||
patches = node['patches']
|
||||
if len(patches) > 0:
|
||||
@@ -2167,7 +2168,7 @@ def from_node_dict(node):
|
||||
# FIXME: Monkey patches mvar to store patches order
|
||||
mvar._patches_in_order_of_appearance = patches
|
||||
|
||||
# Don't read dependencies here; from_node_dict() is used by
|
||||
# Don't read dependencies here; from_dict() is used by
|
||||
# from_yaml() and from_json() to read the root *and* each dependency
|
||||
# spec.
|
||||
|
||||
@@ -2194,7 +2195,6 @@ def dependencies_from_node_dict(node):
|
||||
@staticmethod
|
||||
def read_yaml_dep_specs(deps, hash_type=ht.dag_hash.name):
|
||||
"""Read the DependencySpec portion of a YAML-formatted Spec.
|
||||
|
||||
This needs to be backward-compatible with older spack spec
|
||||
formats so that reindex will work on old specs/databases.
|
||||
"""
|
||||
@@ -2213,17 +2213,13 @@ def read_yaml_dep_specs(deps, hash_type=ht.dag_hash.name):
|
||||
dep_hash, deptypes = elt
|
||||
elif isinstance(elt, dict):
|
||||
# new format: elements of dependency spec are keyed.
|
||||
for key in (ht.full_hash.name,
|
||||
ht.build_hash.name,
|
||||
ht.dag_hash.name,
|
||||
ht.process_hash.name):
|
||||
if key in elt:
|
||||
dep_hash, deptypes = elt[key], elt['type']
|
||||
hash_type = key
|
||||
for h in ht.hashes:
|
||||
if h.name in elt:
|
||||
dep_hash, deptypes = elt[h.name], elt['type']
|
||||
hash_type = h.name
|
||||
break
|
||||
else: # We never determined a hash type...
|
||||
raise spack.error.SpecError(
|
||||
"Couldn't parse dependency spec.")
|
||||
raise spack.error.SpecError("Couldn't parse dependency spec.")
|
||||
else:
|
||||
raise spack.error.SpecError(
|
||||
"Couldn't parse dependency types in spec.")
|
||||
@@ -2655,11 +2651,11 @@ def _old_concretize(self, tests=False, deprecation_warning=True):
|
||||
import spack.concretize
|
||||
|
||||
# Add a warning message to inform users that the original concretizer
|
||||
# will be removed in v0.18.0
|
||||
# will be removed
|
||||
if deprecation_warning:
|
||||
msg = ('the original concretizer is currently being used.\n\tUpgrade to '
|
||||
'"clingo" at your earliest convenience. The original concretizer '
|
||||
'will be removed from Spack starting at v0.18.0')
|
||||
'will be removed from Spack in a future version.')
|
||||
warnings.warn(msg)
|
||||
|
||||
if not self.name:
|
||||
@@ -2813,13 +2809,13 @@ def ensure_external_path_if_external(external_spec):
|
||||
|
||||
@staticmethod
|
||||
def ensure_no_deprecated(root):
|
||||
"""Raise is a deprecated spec is in the dag.
|
||||
"""Raise if a deprecated spec is in the dag.
|
||||
|
||||
Args:
|
||||
root (Spec): root spec to be analyzed
|
||||
|
||||
Raises:
|
||||
SpecDeprecatedError: is any deprecated spec is found
|
||||
SpecDeprecatedError: if any deprecated spec is found
|
||||
"""
|
||||
deprecated = []
|
||||
with spack.store.db.read_transaction():
|
||||
@@ -2880,7 +2876,7 @@ def concretize(self, tests=False):
|
||||
|
||||
def _mark_root_concrete(self, value=True):
|
||||
"""Mark just this spec (not dependencies) concrete."""
|
||||
if (not value) and self.concrete and self.package.installed:
|
||||
if (not value) and self.concrete and self.installed:
|
||||
return
|
||||
self._normal = value
|
||||
self._concrete = value
|
||||
@@ -2894,7 +2890,7 @@ def _mark_concrete(self, value=True):
|
||||
# if set to false, clear out all hashes (set to None or remove attr)
|
||||
# may need to change references to respect None
|
||||
for s in self.traverse():
|
||||
if (not value) and s.concrete and s.package.installed:
|
||||
if (not value) and s.concrete and s.installed:
|
||||
continue
|
||||
elif not value:
|
||||
s.clear_cached_hashes()
|
||||
@@ -2911,7 +2907,7 @@ def concretized(self, tests=False):
|
||||
if a list of names activate them for the packages in the list,
|
||||
if True activate 'test' dependencies for all packages.
|
||||
"""
|
||||
clone = self.copy(caches=True)
|
||||
clone = self.copy()
|
||||
clone.concretize(tests=tests)
|
||||
return clone
|
||||
|
||||
@@ -3159,7 +3155,7 @@ def _normalize_helper(self, visited, spec_deps, provider_index, tests):
|
||||
# Avoid recursively adding constraints for already-installed packages:
|
||||
# these may include build dependencies which are not needed for this
|
||||
# install (since this package is already installed).
|
||||
if self.concrete and self.package.installed:
|
||||
if self.concrete and self.installed:
|
||||
return False
|
||||
|
||||
# Combine constraints from package deps with constraints from
|
||||
@@ -3210,8 +3206,8 @@ def normalize(self, force=False, tests=False, user_spec_deps=None):
|
||||
"Attempting to normalize anonymous spec")
|
||||
|
||||
# Set _normal and _concrete to False when forced
|
||||
if force:
|
||||
self._mark_concrete(False)
|
||||
if force and not self._concrete:
|
||||
self._normal = False
|
||||
|
||||
if self._normal:
|
||||
return False
|
||||
@@ -3651,7 +3647,6 @@ def virtual_dependencies(self):
|
||||
return [spec for spec in self.traverse() if spec.virtual]
|
||||
|
||||
@property # type: ignore[misc] # decorated prop not supported in mypy
|
||||
@lang.memoized
|
||||
def patches(self):
|
||||
"""Return patch objects for any patch sha256 sums on this Spec.
|
||||
|
||||
@@ -3664,23 +3659,23 @@ def patches(self):
|
||||
if not self.concrete:
|
||||
raise spack.error.SpecError("Spec is not concrete: " + str(self))
|
||||
|
||||
if 'patches' not in self.variants:
|
||||
return []
|
||||
if not hasattr(self, "_patches"):
|
||||
self._patches = []
|
||||
if 'patches' in self.variants:
|
||||
# FIXME: _patches_in_order_of_appearance is attached after
|
||||
# FIXME: concretization to store the order of patches somewhere.
|
||||
# FIXME: Needs to be refactored in a cleaner way.
|
||||
|
||||
# FIXME: _patches_in_order_of_appearance is attached after
|
||||
# FIXME: concretization to store the order of patches somewhere.
|
||||
# FIXME: Needs to be refactored in a cleaner way.
|
||||
# translate patch sha256sums to patch objects by consulting the index
|
||||
self._patches = []
|
||||
for sha256 in self.variants['patches']._patches_in_order_of_appearance:
|
||||
index = spack.repo.path.patch_index
|
||||
patch = index.patch_for_package(sha256, self.package)
|
||||
self._patches.append(patch)
|
||||
|
||||
# translate patch sha256sums to patch objects by consulting the index
|
||||
patches = []
|
||||
for sha256 in self.variants['patches']._patches_in_order_of_appearance:
|
||||
index = spack.repo.path.patch_index
|
||||
patch = index.patch_for_package(sha256, self.package)
|
||||
patches.append(patch)
|
||||
return self._patches
|
||||
|
||||
return patches
|
||||
|
||||
def _dup(self, other, deps=True, cleardeps=True, caches=None):
|
||||
def _dup(self, other, deps=True, cleardeps=True):
|
||||
"""Copy the spec other into self. This is an overwriting
|
||||
copy. It does not copy any dependents (parents), but by default
|
||||
copies dependencies.
|
||||
@@ -3695,10 +3690,6 @@ def _dup(self, other, deps=True, cleardeps=True, caches=None):
|
||||
cleardeps (bool): if True clears the dependencies of ``self``,
|
||||
before possibly copying the dependencies of ``other`` onto
|
||||
``self``
|
||||
caches (bool or None): preserve cached fields such as
|
||||
``_normal``, ``_hash``, and ``_dunder_hash``. By
|
||||
default this is ``False`` if DAG structure would be
|
||||
changed by the copy, ``True`` if it's an exact copy.
|
||||
|
||||
Returns:
|
||||
True if ``self`` changed because of the copy operation,
|
||||
@@ -3749,12 +3740,6 @@ def _dup(self, other, deps=True, cleardeps=True, caches=None):
|
||||
self.extra_attributes = other.extra_attributes
|
||||
self.namespace = other.namespace
|
||||
|
||||
# Cached fields are results of expensive operations.
|
||||
# If we preserved the original structure, we can copy them
|
||||
# safely. If not, they need to be recomputed.
|
||||
if caches is None:
|
||||
caches = (deps is True or deps == dp.all_deptypes)
|
||||
|
||||
# If we copy dependencies, preserve DAG structure in the new spec
|
||||
if deps:
|
||||
# If caller restricted deptypes to be copied, adjust that here.
|
||||
@@ -3762,29 +3747,26 @@ def _dup(self, other, deps=True, cleardeps=True, caches=None):
|
||||
deptypes = dp.all_deptypes
|
||||
if isinstance(deps, (tuple, list)):
|
||||
deptypes = deps
|
||||
self._dup_deps(other, deptypes, caches)
|
||||
self._dup_deps(other, deptypes)
|
||||
|
||||
self._concrete = other._concrete
|
||||
self._hashes_final = other._hashes_final
|
||||
|
||||
if caches:
|
||||
self._hash = other._hash
|
||||
self._build_hash = other._build_hash
|
||||
if self._concrete:
|
||||
self._dunder_hash = other._dunder_hash
|
||||
self._normal = other._normal
|
||||
self._full_hash = other._full_hash
|
||||
self._package_hash = other._package_hash
|
||||
for h in ht.hashes:
|
||||
setattr(self, h.attr, getattr(other, h.attr, None))
|
||||
else:
|
||||
self._hash = None
|
||||
self._build_hash = None
|
||||
self._dunder_hash = None
|
||||
# Note, we could use other._normal if we are copying all deps, but
|
||||
# always set it False here to avoid the complexity of checking
|
||||
self._normal = False
|
||||
self._full_hash = None
|
||||
self._package_hash = None
|
||||
for h in ht.hashes:
|
||||
setattr(self, h.attr, None)
|
||||
|
||||
return changed
|
||||
|
||||
def _dup_deps(self, other, deptypes, caches):
|
||||
def _dup_deps(self, other, deptypes):
|
||||
def spid(spec):
|
||||
return id(spec)
|
||||
|
||||
@@ -3795,11 +3777,11 @@ def spid(spec):
|
||||
|
||||
if spid(edge.parent) not in new_specs:
|
||||
new_specs[spid(edge.parent)] = edge.parent.copy(
|
||||
deps=False, caches=caches
|
||||
deps=False
|
||||
)
|
||||
|
||||
if spid(edge.spec) not in new_specs:
|
||||
new_specs[spid(edge.spec)] = edge.spec.copy(deps=False, caches=caches)
|
||||
new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)
|
||||
|
||||
new_specs[spid(edge.parent)].add_dependency_edge(
|
||||
new_specs[spid(edge.spec)], edge.deptypes
|
||||
@@ -4529,7 +4511,7 @@ def tree(self, **kwargs):
|
||||
|
||||
if status_fn:
|
||||
status = status_fn(node)
|
||||
if node.package.installed_upstream:
|
||||
if node.installed_upstream:
|
||||
out += clr.colorize("@g{[^]} ", color=color)
|
||||
elif status is None:
|
||||
out += clr.colorize("@K{ - } ", color=color) # !installed
|
||||
@@ -4677,16 +4659,16 @@ def from_self(name, transitive):
|
||||
return False
|
||||
return True
|
||||
|
||||
self_nodes = dict((s.name, s.copy(deps=False, caches=True))
|
||||
self_nodes = dict((s.name, s.copy(deps=False))
|
||||
for s in self.traverse(root=True)
|
||||
if from_self(s.name, transitive))
|
||||
|
||||
if transitive:
|
||||
other_nodes = dict((s.name, s.copy(deps=False, caches=True))
|
||||
other_nodes = dict((s.name, s.copy(deps=False))
|
||||
for s in other.traverse(root=True))
|
||||
else:
|
||||
# NOTE: Does not fully validate providers; loader races possible
|
||||
other_nodes = dict((s.name, s.copy(deps=False, caches=True))
|
||||
other_nodes = dict((s.name, s.copy(deps=False))
|
||||
for s in other.traverse(root=True)
|
||||
if s is other or s.name not in self)
|
||||
|
||||
@@ -4719,23 +4701,10 @@ def from_self(name, transitive):
|
||||
for dep in ret.traverse(root=True, order='post'):
|
||||
opposite = other_nodes if dep.name in self_nodes else self_nodes
|
||||
if any(name in dep for name in opposite.keys()):
|
||||
# Record whether hashes are already cached
|
||||
# So we don't try to compute a hash from insufficient
|
||||
# provenance later
|
||||
has_build_hash = getattr(dep, ht.build_hash.name, None)
|
||||
has_full_hash = getattr(dep, ht.full_hash.name, None)
|
||||
|
||||
# package hash cannot be affected by splice
|
||||
dep.clear_cached_hashes(ignore=['package_hash'])
|
||||
|
||||
# Since this is a concrete spec, we want to make sure hashes
|
||||
# are cached writing specs only writes cached hashes in case
|
||||
# the spec is too old to have full provenance for these hashes,
|
||||
# so we can't rely on doing it at write time.
|
||||
if has_build_hash:
|
||||
_ = dep.build_hash()
|
||||
if has_full_hash:
|
||||
_ = dep.full_hash()
|
||||
dep.dag_hash()
|
||||
|
||||
return nodes[self.name]
|
||||
|
||||
@@ -4747,16 +4716,21 @@ def clear_cached_hashes(self, ignore=()):
|
||||
if h.attr not in ignore:
|
||||
if hasattr(self, h.attr):
|
||||
setattr(self, h.attr, None)
|
||||
self._dunder_hash = None
|
||||
|
||||
def __hash__(self):
|
||||
# If the spec is concrete, we leverage the DAG hash and just use
|
||||
# a 64-bit prefix of it. The DAG hash has the advantage that it's
|
||||
# computed once per concrete spec, and it's saved -- so if we
|
||||
# read concrete specs we don't need to recompute the whole hash.
|
||||
# This is good for large, unchanging specs.
|
||||
# If the spec is concrete, we leverage the process hash and just use
|
||||
# a 64-bit prefix of it. The process hash has the advantage that it's
|
||||
# computed once per concrete spec, and it's saved -- so if we read
|
||||
# concrete specs we don't need to recompute the whole hash. This is
|
||||
# good for large, unchanging specs.
|
||||
#
|
||||
# We use the process hash instead of the DAG hash here because the DAG
|
||||
# hash includes the package hash, which can cause infinite recursion,
|
||||
# and which isn't defined unless the spec has a known package.
|
||||
if self.concrete:
|
||||
if not self._dunder_hash:
|
||||
self._dunder_hash = self.dag_hash_bit_prefix(64)
|
||||
self._dunder_hash = self.process_hash_bit_prefix(64)
|
||||
return self._dunder_hash
|
||||
|
||||
# This is the normal hash for lazy_lexicographic_ordering. It's
|
||||
@@ -4821,7 +4795,7 @@ def _spec_from_old_dict(data):
|
||||
if 'dependencies' not in node[name]:
|
||||
continue
|
||||
|
||||
for dname, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node):
|
||||
for dname, _, dtypes, _ in Spec.dependencies_from_node_dict(node):
|
||||
deps[name]._add_dependency(deps[dname], dtypes)
|
||||
|
||||
return spec
|
||||
@@ -4855,7 +4829,7 @@ def _spec_from_dict(data):
|
||||
break
|
||||
|
||||
if not any_deps: # If we never see a dependency...
|
||||
hash_type = ht.dag_hash.name # use the full_hash provenance
|
||||
hash_type = ht.dag_hash.name
|
||||
elif not hash_type: # Seen a dependency, still don't know hash_type
|
||||
raise spack.error.SpecError("Spec dictionary contains malformed "
|
||||
"dependencies. Old format?")
|
||||
@@ -4865,10 +4839,7 @@ def _spec_from_dict(data):
|
||||
|
||||
# Pass 1: Create a single lookup dictionary by hash
|
||||
for i, node in enumerate(nodes):
|
||||
if 'build_spec' in node.keys():
|
||||
node_hash = node[hash_type]
|
||||
else:
|
||||
node_hash = node[hash_type]
|
||||
node_hash = node[hash_type]
|
||||
node_spec = Spec.from_node_dict(node)
|
||||
hash_dict[node_hash] = node
|
||||
hash_dict[node_hash]['node_spec'] = node_spec
|
||||
@@ -5053,7 +5024,7 @@ def do_parse(self):
|
||||
|
||||
# Raise an error if the previous spec is already
|
||||
# concrete (assigned by hash)
|
||||
if specs[-1]._hash:
|
||||
if specs[-1].concrete:
|
||||
raise RedundantSpecError(specs[-1], 'dependency')
|
||||
# command line deps get empty deptypes now.
|
||||
# Real deptypes are assigned later per packages.
|
||||
@@ -5063,9 +5034,8 @@ def do_parse(self):
|
||||
# If the next token can be part of a valid anonymous spec,
|
||||
# create the anonymous spec
|
||||
if self.next.type in (AT, ON, OFF, PCT):
|
||||
# Raise an error if the previous spec is already
|
||||
# concrete (assigned by hash)
|
||||
if specs and specs[-1]._hash:
|
||||
# Raise an error if the previous spec is already concrete
|
||||
if specs and specs[-1].concrete:
|
||||
raise RedundantSpecError(specs[-1],
|
||||
'compiler, version, '
|
||||
'or variant')
|
||||
@@ -5135,10 +5105,15 @@ def parse_compiler(self, text):
|
||||
return self.compiler()
|
||||
|
||||
def spec_by_hash(self):
|
||||
# TODO: Remove parser dependency on active environment and database.
|
||||
import spack.environment
|
||||
self.expect(ID)
|
||||
|
||||
dag_hash = self.token.value
|
||||
matches = spack.store.db.get_by_hash(dag_hash)
|
||||
matches = []
|
||||
if spack.environment.active_environment():
|
||||
matches = spack.environment.active_environment().get_by_hash(dag_hash)
|
||||
if not matches:
|
||||
matches = spack.store.db.get_by_hash(dag_hash)
|
||||
if not matches:
|
||||
raise NoSuchHashError(dag_hash)
|
||||
|
||||
@@ -5322,7 +5297,7 @@ def save_dependency_specfiles(
|
||||
json_path = os.path.join(output_directory, '{0}.json'.format(dep_name))
|
||||
|
||||
with open(json_path, 'w') as fd:
|
||||
fd.write(dep_spec.to_json(hash=ht.build_hash))
|
||||
fd.write(dep_spec.to_json(hash=ht.dag_hash))
|
||||
|
||||
|
||||
class SpecParseError(spack.error.SpecError):
|
||||
|
||||
@@ -17,6 +17,8 @@
|
||||
(['wrong-variant-in-depends-on'], 'PKG-DIRECTIVES'),
|
||||
# This package has a GitHub patch URL without full_index=1
|
||||
(['invalid-github-patch-url'], 'PKG-DIRECTIVES'),
|
||||
# This package has a stand-alone 'test' method in build-time callbacks
|
||||
(['test-build-callbacks'], 'PKG-DIRECTIVES'),
|
||||
# This package has no issues
|
||||
(['mpileaks'], None),
|
||||
# This package has a conflict with a trigger which cannot constrain the constraint
|
||||
|
||||
@@ -11,15 +11,12 @@
|
||||
import py
|
||||
import pytest
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
|
||||
import spack.binary_distribution as bindist
|
||||
import spack.config
|
||||
import spack.hooks.sbang as sbang
|
||||
import spack.main
|
||||
import spack.mirror
|
||||
import spack.repo
|
||||
import spack.spec as spec
|
||||
import spack.store
|
||||
import spack.util.gpg
|
||||
import spack.util.web as web_util
|
||||
@@ -394,31 +391,12 @@ def test_built_spec_cache(mirror_dir):
|
||||
|
||||
gspec, cspec = Spec('garply').concretized(), Spec('corge').concretized()
|
||||
|
||||
full_hash_map = {
|
||||
'garply': gspec.full_hash(),
|
||||
'corge': cspec.full_hash(),
|
||||
}
|
||||
|
||||
gspec_results = bindist.get_mirrors_for_spec(gspec)
|
||||
|
||||
gspec_mirrors = {}
|
||||
for result in gspec_results:
|
||||
s = result['spec']
|
||||
assert(s._full_hash == full_hash_map[s.name])
|
||||
assert(result['mirror_url'] not in gspec_mirrors)
|
||||
gspec_mirrors[result['mirror_url']] = True
|
||||
|
||||
cspec_results = bindist.get_mirrors_for_spec(cspec, full_hash_match=True)
|
||||
|
||||
cspec_mirrors = {}
|
||||
for result in cspec_results:
|
||||
s = result['spec']
|
||||
assert(s._full_hash == full_hash_map[s.name])
|
||||
assert(result['mirror_url'] not in cspec_mirrors)
|
||||
cspec_mirrors[result['mirror_url']] = True
|
||||
for s in [gspec, cspec]:
|
||||
results = bindist.get_mirrors_for_spec(s)
|
||||
assert(any([r['spec'] == s for r in results]))
|
||||
|
||||
|
||||
def fake_full_hash(spec):
|
||||
def fake_dag_hash(spec):
|
||||
# Generate an arbitrary hash that is intended to be different than
|
||||
# whatever a Spec reported before (to test actions that trigger when
|
||||
# the hash changes)
|
||||
@@ -430,7 +408,7 @@ def fake_full_hash(spec):
|
||||
'test_mirror'
|
||||
)
|
||||
def test_spec_needs_rebuild(monkeypatch, tmpdir):
|
||||
"""Make sure needs_rebuild properly compares remote full_hash
|
||||
"""Make sure needs_rebuild properly compares remote hash
|
||||
against locally computed one, avoiding unnecessary rebuilds"""
|
||||
|
||||
# Create a temp mirror directory for buildcache usage
|
||||
@@ -445,14 +423,14 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):
|
||||
# Put installed package in the buildcache
|
||||
buildcache_cmd('create', '-u', '-a', '-d', mirror_dir.strpath, s.name)
|
||||
|
||||
rebuild = bindist.needs_rebuild(s, mirror_url, rebuild_on_errors=True)
|
||||
rebuild = bindist.needs_rebuild(s, mirror_url)
|
||||
|
||||
assert not rebuild
|
||||
|
||||
# Now monkey patch Spec to change the full hash on the package
|
||||
monkeypatch.setattr(spack.spec.Spec, 'full_hash', fake_full_hash)
|
||||
# Now monkey patch Spec to change the hash on the package
|
||||
monkeypatch.setattr(spack.spec.Spec, 'dag_hash', fake_dag_hash)
|
||||
|
||||
rebuild = bindist.needs_rebuild(s, mirror_url, rebuild_on_errors=True)
|
||||
rebuild = bindist.needs_rebuild(s, mirror_url)
|
||||
|
||||
assert rebuild
|
||||
|
||||
@@ -624,57 +602,6 @@ def test_install_legacy_yaml(test_legacy_mirror, install_mockery_mutable_config,
|
||||
uninstall_cmd('-y', '/t5mczux3tfqpxwmg7egp7axy2jvyulqk')
|
||||
|
||||
|
||||
@pytest.mark.usefixtures(
|
||||
'install_mockery_mutable_config', 'mock_packages', 'mock_fetch',
|
||||
)
|
||||
def test_update_index_fix_deps(monkeypatch, tmpdir, mutable_config):
|
||||
"""Ensure spack buildcache update-index properly fixes up spec descriptor
|
||||
files on the mirror when updating the buildcache index."""
|
||||
|
||||
# Create a temp mirror directory for buildcache usage
|
||||
mirror_dir = tmpdir.join('mirror_dir')
|
||||
mirror_url = 'file://{0}'.format(mirror_dir.strpath)
|
||||
spack.config.set('mirrors', {'test': mirror_url})
|
||||
|
||||
a = Spec('a').concretized()
|
||||
b = Spec('b').concretized()
|
||||
new_b_full_hash = 'abcdef'
|
||||
|
||||
# Install package a with dep b
|
||||
install_cmd('--no-cache', a.name)
|
||||
|
||||
# Create a buildcache for a and its dep b, and update index
|
||||
buildcache_cmd('create', '-uad', mirror_dir.strpath, a.name)
|
||||
buildcache_cmd('update-index', '-d', mirror_dir.strpath)
|
||||
|
||||
# Simulate an update to b that only affects full hash by simply overwriting
|
||||
# the full hash in the spec.json file on the mirror
|
||||
b_spec_json_name = bindist.tarball_name(b, '.spec.json')
|
||||
b_spec_json_path = os.path.join(mirror_dir.strpath,
|
||||
bindist.build_cache_relative_path(),
|
||||
b_spec_json_name)
|
||||
fs.filter_file(r'"full_hash":\s"\S+"',
|
||||
'"full_hash": "{0}"'.format(new_b_full_hash),
|
||||
b_spec_json_path)
|
||||
# When we update the index, spack should notice that a's notion of the
|
||||
# full hash of b doesn't match b's notion of it's own full hash, and as
|
||||
# a result, spack should fix the spec.json for a
|
||||
buildcache_cmd('update-index', '-d', mirror_dir.strpath)
|
||||
|
||||
# Read in the concrete spec json of a
|
||||
a_spec_json_name = bindist.tarball_name(a, '.spec.json')
|
||||
a_spec_json_path = os.path.join(mirror_dir.strpath,
|
||||
bindist.build_cache_relative_path(),
|
||||
a_spec_json_name)
|
||||
|
||||
# Turn concrete spec json into a concrete spec (a)
|
||||
with open(a_spec_json_path) as fd:
|
||||
a_prime = spec.Spec.from_json(fd.read())
|
||||
|
||||
# Make sure the full hash of b in a's spec json matches the new value
|
||||
assert(a_prime[b.name].full_hash() == new_b_full_hash)
|
||||
|
||||
|
||||
def test_FetchCacheError_only_accepts_lists_of_errors():
|
||||
with pytest.raises(TypeError, match="list"):
|
||||
bindist.FetchCacheError("error")
|
||||
|
||||
@@ -95,6 +95,25 @@ def _ensure(env_mods):
|
||||
return _ensure
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_module_cmd(monkeypatch):
|
||||
|
||||
class Logger(object):
|
||||
def __init__(self, fn=None):
|
||||
self.fn = fn
|
||||
self.calls = []
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
self.calls.append((args, kwargs))
|
||||
if self.fn:
|
||||
return self.fn(*args, **kwargs)
|
||||
|
||||
mock_module_cmd = Logger()
|
||||
monkeypatch.setattr(spack.build_environment, 'module', mock_module_cmd)
|
||||
monkeypatch.setattr(spack.build_environment, '_on_cray', lambda: (True, None))
|
||||
return mock_module_cmd
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.platform == 'win32',
|
||||
reason="Static to Shared not supported on Win (yet)")
|
||||
def test_static_to_shared_library(build_environment):
|
||||
@@ -433,3 +452,23 @@ def test_build_jobs_defaults():
|
||||
parallel=True, command_line=None, config_default=1, max_cpus=10) == 1
|
||||
assert determine_number_of_jobs(
|
||||
parallel=True, command_line=None, config_default=100, max_cpus=10) == 10
|
||||
|
||||
|
||||
def test_dirty_disable_module_unload(
|
||||
config, mock_packages, working_env, mock_module_cmd
|
||||
):
|
||||
"""Test that on CRAY platform 'module unload' is not called if the 'dirty'
|
||||
option is on.
|
||||
"""
|
||||
s = spack.spec.Spec('a').concretized()
|
||||
|
||||
# If called with "dirty" we don't unload modules, so no calls to the
|
||||
# `module` function on Cray
|
||||
spack.build_environment.setup_package(s.package, dirty=True)
|
||||
assert not mock_module_cmd.calls
|
||||
|
||||
# If called without "dirty" we unload modules on Cray
|
||||
spack.build_environment.setup_package(s.package, dirty=False)
|
||||
assert mock_module_cmd.calls
|
||||
assert any(('unload', 'cray-libsci') == item[0] for item in mock_module_cmd.calls)
|
||||
assert any(('unload', 'cray-mpich') == item[0] for item in mock_module_cmd.calls)
|
||||
|
||||
@@ -18,26 +18,27 @@
|
||||
@pytest.fixture(
|
||||
scope='function',
|
||||
params=[
|
||||
('configure', 'autotools'),
|
||||
('CMakeLists.txt', 'cmake'),
|
||||
('project.pro', 'qmake'),
|
||||
('pom.xml', 'maven'),
|
||||
('SConstruct', 'scons'),
|
||||
('waf', 'waf'),
|
||||
('setup.py', 'python'),
|
||||
('NAMESPACE', 'r'),
|
||||
('WORKSPACE', 'bazel'),
|
||||
('Makefile.PL', 'perlmake'),
|
||||
('Build.PL', 'perlbuild'),
|
||||
('foo.gemspec', 'ruby'),
|
||||
('Rakefile', 'ruby'),
|
||||
('setup.rb', 'ruby'),
|
||||
('GNUmakefile', 'makefile'),
|
||||
('makefile', 'makefile'),
|
||||
('Makefile', 'makefile'),
|
||||
('meson.build', 'meson'),
|
||||
('configure.py', 'sip'),
|
||||
('foobar', 'generic')
|
||||
('configure', 'autotools'),
|
||||
('CMakeLists.txt', 'cmake'),
|
||||
('project.pro', 'qmake'),
|
||||
('pom.xml', 'maven'),
|
||||
('SConstruct', 'scons'),
|
||||
('waf', 'waf'),
|
||||
('argbah.rockspec', 'lua'),
|
||||
('setup.py', 'python'),
|
||||
('NAMESPACE', 'r'),
|
||||
('WORKSPACE', 'bazel'),
|
||||
('Makefile.PL', 'perlmake'),
|
||||
('Build.PL', 'perlbuild'),
|
||||
('foo.gemspec', 'ruby'),
|
||||
('Rakefile', 'ruby'),
|
||||
('setup.rb', 'ruby'),
|
||||
('GNUmakefile', 'makefile'),
|
||||
('makefile', 'makefile'),
|
||||
('Makefile', 'makefile'),
|
||||
('meson.build', 'meson'),
|
||||
('configure.py', 'sip'),
|
||||
('foobar', 'generic')
|
||||
]
|
||||
)
|
||||
def url_and_build_system(request, tmpdir):
|
||||
|
||||
@@ -44,7 +44,7 @@ def tmp_scope():
|
||||
def test_urlencode_string():
|
||||
s = 'Spack Test Project'
|
||||
|
||||
s_enc = ci.url_encode_string(s)
|
||||
s_enc = ci._url_encode_string(s)
|
||||
|
||||
assert(s_enc == 'Spack+Test+Project')
|
||||
|
||||
@@ -96,9 +96,9 @@ def test_get_concrete_specs(config, mutable_mock_env_path, mock_packages):
|
||||
|
||||
with e as active_env:
|
||||
for s in active_env.all_specs():
|
||||
hash_dict[s.name] = s.build_hash()
|
||||
hash_dict[s.name] = s.dag_hash()
|
||||
if s.name == 'dyninst':
|
||||
dyninst_hash = s.build_hash()
|
||||
dyninst_hash = s.dag_hash()
|
||||
|
||||
assert(dyninst_hash)
|
||||
|
||||
@@ -107,7 +107,7 @@ def test_get_concrete_specs(config, mutable_mock_env_path, mock_packages):
|
||||
assert 'root' in spec_map
|
||||
|
||||
concrete_root = spec_map['root']
|
||||
assert(concrete_root.build_hash() == dyninst_hash)
|
||||
assert(concrete_root.dag_hash() == dyninst_hash)
|
||||
|
||||
s = spec.Spec('dyninst')
|
||||
print('nonconc spec name: {0}'.format(s.name))
|
||||
|
||||
@@ -122,13 +122,13 @@ def test_specs_staging(config):
|
||||
spec_a = Spec('a')
|
||||
spec_a.concretize()
|
||||
|
||||
spec_a_label = ci.spec_deps_key(spec_a)
|
||||
spec_b_label = ci.spec_deps_key(spec_a['b'])
|
||||
spec_c_label = ci.spec_deps_key(spec_a['c'])
|
||||
spec_d_label = ci.spec_deps_key(spec_a['d'])
|
||||
spec_e_label = ci.spec_deps_key(spec_a['e'])
|
||||
spec_f_label = ci.spec_deps_key(spec_a['f'])
|
||||
spec_g_label = ci.spec_deps_key(spec_a['g'])
|
||||
spec_a_label = ci._spec_deps_key(spec_a)
|
||||
spec_b_label = ci._spec_deps_key(spec_a['b'])
|
||||
spec_c_label = ci._spec_deps_key(spec_a['c'])
|
||||
spec_d_label = ci._spec_deps_key(spec_a['d'])
|
||||
spec_e_label = ci._spec_deps_key(spec_a['e'])
|
||||
spec_f_label = ci._spec_deps_key(spec_a['f'])
|
||||
spec_g_label = ci._spec_deps_key(spec_a['g'])
|
||||
|
||||
spec_labels, dependencies, stages = ci.stage_spec_jobs([spec_a])
|
||||
|
||||
@@ -767,19 +767,13 @@ def test_ci_rebuild(tmpdir, mutable_mock_env_path,
|
||||
shutil.copyfile(env.lock_path,
|
||||
os.path.join(env_dir.strpath, 'spack.lock'))
|
||||
|
||||
root_spec_build_hash = None
|
||||
job_spec_dag_hash = None
|
||||
job_spec_full_hash = None
|
||||
root_spec_dag_hash = None
|
||||
|
||||
for h, s in env.specs_by_hash.items():
|
||||
if s.name == 'archive-files':
|
||||
root_spec_build_hash = h
|
||||
job_spec_dag_hash = s.dag_hash()
|
||||
job_spec_full_hash = s.full_hash()
|
||||
root_spec_dag_hash = h
|
||||
|
||||
assert root_spec_build_hash
|
||||
assert job_spec_dag_hash
|
||||
assert job_spec_full_hash
|
||||
assert root_spec_dag_hash
|
||||
|
||||
def fake_cdash_register(build_name, base_url, project, site, track):
|
||||
return ('fakebuildid', 'fakestamp')
|
||||
@@ -801,8 +795,8 @@ def fake_cdash_register(build_name, base_url, project, site, track):
|
||||
'SPACK_CONCRETE_ENV_DIR': env_dir.strpath,
|
||||
'CI_PIPELINE_ID': '7192',
|
||||
'SPACK_SIGNING_KEY': signing_key,
|
||||
'SPACK_ROOT_SPEC': root_spec_build_hash,
|
||||
'SPACK_JOB_SPEC_DAG_HASH': job_spec_dag_hash,
|
||||
'SPACK_ROOT_SPEC': root_spec_dag_hash,
|
||||
'SPACK_JOB_SPEC_DAG_HASH': root_spec_dag_hash,
|
||||
'SPACK_JOB_SPEC_PKG_NAME': 'archive-files',
|
||||
'SPACK_COMPILER_ACTION': 'NONE',
|
||||
'SPACK_CDASH_BUILD_NAME': '(specs) archive-files',
|
||||
@@ -816,8 +810,8 @@ def fake_cdash_register(build_name, base_url, project, site, track):
|
||||
|
||||
expected_repro_files = [
|
||||
'install.sh',
|
||||
'root.yaml',
|
||||
'archive-files.yaml',
|
||||
'root.json',
|
||||
'archive-files.json',
|
||||
'spack.yaml',
|
||||
'spack.lock'
|
||||
]
|
||||
@@ -839,14 +833,13 @@ def mystrip(s):
|
||||
install_parts = [mystrip(s) for s in install_line.split(' ')]
|
||||
|
||||
assert('--keep-stage' in install_parts)
|
||||
assert('--require-full-hash-match' in install_parts)
|
||||
assert('--no-check-signature' not in install_parts)
|
||||
assert('--no-add' in install_parts)
|
||||
assert('-f' in install_parts)
|
||||
flag_index = install_parts.index('-f')
|
||||
assert('archive-files.yaml' in install_parts[flag_index + 1])
|
||||
assert('archive-files.json' in install_parts[flag_index + 1])
|
||||
|
||||
broken_spec_file = os.path.join(broken_specs_path, job_spec_full_hash)
|
||||
broken_spec_file = os.path.join(broken_specs_path, root_spec_dag_hash)
|
||||
with open(broken_spec_file) as fd:
|
||||
broken_spec_content = fd.read()
|
||||
assert(ci_job_url in broken_spec_content)
|
||||
@@ -894,13 +887,11 @@ def test_ci_nothing_to_rebuild(tmpdir, mutable_mock_env_path,
|
||||
env_cmd('create', 'test', './spack.yaml')
|
||||
with ev.read('test') as env:
|
||||
env.concretize()
|
||||
root_spec_build_hash = None
|
||||
job_spec_dag_hash = None
|
||||
root_spec_dag_hash = None
|
||||
|
||||
for h, s in env.specs_by_hash.items():
|
||||
if s.name == 'archive-files':
|
||||
root_spec_build_hash = h
|
||||
job_spec_dag_hash = s.dag_hash()
|
||||
root_spec_dag_hash = h
|
||||
|
||||
# Create environment variables as gitlab would do it
|
||||
os.environ.update({
|
||||
@@ -909,8 +900,8 @@ def test_ci_nothing_to_rebuild(tmpdir, mutable_mock_env_path,
|
||||
'SPACK_JOB_REPRO_DIR': 'repro_dir',
|
||||
'SPACK_LOCAL_MIRROR_DIR': mirror_dir.strpath,
|
||||
'SPACK_CONCRETE_ENV_DIR': tmpdir.strpath,
|
||||
'SPACK_ROOT_SPEC': root_spec_build_hash,
|
||||
'SPACK_JOB_SPEC_DAG_HASH': job_spec_dag_hash,
|
||||
'SPACK_ROOT_SPEC': root_spec_dag_hash,
|
||||
'SPACK_JOB_SPEC_DAG_HASH': root_spec_dag_hash,
|
||||
'SPACK_JOB_SPEC_PKG_NAME': 'archive-files',
|
||||
'SPACK_COMPILER_ACTION': 'NONE',
|
||||
'SPACK_REMOTE_MIRROR_URL': mirror_url,
|
||||
@@ -980,7 +971,7 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path,
|
||||
spec_map = ci.get_concrete_specs(
|
||||
env, 'patchelf', 'patchelf', 'FIND_ANY')
|
||||
concrete_spec = spec_map['patchelf']
|
||||
spec_json = concrete_spec.to_json(hash=ht.build_hash)
|
||||
spec_json = concrete_spec.to_json(hash=ht.dag_hash)
|
||||
json_path = str(tmpdir.join('spec.json'))
|
||||
with open(json_path, 'w') as ypfd:
|
||||
ypfd.write(spec_json)
|
||||
@@ -1323,12 +1314,12 @@ def test_ci_rebuild_index(tmpdir, mutable_mock_env_path,
|
||||
spec_map = ci.get_concrete_specs(
|
||||
env, 'callpath', 'callpath', 'FIND_ANY')
|
||||
concrete_spec = spec_map['callpath']
|
||||
spec_yaml = concrete_spec.to_yaml(hash=ht.build_hash)
|
||||
yaml_path = str(tmpdir.join('spec.yaml'))
|
||||
with open(yaml_path, 'w') as ypfd:
|
||||
ypfd.write(spec_yaml)
|
||||
spec_json = concrete_spec.to_json(hash=ht.dag_hash)
|
||||
json_path = str(tmpdir.join('spec.json'))
|
||||
with open(json_path, 'w') as ypfd:
|
||||
ypfd.write(spec_json)
|
||||
|
||||
install_cmd('--keep-stage', '-f', yaml_path)
|
||||
install_cmd('--keep-stage', '-f', json_path)
|
||||
buildcache_cmd('create', '-u', '-a', '-f', '--mirror-url',
|
||||
mirror_url, 'callpath')
|
||||
ci_cmd('rebuild-index')
|
||||
@@ -1412,8 +1403,8 @@ def test_ci_generate_bootstrap_prune_dag(
|
||||
# nothing in the environment needs rebuilding. With the monkeypatch, the
|
||||
# process sees the compiler as needing a rebuild, which should then result
|
||||
# in the specs built with that compiler needing a rebuild too.
|
||||
def fake_get_mirrors_for_spec(spec=None, full_hash_match=False,
|
||||
mirrors_to_check=None, index_only=False):
|
||||
def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None,
|
||||
index_only=False):
|
||||
if spec.name == 'gcc':
|
||||
return []
|
||||
else:
|
||||
@@ -1674,14 +1665,14 @@ def test_ci_generate_read_broken_specs_url(tmpdir, mutable_mock_env_path,
|
||||
"""Verify that `broken-specs-url` works as intended"""
|
||||
spec_a = Spec('a')
|
||||
spec_a.concretize()
|
||||
a_full_hash = spec_a.full_hash()
|
||||
a_dag_hash = spec_a.dag_hash()
|
||||
|
||||
spec_flattendeps = Spec('flatten-deps')
|
||||
spec_flattendeps.concretize()
|
||||
flattendeps_full_hash = spec_flattendeps.full_hash()
|
||||
flattendeps_dag_hash = spec_flattendeps.dag_hash()
|
||||
|
||||
# Mark 'a' as broken (but not 'flatten-deps')
|
||||
broken_spec_a_path = str(tmpdir.join(a_full_hash))
|
||||
broken_spec_a_path = str(tmpdir.join(a_dag_hash))
|
||||
with open(broken_spec_a_path, 'w') as bsf:
|
||||
bsf.write('')
|
||||
|
||||
@@ -1718,10 +1709,10 @@ def test_ci_generate_read_broken_specs_url(tmpdir, mutable_mock_env_path,
|
||||
output = ci_cmd('generate', output=str, fail_on_error=False)
|
||||
assert('known to be broken' in output)
|
||||
|
||||
ex = '({0})'.format(a_full_hash)
|
||||
ex = '({0})'.format(a_dag_hash)
|
||||
assert(ex in output)
|
||||
|
||||
ex = '({0})'.format(flattendeps_full_hash)
|
||||
ex = '({0})'.format(flattendeps_dag_hash)
|
||||
assert(ex not in output)
|
||||
|
||||
|
||||
@@ -1776,15 +1767,15 @@ def test_ci_reproduce(tmpdir, mutable_mock_env_path,
|
||||
root_spec = s
|
||||
job_spec = s
|
||||
|
||||
job_spec_yaml_path = os.path.join(
|
||||
working_dir.strpath, 'archivefiles.yaml')
|
||||
with open(job_spec_yaml_path, 'w') as fd:
|
||||
fd.write(job_spec.to_yaml(hash=ht.build_hash))
|
||||
job_spec_json_path = os.path.join(
|
||||
working_dir.strpath, 'archivefiles.json')
|
||||
with open(job_spec_json_path, 'w') as fd:
|
||||
fd.write(job_spec.to_json(hash=ht.dag_hash))
|
||||
|
||||
root_spec_yaml_path = os.path.join(
|
||||
working_dir.strpath, 'root.yaml')
|
||||
with open(root_spec_yaml_path, 'w') as fd:
|
||||
fd.write(root_spec.to_yaml(hash=ht.build_hash))
|
||||
root_spec_json_path = os.path.join(
|
||||
working_dir.strpath, 'root.json')
|
||||
with open(root_spec_json_path, 'w') as fd:
|
||||
fd.write(root_spec.to_json(hash=ht.dag_hash))
|
||||
|
||||
artifacts_root = os.path.join(working_dir.strpath, 'scratch_dir')
|
||||
pipeline_path = os.path.join(artifacts_root, 'pipeline.yml')
|
||||
@@ -1798,8 +1789,8 @@ def test_ci_reproduce(tmpdir, mutable_mock_env_path,
|
||||
repro_file = os.path.join(working_dir.strpath, 'repro.json')
|
||||
repro_details = {
|
||||
'job_name': job_name,
|
||||
'job_spec_yaml': 'archivefiles.yaml',
|
||||
'root_spec_yaml': 'root.yaml',
|
||||
'job_spec_json': 'archivefiles.json',
|
||||
'root_spec_json': 'root.json',
|
||||
'ci_project_dir': working_dir.strpath
|
||||
}
|
||||
with open(repro_file, 'w') as fd:
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import filecmp
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
@@ -17,16 +18,18 @@
|
||||
import spack.cmd.env
|
||||
import spack.environment as ev
|
||||
import spack.environment.shell
|
||||
import spack.hash_types as ht
|
||||
import spack.modules
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.util.spack_json as sjson
|
||||
from spack.cmd.env import _env_create
|
||||
from spack.main import SpackCommand, SpackCommandError
|
||||
from spack.spec import Spec
|
||||
from spack.stage import stage_prefix
|
||||
from spack.util.executable import Executable
|
||||
from spack.util.mock_package import MockPackageMultiRepo
|
||||
from spack.util.path import substitute_path_variables
|
||||
from spack.version import Version
|
||||
|
||||
# TODO-27021
|
||||
# everything here uses the mock_env_path
|
||||
@@ -162,7 +165,7 @@ def test_env_install_all(install_mockery, mock_fetch):
|
||||
e.install_all()
|
||||
env_specs = e._get_environment_specs()
|
||||
spec = next(x for x in env_specs if x.name == 'cmake-client')
|
||||
assert spec.package.installed
|
||||
assert spec.installed
|
||||
|
||||
|
||||
def test_env_install_single_spec(install_mockery, mock_fetch):
|
||||
@@ -966,87 +969,6 @@ def test_uninstall_removes_from_env(mock_stage, mock_fetch, install_mockery):
|
||||
assert not test.user_specs
|
||||
|
||||
|
||||
def create_v1_lockfile_dict(roots, all_specs):
|
||||
test_lockfile_dict = {
|
||||
"_meta": {
|
||||
"lockfile-version": 1,
|
||||
"file-type": "spack-lockfile"
|
||||
},
|
||||
"roots": list(
|
||||
{
|
||||
"hash": s.dag_hash(),
|
||||
"spec": s.name
|
||||
} for s in roots
|
||||
),
|
||||
# Version one lockfiles use the dag hash without build deps as keys,
|
||||
# but they write out the full node dict (including build deps)
|
||||
"concrete_specs": dict(
|
||||
(s.dag_hash(), s.to_node_dict(hash=ht.build_hash))
|
||||
for s in all_specs
|
||||
)
|
||||
}
|
||||
return test_lockfile_dict
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('config')
|
||||
def test_read_old_lock_and_write_new(tmpdir):
|
||||
build_only = ('build',)
|
||||
|
||||
mock_repo = MockPackageMultiRepo()
|
||||
y = mock_repo.add_package('y', [], [])
|
||||
mock_repo.add_package('x', [y], [build_only])
|
||||
|
||||
with spack.repo.use_repositories(mock_repo):
|
||||
x = Spec('x')
|
||||
x.concretize()
|
||||
|
||||
y = x['y']
|
||||
|
||||
test_lockfile_dict = create_v1_lockfile_dict([x], [x, y])
|
||||
|
||||
test_lockfile_path = str(tmpdir.join('test.lock'))
|
||||
with open(test_lockfile_path, 'w') as f:
|
||||
sjson.dump(test_lockfile_dict, stream=f)
|
||||
|
||||
_env_create('test', test_lockfile_path, with_view=False)
|
||||
|
||||
e = ev.read('test')
|
||||
hashes = set(e._to_lockfile_dict()['concrete_specs'])
|
||||
# When the lockfile is rewritten, it should adopt the new hash scheme
|
||||
# which accounts for all dependencies, including build dependencies
|
||||
assert hashes == set([
|
||||
x.build_hash(),
|
||||
y.build_hash()])
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('config')
|
||||
def test_read_old_lock_creates_backup(tmpdir):
|
||||
"""When reading a version-1 lockfile, make sure that a backup of that file
|
||||
is created.
|
||||
"""
|
||||
|
||||
mock_repo = MockPackageMultiRepo()
|
||||
y = mock_repo.add_package('y', [], [])
|
||||
|
||||
with spack.repo.use_repositories(mock_repo):
|
||||
y = Spec('y')
|
||||
y.concretize()
|
||||
|
||||
test_lockfile_dict = create_v1_lockfile_dict([y], [y])
|
||||
|
||||
env_root = tmpdir.mkdir('test-root')
|
||||
test_lockfile_path = str(env_root.join(ev.lockfile_name))
|
||||
with open(test_lockfile_path, 'w') as f:
|
||||
sjson.dump(test_lockfile_dict, stream=f)
|
||||
|
||||
e = ev.Environment(str(env_root))
|
||||
assert os.path.exists(e._lock_backup_v1_path)
|
||||
with open(e._lock_backup_v1_path, 'r') as backup_v1_file:
|
||||
lockfile_dict_v1 = sjson.load(backup_v1_file)
|
||||
# Make sure that the backup file follows the v1 hash scheme
|
||||
assert y.dag_hash() in lockfile_dict_v1['concrete_specs']
|
||||
|
||||
|
||||
@pytest.mark.usefixtures('config')
|
||||
def test_indirect_build_dep():
|
||||
"""Simple case of X->Y->Z where Y is a build/link dep and Z is a
|
||||
@@ -1113,11 +1035,11 @@ def noop(*args):
|
||||
x_spec = Spec('x ^z@2')
|
||||
x_concretized = x_spec.concretized()
|
||||
|
||||
# Even though x chose a different 'z', it should choose the same y
|
||||
# according to the DAG hash (since build deps are excluded from
|
||||
# comparison by default). Although the dag hashes are equal, the specs
|
||||
# are not considered equal because they compare build deps.
|
||||
assert x_concretized['y'].dag_hash() == y_concretized.dag_hash()
|
||||
# Even though x chose a different 'z', the y it chooses should be identical
|
||||
# *aside* from the dependency on 'z'. The dag_hash() will show the difference
|
||||
# in build dependencies.
|
||||
assert x_concretized['y'].eq_node(y_concretized)
|
||||
assert x_concretized['y'].dag_hash() != y_concretized.dag_hash()
|
||||
|
||||
_env_create('test', with_view=False)
|
||||
e = ev.read('test')
|
||||
@@ -1132,7 +1054,13 @@ def noop(*args):
|
||||
y_read = e_read.specs_by_hash[y_env_hash]
|
||||
x_read = e_read.specs_by_hash[x_env_hash]
|
||||
|
||||
# make sure the DAG hashes and build deps are preserved after
|
||||
# a round trip to/from the lockfile
|
||||
assert x_read['z'] != y_read['z']
|
||||
assert x_read['z'].dag_hash() != y_read['z'].dag_hash()
|
||||
|
||||
assert x_read['y'].eq_node(y_read)
|
||||
assert x_read['y'].dag_hash() != y_read.dag_hash()
|
||||
|
||||
|
||||
def test_env_updates_view_install(
|
||||
@@ -1546,8 +1474,10 @@ def test_stack_yaml_force_remove_from_matrix(tmpdir):
|
||||
def test_stack_concretize_extraneous_deps(tmpdir, config, mock_packages):
|
||||
# FIXME: The new concretizer doesn't handle yet soft
|
||||
# FIXME: constraints for stacks
|
||||
if spack.config.get('config:concretizer') == 'clingo':
|
||||
pytest.skip('Clingo concretizer does not support soft constraints')
|
||||
# FIXME: This now works for statically-determinable invalid deps
|
||||
# FIXME: But it still does not work for dynamically determined invalid deps
|
||||
# if spack.config.get('config:concretizer') == 'clingo':
|
||||
# pytest.skip('Clingo concretizer does not support soft constraints')
|
||||
|
||||
filename = str(tmpdir.join('spack.yaml'))
|
||||
with open(filename, 'w') as f:
|
||||
@@ -2466,19 +2396,19 @@ def test_newline_in_commented_sequence_is_not_an_issue(tmpdir):
|
||||
abspath = tmpdir.join('spack.yaml')
|
||||
abspath.write(spack_yaml)
|
||||
|
||||
def extract_build_hash(environment):
|
||||
def extract_dag_hash(environment):
|
||||
_, dyninst = next(iter(environment.specs_by_hash.items()))
|
||||
return dyninst['libelf'].build_hash()
|
||||
return dyninst['libelf'].dag_hash()
|
||||
|
||||
# Concretize a first time and create a lockfile
|
||||
with ev.Environment(str(tmpdir)) as e:
|
||||
concretize()
|
||||
libelf_first_hash = extract_build_hash(e)
|
||||
libelf_first_hash = extract_dag_hash(e)
|
||||
|
||||
# Check that a second run won't error
|
||||
with ev.Environment(str(tmpdir)) as e:
|
||||
concretize()
|
||||
libelf_second_hash = extract_build_hash(e)
|
||||
libelf_second_hash = extract_dag_hash(e)
|
||||
|
||||
assert libelf_first_hash == libelf_second_hash
|
||||
|
||||
@@ -2596,7 +2526,6 @@ def test_does_not_rewrite_rel_dev_path_when_keep_relative_is_set(tmpdir):
|
||||
_, _, _, spack_yaml = _setup_develop_packages(tmpdir)
|
||||
env('create', '--keep-relative', 'named_env', str(spack_yaml))
|
||||
with ev.read('named_env') as e:
|
||||
print(e.dev_specs)
|
||||
assert e.dev_specs['mypkg1']['path'] == '../build_folder'
|
||||
assert e.dev_specs['mypkg2']['path'] == '/some/other/path'
|
||||
|
||||
@@ -2843,3 +2772,199 @@ def test_environment_view_target_already_exists(
|
||||
# Make sure the dir was left untouched.
|
||||
assert not os.path.lexists(view)
|
||||
assert os.listdir(real_view) == ['file']
|
||||
|
||||
|
||||
def test_environment_query_spec_by_hash(mock_stage, mock_fetch, install_mockery):
|
||||
env('create', 'test')
|
||||
with ev.read('test'):
|
||||
add('libdwarf')
|
||||
concretize()
|
||||
with ev.read('test') as e:
|
||||
spec = e.matching_spec('libelf')
|
||||
install('/{0}'.format(spec.dag_hash()))
|
||||
with ev.read('test') as e:
|
||||
assert not e.matching_spec('libdwarf').installed
|
||||
assert e.matching_spec('libelf').installed
|
||||
|
||||
|
||||
@pytest.mark.parametrize("lockfile", ["v1", "v2", "v3"])
|
||||
def test_read_old_lock_and_write_new(config, tmpdir, lockfile):
|
||||
# v1 lockfiles stored by a coarse DAG hash that did not include build deps.
|
||||
# They could not represent multiple build deps with different build hashes.
|
||||
#
|
||||
# v2 and v3 lockfiles are keyed by a "build hash", so they can represent specs
|
||||
# with different build deps but the same DAG hash. However, those two specs
|
||||
# could never have been built together, because they cannot coexist in a
|
||||
# Spack DB, which is keyed by DAG hash. The second one would just be a no-op
|
||||
# no-op because its DAG hash was already in the DB.
|
||||
#
|
||||
# Newer Spack uses a fine-grained DAG hash that includes build deps, package hash,
|
||||
# and more. But, we still have to identify old specs by their original DAG hash.
|
||||
# Essentially, the name (hash) we give something in Spack at concretization time is
|
||||
# its name forever (otherwise we'd need to relocate prefixes and disrupt existing
|
||||
# installations). So, we just discard the second conflicting dtbuild1 version when
|
||||
# reading v2 and v3 lockfiles. This is what old Spack would've done when installing
|
||||
# the environment, anyway.
|
||||
#
|
||||
# This test ensures the behavior described above.
|
||||
lockfile_path = os.path.join(
|
||||
spack.paths.test_path, "data", "legacy_env", "%s.lock" % lockfile
|
||||
)
|
||||
|
||||
# read in the JSON from a legacy lockfile
|
||||
with open(lockfile_path) as f:
|
||||
old_dict = sjson.load(f)
|
||||
|
||||
# read all DAG hashes from the legacy lockfile and record its shadowed DAG hash.
|
||||
old_hashes = set()
|
||||
shadowed_hash = None
|
||||
for key, spec_dict in old_dict["concrete_specs"].items():
|
||||
if "hash" not in spec_dict:
|
||||
# v1 and v2 key specs by their name in concrete_specs
|
||||
name, spec_dict = next(iter(spec_dict.items()))
|
||||
else:
|
||||
# v3 lockfiles have a `name` field and key by hash
|
||||
name = spec_dict["name"]
|
||||
|
||||
# v1 lockfiles do not have a "hash" field -- they use the key.
|
||||
dag_hash = key if lockfile == "v1" else spec_dict["hash"]
|
||||
old_hashes.add(dag_hash)
|
||||
|
||||
# v1 lockfiles can't store duplicate build dependencies, so they
|
||||
# will not have a shadowed hash.
|
||||
if lockfile != "v1":
|
||||
# v2 and v3 lockfiles store specs by build hash, so they can have multiple
|
||||
# keys for the same DAG hash. We discard the second one (dtbuild@1.0).
|
||||
if name == "dtbuild1" and spec_dict["version"] == "1.0":
|
||||
shadowed_hash = dag_hash
|
||||
|
||||
# make an env out of the old lockfile -- env should be able to read v1/v2/v3
|
||||
test_lockfile_path = str(tmpdir.join("test.lock"))
|
||||
shutil.copy(lockfile_path, test_lockfile_path)
|
||||
_env_create("test", test_lockfile_path, with_view=False)
|
||||
|
||||
# re-read the old env as a new lockfile
|
||||
e = ev.read("test")
|
||||
hashes = set(e._to_lockfile_dict()["concrete_specs"])
|
||||
|
||||
# v1 doesn't have duplicate build deps.
|
||||
# in v2 and v3, the shadowed hash will be gone.
|
||||
if shadowed_hash:
|
||||
old_hashes -= set([shadowed_hash])
|
||||
|
||||
# make sure we see the same hashes in old and new lockfiles
|
||||
assert old_hashes == hashes
|
||||
|
||||
|
||||
def test_read_v1_lock_creates_backup(config, tmpdir):
|
||||
"""When reading a version-1 lockfile, make sure that a backup of that file
|
||||
is created.
|
||||
"""
|
||||
# read in the JSON from a legacy v1 lockfile
|
||||
v1_lockfile_path = os.path.join(
|
||||
spack.paths.test_path, "data", "legacy_env", "v1.lock"
|
||||
)
|
||||
|
||||
# make an env out of the old lockfile
|
||||
test_lockfile_path = str(tmpdir.join(ev.lockfile_name))
|
||||
shutil.copy(v1_lockfile_path, test_lockfile_path)
|
||||
|
||||
e = ev.Environment(str(tmpdir))
|
||||
assert os.path.exists(e._lock_backup_v1_path)
|
||||
assert filecmp.cmp(e._lock_backup_v1_path, v1_lockfile_path)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("lockfile", ["v1", "v2", "v3"])
|
||||
def test_read_legacy_lockfile_and_reconcretize(
|
||||
mock_stage, mock_fetch, install_mockery, lockfile
|
||||
):
|
||||
# In legacy lockfiles v2 and v3 (keyed by build hash), there may be multiple
|
||||
# versions of the same spec with different build dependencies, which means
|
||||
# they will have different build hashes but the same DAG hash.
|
||||
# In the case of DAG hash conflicts, we always keep the spec associated with
|
||||
# whichever root spec came first in the "roots" list.
|
||||
#
|
||||
# After reconcretization with the *new*, finer-grained DAG hash, there should no
|
||||
# longer be conflicts, and the previously conflicting specs can coexist in the
|
||||
# same environment.
|
||||
legacy_lockfile_path = os.path.join(
|
||||
spack.paths.test_path, "data", "legacy_env", "%s.lock" % lockfile
|
||||
)
|
||||
|
||||
# The order of the root specs in this environment is:
|
||||
# [
|
||||
# wci7a3a -> dttop ^dtbuild1@0.5,
|
||||
# 5zg6wxw -> dttop ^dtbuild1@1.0
|
||||
# ]
|
||||
# So in v2 and v3 lockfiles we have two versions of dttop with the same DAG
|
||||
# hash but different build hashes.
|
||||
|
||||
env('create', 'test', legacy_lockfile_path)
|
||||
test = ev.read('test')
|
||||
assert len(test.specs_by_hash) == 1
|
||||
|
||||
single_root = next(iter(test.specs_by_hash.values()))
|
||||
|
||||
# v1 only has version 1.0, because v1 was keyed by DAG hash, and v1.0 overwrote
|
||||
# v0.5 on lockfile creation. v2 only has v0.5, because we specifically prefer
|
||||
# the one that would be installed when we read old lockfiles.
|
||||
if lockfile == "v1":
|
||||
assert single_root['dtbuild1'].version == Version('1.0')
|
||||
else:
|
||||
assert single_root['dtbuild1'].version == Version('0.5')
|
||||
|
||||
# Now forcefully reconcretize
|
||||
with ev.read('test'):
|
||||
concretize('-f')
|
||||
|
||||
# After reconcretizing, we should again see two roots, one depending on each
|
||||
# of the dtbuild1 versions specified in the roots of the original lockfile.
|
||||
test = ev.read('test')
|
||||
assert len(test.specs_by_hash) == 2
|
||||
|
||||
expected_versions = set([Version('0.5'), Version('1.0')])
|
||||
current_versions = set(s['dtbuild1'].version for s in test.specs_by_hash.values())
|
||||
assert current_versions == expected_versions
|
||||
|
||||
|
||||
def test_environment_depfile_makefile(tmpdir, mock_packages):
|
||||
env('create', 'test')
|
||||
make = Executable('make')
|
||||
makefile = str(tmpdir.join('Makefile'))
|
||||
with ev.read('test'):
|
||||
add('libdwarf')
|
||||
concretize()
|
||||
|
||||
# Disable jobserver so we can do a dry run.
|
||||
with ev.read('test'):
|
||||
env('depfile', '-o', makefile, '--make-disable-jobserver',
|
||||
'--make-target-prefix', 'prefix')
|
||||
|
||||
# Do make dry run.
|
||||
all_out = make('-n', '-f', makefile, output=str)
|
||||
|
||||
# Check whether `make` installs everything
|
||||
with ev.read('test') as e:
|
||||
for _, root in e.concretized_specs():
|
||||
for spec in root.traverse(root=True):
|
||||
for task in ('.fetch', '.install'):
|
||||
tgt = os.path.join('prefix', task, spec.dag_hash())
|
||||
assert 'touch {}'.format(tgt) in all_out
|
||||
|
||||
# Check whether make prefix/fetch-all only fetches
|
||||
fetch_out = make('prefix/fetch-all', '-n', '-f', makefile, output=str)
|
||||
assert '.install/' not in fetch_out
|
||||
assert '.fetch/' in fetch_out
|
||||
|
||||
|
||||
def test_environment_depfile_out(tmpdir, mock_packages):
|
||||
env('create', 'test')
|
||||
makefile_path = str(tmpdir.join('Makefile'))
|
||||
with ev.read('test'):
|
||||
add('libdwarf')
|
||||
concretize()
|
||||
with ev.read('test'):
|
||||
env('depfile', '-G', 'make', '-o', makefile_path)
|
||||
stdout = env('depfile', '-G', 'make')
|
||||
with open(makefile_path, 'r') as f:
|
||||
assert stdout == f.read()
|
||||
|
||||
@@ -167,6 +167,48 @@ def test_find_external_cmd_full_repo(
|
||||
assert {'spec': 'find-externals1@1.foo', 'prefix': prefix} in pkg_externals
|
||||
|
||||
|
||||
def test_find_external_no_manifest(
|
||||
mutable_config, working_env, mock_executable, mutable_mock_repo,
|
||||
_platform_executables, monkeypatch):
|
||||
"""The user runs 'spack external find'; the default path for storing
|
||||
manifest files does not exist. Ensure that the command does not
|
||||
fail.
|
||||
"""
|
||||
monkeypatch.setenv('PATH', '')
|
||||
monkeypatch.setattr(spack.cray_manifest, 'default_path',
|
||||
os.path.join('a', 'path', 'that', 'doesnt', 'exist'))
|
||||
external('find')
|
||||
|
||||
|
||||
def test_find_external_empty_default_manifest_dir(
|
||||
mutable_config, working_env, mock_executable, mutable_mock_repo,
|
||||
_platform_executables, tmpdir, monkeypatch):
|
||||
"""The user runs 'spack external find'; the default path for storing
|
||||
manifest files exists but is empty. Ensure that the command does not
|
||||
fail.
|
||||
"""
|
||||
empty_manifest_dir = str(tmpdir.mkdir('manifest_dir'))
|
||||
monkeypatch.setenv('PATH', '')
|
||||
monkeypatch.setattr(spack.cray_manifest, 'default_path',
|
||||
empty_manifest_dir)
|
||||
external('find')
|
||||
|
||||
|
||||
def test_find_external_nonempty_default_manifest_dir(
|
||||
mutable_database, mutable_mock_repo,
|
||||
_platform_executables, tmpdir, monkeypatch,
|
||||
directory_with_manifest):
|
||||
"""The user runs 'spack external find'; the default manifest directory
|
||||
contains a manifest file. Ensure that the specs are read.
|
||||
"""
|
||||
monkeypatch.setenv('PATH', '')
|
||||
monkeypatch.setattr(spack.cray_manifest, 'default_path',
|
||||
str(directory_with_manifest))
|
||||
external('find')
|
||||
specs = spack.store.db.query('hwloc')
|
||||
assert any(x.dag_hash() == 'hwlocfakehashaaa' for x in specs)
|
||||
|
||||
|
||||
def test_find_external_merge(mutable_config, mutable_mock_repo):
|
||||
"""Check that 'spack find external' doesn't overwrite an existing spec
|
||||
entry in packages.yaml.
|
||||
|
||||
@@ -26,18 +26,14 @@ def parser():
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def info_lines():
|
||||
lines = []
|
||||
return lines
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def mock_print(monkeypatch, info_lines):
|
||||
def print_buffer(monkeypatch):
|
||||
buffer = []
|
||||
|
||||
def _print(*args):
|
||||
info_lines.extend(args)
|
||||
buffer.extend(args)
|
||||
|
||||
monkeypatch.setattr(spack.cmd.info.color, 'cprint', _print, raising=False)
|
||||
return buffer
|
||||
|
||||
|
||||
@pytest.mark.parametrize('pkg', [
|
||||
@@ -52,11 +48,11 @@ def test_it_just_runs(pkg):
|
||||
info(pkg)
|
||||
|
||||
|
||||
def test_info_noversion(mock_packages, info_lines, mock_print):
|
||||
def test_info_noversion(mock_packages, print_buffer):
|
||||
"""Check that a mock package with no versions or variants outputs None."""
|
||||
info('noversion')
|
||||
|
||||
line_iter = info_lines.__iter__()
|
||||
line_iter = iter(print_buffer)
|
||||
for line in line_iter:
|
||||
if 'version' in line:
|
||||
has = [desc in line for desc in ['Preferred', 'Safe', 'Deprecated']]
|
||||
@@ -72,12 +68,11 @@ def test_info_noversion(mock_packages, info_lines, mock_print):
|
||||
('zlib', 'False'),
|
||||
('gcc', 'True (version, variants)'),
|
||||
])
|
||||
@pytest.mark.usefixtures('mock_print')
|
||||
def test_is_externally_detectable(pkg_query, expected, parser, info_lines):
|
||||
def test_is_externally_detectable(pkg_query, expected, parser, print_buffer):
|
||||
args = parser.parse_args(['--detectable', pkg_query])
|
||||
spack.cmd.info.info(parser, args)
|
||||
|
||||
line_iter = info_lines.__iter__()
|
||||
line_iter = iter(print_buffer)
|
||||
for line in line_iter:
|
||||
if 'Externally Detectable' in line:
|
||||
is_externally_detectable = next(line_iter).strip()
|
||||
@@ -90,9 +85,7 @@ def test_is_externally_detectable(pkg_query, expected, parser, info_lines):
|
||||
'trilinos',
|
||||
'gcc' # This should ensure --test's c_names processing loop covered
|
||||
])
|
||||
@pytest.mark.usefixtures('mock_print')
|
||||
def test_info_fields(pkg_query, parser, info_lines):
|
||||
|
||||
def test_info_fields(pkg_query, parser, print_buffer):
|
||||
expected_fields = (
|
||||
'Description:',
|
||||
'Homepage:',
|
||||
@@ -108,5 +101,4 @@ def test_info_fields(pkg_query, parser, info_lines):
|
||||
spack.cmd.info.info(parser, args)
|
||||
|
||||
for text in expected_fields:
|
||||
match = [x for x in info_lines if text in x]
|
||||
assert match
|
||||
assert any(x for x in print_buffer if text in x)
|
||||
|
||||
@@ -7,7 +7,6 @@
|
||||
import filecmp
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import time
|
||||
|
||||
@@ -87,7 +86,6 @@ def check(pkg):
|
||||
|
||||
monkeypatch.setattr(spack.package.PackageBase, 'unit_test_check', check)
|
||||
install('--test=all', 'a')
|
||||
install('--run-tests', 'a')
|
||||
|
||||
|
||||
def test_install_package_already_installed(
|
||||
@@ -534,7 +532,7 @@ def test_cdash_report_concretization_error(tmpdir, mock_fetch, install_mockery,
|
||||
# new or the old concretizer
|
||||
expected_messages = (
|
||||
'Conflicts in concretized spec',
|
||||
'A conflict was triggered',
|
||||
'conflicts with',
|
||||
)
|
||||
assert any(x in content for x in expected_messages)
|
||||
|
||||
@@ -622,20 +620,20 @@ def test_cdash_buildstamp_param(tmpdir, mock_fetch, install_mockery, capfd):
|
||||
|
||||
|
||||
@pytest.mark.disable_clean_stage_check
|
||||
def test_cdash_install_from_spec_yaml(tmpdir, mock_fetch, install_mockery,
|
||||
def test_cdash_install_from_spec_json(tmpdir, mock_fetch, install_mockery,
|
||||
capfd, mock_packages, mock_archive,
|
||||
config):
|
||||
# capfd interferes with Spack's capturing
|
||||
with capfd.disabled():
|
||||
with tmpdir.as_cwd():
|
||||
|
||||
spec_yaml_path = str(tmpdir.join('spec.yaml'))
|
||||
spec_json_path = str(tmpdir.join('spec.json'))
|
||||
|
||||
pkg_spec = Spec('a')
|
||||
pkg_spec.concretize()
|
||||
|
||||
with open(spec_yaml_path, 'w') as fd:
|
||||
fd.write(pkg_spec.to_yaml(hash=ht.build_hash))
|
||||
with open(spec_json_path, 'w') as fd:
|
||||
fd.write(pkg_spec.to_json(hash=ht.dag_hash))
|
||||
|
||||
install(
|
||||
'--log-format=cdash',
|
||||
@@ -643,7 +641,7 @@ def test_cdash_install_from_spec_yaml(tmpdir, mock_fetch, install_mockery,
|
||||
'--cdash-build=my_custom_build',
|
||||
'--cdash-site=my_custom_site',
|
||||
'--cdash-track=my_custom_track',
|
||||
'-f', spec_yaml_path)
|
||||
'-f', spec_json_path)
|
||||
|
||||
report_dir = tmpdir.join('cdash_reports')
|
||||
assert report_dir in tmpdir.listdir()
|
||||
@@ -847,14 +845,14 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery,
|
||||
post_install_specs = e.all_specs()
|
||||
assert all([s in env_specs for s in post_install_specs])
|
||||
|
||||
# Make sure we can install a concrete dependency spec from a spec.yaml
|
||||
# Make sure we can install a concrete dependency spec from a spec.json
|
||||
# file on disk, using the ``--no-add` option, and the spec is installed
|
||||
# but not added as a root
|
||||
mpi_spec_yaml_path = tmpdir.join('{0}.yaml'.format(mpi_spec.name))
|
||||
with open(mpi_spec_yaml_path.strpath, 'w') as fd:
|
||||
fd.write(mpi_spec.to_yaml(hash=ht.build_hash))
|
||||
mpi_spec_json_path = tmpdir.join('{0}.json'.format(mpi_spec.name))
|
||||
with open(mpi_spec_json_path.strpath, 'w') as fd:
|
||||
fd.write(mpi_spec.to_json(hash=ht.dag_hash))
|
||||
|
||||
install('--no-add', '-f', mpi_spec_yaml_path.strpath)
|
||||
install('--no-add', '-f', mpi_spec_json_path.strpath)
|
||||
assert(mpi_spec not in e.roots())
|
||||
|
||||
find_output = find('-l', output=str)
|
||||
@@ -1017,76 +1015,6 @@ def test_install_fails_no_args_suggests_env_activation(tmpdir):
|
||||
assert 'using the `spack.yaml` in this directory' in output
|
||||
|
||||
|
||||
default_full_hash = spack.spec.Spec.full_hash
|
||||
|
||||
|
||||
def fake_full_hash(spec):
|
||||
# Generate an arbitrary hash that is intended to be different than
|
||||
# whatever a Spec reported before (to test actions that trigger when
|
||||
# the hash changes)
|
||||
if spec.name == 'libdwarf':
|
||||
return 'tal4c7h4z0gqmixb1eqa92mjoybxn5l6'
|
||||
return default_full_hash(spec)
|
||||
|
||||
|
||||
def test_cache_install_full_hash_match(
|
||||
install_mockery_mutable_config, mock_packages, mock_fetch,
|
||||
mock_archive, mutable_config, monkeypatch, tmpdir):
|
||||
"""Make sure installing from cache respects full hash argument"""
|
||||
|
||||
# Create a temp mirror directory for buildcache usage
|
||||
mirror_dir = tmpdir.join('mirror_dir')
|
||||
mirror_url = 'file://{0}'.format(mirror_dir.strpath)
|
||||
|
||||
s = Spec('libdwarf').concretized()
|
||||
package_id = spack.installer.package_id(s.package)
|
||||
|
||||
# Install a package
|
||||
install(s.name)
|
||||
|
||||
# Put installed package in the buildcache
|
||||
buildcache('create', '-u', '-a', '-f', '-d', mirror_dir.strpath, s.name)
|
||||
|
||||
# Now uninstall the package
|
||||
uninstall('-y', s.name)
|
||||
|
||||
# Configure the mirror with the binary package in it
|
||||
mirror('add', 'test-mirror', mirror_url)
|
||||
|
||||
# Make sure we get the binary version by default
|
||||
install_output = install('--no-check-signature', s.name, output=str)
|
||||
expect_extract_msg = 'Extracting {0} from binary cache'.format(package_id)
|
||||
|
||||
assert expect_extract_msg in install_output
|
||||
|
||||
uninstall('-y', s.name)
|
||||
|
||||
# Now monkey patch Spec to change the full hash on the package
|
||||
monkeypatch.setattr(spack.spec.Spec, 'full_hash', fake_full_hash)
|
||||
|
||||
# Check that even if the full hash changes, we install from binary when
|
||||
# we don't explicitly require the full hash to match
|
||||
install_output = install('--no-check-signature', s.name, output=str)
|
||||
assert expect_extract_msg in install_output
|
||||
|
||||
uninstall('-y', s.name)
|
||||
|
||||
# Finally, make sure that if we insist on the full hash match, spack
|
||||
# installs from source.
|
||||
install_output = install('--require-full-hash-match', s.name, output=str)
|
||||
expect_msg = 'No binary for {0} found: installing from source'.format(
|
||||
package_id)
|
||||
|
||||
assert expect_msg in install_output
|
||||
|
||||
uninstall('-y', s.name)
|
||||
mirror('rm', 'test-mirror')
|
||||
|
||||
# Get rid of that libdwarf binary in the mirror so other tests don't try to
|
||||
# use it and fail because of NoVerifyException
|
||||
shutil.rmtree(mirror_dir.strpath)
|
||||
|
||||
|
||||
def test_install_env_with_tests_all(tmpdir, mock_packages, mock_fetch,
|
||||
install_mockery, mutable_mock_env_path):
|
||||
env('create', 'test')
|
||||
@@ -1117,3 +1045,16 @@ def test_install_empty_env(tmpdir, mock_packages, mock_fetch,
|
||||
assert env_name in out
|
||||
assert 'environment' in out
|
||||
assert 'no specs to install' in out
|
||||
|
||||
|
||||
@pytest.mark.disable_clean_stage_check
|
||||
@pytest.mark.parametrize('name,method', [
|
||||
('test-build-callbacks', 'undefined-build-test'),
|
||||
('test-install-callbacks', 'undefined-install-test')
|
||||
])
|
||||
def test_install_callbacks_fail(install_mockery, mock_fetch, name, method):
|
||||
output = install('--test=root', '--no-cache', name, fail_on_error=False)
|
||||
|
||||
assert output.count(method) == 2
|
||||
assert output.count('method not implemented') == 1
|
||||
assert output.count('TestFailure: 1 tests failed') == 1
|
||||
|
||||
@@ -54,7 +54,8 @@ def mock_pkg_git_repo(tmpdir_factory):
|
||||
git('init')
|
||||
|
||||
# initial commit with mock packages
|
||||
git('add', '.')
|
||||
# the -f is necessary in case people ignore build-* in their ignores
|
||||
git('add', '-f', '.')
|
||||
git('config', 'user.email', 'testing@spack.io')
|
||||
git('config', 'user.name', 'Spack Testing')
|
||||
git('-c', 'commit.gpgsign=false', 'commit',
|
||||
|
||||
@@ -79,6 +79,11 @@ def test_spec_json():
|
||||
assert 'mpich' in mpileaks
|
||||
|
||||
|
||||
def test_spec_format(database, config):
|
||||
output = spec('--format', '{name}-{^mpi.name}', 'mpileaks^mpich')
|
||||
assert output.rstrip('\n') == "mpileaks-mpich"
|
||||
|
||||
|
||||
def _parse_types(string):
|
||||
"""Parse deptypes for specs from `spack spec -t` output."""
|
||||
lines = string.strip().split('\n')
|
||||
|
||||
@@ -9,9 +9,12 @@
|
||||
|
||||
import pytest
|
||||
|
||||
from llnl.util.filesystem import copy_tree
|
||||
|
||||
import spack.cmd.install
|
||||
import spack.config
|
||||
import spack.package
|
||||
import spack.paths
|
||||
import spack.store
|
||||
from spack.main import SpackCommand
|
||||
|
||||
@@ -218,6 +221,8 @@ def test_test_list_all(mock_packages):
|
||||
"simple-standalone-test",
|
||||
"test-error",
|
||||
"test-fail",
|
||||
"test-build-callbacks",
|
||||
"test-install-callbacks"
|
||||
])
|
||||
|
||||
|
||||
@@ -240,30 +245,25 @@ def test_has_test_method_fails(capsys):
|
||||
assert 'is not a class' in captured
|
||||
|
||||
|
||||
def test_hash_change(mock_test_stage, mock_packages, mock_archive, mock_fetch,
|
||||
install_mockery_mutable_config):
|
||||
"""Ensure output printed from pkgs is captured by output redirection."""
|
||||
install('printing-package')
|
||||
spack_test('run', '--alias', 'printpkg', 'printing-package')
|
||||
def test_read_old_results(mock_test_stage):
|
||||
"""Take test data generated before the switch to full hash everywhere
|
||||
and make sure we can still read it in"""
|
||||
# Test data was generated with:
|
||||
# spack install printing-package
|
||||
# spack test run --alias printpkg printing-package
|
||||
|
||||
stage_files = os.listdir(mock_test_stage)
|
||||
test_data_src = os.path.join(
|
||||
spack.paths.test_path, 'data', 'test', 'test_stage')
|
||||
|
||||
# Grab test stage directory contents
|
||||
testdir = os.path.join(mock_test_stage, stage_files[0])
|
||||
# Copy the old test data into the mock stage directory
|
||||
copy_tree(test_data_src, mock_test_stage)
|
||||
|
||||
outfile = os.path.join(testdir, 'test_suite.lock')
|
||||
with open(outfile, 'r') as f:
|
||||
output = f.read()
|
||||
changed_hash = output.replace(
|
||||
spack.store.db.query('printing-package')[0].full_hash(),
|
||||
'fakehash492ucwhwvzhxfbmcc45x49ha')
|
||||
with open(outfile, 'w') as f:
|
||||
f.write(changed_hash)
|
||||
|
||||
# The find command should show the contents
|
||||
# The find command should print info about the old test, under
|
||||
# the alias used at test generation time
|
||||
find_output = spack_test('find')
|
||||
assert 'printpkg' in find_output
|
||||
# The results should be obtainable
|
||||
|
||||
# The results command should still print the old test results
|
||||
results_output = spack_test('results')
|
||||
assert 'PASSED' in results_output
|
||||
|
||||
|
||||
@@ -501,7 +501,7 @@ def test_gcc_flags():
|
||||
unsupported_flag_test("cxx14_flag", "gcc@4.7")
|
||||
supported_flag_test("cxx14_flag", "-std=c++1y", "gcc@4.8")
|
||||
supported_flag_test("cxx14_flag", "-std=c++14", "gcc@4.9")
|
||||
supported_flag_test("cxx14_flag", "", "gcc@6.0")
|
||||
supported_flag_test("cxx14_flag", "-std=c++14", "gcc@6.0")
|
||||
unsupported_flag_test("cxx17_flag", "gcc@4.9")
|
||||
supported_flag_test("cxx17_flag", "-std=c++1z", "gcc@5.0")
|
||||
supported_flag_test("cxx17_flag", "-std=c++17", "gcc@6.0")
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
import jinja2
|
||||
@@ -15,6 +16,7 @@
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
import spack.error
|
||||
import spack.hash_types as ht
|
||||
import spack.platforms
|
||||
import spack.repo
|
||||
import spack.variant as vt
|
||||
@@ -129,6 +131,8 @@ class Root(Package):
|
||||
|
||||
version(1.0, sha256='abcde')
|
||||
depends_on('changing')
|
||||
|
||||
conflicts('changing~foo')
|
||||
"""
|
||||
packages_dir.join('root', 'package.py').write(
|
||||
root_pkg_str, ensure=True
|
||||
@@ -139,36 +143,91 @@ class Changing(Package):
|
||||
homepage = "http://www.example.com"
|
||||
url = "http://www.example.com/changing-1.0.tar.gz"
|
||||
|
||||
|
||||
{% if not delete_version %}
|
||||
version(1.0, sha256='abcde')
|
||||
{% endif %}
|
||||
version(0.9, sha256='abcde')
|
||||
|
||||
{% if not delete_variant %}
|
||||
variant('fee', default=True, description='nope')
|
||||
{% endif %}
|
||||
variant('foo', default=True, description='nope')
|
||||
{% if add_variant %}
|
||||
variant('fum', default=True, description='nope')
|
||||
variant('fum2', default=True, description='nope')
|
||||
{% endif %}
|
||||
"""
|
||||
repo = spack.repo.Repo(str(repo_dir))
|
||||
mutable_mock_repo.put_first(repo)
|
||||
|
||||
class _ChangingPackage(object):
|
||||
def change(self, context):
|
||||
# To ensure we get the changed package we need to
|
||||
# invalidate the cache
|
||||
repo._modules = {}
|
||||
default_context = [
|
||||
('delete_version', True),
|
||||
('delete_variant', False),
|
||||
('add_variant', False)
|
||||
]
|
||||
|
||||
def __init__(self, repo_directory):
|
||||
self.repo_dir = repo_directory
|
||||
self.repo = spack.repo.Repo(str(repo_directory))
|
||||
mutable_mock_repo.put_first(self.repo)
|
||||
|
||||
def change(self, changes=None):
|
||||
changes = changes or {}
|
||||
context = dict(self.default_context)
|
||||
context.update(changes)
|
||||
# Remove the repo object and delete Python modules
|
||||
mutable_mock_repo.remove(self.repo)
|
||||
# TODO: this mocks a change in the recipe that should happen in a
|
||||
# TODO: different process space. Leaving this comment as a hint
|
||||
# TODO: in case tests using this fixture start failing.
|
||||
if sys.modules.get('spack.pkg.changing.changing'):
|
||||
del sys.modules['spack.pkg.changing.changing']
|
||||
del sys.modules['spack.pkg.changing.root']
|
||||
del sys.modules['spack.pkg.changing']
|
||||
|
||||
# Change the recipe
|
||||
t = jinja2.Template(changing_template)
|
||||
changing_pkg_str = t.render(**context)
|
||||
packages_dir.join('changing', 'package.py').write(
|
||||
changing_pkg_str, ensure=True
|
||||
)
|
||||
|
||||
_changing_pkg = _ChangingPackage()
|
||||
_changing_pkg.change({'delete_variant': False, 'add_variant': False})
|
||||
# Re-add the repository
|
||||
self.repo = spack.repo.Repo(str(self.repo_dir))
|
||||
mutable_mock_repo.put_first(self.repo)
|
||||
|
||||
_changing_pkg = _ChangingPackage(repo_dir)
|
||||
_changing_pkg.change({
|
||||
'delete_version': False,
|
||||
'delete_variant': False,
|
||||
'add_variant': False
|
||||
})
|
||||
|
||||
return _changing_pkg
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def additional_repo_with_c(tmpdir_factory, mutable_mock_repo):
|
||||
"""Add a repository with a simple package"""
|
||||
repo_dir = tmpdir_factory.mktemp('myrepo')
|
||||
repo_dir.join('repo.yaml').write("""
|
||||
repo:
|
||||
namespace: myrepo
|
||||
""", ensure=True)
|
||||
packages_dir = repo_dir.ensure('packages', dir=True)
|
||||
package_py = """
|
||||
class C(Package):
|
||||
homepage = "http://www.example.com"
|
||||
url = "http://www.example.com/root-1.0.tar.gz"
|
||||
|
||||
version(1.0, sha256='abcde')
|
||||
"""
|
||||
packages_dir.join('c', 'package.py').write(package_py, ensure=True)
|
||||
repo = spack.repo.Repo(str(repo_dir))
|
||||
mutable_mock_repo.put_first(repo)
|
||||
return repo
|
||||
|
||||
|
||||
# This must use the mutable_config fixture because the test
|
||||
# adjusting_default_target_based_on_compiler uses the current_host fixture,
|
||||
# which changes the config.
|
||||
@@ -579,14 +638,11 @@ def test_conflicts_show_cores(self, conflict_spec, monkeypatch):
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.skip('Testing debug statements specific to new concretizer')
|
||||
|
||||
monkeypatch.setattr(spack.solver.asp, 'full_cores', True)
|
||||
monkeypatch.setattr(spack.solver.asp, 'minimize_cores', False)
|
||||
|
||||
s = Spec(conflict_spec)
|
||||
with pytest.raises(spack.error.SpackError) as e:
|
||||
s.concretize()
|
||||
|
||||
assert "conflict_trigger(" in e.value.message
|
||||
assert "conflict" in e.value.message
|
||||
|
||||
def test_conflict_in_all_directives_true(self):
|
||||
s = Spec('when-directives-true')
|
||||
@@ -1022,8 +1078,6 @@ def test_external_packages_have_consistent_hash(self):
|
||||
s._old_concretize(), t._new_concretize()
|
||||
|
||||
assert s.dag_hash() == t.dag_hash()
|
||||
assert s.build_hash() == t.build_hash()
|
||||
assert s.full_hash() == t.full_hash()
|
||||
|
||||
def test_external_that_would_require_a_virtual_dependency(self):
|
||||
s = Spec('requires-virtual').concretized()
|
||||
@@ -1210,10 +1264,12 @@ def mock_fn(*args, **kwargs):
|
||||
{'add_variant': False, 'delete_variant': True},
|
||||
{'add_variant': True, 'delete_variant': True}
|
||||
])
|
||||
@pytest.mark.xfail()
|
||||
def test_reuse_installed_packages_when_package_def_changes(
|
||||
self, context, mutable_database, repo_with_changing_recipe
|
||||
):
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.xfail('Known failure of the original concretizer')
|
||||
|
||||
# Install a spec
|
||||
root = Spec('root').concretized()
|
||||
dependency = root['changing'].copy()
|
||||
@@ -1223,11 +1279,21 @@ def test_reuse_installed_packages_when_package_def_changes(
|
||||
repo_with_changing_recipe.change(context)
|
||||
|
||||
# Try to concretize with the spec installed previously
|
||||
new_root = Spec('root ^/{0}'.format(
|
||||
new_root_with_reuse = Spec('root ^/{0}'.format(
|
||||
dependency.dag_hash())
|
||||
).concretized()
|
||||
|
||||
assert root.dag_hash() == new_root.dag_hash()
|
||||
new_root_without_reuse = Spec('root').concretized()
|
||||
|
||||
# validate that the graphs are the same with reuse, but not without
|
||||
assert ht.build_hash(root) == ht.build_hash(new_root_with_reuse)
|
||||
assert ht.build_hash(root) != ht.build_hash(new_root_without_reuse)
|
||||
|
||||
# DAG hash should be the same with reuse since only the dependency changed
|
||||
assert root.dag_hash() == new_root_with_reuse.dag_hash()
|
||||
|
||||
# Structure and package hash will be different without reuse
|
||||
assert root.dag_hash() != new_root_without_reuse.dag_hash()
|
||||
|
||||
@pytest.mark.regression('20784')
|
||||
def test_concretization_of_test_dependencies(self):
|
||||
@@ -1422,7 +1488,7 @@ def test_concrete_specs_are_not_modified_on_reuse(
|
||||
# the answer set produced by clingo.
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
s = spack.spec.Spec(spec_str).concretized()
|
||||
assert s.package.installed is expect_installed
|
||||
assert s.installed is expect_installed
|
||||
assert s.satisfies(spec_str, strict=True)
|
||||
|
||||
@pytest.mark.regression('26721,19736')
|
||||
@@ -1485,3 +1551,120 @@ def test_conditional_values_in_conditional_variant(self):
|
||||
|
||||
s = Spec('conditional-values-in-variant@1.60.0').concretized()
|
||||
assert 'cxxstd' in s.variants
|
||||
|
||||
def test_target_granularity(self):
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.skip(
|
||||
'Original concretizer cannot account for target granularity'
|
||||
)
|
||||
|
||||
# The test architecture uses core2 as the default target. Check that when
|
||||
# we configure Spack for "generic" granularity we concretize for x86_64
|
||||
s = Spec('python')
|
||||
assert s.concretized().satisfies('target=core2')
|
||||
with spack.config.override('concretizer:targets', {'granularity': 'generic'}):
|
||||
assert s.concretized().satisfies('target=x86_64')
|
||||
|
||||
def test_host_compatible_concretization(self):
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.skip(
|
||||
'Original concretizer cannot account for host compatibility'
|
||||
)
|
||||
|
||||
# Check that with "host_compatible: true" we cannot concretize for an incompatible target.
|
||||
# Here we use "k10" to set a target non-compatible with the current host
|
||||
# to avoid a lot of boilerplate when mocking the test platform. The issue
|
||||
# is that the defaults for the test platform are very old, so there's no
|
||||
# compiler supporting e.g. icelake etc.
|
||||
s = Spec('python target=k10')
|
||||
assert s.concretized()
|
||||
with spack.config.override('concretizer:targets', {'host_compatible': True}):
|
||||
with pytest.raises(spack.error.SpackError):
|
||||
s.concretized()
|
||||
|
||||
def test_add_microarchitectures_on_explicit_request(self):
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.skip(
|
||||
'Original concretizer cannot account for host compatibility'
|
||||
)
|
||||
|
||||
# Check that if we consider only "generic" targets, we can still solve for
|
||||
# specific microarchitectures on explicit requests
|
||||
with spack.config.override('concretizer:targets', {'granularity': 'generic'}):
|
||||
s = Spec('python target=k10').concretized()
|
||||
assert s.satisfies('target=k10')
|
||||
|
||||
@pytest.mark.regression('29201')
|
||||
def test_delete_version_and_reuse(
|
||||
self, mutable_database, repo_with_changing_recipe
|
||||
):
|
||||
"""Test that we can reuse installed specs with versions not
|
||||
declared in package.py
|
||||
"""
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.xfail('Known failure of the original concretizer')
|
||||
|
||||
root = Spec('root').concretized()
|
||||
root.package.do_install(fake=True, explicit=True)
|
||||
repo_with_changing_recipe.change({'delete_version': True})
|
||||
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
new_root = Spec('root').concretized()
|
||||
|
||||
assert root.dag_hash() == new_root.dag_hash()
|
||||
|
||||
@pytest.mark.regression('29201')
|
||||
def test_installed_version_is_selected_only_for_reuse(
|
||||
self, mutable_database, repo_with_changing_recipe
|
||||
):
|
||||
"""Test that a version coming from an installed spec is a possible
|
||||
version only for reuse
|
||||
"""
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.xfail('Known failure of the original concretizer')
|
||||
|
||||
# Install a dependency that cannot be reused with "root"
|
||||
# because of a conflict on a variant, then delete its version
|
||||
dependency = Spec('changing@1.0~foo').concretized()
|
||||
dependency.package.do_install(fake=True, explicit=True)
|
||||
repo_with_changing_recipe.change({'delete_version': True})
|
||||
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
new_root = Spec('root').concretized()
|
||||
|
||||
assert not new_root['changing'].satisfies('@1.0')
|
||||
|
||||
@pytest.mark.regression('28259')
|
||||
def test_reuse_with_unknown_namespace_dont_raise(
|
||||
self, additional_repo_with_c, mutable_mock_repo
|
||||
):
|
||||
s = Spec('c').concretized()
|
||||
assert s.namespace == 'myrepo'
|
||||
s.package.do_install(fake=True, explicit=True)
|
||||
|
||||
# TODO: To mock repo removal we need to recreate the RepoPath
|
||||
mutable_mock_repo.remove(additional_repo_with_c)
|
||||
spack.repo.path = spack.repo.RepoPath(*spack.repo.path.repos)
|
||||
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
s = Spec('c').concretized()
|
||||
assert s.namespace == 'builtin.mock'
|
||||
|
||||
@pytest.mark.regression('28259')
|
||||
def test_reuse_with_unknown_package_dont_raise(
|
||||
self, additional_repo_with_c, mutable_mock_repo, monkeypatch
|
||||
):
|
||||
s = Spec('c').concretized()
|
||||
assert s.namespace == 'myrepo'
|
||||
s.package.do_install(fake=True, explicit=True)
|
||||
|
||||
# Here we delete the package.py instead of removing the repo and we
|
||||
# make it such that "c" doesn't exist in myrepo
|
||||
del sys.modules['spack.pkg.myrepo.c']
|
||||
c_dir = os.path.join(additional_repo_with_c.root, 'packages', 'c')
|
||||
shutil.rmtree(c_dir)
|
||||
monkeypatch.setattr(additional_repo_with_c, 'exists', lambda x: False)
|
||||
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
s = Spec('c').concretized()
|
||||
assert s.namespace == 'builtin.mock'
|
||||
|
||||
@@ -43,6 +43,7 @@
|
||||
import spack.stage
|
||||
import spack.store
|
||||
import spack.subprocess_context
|
||||
import spack.test.cray_manifest
|
||||
import spack.util.executable
|
||||
import spack.util.gpg
|
||||
import spack.util.spack_yaml as syaml
|
||||
@@ -809,7 +810,16 @@ def database(mock_store, mock_packages, config):
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def mutable_database(database, _store_dir_and_cache):
|
||||
def database_mutable_config(mock_store, mock_packages, mutable_config,
|
||||
monkeypatch):
|
||||
"""This activates the mock store, packages, AND config."""
|
||||
with spack.store.use_store(str(mock_store)) as store:
|
||||
yield store.db
|
||||
store.db.last_seen_verifier = ''
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def mutable_database(database_mutable_config, _store_dir_and_cache):
|
||||
"""Writeable version of the fixture, restored to its initial state
|
||||
after each test.
|
||||
"""
|
||||
@@ -817,7 +827,7 @@ def mutable_database(database, _store_dir_and_cache):
|
||||
store_path, store_cache = _store_dir_and_cache
|
||||
store_path.join('.spack-db').chmod(mode=0o755, rec=1)
|
||||
|
||||
yield database
|
||||
yield database_mutable_config
|
||||
|
||||
# Restore the initial state by copying the content of the cache back into
|
||||
# the store and making the database read-only
|
||||
@@ -1247,11 +1257,15 @@ def mock_git_repository(tmpdir_factory):
|
||||
|______/_____________________/
|
||||
c0 (r0)
|
||||
|
||||
There are two branches aside from 'master': 'test-branch' and 'tag-branch';
|
||||
We used to test with 'master', but git has since developed the ability to
|
||||
have differently named default branches, so now we query the user's config to
|
||||
determine what the default branch should be.
|
||||
|
||||
There are two branches aside from 'default': 'test-branch' and 'tag-branch';
|
||||
each has one commit; the tag-branch has a tag referring to its commit
|
||||
(c2 in the diagram).
|
||||
|
||||
Two submodules are added as part of the very first commit on 'master'; each
|
||||
Two submodules are added as part of the very first commit on 'default'; each
|
||||
of these refers to a repository with a single commit.
|
||||
|
||||
c0, c1, and c2 include information to define explicit versions in the
|
||||
@@ -1325,7 +1339,16 @@ def mock_git_repository(tmpdir_factory):
|
||||
tag = 'test-tag'
|
||||
git('tag', tag)
|
||||
|
||||
git('checkout', 'master')
|
||||
try:
|
||||
default_branch = git(
|
||||
'config',
|
||||
'--get',
|
||||
'init.defaultBranch',
|
||||
output=str,
|
||||
).strip()
|
||||
except Exception:
|
||||
default_branch = 'master'
|
||||
git('checkout', default_branch)
|
||||
|
||||
r2_file = 'r2_file'
|
||||
repodir.ensure(r2_file)
|
||||
@@ -1333,7 +1356,7 @@ def mock_git_repository(tmpdir_factory):
|
||||
git('-c', 'commit.gpgsign=false', 'commit', '-m', 'mock-git-repo r2')
|
||||
|
||||
rev_hash = lambda x: git('rev-parse', x, output=str).strip()
|
||||
r2 = rev_hash('master')
|
||||
r2 = rev_hash(default_branch)
|
||||
|
||||
# Record the commit hash of the (only) commit from test-branch and
|
||||
# the file added by that commit
|
||||
@@ -1346,8 +1369,8 @@ def mock_git_repository(tmpdir_factory):
|
||||
# revision for the version; a file associated with (and particular to)
|
||||
# that revision/branch.
|
||||
checks = {
|
||||
'master': Bunch(
|
||||
revision='master', file=r0_file, args={'git': url}
|
||||
'default': Bunch(
|
||||
revision=default_branch, file=r0_file, args={'git': url}
|
||||
),
|
||||
'branch': Bunch(
|
||||
revision=branch, file=branch_file, args={
|
||||
@@ -1368,8 +1391,8 @@ def mock_git_repository(tmpdir_factory):
|
||||
# In this case, the version() args do not include a 'git' key:
|
||||
# this is the norm for packages, so this tests how the fetching logic
|
||||
# would most-commonly assemble a Git fetcher
|
||||
'master-no-per-version-git': Bunch(
|
||||
revision='master', file=r0_file, args={'branch': 'master'}
|
||||
'default-no-per-version-git': Bunch(
|
||||
revision=default_branch, file=r0_file, args={'branch': default_branch}
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1635,6 +1658,19 @@ def brand_new_binary_cache():
|
||||
spack.binary_distribution._binary_index)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def directory_with_manifest(tmpdir):
|
||||
"""Create a manifest file in a directory. Used by 'spack external'.
|
||||
"""
|
||||
with tmpdir.as_cwd():
|
||||
test_db_fname = 'external-db.json'
|
||||
with open(test_db_fname, 'w') as db_file:
|
||||
json.dump(spack.test.cray_manifest.create_manifest_content(),
|
||||
db_file)
|
||||
|
||||
yield str(tmpdir)
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def noncyclical_dir_structure(tmpdir):
|
||||
"""
|
||||
|
||||
338 lib/spack/spack/test/cray_manifest.py Normal file
@@ -0,0 +1,338 @@
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
"""
|
||||
Note that where possible, this should produce specs using `entries_to_specs`
|
||||
rather than `spec_from_entry`, since the former does additional work to
|
||||
establish dependency relationships (and in general the manifest-parsing
|
||||
logic needs to consume all related specs in a single pass).
|
||||
"""
|
||||
import json
|
||||
|
||||
import pytest
|
||||
|
||||
import spack
|
||||
import spack.cray_manifest as cray_manifest
|
||||
from spack.cray_manifest import compiler_from_entry, entries_to_specs
|
||||
|
||||
example_x_json_str = """\
|
||||
{
|
||||
"name": "packagex",
|
||||
"hash": "hash-of-x",
|
||||
"prefix": "/path/to/packagex-install/",
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "linux",
|
||||
"platform_os": "centos8",
|
||||
"target": {
|
||||
"name": "haswell"
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "gcc",
|
||||
"version": "10.2.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"packagey": {
|
||||
"hash": "hash-of-y",
|
||||
"type": ["link"]
|
||||
}
|
||||
},
|
||||
"parameters": {
|
||||
"precision": ["double", "float"]
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
example_compiler_entry = """\
|
||||
{
|
||||
"name": "gcc",
|
||||
"prefix": "/path/to/compiler/",
|
||||
"version": "7.5.0",
|
||||
"arch": {
|
||||
"os": "centos8",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"executables": {
|
||||
"cc": "/path/to/compiler/cc",
|
||||
"cxx": "/path/to/compiler/cxx",
|
||||
"fc": "/path/to/compiler/fc"
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
class JsonSpecEntry(object):
|
||||
def __init__(self, name, hash, prefix, version, arch, compiler,
|
||||
dependencies, parameters):
|
||||
self.name = name
|
||||
self.hash = hash
|
||||
self.prefix = prefix
|
||||
self.version = version
|
||||
self.arch = arch
|
||||
self.compiler = compiler
|
||||
self.dependencies = dependencies
|
||||
self.parameters = parameters
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
'name': self.name,
|
||||
'hash': self.hash,
|
||||
'prefix': self.prefix,
|
||||
'version': self.version,
|
||||
'arch': self.arch,
|
||||
'compiler': self.compiler,
|
||||
'dependencies': self.dependencies,
|
||||
'parameters': self.parameters
|
||||
}
|
||||
|
||||
def as_dependency(self, deptypes):
|
||||
return (self.name,
|
||||
{'hash': self.hash,
|
||||
'type': list(deptypes)})
|
||||
|
||||
|
||||
class JsonArchEntry(object):
|
||||
def __init__(self, platform, os, target):
|
||||
self.platform = platform
|
||||
self.os = os
|
||||
self.target = target
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
'platform': self.platform,
|
||||
'platform_os': self.os,
|
||||
'target': {
|
||||
'name': self.target
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class JsonCompilerEntry(object):
|
||||
def __init__(self, name, version, arch=None, executables=None):
|
||||
self.name = name
|
||||
self.version = version
|
||||
if not arch:
|
||||
arch = {
|
||||
"os": "centos8",
|
||||
"target": "x86_64"
|
||||
}
|
||||
if not executables:
|
||||
executables = {
|
||||
"cc": "/path/to/compiler/cc",
|
||||
"cxx": "/path/to/compiler/cxx",
|
||||
"fc": "/path/to/compiler/fc"
|
||||
}
|
||||
self.arch = arch
|
||||
self.executables = executables
|
||||
|
||||
def compiler_json(self):
|
||||
return {
|
||||
'name': self.name,
|
||||
'version': self.version,
|
||||
'arch': self.arch,
|
||||
'executables': self.executables,
|
||||
}
|
||||
|
||||
def spec_json(self):
|
||||
"""The compiler spec only lists the name/version, not
|
||||
arch/executables.
|
||||
"""
|
||||
return {
|
||||
'name': self.name,
|
||||
'version': self.version,
|
||||
}
|
||||
|
||||
|
||||
_common_arch = JsonArchEntry(
|
||||
platform='linux',
|
||||
os='centos8',
|
||||
target='haswell'
|
||||
).to_dict()
|
||||
|
||||
# Intended to match example_compiler_entry above
|
||||
_common_compiler = JsonCompilerEntry(
|
||||
name='gcc',
|
||||
version='10.2.0',
|
||||
arch={
|
||||
"os": "centos8",
|
||||
"target": "x86_64"
|
||||
},
|
||||
executables={
|
||||
"cc": "/path/to/compiler/cc",
|
||||
"cxx": "/path/to/compiler/cxx",
|
||||
"fc": "/path/to/compiler/fc"
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def test_compatibility():
|
||||
"""Make sure that JsonSpecEntry outputs the expected JSON structure
|
||||
by comparing it with JSON parsed from an example string. This
|
||||
ensures that the testing objects like JsonSpecEntry produce the
|
||||
same JSON structure as the expected file format.
|
||||
"""
|
||||
y = JsonSpecEntry(
|
||||
name='packagey',
|
||||
hash='hash-of-y',
|
||||
prefix='/path/to/packagey-install/',
|
||||
version='1.0',
|
||||
arch=_common_arch,
|
||||
compiler=_common_compiler.spec_json(),
|
||||
dependencies={},
|
||||
parameters={}
|
||||
)
|
||||
|
||||
x = JsonSpecEntry(
|
||||
name='packagex',
|
||||
hash='hash-of-x',
|
||||
prefix='/path/to/packagex-install/',
|
||||
version='1.0',
|
||||
arch=_common_arch,
|
||||
compiler=_common_compiler.spec_json(),
|
||||
dependencies=dict([y.as_dependency(deptypes=['link'])]),
|
||||
parameters={'precision': ['double', 'float']}
|
||||
)
|
||||
|
||||
x_from_entry = x.to_dict()
|
||||
x_from_str = json.loads(example_x_json_str)
|
||||
assert x_from_entry == x_from_str
|
||||
|
||||
|
||||
def test_compiler_from_entry():
|
||||
compiler_data = json.loads(example_compiler_entry)
|
||||
compiler_from_entry(compiler_data)
|
||||
|
||||
|
||||
def generate_openmpi_entries():
|
||||
"""Generate two example JSON entries that refer to an OpenMPI
|
||||
installation and a hwloc dependency.
|
||||
"""
|
||||
# The hashes need to be padded with 'a' at the end to align with 8-byte
|
||||
# boundaries (for base-32 decoding)
|
||||
hwloc = JsonSpecEntry(
|
||||
name='hwloc',
|
||||
hash='hwlocfakehashaaa',
|
||||
prefix='/path/to/hwloc-install/',
|
||||
version='2.0.3',
|
||||
arch=_common_arch,
|
||||
compiler=_common_compiler.spec_json(),
|
||||
dependencies={},
|
||||
parameters={}
|
||||
)
|
||||
|
||||
# This includes a variant which is guaranteed not to appear in the
|
||||
# OpenMPI package: we need to make sure we can use such package
|
||||
# descriptions.
|
||||
openmpi = JsonSpecEntry(
|
||||
name='openmpi',
|
||||
hash='openmpifakehasha',
|
||||
prefix='/path/to/openmpi-install/',
|
||||
version='4.1.0',
|
||||
arch=_common_arch,
|
||||
compiler=_common_compiler.spec_json(),
|
||||
dependencies=dict([hwloc.as_dependency(deptypes=['link'])]),
|
||||
parameters={
|
||||
'internal-hwloc': False,
|
||||
'fabrics': ['psm'],
|
||||
'missing_variant': True
|
||||
}
|
||||
)
|
||||
|
||||
return [openmpi, hwloc]
|
||||
|
||||
|
||||
def test_generate_specs_from_manifest():
|
||||
"""Given JSON entries, check that we can form a set of Specs
|
||||
including dependency references.
|
||||
"""
|
||||
entries = list(x.to_dict() for x in generate_openmpi_entries())
|
||||
specs = entries_to_specs(entries)
|
||||
openmpi_spec, = list(x for x in specs.values() if x.name == 'openmpi')
|
||||
assert openmpi_spec['hwloc']
|
||||
|
||||
|
||||
def test_translate_compiler_name():
|
||||
nvidia_compiler = JsonCompilerEntry(
|
||||
name='nvidia',
|
||||
version='19.1',
|
||||
executables={
|
||||
"cc": "/path/to/compiler/nvc",
|
||||
"cxx": "/path/to/compiler/nvc++",
|
||||
}
|
||||
)
|
||||
|
||||
compiler = compiler_from_entry(nvidia_compiler.compiler_json())
|
||||
assert compiler.name == 'nvhpc'
|
||||
|
||||
spec_json = JsonSpecEntry(
|
||||
name='hwloc',
|
||||
hash='hwlocfakehashaaa',
|
||||
prefix='/path/to/hwloc-install/',
|
||||
version='2.0.3',
|
||||
arch=_common_arch,
|
||||
compiler=nvidia_compiler.spec_json(),
|
||||
dependencies={},
|
||||
parameters={}
|
||||
).to_dict()
|
||||
|
||||
spec, = entries_to_specs([spec_json]).values()
|
||||
assert spec.compiler.name == 'nvhpc'
|
||||
|
||||
|
||||
def test_failed_translate_compiler_name():
|
||||
unknown_compiler = JsonCompilerEntry(
|
||||
name='unknown',
|
||||
version='1.0'
|
||||
)
|
||||
|
||||
with pytest.raises(spack.compilers.UnknownCompilerError):
|
||||
compiler_from_entry(unknown_compiler.compiler_json())
|
||||
|
||||
spec_json = JsonSpecEntry(
|
||||
name='packagey',
|
||||
hash='hash-of-y',
|
||||
prefix='/path/to/packagey-install/',
|
||||
version='1.0',
|
||||
arch=_common_arch,
|
||||
compiler=unknown_compiler.spec_json(),
|
||||
dependencies={},
|
||||
parameters={}
|
||||
).to_dict()
|
||||
|
||||
with pytest.raises(spack.compilers.UnknownCompilerError):
|
||||
entries_to_specs([spec_json])
|
||||
|
||||
|
||||
def create_manifest_content():
|
||||
return {
|
||||
'specs': list(x.to_dict() for x in generate_openmpi_entries()),
|
||||
'compilers': []
|
||||
}
|
||||
|
||||
|
||||
def test_read_cray_manifest(
|
||||
tmpdir, mutable_config, mock_packages, mutable_database):
|
||||
"""Check that (a) we can read the cray manifest and add it to the Spack
|
||||
Database and (b) we can concretize specs based on that.
|
||||
"""
|
||||
if spack.config.get('config:concretizer') == 'clingo':
|
||||
pytest.skip("The ASP-based concretizer is currently picky about "
|
||||
" OS matching and will fail.")
|
||||
|
||||
with tmpdir.as_cwd():
|
||||
test_db_fname = 'external-db.json'
|
||||
with open(test_db_fname, 'w') as db_file:
|
||||
json.dump(create_manifest_content(), db_file)
|
||||
cray_manifest.read(test_db_fname, True)
|
||||
query_specs = spack.store.db.query('openmpi')
|
||||
assert any(x.dag_hash() == 'openmpifakehasha' for x in query_specs)
|
||||
|
||||
concretized_specs = spack.cmd.parse_specs(
|
||||
'depends-on-openmpi %gcc@4.5.0 arch=test-redhat6-x86_64'
|
||||
' ^/openmpifakehasha'.split(),
|
||||
concretize=True)
|
||||
assert concretized_specs[0]['hwloc'].dag_hash() == 'hwlocfakehashaaa'
|
||||
5 lib/spack/spack/test/data/config/concretizer.yaml Normal file
@@ -0,0 +1,5 @@
concretizer:
  # reuse is missing on purpose, see "test_concretizer_arguments"
  targets:
    granularity: microarchitectures
    host_compatible: false
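A minimal sketch of how these new "concretizer:targets" settings are consumed, assuming this data file is loaded as the active mock configuration; the calls mirror the spack.config.get / spack.config.override usage in the concretization tests above:

import spack.config

# Read the whole subsection written in concretizer.yaml above
# (assumes the mock config scope containing this file is active).
targets = spack.config.get('concretizer:targets')
assert targets == {'granularity': 'microarchitectures', 'host_compatible': False}

# Temporarily override it, as the target-granularity tests above do.
with spack.config.override('concretizer:targets', {'granularity': 'generic'}):
    assert spack.config.get('concretizer:targets')['granularity'] == 'generic'
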
@@ -7,9 +7,8 @@ config:
|
||||
- $spack/lib/spack/spack/test/data/templates_again
|
||||
build_stage:
|
||||
- $tempdir/$user/spack-stage
|
||||
- ~/.spack/stage
|
||||
source_cache: $spack/var/spack/cache
|
||||
misc_cache: ~/.spack/cache
|
||||
source_cache: $user_cache_path/source
|
||||
misc_cache: $user_cache_path/cache
|
||||
verify_ssl: true
|
||||
checksum: true
|
||||
dirty: false
|
||||
|
||||
@@ -17,6 +17,9 @@ modules:
|
||||
default:
|
||||
enable:
|
||||
- tcl
|
||||
roots:
|
||||
tcl: $user_cache_path/tcl
|
||||
lmod: $user_cache_path/lmod
|
||||
prefix_inspections:
|
||||
bin:
|
||||
- PATH
|
||||
|
||||
383 lib/spack/spack/test/data/legacy_env/v1.lock Normal file
@@ -0,0 +1,383 @@
|
||||
{
|
||||
"_meta": {
|
||||
"file-type": "spack-lockfile",
|
||||
"lockfile-version": 1
|
||||
},
|
||||
"roots": [
|
||||
{
|
||||
"hash": "rtc6yg6kszj6shpc4wfnnattyxcqdwbk",
|
||||
"spec": "dttop ^dtbuild1@0.5"
|
||||
},
|
||||
{
|
||||
"hash": "rtc6yg6kszj6shpc4wfnnattyxcqdwbk",
|
||||
"spec": "dttop ^dtbuild1@1.0"
|
||||
}
|
||||
],
|
||||
"concrete_specs": {
|
||||
"rtc6yg6kszj6shpc4wfnnattyxcqdwbk": {
|
||||
"dttop": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"compiler": {
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": {
|
||||
"dtbuild1": {
|
||||
"hash": "dbfu5piza5tmsxd47ccdenbpnrt4cwxp",
|
||||
"type": [
|
||||
"build"
|
||||
]
|
||||
},
|
||||
"dtlink1": {
|
||||
"hash": "zlgayt66nyopbltittef4ve7w75teyyw",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
},
|
||||
"dtrun1": {
|
||||
"hash": "r23pvzyjxs2citoucn2atswjdlagqs3b",
|
||||
"type": [
|
||||
"run"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"dbfu5piza5tmsxd47ccdenbpnrt4cwxp": {
|
||||
"dtbuild1": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"compiler": {
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": {
|
||||
"dtbuild2": {
|
||||
"hash": "rzegrliakybnu33aafpezeohibhjfbey",
|
||||
"type": [
|
||||
"build"
|
||||
]
|
||||
},
|
||||
"dtlink2": {
|
||||
"hash": "z4z2hz675fiulykkquhfsidnctvna2cv",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
},
|
||||
"dtrun2": {
|
||||
"hash": "powghwfbyefncy7n3rohsaa7eurksm6m",
|
||||
"type": [
|
||||
"run"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"rzegrliakybnu33aafpezeohibhjfbey": {
|
||||
"dtbuild2": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"compiler": {
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
}
|
||||
}
|
||||
},
|
||||
"z4z2hz675fiulykkquhfsidnctvna2cv": {
|
||||
"dtlink2": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"compiler": {
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
}
|
||||
}
|
||||
},
|
||||
"powghwfbyefncy7n3rohsaa7eurksm6m": {
|
||||
"dtrun2": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"compiler": {
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
}
|
||||
}
|
||||
},
|
||||
"zlgayt66nyopbltittef4ve7w75teyyw": {
|
||||
"dtlink1": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"compiler": {
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": {
|
||||
"dtlink3": {
|
||||
"hash": "bjgkddw2inn42okhdzhdy4xcuqmdjlzv",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"bjgkddw2inn42okhdzhdy4xcuqmdjlzv": {
|
||||
"dtlink3": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"compiler": {
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": {
|
||||
"dtbuild2": {
|
||||
"hash": "rzegrliakybnu33aafpezeohibhjfbey",
|
||||
"type": [
|
||||
"build"
|
||||
]
|
||||
},
|
||||
"dtlink4": {
|
||||
"hash": "tvjtpacvyky4jlplt22krwshcwgyqcxd",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"tvjtpacvyky4jlplt22krwshcwgyqcxd": {
|
||||
"dtlink4": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"compiler": {
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
}
|
||||
}
|
||||
},
|
||||
"r23pvzyjxs2citoucn2atswjdlagqs3b": {
|
||||
"dtrun1": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"compiler": {
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": {
|
||||
"dtlink5": {
|
||||
"hash": "e5gnrubxp2rl4l6gfit2qb65vrmudbny",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
},
|
||||
"dtrun3": {
|
||||
"hash": "4qhelrleofze5wzr7freg2zesngxxusd",
|
||||
"type": [
|
||||
"run"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"e5gnrubxp2rl4l6gfit2qb65vrmudbny": {
|
||||
"dtlink5": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"compiler": {
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
}
|
||||
}
|
||||
},
|
||||
"4qhelrleofze5wzr7freg2zesngxxusd": {
|
||||
"dtrun3": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"compiler": {
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": {
|
||||
"dtbuild3": {
|
||||
"hash": "7qx3epm7xmhv7myz27pmik4je6berknr",
|
||||
"type": [
|
||||
"build"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"7qx3epm7xmhv7myz27pmik4je6berknr": {
|
||||
"dtbuild3": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": "x86_64"
|
||||
},
|
||||
"compiler": {
|
||||
"name": "clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
919 lib/spack/spack/test/data/legacy_env/v2.lock Normal file
@@ -0,0 +1,919 @@
|
||||
{
|
||||
"_meta": {
|
||||
"file-type": "spack-lockfile",
|
||||
"lockfile-version": 2
|
||||
},
|
||||
"roots": [
|
||||
{
|
||||
"hash": "fvsxmvcsdvwx6uz44bytxu3jit4wxbxx",
|
||||
"spec": "dttop ^dtbuild1@0.5"
|
||||
},
|
||||
{
|
||||
"hash": "eymyqmyzk62nkbztpg7dmjnntznuaoy3",
|
||||
"spec": "dttop ^dtbuild1@1.0"
|
||||
}
|
||||
],
|
||||
"concrete_specs": {
|
||||
"fvsxmvcsdvwx6uz44bytxu3jit4wxbxx": {
|
||||
"dttop": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": {
|
||||
"dtbuild1": {
|
||||
"hash": "zw4o7pprzvvlmsofxj544jt7bdop7vyl",
|
||||
"type": [
|
||||
"build"
|
||||
]
|
||||
},
|
||||
"dtlink1": {
|
||||
"hash": "fmsqm7vbwvxcibfzlcgo5ac5zly3bgox",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
},
|
||||
"dtrun1": {
|
||||
"hash": "skcjwhzqovl2vumdbyjijmsn3dyhqbbb",
|
||||
"type": [
|
||||
"run"
|
||||
]
|
||||
}
|
||||
},
|
||||
"hash": "bvzqjnqrl7abwal5azcbut2adbxjltxw"
|
||||
}
|
||||
},
|
||||
"zw4o7pprzvvlmsofxj544jt7bdop7vyl": {
|
||||
"dtbuild1": {
|
||||
"version": "0.5",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": {
|
||||
"dtbuild2": {
|
||||
"hash": "ojbqgzn53vd7do3ehbrwueyz4nk6xqin",
|
||||
"type": [
|
||||
"build"
|
||||
]
|
||||
},
|
||||
"dtlink2": {
|
||||
"hash": "kpckvggymdnvjndepbqjz236upav5ymm",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
},
|
||||
"dtrun2": {
|
||||
"hash": "4hj7adm23qwhtx5lxhy543jtkztvbcfe",
|
||||
"type": [
|
||||
"run"
|
||||
]
|
||||
}
|
||||
},
|
||||
"hash": "nz7d6bnl2anbiwckijzbgpdukow2mlb4"
|
||||
}
|
||||
},
|
||||
"ojbqgzn53vd7do3ehbrwueyz4nk6xqin": {
|
||||
"dtbuild2": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"hash": "ojbqgzn53vd7do3ehbrwueyz4nk6xqin"
|
||||
}
|
||||
},
|
||||
"kpckvggymdnvjndepbqjz236upav5ymm": {
|
||||
"dtlink2": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"hash": "kpckvggymdnvjndepbqjz236upav5ymm"
|
||||
}
|
||||
},
|
||||
"4hj7adm23qwhtx5lxhy543jtkztvbcfe": {
|
||||
"dtrun2": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"hash": "4hj7adm23qwhtx5lxhy543jtkztvbcfe"
|
||||
}
|
||||
},
|
||||
"fmsqm7vbwvxcibfzlcgo5ac5zly3bgox": {
|
||||
"dtlink1": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": {
|
||||
"dtlink3": {
|
||||
"hash": "clkcipbhtglfceg6j5b7qklri33msmy3",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
}
|
||||
},
|
||||
"hash": "bwi2mxyehnjoxp7kpkhoxbloh2tvmz2l"
|
||||
}
|
||||
},
|
||||
"clkcipbhtglfceg6j5b7qklri33msmy3": {
|
||||
"dtlink3": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": {
|
||||
"dtbuild2": {
|
||||
"hash": "ojbqgzn53vd7do3ehbrwueyz4nk6xqin",
|
||||
"type": [
|
||||
"build"
|
||||
]
|
||||
},
|
||||
"dtlink4": {
|
||||
"hash": "gznxlq6w5f274fngfrwsotcrg2gnkigp",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
}
|
||||
},
|
||||
"hash": "3ty6ao54qcoejcnckim6aygo64bxajqm"
|
||||
}
|
||||
},
|
||||
"gznxlq6w5f274fngfrwsotcrg2gnkigp": {
|
||||
"dtlink4": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"hash": "gznxlq6w5f274fngfrwsotcrg2gnkigp"
|
||||
}
|
||||
},
|
||||
"skcjwhzqovl2vumdbyjijmsn3dyhqbbb": {
|
||||
"dtrun1": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": {
|
||||
"dtlink5": {
|
||||
"hash": "y557umbvt4y57wxc4ktwkixzguai3dio",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
},
|
||||
"dtrun3": {
|
||||
"hash": "kmcnoznhtf4qgkxrfcsitzthiqswci3t",
|
||||
"type": [
|
||||
"run"
|
||||
]
|
||||
}
|
||||
},
|
||||
"hash": "5eh47vivjja4zgrvkconqt47ptsnggpj"
|
||||
}
|
||||
},
|
||||
"y557umbvt4y57wxc4ktwkixzguai3dio": {
|
||||
"dtlink5": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"hash": "y557umbvt4y57wxc4ktwkixzguai3dio"
|
||||
}
|
||||
},
|
||||
"kmcnoznhtf4qgkxrfcsitzthiqswci3t": {
|
||||
"dtrun3": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": {
|
||||
"dtbuild3": {
|
||||
"hash": "ms6xv6czf2dw7gal3cx5gzmgp3zw25z7",
|
||||
"type": [
|
||||
"build"
|
||||
]
|
||||
}
|
||||
},
|
||||
"hash": "at5j7haicypoprvn37rclbv7wtfotdm4"
|
||||
}
|
||||
},
|
||||
"ms6xv6czf2dw7gal3cx5gzmgp3zw25z7": {
|
||||
"dtbuild3": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"hash": "ms6xv6czf2dw7gal3cx5gzmgp3zw25z7"
|
||||
}
|
||||
},
|
||||
"eymyqmyzk62nkbztpg7dmjnntznuaoy3": {
|
||||
"dttop": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": {
|
||||
"dtbuild1": {
|
||||
"hash": "jdcbg7gynk3xybuwurmqnpu7f3f4lyru",
|
||||
"type": [
|
||||
"build"
|
||||
]
|
||||
},
|
||||
"dtlink1": {
|
||||
"hash": "fmsqm7vbwvxcibfzlcgo5ac5zly3bgox",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
},
|
||||
"dtrun1": {
|
||||
"hash": "skcjwhzqovl2vumdbyjijmsn3dyhqbbb",
|
||||
"type": [
|
||||
"run"
|
||||
]
|
||||
}
|
||||
},
|
||||
"hash": "bvzqjnqrl7abwal5azcbut2adbxjltxw"
|
||||
}
|
||||
},
|
||||
"jdcbg7gynk3xybuwurmqnpu7f3f4lyru": {
|
||||
"dtbuild1": {
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "macos",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": {
|
||||
"dtbuild2": {
|
||||
"hash": "ojbqgzn53vd7do3ehbrwueyz4nk6xqin",
|
||||
"type": [
|
||||
"build"
|
||||
]
|
||||
},
|
||||
"dtlink2": {
|
||||
"hash": "kpckvggymdnvjndepbqjz236upav5ymm",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
},
|
||||
"dtrun2": {
|
||||
"hash": "4hj7adm23qwhtx5lxhy543jtkztvbcfe",
|
||||
"type": [
|
||||
"run"
|
||||
]
|
||||
}
|
||||
},
|
||||
"hash": "4a6yogpekskoz6kznhgwv6vdbyhoh4yy"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
924 lib/spack/spack/test/data/legacy_env/v3.lock Normal file
@@ -0,0 +1,924 @@
|
||||
{
|
||||
"_meta": {
|
||||
"file-type": "spack-lockfile",
|
||||
"lockfile-version": 3,
|
||||
"specfile-version": 2
|
||||
},
|
||||
"roots": [
|
||||
{
|
||||
"hash": "bzapmjnw5faayjwdemyipyky4wdksh5t",
|
||||
"spec": "dttop ^dtbuild1@0.5"
|
||||
},
|
||||
{
|
||||
"hash": "2n4kyt2vzexalndoo46aa63vclquruuk",
|
||||
"spec": "dttop ^dtbuild1@1.0"
|
||||
}
|
||||
],
|
||||
"concrete_specs": {
|
||||
"bzapmjnw5faayjwdemyipyky4wdksh5t": {
|
||||
"name": "dttop",
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "bigsur",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": [
|
||||
{
|
||||
"name": "dtbuild1",
|
||||
"build_hash": "hu3l6iyiyhfhgpljz6mm2arkzatex5kl",
|
||||
"type": [
|
||||
"build"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "dtlink1",
|
||||
"build_hash": "seallhwetilvtxoxaun3zvauga6erllj",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "dtrun1",
|
||||
"build_hash": "s7wjy5kbni43lswlhsdywh7fsz3kruax",
|
||||
"type": [
|
||||
"run"
|
||||
]
|
||||
}
|
||||
],
|
||||
"hash": "m5m2tbnyh2xnjd4nzwiuobvpk2wn4wix"
|
||||
},
|
||||
"hu3l6iyiyhfhgpljz6mm2arkzatex5kl": {
|
||||
"name": "dtbuild1",
|
||||
"version": "0.5",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "bigsur",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"dependencies": [
|
||||
{
|
||||
"name": "dtbuild2",
|
||||
"build_hash": "pk3aetkbsbsktaslbuthagh746f2nj72",
|
||||
"type": [
|
||||
"build"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "dtlink2",
|
||||
"build_hash": "rmgjkspuni3egfwusig4xlotnd74hgik",
|
||||
"type": [
|
||||
"build",
|
||||
"link"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "dtrun2",
|
||||
"build_hash": "ehpgq3io4lcrlfucjwxt2t64q7ibxqy4",
|
||||
"type": [
|
||||
"run"
|
||||
]
|
||||
}
|
||||
],
|
||||
"hash": "pyxv3jfcry5my4ebahaktbk6vral4gcg"
|
||||
},
|
||||
"pk3aetkbsbsktaslbuthagh746f2nj72": {
|
||||
"name": "dtbuild2",
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "bigsur",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
||||
"movbe",
|
||||
"pclmulqdq",
|
||||
"popcnt",
|
||||
"rdrand",
|
||||
"rdseed",
|
||||
"sse",
|
||||
"sse2",
|
||||
"sse4_1",
|
||||
"sse4_2",
|
||||
"ssse3",
|
||||
"xsavec",
|
||||
"xsaveopt"
|
||||
],
|
||||
"generation": 0,
|
||||
"parents": [
|
||||
"broadwell"
|
||||
]
|
||||
}
|
||||
},
|
||||
"compiler": {
|
||||
"name": "apple-clang",
|
||||
"version": "13.0.0"
|
||||
},
|
||||
"namespace": "builtin.mock",
|
||||
"parameters": {
|
||||
"cflags": [],
|
||||
"cppflags": [],
|
||||
"cxxflags": [],
|
||||
"fflags": [],
|
||||
"ldflags": [],
|
||||
"ldlibs": []
|
||||
},
|
||||
"hash": "pk3aetkbsbsktaslbuthagh746f2nj72"
|
||||
},
|
||||
"rmgjkspuni3egfwusig4xlotnd74hgik": {
|
||||
"name": "dtlink2",
|
||||
"version": "1.0",
|
||||
"arch": {
|
||||
"platform": "darwin",
|
||||
"platform_os": "bigsur",
|
||||
"target": {
|
||||
"name": "skylake",
|
||||
"vendor": "GenuineIntel",
|
||||
"features": [
|
||||
"adx",
|
||||
"aes",
|
||||
"avx",
|
||||
"avx2",
|
||||
"bmi1",
|
||||
"bmi2",
|
||||
"clflushopt",
|
||||
"f16c",
|
||||
"fma",
|
||||
"mmx",
|
            "movbe",
            "pclmulqdq",
            "popcnt",
            "rdrand",
            "rdseed",
            "sse",
            "sse2",
            "sse4_1",
            "sse4_2",
            "ssse3",
            "xsavec",
            "xsaveopt"
          ],
          "generation": 0,
          "parents": [
            "broadwell"
          ]
        }
      },
      "compiler": {
        "name": "apple-clang",
        "version": "13.0.0"
      },
      "namespace": "builtin.mock",
      "parameters": {
        "cflags": [],
        "cppflags": [],
        "cxxflags": [],
        "fflags": [],
        "ldflags": [],
        "ldlibs": []
      },
      "hash": "rmgjkspuni3egfwusig4xlotnd74hgik"
    },
    "ehpgq3io4lcrlfucjwxt2t64q7ibxqy4": {
      "name": "dtrun2",
      "version": "1.0",
      "arch": {
        "platform": "darwin",
        "platform_os": "bigsur",
        "target": {
          "name": "skylake",
          "vendor": "GenuineIntel",
          "features": [
            "adx",
            "aes",
            "avx",
            "avx2",
            "bmi1",
            "bmi2",
            "clflushopt",
            "f16c",
            "fma",
            "mmx",
            "movbe",
            "pclmulqdq",
            "popcnt",
            "rdrand",
            "rdseed",
            "sse",
            "sse2",
            "sse4_1",
            "sse4_2",
            "ssse3",
            "xsavec",
            "xsaveopt"
          ],
          "generation": 0,
          "parents": [
            "broadwell"
          ]
        }
      },
      "compiler": {
        "name": "apple-clang",
        "version": "13.0.0"
      },
      "namespace": "builtin.mock",
      "parameters": {
        "cflags": [],
        "cppflags": [],
        "cxxflags": [],
        "fflags": [],
        "ldflags": [],
        "ldlibs": []
      },
      "hash": "ehpgq3io4lcrlfucjwxt2t64q7ibxqy4"
    },
    "seallhwetilvtxoxaun3zvauga6erllj": {
      "name": "dtlink1",
      "version": "1.0",
      "arch": {
        "platform": "darwin",
        "platform_os": "bigsur",
        "target": {
          "name": "skylake",
          "vendor": "GenuineIntel",
          "features": [
            "adx",
            "aes",
            "avx",
            "avx2",
            "bmi1",
            "bmi2",
            "clflushopt",
            "f16c",
            "fma",
            "mmx",
            "movbe",
            "pclmulqdq",
            "popcnt",
            "rdrand",
            "rdseed",
            "sse",
            "sse2",
            "sse4_1",
            "sse4_2",
            "ssse3",
            "xsavec",
            "xsaveopt"
          ],
          "generation": 0,
          "parents": [
            "broadwell"
          ]
        }
      },
      "compiler": {
        "name": "apple-clang",
        "version": "13.0.0"
      },
      "namespace": "builtin.mock",
      "parameters": {
        "cflags": [],
        "cppflags": [],
        "cxxflags": [],
        "fflags": [],
        "ldflags": [],
        "ldlibs": []
      },
      "dependencies": [
        {
          "name": "dtlink3",
          "build_hash": "xe6nm3jplyqo6pi3yag6h23oekcy45ie",
          "type": [
            "build",
            "link"
          ]
        }
      ],
      "hash": "gm4epmarycggqb3bsergwfiusz3hcqdn"
    },
    "xe6nm3jplyqo6pi3yag6h23oekcy45ie": {
      "name": "dtlink3",
      "version": "1.0",
      "arch": {
        "platform": "darwin",
        "platform_os": "bigsur",
        "target": {
          "name": "skylake",
          "vendor": "GenuineIntel",
          "features": [
            "adx",
            "aes",
            "avx",
            "avx2",
            "bmi1",
            "bmi2",
            "clflushopt",
            "f16c",
            "fma",
            "mmx",
            "movbe",
            "pclmulqdq",
            "popcnt",
            "rdrand",
            "rdseed",
            "sse",
            "sse2",
            "sse4_1",
            "sse4_2",
            "ssse3",
            "xsavec",
            "xsaveopt"
          ],
          "generation": 0,
          "parents": [
            "broadwell"
          ]
        }
      },
      "compiler": {
        "name": "apple-clang",
        "version": "13.0.0"
      },
      "namespace": "builtin.mock",
      "parameters": {
        "cflags": [],
        "cppflags": [],
        "cxxflags": [],
        "fflags": [],
        "ldflags": [],
        "ldlibs": []
      },
      "dependencies": [
        {
          "name": "dtbuild2",
          "build_hash": "pk3aetkbsbsktaslbuthagh746f2nj72",
          "type": [
            "build"
          ]
        },
        {
          "name": "dtlink4",
          "build_hash": "6c5wagoxq3jeo6uuvyql56yqna4rm2cx",
          "type": [
            "build",
            "link"
          ]
        }
      ],
      "hash": "iuj6oe423ocaabhlrhhyf45o54r3u4ny"
    },
    "6c5wagoxq3jeo6uuvyql56yqna4rm2cx": {
      "name": "dtlink4",
      "version": "1.0",
      "arch": {
        "platform": "darwin",
        "platform_os": "bigsur",
        "target": {
          "name": "skylake",
          "vendor": "GenuineIntel",
          "features": [
            "adx",
            "aes",
            "avx",
            "avx2",
            "bmi1",
            "bmi2",
            "clflushopt",
            "f16c",
            "fma",
            "mmx",
            "movbe",
            "pclmulqdq",
            "popcnt",
            "rdrand",
            "rdseed",
            "sse",
            "sse2",
            "sse4_1",
            "sse4_2",
            "ssse3",
            "xsavec",
            "xsaveopt"
          ],
          "generation": 0,
          "parents": [
            "broadwell"
          ]
        }
      },
      "compiler": {
        "name": "apple-clang",
        "version": "13.0.0"
      },
      "namespace": "builtin.mock",
      "parameters": {
        "cflags": [],
        "cppflags": [],
        "cxxflags": [],
        "fflags": [],
        "ldflags": [],
        "ldlibs": []
      },
      "hash": "6c5wagoxq3jeo6uuvyql56yqna4rm2cx"
    },
    "s7wjy5kbni43lswlhsdywh7fsz3kruax": {
      "name": "dtrun1",
      "version": "1.0",
      "arch": {
        "platform": "darwin",
        "platform_os": "bigsur",
        "target": {
          "name": "skylake",
          "vendor": "GenuineIntel",
          "features": [
            "adx",
            "aes",
            "avx",
            "avx2",
            "bmi1",
            "bmi2",
            "clflushopt",
            "f16c",
            "fma",
            "mmx",
            "movbe",
            "pclmulqdq",
            "popcnt",
            "rdrand",
            "rdseed",
            "sse",
            "sse2",
            "sse4_1",
            "sse4_2",
            "ssse3",
            "xsavec",
            "xsaveopt"
          ],
          "generation": 0,
          "parents": [
            "broadwell"
          ]
        }
      },
      "compiler": {
        "name": "apple-clang",
        "version": "13.0.0"
      },
      "namespace": "builtin.mock",
      "parameters": {
        "cflags": [],
        "cppflags": [],
        "cxxflags": [],
        "fflags": [],
        "ldflags": [],
        "ldlibs": []
      },
      "dependencies": [
        {
          "name": "dtlink5",
          "build_hash": "qphelcbihtf4umgz6h66ppikwn3ernif",
          "type": [
            "build",
            "link"
          ]
        },
        {
          "name": "dtrun3",
          "build_hash": "kierl2q24kzqcl35r5o6rj3ecg2463yn",
          "type": [
            "run"
          ]
        }
      ],
      "hash": "axoyavsuklr3jdzx3lljffqfmiqmq5xj"
    },
    "qphelcbihtf4umgz6h66ppikwn3ernif": {
      "name": "dtlink5",
      "version": "1.0",
      "arch": {
        "platform": "darwin",
        "platform_os": "bigsur",
        "target": {
          "name": "skylake",
          "vendor": "GenuineIntel",
          "features": [
            "adx",
            "aes",
            "avx",
            "avx2",
            "bmi1",
            "bmi2",
            "clflushopt",
            "f16c",
            "fma",
            "mmx",
            "movbe",
            "pclmulqdq",
            "popcnt",
            "rdrand",
            "rdseed",
            "sse",
            "sse2",
            "sse4_1",
            "sse4_2",
            "ssse3",
            "xsavec",
            "xsaveopt"
          ],
          "generation": 0,
          "parents": [
            "broadwell"
          ]
        }
      },
      "compiler": {
        "name": "apple-clang",
        "version": "13.0.0"
      },
      "namespace": "builtin.mock",
      "parameters": {
        "cflags": [],
        "cppflags": [],
        "cxxflags": [],
        "fflags": [],
        "ldflags": [],
        "ldlibs": []
      },
      "hash": "qphelcbihtf4umgz6h66ppikwn3ernif"
    },
    "kierl2q24kzqcl35r5o6rj3ecg2463yn": {
      "name": "dtrun3",
      "version": "1.0",
      "arch": {
        "platform": "darwin",
        "platform_os": "bigsur",
        "target": {
          "name": "skylake",
          "vendor": "GenuineIntel",
          "features": [
            "adx",
            "aes",
            "avx",
            "avx2",
            "bmi1",
            "bmi2",
            "clflushopt",
            "f16c",
            "fma",
            "mmx",
            "movbe",
            "pclmulqdq",
            "popcnt",
            "rdrand",
            "rdseed",
            "sse",
            "sse2",
            "sse4_1",
            "sse4_2",
            "ssse3",
            "xsavec",
            "xsaveopt"
          ],
          "generation": 0,
          "parents": [
            "broadwell"
          ]
        }
      },
      "compiler": {
        "name": "apple-clang",
        "version": "13.0.0"
      },
      "namespace": "builtin.mock",
      "parameters": {
        "cflags": [],
        "cppflags": [],
        "cxxflags": [],
        "fflags": [],
        "ldflags": [],
        "ldlibs": []
      },
      "dependencies": [
        {
          "name": "dtbuild3",
          "build_hash": "e4ue4tdy3yscgshzqpzsh7yjf7bear4k",
          "type": [
            "build"
          ]
        }
      ],
      "hash": "4bzkvvvuex7vybtto7rej6sg7uzqdqs2"
    },
    "e4ue4tdy3yscgshzqpzsh7yjf7bear4k": {
      "name": "dtbuild3",
      "version": "1.0",
      "arch": {
        "platform": "darwin",
        "platform_os": "bigsur",
        "target": {
          "name": "skylake",
          "vendor": "GenuineIntel",
          "features": [
            "adx",
            "aes",
            "avx",
            "avx2",
            "bmi1",
            "bmi2",
            "clflushopt",
            "f16c",
            "fma",
            "mmx",
            "movbe",
            "pclmulqdq",
            "popcnt",
            "rdrand",
            "rdseed",
            "sse",
            "sse2",
            "sse4_1",
            "sse4_2",
            "ssse3",
            "xsavec",
            "xsaveopt"
          ],
          "generation": 0,
          "parents": [
            "broadwell"
          ]
        }
      },
      "compiler": {
        "name": "apple-clang",
        "version": "13.0.0"
      },
      "namespace": "builtin.mock",
      "parameters": {
        "cflags": [],
        "cppflags": [],
        "cxxflags": [],
        "fflags": [],
        "ldflags": [],
        "ldlibs": []
      },
      "hash": "e4ue4tdy3yscgshzqpzsh7yjf7bear4k"
    },
    "2n4kyt2vzexalndoo46aa63vclquruuk": {
      "name": "dttop",
      "version": "1.0",
      "arch": {
        "platform": "darwin",
        "platform_os": "bigsur",
        "target": {
          "name": "skylake",
          "vendor": "GenuineIntel",
          "features": [
            "adx",
            "aes",
            "avx",
            "avx2",
            "bmi1",
            "bmi2",
            "clflushopt",
            "f16c",
            "fma",
            "mmx",
            "movbe",
            "pclmulqdq",
            "popcnt",
            "rdrand",
            "rdseed",
            "sse",
            "sse2",
            "sse4_1",
            "sse4_2",
            "ssse3",
            "xsavec",
            "xsaveopt"
          ],
          "generation": 0,
          "parents": [
            "broadwell"
          ]
        }
      },
      "compiler": {
        "name": "apple-clang",
        "version": "13.0.0"
      },
      "namespace": "builtin.mock",
      "parameters": {
        "cflags": [],
        "cppflags": [],
        "cxxflags": [],
        "fflags": [],
        "ldflags": [],
        "ldlibs": []
      },
      "dependencies": [
        {
          "name": "dtbuild1",
          "build_hash": "habtzkoojlaipvyw3an6tjncw6dhmevz",
          "type": [
            "build"
          ]
        },
        {
          "name": "dtlink1",
          "build_hash": "seallhwetilvtxoxaun3zvauga6erllj",
          "type": [
            "build",
            "link"
          ]
        },
        {
          "name": "dtrun1",
          "build_hash": "s7wjy5kbni43lswlhsdywh7fsz3kruax",
          "type": [
            "run"
          ]
        }
      ],
      "hash": "m5m2tbnyh2xnjd4nzwiuobvpk2wn4wix"
    },
    "habtzkoojlaipvyw3an6tjncw6dhmevz": {
      "name": "dtbuild1",
      "version": "1.0",
      "arch": {
        "platform": "darwin",
        "platform_os": "bigsur",
        "target": {
          "name": "skylake",
          "vendor": "GenuineIntel",
          "features": [
            "adx",
            "aes",
            "avx",
            "avx2",
            "bmi1",
            "bmi2",
            "clflushopt",
            "f16c",
            "fma",
            "mmx",
            "movbe",
            "pclmulqdq",
            "popcnt",
            "rdrand",
            "rdseed",
            "sse",
            "sse2",
            "sse4_1",
            "sse4_2",
            "ssse3",
            "xsavec",
            "xsaveopt"
          ],
          "generation": 0,
          "parents": [
            "broadwell"
          ]
        }
      },
      "compiler": {
        "name": "apple-clang",
        "version": "13.0.0"
      },
      "namespace": "builtin.mock",
      "parameters": {
        "cflags": [],
        "cppflags": [],
        "cxxflags": [],
        "fflags": [],
        "ldflags": [],
        "ldlibs": []
      },
      "dependencies": [
        {
          "name": "dtbuild2",
          "build_hash": "pk3aetkbsbsktaslbuthagh746f2nj72",
          "type": [
            "build"
          ]
        },
        {
          "name": "dtlink2",
          "build_hash": "rmgjkspuni3egfwusig4xlotnd74hgik",
          "type": [
            "build",
            "link"
          ]
        },
        {
          "name": "dtrun2",
          "build_hash": "ehpgq3io4lcrlfucjwxt2t64q7ibxqy4",
          "type": [
            "run"
          ]
        }
      ],
      "hash": "o4q27mbuz2zalxa36gdsykhystn6giin"
    }
  }
}
@@ -0,0 +1,6 @@
==> Testing package printing-package-1.0-hzgcoow
BEFORE TEST
==> [2022-02-28-20:21:46.510616] test: true: expect command status in [0]
==> [2022-02-28-20:21:46.510937] '/bin/true'
PASSED
AFTER TEST
@@ -0,0 +1 @@
printing-package-1.0-hzgcoow PASSED
@@ -0,0 +1,51 @@
{
  "specs": [
    {
      "spec": {
        "_meta": {
          "version": 2
        },
        "nodes": [
          {
            "name": "printing-package",
            "version": "1.0",
            "arch": {
              "platform": "test",
              "platform_os": "debian6",
              "target": {
                "name": "core2",
                "vendor": "GenuineIntel",
                "features": [
                  "mmx",
                  "sse",
                  "sse2",
                  "ssse3"
                ],
                "generation": 0,
                "parents": [
                  "nocona"
                ]
              }
            },
            "compiler": {
              "name": "gcc",
              "version": "4.5.0"
            },
            "namespace": "builtin.mock",
            "parameters": {
              "cflags": [],
              "cppflags": [],
              "cxxflags": [],
              "fflags": [],
              "ldflags": [],
              "ldlibs": []
            },
            "hash": "hzgcoowzej2ftjj3v4nkdling63w2xcc",
            "full_hash": "fakehash492ucwhwvzhxfbmcc45x49ha"
          }
        ]
      }
    }
  ],
  "alias": "printpkg"
}
@@ -64,6 +64,36 @@ def upstream_and_downstream_db(tmpdir_factory, gen_mock_layout):
         downstream_db, downstream_layout
 
 
+@pytest.mark.skipif(sys.platform == 'win32',
+                    reason="Upstreams currently unsupported on Windows")
+def test_spec_installed_upstream(upstream_and_downstream_db, config, monkeypatch):
+    """Test whether Spec.installed_upstream() works."""
+    upstream_write_db, upstream_db, upstream_layout, \
+        downstream_db, downstream_layout = upstream_and_downstream_db
+
+    # a known installed spec should say that it's installed
+    mock_repo = MockPackageMultiRepo()
+    mock_repo.add_package('x', [], [])
+
+    with spack.repo.use_repositories(mock_repo):
+        spec = spack.spec.Spec("x").concretized()
+        assert not spec.installed
+        assert not spec.installed_upstream
+
+        upstream_write_db.add(spec, upstream_layout)
+        upstream_db._read()
+
+        monkeypatch.setattr(spack.store, "db", downstream_db)
+        assert spec.installed
+        assert spec.installed_upstream
+        assert spec.copy().installed
+
+    # an abstract spec should say it's not installed
+    spec = spack.spec.Spec("not-a-real-package")
+    assert not spec.installed
+    assert not spec.installed_upstream
+
+
 @pytest.mark.skipif(sys.platform == 'win32',
                     reason="Upstreams currently unsupported on Windows")
 @pytest.mark.usefixtures('config')
@@ -726,11 +756,11 @@ def test_regression_issue_8036(mutable_database, usr_folder_exists):
     # do_install.
     s = spack.spec.Spec('externaltool@0.9')
     s.concretize()
-    assert not s.package.installed
+    assert not s.installed
 
     # Now install the external package and check again the `installed` property
     s.package.do_install(fake=True)
-    assert s.package.installed
+    assert s.installed
 
 
 @pytest.mark.regression('11118')
@@ -761,7 +791,7 @@ def test_old_external_entries_prefix(mutable_database):
 def test_uninstall_by_spec(mutable_database):
     with mutable_database.write_transaction():
         for spec in mutable_database.query():
-            if spec.package.installed:
+            if spec.installed:
                 spack.package.PackageBase.uninstall_by_spec(spec, force=True)
             else:
                 mutable_database.remove(spec)
@@ -1023,3 +1053,19 @@ def test_consistency_of_dependents_upon_remove(mutable_database):
     s = mutable_database.query_one('dyninst')
     parents = s.dependents(name='callpath')
     assert len(parents) == 2
+
+
+@pytest.mark.regression('30187')
+def test_query_installed_when_package_unknown(database):
+    """Test that we can query the installation status of a spec
+    when we don't know its package.py
+    """
+    with spack.repo.use_repositories(MockPackageMultiRepo()):
+        specs = database.query('mpileaks')
+        for s in specs:
+            # Assert that we can query the installation methods even though we
+            # don't have the package.py available
+            assert s.installed
+            assert not s.installed_upstream
+            with pytest.raises(spack.repo.UnknownNamespaceError):
+                s.package