Compare commits
534 commits, `dag-ordere...` → `bugfix/com...` (55ac6fb542 through a4b8753456; per-commit author/date details not captured)
.git-blame-ignore-revs
```diff
@@ -1,3 +1,5 @@
 # .git-blame-ignore-revs
-# Formatted entire codebase with black
+# Formatted entire codebase with black 23
+603569e321013a1a63a637813c94c2834d0a0023
+# Formatted entire codebase with black 22
 f52f6e99dbf1131886a80112b8c79dfc414afb7c
```
.gitattributes (vendored) — 1 addition
```diff
@@ -1,3 +1,4 @@
 *.py diff=python
 *.lp linguist-language=Prolog
 lib/spack/external/* linguist-vendored
+*.bat text eol=crlf
```
.github/workflows/build-containers.yml (vendored) — 4 changes
```diff
@@ -89,7 +89,7 @@ jobs:
        uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1

      - name: Set up Docker Buildx
-       uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 # @v1
+       uses: docker/setup-buildx-action@f03ac48505955848960e80bbb68046aa35c7b9e7 # @v1

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
@@ -106,7 +106,7 @@ jobs:
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-       uses: docker/build-push-action@37abcedcc1da61a57767b7588cb9d03eb57e28b3 # @v2
+       uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2
        with:
          context: dockerfiles/${{ matrix.dockerfile[0] }}
          platforms: ${{ matrix.dockerfile[1] }}
```
.github/workflows/valid-style.yml (vendored) — 2 changes
```diff
@@ -44,7 +44,7 @@ jobs:
          cache: 'pip'
      - name: Install Python packages
        run: |
-         python3 -m pip install --upgrade pip six setuptools types-six black mypy isort clingo flake8
+         python3 -m pip install --upgrade pip six setuptools types-six black==23.1.0 mypy isort clingo flake8
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
```
CHANGELOG.md — 25 changes
```diff
@@ -1,3 +1,28 @@
+# v0.19.1 (2023-02-07)
+
+### Spack Bugfixes
+
+* `buildcache create`: make "file exists" less verbose (#35019)
+* `spack mirror create`: don't change paths to urls (#34992)
+* Improve error message for requirements (#33988)
+* uninstall: fix accidental cubic complexity (#34005)
+* scons: fix signature for `install_args` (#34481)
+* Fix `combine_phase_logs` text encoding issues (#34657)
+* Use a module-like object to propagate changes in the MRO, when setting build env (#34059)
+* PackageBase should not define builder legacy attributes (#33942)
+* Forward lookup of the "run_tests" attribute (#34531)
+* Bugfix for timers (#33917, #33900)
+* Fix path handling in prefix inspections (#35318)
+* Fix libtool filter for Fujitsu compilers (#34916)
+* Bug fix for duplicate rpath errors on macOS when creating build caches (#34375)
+* FileCache: delete the new cache file on exception (#34623)
+* Propagate exceptions from Spack python console (#34547)
+* Tests: Fix a bug/typo in a `config_values.py` fixture (#33886)
+* Various CI fixes (#33953, #34560, #34560, #34828)
+* Docs: remove monitors and analyzers, typos (#34358, #33926)
+* bump release version for tutorial command (#33859)
+
+
 # v0.19.0 (2022-11-11)

 `v0.19.0` is a major feature release.
```
(shell script writing a test config.yaml; file name not captured)
```diff
@@ -72,6 +72,7 @@ config:
   root: $TMP_DIR/install
   misc_cache: $$user_cache_path/cache
   source_cache: $$user_cache_path/source
+  environments_root: $TMP_DIR/envs
 EOF
 cat >"$SPACK_USER_CONFIG_PATH/bootstrap.yaml" <<EOF
 bootstrap:
```
(likely bin/spack.bat)
```diff
@@ -83,6 +83,16 @@ if defined _sp_flags (
     exit /B 0
   )
 )
+if not defined _sp_subcommand (
+  if not defined _sp_args (
+    if not defined _sp_flags (
+      python "%spack%" --help
+      exit /B 0
+    )
+  )
+)
+
+
 :: pass parsed variables outside of local scope. Need to do
 :: this because delayedexpansion can only be set by setlocal
 echo %_sp_flags%>flags
@@ -92,24 +102,24 @@ endlocal
 set /p _sp_subcommand=<subcmd
 set /p _sp_flags=<flags
 set /p _sp_args=<args
-set str_subcommand=%_sp_subcommand:"='%
-set str_flags=%_sp_flags:"='%
-set str_args=%_sp_args:"='%
-if "%str_subcommand%"=="ECHO is off." (set "_sp_subcommand=")
-if "%str_flags%"=="ECHO is off." (set "_sp_flags=")
-if "%str_args%"=="ECHO is off." (set "_sp_args=")
+if "%_sp_subcommand%"=="ECHO is off." (set "_sp_subcommand=")
+if "%_sp_subcommand%"=="ECHO is on." (set "_sp_subcommand=")
+if "%_sp_flags%"=="ECHO is off." (set "_sp_flags=")
+if "%_sp_flags%"=="ECHO is on." (set "_sp_flags=")
+if "%_sp_args%"=="ECHO is off." (set "_sp_args=")
+if "%_sp_args%"=="ECHO is on." (set "_sp_args=")
 del subcmd
 del flags
 del args

 :: Filter out some commands. For any others, just run the command.
-if "%_sp_subcommand%" == "cd" (
+if %_sp_subcommand% == "cd" (
   goto :case_cd
-) else if "%_sp_subcommand%" == "env" (
+) else if %_sp_subcommand% == "env" (
   goto :case_env
-) else if "%_sp_subcommand%" == "load" (
+) else if %_sp_subcommand% == "load" (
   goto :case_load
-) else if "%_sp_subcommand%" == "unload" (
+) else if %_sp_subcommand% == "unload" (
   goto :case_load
 ) else (
   goto :default_case
@@ -143,19 +153,21 @@ goto :end_switch
 :: If no args or args contain --bat or -h/--help: just execute.
 if NOT defined _sp_args (
   goto :default_case
-)else if NOT "%_sp_args%"=="%_sp_args:--help=%" (
+)
+set args_no_quote=%_sp_args:"=%
+if NOT "%args_no_quote%"=="%args_no_quote:--help=%" (
   goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
+) else if NOT "%args_no_quote%"=="%args_no_quote: -h=%" (
   goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
+) else if NOT "%args_no_quote%"=="%args_no_quote:--bat=%" (
   goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:deactivate=%" (
+) else if NOT "%args_no_quote%"=="%args_no_quote:deactivate=%" (
   for /f "tokens=* USEBACKQ" %%I in (
-    `call python "%spack%" %_sp_flags% env deactivate --bat %_sp_args:deactivate=%`
+    `call python %spack% %_sp_flags% env deactivate --bat %args_no_quote:deactivate=%`
   ) do %%I
-) else if NOT "%_sp_args%"=="%_sp_args:activate=%" (
+) else if NOT "%args_no_quote%"=="%args_no_quote:activate=%" (
   for /f "tokens=* USEBACKQ" %%I in (
-    `call python "%spack%" %_sp_flags% env activate --bat %_sp_args:activate=%`
+    `python %spack% %_sp_flags% env activate --bat %args_no_quote:activate=%`
   ) do %%I
 ) else (
   goto :default_case
@@ -214,10 +226,10 @@ for %%Z in ("%_pa_new_path%") do if EXIST %%~sZ\NUL (
 exit /b 0

 :: set module system roots
 :_sp_multi_pathadd
 for %%I in (%~2) do (
   for %%Z in (%_sp_compatible_sys_types%) do (
     :pathadd "%~1" "%%I\%%Z"
   )
 )
 exit /B %ERRORLEVEL%
```
(likely etc/spack/defaults/concretizer.yaml)
```diff
@@ -20,9 +20,10 @@ concretizer:
   # needed to reach a solution increases noticeably with the number of targets
   # considered.
   targets:
-    # Determine whether we want to target specific or generic microarchitectures.
-    # An example of the first kind might be for instance "skylake" or "bulldozer",
-    # while generic microarchitectures are for instance "aarch64" or "x86_64_v4".
+    # Determine whether we want to target specific or generic
+    # microarchitectures. Valid values are: "microarchitectures" or "generic".
+    # An example of "microarchitectures" would be "skylake" or "bulldozer",
+    # while an example of "generic" would be "aarch64" or "x86_64_v4".
     granularity: microarchitectures
     # If "false" allow targets that are incompatible with the current host (for
     # instance concretize with target "icelake" while running on "haswell").
@@ -33,4 +34,4 @@ concretizer:
   # environments can always be activated. When "false" perform concretization separately
   # on each root spec, allowing different versions and variants of the same package in
   # an environment.
   unify: true
```
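For readers unfamiliar with the two granularity levels, a minimal sketch using archspec (the CPU-detection library Spack vendors) shows the difference; `archspec.cpu.host()` and the `generic` property are real archspec APIs, but the printed names depend on the machine this runs on.

```python
# Sketch: "microarchitectures" vs. "generic" granularity, via archspec.
# Output values depend on the host CPU.
import archspec.cpu

host = archspec.cpu.host()
print(host.name)          # specific microarchitecture, e.g. "skylake"
print(host.generic.name)  # generic level, e.g. "x86_64_v4"
```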
(likely etc/spack/defaults/config.yaml)
```diff
@@ -81,6 +81,10 @@ config:
   source_cache: $spack/var/spack/cache


+  ## Directory where spack managed environments are created and stored
+  # environments_root: $spack/var/spack/environments
+
+
   # Cache directory for miscellaneous files, like the package index.
   # This can be purged with `spack clean --misc-cache`
   misc_cache: $user_cache_path/cache
@@ -181,7 +185,7 @@ config:
   # when Spack needs to manage its own package metadata and all operations are
   # expected to complete within the default time limit. The timeout should
   # therefore generally be left untouched.
-  db_lock_timeout: 3
+  db_lock_timeout: 60


   # How long to wait when attempting to modify a package (e.g. to install it).
```
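As a hedged sketch of how a default like the new `db_lock_timeout` is consumed: Spack code reads settings through `spack.config.get` (a real Spack API); the actual call sites for the database lock are not part of this diff.

```python
# Sketch (not from this diff): reading the setting with Spack's config API.
import spack.config

timeout = spack.config.get("config:db_lock_timeout", default=60)
print(timeout)  # 60 unless a user or site scope overrides it
```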
lib/spack/docs/.gitignore (vendored) — 1 addition
```diff
@@ -5,3 +5,4 @@ llnl*.rst
 _build
 .spack-env
 spack.lock
+_spack_root
```
```diff
@@ -366,7 +366,7 @@ If the ``pyproject.toml`` lists ``mesonpy`` as the ``build-backend``,
 it uses the meson build system. Meson uses the default
 ``pyproject.toml`` keys to list dependencies.

-See https://meson-python.readthedocs.io/en/latest/usage/start.html
+See https://meson-python.readthedocs.io/en/latest/tutorials/introduction.html
 for more information.

 """
```
```diff
@@ -58,9 +58,7 @@ Testing
 ``WafPackage`` also provides ``test`` and ``installtest`` methods,
 which are run after the ``build`` and ``install`` phases, respectively.
 By default, these phases do nothing, but you can override them to
-run package-specific unit tests. For example, the
-`py-py2cairo <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-py2cairo/package.py>`_
-package uses:
+run package-specific unit tests.

 .. code-block:: python

```
(likely lib/spack/docs/conf.py)
```diff
@@ -89,6 +89,7 @@
 # Enable todo items
 todo_include_todos = True

+
 #
 # Disable duplicate cross-reference warnings.
 #
@@ -353,9 +354,7 @@ class SpackStyle(DefaultStyle):

 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
-latex_documents = [
-    ("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual"),
-]
+latex_documents = [("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual")]

 # The name of an image file (relative to this directory) to place at the top of
 # the title page.
@@ -402,7 +401,7 @@ class SpackStyle(DefaultStyle):
         "Spack",
         "One line description of project.",
         "Miscellaneous",
-    ),
+    )
 ]

 # Documents to append as an appendix to all manuals.
@@ -418,6 +417,4 @@ class SpackStyle(DefaultStyle):
 # -- Extension configuration -------------------------------------------------

 # sphinx.ext.intersphinx
-intersphinx_mapping = {
-    "python": ("https://docs.python.org/3", None),
-}
+intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}
```
```diff
@@ -222,7 +222,7 @@ and location. (See the *Configuration settings* section of ``man
 ccache`` to learn more about the default settings and how to change
 them). Please note that we currently disable ccache's ``hash_dir``
 feature to avoid an issue with the stage directory (see
-https://github.com/LLNL/spack/pull/3761#issuecomment-294352232).
+https://github.com/spack/spack/pull/3761#issuecomment-294352232).

 -----------------------
 ``shared_linking:type``
```
```diff
@@ -118,7 +118,7 @@ make another change, test that change, etc. We use `pytest
 <http://pytest.org/>`_ as our tests framework, and these types of
 arguments are just passed to the ``pytest`` command underneath. See `the
 pytest docs
-<http://doc.pytest.org/en/latest/usage.html#specifying-tests-selecting-tests>`_
+<https://doc.pytest.org/en/latest/how-to/usage.html#specifying-which-tests-to-run>`_
 for more details on test selection syntax.

 ``spack unit-test`` has a few special options that can help you
@@ -147,7 +147,7 @@ you want to know about. For example, to see just the tests in

 You can also combine any of these options with a ``pytest`` keyword
 search. See the `pytest usage docs
-<https://docs.pytest.org/en/stable/usage.html#specifying-tests-selecting-tests>`_:
+<https://doc.pytest.org/en/latest/how-to/usage.html#specifying-which-tests-to-run>`_
 for more details on test selection syntax. For example, to see the names of all tests that have "spec"
 or "concretize" somewhere in their names:

```
```diff
@@ -58,9 +58,9 @@ Using Environments
 Here we follow a typical use case of creating, concretizing,
 installing and loading an environment.

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Creating a named Environment
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Creating a managed Environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

 An environment is created by:

@@ -72,7 +72,8 @@ Spack then creates the directory ``var/spack/environments/myenv``.

 .. note::

-   All named environments are stored in the ``var/spack/environments`` folder.
+   All managed environments by default are stored in the ``var/spack/environments`` folder.
+   This location can be changed by setting the ``environments_root`` variable in ``config.yaml``.

 In the ``var/spack/environments/myenv`` directory, Spack creates the
 file ``spack.yaml`` and the hidden directory ``.spack-env``.
```
```diff
@@ -116,7 +116,7 @@ creates a simple python file:

     # FIXME: Add a list of GitHub accounts to
     # notify when the package is updated.
-    # maintainers = ["github_user1", "github_user2"]
+    # maintainers("github_user1", "github_user2")

     version("0.8.13", sha256="591a9b4ec81c1f2042a97aa60564e0cb79d041c52faa7416acb38bc95bd2c76d")

```
```diff
@@ -21,7 +21,7 @@ be present on the machine where Spack is run:
    :header-rows: 1

 These requirements can be easily installed on most modern Linux systems;
 on macOS, the Command Line Tools package is required, and a full XCode suite
 may be necessary for some packages such as Qt and apple-gl. Spack is designed
 to run on HPC platforms like Cray. Not all packages should be expected
 to work on all platforms.
@@ -1506,7 +1506,7 @@ Spack On Windows

 Windows support for Spack is currently under development. While this work is still in an early stage,
 it is currently possible to set up Spack and perform a few operations on Windows. This section will guide
 you through the steps needed to install Spack and start running it on a fresh Windows machine.

 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Step 1: Install prerequisites
@@ -1516,7 +1516,7 @@ To use Spack on Windows, you will need the following packages:

 Required:
 * Microsoft Visual Studio
 * Python
 * Git

 Optional:
@@ -1547,8 +1547,8 @@ Intel Fortran
 """""""""""""

 For Fortran-based packages on Windows, we strongly recommend Intel's oneAPI Fortran compilers.
 The suite is free to download from Intel's website, located at
-https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/fortran-compiler.html#gs.70t5tw.
+https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/fortran-compiler.html.
 The executable of choice for Spack will be Intel's Beta Compiler, ifx, which supports the classic
 compiler's (ifort's) frontend and runtime libraries by using LLVM.

```
File diff suppressed because it is too large.

lib/spack/env/cc (vendored) — 93 changes
```diff
@@ -427,6 +427,48 @@ isystem_include_dirs_list=""
 libs_list=""
 other_args_list=""

+# Global state for keeping track of -Wl,-rpath -Wl,/path
+wl_expect_rpath=no
+
+parse_Wl() {
+    # drop -Wl
+    shift
+    while [ $# -ne 0 ]; do
+        if [ "$wl_expect_rpath" = yes ]; then
+            rp="$1"
+            wl_expect_rpath=no
+        else
+            rp=""
+            case "$1" in
+                -rpath=*)
+                    rp="${1#-rpath=}"
+                    ;;
+                --rpath=*)
+                    rp="${1#--rpath=}"
+                    ;;
+                -rpath|--rpath)
+                    wl_expect_rpath=yes
+                    ;;
+                "$dtags_to_strip")
+                    ;;
+                *)
+                    append other_args_list "-Wl,$1"
+                    ;;
+            esac
+        fi
+        if [ -n "$rp" ]; then
+            if system_dir "$rp"; then
+                append system_rpath_dirs_list "$rp"
+            else
+                append rpath_dirs_list "$rp"
+            fi
+        fi
+        shift
+    done
+    # By lack of local variables, always set this to empty string.
+    rp=""
+}
+
 while [ $# -ne 0 ]; do

@@ -526,54 +568,9 @@ while [ $# -ne 0 ]; do
             append other_args_list "-l$arg"
             ;;
         -Wl,*)
-            arg="${1#-Wl,}"
-            if [ -z "$arg" ]; then shift; arg="$1"; fi
-            case "$arg" in
-                -rpath=*) rp="${arg#-rpath=}" ;;
-                --rpath=*) rp="${arg#--rpath=}" ;;
-                -rpath,*) rp="${arg#-rpath,}" ;;
-                --rpath,*) rp="${arg#--rpath,}" ;;
-                -rpath|--rpath)
-                    shift; arg="$1"
-                    case "$arg" in
-                        -Wl,*)
-                            rp="${arg#-Wl,}"
-                            ;;
-                        *)
-                            die "-Wl,-rpath was not followed by -Wl,*"
-                            ;;
-                    esac
-                    ;;
-                "$dtags_to_strip")
-                    : # We want to remove explicitly this flag
-                    ;;
-                *)
-                    append other_args_list "-Wl,$arg"
-                    ;;
-            esac
-            ;;
-        -Xlinker,*)
-            arg="${1#-Xlinker,}"
-            if [ -z "$arg" ]; then shift; arg="$1"; fi
-
-            case "$arg" in
-                -rpath=*) rp="${arg#-rpath=}" ;;
-                --rpath=*) rp="${arg#--rpath=}" ;;
-                -rpath|--rpath)
-                    shift; arg="$1"
-                    case "$arg" in
-                        -Xlinker,*)
-                            rp="${arg#-Xlinker,}"
-                            ;;
-                        *)
-                            die "-Xlinker,-rpath was not followed by -Xlinker,*"
-                            ;;
-                    esac
-                    ;;
-                *)
-                    append other_args_list "-Xlinker,$arg"
-                    ;;
-            esac
+            IFS=,
+            parse_Wl $1
+            unset IFS
             ;;
         -Xlinker)
             if [ "$2" = "-rpath" ]; then
```
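The new `parse_Wl` helper replaces two near-duplicate `case` blocks with a single loop over the comma-split body of a `-Wl,...` argument. Below is a rough Python model of the same state machine, illustrative only: `SYSTEM_DIRS` and `parse_wl` are hypothetical stand-ins for the wrapper's `system_dir` check and shell function, not Spack code.

```python
# Python model of the -Wl rpath state machine in lib/spack/env/cc (sketch).
from typing import List, Tuple

SYSTEM_DIRS = {"/usr/lib", "/usr/lib64", "/lib", "/lib64"}  # hypothetical stand-in


def parse_wl(tokens: List[str], dtags_to_strip: str = "--enable-new-dtags"
             ) -> Tuple[List[str], List[str], List[str]]:
    """Split the comma-separated body of -Wl,... into rpaths and passthrough args."""
    rpaths, system_rpaths, other = [], [], []
    expect_rpath = False
    for tok in tokens:
        rp = None
        if expect_rpath:
            rp, expect_rpath = tok, False          # token after -rpath is the path
        elif tok.startswith(("-rpath=", "--rpath=")):
            rp = tok.split("=", 1)[1]
        elif tok in ("-rpath", "--rpath"):
            expect_rpath = True
        elif tok == dtags_to_strip:
            pass                                   # dropped, like case "$dtags_to_strip")
        else:
            other.append("-Wl," + tok)
        if rp is not None:
            (system_rpaths if rp in SYSTEM_DIRS else rpaths).append(rp)
    return rpaths, system_rpaths, other


# "-Wl,-rpath,/opt/lib" -> tokens ["-rpath", "/opt/lib"] -> (["/opt/lib"], [], [])
print(parse_wl("-Wl,-rpath,/opt/lib".split(",")[1:]))
```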
(likely lib/spack/llnl/util/filesystem.py)
```diff
@@ -16,19 +16,16 @@
 import sys
 import tempfile
 from contextlib import contextmanager
-from sys import platform as _platform
 from typing import Callable, List, Match, Optional, Tuple, Union

 from llnl.util import tty
 from llnl.util.lang import dedupe, memoized
 from llnl.util.symlink import islink, symlink

-from spack.util.executable import CommandNotFoundError, Executable, which
+from spack.util.executable import Executable, which
 from spack.util.path import path_to_os_path, system_path_filter

-is_windows = _platform == "win32"
-
-if not is_windows:
+if sys.platform != "win32":
     import grp
     import pwd
 else:
```
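A plausible motivation for replacing the `is_windows` global with literal `sys.platform` comparisons, a pattern repeated throughout the rest of this diff, is static analysis: mypy narrows platform-specific branches only when the comparison is written out directly. A minimal sketch (not from the diff):

```python
# mypy understands a literal sys.platform comparison and type-checks each
# branch only for the matching platform; an aliased boolean defeats that.
import sys

if sys.platform == "win32":
    import msvcrt  # checked only on win32
else:
    import pwd     # checked only on POSIX
```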
```diff
@@ -84,9 +81,77 @@
     "visit_directory_tree",
 ]

+if sys.version_info < (3, 7, 4):
+    # monkeypatch shutil.copystat to fix PermissionError when copying read-only
+    # files on Lustre when using Python < 3.7.4
+
+    def copystat(src, dst, follow_symlinks=True):
+        """Copy file metadata
+
+        Copy the permission bits, last access time, last modification time, and
+        flags from `src` to `dst`. On Linux, copystat() also copies the "extended
+        attributes" where possible. The file contents, owner, and group are
+        unaffected. `src` and `dst` are path names given as strings.
+
+        If the optional flag `follow_symlinks` is not set, symlinks aren't
+        followed if and only if both `src` and `dst` are symlinks.
+        """
+
+        def _nop(args, ns=None, follow_symlinks=None):
+            pass
+
+        # follow symlinks (aka don't not follow symlinks)
+        follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
+        if follow:
+            # use the real function if it exists
+            def lookup(name):
+                return getattr(os, name, _nop)
+
+        else:
+            # use the real function only if it exists
+            # *and* it supports follow_symlinks
+            def lookup(name):
+                fn = getattr(os, name, _nop)
+                if sys.version_info >= (3, 3):
+                    if fn in os.supports_follow_symlinks:  # novermin
+                        return fn
+                return _nop
+
+        st = lookup("stat")(src, follow_symlinks=follow)
+        mode = stat.S_IMODE(st.st_mode)
+        lookup("utime")(dst, ns=(st.st_atime_ns, st.st_mtime_ns), follow_symlinks=follow)
+
+        # We must copy extended attributes before the file is (potentially)
+        # chmod()'ed read-only, otherwise setxattr() will error with -EACCES.
+        shutil._copyxattr(src, dst, follow_symlinks=follow)
+
+        try:
+            lookup("chmod")(dst, mode, follow_symlinks=follow)
+        except NotImplementedError:
+            # if we got a NotImplementedError, it's because
+            #   * follow_symlinks=False,
+            #   * lchown() is unavailable, and
+            #   * either
+            #       * fchownat() is unavailable or
+            #       * fchownat() doesn't implement AT_SYMLINK_NOFOLLOW.
+            #         (it returned ENOSUP.)
+            # therefore we're out of options--we simply cannot chown the
+            # symlink. give up, suppress the error.
+            # (which is what shutil always did in this circumstance.)
+            pass
+        if hasattr(st, "st_flags"):
+            try:
+                lookup("chflags")(dst, st.st_flags, follow_symlinks=follow)
+            except OSError as why:
+                for err in "EOPNOTSUPP", "ENOTSUP":
+                    if hasattr(errno, err) and why.errno == getattr(errno, err):
+                        break
+                else:
+                    raise
+
+    shutil.copystat = copystat
+
+
 def getuid():
-    if is_windows:
+    if sys.platform == "win32":
         import ctypes

         if ctypes.windll.shell32.IsUserAnAdmin() == 0:
```
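Why patching `shutil.copystat` matters beyond direct calls: `shutil.copy2` is `copyfile` plus `copystat`, so the monkeypatch above also changes how higher-level copies handle read-only files on affected Pythons. A small self-contained sketch:

```python
# shutil.copy2 = copyfile + copystat, so replacing shutil.copystat also
# affects copy2's metadata handling (permissions, timestamps).
import os
import shutil
import stat
import tempfile

src = os.path.join(tempfile.mkdtemp(), "src.txt")
with open(src, "w") as f:
    f.write("data")
os.chmod(src, stat.S_IREAD)                      # make the source read-only
dst = src + ".copy"
shutil.copy2(src, dst)                           # routes through (patched) copystat
print(oct(stat.S_IMODE(os.stat(dst).st_mode)))   # read-only bits were copied
```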
```diff
@@ -99,7 +164,7 @@ def getuid():
 @system_path_filter
 def rename(src, dst):
     # On Windows, os.rename will fail if the destination file already exists
-    if is_windows:
+    if sys.platform == "win32":
         # Windows path existence checks will sometimes fail on junctions/links/symlinks
         # so check for that case
         if os.path.exists(dst) or os.path.islink(dst):
@@ -117,13 +182,7 @@ def path_contains_subdirectory(path, root):
 @memoized
 def file_command(*args):
     """Creates entry point to `file` system command with provided arguments"""
-    try:
-        file_cmd = which("file", required=True)
-    except CommandNotFoundError as e:
-        if is_windows:
-            raise CommandNotFoundError("`file` utility is not available on Windows")
-        else:
-            raise e
+    file_cmd = which("file", required=True)
     for arg in args:
         file_cmd.add_default_arg(arg)
     return file_cmd
@@ -134,7 +193,11 @@ def _get_mime_type():
     """Generate method to call `file` system command to aquire mime type
     for a specified path
     """
-    return file_command("-b", "-h", "--mime-type")
+    if sys.platform == "win32":
+        # -h option (no-dereference) does not exist in Windows
+        return file_command("-b", "--mime-type")
+    else:
+        return file_command("-b", "-h", "--mime-type")


 @memoized
```
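A hedged usage sketch of the memoized pair above: `which` returns a callable `Executable`, and `output=str` captures stdout (Spack's usual `Executable` calling convention), though `_get_mime_type` itself is an internal helper.

```python
# Sketch: querying a file's MIME type through the helpers in the diff above.
from llnl.util.filesystem import _get_mime_type

file_cmd = _get_mime_type()                    # cached `file -b [-h] --mime-type`
mime = file_cmd("/etc/hostname", output=str).strip()
print(mime)                                    # e.g. "text/plain"
```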
```diff
@@ -270,7 +333,6 @@ def groupid_to_group(x):
     regex = re.escape(regex)
     filenames = path_to_os_path(*filenames)
     for filename in filenames:
-
         msg = 'FILTER FILE: {0} [replacing "{1}"]'
         tty.debug(msg.format(filename, regex))

@@ -486,7 +548,7 @@ def get_owner_uid(path, err_msg=None):
     else:
         p_stat = os.stat(path)

-    if _platform != "win32":
+    if sys.platform != "win32":
         owner_uid = p_stat.st_uid
     else:
         sid = win32security.GetFileSecurity(
@@ -519,7 +581,7 @@ def group_ids(uid=None):
     Returns:
         (list of int): gids of groups the user is a member of
     """
-    if is_windows:
+    if sys.platform == "win32":
         tty.warn("Function is not supported on Windows")
         return []

@@ -539,7 +601,7 @@ def group_ids(uid=None):
 @system_path_filter(arg_slice=slice(1))
 def chgrp(path, group, follow_symlinks=True):
     """Implement the bash chgrp function on a single path"""
-    if is_windows:
+    if sys.platform == "win32":
         raise OSError("Function 'chgrp' is not supported on Windows")

     if isinstance(group, str):
@@ -1066,7 +1128,7 @@ def open_if_filename(str_or_file, mode="r"):
 @system_path_filter
 def touch(path):
     """Creates an empty file at the specified path."""
-    if is_windows:
+    if sys.platform == "win32":
         perms = os.O_WRONLY | os.O_CREAT
     else:
         perms = os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY
@@ -1128,7 +1190,7 @@ def temp_cwd():
         yield tmp_dir
     finally:
         kwargs = {}
-        if is_windows:
+        if sys.platform == "win32":
             kwargs["ignore_errors"] = False
             kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
         shutil.rmtree(tmp_dir, **kwargs)
@@ -1222,7 +1284,6 @@ def traverse_tree(
         # target is relative to the link, then that may not resolve properly
         # relative to our cwd - see resolve_link_target_relative_to_the_link
         if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
-
             # When follow_nonexisting isn't set, don't descend into dirs
             # in source that do not exist in dest
             if follow_nonexisting or os.path.exists(dest_child):
@@ -1374,7 +1435,7 @@ def visit_directory_tree(root, visitor, rel_path="", depth=0):
         try:
             isdir = f.is_dir()
         except OSError as e:
-            if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink:
+            if sys.platform == "win32" and hasattr(e, "winerror") and e.winerror == 5 and islink:
                 # if path is a symlink, determine destination and
                 # evaluate file vs directory
                 link_target = resolve_link_target_relative_to_the_link(f)
@@ -1483,11 +1544,11 @@ def readonly_file_handler(ignore_errors=False):
     """

     def error_remove_readonly(func, path, exc):
-        if not is_windows:
+        if sys.platform != "win32":
             raise RuntimeError("This method should only be invoked on Windows")
         excvalue = exc[1]
         if (
-            is_windows
+            sys.platform == "win32"
             and func in (os.rmdir, os.remove, os.unlink)
             and excvalue.errno == errno.EACCES
         ):
@@ -1517,7 +1578,7 @@ def remove_linked_tree(path):

     # Windows readonly files cannot be removed by Python
     # directly.
-    if is_windows:
+    if sys.platform == "win32":
         kwargs["ignore_errors"] = False
         kwargs["onerror"] = readonly_file_handler(ignore_errors=True)

@@ -1664,7 +1725,6 @@ def find(root, files, recursive=True):

 @system_path_filter
 def _find_recursive(root, search_files):
-
     # The variable here is **on purpose** a defaultdict. The idea is that
     # we want to poke the filesystem as little as possible, but still maintain
     # stability in the order of the answer. Thus we are recording each library
@@ -2032,7 +2092,7 @@ def names(self):
         # on non Windows platform
         # Windows valid library extensions are:
         # ['.dll', '.lib']
-        valid_exts = [".dll", ".lib"] if is_windows else [".dylib", ".so", ".a"]
+        valid_exts = [".dll", ".lib"] if sys.platform == "win32" else [".dylib", ".so", ".a"]
         for ext in valid_exts:
            i = name.rfind(ext)
            if i != -1:
@@ -2180,7 +2240,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
         message = message.format(find_libraries.__name__, type(libraries))
         raise TypeError(message)

-    if is_windows:
+    if sys.platform == "win32":
         static_ext = "lib"
         # For linking (runtime=False) you need the .lib files regardless of
         # whether you are doing a shared or static link
@@ -2212,7 +2272,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
     # finally search all of root recursively. The search stops when the first
     # match is found.
     common_lib_dirs = ["lib", "lib64"]
-    if is_windows:
+    if sys.platform == "win32":
         common_lib_dirs.extend(["bin", "Lib"])

     for subdir in common_lib_dirs:
@@ -2347,7 +2407,7 @@ def _link(self, path, dest_dir):
         # For py2 compatibility, we have to catch the specific Windows error code
         # associate with trying to create a file that already exists (winerror 183)
         except OSError as e:
-            if e.winerror == 183:
+            if sys.platform == "win32" and e.winerror == 183:
                 # We have either already symlinked or we are encoutering a naming clash
                 # either way, we don't want to overwrite existing libraries
                 already_linked = islink(dest_file)
```
```diff
@@ -2635,3 +2695,28 @@ def temporary_dir(
         yield tmp_dir
     finally:
         remove_directory_contents(tmp_dir)
+
+
+def filesummary(path, print_bytes=16) -> Tuple[int, bytes]:
+    """Create a small summary of the given file. Does not error
+    when file does not exist.
+
+    Args:
+        print_bytes (int): Number of bytes to print from start/end of file
+
+    Returns:
+        Tuple of size and byte string containing first n .. last n bytes.
+        Size is 0 if file cannot be read."""
+    try:
+        n = print_bytes
+        with open(path, "rb") as f:
+            size = os.fstat(f.fileno()).st_size
+            if size <= 2 * n:
+                short_contents = f.read(2 * n)
+            else:
+                short_contents = f.read(n)
+                f.seek(-n, 2)
+                short_contents += b"..." + f.read(n)
+        return size, short_contents
+    except OSError:
+        return 0, b""
```
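A usage sketch for the new `filesummary` helper added above, assuming this diff is applied and Spack's `lib/spack` directory is importable; it is the kind of function one would use to preview a possibly corrupt file in an error message without reading it fully.

```python
# Sketch: summarizing a file as (size, first n .. last n bytes).
from llnl.util.filesystem import filesummary

size, preview = filesummary("/etc/os-release", print_bytes=16)
print(size, preview)  # e.g. 382 b'NAME="Ubuntu"\nVE...SION_CODENAME=ja'
```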
(likely lib/spack/llnl/util/lang.py)
```diff
@@ -198,7 +198,7 @@ def _memoized_function(*args, **kwargs):
         except TypeError as e:
             # TypeError is raised when indexing into a dict if the key is unhashable.
             raise UnhashableArguments(
-                "args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__),
+                "args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__)
             ) from e

     return _memoized_function
@@ -237,6 +237,7 @@ def decorator_with_or_without_args(decorator):
         @decorator

     """
+
     # See https://stackoverflow.com/questions/653368 for more on this
     @functools.wraps(decorator)
     def new_dec(*args, **kwargs):
@@ -990,8 +991,7 @@ def enum(**kwargs):


 def stable_partition(
-    input_iterable: Iterable,
-    predicate_fn: Callable[[Any], bool],
+    input_iterable: Iterable, predicate_fn: Callable[[Any], bool]
 ) -> Tuple[List[Any], List[Any]]:
     """Partition the input iterable according to a custom predicate.

@@ -1104,11 +1104,7 @@ def __enter__(self):

     def __exit__(self, exc_type, exc_value, tb):
         if exc_value is not None:
-            self._handler._receive_forwarded(
-                self._context,
-                exc_value,
-                traceback.format_tb(tb),
-            )
+            self._handler._receive_forwarded(self._context, exc_value, traceback.format_tb(tb))

         # Suppress any exception from being re-raised:
         # https://docs.python.org/3/reference/datamodel.html#object.__exit__.
```
|
|||||||
# so that we have a fast lookup and can run mkdir in order.
|
# so that we have a fast lookup and can run mkdir in order.
|
||||||
self.directories = OrderedDict()
|
self.directories = OrderedDict()
|
||||||
|
|
||||||
# Files to link. Maps dst_rel to (src_rel, src_root)
|
# Files to link. Maps dst_rel to (src_root, src_rel)
|
||||||
self.files = OrderedDict()
|
self.files = OrderedDict()
|
||||||
|
|
||||||
def before_visit_dir(self, root, rel_path, depth):
|
def before_visit_dir(self, root, rel_path, depth):
|
||||||
@@ -430,6 +430,11 @@ class MergeConflictError(Exception):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class ConflictingSpecsError(MergeConflictError):
|
||||||
|
def __init__(self, spec_1, spec_2):
|
||||||
|
super(MergeConflictError, self).__init__(spec_1, spec_2)
|
||||||
|
|
||||||
|
|
||||||
class SingleMergeConflictError(MergeConflictError):
|
class SingleMergeConflictError(MergeConflictError):
|
||||||
def __init__(self, path):
|
def __init__(self, path):
|
||||||
super(MergeConflictError, self).__init__("Package merge blocked by file: %s" % path)
|
super(MergeConflictError, self).__init__("Package merge blocked by file: %s" % path)
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ class Barrier:
|
|||||||
|
|
||||||
Python 2 doesn't have multiprocessing barriers so we implement this.
|
Python 2 doesn't have multiprocessing barriers so we implement this.
|
||||||
|
|
||||||
See http://greenteapress.com/semaphores/downey08semaphores.pdf, p. 41.
|
See https://greenteapress.com/semaphores/LittleBookOfSemaphores.pdf, p. 41.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, n, timeout=None):
|
def __init__(self, n, timeout=None):
|
||||||
|
|||||||
@@ -5,15 +5,13 @@
 import errno
 import os
 import shutil
+import sys
 import tempfile
 from os.path import exists, join
-from sys import platform as _platform

 from llnl.util import lang

-is_windows = _platform == "win32"
-
-if is_windows:
+if sys.platform == "win32":
     from win32file import CreateHardLink

@@ -23,7 +21,7 @@ def symlink(real_path, link_path):

     On Windows, use junctions if os.symlink fails.
     """
-    if not is_windows:
+    if sys.platform != "win32":
         os.symlink(real_path, link_path)
     elif _win32_can_symlink():
         # Windows requires target_is_directory=True when the target is a dir.
@@ -99,7 +97,7 @@ def _win32_is_junction(path):
     if os.path.islink(path):
         return False

-    if is_windows:
+    if sys.platform == "win32":
         import ctypes.wintypes

         GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
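Replacing the `is_windows` global with literal `sys.platform == "win32"` comparisons is what lets static checkers narrow platform-specific code: mypy special-cases the literal form but cannot see through a module-level boolean. A simplified illustration of the effect:

    import sys

    if sys.platform == "win32":
        # Type checkers only analyze this branch when targeting Windows,
        # so a Windows-only stdlib import here is not flagged elsewhere.
        import winreg

        def path_sep() -> str:
            return "\\"
    else:
        def path_sep() -> str:
            return "/"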
@@ -108,7 +108,6 @@ class SuppressOutput:
     """Class for disabling output in a scope using 'with' keyword"""

     def __init__(self, msg_enabled=True, warn_enabled=True, error_enabled=True):
-
         self._msg_enabled_initial = _msg_enabled
         self._warn_enabled_initial = _warn_enabled
         self._error_enabled_initial = _error_enabled
@@ -11,6 +11,7 @@
 import io
 import os
 import sys
+from typing import IO, Any, List, Optional

 from llnl.util.tty import terminal_size
 from llnl.util.tty.color import cextra, clen
@@ -97,7 +98,16 @@ def config_uniform_cols(elts, console_width, padding, cols=0):
     return config


-def colify(elts, **options):
+def colify(
+    elts: List[Any],
+    cols: int = 0,
+    output: Optional[IO] = None,
+    indent: int = 0,
+    padding: int = 2,
+    tty: Optional[bool] = None,
+    method: str = "variable",
+    console_cols: Optional[int] = None,
+):
     """Takes a list of elements as input and finds a good columnization
     of them, similar to how gnu ls does. This supports both
     uniform-width and variable-width (tighter) columns.
@@ -106,31 +116,21 @@ def colify(elts, **options):
     using ``str()``.

     Keyword Arguments:
-        output (typing.IO): A file object to write to. Default is ``sys.stdout``
-        indent (int): Optionally indent all columns by some number of spaces
-        padding (int): Spaces between columns. Default is 2
-        width (int): Width of the output. Default is 80 if tty not detected
-        cols (int): Force number of columns. Default is to size to terminal, or
+        output: A file object to write to. Default is ``sys.stdout``
+        indent: Optionally indent all columns by some number of spaces
+        padding: Spaces between columns. Default is 2
+        width: Width of the output. Default is 80 if tty not detected
+        cols: Force number of columns. Default is to size to terminal, or
             single-column if no tty
-        tty (bool): Whether to attempt to write to a tty. Default is to autodetect a
+        tty: Whether to attempt to write to a tty. Default is to autodetect a
             tty. Set to False to force single-column output
-        method (str): Method to use to fit columns. Options are variable or uniform.
+        method: Method to use to fit columns. Options are variable or uniform.
             Variable-width columns are tighter, uniform columns are all the same width
             and fit less data on the screen
+        console_cols: number of columns on this console (default: autodetect)
     """
-    # Get keyword arguments or set defaults
-    cols = options.pop("cols", 0)
-    output = options.pop("output", sys.stdout)
-    indent = options.pop("indent", 0)
-    padding = options.pop("padding", 2)
-    tty = options.pop("tty", None)
-    method = options.pop("method", "variable")
-    console_cols = options.pop("width", None)
-
-    if options:
-        raise TypeError(
-            "'%s' is an invalid keyword argument for this function." % next(options.iterkeys())
-        )
+    if output is None:
+        output = sys.stdout

     # elts needs to be an array of strings so we can count the elements
     elts = [str(elt) for elt in elts]
@@ -153,10 +153,11 @@ def colify(elts, **options):
         cols = 1

     # Specify the number of character columns to use.
-    if not console_cols:
+    if console_cols is None:
         console_rows, console_cols = terminal_size()
-    elif type(console_cols) != int:
+    elif not isinstance(console_cols, int):
         raise ValueError("Number of columns must be an int")
+
     console_cols = max(1, console_cols - indent)

     # Choose a method. Variable-width colums vs uniform-width.
@@ -192,7 +193,13 @@ def colify(elts, **options):
     return (config.cols, tuple(config.widths))


-def colify_table(table, **options):
+def colify_table(
+    table: List[List[Any]],
+    output: Optional[IO] = None,
+    indent: int = 0,
+    padding: int = 2,
+    console_cols: Optional[int] = None,
+):
     """Version of ``colify()`` for data expressed in rows, (list of lists).

     Same as regular colify but:
@@ -218,20 +225,38 @@ def transpose():
         for row in table:
             yield row[i]

-    if "cols" in options:
-        raise ValueError("Cannot override columsn in colify_table.")
-    options["cols"] = columns
-
-    # don't reduce to 1 column for non-tty
-    options["tty"] = True
-
-    colify(transpose(), **options)
+    colify(
+        transpose(),
+        cols=columns,  # this is always the number of cols in the table
+        tty=True,  # don't reduce to 1 column for non-tty
+        output=output,
+        indent=indent,
+        padding=padding,
+        console_cols=console_cols,
+    )


-def colified(elts, **options):
+def colified(
+    elts: List[Any],
+    cols: int = 0,
+    output: Optional[IO] = None,
+    indent: int = 0,
+    padding: int = 2,
+    tty: Optional[bool] = None,
+    method: str = "variable",
+    console_cols: Optional[int] = None,
+):
     """Invokes the ``colify()`` function but returns the result as a string
     instead of writing it to an output string."""
     sio = io.StringIO()
-    options["output"] = sio
-    colify(elts, **options)
+    colify(
+        elts,
+        cols=cols,
+        output=sio,
+        indent=indent,
+        padding=padding,
+        tty=tty,
+        method=method,
+        console_cols=console_cols,
+    )
     return sio.getvalue()
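With `**options` replaced by explicit keyword parameters, bad keywords now fail at the call site rather than via the old hand-rolled `TypeError`. A usage sketch against the new signatures (in-memory output; `tty=True` keeps multi-column layout for a non-terminal stream):

    import io

    from llnl.util.tty.colify import colified, colify

    buf = io.StringIO()
    colify(["ant", "bee", "cat", "dog"], cols=2, tty=True, console_cols=20, output=buf)
    print(buf.getvalue())

    # colified() is the same computation, returned as a string:
    text = colified(["ant", "bee", "cat", "dog"], cols=2, tty=True, console_cols=20)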
@@ -161,10 +161,7 @@ def _is_background(self):
     def _get_canon_echo_flags(self):
         """Get current termios canonical and echo settings."""
         cfg = termios.tcgetattr(self.stream)
-        return (
-            bool(cfg[3] & termios.ICANON),
-            bool(cfg[3] & termios.ECHO),
-        )
+        return (bool(cfg[3] & termios.ICANON), bool(cfg[3] & termios.ECHO))

     def _enable_keyboard_input(self):
         """Disable canonical input and echoing on ``self.stream``."""
@@ -77,10 +77,7 @@ def __init__(self, pid, controller_fd, timeout=1, sleep_time=1e-1, debug=False):
     def get_canon_echo_attrs(self):
         """Get echo and canon attributes of the terminal of controller_fd."""
         cfg = termios.tcgetattr(self.controller_fd)
-        return (
-            bool(cfg[3] & termios.ICANON),
-            bool(cfg[3] & termios.ECHO),
-        )
+        return (bool(cfg[3] & termios.ICANON), bool(cfg[3] & termios.ECHO))

     def horizontal_line(self, name):
         """Labled horizontal line for debugging."""
@@ -92,11 +89,7 @@ def status(self):
         if self.debug:
             canon, echo = self.get_canon_echo_attrs()
             sys.stderr.write(
-                "canon: %s, echo: %s\n"
-                % (
-                    "on" if canon else "off",
-                    "on" if echo else "off",
-                )
+                "canon: %s, echo: %s\n" % ("on" if canon else "off", "on" if echo else "off")
             )
             sys.stderr.write("input: %s\n" % self.input_on())
             sys.stderr.write("bg: %s\n" % self.background())
@@ -25,7 +25,7 @@ def architecture_compatible(self, target, constraint):
         return (
             not target.architecture
             or not constraint.architecture
-            or target.architecture.satisfies(constraint.architecture)
+            or target.architecture.intersects(constraint.architecture)
         )

     @memoized
@@ -104,7 +104,7 @@ def compiler_compatible(self, parent, child, **kwargs):
         for cversion in child.compiler.versions:
             # For a few compilers use specialized comparisons.
             # Otherwise match on version match.
-            if pversion.satisfies(cversion):
+            if pversion.intersects(cversion):
                 return True
             elif parent.compiler.name == "gcc" and self._gcc_compiler_compare(
                 pversion, cversion
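The `satisfies` → `intersects` renames in this range follow the split in spec semantics: `intersects` asks whether two constraints can hold at once, while `satisfies` asks whether one constraint implies the other. A rough illustration with closed version ranges (plain tuples, not Spack API):

    def intersects(a, b):
        """True iff some version lies in both ranges."""
        return a[0] <= b[1] and b[0] <= a[1]

    def satisfies(a, b):
        """True iff every version in a also lies in b (a is a subset of b)."""
        return b[0] <= a[0] and a[1] <= b[1]

    requirement = (9, 12)   # think "@9:12"
    candidate = (11, 14)    # think "@11:14"

    assert intersects(requirement, candidate)      # overlap at 11..12
    assert not satisfies(candidate, requirement)   # 13 and 14 fall outside

Compatibility checks like the two hunks above only need the weaker, symmetric `intersects` relation.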
@@ -321,8 +321,7 @@ def _check_patch_urls(pkgs, error_cls):
                 errors.append(
                     error_cls(
                         "patch URL in package {0} must end with {1}".format(
-                            pkg_cls.name,
-                            full_index_arg,
+                            pkg_cls.name, full_index_arg
                         ),
                         [patch.url],
                     )
@@ -722,7 +721,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
         dependency_pkg_cls = None
         try:
             dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
-            assert any(v.satisfies(s.versions) for v in list(dependency_pkg_cls.versions))
+            assert any(v.intersects(s.versions) for v in list(dependency_pkg_cls.versions))
         except Exception:
             summary = (
                 "{0}: dependency on {1} cannot be satisfied " "by known versions of {1.name}"
@@ -6,6 +6,8 @@
 import codecs
 import collections
 import hashlib
+import io
+import itertools
 import json
 import multiprocessing.pool
 import os
@@ -20,7 +22,8 @@
 import urllib.parse
 import urllib.request
 import warnings
-from contextlib import closing
+from contextlib import closing, contextmanager
+from gzip import GzipFile
 from urllib.error import HTTPError, URLError

 import ruamel.yaml as yaml
@@ -40,6 +43,8 @@
 import spack.relocate as relocate
 import spack.repo
 import spack.store
+import spack.traverse as traverse
+import spack.util.crypto
 import spack.util.file_cache as file_cache
 import spack.util.gpg
 import spack.util.spack_json as sjson
@@ -47,7 +52,7 @@
 import spack.util.url as url_util
 import spack.util.web as web_util
 from spack.caches import misc_cache_location
-from spack.relocate import utf8_paths_to_single_binary_regex
+from spack.relocate_text import utf8_paths_to_single_binary_regex
 from spack.spec import Spec
 from spack.stage import Stage
 from spack.util.executable import which
@@ -209,10 +214,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
                     break
                 else:
                     self._mirrors_for_spec[dag_hash].append(
-                        {
-                            "mirror_url": mirror_url,
-                            "spec": indexed_spec,
-                        }
+                        {"mirror_url": mirror_url, "spec": indexed_spec}
                     )
         finally:
             shutil.rmtree(tmpdir)
@@ -294,10 +296,9 @@ def update_spec(self, spec, found_list):
                 cur_entry["spec"] = new_entry["spec"]
                 break
         else:
-            current_list.append = {
-                "mirror_url": new_entry["mirror_url"],
-                "spec": new_entry["spec"],
-            }
+            current_list.append(
+                {"mirror_url": new_entry["mirror_url"], "spec": new_entry["spec"]}
+            )

     def update(self, with_cooldown=False):
         """Make sure local cache of buildcache index files is up to date.
@@ -364,8 +365,7 @@ def update(self, with_cooldown=False):
             # May need to fetch the index and update the local caches
             try:
                 needs_regen = self._fetch_and_cache_index(
-                    cached_mirror_url,
-                    cache_entry=cache_entry,
+                    cached_mirror_url, cache_entry=cache_entry
                 )
                 self._last_fetch_times[cached_mirror_url] = (now, True)
                 all_methods_failed = False
@@ -557,7 +557,12 @@ class NoChecksumException(spack.error.SpackError):
     Raised if file fails checksum verification.
     """

-    pass
+    def __init__(self, path, size, contents, algorithm, expected, computed):
+        super(NoChecksumException, self).__init__(
+            f"{algorithm} checksum failed for {path}",
+            f"Expected {expected} but got {computed}. "
+            f"File size = {size} bytes. Contents = {contents!r}",
+        )


 class NewLayoutException(spack.error.SpackError):
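`NoChecksumException` now carries the mismatch details instead of a one-line message. A sketch of how a caller constructs it after this change (all values hypothetical; assumes the class above is in scope):

    path = "/tmp/zlib-1.2.13.spack"
    size, contents = 1024, b"\x1f\x8b..."

    try:
        raise NoChecksumException(path, size, contents, "sha256", "deadbeef", "baadf00d")
    except NoChecksumException as e:
        print(e)  # "sha256 checksum failed for /tmp/zlib-1.2.13.spack"

The file size and first/last bytes come from the new `filesummary` helper, which makes truncated-download failures much easier to diagnose from CI logs.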
@@ -737,34 +742,31 @@ def get_buildfile_manifest(spec):
     return data


-def write_buildinfo_file(spec, workdir, rel=False):
-    """
-    Create a cache file containing information
-    required for the relocation
-    """
+def prefixes_to_hashes(spec):
+    return {
+        str(s.prefix): s.dag_hash()
+        for s in itertools.chain(
+            spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
+        )
+    }
+
+
+def get_buildinfo_dict(spec, rel=False):
+    """Create metadata for a tarball"""
     manifest = get_buildfile_manifest(spec)

-    prefix_to_hash = dict()
-    prefix_to_hash[str(spec.package.prefix)] = spec.dag_hash()
-    deps = spack.build_environment.get_rpath_deps(spec.package)
-    for d in deps + spec.dependencies(deptype="run"):
-        prefix_to_hash[str(d.prefix)] = d.dag_hash()
-
-    # Create buildinfo data and write it to disk
-    buildinfo = {}
-    buildinfo["sbang_install_path"] = spack.hooks.sbang.sbang_install_path()
-    buildinfo["relative_rpaths"] = rel
-    buildinfo["buildpath"] = spack.store.layout.root
-    buildinfo["spackprefix"] = spack.paths.prefix
-    buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
-    buildinfo["relocate_textfiles"] = manifest["text_to_relocate"]
-    buildinfo["relocate_binaries"] = manifest["binary_to_relocate"]
-    buildinfo["relocate_links"] = manifest["link_to_relocate"]
-    buildinfo["hardlinks_deduped"] = manifest["hardlinks_deduped"]
-    buildinfo["prefix_to_hash"] = prefix_to_hash
-    filename = buildinfo_file_name(workdir)
-    with open(filename, "w") as outfile:
-        outfile.write(syaml.dump(buildinfo, default_flow_style=True))
+    return {
+        "sbang_install_path": spack.hooks.sbang.sbang_install_path(),
+        "relative_rpaths": rel,
+        "buildpath": spack.store.layout.root,
+        "spackprefix": spack.paths.prefix,
+        "relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),
+        "relocate_textfiles": manifest["text_to_relocate"],
+        "relocate_binaries": manifest["binary_to_relocate"],
+        "relocate_links": manifest["link_to_relocate"],
+        "hardlinks_deduped": manifest["hardlinks_deduped"],
+        "prefix_to_hash": prefixes_to_hashes(spec),
+    }


 def tarball_directory_name(spec):
@@ -1137,6 +1139,68 @@ def generate_key_index(key_prefix, tmpdir=None):
         shutil.rmtree(tmpdir)


+@contextmanager
+def gzip_compressed_tarfile(path):
+    """Create a reproducible, compressed tarfile"""
+    # Create gzip compressed tarball of the install prefix
+    # 1) Use explicit empty filename and mtime 0 for gzip header reproducibility.
+    #    If the filename="" is dropped, Python will use fileobj.name instead.
+    #    This should effectively mimick `gzip --no-name`.
+    # 2) On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
+    # compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
+    # compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
+    # So we follow gzip.
+    with open(path, "wb") as fileobj, closing(
+        GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=fileobj)
+    ) as gzip_file, tarfile.TarFile(name="", mode="w", fileobj=gzip_file) as tar:
+        yield tar
+
+
+def deterministic_tarinfo(tarinfo: tarfile.TarInfo):
+    # We only add files, symlinks, hardlinks, and directories
+    # No character devices, block devices and FIFOs should ever enter a tarball.
+    if tarinfo.isdev():
+        return None
+
+    # For distribution, it makes no sense to user/group data; since (a) they don't exist
+    # on other machines, and (b) they lead to surprises as `tar x` run as root will change
+    # ownership if it can. We want to extract as the current user. By setting owner to root,
+    # root will extract as root, and non-privileged user will extract as themselves.
+    tarinfo.uid = 0
+    tarinfo.gid = 0
+    tarinfo.uname = ""
+    tarinfo.gname = ""
+
+    # Reset mtime to epoch time, our prefixes are not truly immutable, so files may get
+    # touched; as long as the content does not change, this ensures we get stable tarballs.
+    tarinfo.mtime = 0
+
+    # Normalize mode
+    if tarinfo.isfile() or tarinfo.islnk():
+        # If user can execute, use 0o755; else 0o644
+        # This is to avoid potentially unsafe world writable & exeutable files that may get
+        # extracted when Python or tar is run with privileges
+        tarinfo.mode = 0o644 if tarinfo.mode & 0o100 == 0 else 0o755
+    else:  # symbolic link and directories
+        tarinfo.mode = 0o755
+
+    return tarinfo
+
+
+def tar_add_metadata(tar: tarfile.TarFile, path: str, data: dict):
+    # Serialize buildinfo for the tarball
+    bstring = syaml.dump(data, default_flow_style=True).encode("utf-8")
+    tarinfo = tarfile.TarInfo(name=path)
+    tarinfo.size = len(bstring)
+    tar.addfile(deterministic_tarinfo(tarinfo), io.BytesIO(bstring))
+
+
+def _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo):
+    with gzip_compressed_tarfile(tarfile_path) as tar:
+        tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo)
+        tar_add_metadata(tar, buildinfo_file_name(pkg_dir), buildinfo)
+
+
 def _build_tarball(
     spec,
     out_url,
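Taken together, `gzip_compressed_tarfile` and `deterministic_tarinfo` aim at byte-for-byte stable archives: the gzip header gets no filename and mtime 0, and every member is normalized to uid/gid 0, mtime 0, and a canonical mode. A sketch of the property (assumes the helpers above are in scope and `prefix_dir` names an existing directory):

    import hashlib

    def archive_digest(archive_path, prefix_dir):
        with gzip_compressed_tarfile(archive_path) as tar:
            tar.add(name=prefix_dir, arcname="pkg", filter=deterministic_tarinfo)
        with open(archive_path, "rb") as f:
            return hashlib.sha256(f.read()).hexdigest()

    # Archiving the same content twice yields identical bytes, which a plain
    # `tar czf` does not guarantee (gzip embeds a timestamp by default).
    assert archive_digest("/tmp/a.tar.gz", prefix_dir) == archive_digest("/tmp/b.tar.gz", prefix_dir)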
@@ -1196,50 +1260,45 @@ def _build_tarball(
     ):
         raise NoOverwriteException(url_util.format(remote_specfile_path))

-    # make a copy of the install directory to work with
-    workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))
-    # install_tree copies hardlinks
-    # create a temporary tarfile from prefix and exract it to workdir
-    # tarfile preserves hardlinks
-    temp_tarfile_name = tarball_name(spec, ".tar")
-    temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
-    with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
-        tar.add(name="%s" % spec.prefix, arcname=".")
-    with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
-        tar.extractall(workdir)
-    os.remove(temp_tarfile_path)
+    pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
+    workdir = os.path.join(tmpdir, pkg_dir)
+
+    # TODO: We generally don't want to mutate any files, but when using relative
+    # mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
+    # For now, we only make a full copy of the spec prefix when in relative mode.
+    if relative:
+        # tarfile is used because it preserves hardlink etc best.
+        binaries_dir = workdir
+        temp_tarfile_name = tarball_name(spec, ".tar")
+        temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
+        with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
+            tar.add(name="%s" % spec.prefix, arcname=".")
+        with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
+            tar.extractall(workdir)
+        os.remove(temp_tarfile_path)
+    else:
+        binaries_dir = spec.prefix

     # create info for later relocation and create tar
-    write_buildinfo_file(spec, workdir, relative)
+    buildinfo = get_buildinfo_dict(spec, relative)

     # optionally make the paths in the binaries relative to each other
     # in the spack install tree before creating tarball
-    if relative:
-        try:
-            make_package_relative(workdir, spec, allow_root)
-        except Exception as e:
-            shutil.rmtree(workdir)
-            shutil.rmtree(tarfile_dir)
-            shutil.rmtree(tmpdir)
-            tty.die(e)
-    else:
-        try:
-            check_package_relocatable(workdir, spec, allow_root)
-        except Exception as e:
-            shutil.rmtree(workdir)
-            shutil.rmtree(tarfile_dir)
-            shutil.rmtree(tmpdir)
-            tty.die(e)
-
-    # create gzip compressed tarball of the install prefix
-    # On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
-    # compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
-    # compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
-    # So we follow gzip.
-    with closing(tarfile.open(tarfile_path, "w:gz", compresslevel=6)) as tar:
-        tar.add(name="%s" % workdir, arcname="%s" % os.path.basename(spec.prefix))
+    try:
+        if relative:
+            make_package_relative(workdir, spec, buildinfo, allow_root)
+        elif not allow_root:
+            ensure_package_relocatable(buildinfo, binaries_dir)
+    except Exception as e:
+        shutil.rmtree(tmpdir)
+        tty.die(e)
+
+    _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)
+
     # remove copy of install directory
-    shutil.rmtree(workdir)
+    if relative:
+        shutil.rmtree(workdir)

     # get the sha256 checksum of the tarball
     checksum = checksum_tarball(tarfile_path)
@@ -1298,57 +1357,48 @@ def _build_tarball(
         return None


-def nodes_to_be_packaged(specs, include_root=True, include_dependencies=True):
+def nodes_to_be_packaged(specs, root=True, dependencies=True):
     """Return the list of nodes to be packaged, given a list of specs.

     Args:
         specs (List[spack.spec.Spec]): list of root specs to be processed
-        include_root (bool): include the root of each spec in the nodes
-        include_dependencies (bool): include the dependencies of each
+        root (bool): include the root of each spec in the nodes
+        dependencies (bool): include the dependencies of each
             spec in the nodes
     """
-    if not include_root and not include_dependencies:
-        return set()
-
-    def skip_node(current_node):
-        if current_node.external or current_node.virtual:
-            return True
-        return spack.store.db.query_one(current_node) is None
-
-    expanded_set = set()
-    for current_spec in specs:
-        if not include_dependencies:
-            nodes = [current_spec]
-        else:
-            nodes = [
-                n
-                for n in current_spec.traverse(
-                    order="post", root=include_root, deptype=("link", "run")
-                )
-            ]
-
-        for node in nodes:
-            if not skip_node(node):
-                expanded_set.add(node)
-
-    return expanded_set
+    if not root and not dependencies:
+        return []
+    elif dependencies:
+        nodes = traverse.traverse_nodes(specs, root=root, deptype="all")
+    else:
+        nodes = set(specs)
+
+    # Limit to installed non-externals.
+    packageable = lambda n: not n.external and n.installed
+
+    # Mass install check
+    with spack.store.db.read_transaction():
+        return list(filter(packageable, nodes))


-def push(specs, push_url, specs_kwargs=None, **kwargs):
+def push(specs, push_url, include_root: bool = True, include_dependencies: bool = True, **kwargs):
     """Create a binary package for each of the specs passed as input and push them
     to a given push URL.

     Args:
         specs (List[spack.spec.Spec]): installed specs to be packaged
         push_url (str): url where to push the binary package
-        specs_kwargs (dict): dictionary with two possible boolean keys, "include_root"
-            and "include_dependencies", which determine which part of each spec is
-            packaged and pushed to the mirror
+        include_root (bool): include the root of each spec in the nodes
+        include_dependencies (bool): include the dependencies of each
+            spec in the nodes
         **kwargs: TODO

     """
-    specs_kwargs = specs_kwargs or {"include_root": True, "include_dependencies": True}
-    nodes = nodes_to_be_packaged(specs, **specs_kwargs)
+    # Be explicit about the arugment type
+    if type(include_root) != bool or type(include_dependencies) != bool:
+        raise ValueError("Expected include_root/include_dependencies to be True/False")
+
+    nodes = nodes_to_be_packaged(specs, root=include_root, dependencies=include_dependencies)

     # TODO: This seems to be an easy target for task
     # TODO: distribution using a parallel pool
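Call sites move from a kwargs dict to the two explicit flags; roughly (spec list and mirror URL hypothetical):

    import spack.binary_distribution as bindist

    # Package only the root specs, skipping dependency nodes:
    bindist.push(specs, "s3://my-mirror/build_cache", include_root=True, include_dependencies=False)

    # Non-boolean values now fail fast with ValueError instead of being
    # silently forwarded through **kwargs.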
@@ -1535,13 +1585,12 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
     return None


-def make_package_relative(workdir, spec, allow_root):
+def make_package_relative(workdir, spec, buildinfo, allow_root):
     """
     Change paths in binaries to relative paths. Change absolute symlinks
     to relative symlinks.
     """
     prefix = spec.prefix
-    buildinfo = read_buildinfo_file(workdir)
     old_layout_root = buildinfo["buildpath"]
     orig_path_names = list()
     cur_path_names = list()
@@ -1565,16 +1614,10 @@ def make_package_relative(workdir, spec, allow_root):
     relocate.make_link_relative(cur_path_names, orig_path_names)


-def check_package_relocatable(workdir, spec, allow_root):
-    """
-    Check if package binaries are relocatable.
-    Change links to placeholder links.
-    """
-    buildinfo = read_buildinfo_file(workdir)
-    cur_path_names = list()
-    for filename in buildinfo["relocate_binaries"]:
-        cur_path_names.append(os.path.join(workdir, filename))
-    allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
+def ensure_package_relocatable(buildinfo, binaries_dir):
+    """Check if package binaries are relocatable."""
+    binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
+    relocate.ensure_binaries_are_relocatable(binaries)


 def dedupe_hardlinks_if_necessary(root, buildinfo):
@@ -1730,16 +1773,16 @@ def is_backup_file(file):

         # For all buildcaches
         # relocate the install prefixes in text files including dependencies
-        relocate.unsafe_relocate_text(text_names, prefix_to_prefix_text)
+        relocate.relocate_text(text_names, prefix_to_prefix_text)

         # relocate the install prefixes in binary files including dependencies
-        relocate.unsafe_relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
+        relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)

     # If we are installing back to the same location
     # relocate the sbang location if the spack directory changed
     else:
         if old_spack_prefix != new_spack_prefix:
-            relocate.unsafe_relocate_text(text_names, prefix_to_prefix_text)
+            relocate.relocate_text(text_names, prefix_to_prefix_text)


 def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
@@ -1777,14 +1820,15 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
         raise UnsignedPackageException(
             "To install unsigned packages, use the --no-check-signature option."
         )
-    # get the sha256 checksum of the tarball
+
+    # compute the sha256 checksum of the tarball
     local_checksum = checksum_tarball(tarfile_path)
+    expected = remote_checksum["hash"]

     # if the checksums don't match don't install
-    if local_checksum != remote_checksum["hash"]:
-        raise NoChecksumException(
-            "Package tarball failed checksum verification.\n" "It cannot be installed."
-        )
+    if local_checksum != expected:
+        size, contents = fsys.filesummary(tarfile_path)
+        raise NoChecksumException(tarfile_path, size, contents, "sha256", expected, local_checksum)

     return tarfile_path
@@ -1842,12 +1886,14 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for

     # compute the sha256 checksum of the tarball
     local_checksum = checksum_tarball(tarfile_path)
+    expected = bchecksum["hash"]

     # if the checksums don't match don't install
-    if local_checksum != bchecksum["hash"]:
+    if local_checksum != expected:
+        size, contents = fsys.filesummary(tarfile_path)
         _delete_staged_downloads(download_result)
         raise NoChecksumException(
-            "Package tarball failed checksum verification.\n" "It cannot be installed."
+            tarfile_path, size, contents, "sha256", expected, local_checksum
         )

     new_relative_prefix = str(os.path.relpath(spec.prefix, spack.store.layout.root))
@@ -1938,8 +1984,11 @@ def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None
         tarball_path = download_result["tarball_stage"].save_filename
         msg = msg.format(tarball_path, sha256)
         if not checker.check(tarball_path):
+            size, contents = fsys.filesummary(tarball_path)
             _delete_staged_downloads(download_result)
-            raise spack.binary_distribution.NoChecksumException(msg)
+            raise NoChecksumException(
+                tarball_path, size, contents, checker.hash_name, sha256, checker.sum
+            )
         tty.debug("Verified SHA256 checksum of the build cache")

     # don't print long padded paths while extracting/relocating binaries
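After these changes the three verification sites share one shape — compute, compare, and on mismatch attach a `filesummary` of the offending file. Condensed (names as used in this file):

    def verify_or_raise(tarfile_path, expected):
        computed = checksum_tarball(tarfile_path)  # sha256 of the file on disk
        if computed != expected:
            size, contents = fsys.filesummary(tarfile_path)
            raise NoChecksumException(tarfile_path, size, contents, "sha256", expected, computed)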
@@ -2013,12 +2062,7 @@ def try_direct_fetch(spec, mirrors=None):
         fetched_spec = Spec.from_json(specfile_contents)
         fetched_spec._mark_concrete()

-        found_specs.append(
-            {
-                "mirror_url": mirror.fetch_url,
-                "spec": fetched_spec,
-            }
-        )
+        found_specs.append({"mirror_url": mirror.fetch_url, "spec": fetched_spec})

     return found_specs
@@ -2320,11 +2364,7 @@ def download_single_spec(concrete_spec, destination, mirror_url=None):
     local_tarball_path = os.path.join(destination, tarball_dir_name)

     files_to_fetch = [
-        {
-            "url": [tarball_path_name],
-            "path": local_tarball_path,
-            "required": True,
-        },
+        {"url": [tarball_path_name], "path": local_tarball_path, "required": True},
         {
             "url": [
                 tarball_name(concrete_spec, ".spec.json.sig"),
@@ -2445,12 +2485,7 @@ def conditional_fetch(self):
             response.headers.get("Etag", None) or response.headers.get("etag", None)
         )

-        return FetchIndexResult(
-            etag=etag,
-            hash=computed_hash,
-            data=result,
-            fresh=False,
-        )
+        return FetchIndexResult(etag=etag, hash=computed_hash, data=result, fresh=False)


 class EtagIndexFetcher:
@@ -5,11 +5,7 @@
 """Function and classes needed to bootstrap Spack itself."""

 from .config import ensure_bootstrap_configuration, is_bootstrapping
-from .core import (
-    all_core_root_specs,
-    ensure_core_dependencies,
-    ensure_patchelf_in_path_or_raise,
-)
+from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
 from .environment import BootstrapEnvironment, ensure_environment_dependencies
 from .status import status_message

@@ -59,10 +59,7 @@ def _try_import_from_store(module, query_spec, query_info=None):
     # to be picked up and used, possibly depending on something in the store, first
     # allows the bootstrap version to work when an incompatible version is in
     # sys.path
-    orders = [
-        module_paths + sys.path,
-        sys.path + module_paths,
-    ]
+    orders = [module_paths + sys.path, sys.path + module_paths]
     for path in orders:
         sys.path = path
         try:
@@ -53,12 +53,7 @@
 import spack.util.url
 import spack.version

-from ._common import (
-    _executables_in_store,
-    _python_import,
-    _root_spec,
-    _try_import_from_store,
-)
+from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
 from .config import spack_python_interpreter, spec_for_current_python

 #: Name of the file containing metadata about the bootstrapping source
@@ -213,7 +208,7 @@ def _install_and_test(self, abstract_spec, bincache_platform, bincache_data, tes
             # This will be None for things that don't depend on python
             python_spec = item.get("python", None)
             # Skip specs which are not compatible
-            if not abstract_spec.satisfies(candidate_spec):
+            if not abstract_spec.intersects(candidate_spec):
                 continue

             if python_spec is not None and python_spec not in abstract_spec:
@@ -171,7 +171,7 @@ def mypy_root_spec():

 def black_root_spec():
     """Return the root spec used to bootstrap black"""
-    return _root_spec("py-black")
+    return _root_spec("py-black@:23.1.0")


 def flake8_root_spec():
@@ -69,13 +69,13 @@
 from spack.installer import InstallError
 from spack.util.cpus import cpus_available
 from spack.util.environment import (
+    SYSTEM_DIRS,
     EnvironmentModifications,
     env_flag,
     filter_system_paths,
     get_path,
     inspect_path,
     is_system_path,
-    system_dirs,
     validate,
 )
 from spack.util.executable import Executable
@@ -397,7 +397,7 @@ def set_compiler_environment_variables(pkg, env):

     env.set("SPACK_COMPILER_SPEC", str(spec.compiler))

-    env.set("SPACK_SYSTEM_DIRS", ":".join(system_dirs))
+    env.set("SPACK_SYSTEM_DIRS", ":".join(SYSTEM_DIRS))

     compiler.setup_custom_environment(pkg, env)

@@ -423,6 +423,14 @@ def set_wrapper_variables(pkg, env):
     compiler = pkg.compiler
     env.extend(spack.schema.environment.parse(compiler.environment))

+    # Before setting up PATH to Spack compiler wrappers, make sure compiler is in PATH
+    # This ensures that non-wrapped executables from the compiler bin directory are available
+    bindirs = dedupe(
+        [os.path.dirname(c) for c in [compiler.cc, compiler.cxx, compiler.fc, compiler.f77]]
+    )
+    for bindir in bindirs:
+        env.prepend_path("PATH", bindir)
+
     if compiler.extra_rpaths:
         extra_rpaths = ":".join(compiler.extra_rpaths)
         env.set("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)
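The new PATH block dedupes the wrapper directories first, so a toolchain whose cc/cxx/fc/f77 all live in one bin/ contributes a single PATH entry. The helper preserves first-seen order; e.g. (paths hypothetical):

    import os

    from llnl.util.lang import dedupe

    compilers = [
        "/opt/gcc-12/bin/gcc",
        "/opt/gcc-12/bin/g++",
        "/opt/gcc-12/bin/gfortran",
        "/opt/gcc-12/bin/gfortran",
    ]
    assert list(dedupe(os.path.dirname(c) for c in compilers)) == ["/opt/gcc-12/bin"]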
@@ -485,7 +493,13 @@ def update_compiler_args_for_dep(dep):
         query = pkg.spec[dep.name]
         dep_link_dirs = list()
         try:
+            # In some circumstances (particularly for externals) finding
+            # libraries packages can be time consuming, so indicate that
+            # we are performing this operation (and also report when it
+            # finishes).
+            tty.debug("Collecting libraries for {0}".format(dep.name))
             dep_link_dirs.extend(query.libs.directories)
+            tty.debug("Libraries for {0} have been collected.".format(dep.name))
         except NoLibrariesError:
             tty.debug("No libraries found for {0}".format(dep.name))

@@ -772,7 +786,9 @@ def setup_package(pkg, dirty, context="build"):
     set_compiler_environment_variables(pkg, env_mods)
     set_wrapper_variables(pkg, env_mods)

+    tty.debug("setup_package: grabbing modifications from dependencies")
     env_mods.extend(modifications_from_dependencies(pkg.spec, context, custom_mods_only=False))
+    tty.debug("setup_package: collected all modifications from dependencies")

     # architecture specific setup
     platform = spack.platforms.by_name(pkg.spec.architecture.platform)
@@ -780,6 +796,7 @@ def setup_package(pkg, dirty, context="build"):
     platform.setup_platform_environment(pkg, env_mods)

     if context == "build":
+        tty.debug("setup_package: setup build environment for root")
         builder = spack.builder.create(pkg)
         builder.setup_build_environment(env_mods)

@@ -790,6 +807,7 @@ def setup_package(pkg, dirty, context="build"):
             " includes and omit it when invoked with '--cflags'."
         )
     elif context == "test":
+        tty.debug("setup_package: setup test environment for root")
         env_mods.extend(
             inspect_path(
                 pkg.spec.prefix,
@@ -806,6 +824,7 @@ def setup_package(pkg, dirty, context="build"):
     # Load modules on an already clean environment, just before applying Spack's
     # own environment modifications. This ensures Spack controls CC/CXX/... variables.
     if need_compiler:
+        tty.debug("setup_package: loading compiler modules")
         for mod in pkg.compiler.modules:
             load_module(mod)

@@ -943,6 +962,7 @@ def default_modifications_for_dep(dep):
         _make_runnable(dep, env)

     def add_modifications_for_dep(dep):
+        tty.debug("Adding env modifications for {0}".format(dep.name))
         # Some callers of this function only want the custom modifications.
         # For callers that want both custom and default modifications, we want
         # to perform the default modifications here (this groups custom
@@ -968,6 +988,7 @@ def add_modifications_for_dep(dep):
             builder.setup_dependent_build_environment(env, spec)
         else:
             dpkg.setup_dependent_run_environment(env, spec)
+        tty.debug("Added env modifications for {0}".format(dep.name))

     # Note that we want to perform environment modifications in a fixed order.
     # The Spec.traverse method provides this: i.e. in addition to
@@ -1016,7 +1037,6 @@ def get_cmake_prefix_path(pkg):
 def _setup_pkg_and_run(
     serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
 ):
-
     context = kwargs.get("context", "build")

     try:
@@ -110,11 +110,7 @@ class AutotoolsBuilder(BaseBuilder):
     phases = ("autoreconf", "configure", "build", "install")

     #: Names associated with package methods in the old build-system format
-    legacy_methods = (
-        "configure_args",
-        "check",
-        "installcheck",
-    )
+    legacy_methods = ("configure_args", "check", "installcheck")

     #: Names associated with package attributes in the old build-system format
     legacy_attributes = (
@@ -31,7 +31,6 @@ def cmake_cache_option(name, boolean_value, comment=""):


 class CachedCMakeBuilder(CMakeBuilder):
-
     #: Phases of a Cached CMake package
     #: Note: the initconfig phase is used for developer builds as a final phase to stop on
     phases: Tuple[str, ...] = ("initconfig", "cmake", "build", "install")
@@ -252,10 +252,7 @@ def std_args(pkg, generator=None):

         if platform.mac_ver()[0]:
             args.extend(
-                [
-                    define("CMAKE_FIND_FRAMEWORK", "LAST"),
-                    define("CMAKE_FIND_APPBUNDLE", "LAST"),
-                ]
+                [define("CMAKE_FIND_FRAMEWORK", "LAST"), define("CMAKE_FIND_APPBUNDLE", "LAST")]
             )

         # Set up CMake rpath
@@ -8,7 +8,7 @@
 import spack.directives
 import spack.package_base

-from ._checks import BaseBuilder, apply_macos_rpath_fixups
+from ._checks import BaseBuilder, apply_macos_rpath_fixups, execute_install_time_tests


 class Package(spack.package_base.PackageBase):
@@ -38,7 +38,13 @@ class GenericBuilder(BaseBuilder):
     legacy_methods: Tuple[str, ...] = ()

     #: Names associated with package attributes in the old build-system format
-    legacy_attributes: Tuple[str, ...] = ("archive_files",)
+    legacy_attributes: Tuple[str, ...] = ("archive_files", "install_time_test_callbacks")
+
+    #: Callback names for post-install phase tests
+    install_time_test_callbacks = []

     # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
     spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
+
+    # unconditionally perform any post-install phase tests
+    spack.builder.run_after("install")(execute_install_time_tests)
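With `execute_install_time_tests` registered unconditionally on the generic builder, a package opts in simply by naming callbacks; a hypothetical package sketch (method names made up; callbacks run when tests are requested, e.g. `spack install --test=root`):

    from spack.package import Package

    class Example(Package):
        """Hypothetical package using the generic build system."""

        install_time_test_callbacks = ["check_install"]

        def install(self, spec, prefix):
            ...

        def check_install(self):
            # Post-install sanity check; only invoked when tests are enabled.
            pass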
@@ -857,10 +857,7 @@ def scalapack_libs(self):
             raise_lib_error("Cannot find a BLACS library for the given MPI.")

         int_suff = "_" + self.intel64_int_suffix
-        scalapack_libnames = [
-            "libmkl_scalapack" + int_suff,
-            blacs_lib + int_suff,
-        ]
+        scalapack_libnames = ["libmkl_scalapack" + int_suff, blacs_lib + int_suff]
         sca_libs = find_libraries(
             scalapack_libnames, root=self.component_lib_dir("mkl"), shared=("+shared" in self.spec)
         )
@@ -1161,9 +1158,7 @@ def _determine_license_type(self):
         #
         # Ideally, we just tell the installer to look around on the system.
         # Thankfully, we neither need to care nor emulate where it looks:
-        license_type = {
-            "ACTIVATION_TYPE": "exist_lic",
-        }
+        license_type = {"ACTIVATION_TYPE": "exist_lic"}

         # However (and only), if the spack-internal Intel license file has been
         # populated beyond its templated explanatory comments, proffer it to
@@ -68,10 +68,7 @@ def unpack(self, pkg, spec, prefix):

     @staticmethod
     def _generate_tree_line(name, prefix):
-        return """{{ name = "{name}", root = "{prefix}" }};""".format(
-            name=name,
-            prefix=prefix,
-        )
+        return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)

     def generate_luarocks_config(self, pkg, spec, prefix):
         spec = self.pkg.spec
@@ -120,6 +120,7 @@ def std_meson_args(self):
         of package writers.
         """
         # standard Meson arguments
+
         std_meson_args = MesonBuilder.std_args(self.pkg)
         std_meson_args += getattr(self, "meson_flag_args", [])
         return std_meson_args
@@ -182,7 +183,10 @@ def meson_args(self):

     def meson(self, pkg, spec, prefix):
         """Run ``meson`` in the build directory"""
-        options = [os.path.abspath(self.root_mesonlists_dir)]
+        options = []
+        if self.spec["meson"].satisfies("@0.64:"):
+            options.append("setup")
+        options.append(os.path.abspath(self.root_mesonlists_dir))
         options += self.std_meson_args
         options += self.meson_args()
         with fs.working_dir(self.build_directory, create=True):
@@ -77,7 +77,7 @@ def toolchain_version(self):
|
|||||||
Override this method to select a specific version of the toolchain or change
|
Override this method to select a specific version of the toolchain or change
|
||||||
selection heuristics.
|
selection heuristics.
|
||||||
Default is whatever version of msvc has been selected by concretization"""
|
Default is whatever version of msvc has been selected by concretization"""
|
||||||
return self.compiler.msvc_version
|
return "v" + self.pkg.compiler.platform_toolset_ver
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def std_msbuild_args(self):
|
def std_msbuild_args(self):
|
||||||
|
|||||||
@@ -92,7 +92,7 @@ def makefile_root(self):
|
|||||||
This path is relative to the root of the extracted tarball,
|
This path is relative to the root of the extracted tarball,
|
||||||
not to the ``build_directory``. Defaults to the current directory.
|
not to the ``build_directory``. Defaults to the current directory.
|
||||||
"""
|
"""
|
||||||
return self.stage.source_dir
|
return self.stage.source_path
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def nmakefile_name(self):
|
def nmakefile_name(self):
|
||||||
|
|||||||
@@ -37,11 +37,7 @@ class IntelOneApiPackage(Package):
|
|||||||
conflicts(c, msg="This package in only available for x86_64 and Linux")
|
conflicts(c, msg="This package in only available for x86_64 and Linux")
|
||||||
|
|
||||||
# Add variant to toggle environment modifications from vars.sh
|
# Add variant to toggle environment modifications from vars.sh
|
||||||
variant(
|
variant("envmods", default=True, description="Toggles environment modifications")
|
||||||
"envmods",
|
|
||||||
default=True,
|
|
||||||
description="Toggles environment modifications",
|
|
||||||
)
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def update_description(cls):
|
def update_description(cls):
|
||||||
|
|||||||
@@ -21,7 +21,7 @@
|
|||||||
import spack.package_base
|
import spack.package_base
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.store
|
import spack.store
|
||||||
from spack.directives import build_system, depends_on, extends
|
from spack.directives import build_system, depends_on, extends, maintainers
|
||||||
from spack.error import NoHeadersError, NoLibrariesError, SpecError
|
from spack.error import NoHeadersError, NoLibrariesError, SpecError
|
||||||
from spack.version import Version
|
from spack.version import Version
|
||||||
|
|
||||||
@@ -29,7 +29,7 @@
|
|||||||
|
|
||||||
|
|
||||||
class PythonExtension(spack.package_base.PackageBase):
|
class PythonExtension(spack.package_base.PackageBase):
|
||||||
maintainers = ["adamjstewart"]
|
maintainers("adamjstewart", "pradyunsg")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def import_modules(self):
|
def import_modules(self):
|
||||||
@@ -113,6 +113,9 @@ def view_file_conflicts(self, view, merge_map):
|
|||||||
return conflicts
|
return conflicts
|
||||||
|
|
||||||
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
|
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
|
||||||
|
if not self.extendee_spec:
|
||||||
|
return super().add_files_to_view(view, merge_map, skip_if_exists)
|
||||||
|
|
||||||
bin_dir = self.spec.prefix.bin
|
bin_dir = self.spec.prefix.bin
|
||||||
python_prefix = self.extendee_spec.prefix
|
python_prefix = self.extendee_spec.prefix
|
||||||
python_is_external = self.extendee_spec.external
|
python_is_external = self.extendee_spec.external
|
||||||
@@ -184,8 +187,6 @@ class PythonPackage(PythonExtension):
|
|||||||
#: Package name, version, and extension on PyPI
|
#: Package name, version, and extension on PyPI
|
||||||
pypi: Optional[str] = None
|
pypi: Optional[str] = None
|
||||||
|
|
||||||
maintainers = ["adamjstewart", "pradyunsg"]
|
|
||||||
|
|
||||||
# To be used in UI queries that require to know which
|
# To be used in UI queries that require to know which
|
||||||
# build-system class we are using
|
# build-system class we are using
|
||||||
build_system_class = "PythonPackage"
|
build_system_class = "PythonPackage"
|
||||||
@@ -267,7 +268,7 @@ def update_external_dependencies(self, extendee_spec=None):
|
|||||||
|
|
||||||
python.external_path = self.spec.external_path
|
python.external_path = self.spec.external_path
|
||||||
python._mark_concrete()
|
python._mark_concrete()
|
||||||
self.spec.add_dependency_edge(python, ("build", "link", "run"))
|
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))
|
||||||
|
|
||||||
def get_external_python_for_prefix(self):
|
def get_external_python_for_prefix(self):
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -7,7 +7,7 @@
|
|||||||
|
|
||||||
import llnl.util.lang as lang
|
import llnl.util.lang as lang
|
||||||
|
|
||||||
from spack.directives import extends
|
from spack.directives import extends, maintainers
|
||||||
|
|
||||||
from .generic import GenericBuilder, Package
|
from .generic import GenericBuilder, Package
|
||||||
|
|
||||||
@@ -71,7 +71,7 @@ class RPackage(Package):
|
|||||||
|
|
||||||
GenericBuilder = RBuilder
|
GenericBuilder = RBuilder
|
||||||
|
|
||||||
maintainers = ["glennpj"]
|
maintainers("glennpj")
|
||||||
|
|
||||||
#: This attribute is used in UI queries that need to know the build
|
#: This attribute is used in UI queries that need to know the build
|
||||||
#: system base class
|
#: system base class
|
||||||
|
|||||||
@@ -11,7 +11,7 @@
|
|||||||
|
|
||||||
import spack.builder
|
import spack.builder
|
||||||
from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
|
from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
|
||||||
from spack.directives import build_system, extends
|
from spack.directives import build_system, extends, maintainers
|
||||||
from spack.package_base import PackageBase
|
from spack.package_base import PackageBase
|
||||||
from spack.util.environment import env_flag
|
from spack.util.environment import env_flag
|
||||||
from spack.util.executable import Executable, ProcessError
|
from spack.util.executable import Executable, ProcessError
|
||||||
@@ -23,7 +23,7 @@ class RacketPackage(PackageBase):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
#: Package name, version, and extension on PyPI
|
#: Package name, version, and extension on PyPI
|
||||||
maintainers = ["elfprince13"]
|
maintainers("elfprince13")
|
||||||
# To be used in UI queries that require to know which
|
# To be used in UI queries that require to know which
|
||||||
# build-system class we are using
|
# build-system class we are using
|
||||||
build_system_class = "RacketPackage"
|
build_system_class = "RacketPackage"
|
||||||
|
|||||||
@@ -138,7 +138,7 @@ class ROCmPackage(PackageBase):
|
|||||||
|
|
||||||
depends_on("llvm-amdgpu", when="+rocm")
|
depends_on("llvm-amdgpu", when="+rocm")
|
||||||
depends_on("hsa-rocr-dev", when="+rocm")
|
depends_on("hsa-rocr-dev", when="+rocm")
|
||||||
depends_on("hip", when="+rocm")
|
depends_on("hip +rocm", when="+rocm")
|
||||||
|
|
||||||
conflicts("^blt@:0.3.6", when="+rocm")
|
conflicts("^blt@:0.3.6", when="+rocm")
|
||||||
|
|
||||||
|
|||||||
@@ -7,7 +7,7 @@
|
|||||||
|
|
||||||
import spack.builder
|
import spack.builder
|
||||||
import spack.package_base
|
import spack.package_base
|
||||||
from spack.directives import build_system, extends
|
from spack.directives import build_system, extends, maintainers
|
||||||
|
|
||||||
from ._checks import BaseBuilder
|
from ._checks import BaseBuilder
|
||||||
|
|
||||||
@@ -15,7 +15,7 @@
|
|||||||
class RubyPackage(spack.package_base.PackageBase):
|
class RubyPackage(spack.package_base.PackageBase):
|
||||||
"""Specialized class for building Ruby gems."""
|
"""Specialized class for building Ruby gems."""
|
||||||
|
|
||||||
maintainers = ["Kerilk"]
|
maintainers("Kerilk")
|
||||||
|
|
||||||
#: This attribute is used in UI queries that need to know the build
|
#: This attribute is used in UI queries that need to know the build
|
||||||
#: system base class
|
#: system base class
|
||||||
|
|||||||
@@ -61,10 +61,7 @@ def import_modules(self):
|
|||||||
list: list of strings of module names
|
list: list of strings of module names
|
||||||
"""
|
"""
|
||||||
modules = []
|
modules = []
|
||||||
root = os.path.join(
|
root = os.path.join(self.prefix, self.spec["python"].package.platlib)
|
||||||
self.prefix,
|
|
||||||
self.spec["python"].package.platlib,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Some Python libraries are packages: collections of modules
|
# Some Python libraries are packages: collections of modules
|
||||||
# distributed in directories containing __init__.py files
|
# distributed in directories containing __init__.py files
|
||||||
|
|||||||
@@ -42,9 +42,7 @@
|
|||||||
from spack.reporters import CDash, CDashConfiguration
|
from spack.reporters import CDash, CDashConfiguration
|
||||||
from spack.reporters.cdash import build_stamp as cdash_build_stamp
|
from spack.reporters.cdash import build_stamp as cdash_build_stamp
|
||||||
|
|
||||||
JOB_RETRY_CONDITIONS = [
|
JOB_RETRY_CONDITIONS = ["always"]
|
||||||
"always",
|
|
||||||
]
|
|
||||||
|
|
||||||
TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
|
TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
|
||||||
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
|
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
|
||||||
@@ -129,10 +127,7 @@ def _remove_reserved_tags(tags):
|
|||||||
|
|
||||||
|
|
||||||
def _get_spec_string(spec):
|
def _get_spec_string(spec):
|
||||||
format_elements = [
|
format_elements = ["{name}{@version}", "{%compiler}"]
|
||||||
"{name}{@version}",
|
|
||||||
"{%compiler}",
|
|
||||||
]
|
|
||||||
|
|
||||||
if spec.architecture:
|
if spec.architecture:
|
||||||
format_elements.append(" {arch=architecture}")
|
format_elements.append(" {arch=architecture}")
|
||||||
@@ -328,12 +323,7 @@ def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None)
|
|||||||
dependencies = []
|
dependencies = []
|
||||||
|
|
||||||
def append_dep(s, d):
|
def append_dep(s, d):
|
||||||
dependencies.append(
|
dependencies.append({"spec": s, "depends": d})
|
||||||
{
|
|
||||||
"spec": s,
|
|
||||||
"depends": d,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
for spec in spec_list:
|
for spec in spec_list:
|
||||||
for s in spec.traverse(deptype=all):
|
for s in spec.traverse(deptype=all):
|
||||||
@@ -346,10 +336,7 @@ def append_dep(s, d):
|
|||||||
)
|
)
|
||||||
|
|
||||||
skey = _spec_deps_key(s)
|
skey = _spec_deps_key(s)
|
||||||
spec_labels[skey] = {
|
spec_labels[skey] = {"spec": s, "needs_rebuild": not up_to_date_mirrors}
|
||||||
"spec": s,
|
|
||||||
"needs_rebuild": not up_to_date_mirrors,
|
|
||||||
}
|
|
||||||
|
|
||||||
for d in s.dependencies(deptype=all):
|
for d in s.dependencies(deptype=all):
|
||||||
dkey = _spec_deps_key(d)
|
dkey = _spec_deps_key(d)
|
||||||
@@ -368,16 +355,13 @@ def append_dep(s, d):
|
|||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
deps_json_obj = {
|
deps_json_obj = {"specs": specs, "dependencies": dependencies}
|
||||||
"specs": specs,
|
|
||||||
"dependencies": dependencies,
|
|
||||||
}
|
|
||||||
|
|
||||||
return deps_json_obj
|
return deps_json_obj
|
||||||
|
|
||||||
|
|
||||||
def _spec_matches(spec, match_string):
|
def _spec_matches(spec, match_string):
|
||||||
return spec.satisfies(match_string)
|
return spec.intersects(match_string)
|
||||||
|
|
||||||
|
|
||||||
def _remove_attributes(src_dict, dest_dict):
|
def _remove_attributes(src_dict, dest_dict):
|
||||||
@@ -410,14 +394,7 @@ def _copy_attributes(attrs_list, src_dict, dest_dict):
|
|||||||
|
|
||||||
def _find_matching_config(spec, gitlab_ci):
|
def _find_matching_config(spec, gitlab_ci):
|
||||||
runner_attributes = {}
|
runner_attributes = {}
|
||||||
overridable_attrs = [
|
overridable_attrs = ["image", "tags", "variables", "before_script", "script", "after_script"]
|
||||||
"image",
|
|
||||||
"tags",
|
|
||||||
"variables",
|
|
||||||
"before_script",
|
|
||||||
"script",
|
|
||||||
"after_script",
|
|
||||||
]
|
|
||||||
|
|
||||||
_copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
|
_copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
|
||||||
|
|
||||||
@@ -513,16 +490,28 @@ def compute_affected_packages(rev1="HEAD^", rev2="HEAD"):
|
|||||||
return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)
|
return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)
|
||||||
|
|
||||||
|
|
||||||
def get_spec_filter_list(env, affected_pkgs):
|
def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
|
||||||
"""Given a list of package names and an active/concretized
|
"""Given a list of package names and an active/concretized
|
||||||
environment, return the set of all concrete specs from the
|
environment, return the set of all concrete specs from the
|
||||||
environment that could have been affected by changing the
|
environment that could have been affected by changing the
|
||||||
list of packages.
|
list of packages.
|
||||||
|
|
||||||
|
If a ``dependent_traverse_depth`` is given, it is used to limit
|
||||||
|
upward (in the parent direction) traversal of specs of touched
|
||||||
|
packages. E.g. if 1 is provided, then only direct dependents
|
||||||
|
of touched package specs are traversed to produce specs that
|
||||||
|
could have been affected by changing the package, while if 0 is
|
||||||
|
provided, only the changed specs themselves are traversed. If ``None``
|
||||||
|
is given, upward traversal of touched package specs is done all
|
||||||
|
the way to the environment roots. Providing a negative number
|
||||||
|
results in no traversals at all, yielding an empty set.
|
||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
|
|
||||||
env (spack.environment.Environment): Active concrete environment
|
env (spack.environment.Environment): Active concrete environment
|
||||||
affected_pkgs (List[str]): Affected package names
|
affected_pkgs (List[str]): Affected package names
|
||||||
|
dependent_traverse_depth: Optional integer to limit dependent
|
||||||
|
traversal, or None to disable the limit.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
|
|
||||||
@@ -539,10 +528,11 @@ def get_spec_filter_list(env, affected_pkgs):
|
|||||||
visited = set()
|
visited = set()
|
||||||
dag_hash = lambda s: s.dag_hash()
|
dag_hash = lambda s: s.dag_hash()
|
||||||
for match in env_matches:
|
for match in env_matches:
|
||||||
for parent in match.traverse(direction="parents", key=dag_hash):
|
for dep_level, parent in match.traverse(direction="parents", key=dag_hash, depth=True):
|
||||||
affected_specs.update(
|
if dependent_traverse_depth is None or dep_level <= dependent_traverse_depth:
|
||||||
parent.traverse(direction="children", visited=visited, key=dag_hash)
|
affected_specs.update(
|
||||||
)
|
parent.traverse(direction="children", visited=visited, key=dag_hash)
|
||||||
|
)
|
||||||
return affected_specs
|
return affected_specs
|
||||||
|
|
||||||
|
|
||||||
@@ -603,6 +593,18 @@ def generate_gitlab_ci_yaml(
|
|||||||
cdash_handler = CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
|
cdash_handler = CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
|
||||||
build_group = cdash_handler.build_group if cdash_handler else None
|
build_group = cdash_handler.build_group if cdash_handler else None
|
||||||
|
|
||||||
|
dependent_depth = os.environ.get("SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH", None)
|
||||||
|
if dependent_depth is not None:
|
||||||
|
try:
|
||||||
|
dependent_depth = int(dependent_depth)
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
tty.warn(
|
||||||
|
"Unrecognized value ({0}) ".format(dependent_depth),
|
||||||
|
"provide forSPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH, ",
|
||||||
|
"ignoring it.",
|
||||||
|
)
|
||||||
|
dependent_depth = None
|
||||||
|
|
||||||
prune_untouched_packages = False
|
prune_untouched_packages = False
|
||||||
spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
|
spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
|
||||||
if spack_prune_untouched is not None and spack_prune_untouched.lower() == "true":
|
if spack_prune_untouched is not None and spack_prune_untouched.lower() == "true":
|
||||||
@@ -618,7 +620,9 @@ def generate_gitlab_ci_yaml(
|
|||||||
tty.debug("affected pkgs:")
|
tty.debug("affected pkgs:")
|
||||||
for p in affected_pkgs:
|
for p in affected_pkgs:
|
||||||
tty.debug(" {0}".format(p))
|
tty.debug(" {0}".format(p))
|
||||||
affected_specs = get_spec_filter_list(env, affected_pkgs)
|
affected_specs = get_spec_filter_list(
|
||||||
|
env, affected_pkgs, dependent_traverse_depth=dependent_depth
|
||||||
|
)
|
||||||
tty.debug("all affected specs:")
|
tty.debug("all affected specs:")
|
||||||
for s in affected_specs:
|
for s in affected_specs:
|
||||||
tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7]))
|
tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7]))
|
||||||
@@ -685,28 +689,14 @@ def generate_gitlab_ci_yaml(
|
|||||||
except AttributeError:
|
except AttributeError:
|
||||||
phase_name = phase
|
phase_name = phase
|
||||||
strip_compilers = False
|
strip_compilers = False
|
||||||
phases.append(
|
phases.append({"name": phase_name, "strip-compilers": strip_compilers})
|
||||||
{
|
|
||||||
"name": phase_name,
|
|
||||||
"strip-compilers": strip_compilers,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
for bs in env.spec_lists[phase_name]:
|
for bs in env.spec_lists[phase_name]:
|
||||||
bootstrap_specs.append(
|
bootstrap_specs.append(
|
||||||
{
|
{"spec": bs, "phase-name": phase_name, "strip-compilers": strip_compilers}
|
||||||
"spec": bs,
|
|
||||||
"phase-name": phase_name,
|
|
||||||
"strip-compilers": strip_compilers,
|
|
||||||
}
|
|
||||||
)
|
)
|
||||||
|
|
||||||
phases.append(
|
phases.append({"name": "specs", "strip-compilers": False})
|
||||||
{
|
|
||||||
"name": "specs",
|
|
||||||
"strip-compilers": False,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
# If a remote mirror override (alternate buildcache destination) was
|
# If a remote mirror override (alternate buildcache destination) was
|
||||||
# specified, add it here in case it has already built hashes we might
|
# specified, add it here in case it has already built hashes we might
|
||||||
@@ -975,7 +965,7 @@ def generate_gitlab_ci_yaml(
|
|||||||
bs_arch = c_spec.architecture
|
bs_arch = c_spec.architecture
|
||||||
bs_arch_family = bs_arch.target.microarchitecture.family
|
bs_arch_family = bs_arch.target.microarchitecture.family
|
||||||
if (
|
if (
|
||||||
c_spec.satisfies(compiler_pkg_spec)
|
c_spec.intersects(compiler_pkg_spec)
|
||||||
and bs_arch_family == spec_arch_family
|
and bs_arch_family == spec_arch_family
|
||||||
):
|
):
|
||||||
# We found the bootstrap compiler this release spec
|
# We found the bootstrap compiler this release spec
|
||||||
@@ -1109,15 +1099,9 @@ def generate_gitlab_ci_yaml(
|
|||||||
"variables": variables,
|
"variables": variables,
|
||||||
"script": job_script,
|
"script": job_script,
|
||||||
"tags": tags,
|
"tags": tags,
|
||||||
"artifacts": {
|
"artifacts": {"paths": artifact_paths, "when": "always"},
|
||||||
"paths": artifact_paths,
|
|
||||||
"when": "always",
|
|
||||||
},
|
|
||||||
"needs": sorted(job_dependencies, key=lambda d: d["job"]),
|
"needs": sorted(job_dependencies, key=lambda d: d["job"]),
|
||||||
"retry": {
|
"retry": {"max": 2, "when": JOB_RETRY_CONDITIONS},
|
||||||
"max": 2,
|
|
||||||
"when": JOB_RETRY_CONDITIONS,
|
|
||||||
},
|
|
||||||
"interruptible": True,
|
"interruptible": True,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1135,10 +1119,7 @@ def generate_gitlab_ci_yaml(
|
|||||||
if image_name:
|
if image_name:
|
||||||
job_object["image"] = image_name
|
job_object["image"] = image_name
|
||||||
if image_entry is not None:
|
if image_entry is not None:
|
||||||
job_object["image"] = {
|
job_object["image"] = {"name": image_name, "entrypoint": image_entry}
|
||||||
"name": image_name,
|
|
||||||
"entrypoint": image_entry,
|
|
||||||
}
|
|
||||||
|
|
||||||
output_object[job_name] = job_object
|
output_object[job_name] = job_object
|
||||||
job_id += 1
|
job_id += 1
|
||||||
@@ -1181,11 +1162,7 @@ def generate_gitlab_ci_yaml(
|
|||||||
|
|
||||||
service_job_retries = {
|
service_job_retries = {
|
||||||
"max": 2,
|
"max": 2,
|
||||||
"when": [
|
"when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
|
||||||
"runner_system_failure",
|
|
||||||
"stuck_or_timeout_failure",
|
|
||||||
"script_failure",
|
|
||||||
],
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if job_id > 0:
|
if job_id > 0:
|
||||||
@@ -1357,9 +1334,7 @@ def generate_gitlab_ci_yaml(
|
|||||||
_copy_attributes(default_attrs, service_job_config, noop_job)
|
_copy_attributes(default_attrs, service_job_config, noop_job)
|
||||||
|
|
||||||
if "script" not in noop_job:
|
if "script" not in noop_job:
|
||||||
noop_job["script"] = [
|
noop_job["script"] = ['echo "All specs already up to date, nothing to rebuild."']
|
||||||
'echo "All specs already up to date, nothing to rebuild."',
|
|
||||||
]
|
|
||||||
|
|
||||||
noop_job["retry"] = service_job_retries
|
noop_job["retry"] = service_job_retries
|
||||||
|
|
||||||
@@ -1489,9 +1464,8 @@ def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
|
|||||||
hashes = env.all_hashes() if env else None
|
hashes = env.all_hashes() if env else None
|
||||||
matches = spack.store.specfile_matches(specfile_path, hashes=hashes)
|
matches = spack.store.specfile_matches(specfile_path, hashes=hashes)
|
||||||
push_url = spack.mirror.Mirror.from_url(mirror_url).push_url
|
push_url = spack.mirror.Mirror.from_url(mirror_url).push_url
|
||||||
spec_kwargs = {"include_root": True, "include_dependencies": False}
|
|
||||||
kwargs = {"force": True, "allow_root": True, "unsigned": unsigned}
|
kwargs = {"force": True, "allow_root": True, "unsigned": unsigned}
|
||||||
bindist.push(matches, push_url, spec_kwargs, **kwargs)
|
bindist.push(matches, push_url, include_root=True, include_dependencies=False, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
|
def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
|
||||||
@@ -1554,10 +1528,7 @@ def copy_files_to_artifacts(src, artifacts_dir):
|
|||||||
try:
|
try:
|
||||||
fs.copy(src, artifacts_dir)
|
fs.copy(src, artifacts_dir)
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
msg = ("Unable to copy files ({0}) to artifacts {1} due to " "exception: {2}").format(
|
tty.warn(f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to: {err}")
|
||||||
src, artifacts_dir, str(err)
|
|
||||||
)
|
|
||||||
tty.error(msg)
|
|
||||||
|
|
||||||
|
|
||||||
def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
|
def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
|
||||||
@@ -1620,9 +1591,7 @@ def download_and_extract_artifacts(url, work_dir):
|
|||||||
"""
|
"""
|
||||||
tty.msg("Fetching artifacts from: {0}\n".format(url))
|
tty.msg("Fetching artifacts from: {0}\n".format(url))
|
||||||
|
|
||||||
headers = {
|
headers = {"Content-Type": "application/zip"}
|
||||||
"Content-Type": "application/zip",
|
|
||||||
}
|
|
||||||
|
|
||||||
token = os.environ.get("GITLAB_PRIVATE_TOKEN", None)
|
token = os.environ.get("GITLAB_PRIVATE_TOKEN", None)
|
||||||
if token:
|
if token:
|
||||||
@@ -2081,10 +2050,7 @@ def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dic
|
|||||||
with open(file_path, "w") as fd:
|
with open(file_path, "w") as fd:
|
||||||
fd.write(syaml.dump(broken_spec_details))
|
fd.write(syaml.dump(broken_spec_details))
|
||||||
web_util.push_to_url(
|
web_util.push_to_url(
|
||||||
file_path,
|
file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"}
|
||||||
url,
|
|
||||||
keep_original=False,
|
|
||||||
extra_args={"ContentType": "text/plain"},
|
|
||||||
)
|
)
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
# If there is an S3 error (e.g., access denied or connection
|
# If there is an S3 error (e.g., access denied or connection
|
||||||
@@ -2162,14 +2128,7 @@ def run_standalone_tests(**kwargs):
|
|||||||
tty.error("Reproduction directory is required for stand-alone tests")
|
tty.error("Reproduction directory is required for stand-alone tests")
|
||||||
return
|
return
|
||||||
|
|
||||||
test_args = [
|
test_args = ["spack", "--color=always", "--backtrace", "--verbose", "test", "run"]
|
||||||
"spack",
|
|
||||||
"--color=always",
|
|
||||||
"--backtrace",
|
|
||||||
"--verbose",
|
|
||||||
"test",
|
|
||||||
"run",
|
|
||||||
]
|
|
||||||
if fail_fast:
|
if fail_fast:
|
||||||
test_args.append("--fail-fast")
|
test_args.append("--fail-fast")
|
||||||
|
|
||||||
@@ -2319,19 +2278,9 @@ def populate_buildgroup(self, job_names):
|
|||||||
|
|
||||||
opener = build_opener(HTTPHandler)
|
opener = build_opener(HTTPHandler)
|
||||||
|
|
||||||
parent_group_id = self.create_buildgroup(
|
parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
|
||||||
opener,
|
|
||||||
headers,
|
|
||||||
url,
|
|
||||||
self.build_group,
|
|
||||||
"Daily",
|
|
||||||
)
|
|
||||||
group_id = self.create_buildgroup(
|
group_id = self.create_buildgroup(
|
||||||
opener,
|
opener, headers, url, "Latest {0}".format(self.build_group), "Latest"
|
||||||
headers,
|
|
||||||
url,
|
|
||||||
"Latest {0}".format(self.build_group),
|
|
||||||
"Latest",
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if not parent_group_id or not group_id:
|
if not parent_group_id or not group_id:
|
||||||
@@ -2341,13 +2290,9 @@ def populate_buildgroup(self, job_names):
|
|||||||
|
|
||||||
data = {
|
data = {
|
||||||
"dynamiclist": [
|
"dynamiclist": [
|
||||||
{
|
{"match": name, "parentgroupid": parent_group_id, "site": self.site}
|
||||||
"match": name,
|
|
||||||
"parentgroupid": parent_group_id,
|
|
||||||
"site": self.site,
|
|
||||||
}
|
|
||||||
for name in job_names
|
for name in job_names
|
||||||
],
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
enc_data = json.dumps(data).encode("utf-8")
|
enc_data = json.dumps(data).encode("utf-8")
|
||||||
|
|||||||
@@ -43,7 +43,6 @@ def matches(obj, proto):
|
|||||||
return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
|
return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
|
||||||
|
|
||||||
if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
|
if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
|
||||||
|
|
||||||
if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
|
if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|||||||
@@ -161,9 +161,7 @@ class _UnquotedFlags(object):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
flags_arg_pattern = re.compile(
|
flags_arg_pattern = re.compile(
|
||||||
r'^({0})=([^\'"].*)$'.format(
|
r'^({0})=([^\'"].*)$'.format("|".join(spack.spec.FlagMap.valid_compiler_flags()))
|
||||||
"|".join(spack.spec.FlagMap.valid_compiler_flags()),
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def __init__(self, all_unquoted_flag_pairs: List[Tuple[Match[str], str]]):
|
def __init__(self, all_unquoted_flag_pairs: List[Tuple[Match[str], str]]):
|
||||||
@@ -227,7 +225,6 @@ def parse_specs(args, **kwargs):
|
|||||||
return specs
|
return specs
|
||||||
|
|
||||||
except spack.error.SpecError as e:
|
except spack.error.SpecError as e:
|
||||||
|
|
||||||
msg = e.message
|
msg = e.message
|
||||||
if e.long_message:
|
if e.long_message:
|
||||||
msg += e.long_message
|
msg += e.long_message
|
||||||
|
|||||||
@@ -53,7 +53,6 @@ def packages(parser, args):
|
|||||||
|
|
||||||
|
|
||||||
def packages_https(parser, args):
|
def packages_https(parser, args):
|
||||||
|
|
||||||
# Since packages takes a long time, --all is required without name
|
# Since packages takes a long time, --all is required without name
|
||||||
if not args.check_all and not args.name:
|
if not args.check_all and not args.name:
|
||||||
tty.die("Please specify one or more packages to audit, or --all.")
|
tty.die("Please specify one or more packages to audit, or --all.")
|
||||||
|
|||||||
@@ -5,7 +5,7 @@
|
|||||||
import spack.cmd.common.env_utility as env_utility
|
import spack.cmd.common.env_utility as env_utility
|
||||||
|
|
||||||
description = (
|
description = (
|
||||||
"run a command in a spec's install environment, " "or dump its environment to screen or file"
|
"run a command in a spec's install environment, or dump its environment to screen or file"
|
||||||
)
|
)
|
||||||
section = "build"
|
section = "build"
|
||||||
level = "long"
|
level = "long"
|
||||||
|
|||||||
@@ -103,9 +103,7 @@ def setup_parser(subparser):
|
|||||||
help="Regenerate buildcache index after building package(s)",
|
help="Regenerate buildcache index after building package(s)",
|
||||||
)
|
)
|
||||||
create.add_argument(
|
create.add_argument(
|
||||||
"--spec-file",
|
"--spec-file", default=None, help="Create buildcache entry for spec from json or yaml file"
|
||||||
default=None,
|
|
||||||
help="Create buildcache entry for spec from json or yaml file",
|
|
||||||
)
|
)
|
||||||
create.add_argument(
|
create.add_argument(
|
||||||
"--only",
|
"--only",
|
||||||
@@ -402,7 +400,7 @@ def _matching_specs(specs, spec_file):
|
|||||||
return spack.store.find(constraints, hashes=hashes)
|
return spack.store.find(constraints, hashes=hashes)
|
||||||
|
|
||||||
if env:
|
if env:
|
||||||
return [env.specs_by_hash[h] for h in env.concretized_order]
|
return [concrete for _, concrete in env.concretized_specs()]
|
||||||
|
|
||||||
tty.die(
|
tty.die(
|
||||||
"build cache file creation requires at least one"
|
"build cache file creation requires at least one"
|
||||||
@@ -461,10 +459,6 @@ def create_fn(args):
|
|||||||
|
|
||||||
msg = "Pushing binary packages to {0}/build_cache".format(url)
|
msg = "Pushing binary packages to {0}/build_cache".format(url)
|
||||||
tty.msg(msg)
|
tty.msg(msg)
|
||||||
specs_kwargs = {
|
|
||||||
"include_root": "package" in args.things_to_install,
|
|
||||||
"include_dependencies": "dependencies" in args.things_to_install,
|
|
||||||
}
|
|
||||||
kwargs = {
|
kwargs = {
|
||||||
"key": args.key,
|
"key": args.key,
|
||||||
"force": args.force,
|
"force": args.force,
|
||||||
@@ -473,7 +467,13 @@ def create_fn(args):
|
|||||||
"allow_root": args.allow_root,
|
"allow_root": args.allow_root,
|
||||||
"regenerate_index": args.rebuild_index,
|
"regenerate_index": args.rebuild_index,
|
||||||
}
|
}
|
||||||
bindist.push(matches, url, specs_kwargs, **kwargs)
|
bindist.push(
|
||||||
|
matches,
|
||||||
|
url,
|
||||||
|
include_root="package" in args.things_to_install,
|
||||||
|
include_dependencies="dependencies" in args.things_to_install,
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def install_fn(args):
|
def install_fn(args):
|
||||||
@@ -498,11 +498,11 @@ def list_fn(args):
|
|||||||
|
|
||||||
if not args.allarch:
|
if not args.allarch:
|
||||||
arch = spack.spec.Spec.default_arch()
|
arch = spack.spec.Spec.default_arch()
|
||||||
specs = [s for s in specs if s.satisfies(arch)]
|
specs = [s for s in specs if s.intersects(arch)]
|
||||||
|
|
||||||
if args.specs:
|
if args.specs:
|
||||||
constraints = set(args.specs)
|
constraints = set(args.specs)
|
||||||
specs = [s for s in specs if any(s.satisfies(c) for c in constraints)]
|
specs = [s for s in specs if any(s.intersects(c) for c in constraints)]
|
||||||
if sys.stdout.isatty():
|
if sys.stdout.isatty():
|
||||||
builds = len(specs)
|
builds = len(specs)
|
||||||
tty.msg("%s." % plural(builds, "cached build"))
|
tty.msg("%s." % plural(builds, "cached build"))
|
||||||
|
|||||||
@@ -20,9 +20,7 @@ def setup_parser(subparser):
|
|||||||
help="name of the list to remove specs from",
|
help="name of the list to remove specs from",
|
||||||
)
|
)
|
||||||
subparser.add_argument(
|
subparser.add_argument(
|
||||||
"--match-spec",
|
"--match-spec", dest="match_spec", help="if name is ambiguous, supply a spec to match"
|
||||||
dest="match_spec",
|
|
||||||
help="if name is ambiguous, supply a spec to match",
|
|
||||||
)
|
)
|
||||||
subparser.add_argument(
|
subparser.add_argument(
|
||||||
"-a",
|
"-a",
|
||||||
|
|||||||
@@ -530,40 +530,28 @@ def ci_rebuild(args):
|
|||||||
if not verify_binaries:
|
if not verify_binaries:
|
||||||
install_args.append("--no-check-signature")
|
install_args.append("--no-check-signature")
|
||||||
|
|
||||||
cdash_args = []
|
|
||||||
if cdash_handler:
|
|
||||||
# Add additional arguments to `spack install` for CDash reporting.
|
|
||||||
cdash_args.extend(cdash_handler.args())
|
|
||||||
|
|
||||||
slash_hash = "/{}".format(job_spec.dag_hash())
|
slash_hash = "/{}".format(job_spec.dag_hash())
|
||||||
|
|
||||||
|
# Arguments when installing dependencies from cache
|
||||||
deps_install_args = install_args
|
deps_install_args = install_args
|
||||||
|
|
||||||
|
# Arguments when installing the root from sources
|
||||||
root_install_args = install_args + [
|
root_install_args = install_args + [
|
||||||
"--keep-stage",
|
"--keep-stage",
|
||||||
"--only=package",
|
"--only=package",
|
||||||
"--use-buildcache=package:never,dependencies:only",
|
"--use-buildcache=package:never,dependencies:only",
|
||||||
slash_hash,
|
|
||||||
]
|
]
|
||||||
|
if cdash_handler:
|
||||||
|
# Add additional arguments to `spack install` for CDash reporting.
|
||||||
|
root_install_args.extend(cdash_handler.args())
|
||||||
|
root_install_args.append(slash_hash)
|
||||||
|
|
||||||
# ["x", "y"] -> "'x' 'y'"
|
# ["x", "y"] -> "'x' 'y'"
|
||||||
args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)
|
args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)
|
||||||
|
|
||||||
commands = [
|
commands = [
|
||||||
# apparently there's a race when spack bootstraps? do it up front once
|
# apparently there's a race when spack bootstraps? do it up front once
|
||||||
[
|
[SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
|
||||||
SPACK_COMMAND,
|
|
||||||
"-e",
|
|
||||||
env.path,
|
|
||||||
"bootstrap",
|
|
||||||
"now",
|
|
||||||
],
|
|
||||||
[
|
|
||||||
SPACK_COMMAND,
|
|
||||||
"-e",
|
|
||||||
env.path,
|
|
||||||
"config",
|
|
||||||
"add",
|
|
||||||
"config:db_lock_timeout:120", # 2 minutes for processes to fight for a db lock
|
|
||||||
],
|
|
||||||
[
|
[
|
||||||
SPACK_COMMAND,
|
SPACK_COMMAND,
|
||||||
"-e",
|
"-e",
|
||||||
|
|||||||
@@ -13,11 +13,7 @@
|
|||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.argparsewriter import (
|
from llnl.util.argparsewriter import ArgparseCompletionWriter, ArgparseRstWriter, ArgparseWriter
|
||||||
ArgparseCompletionWriter,
|
|
||||||
ArgparseRstWriter,
|
|
||||||
ArgparseWriter,
|
|
||||||
)
|
|
||||||
from llnl.util.tty.colify import colify
|
from llnl.util.tty.colify import colify
|
||||||
|
|
||||||
import spack.cmd
|
import spack.cmd
|
||||||
@@ -42,7 +38,7 @@
|
|||||||
"format": "bash",
|
"format": "bash",
|
||||||
"header": os.path.join(spack.paths.share_path, "bash", "spack-completion.in"),
|
"header": os.path.join(spack.paths.share_path, "bash", "spack-completion.in"),
|
||||||
"update": os.path.join(spack.paths.share_path, "spack-completion.bash"),
|
"update": os.path.join(spack.paths.share_path, "spack-completion.bash"),
|
||||||
},
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -6,6 +6,7 @@
|
|||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import os.path
|
import os.path
|
||||||
|
import textwrap
|
||||||
|
|
||||||
from llnl.util.lang import stable_partition
|
from llnl.util.lang import stable_partition
|
||||||
|
|
||||||
@@ -415,6 +416,40 @@ def add_cdash_args(subparser, add_help):
|
|||||||
cdash_subgroup.add_argument("--cdash-buildstamp", default=None, help=cdash_help["buildstamp"])
|
cdash_subgroup.add_argument("--cdash-buildstamp", default=None, help=cdash_help["buildstamp"])
|
||||||
|
|
||||||
|
|
||||||
|
def print_cdash_help():
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
formatter_class=argparse.RawDescriptionHelpFormatter,
|
||||||
|
epilog=textwrap.dedent(
|
||||||
|
"""\
|
||||||
|
environment variables:
|
||||||
|
SPACK_CDASH_AUTH_TOKEN
|
||||||
|
authentication token to present to CDash
|
||||||
|
"""
|
||||||
|
),
|
||||||
|
)
|
||||||
|
add_cdash_args(parser, True)
|
||||||
|
parser.print_help()
|
||||||
|
|
||||||
|
|
||||||
|
def sanitize_reporter_options(namespace: argparse.Namespace):
|
||||||
|
"""Sanitize options that affect generation and configuration of reports, like
|
||||||
|
CDash or JUnit.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
namespace: options parsed from cli
|
||||||
|
"""
|
||||||
|
has_any_cdash_option = (
|
||||||
|
namespace.cdash_upload_url or namespace.cdash_build or namespace.cdash_site
|
||||||
|
)
|
||||||
|
if namespace.log_format == "junit" and has_any_cdash_option:
|
||||||
|
raise argparse.ArgumentTypeError("cannot pass any cdash option when --log-format=junit")
|
||||||
|
|
||||||
|
# If any CDash option is passed, assume --log-format=cdash is implied
|
||||||
|
if namespace.log_format is None and has_any_cdash_option:
|
||||||
|
namespace.log_format = "cdash"
|
||||||
|
namespace.reporter = _cdash_reporter(namespace)
|
||||||
|
|
||||||
|
|
||||||
class ConfigSetAction(argparse.Action):
|
class ConfigSetAction(argparse.Action):
|
||||||
"""Generic action for setting spack config options from CLI.
|
"""Generic action for setting spack config options from CLI.
|
||||||
|
|
||||||
|
|||||||
@@ -12,7 +12,11 @@
|
|||||||
import spack.build_environment as build_environment
|
import spack.build_environment as build_environment
|
||||||
import spack.cmd
|
import spack.cmd
|
||||||
import spack.cmd.common.arguments as arguments
|
import spack.cmd.common.arguments as arguments
|
||||||
|
import spack.error
|
||||||
import spack.paths
|
import spack.paths
|
||||||
|
import spack.spec
|
||||||
|
import spack.store
|
||||||
|
from spack import traverse
|
||||||
from spack.util.environment import dump_environment, pickle_environment
|
from spack.util.environment import dump_environment, pickle_environment
|
||||||
|
|
||||||
|
|
||||||
@@ -38,6 +42,41 @@ def setup_parser(subparser):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class AreDepsInstalledVisitor:
|
||||||
|
def __init__(self, context="build"):
|
||||||
|
if context not in ("build", "test"):
|
||||||
|
raise ValueError("context can only be build or test")
|
||||||
|
|
||||||
|
if context == "build":
|
||||||
|
self.direct_deps = ("build", "link", "run")
|
||||||
|
else:
|
||||||
|
self.direct_deps = ("build", "test", "link", "run")
|
||||||
|
|
||||||
|
self.has_uninstalled_deps = False
|
||||||
|
|
||||||
|
def accept(self, item):
|
||||||
|
# The root may be installed or uninstalled.
|
||||||
|
if item.depth == 0:
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Early exit after we've seen an uninstalled dep.
|
||||||
|
if self.has_uninstalled_deps:
|
||||||
|
return False
|
||||||
|
|
||||||
|
spec = item.edge.spec
|
||||||
|
if not spec.external and not spec.installed:
|
||||||
|
self.has_uninstalled_deps = True
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def neighbors(self, item):
|
||||||
|
# Direct deps: follow build & test edges.
|
||||||
|
# Transitive deps: follow link / run.
|
||||||
|
deptypes = self.direct_deps if item.depth == 0 else ("link", "run")
|
||||||
|
return item.edge.spec.edges_to_dependencies(deptype=deptypes)
|
||||||
|
|
||||||
|
|
||||||
def emulate_env_utility(cmd_name, context, args):
|
def emulate_env_utility(cmd_name, context, args):
|
||||||
if not args.spec:
|
if not args.spec:
|
||||||
tty.die("spack %s requires a spec." % cmd_name)
|
tty.die("spack %s requires a spec." % cmd_name)
|
||||||
@@ -65,6 +104,27 @@ def emulate_env_utility(cmd_name, context, args):
|
|||||||
|
|
||||||
spec = spack.cmd.matching_spec_from_env(spec)
|
spec = spack.cmd.matching_spec_from_env(spec)
|
||||||
|
|
||||||
|
# Require that dependencies are installed.
|
||||||
|
visitor = AreDepsInstalledVisitor(context=context)
|
||||||
|
|
||||||
|
# Mass install check needs read transaction.
|
||||||
|
with spack.store.db.read_transaction():
|
||||||
|
traverse.traverse_breadth_first_with_visitor([spec], traverse.CoverNodesVisitor(visitor))
|
||||||
|
|
||||||
|
if visitor.has_uninstalled_deps:
|
||||||
|
raise spack.error.SpackError(
|
||||||
|
f"Not all dependencies of {spec.name} are installed. "
|
||||||
|
f"Cannot setup {context} environment:",
|
||||||
|
spec.tree(
|
||||||
|
status_fn=spack.spec.Spec.install_status,
|
||||||
|
hashlen=7,
|
||||||
|
hashes=True,
|
||||||
|
# This shows more than necessary, but we cannot dynamically change deptypes
|
||||||
|
# in Spec.tree(...).
|
||||||
|
deptypes="all" if context == "build" else ("build", "test", "link", "run"),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
build_environment.setup_package(spec.package, args.dirty, context)
|
build_environment.setup_package(spec.package, args.dirty, context)
|
||||||
|
|
||||||
if args.dump:
|
if args.dump:
|
||||||
|
|||||||
@@ -408,13 +408,7 @@ def config_prefer_upstream(args):
|
|||||||
pkgs = {}
|
pkgs = {}
|
||||||
for spec in pref_specs:
|
for spec in pref_specs:
|
||||||
# Collect all the upstream compilers and versions for this package.
|
# Collect all the upstream compilers and versions for this package.
|
||||||
pkg = pkgs.get(
|
pkg = pkgs.get(spec.name, {"version": [], "compiler": []})
|
||||||
spec.name,
|
|
||||||
{
|
|
||||||
"version": [],
|
|
||||||
"compiler": [],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
pkgs[spec.name] = pkg
|
pkgs[spec.name] = pkg
|
||||||
|
|
||||||
# We have no existing variant if this is our first added version.
|
# We have no existing variant if this is our first added version.
|
||||||
|
|||||||
@@ -16,19 +16,10 @@
|
|||||||
import spack.stage
|
import spack.stage
|
||||||
import spack.util.web
|
import spack.util.web
|
||||||
from spack.spec import Spec
|
from spack.spec import Spec
|
||||||
from spack.url import (
|
from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
|
||||||
UndetectableNameError,
|
|
||||||
UndetectableVersionError,
|
|
||||||
parse_name,
|
|
||||||
parse_version,
|
|
||||||
)
|
|
||||||
from spack.util.editor import editor
|
from spack.util.editor import editor
|
||||||
from spack.util.executable import ProcessError, which
|
from spack.util.executable import ProcessError, which
|
||||||
from spack.util.naming import (
|
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
|
||||||
mod_to_class,
|
|
||||||
simplify_name,
|
|
||||||
valid_fully_qualified_module_name,
|
|
||||||
)
|
|
||||||
|
|
||||||
description = "create a new package file"
|
description = "create a new package file"
|
||||||
section = "packaging"
|
section = "packaging"
|
||||||
@@ -70,7 +61,7 @@ class {class_name}({base_class_name}):
|
|||||||
|
|
||||||
# FIXME: Add a list of GitHub accounts to
|
# FIXME: Add a list of GitHub accounts to
|
||||||
# notify when the package is updated.
|
# notify when the package is updated.
|
||||||
# maintainers = ["github_user1", "github_user2"]
|
# maintainers("github_user1", "github_user2")
|
||||||
|
|
||||||
{versions}
|
{versions}
|
||||||
|
|
||||||
|
|||||||
@@ -96,8 +96,5 @@ def report(args):
|
|||||||
|
|
||||||
|
|
||||||
def debug(parser, args):
|
def debug(parser, args):
|
||||||
action = {
|
action = {"create-db-tarball": create_db_tarball, "report": report}
|
||||||
"create-db-tarball": create_db_tarball,
|
|
||||||
"report": report,
|
|
||||||
}
|
|
||||||
action[args.debug_command](args)
|
action[args.debug_command](args)
|
||||||
|
|||||||
@@ -33,12 +33,7 @@
|
|||||||
level = "long"
|
level = "long"
|
||||||
|
|
||||||
# Arguments for display_specs when we find ambiguity
|
# Arguments for display_specs when we find ambiguity
|
||||||
display_args = {
|
display_args = {"long": True, "show_flags": True, "variants": True, "indent": 4}
|
||||||
"long": True,
|
|
||||||
"show_flags": True,
|
|
||||||
"variants": True,
|
|
||||||
"indent": 4,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def setup_parser(sp):
|
def setup_parser(sp):
|
||||||
|
|||||||
@@ -46,6 +46,14 @@ def setup_parser(subparser):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def shift(asp_function):
|
||||||
|
"""Transforms ``attr("foo", "bar")`` into ``foo("bar")``."""
|
||||||
|
if not asp_function.args:
|
||||||
|
raise ValueError(f"Can't shift ASP function with no arguments: {str(asp_function)}")
|
||||||
|
first, *rest = asp_function.args
|
||||||
|
return asp.AspFunction(first, rest)
|
||||||
|
|
||||||
|
|
||||||
def compare_specs(a, b, to_string=False, color=None):
|
def compare_specs(a, b, to_string=False, color=None):
|
||||||
"""
|
"""
|
||||||
Generate a comparison, including diffs (for each side) and an intersection.
|
Generate a comparison, including diffs (for each side) and an intersection.
|
||||||
@@ -71,23 +79,13 @@ def compare_specs(a, b, to_string=False, color=None):
|
|||||||
# get facts for specs, making sure to include build dependencies of concrete
|
# get facts for specs, making sure to include build dependencies of concrete
|
||||||
# specs and to descend into dependency hashes so we include all facts.
|
# specs and to descend into dependency hashes so we include all facts.
|
||||||
a_facts = set(
|
a_facts = set(
|
||||||
func.shift()
|
shift(func)
|
||||||
for func in setup.spec_clauses(
|
for func in setup.spec_clauses(a, body=True, expand_hashes=True, concrete_build_deps=True)
|
||||||
a,
|
|
||||||
body=True,
|
|
||||||
expand_hashes=True,
|
|
||||||
concrete_build_deps=True,
|
|
||||||
)
|
|
||||||
if func.name == "attr"
|
if func.name == "attr"
|
||||||
)
|
)
|
||||||
b_facts = set(
|
b_facts = set(
|
||||||
func.shift()
|
shift(func)
|
||||||
for func in setup.spec_clauses(
|
for func in setup.spec_clauses(b, body=True, expand_hashes=True, concrete_build_deps=True)
|
||||||
b,
|
|
||||||
body=True,
|
|
||||||
expand_hashes=True,
|
|
||||||
concrete_build_deps=True,
|
|
||||||
)
|
|
||||||
if func.name == "attr"
|
if func.name == "attr"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -148,8 +148,7 @@ def env_activate(args):
|
|||||||
|
|
||||||
if not args.shell:
|
if not args.shell:
|
||||||
spack.cmd.common.shell_init_instructions(
|
spack.cmd.common.shell_init_instructions(
|
||||||
"spack env activate",
|
"spack env activate", " eval `spack env activate {sh_arg} [...]`"
|
||||||
" eval `spack env activate {sh_arg} [...]`",
|
|
||||||
)
|
)
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
@@ -166,7 +165,7 @@ def env_activate(args):
|
|||||||
short_name = os.path.basename(env_path)
|
short_name = os.path.basename(env_path)
|
||||||
ev.Environment(env).write(regenerate=False)
|
ev.Environment(env).write(regenerate=False)
|
||||||
|
|
||||||
# Named environment
|
# Managed environment
|
||||||
elif ev.exists(env_name_or_dir) and not args.dir:
|
elif ev.exists(env_name_or_dir) and not args.dir:
|
||||||
env_path = ev.root(env_name_or_dir)
|
env_path = ev.root(env_name_or_dir)
|
||||||
short_name = env_name_or_dir
|
short_name = env_name_or_dir
|
||||||
@@ -238,8 +237,7 @@ def env_deactivate_setup_parser(subparser):
|
|||||||
def env_deactivate(args):
|
def env_deactivate(args):
|
||||||
if not args.shell:
|
if not args.shell:
|
||||||
spack.cmd.common.shell_init_instructions(
|
spack.cmd.common.shell_init_instructions(
|
||||||
"spack env deactivate",
|
"spack env deactivate", " eval `spack env deactivate {sh_arg}`"
|
||||||
" eval `spack env deactivate {sh_arg}`",
|
|
||||||
)
|
)
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
|
|||||||
@@ -38,11 +38,7 @@ def setup_parser(subparser):
|
|||||||
default=False,
|
default=False,
|
||||||
help="packages with detected externals won't be built with Spack",
|
help="packages with detected externals won't be built with Spack",
|
||||||
)
|
)
|
||||||
find_parser.add_argument(
|
find_parser.add_argument("--exclude", action="append", help="packages to exclude from search")
|
||||||
"--exclude",
|
|
||||||
action="append",
|
|
||||||
help="packages to exclude from search",
|
|
||||||
)
|
|
||||||
find_parser.add_argument(
|
find_parser.add_argument(
|
||||||
"-p",
|
"-p",
|
||||||
"--path",
|
"--path",
|
||||||
@@ -187,7 +183,6 @@ def external_read_cray_manifest(args):
|
|||||||
def _collect_and_consume_cray_manifest_files(
|
def _collect_and_consume_cray_manifest_files(
|
||||||
manifest_file=None, manifest_directory=None, dry_run=False, fail_on_error=False
|
manifest_file=None, manifest_directory=None, dry_run=False, fail_on_error=False
|
||||||
):
|
):
|
||||||
|
|
||||||
manifest_files = []
|
manifest_files = []
|
||||||
if manifest_file:
|
if manifest_file:
|
||||||
manifest_files.append(manifest_file)
|
manifest_files.append(manifest_file)
|
||||||
|
|||||||
@@ -25,10 +25,7 @@ def setup_parser(subparser):
|
|||||||
help="fetch only missing (not yet installed) dependencies",
|
help="fetch only missing (not yet installed) dependencies",
|
||||||
)
|
)
|
||||||
subparser.add_argument(
|
subparser.add_argument(
|
||||||
"-D",
|
"-D", "--dependencies", action="store_true", help="also fetch all dependencies"
|
||||||
"--dependencies",
|
|
||||||
action="store_true",
|
|
||||||
help="also fetch all dependencies",
|
|
||||||
)
|
)
|
||||||
arguments.add_common_arguments(subparser, ["specs"])
|
arguments.add_common_arguments(subparser, ["specs"])
|
||||||
subparser.epilog = (
|
subparser.epilog = (
|
||||||
|
|||||||
@@ -9,13 +9,7 @@
|
|||||||
import spack.config
|
import spack.config
|
||||||
import spack.environment as ev
|
import spack.environment as ev
|
||||||
import spack.store
|
import spack.store
|
||||||
from spack.graph import (
|
from spack.graph import DAGWithDependencyTypes, SimpleDAG, graph_ascii, graph_dot, static_graph_dot
|
||||||
DAGWithDependencyTypes,
|
|
||||||
SimpleDAG,
|
|
||||||
graph_ascii,
|
|
||||||
graph_dot,
|
|
||||||
static_graph_dot,
|
|
||||||
)
|
|
||||||
|
|
||||||
description = "generate graphs of package dependency relationships"
|
description = "generate graphs of package dependency relationships"
|
||||||
section = "basic"
|
section = "basic"
|
||||||
|
|||||||
@@ -39,12 +39,14 @@
|
|||||||
compiler flags:
|
compiler flags:
|
||||||
@g{cflags="flags"} cppflags, cflags, cxxflags,
|
@g{cflags="flags"} cppflags, cflags, cxxflags,
|
||||||
fflags, ldflags, ldlibs
|
fflags, ldflags, ldlibs
|
||||||
|
@g{==} propagate flags to package dependencies
|
||||||
|
|
||||||
variants:
|
variants:
|
||||||
@B{+variant} enable <variant>
|
@B{+variant} enable <variant>
|
||||||
@r{-variant} or @r{~variant} disable <variant>
|
@r{-variant} or @r{~variant} disable <variant>
|
||||||
@B{variant=value} set non-boolean <variant> to <value>
|
@B{variant=value} set non-boolean <variant> to <value>
|
||||||
@B{variant=value1,value2,value3} set multi-value <variant> values
|
@B{variant=value1,value2,value3} set multi-value <variant> values
|
||||||
|
@B{++}, @r{--}, @r{~~}, @B{==} propagate variants to package dependencies
|
||||||
|
|
||||||
architecture variants:
|
architecture variants:
|
||||||
@m{platform=platform} linux, darwin, cray, etc.
|
@m{platform=platform} linux, darwin, cray, etc.
|
||||||
@@ -68,6 +70,8 @@
|
|||||||
hdf5 @c{@1.8:} @g{%gcc} hdf5 1.8 or higher built with gcc
|
hdf5 @c{@1.8:} @g{%gcc} hdf5 1.8 or higher built with gcc
|
||||||
hdf5 @B{+mpi} hdf5 with mpi enabled
|
hdf5 @B{+mpi} hdf5 with mpi enabled
|
||||||
hdf5 @r{~mpi} hdf5 with mpi disabled
|
hdf5 @r{~mpi} hdf5 with mpi disabled
|
||||||
|
hdf5 @B{++mpi} hdf5 with mpi enabled and propagates
|
||||||
|
hdf5 @r{~~mpi} hdf5 with mpi disabled and propagates
|
||||||
hdf5 @B{+mpi} ^mpich hdf5 with mpi, using mpich
|
hdf5 @B{+mpi} ^mpich hdf5 with mpi, using mpich
|
||||||
hdf5 @B{+mpi} ^openmpi@c{@1.7} hdf5 with mpi, using openmpi 1.7
|
hdf5 @B{+mpi} ^openmpi@c{@1.7} hdf5 with mpi, using openmpi 1.7
|
||||||
boxlib @B{dim=2} boxlib built for 2 dimensions
|
boxlib @B{dim=2} boxlib built for 2 dimensions
|
||||||
@@ -78,9 +82,7 @@
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
guides = {
|
guides = {"spec": spec_guide}
|
||||||
"spec": spec_guide,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def setup_parser(subparser):
|
def setup_parser(subparser):
|
||||||
|
|||||||
@@ -283,7 +283,7 @@ def print_tests(pkg):
|
|||||||
c_names = ("gcc", "intel", "intel-parallel-studio", "pgi")
|
c_names = ("gcc", "intel", "intel-parallel-studio", "pgi")
|
||||||
if pkg.name in c_names:
|
if pkg.name in c_names:
|
||||||
v_names.extend(["c", "cxx", "fortran"])
|
v_names.extend(["c", "cxx", "fortran"])
|
||||||
if pkg.spec.satisfies("llvm+clang"):
|
if pkg.spec.intersects("llvm+clang"):
|
||||||
v_names.extend(["c", "cxx"])
|
v_names.extend(["c", "cxx"])
|
||||||
# TODO Refactor END
|
# TODO Refactor END
|
||||||
|
|
||||||
|
@@ -7,7 +7,6 @@
 import os
 import shutil
 import sys
-import textwrap
 from typing import List

 import llnl.util.filesystem as fs

@@ -260,7 +259,7 @@ def default_log_file(spec):

 def report_filename(args: argparse.Namespace, specs: List[spack.spec.Spec]) -> str:
     """Return the filename to be used for reporting to JUnit or CDash format."""
-    result = args.log_file or args.cdash_upload_url or default_log_file(specs[0])
+    result = args.log_file or default_log_file(specs[0])
     return result
@@ -348,21 +347,6 @@ def install_specs_outside_environment(specs, install_kwargs):
         builder.install()


-def print_cdash_help():
-    parser = argparse.ArgumentParser(
-        formatter_class=argparse.RawDescriptionHelpFormatter,
-        epilog=textwrap.dedent(
-            """\
-            environment variables:
-              SPACK_CDASH_AUTH_TOKEN
-                authentication token to present to CDash
-            """
-        ),
-    )
-    arguments.add_cdash_args(parser, True)
-    parser.print_help()
-
-
 def install_all_specs_from_active_environment(
     install_kwargs, only_concrete, cli_test_arg, reporter_factory
 ):
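The deleted helper reappears as `spack.cmd.common.arguments.print_cdash_help()` at the call site in the next hunk (and the `spack test run` hunk further down makes the same swap). A stdlib-only re-creation of what it printed, reconstructed from the deleted lines; the Spack-specific `arguments.add_cdash_args(parser, True)` call is omitted here:

    import argparse
    import textwrap

    def print_cdash_help():
        # An argparse parser used purely as a help-screen printer; the epilog
        # documents the one environment variable CDash reporting reads.
        parser = argparse.ArgumentParser(
            formatter_class=argparse.RawDescriptionHelpFormatter,
            epilog=textwrap.dedent(
                """\
                environment variables:
                  SPACK_CDASH_AUTH_TOKEN
                    authentication token to present to CDash
                """
            ),
        )
        parser.print_help()

    print_cdash_help()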
@@ -496,7 +480,7 @@ def install(parser, args):
     tty.set_verbose(args.verbose or args.install_verbose)

     if args.help_cdash:
-        print_cdash_help()
+        spack.cmd.common.arguments.print_cdash_help()
         return

     if args.no_checksum:

@@ -505,14 +489,14 @@ def install(parser, args):
     if args.deprecated:
         spack.config.set("config:deprecated", True, scope="command_line")

+    spack.cmd.common.arguments.sanitize_reporter_options(args)
+
     def reporter_factory(specs):
         if args.log_format is None:
             return None

         context_manager = spack.report.build_context_manager(
-            reporter=args.reporter(),
-            filename=report_filename(args, specs=specs),
-            specs=specs,
+            reporter=args.reporter(), filename=report_filename(args, specs=specs), specs=specs
         )
         return context_manager
@@ -58,10 +58,7 @@

 #: licensed files that can have LGPL language in them
 #: so far, just this command -- so it can find LGPL things elsewhere
-lgpl_exceptions = [
-    r"lib/spack/spack/cmd/license.py",
-    r"lib/spack/spack/test/cmd/license.py",
-]
+lgpl_exceptions = [r"lib/spack/spack/cmd/license.py", r"lib/spack/spack/test/cmd/license.py"]


 def _all_spack_files(root=spack.paths.prefix):

@@ -129,7 +126,6 @@ def error_messages(self):


 def _check_license(lines, path):
-
     found = []

     for line in lines:
@@ -98,8 +98,7 @@ def load(parser, args):
     if not args.shell:
         specs_str = " ".join(args.constraint) or "SPECS"
         spack.cmd.common.shell_init_instructions(
-            "spack load",
-            " eval `spack load {sh_arg} %s`" % specs_str,
+            "spack load", " eval `spack load {sh_arg} %s`" % specs_str
         )
         return 1
@@ -95,7 +95,7 @@ def location(parser, args):
         spack.cmd.require_active_env("location -e")
         path = ev.active_environment().path
     else:
-        # Get named environment path
+        # Get path of requested environment
         if not ev.exists(args.location_env):
             tty.die("no such environment: '%s'" % args.location_env)
         path = ev.root(args.location_env)
@@ -27,12 +27,7 @@
 """

 # Arguments for display_specs when we find ambiguity
-display_args = {
-    "long": True,
-    "show_flags": False,
-    "variants": False,
-    "indent": 4,
-}
+display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}


 def setup_parser(subparser):
@@ -335,7 +335,7 @@ def not_excluded_fn(args):
         exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))

     def not_excluded(x):
-        return not any(x.satisfies(y, strict=True) for y in exclude_specs)
+        return not any(x.satisfies(y) for y in exclude_specs)

     return not_excluded

@@ -445,9 +445,7 @@ def mirror_create(args):

     mirror_specs = concrete_specs_from_user(args)
     create_mirror_for_individual_specs(
-        mirror_specs,
-        path=path,
-        skip_unstable_versions=args.skip_unstable_versions,
+        mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions
     )


@@ -467,9 +465,7 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
 def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions, selection_fn):
     mirror_specs = concrete_specs_from_environment(selection_fn=selection_fn)
     create_mirror_for_individual_specs(
-        mirror_specs,
-        path=path,
-        skip_unstable_versions=skip_unstable_versions,
+        mirror_specs, path=path, skip_unstable_versions=skip_unstable_versions
     )
@@ -180,10 +180,7 @@ def loads(module_type, specs, args, out=None):
         for spec in specs
     )

-    module_commands = {
-        "tcl": "module load ",
-        "lmod": "module load ",
-    }
+    module_commands = {"tcl": "module load ", "lmod": "module load "}

     d = {"command": "" if not args.shell else module_commands[module_type], "prefix": args.prefix}

@@ -368,18 +365,14 @@ def refresh(module_type, specs, args):


 def modules_cmd(parser, args, module_type, callbacks=callbacks):

     # Qualifiers to be used when querying the db for specs
-    constraint_qualifiers = {
-        "refresh": {"installed": True, "known": True},
-    }
+    constraint_qualifiers = {"refresh": {"installed": True, "known": True}}
     query_args = constraint_qualifiers.get(args.subparser_name, {})

     # Get the specs that match the query from the DB
     specs = args.specs(**query_args)

     try:
         callbacks[args.subparser_name](module_type, specs, args)

     except MultipleSpecsMatch:
@@ -97,41 +97,28 @@ def setup_parser(subparser):

 def _process_result(result, show, required_format, kwargs):
     result.raise_if_unsat()
-    opt, *_ = min(result.answers)
+    opt, _, _ = min(result.answers)

-    # dump the solutions as concretized specs
     if ("opt" in show) and (not required_format):
         tty.msg("Best of %d considered solutions." % result.nmodels)
         tty.msg("Optimization Criteria:")

-        maxlen = max(len(name) for name in result.criteria)
-        max_depth = max(len(v) for v in result.criteria.values() if isinstance(v, list))
+        maxlen = max(len(s[2]) for s in result.criteria)
+        color.cprint("@*{ Priority Criterion %sInstalled ToBuild}" % ((maxlen - 10) * " "))

-        header = "@*{"
-        header += "".join(f"{depth:<4}" for depth in range(max_depth))
-        header += "Criterion}"
-        color.cprint(header)
-
-        # make non-zero numbers red
-        def highlight(n, c):
-            return color.colorize(f"@{c}{{{n:<4}}}" if n > 0 else f"{n:<4}")
-
-        for i, (name, cost) in enumerate(result.criteria.items(), 1):
-            colored_name = name.replace("build:", "@c{build:}")
-            colored_name = colored_name.replace("reuse:", "@B{reuse:}")
-            colored_name = colored_name.replace("fixed:", "@G{fixed:}")
-            colored_name = color.colorize(colored_name)
-
-            if isinstance(cost, int):
-                print(highlight(cost, "G") + " " * (max_depth - 1) + colored_name)
-            else:
-                print(
-                    "".join(highlight(c, "c" if "build:" in name else "B") for c in cost)
-                    + colored_name
-                )
+        fmt = " @K{%%-8d} %%-%ds%%9s %%7s" % maxlen
+        for i, (installed_cost, build_cost, name) in enumerate(result.criteria, 1):
+            color.cprint(
+                fmt
+                % (
+                    i,
+                    name,
+                    "-" if build_cost is None else installed_cost,
+                    installed_cost if build_cost is None else build_cost,
+                )
+            )
         print()

+    # dump the solutions as concretized specs
     if "solutions" in show:
         for spec in result.specs:
             # With -y, just print YAML to output.
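A plain-Python sketch of the table this hunk restores, with the color markup stripped; the criteria tuples are made up, but they follow the (installed_cost, build_cost, name) shape the new loop unpacks:

    criteria = [(0, 3, "version badness"), (2, None, "deprecated versions used")]
    maxlen = max(len(name) for _, _, name in criteria)
    fmt = "  %%-8d  %%-%ds%%9s  %%7s" % maxlen
    print("  Priority  Criterion %sInstalled  ToBuild" % ((maxlen - 10) * " "))
    for i, (installed_cost, build_cost, name) in enumerate(criteria, 1):
        # A criterion with no build cost shows "-" under Installed and its
        # cost under ToBuild, mirroring the conditionals in the hunk above.
        print(fmt % (i, name,
                     "-" if build_cost is None else installed_cost,
                     installed_cost if build_cost is None else build_cost))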
@@ -195,11 +182,7 @@ def solve(parser, args):
     # set up solver parameters
     # Note: reuse and other concretizer prefs are passed as configuration
     result = solver.solve(
-        specs,
-        out=output,
-        timers=args.timers,
-        stats=args.stats,
-        setup_only=setup_only,
+        specs, out=output, timers=args.timers, stats=args.stats, setup_only=setup_only
     )
     if not setup_only:
         _process_result(result, show, required_format, kwargs)
@@ -110,7 +110,7 @@ def spec(parser, args):
     else:
         tty.die("spack spec requires at least one spec or an active environment")

-    for (input, output) in specs:
+    for input, output in specs:
         # With -y, just print YAML to output.
         if args.format:
             if args.format == "yaml":
@@ -30,20 +30,13 @@ def grouper(iterable, n, fillvalue=None):


 #: List of directories to exclude from checks -- relative to spack root
-exclude_directories = [
-    os.path.relpath(spack.paths.external_path, spack.paths.prefix),
-]
+exclude_directories = [os.path.relpath(spack.paths.external_path, spack.paths.prefix)]

 #: Order in which tools should be run. flake8 is last so that it can
 #: double-check the results of other tools (if, e.g., --fix was provided)
 #: The list maps an executable name to a method to ensure the tool is
 #: bootstrapped or present in the environment.
-tool_names = [
-    "isort",
-    "black",
-    "flake8",
-    "mypy",
-]
+tool_names = ["isort", "black", "flake8", "mypy"]

 #: tools we run in spack style
 tools = {}

@@ -52,7 +45,7 @@ def grouper(iterable, n, fillvalue=None):
 mypy_ignores = [
     # same as `disable_error_code = "annotation-unchecked"` in pyproject.toml, which
     # doesn't exist in mypy 0.971 for Python 3.6
-    "[annotation-unchecked]",
+    "[annotation-unchecked]"
 ]

@@ -150,10 +143,7 @@ def setup_parser(subparser):
         help="branch to compare against to determine changed files (default: develop)",
     )
     subparser.add_argument(
-        "-a",
-        "--all",
-        action="store_true",
-        help="check all files, not just changed files",
+        "-a", "--all", action="store_true", help="check all files, not just changed files"
     )
     subparser.add_argument(
         "-r",

@@ -178,10 +168,7 @@ def setup_parser(subparser):
         help="format automatically if possible (e.g., with isort, black)",
     )
     subparser.add_argument(
-        "--root",
-        action="store",
-        default=None,
-        help="style check a different spack instance",
+        "--root", action="store", default=None, help="style check a different spack instance"
     )

     tool_group = subparser.add_mutually_exclusive_group()
@@ -211,6 +198,7 @@ def rewrite_and_print_output(
     output, args, re_obj=re.compile(r"^(.+):([0-9]+):"), replacement=r"{0}:{1}:"
 ):
     """rewrite ouput with <file>:<line>: format to respect path args"""
+
     # print results relative to current working directory
     def translate(match):
         return replacement.format(cwd_relative(match.group(1), args), *list(match.groups()[1:]))

@@ -281,24 +269,10 @@ def run_mypy(mypy_cmd, file_list, args):
         os.path.join(spack.paths.prefix, "pyproject.toml"),
         "--show-error-codes",
     ]
-    mypy_arg_sets = [
-        common_mypy_args
-        + [
-            "--package",
-            "spack",
-            "--package",
-            "llnl",
-        ]
-    ]
+    mypy_arg_sets = [common_mypy_args + ["--package", "spack", "--package", "llnl"]]
     if "SPACK_MYPY_CHECK_PACKAGES" in os.environ:
         mypy_arg_sets.append(
-            common_mypy_args
-            + [
-                "--package",
-                "packages",
-                "--disable-error-code",
-                "no-redef",
-            ]
+            common_mypy_args + ["--package", "packages", "--disable-error-code", "no-redef"]
         )

     returncode = 0
@@ -11,7 +11,6 @@
 import re
 import shutil
 import sys
-import textwrap

 from llnl.util import lang, tty
 from llnl.util.tty import colify

@@ -34,9 +33,7 @@ def setup_parser(subparser):

     # Run
     run_parser = sp.add_parser(
-        "run",
-        description=test_run.__doc__,
-        help=spack.cmd.first_line(test_run.__doc__),
+        "run", description=test_run.__doc__, help=spack.cmd.first_line(test_run.__doc__)
     )

     alias_help_msg = "Provide an alias for this test-suite"

@@ -81,9 +78,7 @@ def setup_parser(subparser):

     # List
     list_parser = sp.add_parser(
-        "list",
-        description=test_list.__doc__,
-        help=spack.cmd.first_line(test_list.__doc__),
+        "list", description=test_list.__doc__, help=spack.cmd.first_line(test_list.__doc__)
     )
     list_parser.add_argument(
         "-a",

@@ -97,9 +92,7 @@ def setup_parser(subparser):

     # Find
     find_parser = sp.add_parser(
-        "find",
-        description=test_find.__doc__,
-        help=spack.cmd.first_line(test_find.__doc__),
+        "find", description=test_find.__doc__, help=spack.cmd.first_line(test_find.__doc__)
     )
     find_parser.add_argument(
         "filter",

@@ -109,9 +102,7 @@ def setup_parser(subparser):

     # Status
     status_parser = sp.add_parser(
-        "status",
-        description=test_status.__doc__,
-        help=spack.cmd.first_line(test_status.__doc__),
+        "status", description=test_status.__doc__, help=spack.cmd.first_line(test_status.__doc__)
     )
     status_parser.add_argument(
         "names", nargs=argparse.REMAINDER, help="Test suites for which to print status"

@@ -148,9 +139,7 @@ def setup_parser(subparser):

     # Remove
     remove_parser = sp.add_parser(
-        "remove",
-        description=test_remove.__doc__,
-        help=spack.cmd.first_line(test_remove.__doc__),
+        "remove", description=test_remove.__doc__, help=spack.cmd.first_line(test_remove.__doc__)
     )
     arguments.add_common_arguments(remove_parser, ["yes_to_all"])
     remove_parser.add_argument(
@@ -171,20 +160,11 @@ def test_run(args):

     # cdash help option
     if args.help_cdash:
-        parser = argparse.ArgumentParser(
-            formatter_class=argparse.RawDescriptionHelpFormatter,
-            epilog=textwrap.dedent(
-                """\
-                environment variables:
-                  SPACK_CDASH_AUTH_TOKEN
-                    authentication token to present to CDash
-                """
-            ),
-        )
-        arguments.add_cdash_args(parser, True)
-        parser.print_help()
+        arguments.print_cdash_help()
         return

+    arguments.sanitize_reporter_options(args)
+
     # set config option for fail-fast
     if args.fail_fast:
         spack.config.set("config:fail_fast", True, scope="command_line")

@@ -199,11 +179,7 @@ def test_run(args):
     specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
     specs_to_test = []
     for spec in specs:
-        matching = spack.store.db.query_local(
-            spec,
-            hashes=hashes,
-            explicit=explicit,
-        )
+        matching = spack.store.db.query_local(spec, hashes=hashes, explicit=explicit)
         if spec and not matching:
             tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
             """

@@ -237,22 +213,15 @@ def test_run(args):
     )


+def report_filename(args, test_suite):
+    return os.path.abspath(args.log_file or "test-{}".format(test_suite.name))
+
+
 def create_reporter(args, specs_to_test, test_suite):
     if args.log_format is None:
         return None

-    filename = args.cdash_upload_url
-    if not filename:
-        if args.log_file:
-            if os.path.isabs(args.log_file):
-                log_file = args.log_file
-            else:
-                log_dir = os.getcwd()
-                log_file = os.path.join(log_dir, args.log_file)
-        else:
-            log_file = os.path.join(os.getcwd(), "test-%s" % test_suite.name)
-        filename = log_file
+    filename = report_filename(args, test_suite)

     context_manager = spack.report.test_context_manager(
         reporter=args.reporter(),
         filename=filename,
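The nested filename logic collapses into the `report_filename` helper added above; `os.path.abspath` covers both the relative and absolute `--log-file` cases in one call. A stdlib-only sketch of the behavior:

    import os

    def report_filename(log_file, suite_name):
        # abspath() resolves a relative --log-file against the current
        # directory, which is what the deleted isabs()/join() branches did
        # by hand; with no log file, a per-suite default name is used.
        return os.path.abspath(log_file or "test-{}".format(suite_name))

    print(report_filename(None, "mysuite"))      # <cwd>/test-mysuite
    print(report_filename("/tmp/out.xml", "x"))  # /tmp/out.xml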
@@ -5,7 +5,7 @@
 import spack.cmd.common.env_utility as env_utility

 description = (
-    "run a command in a spec's test environment, " "or dump its environment to screen or file"
+    "run a command in a spec's test environment, or dump its environment to screen or file"
 )
 section = "admin"
 level = "long"
@@ -63,7 +63,7 @@ def tutorial(parser, args):
     if not tty.get_yes_or_no("Are you sure you want to proceed?"):
         tty.die("Aborted")

-    rm_cmds = ["rm -f %s" % f for f in rm_configs]
+    rm_cmds = [f"rm -f {f}" for f in rm_configs]
     tty.msg("Reverting compiler and repository configuration", *rm_cmds)
     for path in rm_configs:
         if os.path.exists(path):

@@ -71,19 +71,19 @@ def tutorial(parser, args):

     tty.msg(
         "Ensuring that the tutorial binary mirror is configured:",
-        "spack mirror add tutorial %s" % tutorial_mirror,
+        f"spack mirror add tutorial {tutorial_mirror}",
     )
     mirror_config = syaml_dict()
     mirror_config["tutorial"] = tutorial_mirror
     spack.config.set("mirrors", mirror_config, scope="user")

-    tty.msg("Ensuring that we trust tutorial binaries", "spack gpg trust %s" % tutorial_key)
+    tty.msg("Ensuring that we trust tutorial binaries", f"spack gpg trust {tutorial_key}")
     spack.util.gpg.trust(tutorial_key)

     # Note that checkout MUST be last. It changes Spack under our feet.
     # If you don't put this last, you'll get import errors for the code
     # that follows (exacerbated by the various lazy singletons we use)
-    tty.msg("Ensuring we're on the releases/v{0}.{1} branch".format(*spack.spack_version_info[:2]))
+    tty.msg(f"Ensuring we're on the {tutorial_branch} branch")
     git = spack.util.git.git(required=True)
     with working_dir(spack.paths.prefix):
         git("checkout", tutorial_branch)
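The %-to-f-string rewrites here are behavior-preserving, with one exception worth noting: the final message now echoes `tutorial_branch` directly instead of rebuilding the branch name from `spack.spack_version_info`. A quick equivalence check for the cosmetic cases (the key path is an illustrative value):

    tutorial_key = "share/spack/keys/tutorial.pub"  # illustrative value
    assert "spack gpg trust %s" % tutorial_key == f"spack gpg trust {tutorial_key}"
    print("formats agree")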
@@ -31,12 +31,7 @@
 """

 # Arguments for display_specs when we find ambiguity
-display_args = {
-    "long": True,
-    "show_flags": False,
-    "variants": False,
-    "indent": 4,
-}
+display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}


 def setup_parser(subparser):
@@ -133,7 +128,7 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False, o
     return specs_from_cli


-def installed_dependents(specs, env):
+def installed_runtime_dependents(specs, env):
     """Map each spec to a list of its installed dependents.

     Args:

@@ -160,10 +155,10 @@ def installed_dependents(specs, env):

     for spec in specs:
         for dpt in traverse.traverse_nodes(
-            spec.dependents(deptype="all"),
+            spec.dependents(deptype=("link", "run")),
             direction="parents",
             visited=visited,
-            deptype="all",
+            deptype=("link", "run"),
             root=True,
             key=lambda s: s.dag_hash(),
         ):
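Restricting both the starting edge set and the traversal itself to link/run dependencies means packages that only consume a spec at build time no longer count as blocking dependents. A self-contained sketch of the filtering idea (plain data, not Spack's API):

    # Installed dependents of hdf5, tagged by dependency type.
    dependents = {"hdf5": [("cmake-client-pkg", "build"), ("app", "link")]}

    runtime_only = [
        name for name, dtype in dependents["hdf5"] if dtype in ("link", "run")
    ]
    print(runtime_only)  # ['app'] -- the build-only dependent is ignored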
@@ -236,12 +231,7 @@ def do_uninstall(specs, force=False):
     hashes_to_remove = set(s.dag_hash() for s in specs)

     for s in traverse.traverse_nodes(
-        specs,
-        order="topo",
-        direction="children",
-        root=True,
-        cover="nodes",
-        deptype="all",
+        specs, order="topo", direction="children", root=True, cover="nodes", deptype="all"
     ):
         if s.dag_hash() in hashes_to_remove:
             spack.package_base.PackageBase.uninstall_by_spec(s, force=force)
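The arguments collapse onto one line but keep order="topo", which is what makes a bulk uninstall safe: dependents are removed before their dependencies. A self-contained sketch of that ordering guarantee:

    # Dependency edges point from dependent to dependency.
    graph = {"app": ["hdf5"], "hdf5": ["zlib"], "zlib": []}

    def topo_uninstall_order(roots):
        seen, postorder = set(), []
        def visit(node):
            if node in seen:
                return
            seen.add(node)
            for dep in graph[node]:
                visit(dep)
            postorder.append(node)  # dependencies land first
        for root in roots:
            visit(root)
        return list(reversed(postorder))  # so dependents come out first

    print(topo_uninstall_order(["app"]))  # ['app', 'hdf5', 'zlib']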
@@ -265,7 +255,7 @@ def get_uninstall_list(args, specs, env):
     # args.all takes care of the case where '-a' is given in the cli
     base_uninstall_specs = set(find_matching_specs(env, specs, args.all, args.force))

-    active_dpts, outside_dpts = installed_dependents(base_uninstall_specs, env)
+    active_dpts, outside_dpts = installed_runtime_dependents(base_uninstall_specs, env)
     # It will be useful to track the unified set of specs with dependents, as
     # well as to separately track specs in the current env with dependents
     spec_to_dpts = {}
@@ -26,7 +26,6 @@
 description = "run spack's unit tests (wrapper around pytest)"
 section = "developer"
 level = "long"
-is_windows = sys.platform == "win32"


 def setup_parser(subparser):

@@ -212,7 +211,7 @@ def unit_test(parser, args, unknown_args):
     # mock configuration used by unit tests
     # Note: skip on windows here because for the moment,
     # clingo is wholly unsupported from bootstrap
-    if not is_windows:
+    if sys.platform != "win32":
         with spack.bootstrap.ensure_bootstrap_configuration():
             spack.bootstrap.ensure_core_dependencies()
     if pytest is None:
@@ -77,8 +77,7 @@ def unload(parser, args):
         specs_str = " ".join(args.specs) or "SPECS"

         spack.cmd.common.shell_init_instructions(
-            "spack unload",
-            " eval `spack unload {sh_arg}` %s" % specs_str,
+            "spack unload", " eval `spack unload {sh_arg}` %s" % specs_str
         )
         return 1
@@ -106,12 +106,7 @@ def setup_parser(subparser):


 def url(parser, args):
-    action = {
-        "parse": url_parse,
-        "list": url_list,
-        "summary": url_summary,
-        "stats": url_stats,
-    }
+    action = {"parse": url_parse, "list": url_list, "summary": url_summary, "stats": url_stats}

     action[args.subcommand](args)
@@ -28,8 +28,6 @@

 __all__ = ["Compiler"]

-is_windows = sys.platform == "win32"
-

 @llnl.util.lang.memoized
 def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):

@@ -598,7 +596,7 @@ def search_regexps(cls, language):
         suffixes = [""]
         # Windows compilers generally have an extension of some sort
         # as do most files on Windows, handle that case here
-        if is_windows:
+        if sys.platform == "win32":
             ext = r"\.(?:exe|bat)"
             cls_suf = [suf + ext for suf in cls.suffixes]
             ext_suf = [ext]
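Several hunks in this compare drop a module-level `is_windows` constant in favor of testing `sys.platform` where it is used (the unit-test command above gets the same treatment). A minimal illustration of the inlined check:

    import sys

    # Same predicate, evaluated inline; no import-time global is left for
    # other modules to grow a dependency on.
    ext = r"\.(?:exe|bat)" if sys.platform == "win32" else ""
    print("compiler suffix pattern:", repr(ext))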
@@ -84,7 +84,7 @@ def _to_dict(compiler):
     d = {}
     d["spec"] = str(compiler.spec)
     d["paths"] = dict((attr, getattr(compiler, attr, None)) for attr in _path_instance_vars)
-    d["flags"] = dict((fname, fvals) for fname, fvals in compiler.flags)
+    d["flags"] = dict((fname, " ".join(fvals)) for fname, fvals in compiler.flags.items())
     d["flags"].update(
         dict(
             (attr, getattr(compiler, attr, None))
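The old right-hand side iterated `compiler.flags` directly; if that attribute is a dict (as the `.items()` in the replacement implies), iterating it yields bare key strings and the two-name unpack fails. A minimal reproduction with a stand-in dict:

    flags = {"cflags": ["-O2", "-g"], "ldflags": ["-L/opt/lib"]}  # stand-in

    try:
        dict((fname, fvals) for fname, fvals in flags)  # old form
    except ValueError as exc:
        # Iterating a dict yields keys, so each key string is unpacked
        # into two names and blows up.
        print("old form fails:", exc)

    fixed = dict((fname, " ".join(fvals)) for fname, fvals in flags.items())
    print(fixed)  # {'cflags': '-O2 -g', 'ldflags': '-L/opt/lib'}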
@@ -619,11 +619,9 @@ def _default(search_paths):
     command_arguments = []
     files_to_be_tested = fs.files_in(*search_paths)
     for compiler_name in spack.compilers.supported_compilers():
-
         compiler_cls = class_for_compiler_name(compiler_name)
-
         for language in ("cc", "cxx", "f77", "fc"):

             # Select only the files matching a regexp
             for (file, full_path), regexp in itertools.product(
                 files_to_be_tested, compiler_cls.search_regexps(language)
@@ -36,36 +36,89 @@ def extract_version_from_output(cls, output):
             ver = match.group(match.lastindex)
         return ver

+    # C++ flags based on CMake Modules/Compiler/AppleClang-CXX.cmake
+
     @property
     def cxx11_flag(self):
-        # Adapted from CMake's AppleClang-CXX rules
         # Spack's AppleClang detection only valid from Xcode >= 4.6
-        if self.real_version < spack.version.ver("4.0.0"):
+        if self.real_version < spack.version.ver("4.0"):
             raise spack.compiler.UnsupportedCompilerFlag(
-                self, "the C++11 standard", "cxx11_flag", "Xcode < 4.0.0"
+                self, "the C++11 standard", "cxx11_flag", "Xcode < 4.0"
             )
         return "-std=c++11"

     @property
     def cxx14_flag(self):
-        # Adapted from CMake's rules for AppleClang
-        if self.real_version < spack.version.ver("5.1.0"):
+        if self.real_version < spack.version.ver("5.1"):
             raise spack.compiler.UnsupportedCompilerFlag(
-                self, "the C++14 standard", "cxx14_flag", "Xcode < 5.1.0"
+                self, "the C++14 standard", "cxx14_flag", "Xcode < 5.1"
             )
-        elif self.real_version < spack.version.ver("6.1.0"):
+        elif self.real_version < spack.version.ver("6.1"):
             return "-std=c++1y"

         return "-std=c++14"

     @property
     def cxx17_flag(self):
-        # Adapted from CMake's rules for AppleClang
-        if self.real_version < spack.version.ver("6.1.0"):
+        if self.real_version < spack.version.ver("6.1"):
             raise spack.compiler.UnsupportedCompilerFlag(
-                self, "the C++17 standard", "cxx17_flag", "Xcode < 6.1.0"
+                self, "the C++17 standard", "cxx17_flag", "Xcode < 6.1"
             )
-        return "-std=c++1z"
+        elif self.real_version < spack.version.ver("10.0"):
+            return "-std=c++1z"
+        return "-std=c++17"
+
+    @property
+    def cxx20_flag(self):
+        if self.real_version < spack.version.ver("10.0"):
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C++20 standard", "cxx20_flag", "Xcode < 10.0"
+            )
+        elif self.real_version < spack.version.ver("13.0"):
+            return "-std=c++2a"
+        return "-std=c++20"
+
+    @property
+    def cxx23_flag(self):
+        if self.real_version < spack.version.ver("13.0"):
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C++23 standard", "cxx23_flag", "Xcode < 13.0"
+            )
+        return "-std=c++2b"
+
+    # C flags based on CMake Modules/Compiler/AppleClang-C.cmake
+
+    @property
+    def c99_flag(self):
+        if self.real_version < spack.version.ver("4.0"):
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C99 standard", "c99_flag", "< 4.0"
+            )
+        return "-std=c99"
+
+    @property
+    def c11_flag(self):
+        if self.real_version < spack.version.ver("4.0"):
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C11 standard", "c11_flag", "< 4.0"
+            )
+        return "-std=c11"
+
+    @property
+    def c17_flag(self):
+        if self.real_version < spack.version.ver("11.0"):
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C17 standard", "c17_flag", "< 11.0"
+            )
+        return "-std=c17"
+
+    @property
+    def c23_flag(self):
+        if self.real_version < spack.version.ver("11.0.3"):
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C23 standard", "c23_flag", "< 11.0.3"
+            )
+        return "-std=c2x"

     def setup_custom_environment(self, pkg, env):
         """Set the DEVELOPER_DIR environment for the Xcode toolchain.

@@ -154,10 +207,7 @@ def setup_custom_environment(self, pkg, env):
             ),
         )

-        real_dirs = [
-            "Toolchains/XcodeDefault.xctoolchain/usr/bin",
-            "usr/bin",
-        ]
+        real_dirs = ["Toolchains/XcodeDefault.xctoolchain/usr/bin", "usr/bin"]

         bins = ["c++", "c89", "c99", "cc", "clang", "clang++", "cpp"]
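The new properties encode a version-gated flag table (per the comments, following CMake's AppleClang modules). A condensed, self-contained view of the C++17/C++20 portion, with Xcode versions as plain tuples:

    def cxx_standard_flag(standard, xcode_version):
        # Each entry: (minimum Xcode version, flag to use from that version on);
        # the last entry whose minimum is met wins, mirroring the if/elif
        # ladders in the hunk above.
        table = {
            "c++17": [((6, 1), "-std=c++1z"), ((10, 0), "-std=c++17")],
            "c++20": [((10, 0), "-std=c++2a"), ((13, 0), "-std=c++20")],
        }
        flag = None
        for min_version, candidate in table[standard]:
            if xcode_version >= min_version:
                flag = candidate
        if flag is None:
            raise ValueError(f"Xcode {xcode_version} cannot build {standard}")
        return flag

    print(cxx_standard_flag("c++20", (12, 4)))  # -std=c++2a
    print(cxx_standard_flag("c++17", (11, 0)))  # -std=c++17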
@@ -89,6 +89,11 @@ def cxx14_flag(self):
             return "-std=c++14"
         return "-h std=c++14"

+    @property
+    def cxx17_flag(self):
+        if self.is_clang_based:
+            return "-std=c++17"
+
     @property
     def c99_flag(self):
         if self.is_clang_based:
@@ -128,10 +128,23 @@ def c99_flag(self):

     @property
     def c11_flag(self):
-        if self.real_version < ver("6.1.0"):
-            raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag", "< 6.1.0")
-        else:
-            return "-std=c11"
+        if self.real_version < ver("3.0"):
+            raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag", "< 3.0")
+        if self.real_version < ver("3.1"):
+            return "-std=c1x"
+        return "-std=c11"
+
+    @property
+    def c17_flag(self):
+        if self.real_version < ver("6.0"):
+            raise UnsupportedCompilerFlag(self, "the C17 standard", "c17_flag", "< 6.0")
+        return "-std=c17"
+
+    @property
+    def c23_flag(self):
+        if self.real_version < ver("9.0"):
+            raise UnsupportedCompilerFlag(self, "the C23 standard", "c23_flag", "< 9.0")
+        return "-std=c2x"

     @property
     def cc_pic_flag(self):
@@ -103,11 +103,22 @@ def short_msvc_version(self):
         """
         This is the shorthand VCToolset version of form
         MSVC<short-ver> *NOT* the full version, for that see
-        Msvc.msvc_version
+        Msvc.msvc_version or MSVC.platform_toolset_ver for the
+        raw platform toolset version
         """
-        ver = self.msvc_version[:2].joined.string[:3]
+        ver = self.platform_toolset_ver
         return "MSVC" + ver

+    @property
+    def platform_toolset_ver(self):
+        """
+        This is the platform toolset version of current MSVC compiler
+        i.e. 142.
+        This is different from the VC toolset version as established
+        by `short_msvc_version`
+        """
+        return self.msvc_version[:2].joined.string[:3]
+
     @property
     def cl_version(self):
         """Cl toolset version"""
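The expression moved into `platform_toolset_ver` drops the patch component, fuses major and minor, and keeps three digits. What that computes, on an illustrative version tuple (a stand-in, not a claim about real cl.exe output):

    version = (14, 29, 30133)                      # stand-in for a Version object
    joined = "".join(str(p) for p in version[:2])  # "1429"
    print("MSVC" + joined[:3])                     # MSVC142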
@@ -134,7 +134,7 @@ def _valid_virtuals_and_externals(self, spec):

         externals = spec_externals(cspec)
         for ext in externals:
-            if ext.satisfies(spec):
+            if ext.intersects(spec):
                 usable.append(ext)

         # If nothing is in the usable list now, it's because we aren't

@@ -200,7 +200,7 @@ def concretize_version(self, spec):

         # List of versions we could consider, in sorted order
         pkg_versions = spec.package_class.versions
-        usable = [v for v in pkg_versions if any(v.satisfies(sv) for sv in spec.versions)]
+        usable = [v for v in pkg_versions if any(v.intersects(sv) for sv in spec.versions)]

         yaml_prefs = PackagePrefs(spec.name, "version")

@@ -344,7 +344,7 @@ def concretize_architecture(self, spec):
         new_target_arch = spack.spec.ArchSpec((None, None, str(new_target)))
         curr_target_arch = spack.spec.ArchSpec((None, None, str(curr_target)))

-        if not new_target_arch.satisfies(curr_target_arch):
+        if not new_target_arch.intersects(curr_target_arch):
             # new_target is an incorrect guess based on preferences
             # and/or default
             valid_target_ranges = str(curr_target).split(",")

@@ -743,9 +743,7 @@ def _concretize_specs_together_new(*abstract_specs, **kwargs):
     import spack.solver.asp

     solver = spack.solver.asp.Solver()
-    solver.tests = kwargs.get("tests", False)
-
-    result = solver.solve(abstract_specs)
+    result = solver.solve(abstract_specs, tests=kwargs.get("tests", False))
     result.raise_if_unsat()
     return [s.copy() for s in result.specs]
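The same directional-versus-overlap distinction from the Spec hunks applies to versions and architectures here: a candidate can intersect a requested range without being guaranteed to satisfy it. A hedged sketch (assumes a Spack checkout on sys.path; API names follow the hunk above):

    import spack.version

    candidate = spack.version.ver("1.8.2")
    constraint = spack.version.ver("1.8:")   # 1.8 or higher
    # intersects() is the overlap test the concretizer now uses when
    # filtering usable package versions against the requested ranges.
    print(candidate.intersects(constraint))  # True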
@@ -793,7 +793,7 @@ def _config():
     configuration_paths = [
         # Default configuration scope is the lowest-level scope. These are
         # versioned with Spack and can be overridden by systems, sites or users
-        configuration_defaults_path,
+        configuration_defaults_path
     ]

     disable_local_config = "SPACK_DISABLE_LOCAL_CONFIG" in os.environ

@@ -801,15 +801,11 @@ def _config():
     # System configuration is per machine.
     # This is disabled if user asks for no local configuration.
     if not disable_local_config:
-        configuration_paths.append(
-            ("system", spack.paths.system_config_path),
-        )
+        configuration_paths.append(("system", spack.paths.system_config_path))

     # Site configuration is per spack instance, for sites or projects
     # No site-level configs should be checked into spack by default.
-    configuration_paths.append(
-        ("site", os.path.join(spack.paths.etc_path)),
-    )
+    configuration_paths.append(("site", os.path.join(spack.paths.etc_path)))

     # User configuration can override both spack defaults and site config
     # This is disabled if user asks for no local configuration.
Some files were not shown because too many files have changed in this diff.