Compare commits: `features/c` ... `develop-20` (505 commits)
**.github/dependabot.yml** (5 lines changed, vendored)

```diff
@@ -10,3 +10,8 @@ updates:
     directory: "/lib/spack/docs"
     schedule:
       interval: "daily"
+  # Requirements to run style checks
+  - package-ecosystem: "pip"
+    directory: "/.github/workflows/style"
+    schedule:
+      interval: "daily"
```
**.github/workflows/audit.yaml** (4 lines changed, vendored)

```diff
@@ -22,7 +22,7 @@ jobs:
       matrix:
         operating_system: ["ubuntu-latest", "macos-latest"]
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
     - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
       with:
         python-version: ${{inputs.python_version}}
@@ -34,6 +34,7 @@ jobs:
       run: |
         . share/spack/setup-env.sh
         coverage run $(which spack) audit packages
+        coverage run $(which spack) audit externals
         coverage combine
         coverage xml
     - name: Package audits (without coverage)
@@ -41,6 +42,7 @@ jobs:
       run: |
         . share/spack/setup-env.sh
         $(which spack) audit packages
+        $(which spack) audit externals
     - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
       if: ${{ inputs.with_coverage == 'true' }}
       with:
```
**.github/workflows/bootstrap.yml** (26 lines changed, vendored)

```diff
@@ -24,7 +24,7 @@ jobs:
         make patch unzip which xz python3 python3-devel tree \
         cmake bison bison-devel libstdc++-static
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
         make patch unzip xz-utils python3 python3-dev tree \
         cmake bison
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
         bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
         make patch unzip xz-utils python3 python3-dev tree
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
         make patch unzip which xz python3 python3-devel tree \
         cmake bison
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
       with:
         fetch-depth: 0
     - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
       run: |
         brew install cmake bison@2.7 tree
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
     - name: Bootstrap clingo
       run: |
         source share/spack/setup-env.sh
@@ -179,11 +179,11 @@ jobs:
       run: |
         brew install tree
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
     - name: Bootstrap clingo
       run: |
         set -ex
-        for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
+        for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
           not_found=1
           ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
           echo "Testing $ver_dir"
@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
       with:
         fetch-depth: 0
     - name: Setup repo
@@ -214,7 +214,7 @@ jobs:
     - name: Bootstrap clingo
       run: |
         set -ex
-        for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
+        for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
          not_found=1
          ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
          echo "Testing $ver_dir"
@@ -247,7 +247,7 @@ jobs:
         bzip2 curl file g++ gcc patchelf gfortran git gzip \
         make patch unzip xz-utils python3 python3-dev tree
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
         make patch unzip xz-utils python3 python3-dev tree \
         gawk
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
         # Remove GnuPG since we want to bootstrap it
         sudo rm -rf /usr/local/bin/gpg
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
     - name: Bootstrap GnuPG
       run: |
         source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
         # Remove GnuPG since we want to bootstrap it
         sudo rm -rf /usr/local/bin/gpg
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
     - name: Bootstrap GnuPG
       run: |
         source share/spack/setup-env.sh
```
**.github/workflows/build-containers.yml** (14 lines changed, vendored)

```diff
@@ -56,7 +56,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+        uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2

       - name: Set Container Tag Normal (Nightly)
         run: |
@@ -86,19 +86,19 @@ jobs:
         fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
+        uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
         with:
           name: dockerfiles
           path: dockerfiles

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1
+        uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # @v1

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@4c0219f9ac95b02789c1075625400b2acbff50b1 # @v1
+        uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # @v1

       - name: Log in to GitHub Container Registry
-        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
+        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
@@ -106,13 +106,13 @@ jobs:

       - name: Log in to DockerHub
         if: github.event_name != 'pull_request'
-        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
+        uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # @v2
+        uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # @v2
         with:
           context: dockerfiles/${{ matrix.dockerfile[0] }}
           platforms: ${{ matrix.dockerfile[1] }}
```
**.github/workflows/ci.yaml** (2 lines changed, vendored)

```diff
@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
      if: ${{ github.event_name == 'push' }}
      with:
        fetch-depth: 0
```
**.github/workflows/nightly-win-builds.yml** (2 lines changed, vendored)

```diff
@@ -14,7 +14,7 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
```
**.github/workflows/style/requirements.txt** (new file, +7, vendored)

```diff
@@ -0,0 +1,7 @@
+black==23.9.1
+clingo==5.6.2
+flake8==6.1.0
+isort==5.12.0
+mypy==1.5.1
+types-six==1.16.21.9
+vermin==1.5.2
```
**.github/workflows/unit_tests.yaml** (10 lines changed, vendored)

```diff
@@ -47,7 +47,7 @@ jobs:
         on_develop: false

     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -94,7 +94,7 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -133,7 +133,7 @@ jobs:
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -152,7 +152,7 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -187,7 +187,7 @@ jobs:
       matrix:
         python-version: ["3.10"]
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
```
**.github/workflows/valid-style.yml** (17 lines changed, vendored)

```diff
@@ -18,15 +18,15 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
       with:
         python-version: '3.11'
         cache: 'pip'
     - name: Install Python Packages
       run: |
-        pip install --upgrade pip
-        pip install --upgrade vermin
+        pip install --upgrade pip setuptools
+        pip install -r .github/workflows/style/requirements.txt
     - name: vermin (Spack's Core)
       run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
     - name: vermin (Repositories)
@@ -35,16 +35,17 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
       with:
         python-version: '3.11'
         cache: 'pip'
     - name: Install Python packages
       run: |
-        python3 -m pip install --upgrade pip setuptools types-six black==23.1.0 mypy isort clingo flake8
+        pip install --upgrade pip setuptools
+        pip install -r .github/workflows/style/requirements.txt
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
@@ -68,7 +69,7 @@ jobs:
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
```
**.github/workflows/windows_python.yml** (9 lines changed, vendored)

```diff
@@ -15,7 +15,7 @@ jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -39,7 +39,7 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -63,7 +63,7 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
      with:
        fetch-depth: 0
    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -75,6 +75,5 @@ jobs:
    - name: Build Test
      run: |
        spack compiler find
-        spack external find cmake
-        spack external find ninja
+        spack -d external find cmake ninja
        spack -d install abseil-cpp
```
**SECURITY.md** (32 lines changed)

```diff
@@ -2,24 +2,26 @@

 ## Supported Versions

-We provide security updates for the following releases.
+We provide security updates for `develop` and for the last two
+stable (`0.x`) release series of Spack. Security updates will be
+made available as patch (`0.x.1`, `0.x.2`, etc.) releases.
+
 For more on Spack's release structure, see
 [`README.md`](https://github.com/spack/spack#releases).

-| Version | Supported |
-| ------- | ------------------ |
-| develop | :white_check_mark: |
-| 0.19.x  | :white_check_mark: |
-| 0.18.x  | :white_check_mark: |
-
 ## Reporting a Vulnerability

-To report a vulnerability or other security
-issue, email maintainers@spack.io.
+You can report a vulnerability using GitHub's private reporting
+feature:

-You can expect to hear back within two days.
-If your security issue is accepted, we will do
-our best to release a fix within a week. If
-fixing the issue will take longer than this,
-we will discuss timeline options with you.
+1. Go to [github.com/spack/spack/security](https://github.com/spack/spack/security).
+2. Click "Report a vulnerability" in the upper right corner of that page.
+3. Fill out the form and submit your draft security advisory.
+
+More details are available in
+[GitHub's docs](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability).
+
+You can expect to hear back about security issues within two days.
+If your security issue is accepted, we will do our best to release
+a fix within a week. If fixing the issue will take longer than
+this, we will discuss timeline options with you.
```
```diff
@@ -14,7 +14,7 @@
 ::
 @echo off

-set spack=%SPACK_ROOT%\bin\spack
+set spack="%SPACK_ROOT%"\bin\spack

 ::#######################################################################
 :: This is a wrapper around the spack command that forwards calls to
```
```diff
@@ -39,12 +39,26 @@ function Read-SpackArgs {
     return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
 }

+function Set-SpackEnv {
+    # This method is responsible
+    # for processing the return from $(spack <command>)
+    # which are returned as System.Object[]'s containing
+    # a list of env commands
+    # Invoke-Expression can only handle one command at a time
+    # so we iterate over the list to invoke the env modification
+    # expressions one at a time
+    foreach($envop in $args[0]){
+        Invoke-Expression $envop
+    }
+}
+
 function Invoke-SpackCD {
     if (Compare-CommonArgs $SpackSubCommandArgs) {
-        python $Env:SPACK_ROOT/bin/spack cd -h
+        python "$Env:SPACK_ROOT/bin/spack" cd -h
     }
     else {
-        $LOC = $(python $Env:SPACK_ROOT/bin/spack location $SpackSubCommandArgs)
+        $LOC = $(python "$Env:SPACK_ROOT/bin/spack" location $SpackSubCommandArgs)
         if (($NULL -ne $LOC)){
             if ( Test-Path -Path $LOC){
                 Set-Location $LOC
@@ -61,7 +75,7 @@ function Invoke-SpackCD {

 function Invoke-SpackEnv {
     if (Compare-CommonArgs $SpackSubCommandArgs[0]) {
-        python $Env:SPACK_ROOT/bin/spack env -h
+        python "$Env:SPACK_ROOT/bin/spack" env -h
     }
     else {
         $SubCommandSubCommand = $SpackSubCommandArgs[0]
@@ -69,46 +83,46 @@ function Invoke-SpackEnv {
         switch ($SubCommandSubCommand) {
             "activate" {
                 if (Compare-CommonArgs $SubCommandSubCommandArgs) {
-                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
+                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
                 }
                 elseif ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
-                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
+                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
                 }
                 elseif (!$SubCommandSubCommandArgs) {
-                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
+                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
                 }
                 else {
-                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
-                    $ExecutionContext.InvokeCommand($SpackEnv)
+                    $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
+                    Set-SpackEnv $SpackEnv
                 }
             }
             "deactivate" {
                 if ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
-                    python $Env:SPACK_ROOT/bin/spack env deactivate $SubCommandSubCommandArgs
+                    python "$Env:SPACK_ROOT/bin/spack" env deactivate $SubCommandSubCommandArgs
                 }
                 elseif($SubCommandSubCommandArgs) {
-                    python $Env:SPACK_ROOT/bin/spack env deactivate -h
+                    python "$Env:SPACK_ROOT/bin/spack" env deactivate -h
                 }
                 else {
-                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env deactivate --pwsh)
-                    $ExecutionContext.InvokeCommand($SpackEnv)
+                    $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env deactivate "--pwsh")
+                    Set-SpackEnv $SpackEnv
                 }
             }
-            default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
+            default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
         }
     }
 }

 function Invoke-SpackLoad {
     if (Compare-CommonArgs $SpackSubCommandArgs) {
-        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
+        python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
     }
     elseif ([bool]($SpackSubCommandArgs.Where({($_ -eq "--pwsh") -or ($_ -eq "--list")}))) {
-        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
+        python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
     }
     else {
-        $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
-        $ExecutionContext.InvokeCommand($SpackEnv)
+        $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
+        Set-SpackEnv $SpackEnv
     }
 }

@@ -116,7 +130,7 @@ function Invoke-SpackLoad {
 $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs = Read-SpackArgs $args

 if (Compare-CommonArgs $SpackCMD_params) {
-    python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
+    python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
     exit $LASTEXITCODE
 }

@@ -128,5 +142,5 @@ switch($SpackSubCommand)
     "env" {Invoke-SpackEnv}
     "load" {Invoke-SpackLoad}
     "unload" {Invoke-SpackLoad}
-    default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
+    default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
 }
```
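The same idea — applying a sequence of environment-modification commands one at a time rather than evaluating them as one blob — can be sketched outside PowerShell. Below is a hypothetical Python analogue of `Set-SpackEnv`; the function name and the `NAME=value` operation format are illustrative assumptions, not part of the wrapper above.

```python
import os


def apply_env_ops(env_ops):
    """Hypothetical Python analogue of Set-SpackEnv above.

    Each entry is a single environment operation; they are applied
    one at a time, mirroring the PowerShell loop that feeds each
    element to Invoke-Expression individually.
    """
    for op in env_ops:
        name, sep, value = op.partition("=")
        if sep:
            os.environ[name] = value  # "NAME=value" sets a variable
        else:
            os.environ.pop(name, None)  # bare "NAME" unsets it


# Example with hypothetical operation strings:
apply_env_ops(["SPACK_ENV=/path/to/env", "OLD_PROMPT"])
```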
```diff
@@ -9,9 +9,32 @@
 Bundle
 ------

-``BundlePackage`` represents a set of packages that are expected to work well
-together, such as a collection of commonly used software libraries. The
-associated software is specified as bundle dependencies.
+``BundlePackage`` represents a set of packages that are expected to work
+well together, such as a collection of commonly used software libraries.
+The associated software is specified as dependencies.
+
+If it makes sense, variants, conflicts, and requirements can be added to
+the package. :ref:`Variants <variants>` ensure that common build options
+are consistent across the packages supporting them. :ref:`Conflicts
+and requirements <packaging_conflicts>` prevent attempts to build with known
+bugs or limitations.
+
+For example, if ``MyBundlePackage`` is known to only build on ``linux``,
+it could use the ``require`` directive as follows:
+
+.. code-block:: python
+
+   require("platform=linux", msg="MyBundlePackage only builds on linux")
+
+Spack has a number of built-in bundle packages, such as:
+
+* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
+* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
+* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
+* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_
+
+where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
+``Libc`` is a virtual bundle package for the C standard library.


 ^^^^^^^^
```
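For concreteness, a skeleton of the kind this documentation change describes might look like the sketch below. `MySdk` and its dependencies are hypothetical; `BundlePackage`, `version`, `require`, and `depends_on` are the Spack directives named above.

```python
# A minimal sketch of a hypothetical bundle package. The package and
# dependency names are illustrative; the directives are Spack's own.
from spack.package import *


class MySdk(BundlePackage):
    """Suite of packages expected to work well together."""

    homepage = "https://example.com/mysdk"

    # Bundle packages have no source, so versions are bare names.
    version("2023.09")

    # Known platform limitation, flagged as described above.
    require("platform=linux", msg="MySdk only builds on linux")

    depends_on("zlib")
    depends_on("hdf5 +mpi")
```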
```diff
@@ -214,6 +214,7 @@ def setup(sphinx):
     # Spack classes that intersphinx is unable to resolve
     ("py:class", "spack.version.StandardVersion"),
     ("py:class", "spack.spec.DependencySpec"),
+    ("py:class", "spack.spec.InstallStatus"),
     ("py:class", "spack.spec.SpecfileReaderBase"),
     ("py:class", "spack.install_test.Pb"),
 ]
```
**lib/spack/docs/gpu_configuration.rst** (new file, +113)

```rst
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

==========================
Using External GPU Support
==========================

Many packages come with a ``+cuda`` or ``+rocm`` variant. With no added
configuration Spack will download and install the needed components.
It may be preferable to use existing system support: the following sections
help with using a system installation of GPU libraries.

-----------------------------------
Using an External ROCm Installation
-----------------------------------

Spack breaks down ROCm into many separate component packages. The following
is an example ``packages.yaml`` that organizes a consistent set of ROCm
components for use by dependent packages:

.. code-block:: yaml

   packages:
     all:
       compiler: [rocmcc@=5.3.0]
       variants: amdgpu_target=gfx90a
     hip:
       buildable: false
       externals:
       - spec: hip@5.3.0
         prefix: /opt/rocm-5.3.0/hip
     hsa-rocr-dev:
       buildable: false
       externals:
       - spec: hsa-rocr-dev@5.3.0
         prefix: /opt/rocm-5.3.0/
     llvm-amdgpu:
       buildable: false
       externals:
       - spec: llvm-amdgpu@5.3.0
         prefix: /opt/rocm-5.3.0/llvm/
     comgr:
       buildable: false
       externals:
       - spec: comgr@5.3.0
         prefix: /opt/rocm-5.3.0/
     hipsparse:
       buildable: false
       externals:
       - spec: hipsparse@5.3.0
         prefix: /opt/rocm-5.3.0/
     hipblas:
       buildable: false
       externals:
       - spec: hipblas@5.3.0
         prefix: /opt/rocm-5.3.0/
     rocblas:
       buildable: false
       externals:
       - spec: rocblas@5.3.0
         prefix: /opt/rocm-5.3.0/
     rocprim:
       buildable: false
       externals:
       - spec: rocprim@5.3.0
         prefix: /opt/rocm-5.3.0/rocprim/

This is in combination with the following compiler definition:

.. code-block:: yaml

   compilers:
   - compiler:
       spec: rocmcc@=5.3.0
       paths:
         cc: /opt/rocm-5.3.0/bin/amdclang
         cxx: /opt/rocm-5.3.0/bin/amdclang++
         f77: null
         fc: /opt/rocm-5.3.0/bin/amdflang
       operating_system: rhel8
       target: x86_64

This includes the following considerations:

- Each of the listed externals specifies ``buildable: false`` to force Spack
  to use only the externals we defined.
- ``spack external find`` can automatically locate some of the ``hip``/``rocm``
  packages, but not all of them, and furthermore not in a manner that
  guarantees a complementary set if multiple ROCm installations are available.
- The ``prefix`` is the same for several components, but note that others
  require listing one of the subdirectories as a prefix.

-----------------------------------
Using an External CUDA Installation
-----------------------------------

CUDA is split into fewer components and is simpler to specify:

.. code-block:: yaml

   packages:
     all:
       variants:
       - cuda_arch=70
     cuda:
       buildable: false
       externals:
       - spec: cuda@11.0.2
         prefix: /opt/cuda/cuda-11.0.2/

where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.
```
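Once `cuda` is pinned to the external as above, dependent packages resolve its prefix from `packages.yaml`. A hypothetical consumer might query it like the sketch below; the package name and CMake variable are illustrative, while `self.spec["cuda"].prefix` is the standard Spack accessor.

```python
# Hypothetical consumer of the external CUDA installation; with the
# packages.yaml above, spec["cuda"].prefix is /opt/cuda/cuda-11.0.2/.
from spack.package import *


class MyGpuApp(CMakePackage, CudaPackage):
    """Illustrative package with a +cuda variant."""

    def cmake_args(self):
        args = []
        if self.spec.satisfies("+cuda"):
            # Point the build at the external toolkit prefix.
            args.append(f"-DCUDAToolkit_ROOT={self.spec['cuda'].prefix}")
        return args
```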
```diff
@@ -77,6 +77,7 @@ or refer to the full manual below.
    extensions
    pipelines
    signing
+   gpu_configuration

 .. toctree::
    :maxdepth: 2
```
```diff
@@ -363,6 +363,42 @@ one of these::
 If Spack finds none of these variables set, it will look for ``vim``, ``vi``, ``emacs``,
 ``nano``, and ``notepad``, in that order.

+^^^^^^^^^^^^^^^^^
+Bundling software
+^^^^^^^^^^^^^^^^^
+
+If you have a collection of software expected to work well together with
+no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
+Examples where bundle packages can be useful include defining suites of
+applications (e.g, `EcpProxyApps
+<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
+(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
+and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).
+
+These versioned packages primarily consist of dependencies on the associated
+software packages. They can include :ref:`variants <variants>` to ensure
+common build options are consistently applied to dependencies. Known build
+failures, such as not building on a platform or when certain compilers or
+variants are used, can be flagged with :ref:`conflicts <packaging_conflicts>`.
+Build requirements, such as only building with specific compilers, can similarly
+be flagged with :ref:`requires <packaging_conflicts>`.
+
+The ``spack create --template bundle`` command will create a skeleton
+``BundlePackage`` ``package.py`` for you:
+
+.. code-block:: console
+
+   $ spack create --template bundle --name coolsdk
+
+Now you can fill in the basic package documentation, version(s), and software
+package dependencies along with any other relevant customizations.
+
+.. note::
+
+   Remember that bundle packages have no software of their own so there
+   is nothing to download.
+
+
 ^^^^^^^^^^^^^^^^^^^^^^^^^
 Non-downloadable software
 ^^^^^^^^^^^^^^^^^^^^^^^^^
```
```diff
@@ -610,7 +646,16 @@ add a line like this in the package class:
    version("8.2.0", md5="1c9f62f0778697a09d36121ead88e08e")
    version("8.1.2", md5="d47dd09ed7ae6e7fd6f9a816d7f5fdf6")

-Versions should be listed in descending order, from newest to oldest.
+.. note::
+
+   By convention, we list versions in descending order, from newest to oldest.
+
+.. note::
+
+   :ref:`Bundle packages <bundlepackage>` do not have source code so
+   there is nothing to fetch. Consequently, their version directives
+   consist solely of the version name (e.g., ``version("202309")``).
+

 ^^^^^^^^^^^^^
 Date Versions
```
```diff
@@ -2678,7 +2723,7 @@ Conflicts and requirements
 --------------------------

 Sometimes packages have known bugs, or limitations, that would prevent them
-to build e.g. against other dependencies or with certain compilers. Spack
+from building e.g. against other dependencies or with certain compilers. Spack
 makes it possible to express such constraints with the ``conflicts`` directive.

 Adding the following to a package:
```
```diff
@@ -4773,17 +4818,17 @@ For example, running:

 results in spack checking that the installation created the following **file**:

-* ``self.prefix/bin/reframe``
+* ``self.prefix.bin.reframe``

 and the following **directories**:

-* ``self.prefix/bin``
-* ``self.prefix/config``
-* ``self.prefix/docs``
-* ``self.prefix/reframe``
-* ``self.prefix/tutorials``
-* ``self.prefix/unittests``
-* ``self.prefix/cscs-checks``
+* ``self.prefix.bin``
+* ``self.prefix.config``
+* ``self.prefix.docs``
+* ``self.prefix.reframe``
+* ``self.prefix.tutorials``
+* ``self.prefix.unittests``
+* ``self.prefix.cscs-checks``

 If **any** of these paths are missing, then Spack considers the installation
 to have failed.
```
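For context, the file and directory lists above are driven by the package's sanity-check attributes. A sketch of what the corresponding `reframe` declarations would look like follows; the attribute names (`sanity_check_is_file`, `sanity_check_is_dir`) are Spack's API, while the exact values are inferred from the lists above.

```python
# Sketch of the attributes behind the checks listed above; the values
# mirror the reframe example, the attribute names are Spack's own.
from spack.package import *


class Reframe(Package):
    ...
    sanity_check_is_file = [join_path("bin", "reframe")]
    sanity_check_is_dir = [
        "bin", "config", "docs", "reframe",
        "tutorials", "unittests", "cscs-checks",
    ]
```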
```diff
@@ -4927,7 +4972,7 @@ installed executable. The check is implemented as follows:
    @on_package_attributes(run_tests=True)
    def check_list(self):
        with working_dir(self.stage.source_path):
-           reframe = Executable(join_path(self.prefix, "bin", "reframe"))
+           reframe = Executable(self.prefix.bin.reframe)
            reframe("-l")

 .. warning::
@@ -5147,8 +5192,8 @@ embedded test parts.
        for example in ["ex1", "ex2"]:
            with test_part(
                self,
-               "test_example_{0}".format(example),
-               purpose="run installed {0}".format(example),
+               f"test_example_{example}",
+               purpose=f"run installed {example}",
            ):
                exe = which(join_path(self.prefix.bin, example))
                exe()
@@ -5226,11 +5271,10 @@ Below illustrates using this feature to compile an example.
        ...
        cxx = which(os.environ["CXX"])
        cxx(
-           "-L{0}".format(self.prefix.lib),
-           "-I{0}".format(self.prefix.include),
-           "{0}.cpp".format(exe),
-           "-o",
-           exe
+           f"-L{self.prefix.lib}",
+           f"-I{self.prefix.include}",
+           f"{exe}.cpp",
+           "-o", exe
        )
        cxx_example = which(exe)
        cxx_example()
```
```diff
@@ -5247,14 +5291,14 @@ Saving build-time files
 We highly recommend re-using build-time test sources and pared down
 input files for testing installed software. These files are easier
 to keep synchronized with software capabilities since they reside
 within the software's repository.

 If that is not possible, you can add test-related files to the package
 repository (see :ref:`adding custom files <cache_custom_files>`). It
 will be important to maintain them so they work across listed or supported
 versions of the package.

-You can use the ``cache_extra_test_sources`` method to copy directories
+You can use the ``cache_extra_test_sources`` helper to copy directories
 and or files from the source build stage directory to the package's
 installation directory.

@@ -5262,10 +5306,15 @@ The signature for ``cache_extra_test_sources`` is:

 .. code-block:: python

-   def cache_extra_test_sources(self, srcs):
+   def cache_extra_test_sources(pkg, srcs):

-where ``srcs`` is a string *or* a list of strings corresponding to the
-paths of subdirectories and or files needed for stand-alone testing.
+where each argument has the following meaning:
+
+* ``pkg`` is an instance of the package for the spec under test.
+
+* ``srcs`` is a string *or* a list of strings corresponding to the
+  paths of subdirectories and or files needed for stand-alone testing.
+
 The paths must be relative to the staged source directory. Contents of
 subdirectories and files are copied to a special test cache subdirectory
 of the installation prefix. They are automatically copied to the appropriate
@@ -5286,21 +5335,18 @@ and using ``foo.c`` in a test method is illustrated below.
        srcs = ["tests",
                join_path("examples", "foo.c"),
                join_path("examples", "bar.c")]
-       self.cache_extra_test_sources(srcs)
+       cache_extra_test_sources(self, srcs)

    def test_foo(self):
        exe = "foo"
-       src_dir = join_path(
-           self.test_suite.current_test_cache_dir, "examples"
-       )
+       src_dir = self.test_suite.current_test_cache_dir.examples
        with working_dir(src_dir):
            cc = which(os.environ["CC"])
            cc(
-               "-L{0}".format(self.prefix.lib),
-               "-I{0}".format(self.prefix.include),
-               "{0}.c".format(exe),
-               "-o",
-               exe
+               f"-L{self.prefix.lib}",
+               f"-I{self.prefix.include}",
+               f"{exe}.c",
+               "-o", exe
            )
            foo = which(exe)
            foo()
@@ -5326,9 +5372,9 @@ the files using the ``self.test_suite.current_test_cache_dir`` property.
 In our example above, test methods can use the following paths to reference
 the copy of each entry listed in ``srcs``, respectively:

-* ``join_path(self.test_suite.current_test_cache_dir, "tests")``
-* ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
-* ``join_path(self.test_suite.current_test_cache_dir, "examples", "bar.c")``
+* ``self.test_suite.current_test_cache_dir.tests``
+* ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
+* ``join_path(self.test_suite.current_test_cache_dir.examples, "bar.c")``

 .. admonition:: Library packages should build stand-alone tests

@@ -5347,7 +5393,7 @@ the copy of each entry listed in ``srcs``, respectively:
 If one or more of the copied files needs to be modified to reference
 the installed software, it is recommended that those changes be made
 to the cached files **once** in the ``copy_test_sources`` method and
-**after** the call to ``self.cache_extra_test_sources()``. This will
+**after** the call to ``cache_extra_test_sources()``. This will
 reduce the amount of unnecessary work in the test method **and** avoid
 problems testing in shared instances and facility deployments.
```
```diff
@@ -5394,7 +5440,7 @@ property as shown below.
        """build and run custom-example"""
        data_dir = self.test_suite.current_test_data_dir
        exe = "custom-example"
-       src = datadir.join("{0}.cpp".format(exe))
+       src = datadir.join(f"{exe}.cpp")
        ...
        # TODO: Build custom-example using src and exe
        ...
@@ -5410,7 +5456,7 @@ Reading expected output from a file

 The helper function ``get_escaped_text_output`` is available for packages
 to retrieve and properly format the text from a file that contains the
 expected output from running an executable that may contain special
 characters.

 The signature for ``get_escaped_text_output`` is:
@@ -5444,7 +5490,7 @@ added to the package's ``test`` subdirectory.
        db_filename, ".dump", output=str.split, error=str.split
    )
    for exp in expected:
-       assert re.search(exp, out), "Expected '{0}' in output".format(exp)
+       assert re.search(exp, out), f"Expected '{exp}' in output"

 If the file was instead copied from the ``tests`` subdirectory of the staged
 source code, the path would be obtained as shown below.
@@ -5457,7 +5503,7 @@ source code, the path would be obtained as shown below.
    db_filename = test_cache_dir.join("packages.db")

 Alternatively, if the file was copied to the ``share/tests`` subdirectory
 as part of the installation process, the test could access the path as
 follows:

 .. code-block:: python
@@ -5494,9 +5540,12 @@ Invoking the method is the equivalent of:

 .. code-block:: python

+   errors = []
    for check in expected:
        if not re.search(check, actual):
-           raise RuntimeError("Expected '{0}' in output '{1}'".format(check, actual))
+           errors.append(f"Expected '{check}' in output '{actual}'")
+   if errors:
+       raise RuntimeError("\n    ".join(errors))

 .. _accessing-files:
```
```diff
@@ -5536,7 +5585,7 @@ repository, and installation.
      - ``self.test_suite.test_dir_for_spec(self.spec)``
    * - Current Spec's Build-time Files
      - ``self.test_suite.current_test_cache_dir``
-     - ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
+     - ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
    * - Current Spec's Custom Test Files
      - ``self.test_suite.current_test_data_dir``
      - ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
@@ -5551,7 +5600,7 @@ Inheriting stand-alone tests

 Stand-alone tests defined in parent (e.g., :ref:`build-systems`) and
 virtual (e.g., :ref:`virtual-dependencies`) packages are executed by
 packages that inherit from or provide interface implementations for those
 packages, respectively.

 The table below summarizes the stand-alone tests that will be executed along
 with those implemented in the package itself.
@@ -5621,7 +5670,7 @@ for ``openmpi``:
    SKIPPED: test_version_oshcc: oshcc is not installed
    ...
    ==> [2023-03-10-16:04:02.215227] Completed testing
    ==> [2023-03-10-16:04:02.215597]
    ======================== SUMMARY: openmpi-4.1.4-ubmrigj ========================
    Openmpi::test_bin_mpirun .. PASSED
    Openmpi::test_bin_ompi_info .. PASSED
@@ -6071,7 +6120,7 @@ in the extra attributes can implement this method like this:
    @classmethod
    def validate_detected_spec(cls, spec, extra_attributes):
        """Check that "compilers" is in the extra attributes."""
-       msg = ("the extra attribute "compilers" must be set for "
+       msg = ("the extra attribute 'compilers' must be set for "
               "the detected spec '{0}'".format(spec))
        assert "compilers" in extra_attributes, msg
```
@@ -6147,7 +6196,100 @@ follows:
|
||||
"foo-package@{0}".format(version_str)
|
||||
)
|
||||
|
||||
.. _package-lifecycle:
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Add detection tests to packages
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To ensure that software is detected correctly for multiple configurations
|
||||
and on different systems users can write a ``detection_test.yaml`` file and
|
||||
put it in the package directory alongside the ``package.py`` file.
|
||||
This YAML file contains enough information for Spack to mock an environment
|
||||
and try to check if the detection logic yields the results that are expected.
|
||||
|
||||
As a general rule, attributes at the top-level of ``detection_test.yaml``
|
||||
represent search mechanisms and they each map to a list of tests that should confirm
|
||||
the validity of the package's detection logic.
|
||||
|
||||
The detection tests can be run with the following command:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack audit externals
|
||||
|
||||
Errors that have been detected are reported to screen.
|
||||
|
||||
""""""""""""""""""""""""""
|
||||
Tests for PATH inspections
|
||||
""""""""""""""""""""""""""
|
||||
|
||||
Detection tests insisting on ``PATH`` inspections are listed under
|
||||
the ``paths`` attribute:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
paths:
|
||||
- layout:
|
||||
- executables:
|
||||
- "bin/clang-3.9"
|
||||
- "bin/clang++-3.9"
|
||||
script: |
|
||||
echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)"
|
||||
echo "Target: x86_64-pc-linux-gnu"
|
||||
echo "Thread model: posix"
|
||||
echo "InstalledDir: /usr/bin"
|
||||
results:
|
||||
- spec: 'llvm@3.9.1 +clang~lld~lldb'
|
||||
|
||||
Each test is performed by first creating a temporary directory structure as
|
||||
specified in the corresponding ``layout`` and by then running
|
||||
package detection and checking that the outcome matches the expected
|
||||
``results``. The exact details on how to specify both the ``layout`` and the
|
||||
``results`` are reported in the table below:
|
||||
|
||||
.. list-table:: Test based on PATH inspections
   :header-rows: 1

   * - Option Name
     - Description
     - Allowed Values
     - Required Field
   * - ``layout``
     - Specifies the filesystem tree used for the test
     - List of objects
     - Yes
   * - ``layout:[0]:executables``
     - Relative paths for the mock executables to be created
     - List of strings
     - Yes
   * - ``layout:[0]:script``
     - Mock logic for the executable
     - Any valid shell script
     - Yes
   * - ``results``
     - List of expected results
     - List of objects (empty if no result is expected)
     - Yes
   * - ``results:[0]:spec``
     - A spec that is expected from detection
     - Any valid spec
     - Yes

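Because ``results`` may be an empty list, the same mechanism can express a
negative test, i.e. a layout from which no spec is expected to be detected.
A hypothetical sketch (the executable name and script below are made up):

.. code-block:: yaml

   paths:
   - layout:
     - executables:
       - "bin/clang-nonexistent"
       script: |
         echo "this is not a recognizable compiler"
     results: []
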
"""""""""""""""""""""""""""""""
|
||||
Reuse tests from other packages
|
||||
"""""""""""""""""""""""""""""""
|
||||
|
||||
When using a custom repository, it is possible to customize a package that already exists in ``builtin``
|
||||
and reuse its external tests. To do so, just write a ``detection_tests.yaml`` alongside the customized
|
||||
``package.py`` with an ``includes`` attribute. For instance the ``detection_tests.yaml`` for
|
||||
``myrepo.llvm`` might look like:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
includes:
|
||||
- "builtin.llvm"
|
||||
|
||||
This YAML file instructs Spack to run the detection tests defined in ``builtin.llvm`` in addition to
|
||||
those locally defined in the file.
|
||||
|
||||
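Since the included tests run in addition to the locally defined ones, the two
can be combined in the same file. A hypothetical sketch for ``myrepo.llvm``
(the executable name, script, and expected spec are made up for illustration):

.. code-block:: yaml

   includes:
   - "builtin.llvm"
   paths:
   - layout:
     - executables:
       - "bin/clang-12"
       script: |
         echo "clang version 12.0.0"
     results:
     - spec: 'llvm@12.0.0 +clang'
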
-----------------------------
Style guidelines for packages
-----------------------------

@@ -1,13 +1,13 @@
-sphinx==7.2.2
+sphinx==7.2.6
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.5.0
 sphinx-rtd-theme==1.3.0
 python-levenshtein==0.21.1
 docutils==0.18.1
 pygments==2.16.1
-urllib3==2.0.4
-pytest==7.4.0
+urllib3==2.0.5
+pytest==7.4.2
 isort==5.12.0
-black==23.7.0
+black==23.9.1
 flake8==6.1.0
 mypy==1.5.1

lib/spack/external/__init__.py (vendored)
@@ -18,7 +18,7 @@

 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.1 (commit 9e1117bd8a2f0581bced161f2a2e8d6294d0300b)
+* Version: 0.2.1 (commit df43a1834460bf94516136951c4729a3100603ec)

 astunparse
 ----------------

lib/spack/external/archspec/__init__.py (vendored)
@@ -1,2 +1,2 @@
 """Init file to avoid namespace packages"""
-__version__ = "0.2.0"
+__version__ = "0.2.1"

@@ -79,14 +79,18 @@ def __init__(self, name, parents, vendor, features, compilers, generation=0):
         self.features = features
         self.compilers = compilers
         self.generation = generation
+        # Cache the ancestor computation
+        self._ancestors = None

     @property
     def ancestors(self):
         """All the ancestors of this microarchitecture."""
-        value = self.parents[:]
-        for parent in self.parents:
-            value.extend(a for a in parent.ancestors if a not in value)
-        return value
+        if self._ancestors is None:
+            value = self.parents[:]
+            for parent in self.parents:
+                value.extend(a for a in parent.ancestors if a not in value)
+            self._ancestors = value
+        return self._ancestors

     def _to_set(self):
         """Returns a set of the nodes in this microarchitecture DAG."""

@@ -145,6 +145,13 @@
             "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3"
           }
         ],
+        "intel": [
+          {
+            "versions": "16.0:",
+            "name": "corei7",
+            "flags": "-march={name} -mtune=generic -mpopcnt"
+          }
+        ],
         "oneapi": [
           {
             "versions": "2021.2.0:",

@@ -217,6 +224,13 @@
             "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave"
           }
         ],
+        "intel": [
+          {
+            "versions": "16.0:",
+            "name": "core-avx2",
+            "flags": "-march={name} -mtune={name} -fma -mf16c"
+          }
+        ],
         "oneapi": [
           {
             "versions": "2021.2.0:",

@@ -300,6 +314,13 @@
             "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
           }
         ],
+        "intel": [
+          {
+            "versions": "16.0:",
+            "name": "skylake-avx512",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
         "oneapi": [
           {
             "versions": "2021.2.0:",

@@ -1412,6 +1433,92 @@
         ]
       }
     },
+    "sapphirerapids": {
+      "from": [
+        "icelake"
+      ],
+      "vendor": "GenuineIntel",
+      "features": [
+        "mmx",
+        "sse",
+        "sse2",
+        "ssse3",
+        "sse4_1",
+        "sse4_2",
+        "popcnt",
+        "aes",
+        "pclmulqdq",
+        "avx",
+        "rdrand",
+        "f16c",
+        "movbe",
+        "fma",
+        "avx2",
+        "bmi1",
+        "bmi2",
+        "rdseed",
+        "adx",
+        "clflushopt",
+        "xsavec",
+        "xsaveopt",
+        "avx512f",
+        "avx512vl",
+        "avx512bw",
+        "avx512dq",
+        "avx512cd",
+        "avx512vbmi",
+        "avx512ifma",
+        "sha_ni",
+        "clwb",
+        "rdpid",
+        "gfni",
+        "avx512_vbmi2",
+        "avx512_vpopcntdq",
+        "avx512_bitalg",
+        "avx512_vnni",
+        "vpclmulqdq",
+        "vaes",
+        "avx512_bf16",
+        "cldemote",
+        "movdir64b",
+        "movdiri",
+        "pdcm",
+        "serialize",
+        "waitpkg"
+      ],
+      "compilers": {
+        "gcc": [
+          {
+            "versions": "11.0:",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "clang": [
+          {
+            "versions": "12.0:",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "intel": [
+          {
+            "versions": "2021.2:",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "oneapi": [
+          {
+            "versions": "2021.2:",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ],
+        "dpcpp": [
+          {
+            "versions": "2021.2:",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ]
+      }
+    },
     "k10": {
       "from": ["x86_64"],
       "vendor": "AuthenticAMD",

@@ -2065,8 +2172,6 @@
         "pku",
         "gfni",
         "flush_l1d",
-        "erms",
-        "avic",
         "avx512f",
         "avx512dq",
         "avx512ifma",

@@ -2083,12 +2188,12 @@
       "compilers": {
         "gcc": [
           {
-            "versions": "10.3:13.0",
+            "versions": "10.3:12.2",
             "name": "znver3",
             "flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg"
           },
           {
-            "versions": "13.1:",
+            "versions": "12.3:",
             "name": "znver4",
             "flags": "-march={name} -mtune={name}"
           }

lib/spack/llnl/path.py (new file, 105 lines)
@@ -0,0 +1,105 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Path primitives that just require Python standard library."""
import functools
import sys
from typing import List, Optional
from urllib.parse import urlparse


class Path:
    """Enum to identify the path-style."""

    unix: int = 0
    windows: int = 1
    platform_path: int = windows if sys.platform == "win32" else unix


def format_os_path(path: str, mode: int = Path.unix) -> str:
    """Formats the input path to use consistent, platform specific separators.

    Absolute paths are converted between drive letters and a prepended '/' as per platform
    requirement.

    Parameters:
        path: the path to be normalized, must be a string or expose the replace method.
        mode: the path file separator style to normalize the passed path to.
            Default is unix style, i.e. '/'
    """
    if not path:
        return path
    if mode == Path.windows:
        path = path.replace("/", "\\")
    else:
        path = path.replace("\\", "/")
    return path


def convert_to_posix_path(path: str) -> str:
    """Converts the input path to POSIX style."""
    return format_os_path(path, mode=Path.unix)


def convert_to_windows_path(path: str) -> str:
    """Converts the input path to Windows style."""
    return format_os_path(path, mode=Path.windows)


def convert_to_platform_path(path: str) -> str:
    """Converts the input path to the current platform's native style."""
    return format_os_path(path, mode=Path.platform_path)


def path_to_os_path(*parameters: str) -> List[str]:
    """Takes an arbitrary number of positional parameters, converts each argument of type
    string to use a normalized filepath separator, and returns a list of all values.
    """

    def _is_url(path_or_url: str) -> bool:
        if "\\" in path_or_url:
            return False
        url_tuple = urlparse(path_or_url)
        return bool(url_tuple.scheme) and len(url_tuple.scheme) > 1

    result = []
    for item in parameters:
        if isinstance(item, str) and not _is_url(item):
            item = convert_to_platform_path(item)
        result.append(item)
    return result


def system_path_filter(_func=None, arg_slice: Optional[slice] = None):
    """Filters function arguments to account for platform path separators.
    An optional slicing range can be specified to select specific arguments.

    This decorator takes all (or a slice) of a method's positional arguments
    and normalizes usage of filepath separators on a per-platform basis.

    Note: ``**kwargs``, urls, and any type that is not a string are ignored,
    so in such cases where path normalization is required, that should be
    handled by calling ``path_to_os_path`` directly as needed.

    Parameters:
        arg_slice: a slice object specifying the slice of arguments
            in the decorated method over which filepath separators are
            normalized
    """

    def holder_func(func):
        @functools.wraps(func)
        def path_filter_caller(*args, **kwargs):
            args = list(args)
            if arg_slice:
                args[arg_slice] = path_to_os_path(*args[arg_slice])
            else:
                args = path_to_os_path(*args)
            return func(*args, **kwargs)

        return path_filter_caller

    if _func:
        return holder_func(_func)
    return holder_func

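# ---------------------------------------------------------------------------
# Editor's sketch (not part of the diff): how the helpers above are meant to
# be used. The module path `llnl.path` follows from the new file's location.
#
#     from llnl.path import convert_to_posix_path, system_path_filter
#
#     assert convert_to_posix_path("a\\b\\c") == "a/b/c"
#
#     @system_path_filter(arg_slice=slice(0, 1))
#     def relocate(path, marker):
#         # only the first positional argument has its separators
#         # normalized; `marker` is passed through untouched
#         return path, marker
# ---------------------------------------------------------------------------
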
lib/spack/llnl/string.py (new file, 67 lines)
@@ -0,0 +1,67 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""String manipulation functions that do not have other dependencies than Python
standard library
"""
from typing import List, Optional


def comma_list(sequence: List[str], article: str = "") -> str:
    if type(sequence) is not list:
        sequence = list(sequence)

    if not sequence:
        return ""
    if len(sequence) == 1:
        return sequence[0]

    out = ", ".join(str(s) for s in sequence[:-1])
    if len(sequence) != 2:
        out += ","  # oxford comma
    out += " "
    if article:
        out += article + " "
    out += str(sequence[-1])
    return out


def comma_or(sequence: List[str]) -> str:
    """Return a string with all the elements of the input joined by commas,
    except the last one, which is joined by 'or'.
    """
    return comma_list(sequence, "or")


def comma_and(sequence: List[str]) -> str:
    """Return a string with all the elements of the input joined by commas,
    except the last one, which is joined by 'and'.
    """
    return comma_list(sequence, "and")


def quote(sequence: List[str], q: str = "'") -> List[str]:
    """Quotes each item in the input list with the quote character passed as second argument."""
    return [f"{q}{e}{q}" for e in sequence]


def plural(n: int, singular: str, plural: Optional[str] = None, show_n: bool = True) -> str:
    """Pluralize <singular> word by adding an s if n != 1.

    Arguments:
        n: number of things there are
        singular: singular form of word
        plural: optional plural form, for when it's not just singular + 's'
        show_n: whether to include n in the result string (default True)

    Returns:
        "1 thing" if n == 1 or "n things" if n != 1
    """
    number = f"{n} " if show_n else ""
    if n == 1:
        return f"{number}{singular}"
    elif plural is not None:
        return f"{number}{plural}"
    else:
        return f"{number}{singular}s"

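# ---------------------------------------------------------------------------
# Editor's sketch (not part of the diff): a few concrete values pin down the
# corner cases of the helpers above.
#
#     from llnl.string import comma_and, comma_or, plural, quote
#
#     comma_and(["a", "b", "c"])  # -> "a, b, and c"  (oxford comma for 3+)
#     comma_or(["a", "b"])        # -> "a or b"       (no comma for exactly 2)
#     plural(1, "package")        # -> "1 package"
#     plural(3, "package")        # -> "3 packages"
#     quote(["x"])                # -> ["'x'"]
# ---------------------------------------------------------------------------
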
lib/spack/llnl/url.py (new file, 459 lines)
@@ -0,0 +1,459 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""URL primitives that just require Python standard library."""
import itertools
import os.path
import re
from typing import Optional, Set, Tuple
from urllib.parse import urlsplit, urlunsplit

# Archive extensions allowed in Spack
PREFIX_EXTENSIONS = ("tar", "TAR")
EXTENSIONS = ("gz", "bz2", "xz", "Z")
NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz")

# Add PREFIX_EXTENSIONS and EXTENSIONS last so that .tar.gz is matched *before* .tar or .gz
ALLOWED_ARCHIVE_TYPES = (
    tuple(".".join(ext) for ext in itertools.product(PREFIX_EXTENSIONS, EXTENSIONS))
    + PREFIX_EXTENSIONS
    + EXTENSIONS
    + NO_TAR_EXTENSIONS
)
CONTRACTION_MAP = {"tgz": "tar.gz", "txz": "tar.xz", "tbz": "tar.bz2", "tbz2": "tar.bz2"}


def find_list_urls(url: str) -> Set[str]:
    r"""Find good list URLs for the supplied URL.

    By default, returns the dirname of the archive path.

    Provides special treatment for the following websites, which have a
    unique list URL different from the dirname of the download URL:

    =========  =======================================================
    GitHub     https://github.com/<repo>/<name>/releases
    GitLab     https://gitlab.\*/<repo>/<name>/tags
    BitBucket  https://bitbucket.org/<repo>/<name>/downloads/?tab=tags
    CRAN       https://\*.r-project.org/src/contrib/Archive/<name>
    PyPI       https://pypi.org/simple/<name>/
    LuaRocks   https://luarocks.org/modules/<repo>/<name>
    =========  =======================================================

    Note: this function is called by `spack versions`, `spack checksum`,
    and `spack create`, but not by `spack fetch` or `spack install`.

    Parameters:
        url (str): The download URL for the package

    Returns:
        set: One or more list URLs for the package
    """

    url_types = [
        # GitHub
        # e.g. https://github.com/llnl/callpath/archive/v1.0.1.tar.gz
        (r"(.*github\.com/[^/]+/[^/]+)", lambda m: m.group(1) + "/releases"),
        # GitLab API endpoint
        # e.g. https://gitlab.dkrz.de/api/v4/projects/k202009%2Flibaec/repository/archive.tar.gz?sha=v1.0.2
        (
            r"(.*gitlab[^/]+)/api/v4/projects/([^/]+)%2F([^/]+)",
            lambda m: m.group(1) + "/" + m.group(2) + "/" + m.group(3) + "/tags",
        ),
        # GitLab non-API endpoint
        # e.g. https://gitlab.dkrz.de/k202009/libaec/uploads/631e85bcf877c2dcaca9b2e6d6526339/libaec-1.0.0.tar.gz
        (r"(.*gitlab[^/]+/(?!api/v4/projects)[^/]+/[^/]+)", lambda m: m.group(1) + "/tags"),
        # BitBucket
        # e.g. https://bitbucket.org/eigen/eigen/get/3.3.3.tar.bz2
        (r"(.*bitbucket.org/[^/]+/[^/]+)", lambda m: m.group(1) + "/downloads/?tab=tags"),
        # CRAN
        # e.g. https://cran.r-project.org/src/contrib/Rcpp_0.12.9.tar.gz
        # e.g. https://cloud.r-project.org/src/contrib/rgl_0.98.1.tar.gz
        (
            r"(.*\.r-project\.org/src/contrib)/([^_]+)",
            lambda m: m.group(1) + "/Archive/" + m.group(2),
        ),
        # PyPI
        # e.g. https://pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
        # e.g. https://www.pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
        # e.g. https://pypi.org/packages/source/n/numpy/numpy-1.19.4.zip
        # e.g. https://pypi.python.org/packages/source/n/numpy/numpy-1.19.4.zip
        # e.g. https://files.pythonhosted.org/packages/source/n/numpy/numpy-1.19.4.zip
        # e.g. https://pypi.io/packages/py2.py3/o/opencensus-context/opencensus_context-0.1.1-py2.py3-none-any.whl
        (
            r"(?:pypi|pythonhosted)[^/]+/packages/[^/]+/./([^/]+)",
            lambda m: "https://pypi.org/simple/" + m.group(1) + "/",
        ),
        # LuaRocks
        # e.g. https://luarocks.org/manifests/gvvaughan/lpeg-1.0.2-1.src.rock
        # e.g. https://luarocks.org/manifests/openresty/lua-cjson-2.1.0-1.src.rock
        (
            r"luarocks[^/]+/(?:modules|manifests)/(?P<org>[^/]+)/"
            + r"(?P<name>.+?)-[0-9.-]*\.src\.rock",
            lambda m: "https://luarocks.org/modules/"
            + m.group("org")
            + "/"
            + m.group("name")
            + "/",
        ),
    ]

    list_urls = {os.path.dirname(url)}

    for pattern, fun in url_types:
        match = re.search(pattern, url)
        if match:
            list_urls.add(fun(match))

    return list_urls


def strip_query_and_fragment(url: str) -> Tuple[str, str]:
    """Strips query and fragment from a url, then returns the base url and the suffix.

    Args:
        url: URL to be stripped

    Raises:
        ValueError: when there is any error parsing the URL
    """
    components = urlsplit(url)
    stripped = components[:3] + (None, None)

    query, frag = components[3:5]
    suffix = ""
    if query:
        suffix += "?" + query
    if frag:
        suffix += "#" + frag

    return urlunsplit(stripped), suffix


SOURCEFORGE_RE = re.compile(r"(.*(?:sourceforge\.net|sf\.net)/.*)(/download)$")


def split_url_on_sourceforge_suffix(url: str) -> Tuple[str, ...]:
    """If the input is a SourceForge URL, returns base URL and "/download" suffix. Otherwise,
    returns the input URL and an empty string.
    """
    match = SOURCEFORGE_RE.search(url)
    if match is not None:
        return match.groups()
    return url, ""


def has_extension(path_or_url: str, ext: str) -> bool:
    """Returns true if the extension in input is present in path, false otherwise."""
    prefix, _ = split_url_on_sourceforge_suffix(path_or_url)
    if not ext.startswith(r"\."):
        ext = rf"\.{ext}$"

    if re.search(ext, prefix):
        return True
    return False


def extension_from_path(path_or_url: Optional[str]) -> Optional[str]:
    """Tries to match an allowed archive extension to the input. Returns the first match,
    or None if no match was found.

    Raises:
        ValueError: if the input is None
    """
    if path_or_url is None:
        raise ValueError("Can't call extension() on None")

    for t in ALLOWED_ARCHIVE_TYPES:
        if has_extension(path_or_url, t):
            return t
    return None


def remove_extension(path_or_url: str, *, extension: str) -> str:
    """Returns the input with the extension removed"""
    suffix = rf"\.{extension}$"
    return re.sub(suffix, "", path_or_url)


def check_and_remove_ext(path: str, *, extension: str) -> str:
    """Returns the input path with the extension removed, if the extension is present in path.
    Otherwise, returns the input unchanged.
    """
    if not has_extension(path, extension):
        return path
    path, _ = split_url_on_sourceforge_suffix(path)
    return remove_extension(path, extension=extension)


def strip_extension(path_or_url: str, *, extension: Optional[str] = None) -> str:
    """If a path contains the extension in input, returns the path stripped of the extension.
    Otherwise, returns the input path.

    If extension is None, attempts to strip any allowed extension from path.
    """
    if extension is None:
        for t in ALLOWED_ARCHIVE_TYPES:
            if has_extension(path_or_url, ext=t):
                extension = t
                break
        else:
            return path_or_url

    return check_and_remove_ext(path_or_url, extension=extension)


def split_url_extension(url: str) -> Tuple[str, ...]:
    """Some URLs have a query string, e.g.:

    1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true
    2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz
    3. https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0

    In (1), the query string needs to be stripped to get at the
    extension, but in (2) & (3), the filename is IN a single final query
    argument.

    This strips the URL into three pieces: ``prefix``, ``ext``, and ``suffix``.
    The suffix contains anything that was stripped off the URL to
    get at the file extension. In (1), it will be ``'?raw=true'``, but
    in (2), it will be empty. In (3) the suffix is a parameter that follows
    after the file extension, e.g.:

    1. ``('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')``
    2. ``('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin', '.tar.gz', None)``
    3. ``('https://gitlab.kitware.com/vtk/vtk/repository/archive', '.tar.bz2', '?ref=v7.0.0')``
    """
    # Strip off sourceforge download suffix.
    # e.g. https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download
    prefix, suffix = split_url_on_sourceforge_suffix(url)

    ext = extension_from_path(prefix)
    if ext is not None:
        prefix = strip_extension(prefix)
        return prefix, ext, suffix

    try:
        prefix, suf = strip_query_and_fragment(prefix)
    except ValueError:
        # FIXME: tty.debug("Got error parsing path %s" % path)
        # Ignore URL parse errors here
        return url, ""

    ext = extension_from_path(prefix)
    prefix = strip_extension(prefix)
    suffix = suf + suffix
    if ext is None:
        ext = ""

    return prefix, ext, suffix


def strip_version_suffixes(path_or_url: str) -> str:
    """Some tarballs contain extraneous information after the version:

    * ``bowtie2-2.2.5-source``
    * ``libevent-2.0.21-stable``
    * ``cuda_8.0.44_linux.run``

    These strings are not part of the version number and should be ignored.
    This function strips those suffixes off and returns the remaining string.
    The goal is that the version is always the last thing in ``path``:

    * ``bowtie2-2.2.5``
    * ``libevent-2.0.21``
    * ``cuda_8.0.44``

    Args:
        path_or_url: The filename or URL for the package

    Returns:
        The ``path`` with any extraneous suffixes removed
    """
    # NOTE: This could be done with complicated regexes in parse_version_offset
    # NOTE: The problem is that we would have to add these regexes to the end
    # NOTE: of every single version regex. Easier to just strip them off
    # NOTE: permanently

    suffix_regexes = [
        # Download type
        r"[Ii]nstall",
        r"all",
        r"code",
        r"[Ss]ources?",
        r"file",
        r"full",
        r"single",
        r"with[a-zA-Z_-]+",
        r"rock",
        r"src(_0)?",
        r"public",
        r"bin",
        r"binary",
        r"run",
        r"[Uu]niversal",
        r"jar",
        r"complete",
        r"dynamic",
        r"oss",
        r"gem",
        r"tar",
        r"sh",
        # Download version
        r"release",
        r"bin",
        r"stable",
        r"[Ff]inal",
        r"rel",
        r"orig",
        r"dist",
        r"\+",
        # License
        r"gpl",
        # Arch
        # Needs to come before and after OS, appears in both orders
        r"ia32",
        r"intel",
        r"amd64",
        r"linux64",
        r"x64",
        r"64bit",
        r"x86[_-]64",
        r"i586_64",
        r"x86",
        r"i[36]86",
        r"ppc64(le)?",
        r"armv?(7l|6l|64)",
        # Other
        r"cpp",
        r"gtk",
        r"incubating",
        # OS
        r"[Ll]inux(_64)?",
        r"LINUX",
        r"[Uu]ni?x",
        r"[Ss]un[Oo][Ss]",
        r"[Mm]ac[Oo][Ss][Xx]?",
        r"[Oo][Ss][Xx]",
        r"[Dd]arwin(64)?",
        r"[Aa]pple",
        r"[Ww]indows",
        r"[Ww]in(64|32)?",
        r"[Cc]ygwin(64|32)?",
        r"[Mm]ingw",
        r"centos",
        # Arch
        # Needs to come before and after OS, appears in both orders
        r"ia32",
        r"intel",
        r"amd64",
        r"linux64",
        r"x64",
        r"64bit",
        r"x86[_-]64",
        r"i586_64",
        r"x86",
        r"i[36]86",
        r"ppc64(le)?",
        r"armv?(7l|6l|64)?",
        # PyPI
        r"[._-]py[23].*\.whl",
        r"[._-]cp[23].*\.whl",
        r"[._-]win.*\.exe",
    ]

    for regex in suffix_regexes:
        # Remove the suffix from the end of the path
        # This may be done multiple times
        path_or_url = re.sub(r"[._-]?" + regex + "$", "", path_or_url)

    return path_or_url


def expand_contracted_extension(extension: str) -> str:
    """Returns the expanded version of a known contracted extension.

    This function maps extensions like ".tgz" to ".tar.gz". On unknown extensions,
    return the input unmodified.
    """
    extension = extension.strip(".")
    return CONTRACTION_MAP.get(extension, extension)


def expand_contracted_extension_in_path(
    path_or_url: str, *, extension: Optional[str] = None
) -> str:
    """Returns the input path or URL with any contraction extension expanded.

    Args:
        path_or_url: path or URL to be expanded
        extension: if specified, only attempt to expand that extension
    """
    extension = extension or extension_from_path(path_or_url)
    if extension is None:
        return path_or_url

    expanded = expand_contracted_extension(extension)
    if expanded != extension:
        return re.sub(rf"{extension}", rf"{expanded}", path_or_url)
    return path_or_url


def compression_ext_from_compressed_archive(extension: str) -> Optional[str]:
    """Returns compression extension for a compressed archive"""
    extension = expand_contracted_extension(extension)
    for ext in [*EXTENSIONS]:
        if ext in extension:
            return ext
    return None


def strip_compression_extension(path_or_url: str, ext: Optional[str] = None) -> str:
    """Strips the compression extension from the input, and returns it. For instance,
    "foo.tgz" becomes "foo.tar".

    If no extension is given, try a default list of extensions.

    Args:
        path_or_url: input to be stripped
        ext: if given, extension to be stripped
    """
    if not extension_from_path(path_or_url):
        return path_or_url

    expanded_path = expand_contracted_extension_in_path(path_or_url)
    candidates = [ext] if ext is not None else EXTENSIONS
    for current_extension in candidates:
        modified_path = check_and_remove_ext(expanded_path, extension=current_extension)
        if modified_path != expanded_path:
            return modified_path
    return expanded_path


def allowed_archive(path_or_url: str) -> bool:
    """Returns true if the input is a valid archive, False otherwise."""
    return (
        False if not path_or_url else any(path_or_url.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
    )


def determine_url_file_extension(path: str) -> str:
    """This returns the type of archive a URL refers to. This is
    sometimes confusing because of URLs like:

    (1) https://github.com/petdance/ack/tarball/1.93_02

    Where the URL doesn't actually contain the filename. We need
    to know what type it is so that we can appropriately name files
    in mirrors.
    """
    match = re.search(r"github.com/.+/(zip|tar)ball/", path)
    if match:
        if match.group(1) == "zip":
            return "zip"
        elif match.group(1) == "tar":
            return "tar.gz"

    prefix, ext, suffix = split_url_extension(path)
    return ext

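# ---------------------------------------------------------------------------
# Editor's sketch (not part of the diff): how these primitives compose.
#
#     from llnl.url import allowed_archive, split_url_extension, strip_compression_extension
#
#     url = "https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true"
#     split_url_extension(url)
#     # -> ("https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7",
#     #     "tgz", "?raw=true")
#
#     strip_compression_extension("foo.tgz")  # -> "foo.tar"
#     allowed_archive("foo.tar.gz")           # -> True
# ---------------------------------------------------------------------------
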
@@ -11,6 +11,7 @@
 import itertools
 import numbers
 import os
+import pathlib
 import posixpath
 import re
 import shutil
@@ -18,14 +19,17 @@
 import sys
 import tempfile
 from contextlib import contextmanager
+from itertools import accumulate
 from typing import Callable, Iterable, List, Match, Optional, Tuple, Union

+import llnl.util.symlink
 from llnl.util import tty
 from llnl.util.lang import dedupe, memoized
-from llnl.util.symlink import islink, symlink
+from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink

 from spack.util.executable import Executable, which
-from spack.util.path import path_to_os_path, system_path_filter
+from ..path import path_to_os_path, system_path_filter

 if sys.platform != "win32":
     import grp
@@ -101,7 +105,7 @@ def _nop(args, ns=None, follow_symlinks=None):
         pass

     # follow symlinks (aka don't not follow symlinks)
-    follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
+    follow = follow_symlinks or not (islink(src) and islink(dst))
     if follow:
         # use the real function if it exists
         def lookup(name):
@@ -169,7 +173,7 @@ def rename(src, dst):
     if sys.platform == "win32":
         # Windows path existence checks will sometimes fail on junctions/links/symlinks
         # so check for that case
-        if os.path.exists(dst) or os.path.islink(dst):
+        if os.path.exists(dst) or islink(dst):
             os.remove(dst)
     os.rename(src, dst)

@@ -333,8 +337,7 @@ def groupid_to_group(x):

     if string:
         regex = re.escape(regex)
-    filenames = path_to_os_path(*filenames)
-    for filename in filenames:
+    for filename in path_to_os_path(*filenames):
         msg = 'FILTER FILE: {0} [replacing "{1}"]'
         tty.debug(msg.format(filename, regex))

@@ -566,7 +569,7 @@ def set_install_permissions(path):
     # If this points to a file maintained in a Spack prefix, it is assumed that
     # this function will be invoked on the target. If the file is outside a
     # Spack-maintained prefix, the permissions should not be modified.
-    if os.path.islink(path):
+    if islink(path):
         return
     if os.path.isdir(path):
         os.chmod(path, 0o755)
@@ -635,7 +638,7 @@ def chmod_x(entry, perms):
 @system_path_filter
 def copy_mode(src, dest):
     """Set the mode of dest to that of src unless it is a link."""
-    if os.path.islink(dest):
+    if islink(dest):
         return
     src_mode = os.stat(src).st_mode
     dest_mode = os.stat(dest).st_mode
@@ -721,26 +724,12 @@ def install(src, dest):
     copy(src, dest, _permissions=True)


-@system_path_filter
-def resolve_link_target_relative_to_the_link(link):
-    """
-    os.path.isdir uses os.path.exists, which for links will check
-    the existence of the link target. If the link target is relative to
-    the link, we need to construct a pathname that is valid from
-    our cwd (which may not be the same as the link's directory)
-    """
-    target = os.readlink(link)
-    if os.path.isabs(target):
-        return target
-    link_dir = os.path.dirname(os.path.abspath(link))
-    return os.path.join(link_dir, target)
-
-
 @system_path_filter
 def copy_tree(
     src: str,
     dest: str,
     symlinks: bool = True,
+    allow_broken_symlinks: bool = sys.platform != "win32",
     ignore: Optional[Callable[[str], bool]] = None,
     _permissions: bool = False,
 ):
@@ -763,6 +752,8 @@ def copy_tree(
         src (str): the directory to copy
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
+        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks.
+            On Windows, setting this to True will raise an exception. Defaults to true on unix.
         ignore (typing.Callable): function indicating which files to ignore
         _permissions (bool): for internal use only

@@ -770,6 +761,8 @@ def copy_tree(
         IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
+    if allow_broken_symlinks and sys.platform == "win32":
+        raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
     if _permissions:
         tty.debug("Installing {0} to {1}".format(src, dest))
     else:
@@ -783,6 +776,11 @@ def copy_tree(
     if not files:
         raise IOError("No such file or directory: '{0}'".format(src))

+    # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
+    # all symlinks to this list while traversing the tree, then when finished, make all
+    # symlinks at the end.
+    links = []
+
     for src in files:
         abs_src = os.path.abspath(src)
         if not abs_src.endswith(os.path.sep):
@@ -805,7 +803,7 @@ def copy_tree(
                 ignore=ignore,
                 follow_nonexisting=True,
             ):
-                if os.path.islink(s):
+                if islink(s):
                     link_target = resolve_link_target_relative_to_the_link(s)
                     if symlinks:
                         target = os.readlink(s)
@@ -819,7 +817,9 @@ def escaped_path(path):
                         tty.debug("Redirecting link {0} to {1}".format(target, new_target))
                         target = new_target

-                    symlink(target, d)
+                    links.append((target, d, s))
+                    continue
+
                 elif os.path.isdir(link_target):
                     mkdirp(d)
                 else:
@@ -834,9 +834,17 @@ def escaped_path(path):
             set_install_permissions(d)
             copy_mode(s, d)

+    for target, d, s in links:
+        symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
+        if _permissions:
+            set_install_permissions(d)
+            copy_mode(s, d)
+

 @system_path_filter
-def install_tree(src, dest, symlinks=True, ignore=None):
+def install_tree(
+    src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
+):
     """Recursively install an entire directory tree rooted at *src*.

     Same as :py:func:`copy_tree` with the addition of setting proper
@@ -847,12 +855,21 @@ def install_tree(src, dest, symlinks=True, ignore=None):
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
         ignore (typing.Callable): function indicating which files to ignore
+        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks.
+            On Windows, setting this to True will raise an exception.

     Raises:
         IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
-    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
+    copy_tree(
+        src,
+        dest,
+        symlinks=symlinks,
+        allow_broken_symlinks=allow_broken_symlinks,
+        ignore=ignore,
+        _permissions=True,
+    )


 @system_path_filter
@@ -1256,7 +1273,12 @@ def traverse_tree(
     Keyword Arguments:
         order (str): Whether to do pre- or post-order traversal. Accepted
             values are 'pre' and 'post'
-        ignore (typing.Callable): function indicating which files to ignore
+        ignore (typing.Callable): function indicating which files to ignore. This will also
+            ignore symlinks if they point to an ignored file (regardless of whether the symlink
+            is explicitly ignored); note this only supports one layer of indirection (i.e. if
+            you have x -> y -> z, and z is ignored but x/y are not, then y would be ignored
+            but not x). To avoid this, make sure the ignore function also ignores the symlink
+            paths too.
         follow_nonexisting (bool): Whether to descend into directories in
             ``src`` that do not exist in ``dest``. Default is True
         follow_links (bool): Whether to descend into symlinks in ``src``
@@ -1283,11 +1305,24 @@ def traverse_tree(
         dest_child = os.path.join(dest_path, f)
         rel_child = os.path.join(rel_path, f)

+        # If the source path is a link and the link's source is ignored, then ignore the link too,
+        # but only do this if the ignore is defined.
+        if ignore is not None:
+            if islink(source_child) and not follow_links:
+                target = readlink(source_child)
+                all_parents = accumulate(target.split(os.sep), lambda x, y: os.path.join(x, y))
+                if any(map(ignore, all_parents)):
+                    tty.warn(
+                        f"Skipping {source_path} because the source or a part of the source's "
+                        f"path is included in the ignores."
+                    )
+                    continue
+
         # Treat as a directory
         # TODO: for symlinks, os.path.isdir looks for the link target. If the
         # target is relative to the link, then that may not resolve properly
         # relative to our cwd - see resolve_link_target_relative_to_the_link
-        if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
+        if os.path.isdir(source_child) and (follow_links or not islink(source_child)):
             # When follow_nonexisting isn't set, don't descend into dirs
             # in source that do not exist in dest
             if follow_nonexisting or os.path.exists(dest_child):
@@ -1313,7 +1348,11 @@

 def lexists_islink_isdir(path):
     """Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
-    number of stat calls."""
+    number of stat calls on unix; on Windows, os.path and symlink.islink are used instead."""
+    if sys.platform == "win32":
+        if not os.path.lexists(path):
+            return False, False, False
+        return os.path.lexists(path), islink(path), os.path.isdir(path)
     # First try to lstat, so we know if it's a link or not.
     try:
         lst = os.lstat(path)
@@ -1528,7 +1567,7 @@ def remove_if_dead_link(path):
     Parameters:
         path (str): The potential dead link
     """
-    if os.path.islink(path) and not os.path.exists(path):
+    if islink(path) and not os.path.exists(path):
         os.unlink(path)

@@ -1587,7 +1626,7 @@ def remove_linked_tree(path):
         kwargs["onerror"] = readonly_file_handler(ignore_errors=True)

     if os.path.exists(path):
-        if os.path.islink(path):
+        if islink(path):
             shutil.rmtree(os.path.realpath(path), **kwargs)
             os.unlink(path)
         else:
@@ -2388,7 +2427,7 @@ def library_dependents(self):
         """
         Set of directories where package binaries/libraries are located.
         """
-        return set([self.pkg.prefix.bin]) | self._additional_library_dependents
+        return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents

     def add_library_dependent(self, *dest):
         """
@@ -2401,9 +2440,9 @@ def add_library_dependent(self, *dest):
         """
         for pth in dest:
             if os.path.isfile(pth):
-                self._additional_library_dependents.add(os.path.dirname)
+                self._additional_library_dependents.add(pathlib.Path(pth).parent)
             else:
-                self._additional_library_dependents.add(pth)
+                self._additional_library_dependents.add(pathlib.Path(pth))

     @property
     def rpaths(self):
@@ -2416,7 +2455,7 @@ def rpaths(self):
         dependent_libs.extend(list(find_all_shared_libraries(path, recursive=True)))
         for extra_path in self._addl_rpaths:
             dependent_libs.extend(list(find_all_shared_libraries(extra_path, recursive=True)))
-        return set(dependent_libs)
+        return set([pathlib.Path(x) for x in dependent_libs])

     def add_rpath(self, *paths):
         """
@@ -2432,7 +2471,7 @@ def add_rpath(self, *paths):
         """
         self._addl_rpaths = self._addl_rpaths | set(paths)

-    def _link(self, path, dest_dir):
+    def _link(self, path: pathlib.Path, dest_dir: pathlib.Path):
         """Perform link step of simulated rpathing, installing
         symlinks of file in path to the dest_dir
         location. This method deliberately prevents
@@ -2440,27 +2479,35 @@ def _link(self, path, dest_dir):
         This is because it is both meaningless from an rpath
         perspective, and will cause an error when Developer
         mode is not enabled"""
-        file_name = os.path.basename(path)
-        dest_file = os.path.join(dest_dir, file_name)
-        if os.path.exists(dest_dir) and not dest_file == path:
+
+        def report_already_linked():
+            # We have either already symlinked or we are encountering a naming clash
+            # either way, we don't want to overwrite existing libraries
+            already_linked = islink(str(dest_file))
+            tty.debug(
+                "Linking library %s to %s failed, " % (str(path), str(dest_file))
+                + "already linked."
+                if already_linked
+                else "library with name %s already exists at location %s."
+                % (str(file_name), str(dest_dir))
+            )
+
+        file_name = path.name
+        dest_file = dest_dir / file_name
+        if not dest_file.exists() and dest_dir.exists() and not dest_file == path:
             try:
-                symlink(path, dest_file)
-            # For py2 compatibility, we have to catch the specific Windows error code
-            # associate with trying to create a file that already exists (winerror 183)
+                symlink(str(path), str(dest_file))
+            # Catch OSErrors missed by the SymlinkError checks
             except OSError as e:
                 if sys.platform == "win32" and (e.winerror == 183 or e.errno == errno.EEXIST):
-                    # We have either already symlinked or we are encoutering a naming clash
-                    # either way, we don't want to overwrite existing libraries
-                    already_linked = islink(dest_file)
-                    tty.debug(
-                        "Linking library %s to %s failed, " % (path, dest_file) + "already linked."
-                        if already_linked
-                        else "library with name %s already exists at location %s."
-                        % (file_name, dest_dir)
-                    )
-                    pass
+                    report_already_linked()
                 else:
                     raise e
+            # catch errors we raise ourselves from Spack
+            except llnl.util.symlink.AlreadyExistsError:
+                report_already_linked()

     def establish_link(self):
         """
@@ -2693,7 +2740,7 @@ def remove_directory_contents(dir):
     """Remove all contents of a directory."""
     if os.path.exists(dir):
         for entry in [os.path.join(dir, entry) for entry in os.listdir(dir)]:
-            if os.path.isfile(entry) or os.path.islink(entry):
+            if os.path.isfile(entry) or islink(entry):
                 os.unlink(entry)
             else:
                 shutil.rmtree(entry)

@@ -14,7 +14,7 @@

 from llnl.util import lang, tty

-import spack.util.string
+from ..string import plural

 if sys.platform != "win32":
     import fcntl

@@ -169,7 +169,7 @@ def _attempts_str(wait_time, nattempts):
     if nattempts <= 1:
         return ""

-    attempts = spack.util.string.plural(nattempts, "attempt")
+    attempts = plural(nattempts, "attempt")
     return " after {} and {}".format(lang.pretty_seconds(wait_time), attempts)

@@ -2,77 +2,189 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import errno
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from os.path import exists, join
|
||||
|
||||
from llnl.util import lang
|
||||
from llnl.util import lang, tty
|
||||
|
||||
from ..path import system_path_filter
|
||||
|
||||
if sys.platform == "win32":
|
||||
from win32file import CreateHardLink
|
||||
|
||||
is_windows = sys.platform == "win32"
|
||||
|
||||
def symlink(real_path, link_path):
|
||||
"""
|
||||
Create a symbolic link.
|
||||
|
||||
On Windows, use junctions if os.symlink fails.
|
||||
def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
|
||||
"""
|
||||
if sys.platform != "win32":
|
||||
os.symlink(real_path, link_path)
|
||||
elif _win32_can_symlink():
|
||||
# Windows requires target_is_directory=True when the target is a dir.
|
||||
os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
|
||||
else:
|
||||
try:
|
||||
# Try to use junctions
|
||||
_win32_junction(real_path, link_path)
|
||||
except OSError as e:
|
||||
if e.errno == errno.EEXIST:
|
||||
# EEXIST error indicates that file we're trying to "link"
|
||||
# is already present, don't bother trying to copy which will also fail
|
||||
# just raise
|
||||
raise
|
||||
Create a link.
|
||||
|
||||
On non-Windows and Windows with System Administrator
|
||||
privleges this will be a normal symbolic link via
|
||||
os.symlink.
|
||||
|
||||
On Windows without privledges the link will be a
|
||||
junction for a directory and a hardlink for a file.
|
||||
On Windows the various link types are:
|
||||
|
||||
Symbolic Link: A link to a file or directory on the
|
||||
same or different volume (drive letter) or even to
|
||||
a remote file or directory (using UNC in its path).
|
||||
Need System Administrator privileges to make these.
|
||||
|
||||
Hard Link: A link to a file on the same volume (drive
|
||||
letter) only. Every file (file's data) has at least 1
|
||||
hard link (file's name). But when this method creates
|
||||
a new hard link there will be 2. Deleting all hard
|
||||
links effectively deletes the file. Don't need System
|
||||
Administrator privileges.
|
||||
|
||||
Junction: A link to a directory on the same or different
|
||||
volume (drive letter) but not to a remote directory. Don't
|
||||
need System Administrator privileges.
|
||||
|
||||
Parameters:
|
||||
source_path (str): The real file or directory that the link points to.
|
||||
Must be absolute OR relative to the link.
|
||||
link_path (str): The path where the link will exist.
|
||||
allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
|
||||
doesn't exist. This will still raise an exception on Windows.
|
||||
"""
|
||||
source_path = os.path.normpath(source_path)
|
||||
win_source_path = source_path
|
||||
link_path = os.path.normpath(link_path)
|
||||
|
||||
# Never allow broken links on Windows.
|
||||
if sys.platform == "win32" and allow_broken_symlinks:
|
||||
raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")
|
||||
|
||||
if not allow_broken_symlinks:
|
||||
# Perform basic checks to make sure symlinking will succeed
|
||||
if os.path.lexists(link_path):
|
||||
raise AlreadyExistsError(
|
||||
f"Link path ({link_path}) already exists. Cannot create link."
|
||||
)
|
||||
|
||||
if not os.path.exists(source_path):
|
||||
if os.path.isabs(source_path) and not allow_broken_symlinks:
|
||||
# An absolute source path that does not exist will result in a broken link.
|
||||
raise SymlinkError(
|
||||
f"Source path ({source_path}) is absolute but does not exist. Resulting "
|
||||
f"link would be broken so not making link."
|
||||
)
|
||||
else:
|
||||
# If all else fails, fall back to copying files
|
||||
shutil.copyfile(real_path, link_path)
|
||||
# os.symlink can create a link when the given source path is relative to
|
||||
# the link path. Emulate this behavior and check to see if the source exists
|
||||
# relative to the link path ahead of link creation to prevent broken
|
||||
# links from being made.
|
||||
link_parent_dir = os.path.dirname(link_path)
|
||||
relative_path = os.path.join(link_parent_dir, source_path)
|
||||
if os.path.exists(relative_path):
|
||||
# In order to work on windows, the source path needs to be modified to be
|
||||
# relative because hardlink/junction dont resolve relative paths the same
|
||||
# way as os.symlink. This is ignored on other operating systems.
|
||||
win_source_path = relative_path
|
||||
elif not allow_broken_symlinks:
|
||||
raise SymlinkError(
|
||||
f"The source path ({source_path}) is not relative to the link path "
|
||||
f"({link_path}). Resulting link would be broken so not making link."
|
||||
)
|
||||
|
||||
# Create the symlink
|
||||
if sys.platform == "win32" and not _windows_can_symlink():
|
||||
_windows_create_link(win_source_path, link_path)
|
||||
else:
|
||||
os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))
|
||||
|
||||
|
||||
def islink(path):
|
||||
return os.path.islink(path) or _win32_is_junction(path)
|
||||
def islink(path: str) -> bool:
|
||||
"""Override os.islink to give correct answer for spack logic.
|
||||
|
||||
For Non-Windows: a link can be determined with the os.path.islink method.
|
||||
Windows-only methods will return false for other operating systems.
|
||||
|
||||
For Windows: spack considers symlinks, hard links, and junctions to
|
||||
all be links, so if any of those are True, return True.
|
||||
|
||||
Args:
|
||||
path (str): path to check if it is a link.
|
||||
|
||||
Returns:
|
||||
bool - whether the path is any kind link or not.
|
||||
"""
|
||||
return any([os.path.islink(path), _windows_is_junction(path), _windows_is_hardlink(path)])
|
||||
|
||||
|
||||
# '_win32' functions based on
|
||||
# https://github.com/Erotemic/ubelt/blob/master/ubelt/util_links.py
|
||||
def _win32_junction(path, link):
|
||||
# junctions require absolute paths
|
||||
if not os.path.isabs(link):
|
||||
link = os.path.abspath(link)
|
||||
def _windows_is_hardlink(path: str) -> bool:
|
||||
"""Determines if a path is a windows hard link. This is accomplished
|
||||
by looking at the number of links using os.stat. A non-hard-linked file
|
||||
will have a st_nlink value of 1, whereas a hard link will have a value
|
||||
larger than 1. Note that both the original and hard-linked file will
|
||||
return True because they share the same inode.
|
||||
|
||||
# os.symlink will fail if link exists, emulate the behavior here
|
||||
if exists(link):
|
||||
raise OSError(errno.EEXIST, "File exists: %s -> %s" % (link, path))
|
||||
Args:
|
||||
path (str): Windows path to check for a hard link
|
||||
|
||||
if not os.path.isabs(path):
|
||||
parent = os.path.join(link, os.pardir)
|
||||
path = os.path.join(parent, path)
|
||||
path = os.path.abspath(path)
|
||||
Returns:
|
||||
bool - Whether the path is a hard link or not.
|
||||
"""
|
||||
if sys.platform != "win32" or os.path.islink(path) or not os.path.exists(path):
|
||||
return False
|
||||
|
||||
CreateHardLink(link, path)
|
||||
return os.stat(path).st_nlink > 1
|
||||
|
||||
|
||||
def _windows_is_junction(path: str) -> bool:
|
||||
"""Determines if a path is a windows junction. A junction can be
|
||||
determined using a bitwise AND operation between the file's
|
||||
attribute bitmask and the known junction bitmask (0x400).
|
||||
|
||||
Args:
|
||||
path (str): A non-file path
|
||||
|
||||
Returns:
|
||||
bool - whether the path is a junction or not.
|
||||
"""
|
||||
if sys.platform != "win32" or os.path.islink(path) or os.path.isfile(path):
|
||||
return False
|
||||
|
||||
import ctypes.wintypes
|
||||
|
||||
get_file_attributes = ctypes.windll.kernel32.GetFileAttributesW # type: ignore[attr-defined]
|
||||
get_file_attributes.argtypes = (ctypes.wintypes.LPWSTR,)
|
||||
get_file_attributes.restype = ctypes.wintypes.DWORD
|
||||
|
||||
invalid_file_attributes = 0xFFFFFFFF
|
||||
reparse_point = 0x400
|
||||
file_attr = get_file_attributes(str(path))
|
||||
|
||||
if file_attr == invalid_file_attributes:
|
||||
return False
|
||||
|
||||
return file_attr & reparse_point > 0
|
||||
|
||||
|
||||
@lang.memoized
|
||||
def _win32_can_symlink():
|
||||
def _windows_can_symlink() -> bool:
|
||||
"""
|
||||
Determines if windows is able to make a symlink depending on
|
||||
the system configuration and the level of the user's permissions.
|
||||
"""
|
||||
if sys.platform != "win32":
|
||||
tty.warn("windows_can_symlink method can't be used on non-Windows OS.")
|
||||
return False
|
||||
|
||||
tempdir = tempfile.mkdtemp()
|
||||
|
||||
dpath = join(tempdir, "dpath")
|
||||
fpath = join(tempdir, "fpath.txt")
|
||||
dpath = os.path.join(tempdir, "dpath")
|
||||
fpath = os.path.join(tempdir, "fpath.txt")
|
||||
|
||||
dlink = join(tempdir, "dlink")
|
||||
flink = join(tempdir, "flink.txt")
|
||||
dlink = os.path.join(tempdir, "dlink")
|
||||
flink = os.path.join(tempdir, "flink.txt")
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
|
||||
@@ -96,24 +208,140 @@ def _win32_can_symlink():
    return can_symlink_directories and can_symlink_files


def _win32_is_junction(path):
def _windows_create_link(source: str, link: str):
    """
    Determines if a path is a win32 junction
    Attempts to create a Hard Link or Junction as an alternative
    to a symbolic link. This is called when symbolic links cannot
    be created.
    """
    if os.path.islink(path):
        return False
    if sys.platform != "win32":
        raise SymlinkError("windows_create_link method can't be used on non-Windows OS.")
    elif os.path.isdir(source):
        _windows_create_junction(source=source, link=link)
    elif os.path.isfile(source):
        _windows_create_hard_link(path=source, link=link)
    else:
        raise SymlinkError(
            f"Cannot create link from {source}. It is neither a file nor a directory."
        )

    if sys.platform == "win32":
        import ctypes.wintypes

        GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
        GetFileAttributes.argtypes = (ctypes.wintypes.LPWSTR,)
        GetFileAttributes.restype = ctypes.wintypes.DWORD
def _windows_create_junction(source: str, link: str):
    """Duly verify that the path and link are eligible to create a junction,
    then create the junction.
    """
    if sys.platform != "win32":
        raise SymlinkError("windows_create_junction method can't be used on non-Windows OS.")
    elif not os.path.exists(source):
        raise SymlinkError("Source path does not exist, cannot create a junction.")
    elif os.path.lexists(link):
        raise AlreadyExistsError("Link path already exists, cannot create a junction.")
    elif not os.path.isdir(source):
        raise SymlinkError("Source path is not a directory, cannot create a junction.")

        INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
        FILE_ATTRIBUTE_REPARSE_POINT = 0x400
    import subprocess

        res = GetFileAttributes(path)
        return res != INVALID_FILE_ATTRIBUTES and bool(res & FILE_ATTRIBUTE_REPARSE_POINT)
    cmd = ["cmd", "/C", "mklink", "/J", link, source]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    tty.debug(out.decode())
    if proc.returncode != 0:
        err = err.decode()
        tty.error(err)
        raise SymlinkError("Make junction command returned a non-zero return code.", err)

    return False

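The helper above shells out to cmd.exe, since junction creation has no direct Python API. Below is a minimal standalone sketch of the same round trip, creating a junction with `mklink /J` and reading its target back from `dir /a:l` output. It is illustrative only, not Spack code; the paths are hypothetical and it only works on Windows:

```python
import os
import re
import subprocess

def make_junction(source: str, link: str) -> None:
    # mklink is a cmd.exe builtin, so it must run through "cmd /C"
    subprocess.run(["cmd", "/C", "mklink", "/J", link, source], check=True, capture_output=True)

def read_junction(link: str) -> str:
    parent = os.path.dirname(os.path.abspath(link))
    name = os.path.basename(link)
    # "dir /a:l" lists reparse points as: <JUNCTION>   name [target]
    listing = subprocess.run(
        ["cmd", "/C", "dir", "/a:l", parent], capture_output=True, text=True, check=True
    ).stdout
    match = re.search(rf"<JUNCTION>\s+{re.escape(name)} \[(.*)]", listing)
    if match is None:
        raise RuntimeError(f"{link} does not appear to be a junction")
    return match.group(1)
```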
def _windows_create_hard_link(path: str, link: str):
    """Duly verify that the path and link are eligible to create a hard
    link, then create the hard link.
    """
    if sys.platform != "win32":
        raise SymlinkError("windows_create_hard_link method can't be used on non-Windows OS.")
    elif not os.path.exists(path):
        raise SymlinkError(f"File path {path} does not exist. Cannot create hard link.")
    elif os.path.lexists(link):
        raise AlreadyExistsError(f"Link path ({link}) already exists. Cannot create hard link.")
    elif not os.path.isfile(path):
        raise SymlinkError(f"File path ({path}) is not a file. Cannot create hard link.")
    else:
        tty.debug(f"Creating hard link {link} pointing to {path}")
        CreateHardLink(link, path)


def readlink(path: str):
    """Spack utility that overrides os.readlink so it works cross-platform"""
    if _windows_is_hardlink(path):
        return _windows_read_hard_link(path)
    elif _windows_is_junction(path):
        return _windows_read_junction(path)
    else:
        return os.readlink(path)

def _windows_read_hard_link(link: str) -> str:
    """Find all of the files that point to the same inode as the link"""
    if sys.platform != "win32":
        raise SymlinkError("Can't read hard link on non-Windows OS.")
    link = os.path.abspath(link)
    fsutil_cmd = ["fsutil", "hardlink", "list", link]
    proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    out, err = proc.communicate()
    if proc.returncode != 0:
        raise SymlinkError(f"An error occurred while reading hard link: {err.decode()}")

    # fsutil response does not include the drive name, so append it back to each linked file.
    drive, link_tail = os.path.splitdrive(os.path.abspath(link))
    links = set([os.path.join(drive, p) for p in out.decode().splitlines()])
    links.remove(link)
    if len(links) == 1:
        return links.pop()
    elif len(links) > 1:
        # TODO: How best to handle the case where 3 or more paths point to a single inode?
        raise SymlinkError(f"Found multiple paths pointing to the same inode {links}")
    else:
        raise SymlinkError("Cannot determine hard link source path.")

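The fsutil parsing above is easy to misread: `fsutil hardlink list` prints drive-less paths, so the drive has to be spliced back before comparing against the query path. A small sketch with canned output (the paths are hypothetical; `ntpath` is used directly so this runs on any OS):

```python
import ntpath

link = r"C:\spack\bin\tool.exe"  # hypothetical hard link we are resolving
fsutil_output = "\\spack\\bin\\tool.exe\n\\spack\\.links\\tool-original.exe\n"  # canned fsutil output

drive, _ = ntpath.splitdrive(link)                   # "C:"
links = {drive + p for p in fsutil_output.splitlines()}
links.remove(link)                                   # drop the path we queried with
assert links == {r"C:\spack\.links\tool-original.exe"}
```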
def _windows_read_junction(link: str):
    """Find the path that a junction points to."""
    if sys.platform != "win32":
        raise SymlinkError("Can't read junction on non-Windows OS.")

    link = os.path.abspath(link)
    link_basename = os.path.basename(link)
    link_parent = os.path.dirname(link)
    fsutil_cmd = ["dir", "/a:l", link_parent]
    proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    out, err = proc.communicate()
    if proc.returncode != 0:
        raise SymlinkError(f"An error occurred while reading junction: {err.decode()}")
    matches = re.search(rf"<JUNCTION>\s+{link_basename} \[(.*)]", out.decode())
    if matches:
        return matches.group(1)
    else:
        raise SymlinkError("Could not find junction path.")


@system_path_filter
def resolve_link_target_relative_to_the_link(link):
    """
    os.path.isdir uses os.path.exists, which for links will check
    the existence of the link target. If the link target is relative to
    the link, we need to construct a pathname that is valid from
    our cwd (which may not be the same as the link's directory).
    """
    target = readlink(link)
    if os.path.isabs(target):
        return target
    link_dir = os.path.dirname(os.path.abspath(link))
    return os.path.join(link_dir, target)


class SymlinkError(RuntimeError):
    """Exception class for errors raised while creating symlinks,
    junctions and hard links
    """


class AlreadyExistsError(SymlinkError):
    """Link path already exists."""
@@ -38,10 +38,13 @@ def _search_duplicate_compilers(error_cls):
import ast
import collections
import collections.abc
import glob
import inspect
import itertools
import pathlib
import pickle
import re
import warnings
from urllib.request import urlopen

import llnl.util.lang
@@ -798,3 +801,76 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
        errors.append(err)

    return errors


#: Sanity checks for external software detection
external_detection = AuditClass(
    group="externals",
    tag="PKG-EXTERNALS",
    description="Sanity checks for external software detection",
    kwargs=("pkgs",),
)

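The `AuditClass` machinery used here is a decorator registry: calling the instance registers a check function under the group, and running the group executes every registered check, collecting the errors. A toy sketch of the pattern (simplified, not the real `AuditClass` API):

```python
from typing import Callable, List

class AuditGroup:
    """Toy stand-in for AuditClass: calling the instance registers a check."""

    def __init__(self, tag: str) -> None:
        self.tag = tag
        self.checks: List[Callable[..., List[str]]] = []

    def __call__(self, func: Callable[..., List[str]]) -> Callable[..., List[str]]:
        self.checks.append(func)  # used as a decorator
        return func

    def run(self, **kwargs) -> List[str]:
        errors: List[str] = []
        for check in self.checks:
            errors.extend(check(**kwargs))
        return errors

external_detection_demo = AuditGroup(tag="PKG-EXTERNALS")

@external_detection_demo
def check_has_detection_test(pkgs: List[str]) -> List[str]:
    return [f'"{p}" has no detection test' for p in pkgs if p != "cmake"]

print(external_detection_demo.run(pkgs=["cmake", "libfoo"]))  # ['"libfoo" has no detection test']
```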
def packages_with_detection_tests():
    """Return the list of packages with a corresponding detection_test.yaml file."""
    import spack.config
    import spack.util.path

    to_be_tested = []
    for current_repo in spack.repo.PATH.repos:
        namespace = current_repo.namespace
        packages_dir = pathlib.PurePath(current_repo.packages_path)
        pattern = packages_dir / "**" / "detection_test.yaml"
        pkgs_with_tests = [
            f"{namespace}.{str(pathlib.PurePath(x).parent.name)}" for x in glob.glob(str(pattern))
        ]
        to_be_tested.extend(pkgs_with_tests)

    return to_be_tested

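The discovery above hinges on a recursive glob for `detection_test.yaml` files. A standalone sketch of the same idea follows; the repo path is hypothetical, and note that plain `glob.glob` needs `recursive=True` for `**` to actually recurse:

```python
import glob
import pathlib

# Hypothetical repo layout; the real packages_path comes from the repo object.
packages_dir = pathlib.PurePath("var/spack/repos/builtin/packages")
pattern = packages_dir / "**" / "detection_test.yaml"

found = glob.glob(str(pattern), recursive=True)
pkgs = [f"builtin.{pathlib.PurePath(x).parent.name}" for x in found]
print(pkgs)  # e.g. ['builtin.cmake', 'builtin.gcc'] when such files exist
```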
@external_detection
def _test_detection_by_executable(pkgs, error_cls):
    """Test drive external detection for packages"""
    import spack.detection

    errors = []

    # Filter the packages and retain only the ones with detection tests
    pkgs_with_tests = packages_with_detection_tests()
    selected_pkgs = []
    for current_package in pkgs_with_tests:
        _, unqualified_name = spack.repo.partition_package_name(current_package)
        # Check for both unqualified name and qualified name
        if unqualified_name in pkgs or current_package in pkgs:
            selected_pkgs.append(current_package)
    selected_pkgs.sort()

    if not selected_pkgs:
        summary = "No detection test to run"
        details = [f' "{p}" has no detection test' for p in pkgs]
        warnings.warn("\n".join([summary] + details))
        return errors

    for pkg_name in selected_pkgs:
        for idx, test_runner in enumerate(
            spack.detection.detection_tests(pkg_name, spack.repo.PATH)
        ):
            specs = test_runner.execute()
            expected_specs = test_runner.expected_specs

            not_detected = set(expected_specs) - set(specs)
            if not_detected:
                summary = pkg_name + ": cannot detect some specs"
                details = [f'"{s}" was not detected [test_id={idx}]' for s in sorted(not_detected)]
                errors.append(error_cls(summary=summary, details=details))

            not_expected = set(specs) - set(expected_specs)
            if not_expected:
                summary = pkg_name + ": detected unexpected specs"
                msg = '"{0}" was detected, but was not expected [test_id={1}]'
                details = [msg.format(s, idx) for s in sorted(not_expected)]
                errors.append(error_cls(summary=summary, details=details))

    return errors
@@ -9,7 +9,6 @@
import io
import itertools
import json
import multiprocessing.pool
import os
import re
import shutil
@@ -35,6 +34,7 @@
import spack.cmd
import spack.config as config
import spack.database as spack_db
import spack.error
import spack.hooks
import spack.hooks.sbang
import spack.mirror
@@ -49,6 +49,7 @@
import spack.util.gpg
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.timer as timer
import spack.util.url as url_util
import spack.util.web as web_util
from spack.caches import misc_cache_location
@@ -646,8 +647,7 @@ class BuildManifestVisitor(BaseDirectoryVisitor):
    directories."""

    def __init__(self):
        # Save unique identifiers of files to avoid
        # relocating hardlink files for each path.
        # Save unique identifiers of hardlinks to avoid relocating them multiple times
        self.visited = set()

        # Lists of files we will check
@@ -656,6 +656,8 @@ def __init__(self):

    def seen_before(self, root, rel_path):
        stat_result = os.lstat(os.path.join(root, rel_path))
        if stat_result.st_nlink == 1:
            return False
        identifier = (stat_result.st_dev, stat_result.st_ino)
        if identifier in self.visited:
            return True
@@ -876,32 +878,18 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_dir
        db: A spack database used for adding specs and then writing the index.
        temp_dir (str): Location to write index.json and hash for pushing
        concurrency (int): Number of parallel processes to use when fetching

    Return:
        None
    """
    for file in file_list:
        contents = read_method(file)
        # Need full spec.json name or this gets confused with index.json.
        if file.endswith(".json.sig"):
            specfile_json = Spec.extract_json_from_clearsig(contents)
            fetched_spec = Spec.from_dict(specfile_json)
        elif file.endswith(".json"):
            fetched_spec = Spec.from_json(contents)
        else:
            continue

    def _fetch_spec_from_mirror(spec_url):
        spec_file_contents = read_method(spec_url)

        if spec_file_contents:
            # Need full spec.json name or this gets confused with index.json.
            if spec_url.endswith(".json.sig"):
                specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
                return Spec.from_dict(specfile_json)
            if spec_url.endswith(".json"):
                return Spec.from_json(spec_file_contents)

    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
    try:
        fetched_specs = tp.map(
            llnl.util.lang.star(_fetch_spec_from_mirror), [(f,) for f in file_list]
        )
    finally:
        tp.terminate()
        tp.join()

    for fetched_spec in fetched_specs:
        db.add(fetched_spec, None)
        db.mark(fetched_spec, "in_buildcache", True)

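`llnl.util.lang.star` adapts a multi-argument function so `Pool.map` can call it with a single tuple. A self-contained sketch of the new ThreadPool flow, with a stand-in `star` and a fake fetcher in place of `_fetch_spec_from_mirror`:

```python
import multiprocessing.pool

def star(func):
    """Stand-in for llnl.util.lang.star: unpack a tuple of arguments."""
    def _wrapper(args):
        return func(*args)
    return _wrapper

def fetch_spec(url):  # hypothetical stand-in for _fetch_spec_from_mirror
    return f"spec from {url}"

file_list = ["mirror/a.json", "mirror/b.json.sig", "mirror/c.json"]
tp = multiprocessing.pool.ThreadPool(processes=8)
try:
    fetched = tp.map(star(fetch_spec), [(f,) for f in file_list])
finally:
    tp.terminate()
    tp.join()
print(fetched)  # ['spec from mirror/a.json', ...]
```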
@@ -1431,7 +1419,7 @@ def try_fetch(url_to_fetch):

    try:
        stage.fetch()
    except web_util.FetchError:
    except spack.error.FetchError:
        stage.destroy()
        return None

@@ -1594,9 +1582,10 @@ def dedupe_hardlinks_if_necessary(root, buildinfo):
    for rel_path in buildinfo[key]:
        stat_result = os.lstat(os.path.join(root, rel_path))
        identifier = (stat_result.st_dev, stat_result.st_ino)
        if identifier in visited:
            continue
        visited.add(identifier)
        if stat_result.st_nlink > 1:
            if identifier in visited:
                continue
            visited.add(identifier)
        new_list.append(rel_path)
    buildinfo[key] = new_list

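The reordering above matters: only paths that are actually hardlinked (`st_nlink > 1`) should enter the visited set, keyed by `(device, inode)`, so each hardlink group is kept exactly once while regular files always pass through. A compact sketch of the fixed logic:

```python
import os

def dedupe_hardlinks(root, rel_paths):
    visited = set()
    deduped = []
    for rel_path in rel_paths:
        stat_result = os.lstat(os.path.join(root, rel_path))
        if stat_result.st_nlink > 1:
            # only real hardlinks enter the visited set, keyed by (device, inode)
            identifier = (stat_result.st_dev, stat_result.st_ino)
            if identifier in visited:
                continue
            visited.add(identifier)
        deduped.append(rel_path)
    return deduped
```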
@@ -1813,10 +1802,11 @@ def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
        m.linkname = m.linkname[result.end() :]


def extract_tarball(spec, download_result, unsigned=False, force=False):
def extract_tarball(spec, download_result, unsigned=False, force=False, timer=timer.NULL_TIMER):
    """
    extract binary tarball for given package into install area
    """
    timer.start("extract")
    if os.path.exists(spec.prefix):
        if force:
            shutil.rmtree(spec.prefix)
@@ -1896,7 +1886,9 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):

    os.remove(tarfile_path)
    os.remove(specfile_path)
    timer.stop("extract")

    timer.start("relocate")
    try:
        relocate_package(spec)
    except Exception as e:
@@ -1917,6 +1909,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
        if os.path.exists(filename):
            os.remove(filename)
        _delete_staged_downloads(download_result)
    timer.stop("relocate")


def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
@@ -2154,7 +2147,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
    if not os.path.exists(stage.save_filename):
        try:
            stage.fetch()
        except web_util.FetchError:
        except spack.error.FetchError:
            continue

    tty.debug("Found key {0}".format(fingerprint))
@@ -2306,7 +2299,7 @@ def _download_buildcache_entry(mirror_root, descriptions):
    try:
        stage.fetch()
        break
    except web_util.FetchError as e:
    except spack.error.FetchError as e:
        tty.debug(e)
    else:
        if fail_if_missing:
@@ -476,16 +476,16 @@ def ensure_executables_in_path_or_raise(
def _add_externals_if_missing() -> None:
    search_list = [
        # clingo
        spack.repo.PATH.get_pkg_class("cmake"),
        spack.repo.PATH.get_pkg_class("bison"),
        "cmake",
        "bison",
        # GnuPG
        spack.repo.PATH.get_pkg_class("gawk"),
        "gawk",
        # develop deps
        spack.repo.PATH.get_pkg_class("git"),
        "git",
    ]
    if IS_WINDOWS:
        search_list.append(spack.repo.PATH.get_pkg_class("winbison"))
    externals = spack.detection.by_executable(search_list)
        search_list.append("winbison")
    externals = spack.detection.by_path(search_list)
    # System git is typically deprecated, so mark as non-buildable to force it as external
    non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
    spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)
@@ -15,9 +15,9 @@

from llnl.util import tty

import spack.build_environment
import spack.environment
import spack.tengine
import spack.util.cpus
import spack.util.executable
from spack.environment import depfile

@@ -137,7 +137,7 @@ def _install_with_depfile(self) -> None:
        "-C",
        str(self.environment_root()),
        "-j",
        str(spack.build_environment.determine_number_of_jobs(parallel=True)),
        str(spack.util.cpus.determine_number_of_jobs(parallel=True)),
        **kwargs,
    )

@@ -43,6 +43,7 @@
from typing import List, Tuple

import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.filesystem import join_path
from llnl.util.lang import dedupe
from llnl.util.symlink import symlink
@@ -68,7 +69,7 @@
from spack.error import NoHeadersError, NoLibrariesError
from spack.install_test import spack_install_test_log
from spack.installer import InstallError
from spack.util.cpus import cpus_available
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import (
    SYSTEM_DIRS,
    EnvironmentModifications,
@@ -82,7 +83,6 @@
from spack.util.executable import Executable
from spack.util.log_parse import make_log_context, parse_log_events
from spack.util.module_cmd import load_module, module, path_from_modules
from spack.util.string import plural

#
# This can be set by the user to globally disable parallel builds.
@@ -537,39 +537,6 @@ def update_compiler_args_for_dep(dep):
    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))


def determine_number_of_jobs(
    parallel=False, command_line=None, config_default=None, max_cpus=None
):
    """
    Packages that require sequential builds need 1 job. Otherwise we use the
    number of jobs set on the command line. If not set, then we use the config
    defaults (which is usually set through the builtin config scope), but we
    cap to the number of CPUs available to avoid oversubscription.

    Parameters:
        parallel (bool or None): true when package supports parallel builds
        command_line (int or None): command line override
        config_default (int or None): config default number of jobs
        max_cpus (int or None): maximum number of CPUs available. When None, this
            value is automatically determined.
    """
    if not parallel:
        return 1

    if command_line is None and "command_line" in spack.config.scopes():
        command_line = spack.config.get("config:build_jobs", scope="command_line")

    if command_line is not None:
        return command_line

    max_cpus = max_cpus or cpus_available()

    # in some rare cases _builtin config may not be set, so default to max 16
    config_default = config_default or spack.config.get("config:build_jobs", 16)

    return min(max_cpus, config_default)

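The removed function now lives in `spack.util.cpus`, but the policy it implements is worth keeping in view: sequential builds get one job; otherwise a command-line override wins, else the config default applies, capped at the available CPUs. A minimal sketch of that policy (`os.cpu_count` stands in for `cpus_available`, and the config lookup is elided):

```python
import os

def determine_number_of_jobs(parallel, command_line=None, config_default=16, max_cpus=None):
    if not parallel:
        return 1                              # sequential builds always get one job
    if command_line is not None:
        return command_line                   # an explicit -j override wins
    max_cpus = max_cpus or os.cpu_count() or 1
    return min(max_cpus, config_default)      # config default, capped at available CPUs

print(determine_number_of_jobs(parallel=True, max_cpus=8))      # 8
print(determine_number_of_jobs(parallel=True, command_line=4))  # 4
print(determine_number_of_jobs(parallel=False))                 # 1
```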
def set_module_variables_for_package(pkg):
    """Populate the Python module of a package with some useful global names.
    This makes things easier for package writers.

@@ -274,7 +274,6 @@ def std_args(pkg, generator=None):
        generator,
        define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
        define("CMAKE_BUILD_TYPE", build_type),
        define("BUILD_TESTING", pkg.run_tests),
    ]

    # CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
@@ -451,7 +450,6 @@ def cmake_args(self):

    * CMAKE_INSTALL_PREFIX
    * CMAKE_BUILD_TYPE
    * BUILD_TESTING

    which will be set automatically.
    """
@@ -154,7 +154,7 @@ def cuda_flags(arch_list):
    conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
    conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
    conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
    conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10 target=x86_64:")
    conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10.0 target=x86_64:")
    conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
    conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
    conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")
@@ -95,7 +95,7 @@ def makefile_root(self):
    return self.stage.source_path

@property
def nmakefile_name(self):
def makefile_name(self):
    """Name of the current makefile. This is currently an empty value.
    If a project defines this value, it will be used with the /f argument
    to provide nmake an explicit makefile. This is useful in scenarios where
@@ -126,8 +126,8 @@ def build(self, pkg, spec, prefix):
    """Run "nmake" on the build targets specified by the builder."""
    opts = self.std_nmake_args
    opts += self.nmake_args()
    if self.nmakefile_name:
        opts.append("/f {}".format(self.nmakefile_name))
    if self.makefile_name:
        opts.append("/F{}".format(self.makefile_name))
    with fs.working_dir(self.build_directory):
        inspect.getmodule(self.pkg).nmake(
            *opts, *self.build_targets, ignore_quotes=self.ignore_quotes
@@ -139,8 +139,8 @@ def install(self, pkg, spec, prefix):
    opts = self.std_nmake_args
    opts += self.nmake_args()
    opts += self.nmake_install_args()
    if self.nmakefile_name:
        opts.append("/f {}".format(self.nmakefile_name))
    if self.makefile_name:
        opts.append("/F{}".format(self.makefile_name))
    opts.append(self.define("PREFIX", prefix))
    with fs.working_dir(self.build_directory):
        inspect.getmodule(self.pkg).nmake(
@@ -61,6 +61,11 @@ def component_prefix(self):
    """Path to component <prefix>/<component>/<version>."""
    return self.prefix.join(join_path(self.component_dir, self.spec.version))

@property
def env_script_args(self):
    """Additional arguments to pass to vars.sh script."""
    return ()

def install(self, spec, prefix):
    self.install_component(basename(self.url_for_version(spec.version)))

@@ -124,7 +129,7 @@ def setup_run_environment(self, env):
    if "~envmods" not in self.spec:
        env.extend(
            EnvironmentModifications.from_sourcing_file(
                join_path(self.component_prefix, "env", "vars.sh")
                join_path(self.component_prefix, "env", "vars.sh"), *self.env_script_args
            )
        )

@@ -16,6 +16,7 @@

import spack.builder
import spack.config
import spack.deptypes as dt
import spack.detection
import spack.multimethod
import spack.package_base
@@ -226,7 +227,48 @@ def update_external_dependencies(self, extendee_spec=None):

        python.external_path = self.spec.external_path
        python._mark_concrete()
        self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"), virtuals=())
        self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=())

    def get_external_python_for_prefix(self):
        """
        For an external package that extends python, find the most likely spec for the python
        it depends on.

        First search: an "installed" external that shares a prefix with this package
        Second search: a configured external that shares a prefix with this package
        Third search: search this prefix for a python package

        Returns:
            spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
        """
        python_externals_installed = [
            s for s in spack.store.STORE.db.query("python") if s.prefix == self.spec.external_path
        ]
        if python_externals_installed:
            return python_externals_installed[0]

        python_external_config = spack.config.get("packages:python:externals", [])
        python_externals_configured = [
            spack.spec.parse_with_version_concrete(item["spec"])
            for item in python_external_config
            if item["prefix"] == self.spec.external_path
        ]
        if python_externals_configured:
            return python_externals_configured[0]

        python_externals_detection = spack.detection.by_path(
            ["python"], path_hints=[self.spec.external_path]
        )

        python_externals_detected = [
            d.spec
            for d in python_externals_detection.get("python", [])
            if d.prefix == self.spec.external_path
        ]
        if python_externals_detected:
            return python_externals_detected[0]

        raise StopIteration("No external python could be detected for %s to depend on" % self.spec)

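The three searches above form a tiered lookup: each tier is tried in order and the first non-empty result wins, with an error only if every tier comes up empty. A tiny sketch of the shape (the tier contents are hypothetical):

```python
def first_from_tiers(*tiers):
    """Return the first candidate from the first tier that yields any."""
    for tier in tiers:
        candidates = tier()
        if candidates:
            return candidates[0]
    raise StopIteration("no tier produced a candidate")

result = first_from_tiers(
    lambda: [],                      # installed externals sharing the prefix
    lambda: ["python@3.11 (cfg)"],   # configured externals sharing the prefix
    lambda: ["python@3.11 (det)"],   # externals detected inside the prefix
)
print(result)  # python@3.11 (cfg)
```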
class PythonPackage(PythonExtension):
@@ -273,54 +315,16 @@ def list_url(cls):
        name = cls.pypi.split("/")[0]
        return "https://pypi.org/simple/" + name + "/"

    def get_external_python_for_prefix(self):
        """
        For an external package that extends python, find the most likely spec for the python
        it depends on.

        First search: an "installed" external that shares a prefix with this package
        Second search: a configured external that shares a prefix with this package
        Third search: search this prefix for a python package

        Returns:
            spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
        """
        python_externals_installed = [
            s for s in spack.store.STORE.db.query("python") if s.prefix == self.spec.external_path
        ]
        if python_externals_installed:
            return python_externals_installed[0]

        python_external_config = spack.config.get("packages:python:externals", [])
        python_externals_configured = [
            spack.spec.parse_with_version_concrete(item["spec"])
            for item in python_external_config
            if item["prefix"] == self.spec.external_path
        ]
        if python_externals_configured:
            return python_externals_configured[0]

        python_externals_detection = spack.detection.by_executable(
            [spack.repo.PATH.get_pkg_class("python")], path_hints=[self.spec.external_path]
        )

        python_externals_detected = [
            d.spec
            for d in python_externals_detection.get("python", [])
            if d.prefix == self.spec.external_path
        ]
        if python_externals_detected:
            return python_externals_detected[0]

        raise StopIteration("No external python could be detected for %s to depend on" % self.spec)

    @property
    def headers(self):
        """Discover header files in platlib."""

        # Remove py- prefix in package name
        name = self.spec.name[3:]

        # Headers may be in either location
        include = self.prefix.join(self.spec["python"].package.include)
        platlib = self.prefix.join(self.spec["python"].package.platlib)
        include = self.prefix.join(self.spec["python"].package.include).join(name)
        platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
        headers = fs.find_all_headers(include) + fs.find_all_headers(platlib)

        if headers:
@@ -334,13 +338,14 @@ def libs(self):
        """Discover libraries in platlib."""

        # Remove py- prefix in package name
        library = "lib" + self.spec.name[3:].replace("-", "?")
        root = self.prefix.join(self.spec["python"].package.platlib)
        name = self.spec.name[3:]

        for shared in [True, False]:
            libs = fs.find_libraries(library, root, shared=shared, recursive=True)
            if libs:
                return libs
        root = self.prefix.join(self.spec["python"].package.platlib).join(name)

        libs = fs.find_all_libraries(root, recursive=True)

        if libs:
            return libs

        msg = "Unable to recursively locate {} libraries in {}"
        raise NoLibrariesError(msg.format(self.spec.name, root))
@@ -10,9 +10,10 @@
import llnl.util.tty as tty

import spack.builder
from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
from spack.build_environment import SPACK_NO_PARALLEL_MAKE
from spack.directives import build_system, extends, maintainers
from spack.package_base import PackageBase
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import env_flag
from spack.util.executable import Executable, ProcessError

@@ -92,7 +93,7 @@ def install(self, pkg, spec, prefix):
        "--copy",
        "-i",
        "-j",
        str(determine_number_of_jobs(parallel)),
        str(determine_number_of_jobs(parallel=parallel)),
        "--",
        os.getcwd(),
    ]
@@ -308,7 +308,7 @@ def append_dep(s, d):
    dependencies.append({"spec": s, "depends": d})

    for spec in spec_list:
        for s in spec.traverse(deptype=all):
        for s in spec.traverse(deptype="all"):
            if s.external:
                tty.msg("Will not stage external pkg: {0}".format(s))
                continue
@@ -316,7 +316,7 @@ def append_dep(s, d):
            skey = _spec_deps_key(s)
            spec_labels[skey] = s

            for d in s.dependencies(deptype=all):
            for d in s.dependencies(deptype="all"):
                dkey = _spec_deps_key(d)
                if d.external:
                    tty.msg("Will not stage external dep: {0}".format(d))
@@ -1029,13 +1029,18 @@ def main_script_replacements(cmd):
    job_vars = job_object.setdefault("variables", {})
    job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
    job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
    job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
    job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
    job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
    job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
    job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")

    job_object["needs"] = []
    if spec_label in dependencies:
        if enable_artifacts_buildcache:
            # Get dependencies transitively, so they're all
            # available in the artifacts buildcache.
            dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
            dep_jobs = [d for d in release_spec.traverse(deptype="all", root=False)]
        else:
            # In this case, "needs" is only used for scheduling
            # purposes, so we only get the direct dependencies.
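Note what the `deptype=all` fixes above correct: the previous code passed the Python builtin function `all`, not the string `"all"`. A two-line demonstration of why those are different values:

```python
print(all)           # <built-in function all>: the builtin, not a deptype
print(all == "all")  # False

deptype = "all"      # what the call sites actually meant
print(deptype == "all")  # True
```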
@@ -11,6 +11,7 @@
from textwrap import dedent
from typing import List, Match, Tuple

import llnl.string
import llnl.util.tty as tty
from llnl.util.filesystem import join_path
from llnl.util.lang import attr_setdefault, index_by
@@ -29,7 +30,6 @@
import spack.user_environment as uenv
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.string

# cmd has a submodule called "list" so preserve the python list module
python_list = list
@@ -342,9 +342,9 @@ def iter_groups(specs, indent, all_headers):
    print()

    header = "%s{%s} / %s{%s}" % (
        spack.spec.architecture_color,
        spack.spec.ARCHITECTURE_COLOR,
        architecture if architecture else "no arch",
        spack.spec.compiler_color,
        spack.spec.COMPILER_COLOR,
        f"{compiler.display_str}" if compiler else "no compiler",
    )

@@ -516,7 +516,7 @@ def print_how_many_pkgs(specs, pkg_type=""):
    category, e.g. if pkg_type is "installed" then the message
    would be "3 installed packages"
    """
    tty.msg("%s" % spack.util.string.plural(len(specs), pkg_type + " package"))
    tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package"))


def spack_is_git_repo():
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.tty as tty
import llnl.util.tty.colify
import llnl.util.tty.color as cl

import spack.audit
@@ -20,6 +21,15 @@ def setup_parser(subparser):
    # Audit configuration files
    sp.add_parser("configs", help="audit configuration files")

    # Audit external detection in packages
    external_parser = sp.add_parser("externals", help="check external detection in packages")
    external_parser.add_argument(
        "--list",
        action="store_true",
        dest="list_externals",
        help="if passed, list which packages have detection tests",
    )

    # Https and other linting
    https_parser = sp.add_parser("packages-https", help="check https in packages")
    https_parser.add_argument(
@@ -29,7 +39,7 @@ def setup_parser(subparser):
    # Audit package recipes
    pkg_parser = sp.add_parser("packages", help="audit package recipes")

    for group in [pkg_parser, https_parser]:
    for group in [pkg_parser, https_parser, external_parser]:
        group.add_argument(
            "name",
            metavar="PKG",
@@ -62,6 +72,18 @@ def packages_https(parser, args):
    _process_reports(reports)


def externals(parser, args):
    if args.list_externals:
        msg = "@*{The following packages have detection tests:}"
        tty.msg(cl.colorize(msg))
        llnl.util.tty.colify.colify(spack.audit.packages_with_detection_tests(), indent=2)
        return

    pkgs = args.name or spack.repo.PATH.all_package_names()
    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
    _process_reports(reports)


def list(parser, args):
    for subcommand, check_tags in spack.audit.GROUPS.items():
        print(cl.colorize("@*b{" + subcommand + "}:"))
@@ -78,6 +100,7 @@ def list(parser, args):
def audit(parser, args):
    subcommands = {
        "configs": configs,
        "externals": externals,
        "packages": packages,
        "packages-https": packages_https,
        "list": list,
@@ -13,6 +13,7 @@

import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.string import plural
from llnl.util.lang import elide_list

import spack.binary_distribution as bindist
@@ -20,6 +21,7 @@
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.error
import spack.mirror
import spack.relocate
import spack.repo
@@ -31,7 +33,6 @@
from spack.cmd import display_specs
from spack.spec import Spec, save_dependency_specfiles
from spack.stage import Stage
from spack.util.string import plural

description = "create, download and install binary packages"
section = "packaging"
@@ -78,6 +79,11 @@ def setup_parser(subparser: argparse.ArgumentParser):
        "Alternatively, one can decide to build a cache for only the package or only the "
        "dependencies",
    )
    push.add_argument(
        "--fail-fast",
        action="store_true",
        help="stop pushing on first failure (default is best effort)",
    )
    arguments.add_common_arguments(push, ["specs"])
    push.set_defaults(func=push_fn)

@@ -296,6 +302,7 @@ def push_fn(args):
    tty.info(f"Selected {len(specs)} specs to push to {url}")

    skipped = []
    failed = []

    # tty printing
    color = clr.get_color_when()
@@ -326,11 +333,17 @@ def push_fn(args):
        except bindist.NoOverwriteException:
            skipped.append(format_spec(spec))

        # Catch any other exception unless the fail fast option is set
        except Exception as e:
            if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
                raise
            failed.append((format_spec(spec), e))

    if skipped:
        if len(specs) == 1:
            tty.info("The spec is already in the buildcache. Use --force to overwrite it.")
        elif len(skipped) == len(specs):
            tty.info("All specs are already in the buildcache. Use --force to overwite them.")
            tty.info("All specs are already in the buildcache. Use --force to overwrite them.")
        else:
            tty.info(
                "The following {} specs were skipped as they already exist in the buildcache:\n"
@@ -340,6 +353,17 @@ def push_fn(args):
                )
            )

    if failed:
        if len(failed) == 1:
            raise failed[0][1]

        raise spack.error.SpackError(
            f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
            "\n".join(
                elide_list([f"  {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
            ),
        )

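The new push error handling collects failures and re-raises at the end unless `--fail-fast` is set: a single failure surfaces as the original exception, multiple failures become one aggregated error. A condensed sketch of the control flow (generic exceptions stand in for the bindist-specific ones):

```python
def push_all(specs, push_one, fail_fast=False):
    failed = []
    for spec in specs:
        try:
            push_one(spec)
        except Exception as e:
            if fail_fast:
                raise                  # surface the first error immediately
            failed.append((spec, e))
    if len(failed) == 1:
        raise failed[0][1]             # a single failure is re-raised as-is
    if failed:
        details = "\n".join(f"  {s}: {e.__class__.__name__}: {e}" for s, e in failed)
        raise RuntimeError(f"{len(failed)} errors occurred while pushing specs:\n{details}")

def flaky_push(spec):                  # hypothetical pusher that fails for one spec
    if spec == "zlib":
        raise IOError("mirror unreachable")

push_all(["cmake", "openssl"], flaky_push)  # succeeds: no spec fails
```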
def install_fn(args):
    """install from a binary package"""
@@ -503,7 +527,7 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):
        temp_stage.create()
        temp_stage.fetch()
        web_util.push_to_url(local_path, dest_url, keep_original=True)
    except web_util.FetchError as e:
    except spack.error.FetchError as e:
        # Expected, since we have to try all the possible extensions
        tty.debug("no such file: {0}".format(src_url))
        tty.debug(e)
@@ -66,7 +66,7 @@ def setup_parser(subparser):
    modes_parser.add_argument(
        "--verify", action="store_true", default=False, help="verify known package checksums"
    )
    arguments.add_common_arguments(subparser, ["package"])
    arguments.add_common_arguments(subparser, ["package", "jobs"])
    subparser.add_argument(
        "versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
    )
@@ -96,7 +96,7 @@ def checksum(parser, args):

    # Add latest version if requested
    if args.latest:
        remote_versions = pkg.fetch_remote_versions()
        remote_versions = pkg.fetch_remote_versions(args.jobs)
        if len(remote_versions) > 0:
            latest_version = sorted(remote_versions.keys(), reverse=True)[0]
            versions.append(latest_version)
@@ -119,13 +119,13 @@ def checksum(parser, args):
    # if we get here, it's because no valid url was provided by the package
    # do expensive fallback to try to recover
    if remote_versions is None:
        remote_versions = pkg.fetch_remote_versions()
        remote_versions = pkg.fetch_remote_versions(args.jobs)
    if version in remote_versions:
        url_dict[version] = remote_versions[version]

    if len(versions) <= 0:
        if remote_versions is None:
            remote_versions = pkg.fetch_remote_versions()
            remote_versions = pkg.fetch_remote_versions(args.jobs)
        url_dict = remote_versions

    if not url_dict:
@@ -239,7 +239,7 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str):
    parsed_version = Version(contents_version.group(1))

    if parsed_version < new_versions[0][0]:
        split_contents[i:i] = [new_versions.pop(0)[1], "  # FIX ME", "\n"]
        split_contents[i:i] = [new_versions.pop(0)[1], "  # FIXME", "\n"]
        num_versions_added += 1

    elif parsed_version == new_versions[0][0]:
@@ -19,6 +19,7 @@
import spack.hash_types as ht
import spack.mirror
import spack.util.gpg as gpg_util
import spack.util.timer as timer
import spack.util.url as url_util
import spack.util.web as web_util

@@ -253,6 +254,8 @@ def ci_rebuild(args):
    check a single spec against the remote mirror, and rebuild it from source if the mirror does
    not contain the hash
    """
    rebuild_timer = timer.Timer()

    env = spack.cmd.require_active_env(cmd_name="ci rebuild")

    # Make sure the environment is "gitlab-enabled", or else there's nothing
@@ -736,6 +739,14 @@ def ci_rebuild(args):

    print(reproduce_msg)

    rebuild_timer.stop()
    try:
        with open("install_timers.json", "w") as timelog:
            extra_attributes = {"name": ".ci-rebuild"}
            rebuild_timer.write_json(timelog, extra_attributes=extra_attributes)
    except Exception as e:
        tty.debug(str(e))

    # Tie job success/failure to the success/failure of building the spec
    return install_exit_code

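`ci_rebuild` now times the whole job and dumps the result to `install_timers.json`. A minimal stand-in showing the same flow (this `Timer` is a toy, not `spack.util.timer.Timer`, which starts at construction; this sketch starts explicitly):

```python
import json
import time

class Timer:
    """Tiny stand-in for spack.util.timer.Timer."""

    def __init__(self):
        self._starts = {}
        self.phases = {}

    def start(self, name="total"):
        self._starts[name] = time.monotonic()

    def stop(self, name="total"):
        self.phases[name] = time.monotonic() - self._starts.pop(name)

    def write_json(self, out, extra_attributes=None):
        json.dump({"phases": self.phases, **(extra_attributes or {})}, out, indent=2)

rebuild_timer = Timer()
rebuild_timer.start()
time.sleep(0.01)  # stand-in for the actual rebuild work
rebuild_timer.stop()
with open("install_timers.json", "w") as timelog:
    rebuild_timer.write_json(timelog, extra_attributes={"name": ".ci-rebuild"})
```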
@@ -812,6 +812,9 @@ def bash(args: Namespace, out: IO) -> None:
    parser = spack.main.make_argument_parser()
    spack.main.add_all_commands(parser)

    aliases = ";".join(f"{key}:{val}" for key, val in spack.main.aliases.items())
    out.write(f'SPACK_ALIASES="{aliases}"\n\n')

    writer = BashCompletionWriter(parser.prog, out, args.aliases)
    writer.write(parser)

@@ -12,7 +12,7 @@

import spack.cmd
import spack.config
import spack.dependency as dep
import spack.deptypes as dt
import spack.environment as ev
import spack.mirror
import spack.modules
@@ -114,16 +114,13 @@ def __call__(self, parser, namespace, jobs, option_string):


class DeptypeAction(argparse.Action):
    """Creates a tuple of valid dependency types from a deptype argument."""
    """Creates a flag of valid dependency types from a deptype argument."""

    def __call__(self, parser, namespace, values, option_string=None):
        deptype = dep.all_deptypes
        if values:
            deptype = tuple(x.strip() for x in values.split(","))
            if deptype == ("all",):
                deptype = "all"
            deptype = dep.canonical_deptype(deptype)

        if not values or values == "all":
            deptype = dt.ALL
        else:
            deptype = dt.canonicalize(values.split(","))
        setattr(namespace, self.dest, deptype)

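The switch from deptype tuples to `spack.deptypes` bitflags makes membership tests and unions cheap bitwise operations. A sketch of the representation with illustrative flag values (the names mirror `spack.deptypes`; the actual values there may differ):

```python
BUILD, LINK, RUN, TEST = 1, 2, 4, 8   # illustrative values
ALL = BUILD | LINK | RUN | TEST
NAMES = {"build": BUILD, "link": LINK, "run": RUN, "test": TEST}

def canonicalize(names):
    flag = 0
    for name in names:
        flag |= NAMES[name.strip()]   # union is a bitwise OR
    return flag

depflag = canonicalize("build,link,run".split(","))
assert depflag == BUILD | LINK | RUN
assert depflag & TEST == 0            # membership checks become bitwise tests
assert canonicalize(["build"]) | canonicalize(["run"]) == BUILD | RUN
```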
@@ -285,9 +282,8 @@ def deptype():
    return Args(
        "--deptype",
        action=DeptypeAction,
        default=dep.all_deptypes,
        help="comma-separated list of deptypes to traverse\n\ndefault=%s"
        % ",".join(dep.all_deptypes),
        default=dt.ALL,
        help="comma-separated list of deptypes to traverse (default=%s)" % ",".join(dt.ALL_TYPES),
    )

@@ -10,6 +10,7 @@
import spack.build_environment as build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
import spack.error
import spack.paths
import spack.spec
@@ -46,9 +47,9 @@ def __init__(self, context="build"):
        raise ValueError("context can only be build or test")

    if context == "build":
        self.direct_deps = ("build", "link", "run")
        self.direct_deps = dt.BUILD | dt.LINK | dt.RUN
    else:
        self.direct_deps = ("build", "test", "link", "run")
        self.direct_deps = dt.BUILD | dt.TEST | dt.LINK | dt.RUN

    self.has_uninstalled_deps = False

@@ -71,8 +72,8 @@ def accept(self, item):
def neighbors(self, item):
    # Direct deps: follow build & test edges.
    # Transitive deps: follow link / run.
    deptypes = self.direct_deps if item.depth == 0 else ("link", "run")
    return item.edge.spec.edges_to_dependencies(deptype=deptypes)
    depflag = self.direct_deps if item.depth == 0 else dt.LINK | dt.RUN
    return item.edge.spec.edges_to_dependencies(depflag=depflag)


def emulate_env_utility(cmd_name, context, args):
@@ -185,7 +185,7 @@ def compiler_list(args):
    os_str = os
    if target:
        os_str += "-%s" % target
    cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
    cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
    tty.hline(colorize(cname), char="-")
    colify(reversed(sorted(c.spec.display_str for c in compilers)))

@@ -822,7 +822,7 @@ def get_versions(args, name):
    if args.url is not None and args.template != "bundle" and valid_url:
        # Find available versions
        try:
            url_dict = spack.util.web.find_versions_of_archive(args.url)
            url_dict = spack.url.find_versions_of_archive(args.url)
        except UndetectableVersionError:
            # Use fake versions
            tty.warn("Couldn't detect version in: {0}".format(args.url))
@@ -74,7 +74,7 @@ def dependencies(parser, args):
        spec,
        transitive=args.transitive,
        expand_virtuals=args.expand_virtuals,
        deptype=args.deptype,
        depflag=args.deptype,
    )

    if spec.name in dependencies:
@@ -9,6 +9,7 @@
import sys
import tempfile

import llnl.string as string
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
@@ -28,7 +29,6 @@
import spack.schema.env
import spack.spec
import spack.tengine
import spack.util.string as string
from spack.util.environment import EnvironmentModifications

description = "manage virtual environments"
@@ -239,6 +239,13 @@ def env_deactivate_setup_parser(subparser):
        const="bat",
        help="print bat commands to activate the environment",
    )
    shells.add_argument(
        "--pwsh",
        action="store_const",
        dest="shell",
        const="pwsh",
        help="print pwsh commands to activate the environment",
    )


def env_deactivate(args):
@@ -5,7 +5,9 @@
import argparse
import errno
import os
import re
import sys
from typing import List, Optional

import llnl.util.tty as tty
import llnl.util.tty.colify as colify
@@ -54,7 +56,7 @@ def setup_parser(subparser):
    find_parser.add_argument(
        "--all", action="store_true", help="search for all packages that Spack knows about"
    )
    spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags"])
    spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags", "jobs"])
    find_parser.add_argument("packages", nargs=argparse.REMAINDER)
    find_parser.epilog = (
        'The search is by default on packages tagged with the "build-tools" or '
@@ -120,46 +122,23 @@ def external_find(args):
    else:
        tty.warn("Unable to read manifest, unexpected error: {0}".format(str(e)), skip_msg)

    # If the user didn't specify anything, search for build tools by default
    if not args.tags and not args.all and not args.packages:
        args.tags = ["core-packages", "build-tools"]
    # Outside the Cray manifest, the search is done by tag for performance reasons,
    # since tags are cached.

    # If the user specified both --all and --tag, then --all has precedence
    if args.all and args.tags:
        args.tags = []
    if args.all or args.packages:
        # Each detectable package has at least the detectable tag
        args.tags = ["detectable"]
    elif not args.tags:
        # If the user didn't specify anything, search for build tools by default
        args.tags = ["core-packages", "build-tools"]

    # Construct the list of possible packages to be detected
    pkg_cls_to_check = []

    # Add the packages that have been required explicitly
    if args.packages:
        pkg_cls_to_check = [spack.repo.PATH.get_pkg_class(pkg) for pkg in args.packages]
        if args.tags:
            allowed = set(spack.repo.PATH.packages_with_tags(*args.tags))
            pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]

    if args.tags and not pkg_cls_to_check:
        # If we arrived here we didn't have any explicit package passed
        # as argument, which means to search all packages.
        # Since tags are cached it's much faster to construct what we need
        # to search directly, rather than filtering after the fact
        pkg_cls_to_check = [
            spack.repo.PATH.get_pkg_class(pkg_name)
            for tag in args.tags
            for pkg_name in spack.repo.PATH.packages_with_tags(tag)
        ]
        pkg_cls_to_check = list(set(pkg_cls_to_check))

    # If the list of packages is empty, search for every possible package
    if not args.tags and not pkg_cls_to_check:
        pkg_cls_to_check = list(spack.repo.PATH.all_package_classes())

    # If the user specified any packages to exclude from external find, add them here
    if args.exclude:
        pkg_cls_to_check = [pkg for pkg in pkg_cls_to_check if pkg.name not in args.exclude]

    detected_packages = spack.detection.by_executable(pkg_cls_to_check, path_hints=args.path)
    detected_packages.update(spack.detection.by_library(pkg_cls_to_check, path_hints=args.path))
    candidate_packages = packages_to_search_for(
        names=args.packages, tags=args.tags, exclude=args.exclude
    )
    detected_packages = spack.detection.by_path(
        candidate_packages, path_hints=args.path, max_workers=args.jobs
    )

    new_entries = spack.detection.update_configuration(
        detected_packages, scope=args.scope, buildable=not args.not_buildable
@@ -173,6 +152,28 @@ def external_find(args):
    tty.msg("No new external packages detected")


def packages_to_search_for(
    *, names: Optional[List[str]], tags: List[str], exclude: Optional[List[str]]
):
    result = []
    for current_tag in tags:
        result.extend(spack.repo.PATH.packages_with_tags(current_tag, full=True))

    if names:
        # Match both fully qualified and unqualified
        parts = [rf"(^{x}$|[.]{x}$)" for x in names]
        select_re = re.compile("|".join(parts))
        result = [x for x in result if select_re.search(x)]

    if exclude:
        # Match both fully qualified and unqualified
        parts = [rf"(^{x}$|[.]{x}$)" for x in exclude]
        select_re = re.compile("|".join(parts))
        result = [x for x in result if not select_re.search(x)]

    return result

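`packages_to_search_for` matches names either bare or fully qualified with a namespace prefix. A quick demonstration of that regex:

```python
import re

candidates = ["builtin.cmake", "builtin.cmake-data", "myrepo.cmake", "cmake"]
names = ["cmake"]

# Match either the bare name or any "<namespace>.<name>" form.
parts = [rf"(^{x}$|[.]{x}$)" for x in names]
select_re = re.compile("|".join(parts))

print([c for c in candidates if select_re.search(c)])
# ['builtin.cmake', 'myrepo.cmake', 'cmake'] -- "builtin.cmake-data" is excluded
```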
def external_read_cray_manifest(args):
    _collect_and_consume_cray_manifest_files(
        manifest_file=args.file,
@@ -74,19 +74,19 @@ def graph(parser, args):

    if args.static:
        args.dot = True
        static_graph_dot(specs, deptype=args.deptype)
        static_graph_dot(specs, depflag=args.deptype)
        return

    if args.dot:
        builder = SimpleDAG()
        if args.color:
            builder = DAGWithDependencyTypes()
        graph_dot(specs, builder=builder, deptype=args.deptype)
        graph_dot(specs, builder=builder, depflag=args.deptype)
        return

    # ascii is default: user doesn't need to provide it explicitly
    debug = spack.config.get("config:debug")
    graph_ascii(specs[0], debug=debug, deptype=args.deptype)
    graph_ascii(specs[0], debug=debug, depflag=args.deptype)
    for spec in specs[1:]:
        print()  # extra line bt/w independent graphs
        graph_ascii(spec, debug=debug)
@@ -11,6 +11,7 @@
from llnl.util.tty.colify import colify

import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
import spack.fetch_strategy as fs
import spack.install_test
import spack.repo
@@ -64,11 +65,11 @@ def section_title(s):


def version(s):
    return spack.spec.version_color + s + plain_format
    return spack.spec.VERSION_COLOR + s + plain_format


def variant(s):
    return spack.spec.enabled_variant_color + s + plain_format
    return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format


class VariantFormatter:
@@ -160,7 +161,7 @@ def print_dependencies(pkg):
    for deptype in ("build", "link", "run"):
        color.cprint("")
        color.cprint(section_title("%s Dependencies:" % deptype.capitalize()))
        deps = sorted(pkg.dependencies_of_type(deptype))
        deps = sorted(pkg.dependencies_of_type(dt.flag_from_string(deptype)))
        if deps:
            colify(deps, indent=4)
        else:
@@ -16,7 +16,7 @@
from llnl.util.tty.colify import colify

import spack.cmd.common.arguments as arguments
import spack.dependency
import spack.deptypes as dt
import spack.repo
from spack.version import VersionList

@@ -149,8 +149,8 @@ def rows_for_ncols(elts, ncols):

def get_dependencies(pkg):
    all_deps = {}
    for deptype in spack.dependency.all_deptypes:
        deps = pkg.dependencies_of_type(deptype)
    for deptype in dt.ALL_TYPES:
        deps = pkg.dependencies_of_type(dt.flag_from_string(deptype))
        all_deps[deptype] = [d for d in deps]

    return all_deps
@@ -275,8 +275,8 @@ def head(n, span_id, title, anchor=None):
    out.write("\n")
    out.write("</dd>\n")

    for deptype in spack.dependency.all_deptypes:
        deps = pkg_cls.dependencies_of_type(deptype)
    for deptype in dt.ALL_TYPES:
        deps = pkg_cls.dependencies_of_type(dt.flag_from_string(deptype))
        if deps:
            out.write("<dt>%s Dependencies:</dt>\n" % deptype.capitalize())
            out.write("<dd>\n")
@@ -52,6 +52,13 @@ def setup_parser(subparser):
        const="bat",
        help="print bat commands to load the package",
    )
    shells.add_argument(
        "--pwsh",
        action="store_const",
        dest="shell",
        const="pwsh",
        help="print pwsh commands to load the package",
    )

    subparser.add_argument(
        "--first",
@@ -6,10 +6,11 @@
import posixpath
import sys

from llnl.path import convert_to_posix_path

import spack.paths
import spack.util.executable
from spack.spec import Spec
from spack.util.path import convert_to_posix_path

description = "generate Windows installer"
section = "admin"
@@ -443,7 +443,7 @@ def mirror_create(args):
    )

    # When no directory is provided, the source dir is used
    path = args.directory or spack.caches.FETCH_CACHE_location()
    path = args.directory or spack.caches.fetch_cache_location()

    if args.all and not ev.active_environment():
        create_mirror_for_all_specs(
@@ -137,7 +137,7 @@ def solve(parser, args):
    # these are the same options as `spack spec`
    install_status_fn = spack.spec.Spec.install_status

    fmt = spack.spec.display_format
    fmt = spack.spec.DISPLAY_FORMAT
    if args.namespaces:
        fmt = "{namespace}." + fmt

@@ -176,17 +176,29 @@ def solve(parser, args):
    output = sys.stdout if "asp" in show else None
    setup_only = set(show) == {"asp"}
    unify = spack.config.get("concretizer:unify")
    allow_deprecated = spack.config.get("config:deprecated", False)
    if unify != "when_possible":
        # set up solver parameters
        # Note: reuse and other concretizer prefs are passed as configuration
        result = solver.solve(
            specs, out=output, timers=args.timers, stats=args.stats, setup_only=setup_only
            specs,
            out=output,
            timers=args.timers,
            stats=args.stats,
            setup_only=setup_only,
            allow_deprecated=allow_deprecated,
        )
        if not setup_only:
            _process_result(result, show, required_format, kwargs)
    else:
        for idx, result in enumerate(
            solver.solve_in_rounds(specs, out=output, timers=args.timers, stats=args.stats)
            solver.solve_in_rounds(
                specs,
                out=output,
                timers=args.timers,
                stats=args.stats,
                allow_deprecated=allow_deprecated,
            )
        ):
            if "solutions" in show:
                tty.msg("ROUND {0}".format(idx))
@@ -77,7 +77,7 @@ def setup_parser(subparser):
def spec(parser, args):
    install_status_fn = spack.spec.Spec.install_status

    fmt = spack.spec.display_format
    fmt = spack.spec.DISPLAY_FORMAT
    if args.namespaces:
        fmt = "{namespace}." + fmt

@@ -5,6 +5,7 @@
import io
import sys

import llnl.string
import llnl.util.tty as tty
import llnl.util.tty.colify as colify

@@ -24,7 +25,7 @@ def report_tags(category, tags):
    if isatty:
        num = len(tags)
        fmt = "{0} package tag".format(category)
        buffer.write("{0}:\n".format(spack.util.string.plural(num, fmt)))
        buffer.write("{0}:\n".format(llnl.string.plural(num, fmt)))

    if tags:
        colify.colify(tags, output=buffer, tty=isatty, indent=4)
@@ -51,6 +51,13 @@ def setup_parser(subparser):
        const="bat",
        help="print bat commands to load the package",
    )
    shells.add_argument(
        "--pwsh",
        action="store_const",
        dest="shell",
        const="pwsh",
        help="print pwsh commands to load the package",
    )

    subparser.add_argument(
        "-a", "--all", action="store_true", help="unload all loaded Spack packages"
@@ -12,6 +12,7 @@
import spack.fetch_strategy as fs
import spack.repo
import spack.spec
import spack.url
import spack.util.crypto as crypto
from spack.url import (
    UndetectableNameError,
@@ -26,7 +27,6 @@
    substitution_offsets,
)
from spack.util.naming import simplify_name
from spack.util.web import find_versions_of_archive

description = "debugging tool for url parsing"
section = "developer"
@@ -139,7 +139,7 @@ def url_parse(args):
    if args.spider:
        print()
        tty.msg("Spidering for versions:")
        versions = find_versions_of_archive(url)
        versions = spack.url.find_versions_of_archive(url)

        if not versions:
            print("  Found no versions for {0}".format(name))
@@ -37,10 +37,7 @@ def setup_parser(subparser):
        action="store_true",
        help="only list remote versions newer than the latest checksummed version",
    )
    subparser.add_argument(
        "-c", "--concurrency", default=32, type=int, help="number of concurrent requests"
    )
    arguments.add_common_arguments(subparser, ["package"])
    arguments.add_common_arguments(subparser, ["package", "jobs"])


def versions(parser, args):
@@ -68,7 +65,7 @@ def versions(parser, args):
    if args.safe:
        return

    fetched_versions = pkg.fetch_remote_versions(args.concurrency)
    fetched_versions = pkg.fetch_remote_versions(args.jobs)

    if args.new:
        if sys.stdout.isatty():
@@ -13,6 +13,7 @@
 import tempfile
 from typing import List, Optional, Sequence

+import llnl.path
 import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs
@@ -24,7 +25,6 @@
 import spack.util.module_cmd
 import spack.version
 from spack.util.environment import filter_system_paths
-from spack.util.path import system_path_filter

 __all__ = ["Compiler"]

@@ -39,10 +39,17 @@ def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
         version_arg (str): the argument used to extract version information
     """
     compiler = spack.util.executable.Executable(compiler_path)
+    compiler_invocation_args = {
+        "output": str,
+        "error": str,
+        "ignore_errors": ignore_errors,
+        "timeout": 120,
+        "fail_on_error": True,
+    }
     if version_arg:
-        output = compiler(version_arg, output=str, error=str, ignore_errors=ignore_errors)
+        output = compiler(version_arg, **compiler_invocation_args)
     else:
-        output = compiler(output=str, error=str, ignore_errors=ignore_errors)
+        output = compiler(**compiler_invocation_args)
     return output
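Note: the invocation-argument dict above adds a 120-second timeout and `fail_on_error=True`, so a hung compiler probe now raises instead of stalling detection. A sketch of the guarded call a caller might make, mirroring the exception handling added later in this diff:

    import spack.util.executable

    def probe_version_output(compiler_path, version_arg="--version"):
        exe = spack.util.executable.Executable(compiler_path)
        try:
            # capture stdout and stderr, give up after 120 seconds
            return exe(version_arg, output=str, error=str, timeout=120, fail_on_error=True)
        except spack.util.executable.ProcessTimeoutError:
            return None  # treat an unresponsive binary as "no version"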
@@ -153,7 +160,7 @@ def _parse_link_paths(string):
     return implicit_link_dirs


-@system_path_filter
+@llnl.path.system_path_filter
 def _parse_non_system_link_dirs(string: str) -> List[str]:
     """Parses link paths out of compiler debug output.

@@ -229,6 +236,9 @@ class Compiler:
     # by any compiler
     _all_compiler_rpath_libraries = ["libc", "libc++", "libstdc++"]

+    #: Platform matcher for Platform objects supported by compiler
+    is_supported_on_platform = lambda x: True
+
     # Default flags used by a compiler to set an rpath
     @property
     def cc_rpath_arg(self):
@@ -594,8 +604,6 @@ def search_regexps(cls, language):
     compiler_names = getattr(cls, "{0}_names".format(language))
     prefixes = [""] + cls.prefixes
     suffixes = [""]
-    # Windows compilers generally have an extension of some sort
-    # as do most files on Windows, handle that case here
     if sys.platform == "win32":
         ext = r"\.(?:exe|bat)"
         cls_suf = [suf + ext for suf in cls.suffixes]
@@ -10,7 +10,7 @@
 import itertools
 import multiprocessing.pool
 import os
-from typing import Dict
+from typing import Dict, List

 import archspec.cpu

@@ -298,7 +298,7 @@ def select_new_compilers(compilers, scope=None):
     return compilers_not_in_config


-def supported_compilers():
+def supported_compilers() -> List[str]:
     """Return a set of names of compilers supported by Spack.

     See available_compilers() to get a list of all the available
@@ -306,10 +306,41 @@ def supported_compilers():
     """
-    # Hack to be able to call the compiler `apple-clang` while still
-    # using a valid python name for the module
-    return sorted(
-        name if name != "apple_clang" else "apple-clang"
-        for name in llnl.util.lang.list_modules(spack.paths.compilers_path)
-    )
+    return sorted(all_compiler_names())
+
+
+def supported_compilers_for_host_platform() -> List[str]:
+    """Return a set of compiler class objects supported by Spack
+    that are also supported by the current host platform
+    """
+    host_plat = spack.platforms.real_host()
+    return supported_compilers_for_platform(host_plat)
+
+
+def supported_compilers_for_platform(platform: spack.platforms.Platform) -> List[str]:
+    """Return a set of compiler class objects supported by Spack
+    that are also supported by the provided platform
+
+    Args:
+        platform (str): string representation of platform
+            for which compiler compatibility should be determined
+    """
+    return [
+        name
+        for name in supported_compilers()
+        if class_for_compiler_name(name).is_supported_on_platform(platform)
+    ]
+
+
+def all_compiler_names() -> List[str]:
+    def replace_apple_clang(name):
+        # Hack to be able to call the compiler `apple-clang` while still
+        # using a valid python name for the module
+        return name if name != "apple_clang" else "apple-clang"
+
+    return [replace_apple_clang(name) for name in all_compiler_module_names()]
+
+
+def all_compiler_module_names() -> List[str]:
+    return [name for name in llnl.util.lang.list_modules(spack.paths.compilers_path)]


 @_auto_compiler_spec
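Note: together with the `is_supported_on_platform` hook added to `Compiler` above, these helpers let detection skip compiler classes that cannot run on the host (e.g. MSVC outside Windows). The filter reduces to roughly:

    import spack.compilers
    import spack.platforms

    host = spack.platforms.real_host()
    names = [
        name
        for name in spack.compilers.supported_compilers()
        if spack.compilers.class_for_compiler_name(name).is_supported_on_platform(host)
    ]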
@@ -628,7 +659,7 @@ def arguments_to_detect_version_fn(operating_system, paths):
     def _default(search_paths):
         command_arguments = []
         files_to_be_tested = fs.files_in(*search_paths)
-        for compiler_name in spack.compilers.supported_compilers():
+        for compiler_name in spack.compilers.supported_compilers_for_host_platform():
             compiler_cls = class_for_compiler_name(compiler_name)

             for language in ("cc", "cxx", "f77", "fc"):
@@ -687,9 +718,11 @@ def _default(fn_args):
             value = fn_args._replace(id=compiler_id._replace(version=version))
             return value, None

-        error = "Couldn't get version for compiler {0}".format(path)
+        error = f"Couldn't get version for compiler {path}"
     except spack.util.executable.ProcessError as e:
-        error = "Couldn't get version for compiler {0}\n".format(path) + str(e)
+        error = f"Couldn't get version for compiler {path}\n" + str(e)
+    except spack.util.executable.ProcessTimeoutError as e:
+        error = f"Couldn't get version for compiler {path}\n" + str(e)
     except Exception as e:
         # Catching "Exception" here is fine because it just
         # means something went wrong running a candidate executable.
@@ -112,6 +112,7 @@ def extract_version_from_output(cls, output):
         match = re.search(r"AOCC_(\d+)[._](\d+)[._](\d+)", output)
         if match:
             return ".".join(match.groups())
+        return "unknown"

     @classmethod
     def fc_version(cls, fortran_compiler):
@@ -99,6 +99,28 @@ def cxx17_flag(self):
         else:
             return "-std=c++17"

+    @property
+    def cxx20_flag(self):
+        if self.real_version < Version("8.0"):
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C++20 standard", "cxx20_flag", "< 8.0"
+            )
+        elif self.real_version < Version("11.0"):
+            return "-std=c++2a"
+        else:
+            return "-std=c++20"
+
+    @property
+    def cxx23_flag(self):
+        if self.real_version < Version("11.0"):
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C++23 standard", "cxx23_flag", "< 11.0"
+            )
+        elif self.real_version < Version("14.0"):
+            return "-std=c++2b"
+        else:
+            return "-std=c++23"
+
     @property
     def c99_flag(self):
         if self.real_version < Version("4.5"):
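Note: this follows the usual version-gated flag pattern: below the minimum version raise `UnsupportedCompilerFlag`, in the middle window return the draft spelling (`-std=c++2a`, `-std=c++2b`), and afterwards the final one. A hedged sketch of how a package might consume it:

    import spack.compiler

    flags = []
    try:
        flags.append(self.compiler.cxx20_flag)  # e.g. "-std=c++2a" on gcc 8-10, "-std=c++20" on 11+
    except spack.compiler.UnsupportedCompilerFlag:
        pass  # fall back to an older standard if the compiler is too old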
@@ -7,7 +7,6 @@
 import re
 import subprocess
 import sys
-from distutils.version import StrictVersion
 from typing import Dict, List, Set

 import spack.compiler
@@ -29,13 +28,97 @@
 }


+class CmdCall:
+    """Compose a call to `cmd` for an ordered series of cmd commands/scripts"""
+
+    def __init__(self, *cmds):
+        if not cmds:
+            raise RuntimeError(
+                """Attempting to run commands from CMD without specifying commands.
+                Please add commands to be run."""
+            )
+        self._cmds = cmds
+
+    def __call__(self):
+        out = subprocess.check_output(self.cmd_line, stderr=subprocess.STDOUT)  # novermin
+        return out.decode("utf-16le", errors="replace")  # novermin
+
+    @property
+    def cmd_line(self):
+        base_call = "cmd /u /c "
+        commands = " && ".join([x.command_str() for x in self._cmds])
+        # If multiple commands are being invoked by a single subshell
+        # they must be encapsulated by a double quote. Always double
+        # quote to be sure of proper handling.
+        # cmd will properly resolve nested double quotes as needed.
+        #
+        # `set` writes out the active env to the subshell stdout,
+        # and in this context we are always trying to obtain env
+        # state so it should always be appended
+        return base_call + f'"{commands} && set"'
+
+
+class VarsInvocation:
+    def __init__(self, script):
+        self._script = script
+
+    def command_str(self):
+        return f'"{self._script}"'
+
+    @property
+    def script(self):
+        return self._script
+
+
+class VCVarsInvocation(VarsInvocation):
+    def __init__(self, script, arch, msvc_version):
+        super(VCVarsInvocation, self).__init__(script)
+        self._arch = arch
+        self._msvc_version = msvc_version
+
+    @property
+    def sdk_ver(self):
+        """Accessor for Windows SDK version property
+
+        Note: This property may not be set by
+        the calling context and as such this property will
+        return an empty string
+
+        This property will ONLY be set if the SDK package
+        is a dependency somewhere in the Spack DAG of the package
+        for which we are constructing an MSVC compiler env.
+        Otherwise this property should be unset to allow the VCVARS
+        script to use its internal heuristics to determine the appropriate
+        SDK version.
+        """
+        if getattr(self, "_sdk_ver", None):
+            return self._sdk_ver + ".0"
+        return ""
+
+    @sdk_ver.setter
+    def sdk_ver(self, val):
+        self._sdk_ver = val
+
+    @property
+    def arch(self):
+        return self._arch
+
+    @property
+    def vcvars_ver(self):
+        return f"-vcvars_ver={self._msvc_version}"
+
+    def command_str(self):
+        script = super(VCVarsInvocation, self).command_str()
+        return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"
+
+
 def get_valid_fortran_pth(comp_ver):
     cl_ver = str(comp_ver)
-    sort_fn = lambda fc_ver: StrictVersion(fc_ver)
+    sort_fn = lambda fc_ver: Version(fc_ver)
     sort_fc_ver = sorted(list(avail_fc_version), key=sort_fn)
     for ver in sort_fc_ver:
         if ver in fortran_mapping:
-            if StrictVersion(cl_ver) <= StrictVersion(fortran_mapping[ver]):
+            if Version(cl_ver) <= Version(fortran_mapping[ver]):
                 return fc_path[ver]
     return None
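Note: `CmdCall` joins each `command_str()` with `&&` and appends `set`, so the subshell prints its resulting environment; callers then split the `NAME=value` lines back into a dict, exactly as `setup_custom_environment` does below. A minimal sketch (script path, arch, and versions are illustrative):

    vcvars = VCVarsInvocation(r"C:\vs\Auxiliary\Build\vcvars64.bat", "amd64", "14.3")
    env_dump = CmdCall(vcvars)()  # runs: cmd /u /c "...vcvars64.bat amd64  -vcvars_ver=14.3 && set"
    env = dict(
        (key, value)
        for key, _, value in (line.partition("=") for line in env_dump.splitlines())
    )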
@@ -70,27 +153,58 @@ class Msvc(Compiler):

     #: Regex used to extract version from compiler's output
     version_regex = r"([1-9][0-9]*\.[0-9]*\.[0-9]*)"
+    # The MSVC compiler class overrides this to prevent instances
+    # of erroneous matching on executable names that cannot be msvc
+    # compilers
+    suffixes = []

-    # Initialize, deferring to base class but then adding the vcvarsallfile
-    # file based on compiler executable path.
+    is_supported_on_platform = lambda x: isinstance(x, spack.platforms.Windows)

     def __init__(self, *args, **kwargs):
-        new_pth = [pth if pth else get_valid_fortran_pth(args[0].version) for pth in args[3]]
-        args[3][:] = new_pth
+        # The positional argument "paths" is later parsed and processed by the base class
+        # via the call to `super` later in this method
+        paths = args[3]
+        # The positional argument "cspec" is also parsed and handled by the base class
+        # constructor
+        cspec = args[0]
+        new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
+        paths[:] = new_pth
+        # Initialize, deferring to base class but then adding the vcvarsallfile
+        # file based on compiler executable path.
         super().__init__(*args, **kwargs)
-        if os.getenv("ONEAPI_ROOT"):
+        # To use the MSVC compilers, VCVARS must be invoked.
+        # VCVARS is located at a fixed location, referenceable
+        # idiomatically by the following relative path from the
+        # compiler.
+        # Spack first finds the compilers via VSWHERE
+        # and stores their path, but their respective VCVARS
+        # file must be invoked before usage.
+        env_cmds = []
+        compiler_root = os.path.join(self.cc, "../../../../../../..")
+        vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
+        # get current platform architecture and format for vcvars argument
+        arch = spack.platforms.real_host().default.lower()
+        arch = arch.replace("-", "_")
+        self.vcvars_call = VCVarsInvocation(vcvars_script_path, arch, self.msvc_version)
+        env_cmds.append(self.vcvars_call)
+        # Below is a check for a valid fortran path
+        # paths has c, cxx, fc, and f77 paths in that order
+        # paths[2] refers to the fc path and is a generic check
+        # for a fortran compiler
+        if paths[2]:
-            # If this found, it sets all the vars
-            self.setvarsfile = os.path.join(os.getenv("ONEAPI_ROOT"), "setvars.bat")
-        else:
-            # To use the MSVC compilers, VCVARS must be invoked
-            # VCVARS is located at a fixed location, referencable
-            # idiomatically by the following relative path from the
-            # compiler.
-            # Spack first finds the compilers via VSWHERE
-            # and stores their path, but their respective VCVARS
-            # file must be invoked before useage.
-            self.setvarsfile = os.path.abspath(os.path.join(self.cc, "../../../../../../.."))
-            self.setvarsfile = os.path.join(self.setvarsfile, "Auxiliary", "Build", "vcvars64.bat")
+            oneapi_root = os.getenv("ONEAPI_ROOT")
+            oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
+            oneapi_version_setvars = os.path.join(
+                oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
+            )
+            # order matters here, the specific version env must be invoked first,
+            # otherwise it will be ignored if the root setvars sets up the oneapi
+            # env first
+            env_cmds.extend(
+                [VarsInvocation(oneapi_version_setvars), VarsInvocation(oneapi_root_setvars)]
+            )
+        self.msvc_compiler_environment = CmdCall(*env_cmds)

     @property
     def msvc_version(self):
@@ -119,16 +233,30 @@ def platform_toolset_ver(self):
         """
         return self.msvc_version[:2].joined.string[:3]

-    @property
-    def cl_version(self):
-        """Cl toolset version"""
+    def _compiler_version(self, compiler):
+        """Returns version object for given compiler"""
+        # ignore_errors below is true here due to ifx's
+        # non-zero return code if it is not provided
+        # an input file
         return Version(
             re.search(
                 Msvc.version_regex,
-                spack.compiler.get_compiler_version_output(self.cc, version_arg=None),
+                spack.compiler.get_compiler_version_output(
+                    compiler, version_arg=None, ignore_errors=True
+                ),
             ).group(1)
         )

+    @property
+    def cl_version(self):
+        """Cl toolset version"""
+        return self._compiler_version(self.cc)
+
+    @property
+    def ifx_version(self):
+        """Ifx compiler version associated with this version of MSVC"""
+        return self._compiler_version(self.fc)
+
     @property
     def vs_root(self):
         # The MSVC install root is located at a fixed level above the compiler
@@ -146,27 +274,12 @@ def setup_custom_environment(self, pkg, env):
         # output, sort into dictionary, use that to make the build
         # environment.

-        # get current platform architecture and format for vcvars argument
-        arch = spack.platforms.real_host().default.lower()
-        arch = arch.replace("-", "_")
-        # vcvars can target specific sdk versions, force it to pick up concretized sdk
-        # version, if needed by spec
-        sdk_ver = (
-            ""
-            if "win-sdk" not in pkg.spec or pkg.name == "win-sdk"
-            else pkg.spec["win-sdk"].version.string + ".0"
-        )
-        # provide vcvars with msvc version selected by concretization,
-        # not whatever it happens to pick up on the system (highest available version)
-        out = subprocess.check_output(  # novermin
-            'cmd /u /c "{}" {} {} {} && set'.format(
-                self.setvarsfile, arch, sdk_ver, "-vcvars_ver=%s" % self.msvc_version
-            ),
-            stderr=subprocess.STDOUT,
-        )
-        if sys.version_info[0] >= 3:
-            out = out.decode("utf-16le", errors="replace")  # novermin
+        if pkg.name != "win-sdk" and "win-sdk" in pkg.spec:
+            self.vcvars_call.sdk_ver = pkg.spec["win-sdk"].version.string

+        out = self.msvc_compiler_environment()
         int_env = dict(
             (key, value)
             for key, _, value in (line.partition("=") for line in out.splitlines())
@@ -744,8 +744,11 @@ def concretize_specs_together(*abstract_specs, **kwargs):
 def _concretize_specs_together_new(*abstract_specs, **kwargs):
     import spack.solver.asp

+    allow_deprecated = spack.config.get("config:deprecated", False)
     solver = spack.solver.asp.Solver()
-    result = solver.solve(abstract_specs, tests=kwargs.get("tests", False))
+    result = solver.solve(
+        abstract_specs, tests=kwargs.get("tests", False), allow_deprecated=allow_deprecated
+    )
     result.raise_if_unsat()
     return [s.copy() for s in result.specs]

@@ -857,12 +857,12 @@ def add_from_file(filename, scope=None):
 def add(fullpath, scope=None):
     """Add the given configuration to the specified config scope.
     Add accepts a path. If you want to add from a filename, use add_from_file"""
-
     components = process_config_path(fullpath)

     has_existing_value = True
     path = ""
     override = False
+    value = syaml.load_config(components[-1])
     for idx, name in enumerate(components[:-1]):
         # First handle double colons in constructing path
         colon = "::" if override else ":" if path else ""
@@ -883,14 +883,14 @@ def add(fullpath, scope=None):
         existing = get_valid_type(path)

         # construct value from this point down
-        value = syaml.load_config(components[-1])
         for component in reversed(components[idx + 1 : -1]):
             value = {component: value}
         break

+    if override:
+        path += "::"
+
     if has_existing_value:
-        path, _, value = fullpath.rpartition(":")
-        value = syaml.load_config(value)
         existing = get(path, scope=scope)

         # append values to lists
@@ -1231,11 +1231,17 @@ def they_are(t):
     return copy.copy(source)


-#
-# Process a path argument to config.set() that may contain overrides ('::' or
-# trailing ':')
-#
 def process_config_path(path):
+    """Process a path argument to config.set() that may contain overrides ('::' or
+    trailing ':')
+
+    Note: quoted value path components will be processed as a single value (escaping colons);
+    quoted path components outside of the value will be considered ill-formed and will
+    raise.
+    e.g. `this:is:a:path:'value:with:colon'` will yield:
+
+        [this, is, a, path, value:with:colon]
+    """
     result = []
     if path.startswith(":"):
         raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".format(path), "")
@@ -1263,6 +1269,17 @@ def process_config_path(path):
             front.append = True

         result.append(front)
+
+        quote = "['\"]"
+        not_quote = "[^'\"]"
+
+        if re.match(f"^{quote}", path):
+            m = re.match(rf"^({quote}{not_quote}+{quote})$", path)
+            if not m:
+                raise ValueError("Quotes indicate value, but there are additional path entries")
+            result.append(m.group(1))
+            break
+
     return result

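Note: per the docstring example, only a quoted final component escapes colon splitting; roughly (the exact element types follow the implementation above, which keeps syaml-flavored strings):

    process_config_path("this:is:a:path:'value:with:colon'")
    # -> ["this", "is", "a", "path", "value:with:colon"]  (quoted tail kept as one value)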
@@ -4,6 +4,9 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import json
+import os
+import traceback
+import warnings

 import jsonschema
 import jsonschema.exceptions
@@ -11,6 +14,7 @@
 import llnl.util.tty as tty

 import spack.cmd
+import spack.deptypes as dt
 import spack.error
 import spack.hash_types as hash_types
 import spack.platforms
@@ -45,9 +49,29 @@ def translated_compiler_name(manifest_compiler_name):
     )


-def compiler_from_entry(entry):
+def compiler_from_entry(entry: dict, manifest_path: str):
+    # Note that manifest_path is only passed here to compose a
+    # useful warning message when paths appear to be missing.
     compiler_name = translated_compiler_name(entry["name"])
-    paths = entry["executables"]

+    if "prefix" in entry:
+        prefix = entry["prefix"]
+        paths = dict(
+            (lang, os.path.join(prefix, relpath))
+            for (lang, relpath) in entry["executables"].items()
+        )
+    else:
+        paths = entry["executables"]
+
+    # Do a check for missing paths. Note that this isn't possible for
+    # all compiler entries, since their "paths" might actually be
+    # exe names like "cc" that depend on modules being loaded. Cray
+    # manifest entries are always paths though.
+    missing_paths = []
+    for path in paths.values():
+        if not os.path.exists(path):
+            missing_paths.append(path)
+
     # to instantiate a compiler class we may need a concrete version:
     version = "={}".format(entry["version"])
     arch = entry["arch"]
@@ -56,8 +80,18 @@ def compiler_from_entry(entry):

     compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
     spec = spack.spec.CompilerSpec(compiler_cls.name, version)
-    paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
-    return compiler_cls(spec, operating_system, target, paths)
+    path_list = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
+
+    if missing_paths:
+        warnings.warn(
+            "Manifest entry refers to nonexistent paths:\n\t"
+            + "\n\t".join(missing_paths)
+            + f"\nfor {str(spec)}"
+            + f"\nin {manifest_path}"
+            + "\nPlease report this issue"
+        )
+
+    return compiler_cls(spec, operating_system, target, path_list)

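Note: for context, a manifest compiler entry may now carry a `prefix` plus relative executable paths. A hedged, abbreviated example of the shape this function consumes (all values invented; the real entry also carries os/target information under "arch"):

    entry = {
        "name": "gcc",
        "version": "12.2.0",
        "prefix": "/opt/gcc/12.2.0",
        "executables": {"cc": "bin/gcc", "cxx": "bin/g++", "fc": "bin/gfortran"},
        # plus an "arch" sub-dict, consumed a few lines below
    }
    compiler = compiler_from_entry(entry, "/path/to/manifest.json")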

 def spec_from_entry(entry):
@@ -158,13 +192,13 @@ def entries_to_specs(entries):
         dependencies = entry["dependencies"]
         for name, properties in dependencies.items():
             dep_hash = properties["hash"]
-            deptypes = properties["type"]
+            depflag = dt.canonicalize(properties["type"])
             if dep_hash in spec_dict:
                 if entry["hash"] not in spec_dict:
                     continue
                 parent_spec = spec_dict[entry["hash"]]
                 dep_spec = spec_dict[dep_hash]
-                parent_spec._add_dependency(dep_spec, deptypes=deptypes, virtuals=())
+                parent_spec._add_dependency(dep_spec, depflag=depflag, virtuals=())

     for spec in spec_dict.values():
         spack.spec.reconstruct_virtuals_on_edges(spec)
@@ -186,12 +220,21 @@ def read(path, apply_updates):
     tty.debug("{0}: {1} specs read from manifest".format(path, str(len(specs))))
     compilers = list()
     if "compilers" in json_data:
-        compilers.extend(compiler_from_entry(x) for x in json_data["compilers"])
+        compilers.extend(compiler_from_entry(x, path) for x in json_data["compilers"])
     tty.debug("{0}: {1} compilers read from manifest".format(path, str(len(compilers))))
     # Filter out the compilers that already appear in the configuration
     compilers = spack.compilers.select_new_compilers(compilers)
     if apply_updates and compilers:
-        spack.compilers.add_compilers_to_config(compilers, init_config=False)
+        for compiler in compilers:
+            try:
+                spack.compilers.add_compilers_to_config([compiler], init_config=False)
+            except Exception:
+                warnings.warn(
+                    f"Could not add compiler {str(compiler.spec)}: "
+                    f"\n\tfrom manifest: {path}"
+                    "\nPlease reexecute with 'spack -d' and include the stack trace"
+                )
+                tty.debug(f"Include this\n{traceback.format_exc()}")
     if apply_updates:
         for spec in specs.values():
             spack.store.STORE.db.add(spec, directory_layout=None)
@@ -27,6 +27,8 @@
 import time
 from typing import Any, Callable, Dict, Generator, List, NamedTuple, Set, Type, Union

+import spack.deptypes as dt
+
 try:
     import uuid

@@ -89,7 +91,7 @@

 #: Types of dependencies tracked by the database
 #: We store by DAG hash, so we track the dependencies that the DAG hash includes.
-_TRACKED_DEPENDENCIES = ht.dag_hash.deptype
+_TRACKED_DEPENDENCIES = ht.dag_hash.depflag

 #: Default list of fields written for each install record
 DEFAULT_INSTALL_RECORD_FIELDS = (
@@ -795,7 +797,7 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
                 tty.warn(msg)
                 continue

-            spec._add_dependency(child, deptypes=dtypes, virtuals=virtuals)
+            spec._add_dependency(child, depflag=dt.canonicalize(dtypes), virtuals=virtuals)

     def _read_from_file(self, filename):
         """Fill database from file, do not maintain old data.
@@ -1146,7 +1148,7 @@ def _add(
         # Retrieve optional arguments
         installation_time = installation_time or _now()

-        for edge in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
+        for edge in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
             if edge.spec.dag_hash() in self._data:
                 continue
             # allow missing build-only deps. This prevents excessive
@@ -1154,7 +1156,7 @@ def _add(
             # is missing a build dep; there's no need to install the
             # build dep's build dep first, and there's no need to warn
             # about it missing.
-            dep_allow_missing = allow_missing or edge.deptypes == ("build",)
+            dep_allow_missing = allow_missing or edge.depflag == dt.BUILD
             self._add(
                 edge.spec,
                 directory_layout,
@@ -1198,10 +1200,10 @@ def _add(
         self._data[key] = InstallRecord(new_spec, path, installed, ref_count=0, **extra_args)

         # Connect dependencies from the DB to the new copy.
-        for dep in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
+        for dep in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
             dkey = dep.spec.dag_hash()
             upstream, record = self.query_by_spec_hash(dkey)
-            new_spec._add_dependency(record.spec, deptypes=dep.deptypes, virtuals=dep.virtuals)
+            new_spec._add_dependency(record.spec, depflag=dep.depflag, virtuals=dep.virtuals)
             if not upstream:
                 record.ref_count += 1

@@ -1371,7 +1373,13 @@ def deprecate(self, spec, deprecator):
         return self._deprecate(spec, deprecator)

     @_autospec
-    def installed_relatives(self, spec, direction="children", transitive=True, deptype="all"):
+    def installed_relatives(
+        self,
+        spec,
+        direction="children",
+        transitive=True,
+        deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
+    ):
         """Return installed specs related to this one."""
         if direction not in ("parents", "children"):
             raise ValueError("Invalid direction: %s" % direction)
@@ -3,64 +3,11 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Data structures that represent Spack's dependency relationships."""
-from typing import Dict, List, Optional, Set, Tuple, Union
+from typing import Dict, List

+import spack.deptypes as dt
 import spack.spec

-#: The types of dependency relationships that Spack understands.
-all_deptypes = ("build", "link", "run", "test")
-
-#: Default dependency type if none is specified
-default_deptype = ("build", "link")
-
-#: Type hint for the arguments accepting a dependency type
-DependencyArgument = Union[str, List[str], Tuple[str, ...]]
-
-
-def deptype_chars(*type_tuples: str) -> str:
-    """Create a string representing deptypes for many dependencies.
-
-    The string will be some subset of 'blrt', like 'bl ', 'b t', or
-    ' lr ' where each letter in 'blrt' stands for 'build', 'link',
-    'run', and 'test' (the dependency types).
-
-    For a single dependency, this just indicates that the dependency has
-    the indicated deptypes. For a list of dependencies, this shows
-    whether ANY dependency in the list has the deptypes (so the deptypes
-    are merged).
-    """
-    types: Set[str] = set()
-    for t in type_tuples:
-        if t:
-            types.update(t)
-
-    return "".join(t[0] if t in types else " " for t in all_deptypes)
-
-
-def canonical_deptype(deptype: DependencyArgument) -> Tuple[str, ...]:
-    """Convert deptype to a canonical sorted tuple, or raise ValueError.
-
-    Args:
-        deptype: string representing dependency type, or a list/tuple of such strings.
-            Can also be the builtin function ``all`` or the string 'all', which result in
-            a tuple of all dependency types known to Spack.
-    """
-    if deptype in ("all", all):
-        return all_deptypes
-
-    elif isinstance(deptype, str):
-        if deptype not in all_deptypes:
-            raise ValueError("Invalid dependency type: %s" % deptype)
-        return (deptype,)
-
-    elif isinstance(deptype, (tuple, list, set)):
-        bad = [d for d in deptype if d not in all_deptypes]
-        if bad:
-            raise ValueError("Invalid dependency types: %s" % ",".join(str(t) for t in bad))
-        return tuple(sorted(set(deptype)))
-
-    raise ValueError("Invalid dependency type: %s" % repr(deptype))
-

 class Dependency:
     """Class representing metadata for a dependency on a package.
@@ -93,7 +40,7 @@ def __init__(
         self,
         pkg: "spack.package_base.PackageBase",
         spec: "spack.spec.Spec",
-        type: Optional[Tuple[str, ...]] = default_deptype,
+        depflag: dt.DepFlag = dt.DEFAULT,
     ):
         """Create a new Dependency.

@@ -110,11 +57,7 @@ def __init__(
         # This dict maps condition specs to lists of Patch objects, just
         # as the patches dict on packages does.
         self.patches: Dict[spack.spec.Spec, "List[spack.patch.Patch]"] = {}
-
-        if type is None:
-            self.type = set(default_deptype)
-        else:
-            self.type = set(type)
+        self.depflag = depflag

     @property
     def name(self) -> str:
@@ -124,7 +67,7 @@ def name(self) -> str:
     def merge(self, other: "Dependency"):
         """Merge constraints, deptypes, and patches of other into self."""
         self.spec.constrain(other.spec)
-        self.type |= other.type
+        self.depflag |= other.depflag

         # concatenate patch lists, or just copy them in
         for cond, p in other.patches.items():
@@ -135,5 +78,5 @@ def merge(self, other: "Dependency"):
             self.patches[cond] = other.patches[cond]

     def __repr__(self) -> str:
-        types = deptype_chars(*self.type)
+        types = dt.flag_to_chars(self.depflag)
         return f"<Dependency: {self.pkg.name} -> {self.spec} [{types}]>"
123  lib/spack/spack/deptypes.py  (new file)
@@ -0,0 +1,123 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+"""Data structures that represent Spack's edge types."""
+
+from typing import Iterable, List, Tuple, Union
+
+#: Type hint for the low-level dependency input (enum.Flag is too slow)
+DepFlag = int
+
+#: Type hint for the high-level dependency input
+DepTypes = Union[str, List[str], Tuple[str, ...]]
+
+#: Individual dependency types
+DepType = str  # Python 3.8: Literal["build", "link", "run", "test"]
+
+# Flag values. NOTE: these values are not arbitrary, since hash computation imposes
+# the order (link, run, build, test) when depending on the same package multiple times,
+# and we rely on default integer comparison to sort dependency types.
+# New dependency types should be appended.
+LINK = 0b0001
+RUN = 0b0010
+BUILD = 0b0100
+TEST = 0b1000
+
+#: The types of dependency relationships that Spack understands.
+ALL_TYPES: Tuple[DepType, ...] = ("build", "link", "run", "test")
+
+#: Default dependency type if none is specified
+DEFAULT_TYPES: Tuple[DepType, ...] = ("build", "link")
+
+#: A flag with all dependency types set
+ALL: DepFlag = BUILD | LINK | RUN | TEST
+
+#: Default dependency flag if none is specified
+DEFAULT: DepFlag = BUILD | LINK
+
+#: A tuple of all flag components
+ALL_FLAGS: Tuple[DepFlag, DepFlag, DepFlag, DepFlag] = (BUILD, LINK, RUN, TEST)
+
+
+def flag_from_string(s: str) -> DepFlag:
+    if s == "build":
+        return BUILD
+    elif s == "link":
+        return LINK
+    elif s == "run":
+        return RUN
+    elif s == "test":
+        return TEST
+    else:
+        raise ValueError(f"Invalid dependency type: {s}")
+
+
+def flag_from_strings(deptype: Iterable[str]) -> DepFlag:
+    """Transform an iterable of deptype strings into a flag."""
+    flag = 0
+    for deptype_str in deptype:
+        flag |= flag_from_string(deptype_str)
+    return flag
+
+
+def canonicalize(deptype: DepTypes) -> DepFlag:
+    """Convert deptype user input to a DepFlag, or raise ValueError.
+
+    Args:
+        deptype: string representing dependency type, or a list/tuple of such strings.
+            Can also be the builtin function ``all`` or the string 'all', which result in
+            a flag with all dependency types set.
+    """
+    if deptype in ("all", all):
+        return ALL
+
+    if isinstance(deptype, str):
+        return flag_from_string(deptype)
+
+    if isinstance(deptype, (tuple, list, set)):
+        return flag_from_strings(deptype)
+
+    raise ValueError(f"Invalid dependency type: {deptype!r}")
+
+
+def flag_to_tuple(x: DepFlag) -> Tuple[DepType, ...]:
+    deptype: List[DepType] = []
+    if x & BUILD:
+        deptype.append("build")
+    if x & LINK:
+        deptype.append("link")
+    if x & RUN:
+        deptype.append("run")
+    if x & TEST:
+        deptype.append("test")
+    return tuple(deptype)
+
+
+def flag_to_string(x: DepFlag) -> DepType:
+    if x == BUILD:
+        return "build"
+    elif x == LINK:
+        return "link"
+    elif x == RUN:
+        return "run"
+    elif x == TEST:
+        return "test"
+    else:
+        raise ValueError(f"Invalid dependency type flag: {x}")
+
+
+def flag_to_chars(depflag: DepFlag) -> str:
+    """Create a string representing deptypes for many dependencies.
+
+    The string will be some subset of 'blrt', like 'bl ', 'b t', or
+    ' lr ' where each letter in 'blrt' stands for 'build', 'link',
+    'run', and 'test' (the dependency types).
+
+    For a single dependency, this just indicates that the dependency has
+    the indicated deptypes. For a list of dependencies, this shows
+    whether ANY dependency in the list has the deptypes (so the deptypes
+    are merged)."""
+    return "".join(
+        t_str[0] if t_flag & depflag else " " for t_str, t_flag in zip(ALL_TYPES, ALL_FLAGS)
+    )
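Note: since `DepFlag` is a plain int bitmask, membership tests and unions are single bitwise operations, which is the point of replacing tuple-of-strings deptypes throughout this diff. A quick sketch of the round-trip API defined above:

    import spack.deptypes as dt

    flag = dt.canonicalize(("build", "run"))   # BUILD | RUN
    assert flag & dt.BUILD                     # membership is a bitwise test
    assert dt.flag_to_tuple(flag) == ("build", "run")
    assert dt.flag_to_chars(flag) == "b r "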
@@ -3,13 +3,14 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 from .common import DetectedPackage, executable_prefix, update_configuration
-from .path import by_executable, by_library, executables_in_path
+from .path import by_path, executables_in_path
 from .test import detection_tests

 __all__ = [
     "DetectedPackage",
-    "by_library",
-    "by_executable",
+    "by_path",
     "executables_in_path",
     "executable_prefix",
     "update_configuration",
     "detection_tests",
 ]
@@ -13,13 +13,13 @@
 The module also contains other functions that might be useful across different
 detection mechanisms.
 """
 import collections
 import glob
 import itertools
 import os
 import os.path
 import re
 import sys
 from typing import Dict, List, NamedTuple, Optional, Set, Tuple, Union

 import llnl.util.tty

@@ -29,12 +29,28 @@
 import spack.util.spack_yaml
 import spack.util.windows_registry

-#: Information on a package that has been detected
-DetectedPackage = collections.namedtuple("DetectedPackage", ["spec", "prefix"])
+
+class DetectedPackage(NamedTuple):
+    """Information on a package that has been detected."""
+
+    #: Spec that was detected
+    spec: spack.spec.Spec
+    #: Prefix of the spec
+    prefix: str
+
+    def __reduce__(self):
+        return DetectedPackage.restore, (str(self.spec), self.prefix, self.spec.extra_attributes)
+
+    @staticmethod
+    def restore(
+        spec_str: str, prefix: str, extra_attributes: Optional[Dict[str, str]]
+    ) -> "DetectedPackage":
+        spec = spack.spec.Spec.from_detection(spec_str=spec_str, extra_attributes=extra_attributes)
+        return DetectedPackage(spec=spec, prefix=prefix)


-def _externals_in_packages_yaml():
-    """Return all the specs mentioned as externals in packages.yaml"""
+def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:
+    """Returns all the specs mentioned as externals in packages.yaml"""
     packages_yaml = spack.config.get("packages")
     already_defined_specs = set()
     for pkg_name, package_configuration in packages_yaml.items():
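Note: the explicit `__reduce__` keeps `DetectedPackage` picklable even though `Spec` objects do not serialize cheaply: only the spec string, prefix, and extra attributes cross process boundaries, and `restore` rebuilds the spec. That is what lets detection results flow back from `concurrent.futures` workers (the executor import appears later in this diff). A round-trip sketch with an invented spec:

    import pickle

    detected = DetectedPackage(
        spec=spack.spec.Spec.from_detection(spec_str="cmake@3.27.4"), prefix="/usr"
    )
    restored = pickle.loads(pickle.dumps(detected))  # dispatches through DetectedPackage.restore
    assert str(restored.spec) == str(detected.spec) and restored.prefix == "/usr"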
@@ -43,7 +59,12 @@ def _externals_in_packages_yaml():
     return already_defined_specs


-def _pkg_config_dict(external_pkg_entries):
+ExternalEntryType = Union[str, Dict[str, str]]
+
+
+def _pkg_config_dict(
+    external_pkg_entries: List[DetectedPackage],
+) -> Dict[str, Union[bool, List[Dict[str, ExternalEntryType]]]]:
     """Generate a package specific config dict according to the packages.yaml schema.

     This does not generate the entire packages.yaml. For example, given some
@@ -65,7 +86,10 @@ def _pkg_config_dict(external_pkg_entries):
         if not _spec_is_valid(e.spec):
             continue

-        external_items = [("spec", str(e.spec)), ("prefix", e.prefix)]
+        external_items: List[Tuple[str, ExternalEntryType]] = [
+            ("spec", str(e.spec)),
+            ("prefix", e.prefix),
+        ]
         if e.spec.external_modules:
             external_items.append(("modules", e.spec.external_modules))

@@ -83,15 +107,14 @@
     return pkg_dict


-def _spec_is_valid(spec):
+def _spec_is_valid(spec: spack.spec.Spec) -> bool:
     try:
         str(spec)
     except spack.error.SpackError:
-        # It is assumed here that we can at least extract the package name from
-        # the spec so we can look up the implementation of
-        # determine_spec_details
-        msg = "Constructed spec for {0} does not have a string representation"
-        llnl.util.tty.warn(msg.format(spec.name))
+        # It is assumed here that we can at least extract the package name from the spec so we
+        # can look up the implementation of determine_spec_details
+        msg = f"Constructed spec for {spec.name} does not have a string representation"
+        llnl.util.tty.warn(msg)
         return False

     try:
@@ -106,7 +129,7 @@ def _spec_is_valid(spec):
     return True


-def path_to_dict(search_paths):
+def path_to_dict(search_paths: List[str]):
     """Return dictionary[fullpath]: basename from list of paths"""
     path_to_lib = {}
     # Reverse order of search directories so that a lib in the first
@@ -124,7 +147,7 @@ def path_to_dict(search_paths):
     return path_to_lib


-def is_executable(file_path):
+def is_executable(file_path: str) -> bool:
     """Return True if the path passed as argument is that of an executable"""
     return os.path.isfile(file_path) and os.access(file_path, os.X_OK)
@@ -146,7 +169,7 @@ def _convert_to_iterable(single_val_or_multiple):
     return [x]


-def executable_prefix(executable_dir):
+def executable_prefix(executable_dir: str) -> str:
     """Given a directory where an executable is found, guess the prefix
     (i.e. the "root" directory of that installation) and return it.

@@ -167,12 +190,12 @@ def executable_prefix(executable_dir):
     return os.sep.join(components[:idx])


-def library_prefix(library_dir):
-    """Given a directory where an library is found, guess the prefix
+def library_prefix(library_dir: str) -> str:
+    """Given a directory where a library is found, guess the prefix
     (i.e. the "root" directory of that installation) and return it.

     Args:
-        library_dir: directory where an library is found
+        library_dir: directory where a library is found
     """
     # Given a prefix where a library is found, assuming that prefix
     # contains /lib/ or /lib64/, strip off the 'lib' or 'lib64' directory
@@ -195,13 +218,17 @@ def library_prefix(library_dir):
     return library_dir


-def update_configuration(detected_packages, scope=None, buildable=True):
+def update_configuration(
+    detected_packages: Dict[str, List[DetectedPackage]],
+    scope: Optional[str] = None,
+    buildable: bool = True,
+) -> List[spack.spec.Spec]:
     """Add the packages passed as arguments to packages.yaml

     Args:
-        detected_packages (list): list of DetectedPackage objects to be added
-        scope (str): configuration scope where to add the detected packages
-        buildable (bool): whether the detected packages are buildable or not
+        detected_packages: list of DetectedPackage objects to be added
+        scope: configuration scope where to add the detected packages
+        buildable: whether the detected packages are buildable or not
     """
     predefined_external_specs = _externals_in_packages_yaml()
     pkg_to_cfg, all_new_specs = {}, []
@@ -209,7 +236,10 @@ def update_configuration(detected_packages, scope=None, buildable=True):
         new_entries = [e for e in entries if (e.spec not in predefined_external_specs)]

         pkg_config = _pkg_config_dict(new_entries)
-        all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in pkg_config.get("externals", [])])
+        external_entries = pkg_config.get("externals", [])
+        assert not isinstance(external_entries, bool), "unexpected value for external entry"
+
+        all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in external_entries])
         if buildable is False:
             pkg_config["buildable"] = False
         pkg_to_cfg[package_name] = pkg_config
@@ -222,16 +252,19 @@
     return all_new_specs


-def _windows_drive():
-    """Return Windows drive string extracted from PROGRAMFILES
-    env var, which is guaranteed to be defined for all logins"""
-    drive = re.match(r"([a-zA-Z]:)", os.environ["PROGRAMFILES"]).group(1)
-    return drive
+def _windows_drive() -> str:
+    """Return Windows drive string extracted from the PROGRAMFILES environment variable,
+    which is guaranteed to be defined for all logins.
+    """
+    match = re.match(r"([a-zA-Z]:)", os.environ["PROGRAMFILES"])
+    if match is None:
+        raise RuntimeError("cannot read the PROGRAMFILES environment variable")
+    return match.group(1)

 class WindowsCompilerExternalPaths:
     @staticmethod
-    def find_windows_compiler_root_paths():
+    def find_windows_compiler_root_paths() -> List[str]:
         """Helper for Windows compiler installation root discovery

         At the moment simply returns location of VS install paths from VSWhere
@@ -239,7 +272,7 @@ def find_windows_compiler_root_paths():
         return list(winOs.WindowsOs.vs_install_paths)

     @staticmethod
-    def find_windows_compiler_cmake_paths():
+    def find_windows_compiler_cmake_paths() -> List[str]:
         """Semi hard-coded search path for cmake bundled with MSVC"""
         return [
             os.path.join(
@@ -249,7 +282,7 @@ def find_windows_compiler_cmake_paths():
         ]

     @staticmethod
-    def find_windows_compiler_ninja_paths():
+    def find_windows_compiler_ninja_paths() -> List[str]:
         """Semi hard-coded search heuristic for locating ninja bundled with MSVC"""
         return [
             os.path.join(path, "Common7", "IDE", "CommonExtensions", "Microsoft", "CMake", "Ninja")
@@ -257,7 +290,7 @@ def find_windows_compiler_ninja_paths():
         ]

     @staticmethod
-    def find_windows_compiler_bundled_packages():
+    def find_windows_compiler_bundled_packages() -> List[str]:
         """Return all MSVC compiler bundled packages"""
         return (
             WindowsCompilerExternalPaths.find_windows_compiler_cmake_paths()
@@ -266,36 +299,39 @@


 class WindowsKitExternalPaths:
-    if sys.platform == "win32":
-        plat_major_ver = str(winOs.windows_version()[0])
-
     @staticmethod
-    def find_windows_kit_roots():
+    def find_windows_kit_roots() -> List[str]:
         """Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\"""
         if sys.platform != "win32":
             return []
         program_files = os.environ["PROGRAMFILES(x86)"]
-        kit_base = os.path.join(
-            program_files, "Windows Kits", WindowsKitExternalPaths.plat_major_ver
-        )
-        return kit_base
+        kit_base = os.path.join(program_files, "Windows Kits", "**")
+        return glob.glob(kit_base)

     @staticmethod
-    def find_windows_kit_bin_paths(kit_base=None):
+    def find_windows_kit_bin_paths(kit_base: Optional[str] = None) -> List[str]:
         """Returns Windows kit bin directory per version"""
         kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
-        kit_bin = os.path.join(kit_base, "bin")
-        return glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\"))
+        assert kit_base, "Unexpectedly empty value for Windows kit base path"
+        kit_paths = []
+        for kit in kit_base:
+            kit_bin = os.path.join(kit, "bin")
+            kit_paths.extend(glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\")))
+        return kit_paths

     @staticmethod
-    def find_windows_kit_lib_paths(kit_base=None):
+    def find_windows_kit_lib_paths(kit_base: Optional[str] = None) -> List[str]:
         """Returns Windows kit lib directory per version"""
         kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
-        kit_lib = os.path.join(kit_base, "Lib")
-        return glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\"))
+        assert kit_base, "Unexpectedly empty value for Windows kit base path"
+        kit_paths = []
+        for kit in kit_base:
+            kit_lib = os.path.join(kit, "Lib")
+            kit_paths.extend(glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\")))
+        return kit_paths

     @staticmethod
-    def find_windows_driver_development_kit_paths():
+    def find_windows_driver_development_kit_paths() -> List[str]:
         """Provides a list of all installation paths
         for the WDK by version and architecture
         """
@@ -303,7 +339,7 @@ def find_windows_driver_development_kit_paths():
         return WindowsKitExternalPaths.find_windows_kit_lib_paths(wdk_content_root)

     @staticmethod
-    def find_windows_kit_reg_installed_roots_paths():
+    def find_windows_kit_reg_installed_roots_paths() -> List[str]:
         reg = spack.util.windows_registry.WindowsRegistryView(
             "SOFTWARE\\Microsoft\\Windows Kits\\Installed Roots",
             root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE,
@@ -311,26 +347,33 @@ def find_windows_kit_reg_installed_roots_paths():
         if not reg:
             # couldn't find key, return empty list
             return []
-        return WindowsKitExternalPaths.find_windows_kit_lib_paths(
-            reg.get_value("KitsRoot%s" % WindowsKitExternalPaths.plat_major_ver).value
-        )
+        kit_root_reg = re.compile(r"KitsRoot[0-9]+")
+        root_paths = []
+        for kit_root in filter(kit_root_reg.match, reg.get_values().keys()):
+            root_paths.extend(
+                WindowsKitExternalPaths.find_windows_kit_lib_paths(reg.get_value(kit_root).value)
+            )
+        return root_paths

     @staticmethod
-    def find_windows_kit_reg_sdk_paths():
-        reg = spack.util.windows_registry.WindowsRegistryView(
-            "SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v%s.0"
-            % WindowsKitExternalPaths.plat_major_ver,
+    def find_windows_kit_reg_sdk_paths() -> List[str]:
+        sdk_paths = []
+        sdk_regex = re.compile(r"v[0-9]+.[0-9]+")
+        windows_reg = spack.util.windows_registry.WindowsRegistryView(
+            "SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows",
             root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE,
         )
-        if not reg:
-            # couldn't find key, return empty list
-            return []
-        return WindowsKitExternalPaths.find_windows_kit_lib_paths(
-            reg.get_value("InstallationFolder").value
-        )
+        for key in filter(sdk_regex.match, [x.name for x in windows_reg.get_subkeys()]):
+            reg = windows_reg.get_subkey(key)
+            sdk_paths.extend(
+                WindowsKitExternalPaths.find_windows_kit_lib_paths(
+                    reg.get_value("InstallationFolder").value
+                )
+            )
+        return sdk_paths


-def find_win32_additional_install_paths():
+def find_win32_additional_install_paths() -> List[str]:
     """Not all programs on Windows live on the PATH
     Return a list of other potential install locations.
     """
@@ -357,13 +400,12 @@ def find_win32_additional_install_paths():
     return windows_search_ext


-def compute_windows_program_path_for_package(pkg):
-    """Given a package, attempt to compute its Windows
-    program files location, return list of best guesses
+def compute_windows_program_path_for_package(pkg: "spack.package_base.PackageBase") -> List[str]:
+    """Given a package, attempts to compute its Windows program files location,
+    and returns the list of best guesses.

     Args:
-        pkg (spack.package_base.PackageBase): package for which
-            Program Files location is to be computed
+        pkg: package for which Program Files location is to be computed
     """
     if sys.platform != "win32":
         return []
@@ -378,7 +420,7 @@ def compute_windows_program_path_for_package(pkg):
     ]


-def compute_windows_user_path_for_package(pkg):
+def compute_windows_user_path_for_package(pkg: "spack.package_base.PackageBase") -> List[str]:
     """Given a package attempt to compute its user scoped
     install location, return list of potential locations based
     on common heuristics. For more info on Windows user specific
@@ -2,15 +2,17 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""Detection of software installed in the system based on paths inspections
+"""Detection of software installed in the system, based on paths inspections
 and running executables.
 """
 import collections
+import concurrent.futures
 import os
 import os.path
 import re
 import sys
 import warnings
+from typing import Dict, List, Optional, Set, Tuple

 import llnl.util.filesystem
 import llnl.util.tty
@@ -18,7 +20,7 @@
 import spack.util.environment
 import spack.util.ld_so_conf

-from .common import (  # find_windows_compiler_bundled_packages,
+from .common import (
     DetectedPackage,
     WindowsCompilerExternalPaths,
     WindowsKitExternalPaths,
@@ -31,8 +33,13 @@
     path_to_dict,
 )

+#: Timeout used for package detection (seconds)
+DETECTION_TIMEOUT = 60
+if sys.platform == "win32":
+    DETECTION_TIMEOUT = 120
+

-def common_windows_package_paths():
+def common_windows_package_paths() -> List[str]:
     paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages()
     paths.extend(find_win32_additional_install_paths())
     paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
@@ -41,7 +48,7 @@ def common_windows_package_paths():
     return paths


-def executables_in_path(path_hints):
+def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
     """Get the paths of all executables available from the current PATH.

     For convenience, this is constructed as a dictionary where the keys are
@@ -52,7 +59,7 @@
     assumed there are two different instances of the executable.

     Args:
-        path_hints (list): list of paths to be searched. If None the list will be
+        path_hints: list of paths to be searched. If None the list will be
             constructed based on the PATH environment variable.
     """
     if sys.platform == "win32":
@@ -61,7 +68,9 @@ def executables_in_path(path_hints):
     return path_to_dict(search_paths)


-def libraries_in_ld_and_system_library_path(path_hints=None):
+def libraries_in_ld_and_system_library_path(
+    path_hints: Optional[List[str]] = None,
+) -> Dict[str, str]:
     """Get the paths of all libraries available from LD_LIBRARY_PATH,
     LIBRARY_PATH, DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and
     standard system library paths.
@@ -74,7 +83,7 @@ def libraries_in_ld_and_system_library_path(path_hints=None):
     assumed there are two different instances of the library.

     Args:
-        path_hints (list): list of paths to be searched. If None the list will be
+        path_hints: list of paths to be searched. If None the list will be
            constructed based on the set of LD_LIBRARY_PATH, LIBRARY_PATH,
            DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment
            variables as well as the standard system library paths.
@@ -90,7 +99,7 @@
     return path_to_dict(search_paths)


-def libraries_in_windows_paths(path_hints):
+def libraries_in_windows_paths(path_hints: List[str]) -> Dict[str, str]:
     path_hints.extend(spack.util.environment.get_path("PATH"))
     search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
     # on Windows, some libraries (.dlls) are found in the bin directory or sometimes
@@ -106,218 +115,253 @@ def libraries_in_windows_paths(path_hints):
|
||||
return path_to_dict(search_paths)
|
||||
|
||||
|
||||
def _group_by_prefix(paths):
def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]:
    groups = collections.defaultdict(set)
    for p in paths:
        groups[os.path.dirname(p)].add(p)
    return groups.items()
    return groups


# TODO consolidate this with by_executable
# Packages should be able to define both .libraries and .executables in the future
# determine_spec_details should get all relevant libraries and executables in one call
def by_library(packages_to_check, path_hints=None):
    # Techniques for finding libraries are determined on a per-recipe basis in
    # the determine_version class method. Some packages will extract the
    # version number from a shared library's filename.
    # Other libraries could use the strings function to extract it as described
    # in https://unix.stackexchange.com/questions/58846/viewing-linux-library-executable-version-info
    """Return the list of packages that have been detected on the system,
    searching by LD_LIBRARY_PATH, LIBRARY_PATH, DYLD_LIBRARY_PATH,
    DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths.

class Finder:
    """Inspects the file-system looking for packages. Guesses places where to look using PATH."""

    Args:
        packages_to_check (list): list of packages to be detected
        path_hints (list): list of paths to be searched. If None the list will be
            constructed based on the LD_LIBRARY_PATH, LIBRARY_PATH,
            DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH environment variables
            and standard system library paths.
    """
    # If no path hints from command line, initialize to empty list so
    # we can add default hints on a per-package basis
    path_hints = [] if path_hints is None else path_hints

    def path_hints(
        self, *, pkg: "spack.package_base.PackageBase", initial_guess: Optional[List[str]] = None
    ) -> List[str]:
        """Returns the list of paths to be searched.

    lib_pattern_to_pkgs = collections.defaultdict(list)
    for pkg in packages_to_check:
        if hasattr(pkg, "libraries"):
            for lib in pkg.libraries:
                lib_pattern_to_pkgs[lib].append(pkg)
        path_hints.extend(compute_windows_user_path_for_package(pkg))
        path_hints.extend(compute_windows_program_path_for_package(pkg))

        Args:
            pkg: package being detected
            initial_guess: initial list of paths from caller
        """
        result = initial_guess or []
        result.extend(compute_windows_user_path_for_package(pkg))
        result.extend(compute_windows_program_path_for_package(pkg))
        return result

    path_to_lib_name = (
        libraries_in_ld_and_system_library_path(path_hints=path_hints)
        if sys.platform != "win32"
        else libraries_in_windows_paths(path_hints)
    )

    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
        """Returns the list of patterns used to match candidate files.

    pkg_to_found_libs = collections.defaultdict(set)
    for lib_pattern, pkgs in lib_pattern_to_pkgs.items():
        compiled_re = re.compile(lib_pattern)
        for path, lib in path_to_lib_name.items():
            if compiled_re.search(lib):
                for pkg in pkgs:
                    pkg_to_found_libs[pkg].add(path)

        Args:
            pkg: package being detected
        """
        raise NotImplementedError("must be implemented by derived classes")

    pkg_to_entries = collections.defaultdict(list)
    resolved_specs = {}  # spec -> lib found for the spec

    def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
        """Returns a list of candidate files found on the system.

    for pkg, libs in pkg_to_found_libs.items():

        Args:
            patterns: search patterns to be used for matching files
            paths: paths where to search for files
        """
        raise NotImplementedError("must be implemented by derived classes")

    def prefix_from_path(self, *, path: str) -> str:
        """Given a path where a file was found, returns the corresponding prefix.

        Args:
            path: path of a detected file
        """
        raise NotImplementedError("must be implemented by derived classes")

    def detect_specs(
        self, *, pkg: "spack.package_base.PackageBase", paths: List[str]
    ) -> List[DetectedPackage]:
        """Given a list of files matching the search patterns, returns a list of detected specs.

        Args:
            pkg: package being detected
            paths: files matching the package search patterns
        """
        if not hasattr(pkg, "determine_spec_details"):
            llnl.util.tty.warn(
                "{0} must define 'determine_spec_details' in order"
                " for Spack to detect externally-provided instances"
                " of the package.".format(pkg.name)
            warnings.warn(
                f"{pkg.name} must define 'determine_spec_details' in order"
                f" for Spack to detect externally-provided instances"
                f" of the package."
            )
            continue
            return []

        for prefix, libs_in_prefix in sorted(_group_by_prefix(libs)):
            try:
                specs = _convert_to_iterable(pkg.determine_spec_details(prefix, libs_in_prefix))
            except Exception as e:
                specs = []
                msg = 'error detecting "{0}" from prefix {1} [{2}]'
                warnings.warn(msg.format(pkg.name, prefix, str(e)))

            if not specs:
                llnl.util.tty.debug(
                    "The following libraries in {0} were decidedly not "
                    "part of the package {1}: {2}".format(
                        prefix, pkg.name, ", ".join(_convert_to_iterable(libs_in_prefix))
                    )
                )

            for spec in specs:
                pkg_prefix = library_prefix(prefix)

                if not pkg_prefix:
                    msg = "no lib/ or lib64/ dir found in {0}. Cannot add it as a Spack package"
                    llnl.util.tty.debug(msg.format(prefix))
                    continue

                if spec in resolved_specs:
                    prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))

                    llnl.util.tty.debug(
                        "Libraries in {0} and {1} are both associated"
                        " with the same spec {2}".format(prefix, prior_prefix, str(spec))
                    )
                    continue
                else:
                    resolved_specs[spec] = prefix

                try:
                    spec.validate_detection()
                except Exception as e:
                    msg = (
                        '"{0}" has been detected on the system but will '
                        "not be added to packages.yaml [reason={1}]"
                    )
                    llnl.util.tty.warn(msg.format(spec, str(e)))
                    continue

                if spec.external_path:
                    pkg_prefix = spec.external_path

                pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))

    return pkg_to_entries

def by_executable(packages_to_check, path_hints=None):
    """Return the list of packages that have been detected on the system,
    searching by path.

    Args:
        packages_to_check (list): list of package classes to be detected
        path_hints (list): list of paths to be searched. If None the list will be
            constructed based on the PATH environment variable.
    """
    path_hints = spack.util.environment.get_path("PATH") if path_hints is None else path_hints
    exe_pattern_to_pkgs = collections.defaultdict(list)
    for pkg in packages_to_check:
        if hasattr(pkg, "executables"):
            for exe in pkg.platform_executables():
                exe_pattern_to_pkgs[exe].append(pkg)
        # Add Windows-specific, package-related paths to the search paths
        path_hints.extend(compute_windows_user_path_for_package(pkg))
        path_hints.extend(compute_windows_program_path_for_package(pkg))

    path_to_exe_name = executables_in_path(path_hints=path_hints)
    pkg_to_found_exes = collections.defaultdict(set)
    for exe_pattern, pkgs in exe_pattern_to_pkgs.items():
        compiled_re = re.compile(exe_pattern)
        for path, exe in path_to_exe_name.items():
            if compiled_re.search(exe):
                for pkg in pkgs:
                    pkg_to_found_exes[pkg].add(path)

    pkg_to_entries = collections.defaultdict(list)
    resolved_specs = {}  # spec -> exe found for the spec

    for pkg, exes in pkg_to_found_exes.items():
        if not hasattr(pkg, "determine_spec_details"):
            llnl.util.tty.warn(
                "{0} must define 'determine_spec_details' in order"
                " for Spack to detect externally-provided instances"
                " of the package.".format(pkg.name)
            )
            continue

        for prefix, exes_in_prefix in sorted(_group_by_prefix(exes)):
        result = []
        for candidate_path, items_in_prefix in sorted(_group_by_prefix(set(paths)).items()):
            # TODO: multiple instances of a package can live in the same
            # prefix, and a package implementation can return multiple specs
            # for one prefix, but without additional details (e.g. about the
            # naming scheme which differentiates them), the spec won't be
            # usable.
            try:
                specs = _convert_to_iterable(pkg.determine_spec_details(prefix, exes_in_prefix))
                specs = _convert_to_iterable(
                    pkg.determine_spec_details(candidate_path, items_in_prefix)
                )
            except Exception as e:
                specs = []
                msg = 'error detecting "{0}" from prefix {1} [{2}]'
                warnings.warn(msg.format(pkg.name, prefix, str(e)))

            if not specs:
                llnl.util.tty.debug(
                    "The following executables in {0} were decidedly not "
                    "part of the package {1}: {2}".format(
                        prefix, pkg.name, ", ".join(_convert_to_iterable(exes_in_prefix))
                    )
                warnings.warn(
                    f'error detecting "{pkg.name}" from prefix {candidate_path} [{str(e)}]'
                )

            for spec in specs:
                pkg_prefix = executable_prefix(prefix)
            if not specs:
                files = ", ".join(_convert_to_iterable(items_in_prefix))
                llnl.util.tty.debug(
                    f"The following files in {candidate_path} were decidedly not "
                    f"part of the package {pkg.name}: {files}"
                )

                if not pkg_prefix:
                    msg = "no bin/ dir found in {0}. Cannot add it as a Spack package"
                    llnl.util.tty.debug(msg.format(prefix))
            resolved_specs: Dict[spack.spec.Spec, str] = {}  # spec -> exe found for the spec
            for spec in specs:
                prefix = self.prefix_from_path(path=candidate_path)
                if not prefix:
                    continue

                if spec in resolved_specs:
                    prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))

                    llnl.util.tty.debug(
                        "Executables in {0} and {1} are both associated"
                        " with the same spec {2}".format(prefix, prior_prefix, str(spec))
                        f"Files in {candidate_path} and {prior_prefix} are both associated"
                        f" with the same spec {str(spec)}"
                    )
                    continue
                else:
                    resolved_specs[spec] = prefix

                resolved_specs[spec] = candidate_path
                try:
                    spec.validate_detection()
                except Exception as e:
                    msg = (
                        '"{0}" has been detected on the system but will '
                        "not be added to packages.yaml [reason={1}]"
                        f'"{spec}" has been detected on the system but will '
                        f"not be added to packages.yaml [reason={str(e)}]"
                    )
                    llnl.util.tty.warn(msg.format(spec, str(e)))
                    warnings.warn(msg)
                    continue

                if spec.external_path:
                    pkg_prefix = spec.external_path
                    prefix = spec.external_path

                pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))
                result.append(DetectedPackage(spec=spec, prefix=prefix))

    return pkg_to_entries
        return result

    def find(
        self, *, pkg_name: str, initial_guess: Optional[List[str]] = None
    ) -> List[DetectedPackage]:
        """For a given package, returns a list of detected specs.

        Args:
            pkg_name: package being detected
            initial_guess: initial list of paths to search from the caller
        """
        import spack.repo

        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        patterns = self.search_patterns(pkg=pkg_cls)
        if not patterns:
            return []
        path_hints = self.path_hints(pkg=pkg_cls, initial_guess=initial_guess)
        candidates = self.candidate_files(patterns=patterns, paths=path_hints)
        result = self.detect_specs(pkg=pkg_cls, paths=candidates)
        return result


class ExecutablesFinder(Finder):
    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
        result = []
        if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
            result = pkg.platform_executables()
        return result

    def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
        executables_by_path = executables_in_path(path_hints=paths)
        patterns = [re.compile(x) for x in patterns]
        result = []
        for compiled_re in patterns:
            for path, exe in executables_by_path.items():
                if compiled_re.search(exe):
                    result.append(path)
        return list(sorted(set(result)))

    def prefix_from_path(self, *, path: str) -> str:
        result = executable_prefix(path)
        if not result:
            msg = f"no bin/ dir found in {path}. Cannot add it as a Spack package"
            llnl.util.tty.debug(msg)
        return result


class LibrariesFinder(Finder):
    """Finds libraries on the system, searching by LD_LIBRARY_PATH, LIBRARY_PATH,
    DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths
    """

    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
        result = []
        if hasattr(pkg, "libraries"):
            result = pkg.libraries
        return result

    def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
        libraries_by_path = (
            libraries_in_ld_and_system_library_path(path_hints=paths)
            if sys.platform != "win32"
            else libraries_in_windows_paths(paths)
        )
        patterns = [re.compile(x) for x in patterns]
        result = []
        for compiled_re in patterns:
            for path, exe in libraries_by_path.items():
                if compiled_re.search(exe):
                    result.append(path)
        return result

    def prefix_from_path(self, *, path: str) -> str:
        result = library_prefix(path)
        if not result:
            msg = f"no lib/ or lib64/ dir found in {path}. Cannot add it as a Spack package"
            llnl.util.tty.debug(msg)
        return result


def by_path(
    packages_to_search: List[str],
    *,
    path_hints: Optional[List[str]] = None,
    max_workers: Optional[int] = None,
) -> Dict[str, List[DetectedPackage]]:
    """Return the list of packages that have been detected on the system, keyed by
    unqualified package name.

    Args:
        packages_to_search: list of packages to be detected. Each package can be either
            unqualified or fully qualified
        path_hints: initial list of paths to be searched
        max_workers: maximum number of workers to search for packages in parallel
    """
    # TODO: Packages should be able to define both .libraries and .executables in the future
    # TODO: determine_spec_details should get all relevant libraries and executables in one call
    executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()

    executables_path_guess = (
        spack.util.environment.get_path("PATH") if path_hints is None else path_hints
    )
    libraries_path_guess = [] if path_hints is None else path_hints
    detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}

    result = collections.defaultdict(list)
    with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
        for pkg in packages_to_search:
            executable_future = executor.submit(
                executables_finder.find, pkg_name=pkg, initial_guess=executables_path_guess
            )
            library_future = executor.submit(
                libraries_finder.find, pkg_name=pkg, initial_guess=libraries_path_guess
            )
            detected_specs_by_package[pkg] = executable_future, library_future

        for pkg_name, futures in detected_specs_by_package.items():
            for future in futures:
                try:
                    detected = future.result(timeout=DETECTION_TIMEOUT)
                    if detected:
                        _, unqualified_name = spack.repo.partition_package_name(pkg_name)
                        result[unqualified_name].extend(detected)
                except Exception:
                    llnl.util.tty.debug(
                        f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"
                    )

    return result

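Taken together, these hunks replace the old by_library/by_executable free functions with the Finder hierarchy and a single by_path entry point that fans detection out to a process pool, with DETECTION_TIMEOUT enforced per future. A minimal usage sketch, not part of the diff (the package names and the extra hint path are illustrative, and the import assumes the module lives at spack.detection.path as in this file):

# Sketch: driving the new detection entry point defined above.
import spack.util.environment
from spack.detection.path import by_path

hints = spack.util.environment.get_path("PATH") + ["/opt/custom/bin"]  # hypothetical extra path
detected = by_path(["cmake", "openssl"], path_hints=hints, max_workers=2)
for name, entries in detected.items():
    for entry in entries:
        # each entry is a DetectedPackage(spec=..., prefix=...)
        print(name, entry.spec, entry.prefix)
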
lib/spack/spack/detection/test.py (new file, 187 lines)
@@ -0,0 +1,187 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Create and run mock e2e tests for package detection."""
import collections
import contextlib
import pathlib
import tempfile
from typing import Any, Deque, Dict, Generator, List, NamedTuple, Tuple

import jinja2

from llnl.util import filesystem

import spack.repo
import spack.spec
from spack.util import spack_yaml

from .path import by_path


class MockExecutables(NamedTuple):
    """Mock executables to be used in detection tests"""

    #: Relative paths for mock executables to be created
    executables: List[str]
    #: Shell script for the mock executable
    script: str


class ExpectedTestResult(NamedTuple):
    """Data structure to model assertions on detection tests"""

    #: Spec to be detected
    spec: str


class DetectionTest(NamedTuple):
    """Data structure to construct detection tests by PATH inspection.

    Packages may have a YAML file containing the description of one or more detection tests
    to be performed. Each test creates a few mock executable scripts in a temporary folder,
    and checks that detection by PATH gives the expected results.
    """

    pkg_name: str
    layout: List[MockExecutables]
    results: List[ExpectedTestResult]


class Runner:
    """Runs an external detection test"""

    def __init__(self, *, test: DetectionTest, repository: spack.repo.RepoPath) -> None:
        self.test = test
        self.repository = repository
        self.tmpdir = tempfile.TemporaryDirectory()

    def execute(self) -> List[spack.spec.Spec]:
        """Executes a test and returns the specs that have been detected.

        This function sets up a test in a temporary directory, according to the prescriptions
        in the test layout, then performs a detection by executables and returns the specs that
        have been detected.
        """
        with self._mock_layout() as path_hints:
            entries = by_path([self.test.pkg_name], path_hints=path_hints)
            _, unqualified_name = spack.repo.partition_package_name(self.test.pkg_name)
            specs = set(x.spec for x in entries[unqualified_name])
        return list(specs)

    @contextlib.contextmanager
    def _mock_layout(self) -> Generator[List[str], None, None]:
        hints = set()
        try:
            for entry in self.test.layout:
                exes = self._create_executable_scripts(entry)

                for mock_executable in exes:
                    hints.add(str(mock_executable.parent))

            yield list(hints)
        finally:
            self.tmpdir.cleanup()

    def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[pathlib.Path]:
        relative_paths = mock_executables.executables
        script = mock_executables.script
        script_template = jinja2.Template("#!/bin/bash\n{{ script }}\n")
        result = []
        for mock_exe_path in relative_paths:
            rel_path = pathlib.Path(mock_exe_path)
            abs_path = pathlib.Path(self.tmpdir.name) / rel_path
            abs_path.parent.mkdir(parents=True, exist_ok=True)
            abs_path.write_text(script_template.render(script=script))
            filesystem.set_executable(abs_path)
            result.append(abs_path)
        return result

    @property
    def expected_specs(self) -> List[spack.spec.Spec]:
        return [spack.spec.Spec(r.spec) for r in self.test.results]


def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runner]:
    """Returns a list of test runners for a given package.

    Currently, detection tests are specified in a YAML file, called ``detection_test.yaml``,
    alongside the ``package.py`` file.

    This function reads that file to create a bunch of ``Runner`` objects.

    Args:
        pkg_name: name of the package to test
        repository: repository where the package lives
    """
    result = []
    detection_tests_content = read_detection_tests(pkg_name, repository)

    tests_by_path = detection_tests_content.get("paths", [])
    for single_test_data in tests_by_path:
        mock_executables = []
        for layout in single_test_data["layout"]:
            mock_executables.append(
                MockExecutables(executables=layout["executables"], script=layout["script"])
            )
        expected_results = []
        for assertion in single_test_data["results"]:
            expected_results.append(ExpectedTestResult(spec=assertion["spec"]))

        current_test = DetectionTest(
            pkg_name=pkg_name, layout=mock_executables, results=expected_results
        )
        result.append(Runner(test=current_test, repository=repository))

    return result


def read_detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> Dict[str, Any]:
    """Returns the normalized content of the detection_test.yaml associated with the package
    passed as input.

    The content is merged with that of any package that is transitively included using the
    "includes" attribute.

    Args:
        pkg_name: name of the package to test
        repository: repository in which to search for packages
    """
    content_stack, seen = [], set()
    included_packages: Deque[str] = collections.deque()

    root_detection_yaml, result = _detection_tests_yaml(pkg_name, repository)
    included_packages.extend(result.get("includes", []))
    seen |= set(result.get("includes", []))

    while included_packages:
        current_package = included_packages.popleft()
        try:
            current_detection_yaml, content = _detection_tests_yaml(current_package, repository)
        except FileNotFoundError as e:
            msg = (
                f"cannot read the detection tests from the '{current_package}' package, "
                f"included by {root_detection_yaml}"
            )
            raise FileNotFoundError(msg + f"\n\n\t{e}\n")

        content_stack.append((current_package, content))
        included_packages.extend(x for x in content.get("includes", []) if x not in seen)
        seen |= set(content.get("includes", []))

    result.setdefault("paths", [])
    for pkg_name, content in content_stack:
        result["paths"].extend(content.get("paths", []))

    return result


def _detection_tests_yaml(
    pkg_name: str, repository: spack.repo.RepoPath
) -> Tuple[pathlib.Path, Dict[str, Any]]:
    pkg_dir = pathlib.Path(repository.filename_for_package_name(pkg_name)).parent
    detection_tests_yaml = pkg_dir / "detection_test.yaml"
    with open(str(detection_tests_yaml)) as f:
        content = spack_yaml.load(f)
    return detection_tests_yaml, content
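The Runner above consumes data that a package provides in its detection_test.yaml. A sketch of one test expressed directly with the NamedTuples defined in this file (the gcc values are illustrative, not taken from a real package's YAML):

# Sketch: constructing and running a detection test by hand.
test = DetectionTest(
    pkg_name="gcc",
    layout=[MockExecutables(executables=["bin/gcc"], script='echo "gcc (GCC) 9.4.0"')],
    results=[ExpectedTestResult(spec="gcc@9.4.0")],
)
# runner = Runner(test=test, repository=spack.repo.PATH)
# assert set(runner.execute()) == set(runner.expected_specs)
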
@@ -38,12 +38,14 @@ class OpenMpi(Package):
import llnl.util.lang
import llnl.util.tty.color

import spack.deptypes as dt
import spack.error
import spack.patch
import spack.spec
import spack.url
import spack.util.crypto
import spack.variant
from spack.dependency import Dependency, canonical_deptype, default_deptype
from spack.dependency import Dependency
from spack.fetch_strategy import from_kwargs
from spack.resource import Resource
from spack.version import (
@@ -407,10 +409,7 @@ def version(

def _execute_version(pkg, ver, **kwargs):
    if (
        any(
            s in kwargs
            for s in ("sha256", "sha384", "sha512", "md5", "sha1", "sha224", "checksum")
        )
        (any(s in kwargs for s in spack.util.crypto.hashes) or "checksum" in kwargs)
        and hasattr(pkg, "has_code")
        and not pkg.has_code
    ):
@@ -438,7 +437,7 @@ def _execute_version(pkg, ver, **kwargs):
    pkg.versions[version] = kwargs


def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
def _depends_on(pkg, spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
    when_spec = make_when_spec(when)
    if not when_spec:
        return
@@ -449,7 +448,7 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
    if pkg.name == dep_spec.name:
        raise CircularReferenceError("Package '%s' cannot depend on itself." % pkg.name)

    type = canonical_deptype(type)
    depflag = dt.canonicalize(type)
    conditions = pkg.dependencies.setdefault(dep_spec.name, {})

    # call this patches here for clarity -- we want patch to be a list,
@@ -479,12 +478,12 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):

    # this is where we actually add the dependency to this package
    if when_spec not in conditions:
        dependency = Dependency(pkg, dep_spec, type=type)
        dependency = Dependency(pkg, dep_spec, depflag=depflag)
        conditions[when_spec] = dependency
    else:
        dependency = conditions[when_spec]
        dependency.spec.constrain(dep_spec, deps=False)
        dependency.type |= set(type)
        dependency.depflag |= depflag

    # apply patches to the dependency
    for execute_patch in patches:
@@ -527,7 +526,7 @@ def _execute_conflicts(pkg):


@directive(("dependencies"))
def depends_on(spec, when=None, type=default_deptype, patches=None):
def depends_on(spec, when=None, type=dt.DEFAULT_TYPES, patches=None):
    """Creates a dict of deps with specs defining when they apply.

    Args:
@@ -760,7 +759,7 @@ def _execute_variant(pkg):
    when_spec = make_when_spec(when)
    when_specs = [when_spec]

    if not re.match(spack.spec.identifier_re, name):
    if not re.match(spack.spec.IDENTIFIER_RE, name):
        directive = "variant"
        msg = "Invalid variant name in {0}: '{1}'"
        raise DirectiveError(directive, msg.format(pkg.name, name))

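The directives now canonicalize dependency types into an integer bitmask once, at declaration time. A sketch of the flag arithmetic they rely on (the constants and helpers, BUILD, LINK, RUN, DEFAULT_TYPES, canonicalize, flag_to_tuple, all appear in the hunks of this diff; the concrete bit values are an implementation detail):

import spack.deptypes as dt

depflag = dt.canonicalize(("build", "link"))  # tuple -> bitmask
depflag |= dt.RUN                             # merging types is a bit-or, as in dependency.depflag |= depflag
assert depflag & dt.BUILD                     # membership tests become bit-ands
print(dt.flag_to_tuple(depflag))              # back to a tuple of type names
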
@@ -120,10 +120,8 @@ def write_host_environment(self, spec):
        versioning. We use it in the case that an analysis later needs to
        easily access this information.
        """
        from spack.util.environment import get_host_environment_metadata

        env_file = self.env_metadata_path(spec)
        environ = get_host_environment_metadata()
        environ = spack.spec.get_host_environment_metadata()
        with open(env_file, "w") as fd:
            sjson.dump(environ, fd)

@@ -12,6 +12,7 @@
from enum import Enum
from typing import List, Optional

import spack.deptypes as dt
import spack.environment.environment as ev
import spack.spec
import spack.traverse as traverse
@@ -36,7 +37,9 @@ def from_string(s: str) -> "UseBuildCache":
def _deptypes(use_buildcache: UseBuildCache):
    """What edges should we follow for a given node? If it's a cache-only
    node, then we can drop build type deps."""
    return ("link", "run") if use_buildcache == UseBuildCache.ONLY else ("build", "link", "run")
    return (
        dt.LINK | dt.RUN if use_buildcache == UseBuildCache.ONLY else dt.BUILD | dt.LINK | dt.RUN
    )


class DepfileNode:
@@ -69,13 +72,13 @@ def __init__(self, pkg_buildcache: UseBuildCache, deps_buildcache: UseBuildCache
        self.adjacency_list: List[DepfileNode] = []
        self.pkg_buildcache = pkg_buildcache
        self.deps_buildcache = deps_buildcache
        self.deptypes_root = _deptypes(pkg_buildcache)
        self.deptypes_deps = _deptypes(deps_buildcache)
        self.depflag_root = _deptypes(pkg_buildcache)
        self.depflag_deps = _deptypes(deps_buildcache)

    def neighbors(self, node):
        """Produce a list of spec to follow from node"""
        deptypes = self.deptypes_root if node.depth == 0 else self.deptypes_deps
        return traverse.sort_edges(node.edge.spec.edges_to_dependencies(deptype=deptypes))
        depflag = self.depflag_root if node.depth == 0 else self.depflag_deps
        return traverse.sort_edges(node.edge.spec.edges_to_dependencies(depflag=depflag))

    def accept(self, node):
        self.adjacency_list.append(
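With bitmasks, the depfile visitor's root and dependency policies are just two integers that can be compared and combined cheaply. A sketch of what _deptypes now returns (the ONLY member is shown in the hunk above; the existence of other UseBuildCache members is an assumption):

import spack.deptypes as dt

assert _deptypes(UseBuildCache.ONLY) == dt.LINK | dt.RUN
# For any non-cache-only policy the build edges are kept:
# _deptypes(other) == dt.BUILD | dt.LINK | dt.RUN
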
@@ -28,6 +28,7 @@
import spack.compilers
import spack.concretize
import spack.config
import spack.deptypes as dt
import spack.error
import spack.fetch_strategy
import spack.hash_types as ht
@@ -403,7 +404,7 @@ def _write_yaml(data, str_or_file):

def _eval_conditional(string):
    """Evaluate conditional definitions using restricted variable scope."""
    valid_variables = spack.util.environment.get_host_environment()
    valid_variables = spack.spec.get_host_environment()
    valid_variables.update({"re": re, "env": os.environ})
    return eval(string, valid_variables)

@@ -1395,7 +1396,10 @@ def _concretize_together_where_possible(

        result_by_user_spec = {}
        solver = spack.solver.asp.Solver()
        for result in solver.solve_in_rounds(specs_to_concretize, tests=tests):
        allow_deprecated = spack.config.get("config:deprecated", False)
        for result in solver.solve_in_rounds(
            specs_to_concretize, tests=tests, allow_deprecated=allow_deprecated
        ):
            result_by_user_spec.update(result.specs_by_input)

        result = []
@@ -1504,7 +1508,7 @@ def _concretize_separately(self, tests=False):
        start = time.time()
        max_processes = min(
            len(arguments),  # Number of specs
            spack.config.get("config:build_jobs"),  # Cap on build jobs
            spack.util.cpus.determine_number_of_jobs(parallel=True),
        )

        # TODO: revisit this print as soon as darwin is parallel too
@@ -1536,13 +1540,13 @@ def _concretize_separately(self, tests=False):
        for h in self.specs_by_hash:
            current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
            for node in computed_spec.traverse():
                test_edges = node.edges_to_dependencies(deptype="test")
                test_edges = node.edges_to_dependencies(depflag=dt.TEST)
                for current_edge in test_edges:
                    test_dependency = current_edge.spec
                    if test_dependency in current_spec[node.name]:
                        continue
                    current_spec[node.name].add_dependency_edge(
                        test_dependency.copy(), deptypes="test", virtuals=current_edge.virtuals
                        test_dependency.copy(), depflag=dt.TEST, virtuals=current_edge.virtuals
                    )

        results = [
@@ -2058,7 +2062,7 @@ def matching_spec(self, spec):
        # If multiple root specs match, it is assumed that the abstract
        # spec will most-succinctly summarize the difference between them
        # (and the user can enter one of these to disambiguate)
        fmt_str = "{hash:7} " + spack.spec.default_format
        fmt_str = "{hash:7} " + spack.spec.DEFAULT_FORMAT
        color = clr.get_color_when()
        match_strings = [
            f"Root spec {abstract.format(color=color)}\n {concrete.format(fmt_str, color=color)}"
@@ -2190,7 +2194,7 @@ def _read_lockfile_dict(self, d):
            name, data = reader.name_and_data(node_dict)
            for _, dep_hash, deptypes, _, virtuals in reader.dependencies_from_node_dict(data):
                specs_by_hash[lockfile_key]._add_dependency(
                    specs_by_hash[dep_hash], deptypes=deptypes, virtuals=virtuals
                    specs_by_hash[dep_hash], depflag=dt.canonicalize(deptypes), virtuals=virtuals
                )

        # Traverse the root specs one at a time in the order they appear.
@@ -2366,7 +2370,7 @@ def display_specs(concretized_specs):
    def _tree_to_display(spec):
        return spec.tree(
            recurse_dependencies=True,
            format=spack.spec.display_format,
            format=spack.spec.DISPLAY_FORMAT,
            status_fn=spack.spec.Spec.install_status,
            hashlen=7,
            hashes=True,
@@ -2664,6 +2668,26 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
        self.yaml_content = with_defaults_added
        self.changed = False

    def _all_matches(self, user_spec: str) -> List[str]:
        """Maps the input string to all equivalent user specs in the manifest,
        and returns them.

        Args:
            user_spec: user spec to be found

        Raises:
            ValueError: if no equivalent match is found
        """
        result = []
        for yaml_spec_str in self.pristine_configuration["specs"]:
            if Spec(yaml_spec_str) == Spec(user_spec):
                result.append(yaml_spec_str)

        if not result:
            raise ValueError(f"cannot find a spec equivalent to {user_spec}")

        return result

    def add_user_spec(self, user_spec: str) -> None:
        """Appends the user spec passed as input to the list of root specs.

@@ -2684,8 +2708,9 @@ def remove_user_spec(self, user_spec: str) -> None:
            SpackEnvironmentError: when the user spec is not in the list
        """
        try:
            self.pristine_configuration["specs"].remove(user_spec)
            self.configuration["specs"].remove(user_spec)
            for key in self._all_matches(user_spec):
                self.pristine_configuration["specs"].remove(key)
                self.configuration["specs"].remove(key)
        except ValueError as e:
            msg = f"cannot remove {user_spec} from {self}, no such spec exists"
            raise SpackEnvironmentError(msg) from e

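remove_user_spec goes through _all_matches because distinct YAML strings can parse to the same Spec, and every one of them has to be dropped from both the pristine and the merged configuration. A sketch of the situation (the spec strings are hypothetical):

from spack.spec import Spec

assert Spec("zlib @1.2.13") == Spec("zlib@1.2.13")
# A manifest containing both strings would lose both entries on
# remove_user_spec("zlib@1.2.13"); removing only the exact string would
# leave the other, equivalent root spec behind.
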
@@ -43,7 +43,7 @@ def activate_header(env, shell, prompt=None):
        # TODO: despacktivate
        # TODO: prompt
    elif shell == "pwsh":
        cmds += "$Env:SPACK_ENV=%s\n" % env.path
        cmds += "$Env:SPACK_ENV='%s'\n" % env.path
    else:
        if "color" in os.getenv("TERM", "") and prompt:
            prompt = colorize("@G{%s}" % prompt, color=True, enclose=True)
@@ -82,7 +82,7 @@ def deactivate_header(shell):
        # TODO: despacktivate
        # TODO: prompt
    elif shell == "pwsh":
        cmds += "Remove-Item Env:SPACK_ENV"
        cmds += "Set-Item -Path Env:SPACK_ENV\n"
    else:
        cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
        cmds += "unset SPACK_ENV; export SPACK_ENV;\n"

@@ -128,3 +128,7 @@ def __init__(self, provided, required, constraint_type):
        self.provided = provided
        self.required = required
        self.constraint_type = constraint_type


class FetchError(SpackError):
    """Superclass for fetch-related errors."""

@@ -31,9 +31,11 @@
import urllib.parse
from typing import List, Optional

import llnl.url
import llnl.util
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.string import comma_and, quote
from llnl.util.filesystem import get_single_file, mkdirp, temp_cwd, temp_rename, working_dir
from llnl.util.symlink import symlink

@@ -46,9 +48,8 @@
import spack.util.web as web_util
import spack.version
import spack.version.git_ref_lookup
from spack.util.compression import decompressor_for, extension_from_path
from spack.util.compression import decompressor_for
from spack.util.executable import CommandNotFoundError, which
from spack.util.string import comma_and, quote

#: List of all fetch strategies, created by FetchStrategy metaclass.
all_strategies = []
@@ -400,7 +401,7 @@ def _fetch_curl(self, url):

        try:
            web_util.check_curl_code(curl.returncode)
        except web_util.FetchError as err:
        except spack.error.FetchError as err:
            raise spack.fetch_strategy.FailedDownloadError(url, str(err))

        self._check_headers(headers)
@@ -441,7 +442,7 @@ def expand(self):

        # TODO: replace this by mime check.
        if not self.extension:
            self.extension = spack.url.determine_url_file_extension(self.url)
            self.extension = llnl.url.determine_url_file_extension(self.url)

        if self.stage.expanded:
            tty.debug("Source already staged to %s" % self.stage.source_path)
@@ -570,7 +571,7 @@ def expand(self):

    @_needs_stage
    def archive(self, destination, **kwargs):
        assert extension_from_path(destination) == "tar.gz"
        assert llnl.url.extension_from_path(destination) == "tar.gz"
        assert self.stage.source_path.startswith(self.stage.path)

        tar = which("tar", required=True)
@@ -733,7 +734,11 @@ def version_from_git(git_exe):
    @property
    def git(self):
        if not self._git:
            self._git = spack.util.git.git()
            try:
                self._git = spack.util.git.git(required=True)
            except CommandNotFoundError as exc:
                tty.error(str(exc))
                raise

            # Disable advice for a quieter fetch
            # https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt
@@ -1289,7 +1294,7 @@ def fetch(self):

        parsed_url = urllib.parse.urlparse(self.url)
        if parsed_url.scheme != "s3":
            raise web_util.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
            raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")

        tty.debug("Fetching {0}".format(self.url))

@@ -1336,7 +1341,7 @@ def fetch(self):

        parsed_url = urllib.parse.urlparse(self.url)
        if parsed_url.scheme != "gs":
            raise web_util.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
            raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")

        tty.debug("Fetching {0}".format(self.url))

@@ -1430,7 +1435,7 @@ def from_kwargs(**kwargs):
    on attribute names (e.g., ``git``, ``hg``, etc.)

    Raises:
        spack.util.web.FetchError: If no ``fetch_strategy`` matches the args.
        spack.error.FetchError: If no ``fetch_strategy`` matches the args.
    """
    for fetcher in all_strategies:
        if fetcher.matches(kwargs):
@@ -1537,7 +1542,7 @@ def for_package_version(pkg, version=None):
    # if it's a commit, we must use a GitFetchStrategy
    if isinstance(version, spack.version.GitVersion):
        if not hasattr(pkg, "git"):
            raise web_util.FetchError(
            raise spack.error.FetchError(
                f"Cannot fetch git version for {pkg.name}. Package has no 'git' attribute"
            )
        # Populate the version with comparisons to other commits
@@ -1687,11 +1692,11 @@ def destroy(self):
        shutil.rmtree(self.root, ignore_errors=True)


class NoCacheError(web_util.FetchError):
class NoCacheError(spack.error.FetchError):
    """Raised when there is no cached archive for a package."""


class FailedDownloadError(web_util.FetchError):
class FailedDownloadError(spack.error.FetchError):
    """Raised when a download fails."""

    def __init__(self, url, msg=""):
@@ -1699,23 +1704,23 @@ def __init__(self, url, msg=""):
        self.url = url


class NoArchiveFileError(web_util.FetchError):
class NoArchiveFileError(spack.error.FetchError):
    """Raised when an archive file is expected but none exists."""


class NoDigestError(web_util.FetchError):
class NoDigestError(spack.error.FetchError):
    """Raised after attempt to checksum when URL has no digest."""


class ExtrapolationError(web_util.FetchError):
class ExtrapolationError(spack.error.FetchError):
    """Raised when we can't extrapolate a version for a package."""


class FetcherConflict(web_util.FetchError):
class FetcherConflict(spack.error.FetchError):
    """Raised for packages with invalid fetch attributes."""


class InvalidArgsError(web_util.FetchError):
class InvalidArgsError(spack.error.FetchError):
    """Raised when a version can't be deduced from a set of arguments."""

    def __init__(self, pkg=None, version=None, **args):
@@ -1728,11 +1733,11 @@ def __init__(self, pkg=None, version=None, **args):
        super().__init__(msg, long_msg)


class ChecksumError(web_util.FetchError):
class ChecksumError(spack.error.FetchError):
    """Raised when archive fails to checksum."""


class NoStageError(web_util.FetchError):
class NoStageError(spack.error.FetchError):
    """Raised when fetch operations are called before set_stage()."""

    def __init__(self, method):

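The net effect of the exception moves above: all fetch-related errors now derive from spack.error.FetchError rather than spack.util.web.FetchError, so callers can catch a single base class regardless of which strategy raised. A sketch:

import spack.error
import spack.fetch_strategy

try:
    raise spack.fetch_strategy.NoCacheError("no cached archive for foo")
except spack.error.FetchError as e:
    # NoCacheError is caught via the new shared base class
    print(f"fetch failed: {e}")
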
@@ -590,9 +590,9 @@ def print_status(self, *specs, **kwargs):
            print()

            header = "%s{%s} / %s{%s}" % (
                spack.spec.architecture_color,
                spack.spec.ARCHITECTURE_COLOR,
                architecture,
                spack.spec.compiler_color,
                spack.spec.COMPILER_COLOR,
                compiler,
            )
            tty.hline(colorize(header), char="-")

@@ -1,28 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import urllib.parse
import urllib.response
from urllib.error import URLError
from urllib.request import BaseHandler


def gcs_open(req, *args, **kwargs):
    """Open a reader stream to a blob object on GCS"""
    import spack.util.gcs as gcs_util

    url = urllib.parse.urlparse(req.get_full_url())
    gcsblob = gcs_util.GCSBlob(url)

    if not gcsblob.exists():
        raise URLError("GCS blob {0} does not exist".format(gcsblob.blob_path))
    stream = gcsblob.get_blob_byte_stream()
    headers = gcsblob.get_blob_headers()

    return urllib.response.addinfourl(stream, headers, url)


class GCSHandler(BaseHandler):
    def gs_open(self, req):
        return gcs_open(req)
@@ -38,11 +38,12 @@
"""
import enum
import sys
from typing import List, Optional, Set, TextIO, Tuple, Union
from typing import List, Optional, Set, TextIO, Tuple

import llnl.util.tty.color

import spack.dependency
import spack.deptypes as dt
import spack.repo
import spack.spec
import spack.tengine

@@ -78,7 +79,7 @@ def __init__(self):
        self.node_character = "o"
        self.debug = False
        self.indent = 0
        self.deptype = spack.dependency.all_deptypes
        self.depflag = dt.ALL

        # These are colors in the order they'll be used for edges.
        # See llnl.util.tty.color for details on color characters.
@@ -326,7 +327,7 @@ def write(self, spec, color=None, out=None):
        nodes_in_topological_order = [
            edge.spec
            for edge in spack.traverse.traverse_edges_topo(
                [spec], direction="children", deptype=self.deptype
                [spec], direction="children", deptype=self.depflag
            )
        ]
        nodes_in_topological_order.reverse()
@@ -424,7 +425,7 @@ def write(self, spec, color=None, out=None):

            # Replace node with its dependencies
            self._frontier.pop(i)
            edges = sorted(node.edges_to_dependencies(deptype=self.deptype), reverse=True)
            edges = sorted(node.edges_to_dependencies(depflag=self.depflag), reverse=True)
            if edges:
                deps = [e.spec.dag_hash() for e in edges]
                self._connect_deps(i, deps, "new-deps")  # anywhere.
@@ -433,13 +434,14 @@ def write(self, spec, color=None, out=None):
        self._collapse_line(i)


def graph_ascii(spec, node="o", out=None, debug=False, indent=0, color=None, deptype="all"):
def graph_ascii(
    spec, node="o", out=None, debug=False, indent=0, color=None, depflag: dt.DepFlag = dt.ALL
):
    graph = AsciiGraph()
    graph.debug = debug
    graph.indent = indent
    graph.node_character = node
    if deptype:
        graph.deptype = spack.dependency.canonical_deptype(deptype)
    graph.depflag = depflag

    graph.write(spec, color=color, out=out)

@@ -513,7 +515,7 @@ def __init__(self):

    def visit(self, edge):
        if edge.parent is None:
            for node in spack.traverse.traverse_nodes([edge.spec], deptype=("link", "run")):
            for node in spack.traverse.traverse_nodes([edge.spec], deptype=dt.LINK | dt.RUN):
                self.main_unified_space.add(node.dag_hash())
        super().visit(edge)

@@ -529,40 +531,38 @@ def edge_entry(self, edge):
        return (
            edge.parent.dag_hash(),
            edge.spec.dag_hash(),
            f"[color=\"{':'.join(colormap[x] for x in edge.deptypes)}\"]",
            f"[color=\"{':'.join(colormap[x] for x in dt.flag_to_tuple(edge.depflag))}\"]",
        )


def _static_edges(specs, deptype):
def _static_edges(specs, depflag):
    for spec in specs:
        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
        possible = pkg_cls.possible_dependencies(expand_virtuals=True, deptype=deptype)
        possible = pkg_cls.possible_dependencies(expand_virtuals=True, depflag=depflag)

        for parent_name, dependencies in possible.items():
            for dependency_name in dependencies:
                yield spack.spec.DependencySpec(
                    spack.spec.Spec(parent_name),
                    spack.spec.Spec(dependency_name),
                    deptypes=deptype,
                    depflag=depflag,
                    virtuals=(),
                )


def static_graph_dot(
    specs: List[spack.spec.Spec],
    deptype: Optional[Union[str, Tuple[str, ...]]] = "all",
    out: Optional[TextIO] = None,
    specs: List[spack.spec.Spec], depflag: dt.DepFlag = dt.ALL, out: Optional[TextIO] = None
):
    """Static DOT graph with edges to all possible dependencies.

    Args:
        specs: abstract specs to be represented
        deptype: dependency types to consider
        depflag: dependency types to consider
        out: optional output stream. If None sys.stdout is used
    """
    out = out or sys.stdout
    builder = StaticDag()
    for edge in _static_edges(specs, deptype):
    for edge in _static_edges(specs, depflag):
        builder.visit(edge)
    out.write(builder.render())

@@ -570,7 +570,7 @@ def static_graph_dot(
def graph_dot(
    specs: List[spack.spec.Spec],
    builder: Optional[DotGraphBuilder] = None,
    deptype: spack.dependency.DependencyArgument = "all",
    depflag: dt.DepFlag = dt.ALL,
    out: Optional[TextIO] = None,
):
    """DOT graph of the concrete specs passed as input.

@@ -578,7 +578,7 @@ def graph_dot(
    Args:
        specs: specs to be represented
        builder: builder to use to render the graph
        deptype: dependency types to consider
        depflag: dependency types to consider
        out: optional output stream. If None sys.stdout is used
    """
    if not specs:
@@ -587,10 +587,9 @@ def graph_dot(
    if out is None:
        out = sys.stdout

    deptype = spack.dependency.canonical_deptype(deptype)
    builder = builder or SimpleDAG()
    for edge in spack.traverse.traverse_edges(
        specs, cover="edges", order="breadth", deptype=deptype
        specs, cover="edges", order="breadth", deptype=depflag
    ):
        builder.visit(edge)

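After this change the graph API takes a dt.DepFlag bitmask instead of a deptype tuple or the string "all". A sketch of the updated call sites (the spec list is illustrative):

import sys
import spack.deptypes as dt
import spack.graph

# specs = [spack.spec.Spec("hdf5").concretized()]  # hypothetical input
# spack.graph.static_graph_dot(specs, depflag=dt.LINK | dt.RUN, out=sys.stdout)
# spack.graph.graph_dot(specs, depflag=dt.ALL, out=sys.stdout)
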
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Definitions that control how Spack creates Spec hashes."""

import spack.dependency as dp
import spack.deptypes as dt
import spack.repo

hashes = []
@@ -20,8 +20,8 @@ class SpecHashDescriptor:

    We currently use different hashes for different use cases."""

    def __init__(self, deptype, package_hash, name, override=None):
        self.deptype = dp.canonical_deptype(deptype)
    def __init__(self, depflag: dt.DepFlag, package_hash, name, override=None):
        self.depflag = depflag
        self.package_hash = package_hash
        self.name = name
        hashes.append(self)
@@ -39,12 +39,12 @@ def __call__(self, spec):


#: Spack's deployment hash. Includes all inputs that can affect how a package is built.
dag_hash = SpecHashDescriptor(deptype=("build", "link", "run"), package_hash=True, name="hash")
dag_hash = SpecHashDescriptor(depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="hash")


#: Hash descriptor used only to transfer a DAG, as is, across processes
process_hash = SpecHashDescriptor(
    deptype=("build", "link", "run", "test"), package_hash=True, name="process_hash"
    depflag=dt.BUILD | dt.LINK | dt.RUN | dt.TEST, package_hash=True, name="process_hash"
)


@@ -56,7 +56,7 @@ def _content_hash_override(spec):

#: Package hash used as part of dag hash
package_hash = SpecHashDescriptor(
    deptype=(), package_hash=True, name="package_hash", override=_content_hash_override
    depflag=0, package_hash=True, name="package_hash", override=_content_hash_override
)


@@ -64,10 +64,10 @@ def _content_hash_override(spec):
# spec formats

full_hash = SpecHashDescriptor(
    deptype=("build", "link", "run"), package_hash=True, name="full_hash"
    depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=True, name="full_hash"
)


build_hash = SpecHashDescriptor(
    deptype=("build", "link", "run"), package_hash=False, name="build_hash"
    depflag=dt.BUILD | dt.LINK | dt.RUN, package_hash=False, name="build_hash"
)

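With depflag stored directly on the descriptor, "which edges feed this hash" becomes a bit test rather than tuple membership. A small sketch against the descriptors defined in the hunks above:

import spack.deptypes as dt
import spack.hash_types as ht

assert ht.dag_hash.depflag == dt.BUILD | dt.LINK | dt.RUN
assert ht.process_hash.depflag & dt.TEST  # only the process hash follows test edges
assert ht.package_hash.depflag == 0       # the package hash follows no edges
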
@@ -79,8 +79,7 @@ class ElfFilesWithRPathVisitor(BaseDirectoryVisitor):
    """Visitor that collects all elf files that have an rpath"""

    def __init__(self):
        # Map from (ino, dev) -> path. We need 1 path per file, if there are hardlinks,
        # we don't need to store the path multiple times.
        # Keep track of what hardlinked files we've already visited.
        self.visited = set()

    def visit_file(self, root, rel_path, depth):
@@ -89,10 +88,10 @@ def visit_file(self, root, rel_path, depth):
        identifier = (s.st_ino, s.st_dev)

        # We're hitting a hardlink or symlink of an excluded lib, no need to parse.
        if identifier in self.visited:
            return

        self.visited.add(identifier)
        if s.st_nlink > 1:
            if identifier in self.visited:
                return
            self.visited.add(identifier)

        result = drop_redundant_rpaths(filepath)

@@ -17,6 +17,7 @@

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.lang import nullcontext
from llnl.util.tty.color import colorize

@@ -26,7 +27,6 @@
from spack.installer import InstallError
from spack.spec import Spec
from spack.util.prefix import Prefix
from spack.util.string import plural

#: Stand-alone test failure info type
TestFailureType = Tuple[BaseException, str]
@@ -50,6 +50,7 @@
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.database
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.hooks
|
||||
import spack.mirror
|
||||
@@ -90,6 +91,16 @@
|
||||
STATUS_REMOVED = "removed"
|
||||
|
||||
|
||||
def _write_timer_json(pkg, timer, cache):
|
||||
extra_attributes = {"name": pkg.name, "cache": cache, "hash": pkg.spec.dag_hash()}
|
||||
try:
|
||||
with open(pkg.times_log_path, "w") as timelog:
|
||||
timer.write_json(timelog, extra_attributes=extra_attributes)
|
||||
except Exception as e:
|
||||
tty.debug(str(e))
|
||||
return
|
||||
|
||||
|
||||
class InstallAction:
|
||||
#: Don't perform an install
|
||||
NONE = 0
|
||||
@@ -303,7 +314,7 @@ def _packages_needed_to_bootstrap_compiler(
|
||||
# mark compiler as depended-on by the packages that use it
|
||||
for pkg in pkgs:
|
||||
dep._dependents.add(
|
||||
spack.spec.DependencySpec(pkg.spec, dep, deptypes=("build",), virtuals=())
|
||||
spack.spec.DependencySpec(pkg.spec, dep, depflag=dt.BUILD, virtuals=())
|
||||
)
|
||||
packages = [(s.package, False) for s in dep.traverse(order="post", root=False)]
|
||||
|
||||
@@ -399,6 +410,8 @@ def _install_from_cache(
|
||||
return False
|
||||
t.stop()
|
||||
tty.debug("Successfully extracted {0} from binary cache".format(pkg_id))
|
||||
|
||||
_write_timer_json(pkg, t, True)
|
||||
_print_timer(pre=_log_prefix(pkg.name), pkg_id=pkg_id, timer=t)
|
||||
_print_installed_pkg(pkg.spec.prefix)
|
||||
spack.hooks.post_install(pkg.spec, explicit)
|
||||
@@ -481,7 +494,7 @@ def _process_binary_cache_tarball(
|
||||
|
||||
with timer.measure("install"), spack.util.path.filter_padding():
|
||||
binary_distribution.extract_tarball(
|
||||
pkg.spec, download_result, unsigned=unsigned, force=False
|
||||
pkg.spec, download_result, unsigned=unsigned, force=False, timer=timer
|
||||
)
|
||||
|
||||
pkg.installed_from_binary_cache = True
|
||||
@@ -592,7 +605,9 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
|
||||
if node is spec:
|
||||
spack.repo.PATH.dump_provenance(node, dest_pkg_dir)
|
||||
elif source_pkg_dir:
|
||||
fs.install_tree(source_pkg_dir, dest_pkg_dir)
|
||||
fs.install_tree(
|
||||
source_pkg_dir, dest_pkg_dir, allow_broken_symlinks=(sys.platform != "win32")
|
||||
)
|
||||
|
||||
|
||||
def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:
@@ -774,10 +789,9 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
         # Save off dependency package ids for quick checks since traversals
         # are not able to return full dependents for all packages across
         # environment specs.
-        deptypes = self.get_deptypes(self.pkg)
         self.dependencies = set(
             package_id(d.package)
-            for d in self.pkg.spec.dependencies(deptype=deptypes)
+            for d in self.pkg.spec.dependencies(deptype=self.get_depflags(self.pkg))
             if package_id(d.package) != self.pkg_id
         )

@@ -816,7 +830,7 @@ def _add_default_args(self) -> None:
         ]:
             _ = self.install_args.setdefault(arg, default)

-    def get_deptypes(self, pkg: "spack.package_base.PackageBase") -> Tuple[str, ...]:
+    def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
         """Determine the required dependency types for the associated package.

         Args:

@@ -825,7 +839,7 @@ def get_deptypes(self, pkg: "spack.package_base.PackageBase") -> Tuple[str, ...]
         Returns:
             tuple: required dependency type(s) for the package
         """
-        deptypes = ["link", "run"]
+        depflag = dt.LINK | dt.RUN
         include_build_deps = self.install_args.get("include_build_deps")

         if self.pkg_id == package_id(pkg):

@@ -833,14 +847,15 @@ def get_deptypes(self, pkg: "spack.package_base.PackageBase") -> Tuple[str, ...]
         else:
             cache_only = self.install_args.get("dependencies_cache_only")

-            # Include build dependencies if pkg is not installed and cache_only
-            # is False, or if build depdencies are explicitly called for
-            # by include_build_deps.
-            if include_build_deps or not (cache_only or pkg.spec.installed):
-                deptypes.append("build")
+            # Include build dependencies if pkg is going to be built from sources, or
+            # if build deps are explicitly requested.
+            if include_build_deps or not (
+                cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
+            ):
+                depflag |= dt.BUILD
         if self.run_tests(pkg):
-            deptypes.append("test")
-        return tuple(sorted(deptypes))
+            depflag |= dt.TEST
+        return depflag

     def has_dependency(self, dep_id) -> bool:
         """Returns ``True`` if the package id represents a known dependency
@@ -873,9 +888,8 @@ def traverse_dependencies(self, spec=None, visited=None) -> Iterator["spack.spec.Spec"]:
             spec = self.spec
         if visited is None:
             visited = set()
-        deptype = self.get_deptypes(spec.package)

-        for dep in spec.dependencies(deptype=deptype):
+        for dep in spec.dependencies(deptype=self.get_depflags(spec.package)):
             hash = dep.dag_hash()
             if hash in visited:
                 continue
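`traverse_dependencies` walks the dependency DAG as a generator, de-duplicating nodes by their DAG hash so diamond dependencies are yielded once. The skeleton of that pattern, reduced to plain callables (`children` and `node_id` are hypothetical stand-ins for `dependencies()` and `dag_hash()`):

def traverse(node, children, node_id, visited=None):
    # Yield every node reachable from `node` exactly once, keyed by node_id.
    if visited is None:
        visited = set()
    for child in children(node):
        key = node_id(child)
        if key in visited:
            continue
        visited.add(key)
        yield child
        yield from traverse(child, children, node_id, visited)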
@@ -959,10 +973,9 @@ def __init__(
         # Be consistent wrt use of dependents and dependencies. That is,
         # if use traverse for transitive dependencies, then must remove
         # transitive dependents on failure.
-        deptypes = self.request.get_deptypes(self.pkg)
         self.dependencies = set(
             package_id(d.package)
-            for d in self.pkg.spec.dependencies(deptype=deptypes)
+            for d in self.pkg.spec.dependencies(deptype=self.request.get_depflags(self.pkg))
             if package_id(d.package) != self.pkg_id
         )
@@ -1316,7 +1329,6 @@ def _prepare_for_install(self, task: BuildTask) -> None:
         """
         Check the database and leftover installation directories/files and
         prepare for a new install attempt for an uninstalled package.
-
         Preparation includes cleaning up installation and stage directories
         and ensuring the database is up-to-date.

@@ -2092,7 +2104,6 @@ def install(self) -> None:
             # another process has a write lock so must be (un)installing
             # the spec (or that process is hung).
             ltype, lock = self._ensure_locked("read", pkg)
-
             # Requeue the spec if we cannot get at least a read lock so we
             # can check the status presumably established by another process
             # -- failed, installed, or uninstalled -- on the next pass.
@@ -2372,8 +2383,7 @@ def run(self) -> bool:

         # Stop the timer and save results
         self.timer.stop()
-        with open(self.pkg.times_log_path, "w") as timelog:
-            self.timer.write_json(timelog)
+        _write_timer_json(self.pkg, self.timer, False)

         print_install_test_log(self.pkg)
         _print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer)

@@ -2394,7 +2404,9 @@ def _install_source(self) -> None:
         src_target = os.path.join(pkg.spec.prefix, "share", pkg.name, "src")
         tty.debug("{0} Copying source to {1}".format(self.pre, src_target))

-        fs.install_tree(pkg.stage.source_path, src_target)
+        fs.install_tree(
+            pkg.stage.source_path, src_target, allow_broken_symlinks=(sys.platform != "win32")
+        )

     def _real_install(self) -> None:
         import spack.builder
@@ -30,7 +30,6 @@
 import llnl.util.tty.color as color
 from llnl.util.tty.log import log_output

-import spack
 import spack.cmd
 import spack.config
 import spack.environment as ev

@@ -51,7 +50,7 @@
 stat_names = pstats.Stats.sort_arg_dict_default

 #: top-level aliases for Spack commands
-aliases = {"rm": "remove"}
+aliases = {"concretise": "concretize", "containerise": "containerize", "rm": "remove"}

 #: help levels in order of detail (i.e., number of commands shown)
 levels = ["short", "long"]
@@ -716,7 +715,7 @@ def __call__(self, *argv, **kwargs):

         out = io.StringIO()
         try:
-            with log_output(out):
+            with log_output(out, echo=True):
                 self.returncode = _invoke_command(self.command, self.parser, args, unknown)

         except SystemExit as e:
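Passing `echo=True` makes the command both capture its output into the `StringIO` buffer and forward it to the terminal, instead of capturing silently. A rough stdlib approximation of that tee behaviour (a hypothetical `tee_output`; the real `log_output` works at the file-descriptor level rather than swapping `sys.stdout`):

import contextlib
import io
import sys


@contextlib.contextmanager
def tee_output(buffer, echo=False):
    # Redirect sys.stdout into `buffer`, optionally echoing to the real stdout.
    class _Tee:
        def write(self, s):
            buffer.write(s)
            if echo:
                sys.__stdout__.write(s)
            return len(s)

        def flush(self):
            buffer.flush()

    old, sys.stdout = sys.stdout, _Tee()
    try:
        yield buffer
    finally:
        sys.stdout = old


out = io.StringIO()
with tee_output(out, echo=True):
    print("hello")  # appears on the terminal *and* in `out`
assert out.getvalue() == "hello\n"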
@@ -775,7 +774,7 @@ def _profile_wrapper(command, parser, args, unknown_args):
         pr.disable()

     # print out profile stats.
-    stats = pstats.Stats(pr)
+    stats = pstats.Stats(pr, stream=sys.stderr)
     stats.sort_stats(*sortby)
     stats.print_stats(nlines)
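Directing the profile report at `sys.stderr` keeps pipelines clean: the profiled command's own stdout stays machine-readable while the statistics go to the terminal. The `pstats` API used above, in miniature:

import cProfile
import pstats
import sys

pr = cProfile.Profile()
pr.enable()
sum(i * i for i in range(100_000))  # stand-in for the code under profile
pr.disable()

stats = pstats.Stats(pr, stream=sys.stderr)  # report to stderr, not stdout
stats.sort_stats("cumulative")
stats.print_stats(20)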
@@ -20,6 +20,7 @@
 import urllib.parse
 from typing import Optional, Union

+import llnl.url
 import llnl.util.tty as tty
 from llnl.util.filesystem import mkdirp

@@ -29,7 +30,6 @@
 import spack.fetch_strategy as fs
 import spack.mirror
 import spack.spec
-import spack.url as url
 import spack.util.path
 import spack.util.spack_json as sjson
 import spack.util.spack_yaml as syaml

@@ -375,7 +375,7 @@ def _determine_extension(fetcher):
     if isinstance(fetcher, fs.URLFetchStrategy):
         if fetcher.expand_archive:
             # If we fetch with a URLFetchStrategy, use URL's archive type
-            ext = url.determine_url_file_extension(fetcher.url)
+            ext = llnl.url.determine_url_file_extension(fetcher.url)

             if ext:
                 # Remove any leading dots
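`determine_url_file_extension` moved from `spack.url` into the `llnl.url` utility package, so the mirror code now only needs the llnl-level import. What such a helper boils down to, sketched with the stdlib (illustrative only; the real function knows about more compound suffixes than this):

import os
import urllib.parse


def url_file_extension(url: str) -> str:
    # Strip query/fragment, then take the path's suffix; treat ".tar.gz"
    # style double extensions as a special case.
    path = urllib.parse.urlparse(url).path
    if path.endswith(".tar.gz"):
        return "tar.gz"
    return os.path.splitext(path)[1].lstrip(".")


assert url_file_extension("https://example.com/pkg-1.0.tar.gz?x=1") == "tar.gz"
assert url_file_extension("https://example.com/pkg-1.0.zip") == "zip"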
@@ -178,7 +178,7 @@ def merge_config_rules(configuration, spec):
         if spec.satisfies(constraint):
             if hasattr(constraint, "override") and constraint.override:
                 spec_configuration = {}
-            update_dictionary_extending_lists(spec_configuration, action)
+            update_dictionary_extending_lists(spec_configuration, copy.deepcopy(action))

     # Transform keywords for dependencies or prerequisites into a list of spec
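The `copy.deepcopy(action)` fix addresses a classic aliasing bug: a merge helper that extends lists in place can end up sharing list objects with the source dict, so repeated merges silently mutate the configuration they read from. A small demonstration of the failure mode and the fix (`update_extending_lists` is a simplified stand-in for the module's merge helper):

import copy


def update_extending_lists(target, source):
    # Merge `source` into `target`: lists are extended in place,
    # other values are assigned.
    for key, value in source.items():
        if key in target and isinstance(value, list):
            target[key].extend(value)
        else:
            target[key] = value  # assigns the *same* list object


action = {"environment": ["PATH"]}

merged = {}
update_extending_lists(merged, action)  # merged now aliases action's list
update_extending_lists(merged, action)  # this extend mutates action too
assert action == {"environment": ["PATH", "PATH"]}  # caller's dict corrupted

action = {"environment": ["PATH"]}
merged = {}
update_extending_lists(merged, copy.deepcopy(action))  # the fix: merge a copy
update_extending_lists(merged, copy.deepcopy(action))
assert action == {"environment": ["PATH"]}  # source stays pristine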
@@ -142,6 +142,7 @@ def __init__(self):
             "11": "bigsur",
             "12": "monterey",
             "13": "ventura",
+            "14": "sonoma",
         }

         version = macos_version()
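Adding the `"14": "sonoma"` entry teaches the OS detection to name releases reporting as macOS 14. The lookup amounts to mapping the major version from `platform.mac_ver()` onto a release-name table, roughly (`macos_release_name` is a hypothetical stand-in for the detection code):

import platform

MACOS_NAMES = {
    "11": "bigsur",
    "12": "monterey",
    "13": "ventura",
    "14": "sonoma",
}


def macos_release_name(version: str = "") -> str:
    # platform.mac_ver() returns ("14.5", ...) on macOS, "" elsewhere.
    version = version or platform.mac_ver()[0]
    major = version.split(".")[0]
    return MACOS_NAMES.get(major, "macos")


assert macos_release_name("14.2.1") == "sonoma"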
@@ -67,7 +67,7 @@
 from spack.build_systems.waf import WafPackage
 from spack.build_systems.xorg import XorgPackage
 from spack.builder import run_after, run_before
-from spack.dependency import all_deptypes
+from spack.deptypes import ALL_TYPES as all_deptypes
 from spack.directives import *
 from spack.install_test import (
     SkipTest,

@@ -96,6 +96,7 @@
     on_package_attributes,
 )
 from spack.spec import InvalidSpecDetected, Spec
+from spack.util.cpus import determine_number_of_jobs
 from spack.util.executable import *
 from spack.variant import (
     any_combination_of,
Some files were not shown because too many files have changed in this diff.