Compare commits
test-tag-p...develop-20 (460 commits)
.github/workflows/audit.yaml (2 changes)

@@ -23,7 +23,7 @@ jobs:
         operating_system: ["ubuntu-latest", "macos-latest"]
     steps:
       - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
+      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
        with:
          python-version: ${{inputs.python_version}}
      - name: Install Python packages
.github/workflows/nightly-win-builds.yml (2 changes)

@@ -17,7 +17,7 @@ jobs:
       - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
+      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
        with:
          python-version: 3.9
      - name: Install Python packages
.github/workflows/unit_tests.yaml (9 changes)

@@ -50,7 +50,7 @@ jobs:
       - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
+      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install System packages

@@ -97,7 +97,7 @@ jobs:
       - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
+      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
        with:
          python-version: '3.11'
      - name: Install System packages

@@ -155,7 +155,7 @@ jobs:
       - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
+      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
        with:
          python-version: '3.11'
      - name: Install System packages

@@ -165,6 +165,7 @@ jobs:
       - name: Install Python packages
         run: |
           pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
+          pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.

@@ -189,7 +190,7 @@ jobs:
       - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
+      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Python packages
.github/workflows/valid-style.yml (4 changes)

@@ -19,7 +19,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
+      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
        with:
          python-version: '3.11'
          cache: 'pip'

@@ -38,7 +38,7 @@ jobs:
       - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
+      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
        with:
          python-version: '3.11'
          cache: 'pip'
.github/workflows/windows_python.yml (6 changes)

@@ -18,7 +18,7 @@ jobs:
       - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
+      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
        with:
          python-version: 3.9
      - name: Install Python packages

@@ -42,7 +42,7 @@ jobs:
       - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
+      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
        with:
          python-version: 3.9
      - name: Install Python packages

@@ -66,7 +66,7 @@ jobs:
       - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
+      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -36,3 +36,9 @@ concretizer:
   # on each root spec, allowing different versions and variants of the same package in
   # an environment.
   unify: true
+  # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
+  duplicates:
+    # "none": allows a single node for any package in the DAG.
+    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
+    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
+    strategy: none
@@ -49,6 +49,7 @@ packages:
     pbs: [openpbs, torque]
     pil: [py-pillow]
     pkgconfig: [pkgconf, pkg-config]
+    qmake: [qt-base, qt]
     rpc: [libtirpc]
     scalapack: [netlib-scalapack, amdscalapack]
     sycl: [hipsycl]

@@ -59,6 +60,7 @@ packages:
     xxd: [xxd-standalone, vim]
     yacc: [bison, byacc]
     ziglang: [zig]
+    zlib-api: [zlib, zlib-ng+compat]
   permissions:
     read: world
     write: user
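The new zlib-api entry above is a virtual package with two possible providers. As a minimal sketch of the other half of that relationship (assuming Spack's package DSL; the recipe below is illustrative, not the real zlib-ng package), a provider declares the virtual with a provides() directive, optionally conditioned on a variant:

    # Hypothetical sketch of a provider for the "zlib-api" virtual listed above.
    # Package name, homepage and variant default are placeholders; see the real
    # zlib-ng recipe in the Spack repository for the actual directives.
    from spack.package import *


    class ZlibNg(Package):
        """Illustrative zlib replacement with a zlib-compatible API."""

        homepage = "https://github.com/zlib-ng/zlib-ng"

        # Only the +compat build exposes the zlib-compatible API, so the
        # provider declaration is conditional on that variant.
        variant("compat", default=True, description="Enable compatibility API")
        provides("zlib-api", when="+compat")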
@@ -48,14 +48,10 @@ Here is an example where a build cache is created in a local directory named

 .. code-block:: console

-   $ spack buildcache push --allow-root ./spack-cache ninja
+   $ spack buildcache push ./spack-cache ninja
    ==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache

-Not that ``ninja`` must be installed locally for this to work.
-
-We're using the ``--allow-root`` flag to tell Spack that is OK when any of
-the binaries we're pushing contain references to the local Spack install
-directory.
+Note that ``ninja`` must be installed locally for this to work.

 Once you have a build cache, you can add it as a mirror, discussed next.

@@ -147,7 +143,7 @@ and then install from it exclusively, you would do:

    $ spack mirror add E4S https://cache.e4s.io
    $ spack buildcache keys --install --trust
-   $ spack install --use-buildache only <package>
+   $ spack install --use-buildcache only <package>

 We use ``--install`` and ``--trust`` to say that we are installing keys to our
 keyring, and trusting all downloaded keys.
@@ -32,9 +32,14 @@ can't be found. You can readily check if any prerequisite for using Spack is missing

 Spack will take care of bootstrapping any missing dependency marked as [B]. Dependencies marked as [-] are instead required to be found on the system.

+   % echo $?
+   1
+
 In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
 are missing and it's giving detailed information on why they are needed and whether
-they can be bootstrapped. Running a command that concretize a spec, like:
+they can be bootstrapped. The return code of this command summarizes the results, if any
+dependencies are missing the return code is ``1``, otherwise ``0``. Running a command that
+concretizes a spec, like:

 .. code-block:: console

@@ -44,7 +49,7 @@ they can be bootstrapped. Running a command that concretize a spec, like:
    ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
    [ ... ]

-triggers the bootstrapping of clingo from pre-built binaries as expected.
+automatically triggers the bootstrapping of clingo from pre-built binaries as expected.

 Users can also bootstrap all the dependencies needed by Spack in a single command, which
 might be useful to setup containers or other similar environments:
@@ -104,11 +104,13 @@ Clone `spack-configs <https://github.com/spack/spack-configs>`_ repo and activate

 `Intel oneAPI CPU environment <https://github.com/spack/spack-configs/blob/main/INTEL/CPU/spack.yaml>`_ contains applications tested and validated by Intel, this list is constantly extended. And currently it supports:

 - `Devito <https://www.devitoproject.org/>`_
 - `GROMACS <https://www.gromacs.org/>`_
 - `HPCG <https://www.hpcg-benchmark.org/>`_
 - `HPL <https://netlib.org/benchmark/hpl/>`_
 - `LAMMPS <https://www.lammps.org/#gsc.tab=0>`_
 - `OpenFOAM <https://www.openfoam.com/>`_
 - `Quantum Espresso <https://www.quantum-espresso.org/>`_
 - `STREAM <https://www.cs.virginia.edu/stream/>`_
 - `WRF <https://github.com/wrf-model/WRF>`_
@@ -32,7 +32,7 @@ By default, these phases run:

 .. code-block:: console

-   $ python configure.py --bindir ... --destdir ...
+   $ sip-build --verbose --target-dir ...
    $ make
    $ make install

@@ -41,30 +41,30 @@ By default, these phases run:
 Important files
 ^^^^^^^^^^^^^^^

-Each SIP package comes with a custom ``configure.py`` build script,
-written in Python. This script contains instructions to build the project.
+Each SIP package comes with a custom configuration file written in Python.
+For newer packages, this is called ``project.py``, while in older packages,
+it may be called ``configure.py``. This script contains instructions to build
+the project.

 ^^^^^^^^^^^^^^^^^^^^^^^^^
 Build system dependencies
 ^^^^^^^^^^^^^^^^^^^^^^^^^

-``SIPPackage`` requires several dependencies. Python is needed to run
-the ``configure.py`` build script, and to run the resulting Python
-libraries. Qt is needed to provide the ``qmake`` command. SIP is also
-needed to build the package. All of these dependencies are automatically
-added via the base class
+``SIPPackage`` requires several dependencies. Python and SIP are needed at build-time
+to run the aforementioned configure script. Python is also needed at run-time to
+actually use the installed Python library. And as we are building Python bindings
+for C/C++ libraries, Python is also needed as a link dependency. All of these
+dependencies are automatically added via the base class.

 .. code-block:: python

-   extends('python')
+   extends("python", type=("build", "link", "run"))
+   depends_on("py-sip", type="build")

-   depends_on('qt', type='build')
-
-   depends_on('py-sip', type='build')

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Passing arguments to ``configure.py``
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Passing arguments to ``sip-build``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -73,10 +73,10 @@ arguments to the configure phase, you can use:

 .. code-block:: python

    def configure_args(self):
-       return ['--no-python-dbus']
+       return ["--no-python-dbus"]


-A list of valid options can be found by running ``python configure.py --help``.
+A list of valid options can be found by running ``sip-build --help``.

 ^^^^^^^
 Testing
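To make the directives above concrete, here is a minimal, hypothetical SIPPackage recipe (the package name, URL, and checksum are placeholders, not a real Spack package):

    # Hypothetical SIP package recipe; name, URL and sha256 are placeholders.
    from spack.package import *


    class PyExampleQtBindings(SIPPackage):
        """Illustrative Python bindings for a C++ library, built with sip-build."""

        homepage = "https://example.org/py-example-qt-bindings"
        url = "https://example.org/downloads/py-example-qt-bindings-1.0.tar.gz"

        version("1.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")

        # Extra flags handed to the configure (sip-build) phase, as described above.
        def configure_args(self):
            return ["--no-docstrings"]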
@@ -214,6 +214,7 @@ def setup(sphinx):
     # Spack classes that intersphinx is unable to resolve
     ("py:class", "spack.version.StandardVersion"),
     ("py:class", "spack.spec.DependencySpec"),
     ("py:class", "spack.spec.SpecfileReaderBase"),
     ("py:class", "spack.install_test.Pb"),
 ]
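The tuples above extend a suppression list in the Sphinx configuration; assuming it is the standard nitpick_ignore mechanism (Sphinx conf.py files are plain Python), the pattern looks roughly like this, with one illustrative entry added:

    # In a Sphinx conf.py: suppress "reference target not found" warnings for
    # classes that intersphinx cannot resolve.
    nitpicky = True
    nitpick_ignore = [
        ("py:class", "spack.spec.DependencySpec"),      # from the list above
        ("py:class", "mypackage.HypotheticalClass"),    # illustrative entry
    ]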
@@ -400,28 +400,30 @@ that are already in the Lmod hierarchy.


 .. note::

-   Tcl modules
-   Tcl modules also allow for explicit conflicts between modulefiles.
+   Tcl and Lua modules also allow for explicit conflicts between modulefiles.

-.. code-block:: yaml
+   .. code-block:: yaml

-   modules:
-     default:
-       enable:
-         - tcl
-       tcl:
-         projections:
-           all: '{name}/{version}-{compiler.name}-{compiler.version}'
-         all:
-           conflict:
-             - '{name}'
-             - 'intel/14.0.1'
+      modules:
+        default:
+          enable:
+            - tcl
+          tcl:
+            projections:
+              all: '{name}/{version}-{compiler.name}-{compiler.version}'
+            all:
+              conflict:
+                - '{name}'
+                - 'intel/14.0.1'

-will create module files that will conflict with ``intel/14.0.1`` and with the
-base directory of the same module, effectively preventing the possibility to
-load two or more versions of the same software at the same time. The tokens
-that are available for use in this directive are the same understood by
-the :meth:`~spack.spec.Spec.format` method.
+   will create module files that will conflict with ``intel/14.0.1`` and with the
+   base directory of the same module, effectively preventing the possibility to
+   load two or more versions of the same software at the same time. The tokens
+   that are available for use in this directive are the same understood by the
+   :meth:`~spack.spec.Spec.format` method.

-For Lmod and Environment Modules versions prior 4.2, it is important to
-express the conflict on both modulefiles conflicting with each other.
+   For Lmod and Environment Modules versions prior 4.2, it is important to
+   express the conflict on both modulefiles conflicting with each other.


 .. note::
.. note::
|
||||
|
||||
@@ -2243,7 +2243,7 @@ looks like this:
|
||||
url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"
|
||||
|
||||
version("1.0.1h", md5="8d6d684a9430d5cc98a62a5d8fbda8cf")
|
||||
depends_on("zlib")
|
||||
depends_on("zlib-api")
|
||||
|
||||
parallel = False
|
||||
|
||||
|
||||
@@ -1,8 +1,13 @@
 sphinx==6.2.1
 sphinxcontrib-programoutput==0.17
-sphinx_design==0.4.1
+sphinx_design==0.5.0
 sphinx-rtd-theme==1.2.2
 python-levenshtein==0.21.1
 docutils==0.18.1
-pygments==2.15.1
-urllib3==2.0.3
+pygments==2.16.1
+urllib3==2.0.4
+pytest==7.4.0
+isort==5.12.0
+black==23.7.0
+flake8==6.1.0
+mypy==1.5.0
@@ -217,13 +217,7 @@ file would live in the ``build_cache`` directory of a binary mirror::

     "binary_cache_checksum": {
         "hash_algorithm": "sha256",
         "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
     },
-
-    "buildinfo": {
-        "relative_prefix":
-        "linux-ubuntu18.04-haswell/gcc-7.5.0/zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow",
-        "relative_rpaths": false
-    }
   }
 }

 -----BEGIN PGP SIGNATURE-----
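The binary_cache_checksum above is the SHA-256 digest of the compressed binary tarball. As a minimal sketch of verifying it by hand (the file path is hypothetical):

    import hashlib

    # Hypothetical tarball path; the real artifact lives under build_cache/ in the mirror.
    tarball = "spack-cache/build_cache/zlib-1.2.12.spack"

    sha256 = hashlib.sha256()
    with open(tarball, "rb") as f:
        # Stream in 1 MiB chunks so large binaries don't have to fit in memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha256.update(chunk)

    # Compare against the "hash" field of binary_cache_checksum in the signed spec file.
    print(sha256.hexdigest())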
@@ -9,7 +9,7 @@
 import re
 import sys
 from argparse import ArgumentParser
-from typing import IO, Optional, Sequence, Tuple
+from typing import IO, Any, Iterable, List, Optional, Sequence, Tuple, Union


 class Command:

@@ -25,9 +25,9 @@ def __init__(
         prog: str,
         description: Optional[str],
         usage: str,
-        positionals: Sequence[Tuple[str, str]],
-        optionals: Sequence[Tuple[Sequence[str], str, str]],
-        subcommands: Sequence[Tuple[ArgumentParser, str]],
+        positionals: List[Tuple[str, Optional[Iterable[Any]], Union[int, str, None], str]],
+        optionals: List[Tuple[Sequence[str], List[str], str, Union[int, str, None], str]],
+        subcommands: List[Tuple[ArgumentParser, str, str]],
     ) -> None:
         """Initialize a new Command instance.

@@ -96,13 +96,30 @@ def parse(self, parser: ArgumentParser, prog: str) -> Command:
         if action.option_strings:
             flags = action.option_strings
             dest_flags = fmt._format_action_invocation(action)
-            help = self._expand_help(action) if action.help else ""
-            help = help.replace("\n", " ")
-            optionals.append((flags, dest_flags, help))
+            nargs = action.nargs
+            help = (
+                self._expand_help(action)
+                if action.help and action.help != argparse.SUPPRESS
+                else ""
+            )
+            help = help.split("\n")[0]
+
+            if action.choices is not None:
+                dest = [str(choice) for choice in action.choices]
+            else:
+                dest = [action.dest]
+
+            optionals.append((flags, dest, dest_flags, nargs, help))
         elif isinstance(action, argparse._SubParsersAction):
             for subaction in action._choices_actions:
                 subparser = action._name_parser_map[subaction.dest]
-                subcommands.append((subparser, subaction.dest))
+                help = (
+                    self._expand_help(subaction)
+                    if subaction.help and action.help != argparse.SUPPRESS
+                    else ""
+                )
+                help = help.split("\n")[0]
+                subcommands.append((subparser, subaction.dest, help))

                 # Look for aliases of the form 'name (alias, ...)'
                 if self.aliases and isinstance(subaction.metavar, str):

@@ -111,12 +128,22 @@ def parse(self, parser: ArgumentParser, prog: str) -> Command:
                     aliases = match.group(2).split(", ")
                     for alias in aliases:
                         subparser = action._name_parser_map[alias]
-                        subcommands.append((subparser, alias))
+                        help = (
+                            self._expand_help(subaction)
+                            if subaction.help and action.help != argparse.SUPPRESS
+                            else ""
+                        )
+                        help = help.split("\n")[0]
+                        subcommands.append((subparser, alias, help))
         else:
             args = fmt._format_action_invocation(action)
-            help = self._expand_help(action) if action.help else ""
-            help = help.replace("\n", " ")
-            positionals.append((args, help))
+            help = (
+                self._expand_help(action)
+                if action.help and action.help != argparse.SUPPRESS
+                else ""
+            )
+            help = help.split("\n")[0]
+            positionals.append((args, action.choices, action.nargs, help))

         return Command(prog, description, usage, positionals, optionals, subcommands)

@@ -146,7 +173,7 @@ def _write(self, parser: ArgumentParser, prog: str, level: int = 0) -> None:
         cmd = self.parse(parser, prog)
         self.out.write(self.format(cmd))

-        for subparser, prog in cmd.subcommands:
+        for subparser, prog, help in cmd.subcommands:
             self._write(subparser, prog, level=level + 1)

     def write(self, parser: ArgumentParser) -> None:

@@ -205,13 +232,13 @@ def format(self, cmd: Command) -> str:

         if cmd.positionals:
             string.write(self.begin_positionals())
-            for args, help in cmd.positionals:
+            for args, choices, nargs, help in cmd.positionals:
                 string.write(self.positional(args, help))
             string.write(self.end_positionals())

         if cmd.optionals:
             string.write(self.begin_optionals())
-            for flags, dest_flags, help in cmd.optionals:
+            for flags, dest, dest_flags, nargs, help in cmd.optionals:
                 string.write(self.optional(dest_flags, help))
             string.write(self.end_optionals())

@@ -338,7 +365,7 @@ def end_optionals(self) -> str:
         """
         return ""

-    def begin_subcommands(self, subcommands: Sequence[Tuple[ArgumentParser, str]]) -> str:
+    def begin_subcommands(self, subcommands: List[Tuple[ArgumentParser, str, str]]) -> str:
         """Table with links to other subcommands.

         Arguments:

@@ -355,114 +382,8 @@ def begin_subcommands(self, subcommands: Sequence[Tuple[ArgumentParser, str]]) -> str:

         """

-        for cmd, _ in subcommands:
+        for cmd, _, _ in subcommands:
             prog = re.sub(r"^[^ ]* ", "", cmd.prog)
             string += " * :ref:`{0} <{1}>`\n".format(prog, cmd.prog.replace(" ", "-"))

         return string + "\n"
-
-
-class ArgparseCompletionWriter(ArgparseWriter):
-    """Write argparse output as shell programmable tab completion functions."""
-
-    def format(self, cmd: Command) -> str:
-        """Return the string representation of a single node in the parser tree.
-
-        Args:
-            cmd: Parsed information about a command or subcommand.
-
-        Returns:
-            String representation of this subcommand.
-        """
-
-        assert cmd.optionals  # we should always at least have -h, --help
-        assert not (cmd.positionals and cmd.subcommands)  # one or the other
-
-        # We only care about the arguments/flags, not the help messages
-        positionals: Tuple[str, ...] = ()
-        if cmd.positionals:
-            positionals, _ = zip(*cmd.positionals)
-        optionals, _, _ = zip(*cmd.optionals)
-        subcommands: Tuple[str, ...] = ()
-        if cmd.subcommands:
-            _, subcommands = zip(*cmd.subcommands)
-
-        # Flatten lists of lists
-        optionals = [x for xx in optionals for x in xx]
-
-        return (
-            self.start_function(cmd.prog)
-            + self.body(positionals, optionals, subcommands)
-            + self.end_function(cmd.prog)
-        )
-
-    def start_function(self, prog: str) -> str:
-        """Return the syntax needed to begin a function definition.
-
-        Args:
-            prog: Program name.
-
-        Returns:
-            Function definition beginning.
-        """
-        name = prog.replace("-", "_").replace(" ", "_")
-        return "\n_{0}() {{".format(name)
-
-    def end_function(self, prog: str) -> str:
-        """Return the syntax needed to end a function definition.
-
-        Args:
-            prog: Program name
-
-        Returns:
-            Function definition ending.
-        """
-        return "}\n"
-
-    def body(
-        self, positionals: Sequence[str], optionals: Sequence[str], subcommands: Sequence[str]
-    ) -> str:
-        """Return the body of the function.
-
-        Args:
-            positionals: List of positional arguments.
-            optionals: List of optional arguments.
-            subcommands: List of subcommand parsers.
-
-        Returns:
-            Function body.
-        """
-        return ""
-
-    def positionals(self, positionals: Sequence[str]) -> str:
-        """Return the syntax for reporting positional arguments.
-
-        Args:
-            positionals: List of positional arguments.
-
-        Returns:
-            Syntax for positional arguments.
-        """
-        return ""
-
-    def optionals(self, optionals: Sequence[str]) -> str:
-        """Return the syntax for reporting optional flags.
-
-        Args:
-            optionals: List of optional arguments.
-
-        Returns:
-            Syntax for optional flags.
-        """
-        return ""
-
-    def subcommands(self, subcommands: Sequence[str]) -> str:
-        """Return the syntax for reporting subcommands.
-
-        Args:
-            subcommands: List of subcommand parsers.
-
-        Returns:
-            Syntax for subcommand parsers
-        """
-        return ""
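To illustrate the new tuple shapes that this refactor threads through Command, here is a minimal sketch of a concrete writer that consumes them (the class and its output format are hypothetical; the real writers in Spack override more hooks):

    # Hypothetical minimal writer: one line per argument, showing the extra
    # (choices/dest, nargs) fields carried by each tuple after the refactor.
    class PlainTextWriter(ArgparseWriter):
        def format(self, cmd: Command) -> str:
            lines = [cmd.usage.rstrip()]
            for args, choices, nargs, help in cmd.positionals:
                lines.append(f"  {args}  (choices={choices}, nargs={nargs})  {help}")
            for flags, dest, dest_flags, nargs, help in cmd.optionals:
                lines.append(f"  {dest_flags}  (dest={dest}, nargs={nargs})  {help}")
            for _, prog, help in cmd.subcommands:
                lines.append(f"  {prog}: {help}")
            return "\n".join(lines) + "\n"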
@@ -821,7 +821,7 @@ def __getattr__(self, name):
         # 'instance'/'_instance' to be defined or it will enter an infinite
         # loop, so protect against that here.
         if name in ["_instance", "instance"]:
-            raise AttributeError()
+            raise AttributeError(f"cannot create {name}")
         return getattr(self.instance, name)

     def __getitem__(self, name):

@@ -843,27 +843,6 @@ def __repr__(self):
         return repr(self.instance)


-class LazyReference:
-    """Lazily evaluated reference to part of a singleton."""
-
-    def __init__(self, ref_function):
-        self.ref_function = ref_function
-
-    def __getattr__(self, name):
-        if name == "ref_function":
-            raise AttributeError()
-        return getattr(self.ref_function(), name)
-
-    def __getitem__(self, name):
-        return self.ref_function()[name]
-
-    def __str__(self):
-        return str(self.ref_function())
-
-    def __repr__(self):
-        return repr(self.ref_function())
-
-
 def load_module_from_file(module_name, module_path):
     """Loads a python module from the path of the corresponding file.
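The guard above matters because __getattr__ is only consulted for attributes that normal lookup cannot find; without it, looking up "instance" before __init__ has run would recurse forever. A minimal sketch of the pattern (simplified from Spack's Singleton; the factory wiring is illustrative):

    class Singleton:
        """Wraps a factory and instantiates it lazily on first attribute access."""

        def __init__(self, factory):
            self.factory = factory
            self._instance = None

        @property
        def instance(self):
            if self._instance is None:
                self._instance = self.factory()
            return self._instance

        def __getattr__(self, name):
            # During unpickling/copying, "instance" may be looked up before
            # __init__ ran; recursing into self.instance would loop forever,
            # so fail fast with a descriptive error instead.
            if name in ["_instance", "instance"]:
                raise AttributeError(f"cannot create {name}")
            return getattr(self.instance, name)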
@@ -9,9 +9,10 @@
 import sys
 import time
 from datetime import datetime
+from types import TracebackType
+from typing import IO, Any, Callable, ContextManager, Dict, Generator, Optional, Tuple, Type, Union

-import llnl.util.tty as tty
-from llnl.util.lang import pretty_seconds
+from llnl.util import lang, tty

 import spack.util.string

@@ -34,9 +35,12 @@
 ]


 #: A useful replacement for functions that should return True when not provided
 #: for example.
-true_fn = lambda: True
+ReleaseFnType = Optional[Callable[[], bool]]
+
+
+def true_fn() -> bool:
+    """A function that always returns True."""
+    return True


 class OpenFile:

@@ -48,7 +52,7 @@ class OpenFile:
     file descriptors as well in the future.
     """

-    def __init__(self, fh):
+    def __init__(self, fh: IO) -> None:
         self.fh = fh
         self.refs = 0

@@ -78,11 +82,11 @@ class OpenFileTracker:
     work in Python and assume the GIL.
     """

-    def __init__(self):
+    def __init__(self) -> None:
         """Create a new ``OpenFileTracker``."""
-        self._descriptors = {}
+        self._descriptors: Dict[Any, OpenFile] = {}

-    def get_fh(self, path):
+    def get_fh(self, path: str) -> IO:
         """Get a filehandle for a lockfile.

         This routine will open writable files for read/write even if you're asking

@@ -90,7 +94,7 @@ def get_fh(self, path):
         (write) lock later if requested.

         Arguments:
-            path (str): path to lock file we want a filehandle for
+            path: path to lock file we want a filehandle for
         """
         # Open writable files as 'r+' so we can upgrade to write later
         os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "r+"

@@ -139,7 +143,7 @@ def get_fh(self, path):
     def release_by_stat(self, stat):
         key = (stat.st_dev, stat.st_ino, os.getpid())
         open_file = self._descriptors.get(key)
-        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_inode
+        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_ino

         open_file.refs -= 1
         if not open_file.refs:

@@ -157,7 +161,7 @@ def purge(self):

 #: Open file descriptors for locks in this process. Used to prevent one process
 #: from opening the sam file many times for different byte range locks
-file_tracker = OpenFileTracker()
+FILE_TRACKER = OpenFileTracker()


 def _attempts_str(wait_time, nattempts):

@@ -166,7 +170,7 @@ def _attempts_str(wait_time, nattempts):
         return ""

     attempts = spack.util.string.plural(nattempts, "attempt")
-    return " after {} and {}".format(pretty_seconds(wait_time), attempts)
+    return " after {} and {}".format(lang.pretty_seconds(wait_time), attempts)


 class LockType:

@@ -188,7 +192,7 @@ def to_module(tid):
         return lock

     @staticmethod
-    def is_valid(op):
+    def is_valid(op: int) -> bool:
         return op == LockType.READ or op == LockType.WRITE

@@ -207,7 +211,16 @@ class Lock:
     overlapping byte ranges in the same file).
     """

-    def __init__(self, path, start=0, length=0, default_timeout=None, debug=False, desc=""):
+    def __init__(
+        self,
+        path: str,
+        *,
+        start: int = 0,
+        length: int = 0,
+        default_timeout: Optional[float] = None,
+        debug: bool = False,
+        desc: str = "",
+    ) -> None:
         """Construct a new lock on the file at ``path``.

         By default, the lock applies to the whole file. Optionally,

@@ -220,17 +233,17 @@ def __init__(self, path, start=0, length=0, default_timeout=None, debug=False, desc=""):
         beginning of the file.

         Args:
-            path (str): path to the lock
-            start (int): optional byte offset at which the lock starts
-            length (int): optional number of bytes to lock
-            default_timeout (int): number of seconds to wait for lock attempts,
+            path: path to the lock
+            start: optional byte offset at which the lock starts
+            length: optional number of bytes to lock
+            default_timeout: seconds to wait for lock attempts,
                 where None means to wait indefinitely
-            debug (bool): debug mode specific to locking
-            desc (str): optional debug message lock description, which is
+            debug: debug mode specific to locking
+            desc: optional debug message lock description, which is
                 helpful for distinguishing between different Spack locks.
         """
         self.path = path
-        self._file = None
+        self._file: Optional[IO] = None
         self._reads = 0
         self._writes = 0

@@ -242,7 +255,7 @@ def __init__(self, path, start=0, length=0, default_timeout=None, debug=False, desc=""):
         self.debug = debug

         # optional debug description
-        self.desc = " ({0})".format(desc) if desc else ""
+        self.desc = f" ({desc})" if desc else ""

         # If the user doesn't set a default timeout, or if they choose
         # None, 0, etc. then lock attempts will not time out (unless the

@@ -250,11 +263,15 @@ def __init__(self, path, start=0, length=0, default_timeout=None, debug=False, desc=""):
         self.default_timeout = default_timeout or None

         # PID and host of lock holder (only used in debug mode)
-        self.pid = self.old_pid = None
-        self.host = self.old_host = None
+        self.pid: Optional[int] = None
+        self.old_pid: Optional[int] = None
+        self.host: Optional[str] = None
+        self.old_host: Optional[str] = None

     @staticmethod
-    def _poll_interval_generator(_wait_times=None):
+    def _poll_interval_generator(
+        _wait_times: Optional[Tuple[float, float, float]] = None
+    ) -> Generator[float, None, None]:
         """This implements a backoff scheme for polling a contended resource
         by suggesting a succession of wait times between polls.

@@ -277,21 +294,21 @@ def _poll_interval_generator(_wait_times=None):
             num_requests += 1
             yield wait_time

-    def __repr__(self):
+    def __repr__(self) -> str:
         """Formal representation of the lock."""
         rep = "{0}(".format(self.__class__.__name__)
         for attr, value in self.__dict__.items():
             rep += "{0}={1}, ".format(attr, value.__repr__())
         return "{0})".format(rep.strip(", "))

-    def __str__(self):
+    def __str__(self) -> str:
         """Readable string (with key fields) of the lock."""
         location = "{0}[{1}:{2}]".format(self.path, self._start, self._length)
         timeout = "timeout={0}".format(self.default_timeout)
         activity = "#reads={0}, #writes={1}".format(self._reads, self._writes)
         return "({0}, {1}, {2})".format(location, timeout, activity)

-    def _lock(self, op, timeout=None):
+    def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
         """This takes a lock using POSIX locks (``fcntl.lockf``).

         The lock is implemented as a spin lock using a nonblocking call

@@ -310,7 +327,7 @@ def _lock(self, op, timeout=None):
         # Create file and parent directories if they don't exist.
         if self._file is None:
             self._ensure_parent_directory()
-            self._file = file_tracker.get_fh(self.path)
+            self._file = FILE_TRACKER.get_fh(self.path)

         if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "r":
             # Attempt to upgrade to write lock w/a read-only file.

@@ -319,7 +336,7 @@ def _lock(self, op, timeout=None):

         self._log_debug(
             "{} locking [{}:{}]: timeout {}".format(
-                op_str.lower(), self._start, self._length, pretty_seconds(timeout or 0)
+                op_str.lower(), self._start, self._length, lang.pretty_seconds(timeout or 0)
             )
         )

@@ -343,15 +360,20 @@ def _lock(self, op, timeout=None):
         total_wait_time = time.time() - start_time
         raise LockTimeoutError(op_str.lower(), self.path, total_wait_time, num_attempts)

-    def _poll_lock(self, op):
+    def _poll_lock(self, op: int) -> bool:
         """Attempt to acquire the lock in a non-blocking manner. Return whether
         the locking attempt succeeds
         """
+        assert self._file is not None, "cannot poll a lock without the file being set"
         module_op = LockType.to_module(op)
         try:
             # Try to get the lock (will raise if not available.)
             fcntl.lockf(
-                self._file, module_op | fcntl.LOCK_NB, self._length, self._start, os.SEEK_SET
+                self._file.fileno(),
+                module_op | fcntl.LOCK_NB,
+                self._length,
+                self._start,
+                os.SEEK_SET,
             )

             # help for debugging distributed locking

@@ -377,7 +399,7 @@ def _poll_lock(self, op):

         return False

-    def _ensure_parent_directory(self):
+    def _ensure_parent_directory(self) -> str:
         parent = os.path.dirname(self.path)

         # relative paths to lockfiles in the current directory have no parent

@@ -396,20 +418,22 @@ def _ensure_parent_directory(self):
                 raise
         return parent

-    def _read_log_debug_data(self):
+    def _read_log_debug_data(self) -> None:
         """Read PID and host data out of the file if it is there."""
+        assert self._file is not None, "cannot read debug log without the file being set"
         self.old_pid = self.pid
         self.old_host = self.host

         line = self._file.read()
         if line:
             pid, host = line.strip().split(",")
-            _, _, self.pid = pid.rpartition("=")
+            _, _, pid = pid.rpartition("=")
             _, _, self.host = host.rpartition("=")
-            self.pid = int(self.pid)
+            self.pid = int(pid)

-    def _write_log_debug_data(self):
+    def _write_log_debug_data(self) -> None:
         """Write PID and host data to the file, recording old values."""
+        assert self._file is not None, "cannot write debug log without the file being set"
         self.old_pid = self.pid
         self.old_host = self.host

@@ -423,20 +447,21 @@ def _write_log_debug_data(self):
         self._file.flush()
         os.fsync(self._file.fileno())

-    def _unlock(self):
+    def _unlock(self) -> None:
         """Releases a lock using POSIX locks (``fcntl.lockf``)

         Releases the lock regardless of mode. Note that read locks may
         be masquerading as write locks, but this removes either.

         """
-        fcntl.lockf(self._file, fcntl.LOCK_UN, self._length, self._start, os.SEEK_SET)
-        file_tracker.release_by_fh(self._file)
+        assert self._file is not None, "cannot unlock without the file being set"
+        fcntl.lockf(self._file.fileno(), fcntl.LOCK_UN, self._length, self._start, os.SEEK_SET)
+        FILE_TRACKER.release_by_fh(self._file)
         self._file = None
         self._reads = 0
         self._writes = 0

-    def acquire_read(self, timeout=None):
+    def acquire_read(self, timeout: Optional[float] = None) -> bool:
         """Acquires a recursive, shared lock for reading.

         Read and write locks can be acquired and released in arbitrary

@@ -461,7 +486,7 @@ def acquire_read(self, timeout=None):
             self._reads += 1
             return False

-    def acquire_write(self, timeout=None):
+    def acquire_write(self, timeout: Optional[float] = None) -> bool:
         """Acquires a recursive, exclusive lock for writing.

         Read and write locks can be acquired and released in arbitrary

@@ -491,7 +516,7 @@ def acquire_write(self, timeout=None):
             self._writes += 1
             return False

-    def is_write_locked(self):
+    def is_write_locked(self) -> bool:
         """Check if the file is write locked

         Return:

@@ -508,7 +533,7 @@ def is_write_locked(self):

         return False

-    def downgrade_write_to_read(self, timeout=None):
+    def downgrade_write_to_read(self, timeout: Optional[float] = None) -> None:
         """
         Downgrade from an exclusive write lock to a shared read.

@@ -527,7 +552,7 @@ def downgrade_write_to_read(self, timeout=None):
         else:
             raise LockDowngradeError(self.path)

-    def upgrade_read_to_write(self, timeout=None):
+    def upgrade_read_to_write(self, timeout: Optional[float] = None) -> None:
         """
         Attempts to upgrade from a shared read lock to an exclusive write.

@@ -546,7 +571,7 @@ def upgrade_read_to_write(self, timeout=None):
         else:
             raise LockUpgradeError(self.path)

-    def release_read(self, release_fn=None):
+    def release_read(self, release_fn: ReleaseFnType = None) -> bool:
         """Releases a read lock.

         Arguments:

@@ -582,7 +607,7 @@ def release_read(self, release_fn=None):
             self._reads -= 1
             return False

-    def release_write(self, release_fn=None):
+    def release_write(self, release_fn: ReleaseFnType = None) -> bool:
         """Releases a write lock.

         Arguments:

@@ -623,58 +648,58 @@ def release_write(self, release_fn=None):
         else:
             return False

-    def cleanup(self):
+    def cleanup(self) -> None:
         if self._reads == 0 and self._writes == 0:
             os.unlink(self.path)
         else:
             raise LockError("Attempting to cleanup active lock.")

-    def _get_counts_desc(self):
+    def _get_counts_desc(self) -> str:
         return (
             "(reads {0}, writes {1})".format(self._reads, self._writes) if tty.is_verbose() else ""
         )

-    def _log_acquired(self, locktype, wait_time, nattempts):
+    def _log_acquired(self, locktype, wait_time, nattempts) -> None:
         attempts_part = _attempts_str(wait_time, nattempts)
         now = datetime.now()
         desc = "Acquired at %s" % now.strftime("%H:%M:%S.%f")
         self._log_debug(self._status_msg(locktype, "{0}{1}".format(desc, attempts_part)))

-    def _log_acquiring(self, locktype):
+    def _log_acquiring(self, locktype) -> None:
         self._log_debug(self._status_msg(locktype, "Acquiring"), level=3)

-    def _log_debug(self, *args, **kwargs):
+    def _log_debug(self, *args, **kwargs) -> None:
         """Output lock debug messages."""
         kwargs["level"] = kwargs.get("level", 2)
         tty.debug(*args, **kwargs)

-    def _log_downgraded(self, wait_time, nattempts):
+    def _log_downgraded(self, wait_time, nattempts) -> None:
         attempts_part = _attempts_str(wait_time, nattempts)
         now = datetime.now()
         desc = "Downgraded at %s" % now.strftime("%H:%M:%S.%f")
         self._log_debug(self._status_msg("READ LOCK", "{0}{1}".format(desc, attempts_part)))

-    def _log_downgrading(self):
+    def _log_downgrading(self) -> None:
         self._log_debug(self._status_msg("WRITE LOCK", "Downgrading"), level=3)

-    def _log_released(self, locktype):
+    def _log_released(self, locktype) -> None:
         now = datetime.now()
         desc = "Released at %s" % now.strftime("%H:%M:%S.%f")
         self._log_debug(self._status_msg(locktype, desc))

-    def _log_releasing(self, locktype):
+    def _log_releasing(self, locktype) -> None:
         self._log_debug(self._status_msg(locktype, "Releasing"), level=3)

-    def _log_upgraded(self, wait_time, nattempts):
+    def _log_upgraded(self, wait_time, nattempts) -> None:
         attempts_part = _attempts_str(wait_time, nattempts)
         now = datetime.now()
         desc = "Upgraded at %s" % now.strftime("%H:%M:%S.%f")
         self._log_debug(self._status_msg("WRITE LOCK", "{0}{1}".format(desc, attempts_part)))

-    def _log_upgrading(self):
+    def _log_upgrading(self) -> None:
         self._log_debug(self._status_msg("READ LOCK", "Upgrading"), level=3)

-    def _status_msg(self, locktype, status):
+    def _status_msg(self, locktype: str, status: str) -> str:
         status_desc = "[{0}] {1}".format(status, self._get_counts_desc())
         return "{0}{1.desc}: {1.path}[{1._start}:{1._length}] {2}".format(
             locktype, self, status_desc

@@ -709,7 +734,13 @@ class LockTransaction:

     """

-    def __init__(self, lock, acquire=None, release=None, timeout=None):
+    def __init__(
+        self,
+        lock: Lock,
+        acquire: Union[ReleaseFnType, ContextManager] = None,
+        release: Union[ReleaseFnType, ContextManager] = None,
+        timeout: Optional[float] = None,
+    ) -> None:
         self._lock = lock
         self._timeout = timeout
         self._acquire_fn = acquire

@@ -724,15 +755,20 @@ def __enter__(self):
         else:
             return self._as

-    def __exit__(self, type, value, traceback):
+    def __exit__(
+        self,
+        exc_type: Optional[Type[BaseException]],
+        exc_value: Optional[BaseException],
+        traceback: Optional[TracebackType],
+    ) -> bool:
         suppress = False

         def release_fn():
             if self._release_fn is not None:
-                return self._release_fn(type, value, traceback)
+                return self._release_fn(exc_type, exc_value, traceback)

         if self._as and hasattr(self._as, "__exit__"):
-            if self._as.__exit__(type, value, traceback):
+            if self._as.__exit__(exc_type, exc_value, traceback):
                 suppress = True

         if self._exit(release_fn):

@@ -740,6 +776,12 @@ def release_fn():

         return suppress

+    def _enter(self) -> bool:
+        return NotImplemented
+
+    def _exit(self, release_fn: ReleaseFnType) -> bool:
+        return NotImplemented
+

 class ReadTransaction(LockTransaction):
     """LockTransaction context manager that does a read and releases it."""

@@ -785,7 +827,7 @@ def __init__(self, lock_type, path, time, attempts):
         super().__init__(
             fmt.format(
                 lock_type,
-                pretty_seconds(time),
+                lang.pretty_seconds(time),
                 attempts,
                 "attempt" if attempts == 1 else "attempts",
                 path,
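Taken together, the new keyword-only Lock signature and the transaction context managers are used roughly like this (a minimal sketch; the lock path, timeout, and description are illustrative):

    from llnl.util.lock import Lock, ReadTransaction, WriteTransaction

    # start/length/default_timeout/etc. must now be passed by keyword;
    # positional use is a TypeError after this change.
    lock = Lock("/tmp/example.lock", default_timeout=30.0, desc="example")

    # Acquire a shared read lock for the duration of the block.
    with ReadTransaction(lock):
        pass  # read the protected resource

    # Acquire an exclusive write lock; it is released on exit, even on error.
    with WriteTransaction(lock):
        pass  # mutate the protected resource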
@@ -12,6 +12,7 @@
 import traceback
 from datetime import datetime
 from sys import platform as _platform
+from typing import NoReturn

 if _platform != "win32":
     import fcntl

@@ -244,7 +245,7 @@ def warn(message, *args, **kwargs):
     info("Warning: " + str(message), *args, **kwargs)


-def die(message, *args, **kwargs):
+def die(message, *args, **kwargs) -> NoReturn:
     kwargs.setdefault("countback", 4)
     error(message, *args, **kwargs)
     sys.exit(1)
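Annotating die with NoReturn lets type checkers treat calls to it as terminating, which improves flow analysis in callers. A minimal sketch of the effect (the function names here are illustrative, not Spack's):

    from typing import NoReturn, Optional


    def die(message: str) -> NoReturn:
        raise SystemExit(message)


    def lookup(table: dict, key: str) -> int:
        value: Optional[int] = table.get(key)
        if value is None:
            die(f"missing key: {key}")
        # Because die() is NoReturn, a checker like mypy narrows value to int
        # here instead of reporting "Optional[int] is not int".
        return value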
@@ -780,7 +780,7 @@ def __enter__(self):
             raise RuntimeError("file argument must be set by __init__ ")

         # Open both write and reading on logfile
-        if type(self.logfile) == io.StringIO:
+        if isinstance(self.logfile, io.StringIO):
             self._ioflag = True
             # cannot have two streams on tempfile, so we must make our own
             sys.stdout = self.logfile
@@ -286,7 +286,7 @@ def _check_build_test_callbacks(pkgs, error_cls):
     """Ensure stand-alone test method is not included in build-time callbacks"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)

         # TODO (post-34236): "test*"->"test_*" once remove deprecated methods
@@ -312,7 +312,7 @@ def _check_patch_urls(pkgs, error_cls):

     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         for condition, patches in pkg_cls.patches.items():
             for patch in patches:
                 if not isinstance(patch, spack.patch.UrlPatch):
@@ -342,7 +342,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
     errors = []
     for pkg_name in pkgs:
         name_definitions = collections.defaultdict(list)
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

         for cls_item in inspect.getmro(pkg_cls):
             for name in RESERVED_NAMES:
@@ -383,7 +383,7 @@ def _ensure_packages_are_pickeleable(pkgs, error_cls):
     """Ensure that package objects are pickleable"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         pkg = pkg_cls(spack.spec.Spec(pkg_name))
         try:
             pickle.dumps(pkg)
@@ -424,7 +424,7 @@ def _ensure_all_versions_can_produce_a_fetcher(pkgs, error_cls):
     """Ensure all versions in a package can produce a fetcher"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         pkg = pkg_cls(spack.spec.Spec(pkg_name))
         try:
             spack.fetch_strategy.check_pkg_attributes(pkg)
@@ -449,7 +449,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
     ]
     for pkg_name in pkgs:
         details = []
-        filename = spack.repo.path.filename_for_package_name(pkg_name)
+        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
         with open(filename, "r") as package_file:
             for i, line in enumerate(package_file):
                 pattern = next((r for r in fixme_regexes if r.search(line)), None)
@@ -461,7 +461,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
             error_msg = "Package '{}' contains boilerplate that need to be removed"
             errors.append(error_cls(error_msg.format(pkg_name), details))

-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         if not pkg_cls.__doc__:
             error_msg = "Package '{}' miss a docstring"
             errors.append(error_cls(error_msg.format(pkg_name), []))
@@ -474,7 +474,7 @@ def _ensure_all_packages_use_sha256_checksums(pkgs, error_cls):
     """Ensure no packages use md5 checksums"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         if pkg_cls.manual_download:
             continue

@@ -511,7 +511,7 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
     """Ensure that methods modifying the build environment are ported to builder classes."""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         buildsystem_variant, _ = pkg_cls.variants["build_system"]
         buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
         builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]
@@ -538,7 +538,7 @@ def _linting_package_file(pkgs, error_cls):
     """Check for correctness of links"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

         # Does the homepage have http, and if so, does https work?
         if pkg_cls.homepage.startswith("http://"):
@@ -562,7 +562,7 @@ def _unknown_variants_in_directives(pkgs, error_cls):
     """Report unknown or wrong variants in directives for this package"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

         # Check "conflicts" directive
         for conflict, triggers in pkg_cls.conflicts.items():
@@ -628,15 +628,15 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
     """Report unknown dependencies and wrong variants for dependencies"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
-        filename = spack.repo.path.filename_for_package_name(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
         for dependency_name, dependency_data in pkg_cls.dependencies.items():
             # No need to analyze virtual packages
-            if spack.repo.path.is_virtual(dependency_name):
+            if spack.repo.PATH.is_virtual(dependency_name):
                 continue

             try:
-                dependency_pkg_cls = spack.repo.path.get_pkg_class(dependency_name)
+                dependency_pkg_cls = spack.repo.PATH.get_pkg_class(dependency_name)
             except spack.repo.UnknownPackageError:
                 # This dependency is completely missing, so report
                 # and continue the analysis
@@ -675,7 +675,7 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
     """Ensures that variant defaults are present and parsable from cli"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         for variant_name, entry in pkg_cls.variants.items():
             variant, _ = entry
             default_is_parsable = (
@@ -709,18 +709,33 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
     return errors


+@package_directives
+def _ensure_variants_have_descriptions(pkgs, error_cls):
+    """Ensures that all variants have a description."""
+    errors = []
+    for pkg_name in pkgs:
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        for variant_name, entry in pkg_cls.variants.items():
+            variant, _ = entry
+            if not variant.description:
+                error_msg = "Variant '{}' in package '{}' is missing a description"
+                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
+
+    return errors
+
+
 @package_directives
 def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
     """Report if version constraints used in directives are not satisfiable"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
-        filename = spack.repo.path.filename_for_package_name(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
         dependencies_to_check = []
         for dependency_name, dependency_data in pkg_cls.dependencies.items():
             # Skip virtual dependencies for the time being, check on
             # their versions can be added later
-            if spack.repo.path.is_virtual(dependency_name):
+            if spack.repo.PATH.is_virtual(dependency_name):
                 continue

             dependencies_to_check.extend([edge.spec for edge in dependency_data.values()])
@@ -729,7 +744,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
         for s in dependencies_to_check:
             dependency_pkg_cls = None
             try:
-                dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
+                dependency_pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
                 # Some packages have hacks that might cause failures on some platform
                 # Allow to explicitly set conditions to skip version checks in that case
                 skip_conditions = getattr(dependency_pkg_cls, "skip_version_audit", [])
@@ -772,7 +787,7 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
     except variant_exceptions as e:
         summary = pkg.name + ': wrong variant in "{0}" directive'
         summary = summary.format(directive)
-        filename = spack.repo.path.filename_for_package_name(pkg.name)
+        filename = spack.repo.PATH.filename_for_package_name(pkg.name)

         error_msg = str(e).strip()
         if isinstance(e, KeyError):
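All of the audit hooks touched above share one registration pattern, visible in full in the newly added `_ensure_variants_have_descriptions`: a function decorated with `@package_directives` that takes the package names plus an error class and returns a list of errors. A stripped-down sketch of that callback protocol, with an illustrative registry and error class standing in for Spack's real ones:

    # Hypothetical mini-registry mirroring the decorator-based audit pattern.
    _package_audits = []

    def package_directives(func):
        _package_audits.append(func)
        return func

    class AuditError:
        def __init__(self, summary, details):
            self.summary, self.details = summary, details

    @package_directives
    def _no_empty_names(pkgs, error_cls):
        errors = []
        for pkg_name in pkgs:
            if not pkg_name.strip():
                errors.append(error_cls("empty package name", []))
        return errors

    # Running every registered audit over a package list:
    reports = [audit(["zlib", "cmake"], AuditError) for audit in _package_audits]

Each check stays independent, and adding a new audit is just adding one more decorated function.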
@@ -52,6 +52,7 @@
 import spack.util.url as url_util
 import spack.util.web as web_util
 from spack.caches import misc_cache_location
+from spack.package_prefs import get_package_dir_permissions, get_package_group
 from spack.relocate_text import utf8_paths_to_single_binary_regex
 from spack.spec import Spec
 from spack.stage import Stage
@@ -61,6 +62,22 @@
 _build_cache_keys_relative_path = "_pgp"


+class BuildCacheDatabase(spack_db.Database):
+    """A database for binary buildcaches.
+
+    A database supports writing buildcache index files, in which case certain fields are not
+    needed in each install record, and no locking is required. To use this feature, it provides
+    ``lock_cfg=NO_LOCK`` and overrides the list of ``record_fields``.
+    """
+
+    record_fields = ("spec", "ref_count", "in_buildcache")
+
+    def __init__(self, root):
+        super().__init__(root, lock_cfg=spack_db.NO_LOCK)
+        self._write_transaction_impl = llnl.util.lang.nullcontext
+        self._read_transaction_impl = llnl.util.lang.nullcontext
+
+
 class FetchCacheError(Exception):
     """Error thrown when fetching the cache failed, usually a composite error list."""
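`BuildCacheDatabase` replaces the old `enable_transaction_locking=False` constructor plumbing by overriding the read/write transaction context managers with `nullcontext`, so writing an index never takes file locks. The same idea in isolation (the `Database` base class below is an illustrative stand-in, not Spack's):

    import contextlib

    class Database:
        def _write_transaction_impl(self):
            raise NotImplementedError("a real database would lock here")

    class IndexOnlyDatabase(Database):
        # Index writing needs no locking: swap the transaction machinery
        # for a context manager that does nothing.
        _write_transaction_impl = contextlib.nullcontext
        _read_transaction_impl = contextlib.nullcontext

    db = IndexOnlyDatabase()
    with db._write_transaction_impl():
        pass  # no lock is ever taken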
@@ -190,8 +207,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
         tmpdir = tempfile.mkdtemp()

         try:
-            db_root_dir = os.path.join(tmpdir, "db_root")
-            db = spack_db.Database(None, db_dir=db_root_dir, enable_transaction_locking=False)
+            db = BuildCacheDatabase(tmpdir)

             try:
                 self._index_file_cache.init_entry(cache_key)
@@ -703,7 +719,7 @@ def get_buildfile_manifest(spec):
     # look for them to decide if text file needs to be relocated or not
     prefixes = [d.prefix for d in spec.traverse(root=True, deptype="all") if not d.external]
     prefixes.append(spack.hooks.sbang.sbang_install_path())
-    prefixes.append(str(spack.store.layout.root))
+    prefixes.append(str(spack.store.STORE.layout.root))

     # Create a giant regex that matches all prefixes
     regex = utf8_paths_to_single_binary_regex(prefixes)
@@ -716,7 +732,7 @@ def get_buildfile_manifest(spec):
     for rel_path in visitor.symlinks:
         abs_path = os.path.join(root, rel_path)
         link = os.readlink(abs_path)
-        if os.path.isabs(link) and link.startswith(spack.store.layout.root):
+        if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
             data["link_to_relocate"].append(rel_path)

     # Non-symlinks.
@@ -764,9 +780,9 @@ def get_buildinfo_dict(spec):

     return {
         "sbang_install_path": spack.hooks.sbang.sbang_install_path(),
-        "buildpath": spack.store.layout.root,
+        "buildpath": spack.store.STORE.layout.root,
         "spackprefix": spack.paths.prefix,
-        "relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),
+        "relative_prefix": os.path.relpath(spec.prefix, spack.store.STORE.layout.root),
         "relocate_textfiles": manifest["text_to_relocate"],
         "relocate_binaries": manifest["binary_to_relocate"],
         "relocate_links": manifest["link_to_relocate"],
@@ -1059,13 +1075,10 @@ def generate_package_index(cache_prefix, concurrency=32):
     tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))

     tmpdir = tempfile.mkdtemp()
-    db_root_dir = os.path.join(tmpdir, "db_root")
-    db = spack_db.Database(
-        None,
-        db_dir=db_root_dir,
-        enable_transaction_locking=False,
-        record_fields=["spec", "ref_count", "in_buildcache"],
-    )
+    db = BuildCacheDatabase(tmpdir)
+    db.root = None
+    db_root_dir = db.database_directory

     try:
         _read_specs_and_push_index(file_list, read_fn, cache_prefix, db, db_root_dir, concurrency)
@@ -1196,9 +1209,17 @@ def tar_add_metadata(tar: tarfile.TarFile, path: str, data: dict):
     tar.addfile(deterministic_tarinfo(tarinfo), io.BytesIO(bstring))


-def _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo):
+def deterministic_tarinfo_without_buildinfo(tarinfo: tarfile.TarInfo):
+    """Skip buildinfo file when creating a tarball, and normalize other tarinfo fields."""
+    if tarinfo.name.endswith("/.spack/binary_distribution"):
+        return None
+
+    return deterministic_tarinfo(tarinfo)
+
+
+def _do_create_tarball(tarfile_path: str, binaries_dir: str, pkg_dir: str, buildinfo: dict):
     with gzip_compressed_tarfile(tarfile_path) as tar:
-        tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo)
+        tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo_without_buildinfo)
         tar_add_metadata(tar, buildinfo_file_name(pkg_dir), buildinfo)
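`tarfile.TarFile.add` accepts a `filter` callable that can rewrite each `TarInfo` in flight or drop a member entirely by returning `None`; that is the whole mechanism behind skipping the buildinfo file above. A self-contained sketch (the directory name is illustrative):

    import tarfile

    def normalize_and_skip_logs(tarinfo: tarfile.TarInfo):
        if tarinfo.name.endswith(".log"):
            return None  # returning None drops the member
        # Zeroed timestamps and ownership make the archive reproducible.
        tarinfo.mtime = 0
        tarinfo.uid = tarinfo.gid = 0
        tarinfo.uname = tarinfo.gname = ""
        return tarinfo

    with tarfile.open("example.tar.gz", "w:gz") as tar:
        tar.add("some_directory", arcname="pkg", filter=normalize_and_skip_logs)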
@@ -1206,9 +1227,6 @@ class PushOptions(NamedTuple):
     #: Overwrite existing tarball/metadata files in buildcache
     force: bool = False

-    #: Allow absolute paths to package prefixes when creating a tarball
-    allow_root: bool = False
-
     #: Regenerated indices after pushing
     regenerate_index: bool = False

@@ -1253,7 +1271,7 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
     # without concretizing with the current spack packages
     # and preferences

-    spec_file = spack.store.layout.spec_file_path(spec)
+    spec_file = spack.store.STORE.layout.spec_file_path(spec)
     specfile_name = tarball_name(spec, ".spec.json")
     specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
     signed_specfile_path = "{0}.sig".format(specfile_path)
@@ -1281,9 +1299,6 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
     # create info for later relocation and create tar
     buildinfo = get_buildinfo_dict(spec)

-    if not options.allow_root:
-        ensure_package_relocatable(buildinfo, binaries_dir)
-
     _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)

     # get the sha256 checksum of the tarball
@@ -1298,15 +1313,7 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
     else:
         raise ValueError("{0} not a valid spec file type".format(spec_file))
     spec_dict["buildcache_layout_version"] = 1
-    bchecksum = {}
-    bchecksum["hash_algorithm"] = "sha256"
-    bchecksum["hash"] = checksum
-    spec_dict["binary_cache_checksum"] = bchecksum
-    # Add original install prefix relative to layout root to spec.json.
-    # This will be used to determine is the directory layout has changed.
-    buildinfo = {}
-    buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
-    spec_dict["buildinfo"] = buildinfo
+    spec_dict["binary_cache_checksum"] = {"hash_algorithm": "sha256", "hash": checksum}

     with open(specfile_path, "w") as outfile:
         # Note: when using gpg clear sign, we need to avoid long lines (19995 chars).
@@ -1363,7 +1370,7 @@ def specs_to_be_packaged(
     packageable = lambda n: not n.external and n.installed

     # Mass install check
-    with spack.store.db.read_transaction():
+    with spack.store.STORE.db.read_transaction():
         return list(filter(packageable, nodes))


@@ -1562,12 +1569,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
     return None


-def ensure_package_relocatable(buildinfo, binaries_dir):
-    """Check if package binaries are relocatable."""
-    binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
-    relocate.ensure_binaries_are_relocatable(binaries)
-
-
 def dedupe_hardlinks_if_necessary(root, buildinfo):
     """Updates a buildinfo dict for old archives that did
     not dedupe hardlinks. De-duping hardlinks is necessary
@@ -1606,7 +1607,7 @@ def relocate_package(spec):
     """
     workdir = str(spec.prefix)
     buildinfo = read_buildinfo_file(workdir)
-    new_layout_root = str(spack.store.layout.root)
+    new_layout_root = str(spack.store.STORE.layout.root)
     new_prefix = str(spec.prefix)
     new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root))
     new_spack_prefix = str(spack.paths.prefix)
@@ -1791,6 +1792,27 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
     return tarfile_path


+def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
+    """Strip the top-level directory `prefix` from the member names in a tarfile."""
+    # Including trailing /, otherwise we end up with absolute paths.
+    regex = re.compile(re.escape(prefix) + "/*")
+
+    # Remove the top-level directory from the member (link)names.
+    # Note: when a tarfile is created, relative in-prefix symlinks are
+    # expanded to matching member names of tarfile entries. So, we have
+    # to ensure that those are updated too.
+    # Absolute symlinks are copied verbatim -- relocation should take care of
+    # them.
+    for m in tar.getmembers():
+        result = regex.match(m.name)
+        assert result is not None
+        m.name = m.name[result.end() :]
+        if m.linkname:
+            result = regex.match(m.linkname)
+            if result:
+                m.linkname = m.linkname[result.end() :]
+
+
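`_tar_strip_component` is the in-process equivalent of GNU tar's `--strip-components=1`: member names (and relative link names) are rewritten before extraction. A toy round-trip using only the standard library:

    import io
    import re
    import tarfile

    def strip_top_dir(tar: tarfile.TarFile, prefix: str):
        # Same approach as above: rewrite member names in place.
        regex = re.compile(re.escape(prefix) + "/*")
        for m in tar.getmembers():
            match = regex.match(m.name)
            if match:
                m.name = m.name[match.end():]

    # Build an in-memory tarball whose single top-level directory is "pkg-1.0".
    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode="w") as tar:
        data = b"hello"
        info = tarfile.TarInfo("pkg-1.0/bin/tool")
        info.size = len(data)
        tar.addfile(info, io.BytesIO(data))

    buf.seek(0)
    with tarfile.open(fileobj=buf, mode="r") as tar:
        strip_top_dir(tar, "pkg-1.0")
        print([m.name for m in tar.getmembers()])  # ['bin/tool']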
 def extract_tarball(spec, download_result, unsigned=False, force=False):
     """
     extract binary tarball for given package into install area
@@ -1801,6 +1823,14 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
         else:
             raise NoOverwriteException(str(spec.prefix))

+    # Create the install prefix
+    fsys.mkdirp(
+        spec.prefix,
+        mode=get_package_dir_permissions(spec),
+        group=get_package_group(spec),
+        default_perms="parents",
+    )
+
     specfile_path = download_result["specfile_stage"].save_filename

     with open(specfile_path, "r") as inputfile:
@@ -1854,58 +1884,58 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
                 tarfile_path, size, contents, "sha256", expected, local_checksum
             )

-    new_relative_prefix = str(os.path.relpath(spec.prefix, spack.store.layout.root))
-    # if the original relative prefix is in the spec file use it
-    buildinfo = spec_dict.get("buildinfo", {})
-    old_relative_prefix = buildinfo.get("relative_prefix", new_relative_prefix)
-    rel = buildinfo.get("relative_rpaths")
-    info = "old relative prefix %s\nnew relative prefix %s\nrelative rpaths %s"
-    tty.debug(info % (old_relative_prefix, new_relative_prefix, rel), level=2)
-
-    # Extract the tarball into the store root, presumably on the same filesystem.
-    # The directory created is the base directory name of the old prefix.
-    # Moving the old prefix name to the new prefix location should preserve
-    # hard links and symbolic links.
-    extract_tmp = os.path.join(spack.store.layout.root, ".tmp")
-    mkdirp(extract_tmp)
-    extracted_dir = os.path.join(extract_tmp, old_relative_prefix.split(os.path.sep)[-1])
-
-    with closing(tarfile.open(tarfile_path, "r")) as tar:
-        try:
-            tar.extractall(path=extract_tmp)
-        except Exception as e:
-            _delete_staged_downloads(download_result)
-            shutil.rmtree(extracted_dir)
-            raise e
     try:
-        shutil.move(extracted_dir, spec.prefix)
-    except Exception as e:
+        with closing(tarfile.open(tarfile_path, "r")) as tar:
+            # Remove install prefix from tarfile to extract directly into spec.prefix
+            _tar_strip_component(tar, prefix=_ensure_common_prefix(tar))
+            tar.extractall(path=spec.prefix)
+    except Exception:
+        shutil.rmtree(spec.prefix, ignore_errors=True)
         _delete_staged_downloads(download_result)
-        shutil.rmtree(extracted_dir)
-        raise e
+        raise

     os.remove(tarfile_path)
     os.remove(specfile_path)

     try:
         relocate_package(spec)
     except Exception as e:
-        shutil.rmtree(spec.prefix)
+        shutil.rmtree(spec.prefix, ignore_errors=True)
         raise e
     else:
         manifest_file = os.path.join(
-            spec.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
+            spec.prefix,
+            spack.store.STORE.layout.metadata_dir,
+            spack.store.STORE.layout.manifest_file_name,
         )
         if not os.path.exists(manifest_file):
             spec_id = spec.format("{name}/{hash:7}")
             tty.warn("No manifest file in tarball for spec %s" % spec_id)
     finally:
         if tmpdir:
-            shutil.rmtree(tmpdir)
+            shutil.rmtree(tmpdir, ignore_errors=True)
         if os.path.exists(filename):
             os.remove(filename)
         _delete_staged_downloads(download_result)
 def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
     # Get the shortest length directory.
     common_prefix = min((e.name for e in tar.getmembers() if e.isdir()), key=len, default=None)

     if common_prefix is None:
         raise ValueError("Tarball does not contain a common prefix")

     # Validate that each file starts with the prefix
     for member in tar.getmembers():
         if not member.name.startswith(common_prefix):
             raise ValueError(
                 f"Tarball contains file {member.name} outside of prefix {common_prefix}"
             )

     return common_prefix
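The validation relies on a simple invariant: if the archive really has a single root directory, the shortest directory name is that root, and every member name must start with it. A quick standalone check of the same logic:

    import io
    import tarfile

    def make_tar(entries):
        # entries: list of (name, is_dir) pairs
        buf = io.BytesIO()
        with tarfile.open(fileobj=buf, mode="w") as tar:
            for name, is_dir in entries:
                info = tarfile.TarInfo(name)
                info.type = tarfile.DIRTYPE if is_dir else tarfile.REGTYPE
                tar.addfile(info)
        buf.seek(0)
        return tarfile.open(fileobj=buf, mode="r")

    tar = make_tar([("pkg", True), ("pkg/bin", True), ("pkg/bin/tool", False)])
    dirs = [m.name for m in tar.getmembers() if m.isdir()]
    root = min(dirs, key=len)  # the shortest directory is the candidate root
    assert all(m.name.startswith(root) for m in tar.getmembers())
    print(root)  # pkg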
 def install_root_node(spec, unsigned=False, force=False, sha256=None):
     """Install the root node of a concrete spec from a buildcache.

@@ -1952,7 +1982,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
     tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
     extract_tarball(spec, download_result, unsigned, force)
     spack.hooks.post_install(spec, False)
-    spack.store.db.add(spec, spack.store.layout)
+    spack.store.STORE.db.add(spec, spack.store.STORE.layout)


 def install_single_spec(spec, unsigned=False, force=False):
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Function and classes needed to bootstrap Spack itself."""

-from .config import ensure_bootstrap_configuration, is_bootstrapping
+from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
 from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
 from .environment import BootstrapEnvironment, ensure_environment_dependencies
 from .status import status_message
@@ -18,4 +18,5 @@
     "ensure_environment_dependencies",
     "BootstrapEnvironment",
     "status_message",
+    "store_path",
 ]

@@ -50,7 +50,7 @@ def _try_import_from_store(
     # We have to run as part of this python interpreter
     query_spec += " ^" + spec_for_current_python()

-    installed_specs = spack.store.db.query(query_spec, installed=True)
+    installed_specs = spack.store.STORE.db.query(query_spec, installed=True)

     for candidate_spec in installed_specs:
         pkg = candidate_spec["python"].package
@@ -183,7 +183,7 @@ def _executables_in_store(
     executables_str = ", ".join(executables)
     msg = "[BOOTSTRAP EXECUTABLES {0}] Try installed specs with query '{1}'"
     tty.debug(msg.format(executables_str, query_spec))
-    installed_specs = spack.store.db.query(query_spec, installed=True)
+    installed_specs = spack.store.STORE.db.query(query_spec, installed=True)
     if installed_specs:
         for concrete_spec in installed_specs:
             bin_dir = concrete_spec.prefix.bin

@@ -124,9 +124,9 @@ def _read_and_sanitize_configuration() -> Dict[str, Any]:
 def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
     tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
     config_scopes: MutableSequence["spack.config.ConfigScope"] = [
-        spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)
+        spack.config.InternalConfigScope("_builtin", spack.config.CONFIG_DEFAULTS)
     ]
-    configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
+    configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
     for name, path in configuration_paths:
         platform = spack.platforms.host().name
         platform_scope = spack.config.ConfigScope(
@@ -150,18 +150,19 @@ def _add_compilers_if_missing() -> None:

 @contextlib.contextmanager
 def _ensure_bootstrap_configuration() -> Generator:
+    spack.store.ensure_singleton_created()
     bootstrap_store_path = store_path()
     user_configuration = _read_and_sanitize_configuration()
     with spack.environment.no_active_environment():
         with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform(
             spack.platforms.real_host()
-        ), spack.repo.use_repositories(spack.paths.packages_path), spack.store.use_store(
-            bootstrap_store_path
-        ):
+        ), spack.repo.use_repositories(spack.paths.packages_path):
             # Default configuration scopes excluding command line
             # and builtin but accounting for platform specific scopes
             config_scopes = _bootstrap_config_scopes()
-            with spack.config.use_configuration(*config_scopes):
+            with spack.config.use_configuration(*config_scopes), spack.store.use_store(
+                bootstrap_store_path, extra_data={"padded_length": 0}
+            ):
                 # We may need to compile code from sources, so ensure we
                 # have compilers for the current platform
                 _add_compilers_if_missing()

@@ -476,15 +476,22 @@ def ensure_executables_in_path_or_raise(
 def _add_externals_if_missing() -> None:
     search_list = [
         # clingo
-        spack.repo.path.get_pkg_class("cmake"),
-        spack.repo.path.get_pkg_class("bison"),
+        spack.repo.PATH.get_pkg_class("cmake"),
+        spack.repo.PATH.get_pkg_class("bison"),
         # GnuPG
-        spack.repo.path.get_pkg_class("gawk"),
+        spack.repo.PATH.get_pkg_class("gawk"),
+        # develop deps
+        spack.repo.PATH.get_pkg_class("git"),
     ]
     if IS_WINDOWS:
-        search_list.append(spack.repo.path.get_pkg_class("winbison"))
-    detected_packages = spack.detection.by_executable(search_list)
-    spack.detection.update_configuration(detected_packages, scope="bootstrap")
+        search_list.append(spack.repo.PATH.get_pkg_class("winbison"))
+    externals = spack.detection.by_executable(search_list)
+    # System git is typically deprecated, so mark as non-buildable to force it as external
+    non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
+    spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)
+    spack.detection.update_configuration(
+        non_buildable_externals, scope="bootstrap", buildable=False
+    )


 def clingo_root_spec() -> str:

@@ -23,6 +23,7 @@
 from ._common import _root_spec
 from .config import root_path, spec_for_current_python, store_path
+from .core import _add_externals_if_missing


 class BootstrapEnvironment(spack.environment.Environment):
@@ -185,6 +186,7 @@ def pytest_root_spec() -> str:

 def ensure_environment_dependencies() -> None:
     """Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
+    _add_externals_if_missing()
     with BootstrapEnvironment() as env:
         env.update_installations()
         env.update_syspath_and_environ()
@@ -1256,9 +1256,8 @@ def make_stack(tb, stack=None):
         func = getattr(obj, tb.tb_frame.f_code.co_name, "")
         if func:
             typename, *_ = func.__qualname__.partition(".")
-
-            if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
-                break
+        if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
+            break
     else:
         return None

@@ -39,7 +39,7 @@ def check_paths(path_list, filetype, predicate):
     check_paths(pkg.sanity_check_is_file, "file", os.path.isfile)
     check_paths(pkg.sanity_check_is_dir, "directory", os.path.isdir)

-    ignore_file = llnl.util.lang.match_predicate(spack.store.layout.hidden_file_regexes)
+    ignore_file = llnl.util.lang.match_predicate(spack.store.STORE.layout.hidden_file_regexes)
     if all(map(ignore_file, os.listdir(pkg.prefix))):
         msg = "Install failed for {0}. Nothing was installed!"
         raise spack.installer.InstallError(msg.format(pkg.name))
@@ -55,7 +55,8 @@ def flags_to_build_system_args(self, flags):
     setattr(self, "configure_flag_args", [])
     for flag, values in flags.items():
         if values:
-            values_str = "{0}={1}".format(flag.upper(), " ".join(values))
+            var_name = "LIBS" if flag == "ldlibs" else flag.upper()
+            values_str = "{0}={1}".format(var_name, " ".join(values))
             self.configure_flag_args.append(values_str)
     # Spack's fflags are meant for both F77 and FC, therefore we
     # additionally set FCFLAGS if required.
@@ -162,17 +162,6 @@ def initconfig_compiler_entries(self):
             libs_string = libs_format_string.format(lang)
             entries.append(cmake_cache_string(libs_string, libs_flags))

-        # Set the generator in the cached config
-        if self.spec.satisfies("generator=make"):
-            entries.append(cmake_cache_string("CMAKE_GENERATOR", "Unix Makefiles"))
-        if self.spec.satisfies("generator=ninja"):
-            entries.append(cmake_cache_string("CMAKE_GENERATOR", "Ninja"))
-            entries.append(
-                cmake_cache_string(
-                    "CMAKE_MAKE_PROGRAM", "{0}/ninja".format(spec["ninja"].prefix.bin)
-                )
-            )
-
         return entries

     def initconfig_mpi_entries(self):
@@ -248,7 +248,8 @@ def std_cmake_args(self):
     @staticmethod
     def std_args(pkg, generator=None):
         """Computes the standard cmake arguments for a generic package"""
-        generator = generator or "Unix Makefiles"
+        default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
+        generator = generator or default_generator
         valid_primary_generators = ["Unix Makefiles", "Ninja"]
         primary_generator = _extract_primary_generator(generator)
         if primary_generator not in valid_primary_generators:
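Making the default generator platform-dependent is a small but consequential change: `make` is rarely available on Windows, so Ninja becomes the fallback there while an explicit choice still wins. The pattern in isolation:

    import sys

    def pick_generator(generator=None):
        # Windows defaults to Ninja; everything else keeps Unix Makefiles.
        default = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
        return generator or default

Note that `generator or default` treats an empty string the same as "not given", which is the intended behavior here.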
@@ -209,5 +209,5 @@ def install(self, pkg, spec, prefix):
     def check(self):
         """Search Meson-generated files for the target ``test`` and run it if found."""
         with fs.working_dir(self.build_directory):
-            self._if_ninja_target_execute("test")
-            self._if_ninja_target_execute("check")
+            self.pkg._if_ninja_target_execute("test")
+            self.pkg._if_ninja_target_execute("check")
@@ -30,7 +30,7 @@


 class PythonExtension(spack.package_base.PackageBase):
-    maintainers("adamjstewart", "pradyunsg")
+    maintainers("adamjstewart")

     @property
     def import_modules(self):
@@ -173,7 +173,7 @@ def test_imports(self):

         # Make sure we are importing the installed modules,
         # not the ones in the source directory
-        python = inspect.getmodule(self).python.path
+        python = inspect.getmodule(self).python
         for module in self.import_modules:
             with test_part(
                 self,
@@ -201,7 +201,7 @@ def update_external_dependencies(self, extendee_spec=None):
         else:
             python = self.get_external_python_for_prefix()
             if not python.concrete:
-                repo = spack.repo.path.repo_for_pkg(python)
+                repo = spack.repo.PATH.repo_for_pkg(python)
                 python.namespace = repo.namespace

                 # Ensure architecture information is present
@@ -286,7 +286,7 @@ def get_external_python_for_prefix(self):
             spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
         """
         python_externals_installed = [
-            s for s in spack.store.db.query("python") if s.prefix == self.spec.external_path
+            s for s in spack.store.STORE.db.query("python") if s.prefix == self.spec.external_path
         ]
         if python_externals_installed:
             return python_externals_installed[0]
@@ -301,7 +301,7 @@ def get_external_python_for_prefix(self):
             return python_externals_configured[0]

         python_externals_detection = spack.detection.by_executable(
-            [spack.repo.path.get_pkg_class("python")], path_hints=[self.spec.external_path]
+            [spack.repo.PATH.get_pkg_class("python")], path_hints=[self.spec.external_path]
         )

         python_externals_detected = [
@@ -401,7 +401,8 @@ def build_directory(self):

     def config_settings(self, spec, prefix):
         """Configuration settings to be passed to the PEP 517 build backend.
-        Requires pip 22.1+, which requires Python 3.7+.
+
+        Requires pip 22.1 or newer.

         Args:
             spec (spack.spec.Spec): build spec
@@ -415,6 +416,8 @@ def config_settings(self, spec, prefix):
     def install_options(self, spec, prefix):
         """Extra arguments to be supplied to the setup.py install command.

+        Requires pip 23.0 or older.
+
         Args:
             spec (spack.spec.Spec): build spec
             prefix (spack.util.prefix.Prefix): installation prefix
@@ -428,6 +431,8 @@ def global_options(self, spec, prefix):
         """Extra global options to be supplied to the setup.py call before the install
         or bdist_wheel command.

+        Deprecated in pip 23.1.
+
         Args:
             spec (spack.spec.Spec): build spec
             prefix (spack.util.prefix.Prefix): installation prefix
@@ -28,7 +28,7 @@ class QMakePackage(spack.package_base.PackageBase):

     build_system("qmake")

-    depends_on("qt", type="build", when="build_system=qmake")
+    depends_on("qmake", type="build", when="build_system=qmake")


 @spack.builder.builder("qmake")
@@ -140,8 +140,6 @@ class ROCmPackage(PackageBase):
     depends_on("hsa-rocr-dev", when="+rocm")
     depends_on("hip +rocm", when="+rocm")

-    conflicts("^blt@:0.3.6", when="+rocm")
-
     # need amd gpu type for rocm builds
     conflicts("amdgpu_target=none", when="+rocm")

@@ -7,13 +7,14 @@
 import re

 import llnl.util.tty as tty
-from llnl.util.filesystem import find, join_path, working_dir
+from llnl.util.filesystem import find, working_dir

 import spack.builder
 import spack.install_test
 import spack.package_base
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
+from spack.util.executable import Executable

 from ._checks import BaseBuilder, execute_install_time_tests

@@ -39,9 +40,8 @@ class SIPPackage(spack.package_base.PackageBase):
     build_system("sip")

     with when("build_system=sip"):
-        extends("python")
-        depends_on("qt")
-        depends_on("py-sip")
+        extends("python", type=("build", "link", "run"))
+        depends_on("py-sip", type="build")

     @property
     def import_modules(self):
@@ -113,13 +113,13 @@ class SIPBuilder(BaseBuilder):
     * install

     The configure phase already adds a set of default flags. To see more
-    options, run ``python configure.py --help``.
+    options, run ``sip-build --help``.
     """

     phases = ("configure", "build", "install")

     #: Names associated with package methods in the old build-system format
-    legacy_methods = ("configure_file", "configure_args", "build_args", "install_args")
+    legacy_methods = ("configure_args", "build_args", "install_args")

     #: Names associated with package attributes in the old build-system format
     legacy_attributes = (
@@ -130,34 +130,17 @@ class SIPBuilder(BaseBuilder):
         "build_directory",
     )

-    def configure_file(self):
-        """Returns the name of the configure file to use."""
-        return "configure.py"
+    build_directory = "build"

     def configure(self, pkg, spec, prefix):
         """Configure the package."""
-        configure = self.configure_file()
-
-        args = self.configure_args()
-
-        args.extend(
-            [
-                "--verbose",
-                "--confirm-license",
-                "--qmake",
-                spec["qt"].prefix.bin.qmake,
-                "--sip",
-                spec["py-sip"].prefix.bin.sip,
-                "--sip-incdir",
-                join_path(spec["py-sip"].prefix, spec["python"].package.include),
-                "--bindir",
-                prefix.bin,
-                "--destdir",
-                inspect.getmodule(self.pkg).python_platlib,
-            ]
-        )
-
-        self.pkg.python(configure, *args)
+        # https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
+        args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
+        args.extend(self.configure_args())
+
+        sip_build = Executable(spec["py-sip"].prefix.bin.join("sip-build"))
+        sip_build(*args)

     def configure_args(self):
         """Arguments to pass to configure."""
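The switch to `sip-build` leans on Spack's `Executable` wrapper, which turns a program path into a callable so `sip_build(*args)` reads like a shell invocation. A simplified stand-in for that wrapper, assuming nothing about Spack's actual implementation beyond the call style:

    import subprocess

    class Executable:
        """Minimal stand-in: wrap a program path, call it like a function."""
        def __init__(self, path):
            self.path = path

        def __call__(self, *args, **kwargs):
            return subprocess.run([self.path, *args], check=True, **kwargs)

    # Usage mirrors the builder code above:
    echo = Executable("/bin/echo")  # illustrative path
    echo("--verbose", "--target-dir", "/tmp/platlib")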
@@ -167,7 +150,8 @@ def build(self, pkg, spec, prefix):
         """Build the package."""
         args = self.build_args()

-        inspect.getmodule(self.pkg).make(*args)
+        with working_dir(self.build_directory):
+            inspect.getmodule(self.pkg).make(*args)

     def build_args(self):
         """Arguments to pass to build."""
@@ -177,21 +161,11 @@ def install(self, pkg, spec, prefix):
         """Install the package."""
         args = self.install_args()

-        inspect.getmodule(self.pkg).make("install", parallel=False, *args)
+        with working_dir(self.build_directory):
+            inspect.getmodule(self.pkg).make("install", *args)

     def install_args(self):
         """Arguments to pass to install."""
         return []

     spack.builder.run_after("install")(execute_install_time_tests)

-    @spack.builder.run_after("install")
-    def extend_path_setup(self):
-        # See github issue #14121 and PR #15297
-        module = self.pkg.spec["py-sip"].variants["module"].value
-        if module != "sip":
-            module = module.split(".")[0]
-            with working_dir(inspect.getmodule(self.pkg).python_platlib):
-                with open(os.path.join(module, "__init__.py"), "a") as f:
-                    f.write("from pkgutil import extend_path\n")
-                    f.write("__path__ = extend_path(__path__, __name__)\n")
@@ -20,9 +20,9 @@


 def misc_cache_location():
-    """The ``misc_cache`` is Spack's cache for small data.
+    """The ``MISC_CACHE`` is Spack's cache for small data.

-    Currently the ``misc_cache`` stores indexes for virtual dependency
+    Currently the ``MISC_CACHE`` stores indexes for virtual dependency
     providers and for which packages provide which tags.
     """
     path = spack.config.get("config:misc_cache", spack.paths.default_misc_cache_path)
@@ -35,7 +35,7 @@ def _misc_cache():


 #: Spack's cache for small data
-misc_cache: Union[
+MISC_CACHE: Union[
     spack.util.file_cache.FileCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_misc_cache)

@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):


 #: Spack's local cache for downloaded source archives
-fetch_cache: Union[
+FETCH_CACHE: Union[
     spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_fetch_cache)
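Both renamed globals are `llnl.util.lang.Singleton` proxies: the factory (`_misc_cache`, `_fetch_cache`) only runs on first attribute access, so importing the module never touches the filesystem. A simplified sketch of that lazy-proxy idea (not the real `Singleton` implementation):

    class Singleton:
        """Lazily construct the wrapped object on first attribute access."""
        def __init__(self, factory):
            self._factory = factory
            self._instance = None

        def __getattr__(self, name):
            # Called only for attributes not found on the proxy itself.
            if self._instance is None:
                self._instance = self._factory()
            return getattr(self._instance, name)

    def _make_cache():
        print("building cache...")  # runs once, on first real use
        return {"hits": 0}

    CACHE = Singleton(_make_cache)
    print(CACHE.keys())  # prints "building cache..." then dict_keys(['hits'])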
@@ -535,7 +535,7 @@ def __job_name(name, suffix=""):
     """Compute the name of a named job with appropriate suffix.
     Valid suffixes are either '-remove' or empty string or None
     """
-    assert type(name) == str
+    assert isinstance(name, str)

     jname = name
     if suffix:
@@ -885,7 +885,7 @@ def generate_gitlab_ci_yaml(
     cli_scopes = [
         os.path.relpath(s.path, concrete_env_dir)
         for s in cfg.scopes().values()
-        if type(s) == cfg.ImmutableConfigScope
+        if isinstance(s, cfg.ImmutableConfigScope)
         and s.path not in env_includes
         and os.path.exists(s.path)
     ]
@@ -1278,6 +1278,7 @@ def main_script_replacements(cmd):
             "SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None",
             "SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
             "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
+            "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
         }

         if remote_mirror_override:
@@ -1287,9 +1288,6 @@ def main_script_replacements(cmd):
         if spack_stack_name:
             output_object["variables"]["SPACK_CI_STACK_NAME"] = spack_stack_name

-        # Ensure the child pipeline always runs
-        output_object["workflow"] = {"rules": [{"when": "always"}]}
-
         if spack_buildcache_copy:
             # Write out the file describing specs that should be copied
             copy_specs_dir = os.path.join(pipeline_artifacts_dir, "specs_to_copy")
@@ -1305,21 +1303,17 @@ def main_script_replacements(cmd):
             with open(copy_specs_file, "w") as fd:
                 fd.write(json.dumps(buildcache_copies))

-        sorted_output = {}
-        for output_key, output_value in sorted(output_object.items()):
-            sorted_output[output_key] = output_value
-
         # TODO(opadron): remove this or refactor
         if run_optimizer:
             import spack.ci_optimization as ci_opt

-            sorted_output = ci_opt.optimizer(sorted_output)
+            output_object = ci_opt.optimizer(output_object)

         # TODO(opadron): remove this or refactor
         if use_dependencies:
             import spack.ci_needs_workaround as cinw

-            sorted_output = cinw.needs_to_dependencies(sorted_output)
+            output_object = cinw.needs_to_dependencies(output_object)
     else:
         # No jobs were generated
         noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
@@ -1330,10 +1324,17 @@ def main_script_replacements(cmd):
             noop_job["script"] = [
                 'echo "copy-only pipelines are not supported with deprecated ci configs"'
             ]
-            sorted_output = {"unsupported-copy": noop_job}
+            output_object = {"unsupported-copy": noop_job}
         else:
             tty.debug("No specs to rebuild, generating no-op job")
-            sorted_output = {"no-specs-to-rebuild": noop_job}
+            output_object = {"no-specs-to-rebuild": noop_job}
+
+    # Ensure the child pipeline always runs
+    output_object["workflow"] = {"rules": [{"when": "always"}]}
+
+    sorted_output = {}
+    for output_key, output_value in sorted(output_object.items()):
+        sorted_output[output_key] = output_value

     if known_broken_specs_encountered:
         tty.error("This pipeline generated hashes known to be broken on develop:")
@@ -1419,9 +1420,7 @@ def _push_mirror_contents(input_spec, sign_binaries, mirror_url):
     unsigned = not sign_binaries
     tty.debug("Creating buildcache ({0})".format("unsigned" if unsigned else "signed"))
     push_url = spack.mirror.Mirror.from_url(mirror_url).push_url
-    return bindist.push(
-        input_spec, push_url, bindist.PushOptions(force=True, allow_root=True, unsigned=unsigned)
-    )
+    return bindist.push(input_spec, push_url, bindist.PushOptions(force=True, unsigned=unsigned))


 def push_mirror_contents(input_spec: spack.spec.Spec, mirror_url, sign_binaries):
@@ -1505,7 +1504,7 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
         return

     try:
-        pkg_cls = spack.repo.path.get_pkg_class(job_spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
         job_pkg = pkg_cls(job_spec)
         tty.debug("job package: {0}".format(job_pkg))
     except AssertionError:
@@ -1691,7 +1690,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
     return True


-def reproduce_ci_job(url, work_dir):
+def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
     """Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
     attempt to setup an environment in which the failure can be reproduced
     locally. This entails the following:
@@ -1707,6 +1706,11 @@ def reproduce_ci_job(url, work_dir):
     work_dir = os.path.realpath(work_dir)
     download_and_extract_artifacts(url, work_dir)

+    gpg_path = None
+    if gpg_url:
+        gpg_path = web_util.fetch_url_text(gpg_url, dest_dir=os.path.join(work_dir, "_pgp"))
+        rel_gpg_path = gpg_path.replace(work_dir, "").lstrip(os.path.sep)
+
     lock_file = fs.find(work_dir, "spack.lock")[0]
     repro_lock_dir = os.path.dirname(lock_file)

@@ -1799,60 +1803,63 @@ def reproduce_ci_job(url, work_dir):
     # more faithful reproducer if everything appears to run in the same
     # absolute path used during the CI build.
     mount_as_dir = "/work"
+    mounted_workdir = "/reproducer"
     if repro_details:
         mount_as_dir = repro_details["ci_project_dir"]
         mounted_repro_dir = os.path.join(mount_as_dir, rel_repro_dir)
         mounted_env_dir = os.path.join(mount_as_dir, relative_concrete_env_dir)
+        if gpg_path:
+            mounted_gpg_path = os.path.join(mounted_workdir, rel_gpg_path)

     # We will also try to clone spack from your local checkout and
     # reproduce the state present during the CI build, and put that into
     # the bind-mounted reproducer directory.

     # Regular expressions for parsing that HEAD commit. If the pipeline
     # was on the gitlab spack mirror, it will have been a merge commit made by
     # github and pushed by the sync script. If the pipeline was run on some
     # environment repo, then the tested spack commit will likely have been
     # a regular commit.
     commit_1 = None
     commit_2 = None
     commit_regex = re.compile(r"commit\s+([^\s]+)")
     merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")

     # Try the more specific merge commit regex first
     m = merge_commit_regex.search(spack_info)
     if m:
         # This was a merge commit and we captured the parents
         commit_1 = m.group(1)
         commit_2 = m.group(2)
     else:
         # Not a merge commit, just get the commit sha
         m = commit_regex.search(spack_info)
         if m:
             commit_1 = m.group(1)

     setup_result = False
     if commit_1:
         if commit_2:
             setup_result = setup_spack_repro_version(work_dir, commit_2, merge_commit=commit_1)
         else:
             setup_result = setup_spack_repro_version(work_dir, commit_1)

     if not setup_result:
         setup_msg = """
         This can happen if the spack you are using to run this command is not a git
         repo, or if it is a git repo, but it does not have the commits needed to
         recreate the tested merge commit. If you are trying to reproduce a spack
         PR pipeline job failure, try fetching the latest develop commits from
         mainline spack and make sure you have the most recent commit of the PR
         branch in your local spack repo. Then run this command again.
         Alternatively, you can also manually clone spack if you know the version
         you want to test.
         """
         tty.error(
             "Failed to automatically setup the tested version of spack "
             "in your local reproduction directory."
         )
         print(setup_msg)

     # In cases where CI build was run on a shell runner, it might be useful
     # to see what tags were applied to the job so the user knows what shell
@@ -1863,45 +1870,92 @@ def reproduce_ci_job(url, work_dir):
         job_tags = job_yaml["tags"]
         tty.msg("Job ran with the following tags: {0}".format(job_tags))

-    inst_list = []
+    entrypoint_script = [
+        ["git", "config", "--global", "--add", "safe.directory", mount_as_dir],
+        [".", os.path.join(mount_as_dir if job_image else work_dir, "share/spack/setup-env.sh")],
+        ["spack", "gpg", "trust", mounted_gpg_path if job_image else gpg_path] if gpg_path else [],
+        ["spack", "env", "activate", mounted_env_dir if job_image else repro_dir],
+        [os.path.join(mounted_repro_dir, "install.sh") if job_image else install_script],
+    ]
+
+    inst_list = []
     # Finally, print out some instructions to reproduce the build
     if job_image:
-        inst_list.append("\nRun the following command:\n\n")
-        inst_list.append(
-            "    $ docker run --rm --name spack_reproducer -v {0}:{1}:Z -ti {2}\n".format(
-                work_dir, mount_as_dir, job_image
-            )
-        )
-        inst_list.append("\nOnce inside the container:\n\n")
+        # Allow interactive
+        entrypoint_script.extend(
+            [
+                [
+                    "echo",
+                    "Re-run install script using:\n\t{0}".format(
+                        os.path.join(mounted_repro_dir, "install.sh")
+                        if job_image
+                        else install_script
+                    ),
+                ],
+                # Allow interactive
+                ["exec", "$@"],
+            ]
+        )
+        process_command(
+            "entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False
+        )
+
+        docker_command = [
+            [
+                runtime,
+                "run",
+                "-i",
+                "-t",
+                "--rm",
+                "--name",
+                "spack_reproducer",
+                "-v",
+                ":".join([work_dir, mounted_workdir, "Z"]),
+                "-v",
+                ":".join(
+                    [
+                        os.path.join(work_dir, "jobs_scratch_dir"),
+                        os.path.join(mount_as_dir, "jobs_scratch_dir"),
+                        "Z",
+                    ]
+                ),
+                "-v",
+                ":".join([os.path.join(work_dir, "spack"), mount_as_dir, "Z"]),
+                "--entrypoint",
+                os.path.join(mounted_workdir, "entrypoint.sh"),
+                job_image,
+                "bash",
+            ]
+        ]
+        autostart = autostart and setup_result
+        process_command("start", docker_command, work_dir, run=autostart)
+
+        if not autostart:
+            inst_list.append("\nTo run the docker reproducer:\n\n")
+            inst_list.extend(
+                [
+                    "    - Start the docker container install",
+                    "        $ {0}/start.sh".format(work_dir),
+                ]
+            )
     else:
-        inst_list.append("\nOnce on the tagged runner:\n\n")
+        process_command("reproducer", entrypoint_script, work_dir, run=False)
+
+        inst_list.append("\nOnce on the tagged runner:\n\n")
+        inst_list.extend(
+            ["    - Run the reproducer script", "        $ {0}/reproducer.sh".format(work_dir)]
+        )

-    if not setup_result:
-        inst_list.append("    - Clone spack and acquire tested commit\n")
-        inst_list.append("{0}".format(spack_info))
-        spack_root = "<spack-clone-path>"
-    else:
-        spack_root = "{0}/spack".format(mount_as_dir)
-    inst_list.append("    - Activate the environment\n\n")
-    inst_list.append("        $ source {0}/share/spack/setup-env.sh\n".format(spack_root))
-    inst_list.append(
-        "        $ spack env activate --without-view {0}\n\n".format(
-            mounted_env_dir if job_image else repro_dir
-        )
-    )
-    inst_list.append("    - Run the install script\n\n")
-    inst_list.append(
-        "        $ {0}\n".format(
-            os.path.join(mounted_repro_dir, "install.sh") if job_image else install_script
-        )
-    )
+        if not setup_result:
+            inst_list.append("\n    - Clone spack and acquire tested commit")
+            inst_list.append("\n        {0}\n".format(spack_info))
+            inst_list.append("\n")
+            inst_list.append("\n        Path to clone spack: {0}/spack\n\n".format(work_dir))

-    print("".join(inst_list))
+    tty.msg("".join(inst_list))


-def process_command(name, commands, repro_dir):
+def process_command(name, commands, repro_dir, run=True, exit_on_failure=True):
     """
     Create a script for and run the command. Copy the script to the
     reproducibility directory.
@@ -1911,6 +1965,7 @@ def process_command(name, commands, repro_dir):
         commands (list): list of arguments for single command or list of lists of
             arguments for multiple commands. No shell escape is performed.
         repro_dir (str): Job reproducibility directory
+        run (bool): Run the script and return the exit code if True

     Returns: the exit code from processing the command
     """
@@ -1929,7 +1984,8 @@ def process_command(name, commands, repro_dir):
     with open(script, "w") as fd:
         fd.write("#!/bin/sh\n\n")
         fd.write("\n# spack {0} command\n".format(name))
-        fd.write("set -e\n")
+        if exit_on_failure:
+            fd.write("set -e\n")
         if os.environ.get("SPACK_VERBOSE_SCRIPT"):
             fd.write("set -x\n")
         fd.write(full_command)
@@ -1940,28 +1996,36 @@ def process_command(name, commands, repro_dir):

     copy_path = os.path.join(repro_dir, script)
     shutil.copyfile(script, copy_path)
     st = os.stat(copy_path)
     os.chmod(copy_path, st.st_mode | stat.S_IEXEC)

     # Run the generated install.sh shell script as if it were being run in
     # a login shell.
-    try:
-        cmd_process = subprocess.Popen(["/bin/sh", "./{0}".format(script)])
-        cmd_process.wait()
-        exit_code = cmd_process.returncode
-    except (ValueError, subprocess.CalledProcessError, OSError) as err:
-        tty.error("Encountered error running {0} script".format(name))
-        tty.error(err)
-        exit_code = 1
+    exit_code = None
+    if run:
+        try:
+            cmd_process = subprocess.Popen(["/bin/sh", "./{0}".format(script)])
+            cmd_process.wait()
+            exit_code = cmd_process.returncode
+        except (ValueError, subprocess.CalledProcessError, OSError) as err:
+            tty.error("Encountered error running {0} script".format(name))
+            tty.error(err)
+            exit_code = 1
+
+        tty.debug("spack {0} exited {1}".format(name, exit_code))
+    else:
+        # Delete the script, it is copied to the destination dir
+        os.remove(script)

-    tty.debug("spack {0} exited {1}".format(name, exit_code))
     return exit_code
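With `run=False`, `process_command` becomes a pure script generator: the shell script is written and copied into the reproduction directory but not executed, which is what lets the reproducer stage a `start.sh` without launching the container. A condensed standalone sketch of that write-then-maybe-run pattern:

    import os
    import stat
    import subprocess

    def write_and_maybe_run(name, command, out_dir, run=True):
        script = os.path.join(out_dir, "{0}.sh".format(name))
        with open(script, "w") as fd:
            fd.write("#!/bin/sh\nset -e\n")
            fd.write(" ".join(command) + "\n")
        os.chmod(script, os.stat(script).st_mode | stat.S_IEXEC)

        if not run:
            return None  # caller only wants the script staged on disk
        return subprocess.run(["/bin/sh", script]).returncode

    # Stage a script without running it, mirroring run=autostart above:
    write_and_maybe_run("start", ["echo", "hello"], ".", run=False)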
 def create_buildcache(
     input_spec: spack.spec.Spec,
     *,
-    pr_pipeline: bool,
     pipeline_mirror_url: Optional[str] = None,
     buildcache_mirror_url: Optional[str] = None,
+    sign_binaries: bool = False,
 ) -> List[PushResult]:
     """Create the buildcache at the provided mirror(s).

@@ -1969,12 +2033,10 @@ def create_buildcache(
         input_spec: Installed spec to package and push
         buildcache_mirror_url: URL for the buildcache mirror
         pipeline_mirror_url: URL for the pipeline mirror
-        pr_pipeline: True if the CI job is for a PR
+        sign_binaries: Whether or not to sign buildcache entry

     Returns: A list of PushResults, indicating success or failure.
     """
-    sign_binaries = pr_pipeline is False and can_sign_binaries()
-
     results = []

     # Create buildcache in either the main remote mirror, or in the
@@ -273,9 +273,9 @@ def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True, fir
|
||||
See ``spack.database.Database._query`` for details.
|
||||
"""
|
||||
if local:
|
||||
matching_specs = spack.store.db.query_local(spec, hashes=hashes, installed=installed)
|
||||
matching_specs = spack.store.STORE.db.query_local(spec, hashes=hashes, installed=installed)
|
||||
else:
|
||||
matching_specs = spack.store.db.query(spec, hashes=hashes, installed=installed)
|
||||
matching_specs = spack.store.STORE.db.query(spec, hashes=hashes, installed=installed)
|
||||
if not matching_specs:
|
||||
tty.die("Spec '%s' matches no installed packages." % spec)
|
||||
|
@@ -291,7 +291,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
if len(matching_specs) <= 1:
return

format_string = "{name}{@version}{%compiler}{arch=architecture}"
format_string = "{name}{@version}{%compiler.name}{@compiler.version}{arch=architecture}"
args = ["%s matches multiple packages." % spec, "Matching packages:"]
args += [
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs

@@ -383,7 +383,7 @@ def display_specs(specs, args=None, **kwargs):
deps (bool): Display dependencies with specs
long (bool): Display short hashes with specs
very_long (bool): Display full hashes with specs (supersedes ``long``)
namespace (bool): Print namespaces along with names
namespaces (bool): Print namespaces along with names
show_flags (bool): Show compiler flags with specs
variants (bool): Show variants with specs
indent (int): indent each line this much

@@ -407,7 +407,7 @@ def get_arg(name, default=None):
paths = get_arg("paths", False)
deps = get_arg("deps", False)
hashes = get_arg("long", False)
namespace = get_arg("namespace", False)
namespaces = get_arg("namespaces", False)
flags = get_arg("show_flags", False)
full_compiler = get_arg("show_full_compiler", False)
variants = get_arg("variants", False)

@@ -428,7 +428,7 @@ def get_arg(name, default=None):

format_string = get_arg("format", None)
if format_string is None:
nfmt = "{fullname}" if namespace else "{name}"
nfmt = "{fullname}" if namespaces else "{name}"
ffmt = ""
if full_compiler or flags:
ffmt += "{%compiler.name}"

@@ -473,7 +473,7 @@ def format_list(specs):
out = ""
# getting lots of prefixes requires DB lookups. Ensure
# all spec.prefix calls are in one transaction.
with spack.store.db.read_transaction():
with spack.store.STORE.db.read_transaction():
for string, spec in formatted:
if not string:
# print newline from above

@@ -584,14 +584,14 @@ def require_active_env(cmd_name):

if env:
return env
else:
tty.die(
"`spack %s` requires an environment" % cmd_name,
"activate an environment first:",
" spack env activate ENV",
"or use:",
" spack -e ENV %s ..." % cmd_name,
)

tty.die(
"`spack %s` requires an environment" % cmd_name,
"activate an environment first:",
" spack env activate ENV",
"or use:",
" spack -e ENV %s ..." % cmd_name,
)


def find_environment(args):

@@ -47,7 +47,7 @@ def configs(parser, args):


def packages(parser, args):
pkgs = args.name or spack.repo.path.all_package_names()
pkgs = args.name or spack.repo.PATH.all_package_names()
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
_process_reports(reports)

@@ -57,7 +57,7 @@ def packages_https(parser, args):
if not args.check_all and not args.name:
tty.die("Please specify one or more packages to audit, or --all.")

pkgs = args.name or spack.repo.path.all_package_names()
pkgs = args.name or spack.repo.PATH.all_package_names()
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
_process_reports(reports)


@@ -126,7 +126,7 @@ def blame(parser, args):
blame_file = path

if not blame_file:
pkg_cls = spack.repo.path.get_pkg_class(args.package_or_file)
pkg_cls = spack.repo.PATH.get_pkg_class(args.package_or_file)
blame_file = pkg_cls.module.__file__.rstrip("c")  # .pyc -> .py

# get git blame for the package

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
import shutil
import sys
import tempfile

import llnl.util.filesystem

@@ -68,11 +69,10 @@

def _add_scope_option(parser):
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
parser.add_argument(
"--scope",
choices=scopes,
metavar=scopes_metavar,
metavar=spack.config.SCOPES_METAVAR,
help="configuration scope to read/modify",
)
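
The same two-line change (drop the local scopes_metavar variable, use a module-level spack.config.SCOPES_METAVAR constant) recurs in many of the command hunks below. A runnable toy showing what the metavar does to help output; the constant's value here is an assumption, not copied from Spack:

import argparse

SCOPES_METAVAR = "{defaults,system,site,user}[/PLATFORM]"  # assumed value

parser = argparse.ArgumentParser(prog="toy")
parser.add_argument(
    "--scope",
    choices=["defaults", "system", "site", "user"],
    metavar=SCOPES_METAVAR,
    help="configuration scope to read/modify",
)
parser.print_help()  # --scope is rendered with the metavar, not the raw choices
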
@@ -169,7 +169,7 @@ def _reset(args):
if not ok_to_continue:
raise RuntimeError("Aborting")

for scope in spack.config.config.file_scopes:
for scope in spack.config.CONFIG.file_scopes:
# The default scope should stay untouched
if scope.name == "defaults":
continue

@@ -186,7 +186,7 @@ def _reset(args):
if os.path.exists(bootstrap_yaml):
shutil.move(bootstrap_yaml, backup_file)

spack.config.config.clear_caches()
spack.config.CONFIG.clear_caches()


def _root(args):

@@ -326,6 +326,7 @@ def _status(args):
if missing:
print(llnl.util.tty.color.colorize(legend))
print()
sys.exit(1)


def _add(args):

@@ -2,12 +2,14 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import glob
import json
import os
import shutil
import sys
import tempfile
from typing import List

import llnl.util.tty as tty
import llnl.util.tty.color as clr

@@ -18,7 +20,6 @@
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.hash_types as ht
import spack.mirror
import spack.relocate
import spack.repo

@@ -28,7 +29,6 @@
import spack.util.url as url_util
import spack.util.web as web_util
from spack.cmd import display_specs
from spack.error import SpecError
from spack.spec import Spec, save_dependency_specfiles
from spack.stage import Stage
from spack.util.string import plural

@@ -38,22 +38,25 @@
level = "long"


def setup_parser(subparser):
setup_parser.parser = subparser
def setup_parser(subparser: argparse.ArgumentParser):
setattr(setup_parser, "parser", subparser)
subparsers = subparser.add_subparsers(help="buildcache sub-commands")

push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists")
push.add_argument(
"-u", "--unsigned", action="store_true", help="push unsigned buildcache tarballs"
)
push.add_argument(
"-a",
"--allow-root",
"-a",
action="store_true",
help="allow install root string in binary files after RPATH substitution",
)
push.add_argument("-k", "--key", metavar="key", type=str, default=None, help="key for signing")
push_sign = push.add_mutually_exclusive_group(required=False)
push_sign.add_argument(
"--unsigned", "-u", action="store_true", help="push unsigned buildcache tarballs"
)
push_sign.add_argument(
"--key", "-k", metavar="key", type=str, default=None, help="key for signing"
)
push.add_argument("mirror", type=str, help="mirror name, path, or URL")
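
The push parser now puts --unsigned and --key into a mutually exclusive group, so pushing unsigned and signing with a key can no longer be combined on one command line. A minimal stand-alone demo of that argparse behavior (toy parser, not Spack's):

import argparse

parser = argparse.ArgumentParser(prog="toy")
group = parser.add_mutually_exclusive_group(required=False)
group.add_argument("--unsigned", "-u", action="store_true")
group.add_argument("--key", "-k", metavar="key", default=None)

print(parser.parse_args(["--unsigned"]))    # fine: unsigned push
print(parser.parse_args(["--key", "abc"]))  # fine: signed with a key
# parser.parse_args(["-u", "-k", "abc"])    # SystemExit: not allowed together
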
push.add_argument(
"--update-index",

@@ -70,9 +73,9 @@ def setup_parser(subparser):
default="package,dependencies",
dest="things_to_install",
choices=["package", "dependencies"],
help="select the buildcache mode\n\n"
"the default is to build a cache for the package along with all its dependencies. "
"alternatively, one can decide to build a cache for only the package or only the "
help="select the buildcache mode. "
"The default is to build a cache for the package along with all its dependencies. "
"Alternatively, one can decide to build a cache for only the package or only the "
"dependencies",
)
arguments.add_common_arguments(push, ["specs"])

@@ -102,7 +105,7 @@ def setup_parser(subparser):
install.set_defaults(func=install_fn)

listcache = subparsers.add_parser("list", help=list_fn.__doc__)
arguments.add_common_arguments(listcache, ["long", "very_long"])
arguments.add_common_arguments(listcache, ["long", "very_long", "namespaces"])
listcache.add_argument(
"-v",
"--variants",

@@ -146,23 +149,20 @@ def setup_parser(subparser):

# used to construct scope arguments below
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar

check.add_argument(
"--scope",
choices=scopes,
metavar=scopes_metavar,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope containing mirrors to check",
)

check.add_argument(
"-s", "--spec", default=None, help="check single spec instead of release specs file"
check_spec_or_specfile = check.add_mutually_exclusive_group(required=True)
check_spec_or_specfile.add_argument(
"-s", "--spec", help="check single spec instead of release specs file"
)

check.add_argument(
check_spec_or_specfile.add_argument(
"--spec-file",
default=None,
help="check single spec from json or yaml file instead of release specs file",
)

@@ -170,16 +170,19 @@ def setup_parser(subparser):

# Download tarball and specfile
download = subparsers.add_parser("download", help=download_fn.__doc__)
download.add_argument(
"-s", "--spec", default=None, help="download built tarball for spec from mirror"
download_spec_or_specfile = download.add_mutually_exclusive_group(required=True)
download_spec_or_specfile.add_argument(
"-s", "--spec", help="download built tarball for spec from mirror"
)
download_spec_or_specfile.add_argument(
"--spec-file", help="download built tarball for spec (from json or yaml file) from mirror"
)
download.add_argument(
"--spec-file",
"-p",
"--path",
required=True,
default=None,
help="download built tarball for spec (from json or yaml file) from mirror",
)
download.add_argument(
"-p", "--path", default=None, help="path to directory where tarball should be downloaded"
help="path to directory where tarball should be downloaded",
)
download.set_defaults(func=download_fn)

@@ -187,32 +190,32 @@ def setup_parser(subparser):
getbuildcachename = subparsers.add_parser(
"get-buildcache-name", help=get_buildcache_name_fn.__doc__
)
getbuildcachename.add_argument(
"-s", "--spec", default=None, help="spec string for which buildcache name is desired"
getbuildcachename_spec_or_specfile = getbuildcachename.add_mutually_exclusive_group(
required=True
)
getbuildcachename.add_argument(
"--spec-file",
default=None,
help="path to spec json or yaml file for which buildcache name is desired",
getbuildcachename_spec_or_specfile.add_argument(
"-s", "--spec", help="spec string for which buildcache name is desired"
)
getbuildcachename_spec_or_specfile.add_argument(
"--spec-file", help="path to spec json or yaml file for which buildcache name is desired"
)
getbuildcachename.set_defaults(func=get_buildcache_name_fn)

# Given the root spec, save the yaml of the dependent spec to a file
savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
savespecfile.add_argument("--root-spec", default=None, help="root spec of dependent spec")
savespecfile.add_argument(
"--root-specfile",
default=None,
help="path to json or yaml file containing root spec of dependent spec",
savespecfile_spec_or_specfile = savespecfile.add_mutually_exclusive_group(required=True)
savespecfile_spec_or_specfile.add_argument("--root-spec", help="root spec of dependent spec")
savespecfile_spec_or_specfile.add_argument(
"--root-specfile", help="path to json or yaml file containing root spec of dependent spec"
)
savespecfile.add_argument(
"-s",
"--specs",
default=None,
required=True,
help="list of dependent specs for which saved yaml is desired",
)
savespecfile.add_argument(
"--specfile-dir", default=None, help="path to directory where spec yamls should be saved"
"--specfile-dir", required=True, help="path to directory where spec yamls should be saved"
)
savespecfile.set_defaults(func=save_specfile_fn)

@@ -254,60 +257,35 @@ def setup_parser(subparser):
update_index.set_defaults(func=update_index_fn)


def _matching_specs(specs, spec_file):
"""Return a list of matching specs read from either a spec file (JSON or YAML),
a query over the store or a query over the active environment.
"""
env = ev.active_environment()
hashes = env.all_hashes() if env else None
if spec_file:
return spack.store.specfile_matches(spec_file, hashes=hashes)

if specs:
constraints = spack.cmd.parse_specs(specs)
return spack.store.find(constraints, hashes=hashes)

if env:
return [concrete for _, concrete in env.concretized_specs()]

tty.die(
"build cache file creation requires at least one"
" installed package spec, an active environment,"
" or else a path to a json or yaml file containing a spec"
" to install"
)


def _concrete_spec_from_args(args):
spec_str, specfile_path = args.spec, args.spec_file

if not spec_str and not specfile_path:
tty.error("must provide either spec string or path to YAML or JSON specfile")
sys.exit(1)

if spec_str:
try:
constraints = spack.cmd.parse_specs(spec_str)
spec = spack.store.find(constraints)[0]
spec.concretize()
except SpecError as spec_error:
tty.error("Unable to concretize spec {0}".format(spec_str))
tty.debug(spec_error)
sys.exit(1)

return spec

return Spec.from_specfile(specfile_path)
def _matching_specs(specs: List[Spec]) -> List[Spec]:
"""Disambiguate specs and return a list of matching specs"""
return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]

def push_fn(args):
"""create a binary package and push it to a mirror"""
if args.spec_file:
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
"Use positional arguments instead."
)

if args.specs or args.spec_file:
specs = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
else:
specs = spack.cmd.require_active_env("buildcache push").all_specs()

mirror = arguments.mirror_name_or_url(args.mirror)

if args.allow_root:
tty.warn(
"The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
)

url = mirror.push_url

specs = bindist.specs_to_be_packaged(
_matching_specs(args.specs, args.spec_file),
specs,
root="package" in args.things_to_install,
dependencies="dependencies" in args.things_to_install,
)

@@ -333,7 +311,6 @@ def push_fn(args):
bindist.PushOptions(
force=args.force,
unsigned=args.unsigned,
allow_root=args.allow_root,
key=args.key,
regenerate_index=args.update_index,
),

@@ -407,29 +384,30 @@ def keys_fn(args):

def preview_fn(args):
"""analyze an installed spec and reports whether executables and libraries are relocatable"""
constraints = spack.cmd.parse_specs(args.specs)
specs = spack.store.find(constraints, multiple=True)

# Cycle over the specs that match
for spec in specs:
print("Relocatable nodes")
print("--------------------------------")
print(spec.tree(status_fn=spack.relocate.is_relocatable))
tty.warn(
"`spack buildcache preview` is deprecated since `spack buildcache push --allow-root` is "
"now the default. This command will be removed in Spack 0.22"
)


def check_fn(args):
"""check specs against remote binary mirror(s) to see if any need to be rebuilt

either a single spec from --spec, or else the full set of release specs. this command uses the
process exit code to indicate its result, specifically, if the exit code is non-zero, then at
least one of the indicated specs needs to be rebuilt
this command uses the process exit code to indicate its result, specifically, if the
exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
"""
if args.spec or args.spec_file:
specs = [_concrete_spec_from_args(args)]
if args.spec_file:
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
"Use --spec instead."
)

specs = spack.cmd.parse_specs(args.spec or args.spec_file)

if specs:
specs = _matching_specs(specs, specs)
else:
env = spack.cmd.require_active_env(cmd_name="buildcache")
env.concretize()
specs = env.all_specs()
specs = spack.cmd.require_active_env("buildcache check").all_specs()

if not specs:
tty.msg("No specs provided, exiting.")

@@ -459,26 +437,28 @@ def download_fn(args):
code indicates that the command failed to download at least one of the required buildcache
components
"""
if not args.spec and not args.spec_file:
tty.msg("No specs provided, exiting.")
return
if args.spec_file:
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
"Use --spec instead."
)

if not args.path:
tty.msg("No download path provided, exiting")
return
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))

spec = _concrete_spec_from_args(args)
result = bindist.download_single_spec(spec, args.path)
if len(specs) != 1:
tty.die("a single spec argument is required to download from a buildcache")

if not result:
if not bindist.download_single_spec(specs[0], args.path):
sys.exit(1)


def get_buildcache_name_fn(args):
"""get name (prefix) of buildcache entries for this spec"""
spec = _concrete_spec_from_args(args)
buildcache_name = bindist.tarball_name(spec, "")
print("{0}".format(buildcache_name))
tty.warn("This command is deprecated and will be removed in Spack 0.22.")
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
if len(specs) != 1:
tty.die("a single spec argument is required to get buildcache name")
print(bindist.tarball_name(specs[0], ""))


def save_specfile_fn(args):

@@ -488,29 +468,24 @@ def save_specfile_fn(args):
successful. if any errors or exceptions are encountered, or if expected command-line arguments
are not provided, then the exit code will be non-zero
"""
if not args.root_spec and not args.root_specfile:
tty.msg("No root spec provided, exiting.")
sys.exit(1)

if not args.specs:
tty.msg("No dependent specs provided, exiting.")
sys.exit(1)

if not args.specfile_dir:
tty.msg("No yaml directory provided, exiting.")
sys.exit(1)

if args.root_specfile:
with open(args.root_specfile) as fd:
root_spec_as_json = fd.read()
spec_format = "yaml" if args.root_specfile.endswith("yaml") else "json"
else:
root_spec = Spec(args.root_spec)
root_spec.concretize()
root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
spec_format = "json"
tty.warn(
"The flag `--root-specfile` is deprecated and will be removed in Spack 0.22. "
"Use --root-spec instead."
)

specs = spack.cmd.parse_specs(args.root_spec or args.root_specfile)

if len(specs) != 1:
tty.die("a single spec argument is required to save specfile")

root = specs[0]

if not root.concrete:
root.concretize()

save_dependency_specfiles(
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format
root, args.specfile_dir, dependencies=spack.cmd.parse_specs(args.specs)
)


@@ -4,18 +4,21 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import argparse
import re
import sys

import llnl.util.tty as tty
import llnl.util.lang
from llnl.util import tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.repo
import spack.spec
import spack.stage
import spack.util.crypto
from spack.package_base import deprecated_version, preferred_version
from spack.cmd.common import arguments
from spack.package_base import PackageBase, deprecated_version, preferred_version
from spack.util.editor import editor
from spack.util.format import get_version_lines
from spack.util.naming import valid_fully_qualified_module_name
from spack.version import Version

@@ -31,35 +34,38 @@ def setup_parser(subparser):
default=False,
help="don't clean up staging area when command completes",
)
sp = subparser.add_mutually_exclusive_group()
sp.add_argument(
subparser.add_argument(
"-b",
"--batch",
action="store_true",
default=False,
help="don't ask which versions to checksum",
)
sp.add_argument(
subparser.add_argument(
"-l",
"--latest",
action="store_true",
default=False,
help="checksum the latest available version only",
help="checksum the latest available version",
)
sp.add_argument(
subparser.add_argument(
"-p",
"--preferred",
action="store_true",
default=False,
help="checksum the preferred version only",
help="checksum the known Spack preferred version",
)
subparser.add_argument(
modes_parser = subparser.add_mutually_exclusive_group()
modes_parser.add_argument(
"-a",
"--add-to-package",
action="store_true",
default=False,
help="add new versions to package",
)
modes_parser.add_argument(
"--verify", action="store_true", default=False, help="verify known package checksums"
)
arguments.add_common_arguments(subparser, ["package"])
subparser.add_argument(
"versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"

@@ -77,89 +83,174 @@ def checksum(parser, args):
tty.die("`spack checksum` accepts package names, not URLs.")

# Get the package we're going to generate checksums for
pkg_cls = spack.repo.path.get_pkg_class(args.package)
pkg_cls = spack.repo.PATH.get_pkg_class(args.package)
pkg = pkg_cls(spack.spec.Spec(args.package))

# Build a list of versions to checksum
versions = [Version(v) for v in args.versions]

# Define placeholder for remote versions.
# This'll help reduce redundant work if we need to check for the existence
# of remote versions more than once.
remote_versions = None

# Add latest version if requested
if args.latest:
remote_versions = pkg.fetch_remote_versions()
if len(remote_versions) > 0:
latest_version = sorted(remote_versions.keys(), reverse=True)[0]
versions.append(latest_version)

# Add preferred version if requested
if args.preferred:
versions.append(preferred_version(pkg))

# Store a dict of the form version -> URL
url_dict = {}
if not args.versions and args.preferred:
versions = [preferred_version(pkg)]
else:
versions = [Version(v) for v in args.versions]

if versions:
remote_versions = None
for version in versions:
if deprecated_version(pkg, version):
tty.warn("Version {0} is deprecated".format(version))
for version in versions:
if deprecated_version(pkg, version):
tty.warn(f"Version {version} is deprecated")

url = pkg.find_valid_url_for_version(version)
if url is not None:
url_dict[version] = url
continue
# if we get here, it's because no valid url was provided by the package
# do expensive fallback to try to recover
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions()
if version in remote_versions:
url_dict[version] = remote_versions[version]
else:
url_dict = pkg.fetch_remote_versions()
url = pkg.find_valid_url_for_version(version)
if url is not None:
url_dict[version] = url
continue
# if we get here, it's because no valid url was provided by the package
# do expensive fallback to try to recover
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions()
if version in remote_versions:
url_dict[version] = remote_versions[version]

if len(versions) <= 0:
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions()
url_dict = remote_versions

if not url_dict:
tty.die("Could not find any remote versions for {0}".format(pkg.name))
tty.die(f"Could not find any remote versions for {pkg.name}")
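
The new resolution loop above asks the package for a known URL first and only falls back to the expensive remote scrape, at most once, for versions it cannot resolve. A runnable toy of that logic (FakePkg stands in for the real PackageBase API):

class FakePkg:
    def find_valid_url_for_version(self, v):
        return {"1.0": "https://example.com/pkg-1.0.tar.gz"}.get(v)

    def fetch_remote_versions(self):
        print("scraping remote versions (expensive, done once)")
        return {"2.0": "https://example.com/pkg-2.0.tar.gz"}

pkg = FakePkg()
versions = ["1.0", "2.0"]
url_dict, remote_versions = {}, None
for version in versions:
    url = pkg.find_valid_url_for_version(version)
    if url is not None:
        url_dict[version] = url
        continue
    if remote_versions is None:  # expensive fallback, triggered at most once
        remote_versions = pkg.fetch_remote_versions()
    if version in remote_versions:
        url_dict[version] = remote_versions[version]
print(url_dict)
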

version_lines = spack.stage.get_checksums_for_versions(
# print an empty line to create a new output section block
print()

version_hashes = spack.stage.get_checksums_for_versions(
url_dict,
pkg.name,
keep_stage=args.keep_stage,
batch=(args.batch or len(args.versions) > 0 or len(url_dict) == 1),
latest=args.latest,
batch=(args.batch or len(versions) > 0 or len(url_dict) == 1),
fetch_options=pkg.fetch_options,
)

if args.verify:
print_checksum_status(pkg, version_hashes)
sys.exit(0)

# convert dict into package.py version statements
version_lines = get_version_lines(version_hashes, url_dict)
print()
print(version_lines)
print()

if args.add_to_package:
filename = spack.repo.path.filename_for_package_name(pkg.name)
# Make sure we also have a newline after the last version
versions = [v + "\n" for v in version_lines.splitlines()]
versions.append("\n")
# We need to insert the versions in reversed order
versions.reverse()
versions.append(" # FIXME: Added by `spack checksum`\n")
version_line = None
add_versions_to_package(pkg, version_lines)

with open(filename, "r") as f:
lines = f.readlines()
for i in range(len(lines)):
# Black is drunk, so this is what it looks like for now
# See https://github.com/psf/black/issues/2156 for more information
if lines[i].startswith(" # FIXME: Added by `spack checksum`") or lines[
i
].startswith(" version("):
version_line = i
break

if version_line is not None:
for v in versions:
lines.insert(version_line, v)
def print_checksum_status(pkg: PackageBase, version_hashes: dict):
"""
Verify checksums present in version_hashes against those present
in the package's instructions.

with open(filename, "w") as f:
f.writelines(lines)
Args:
pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
version_hashes (dict): A dictionary of the form: version -> checksum.

msg = "opening editor to verify"
"""
results = []
num_verified = 0
failed = False

if not sys.stdout.isatty():
msg = "please verify"
max_len = max(len(str(v)) for v in version_hashes)
num_total = len(version_hashes)

tty.info(
"Added {0} new versions to {1}, "
"{2}.".format(len(versions) - 2, args.package, msg)
)
for version, sha in version_hashes.items():
if version not in pkg.versions:
msg = "No previous checksum"
status = "-"

elif sha == pkg.versions[version]["sha256"]:
msg = "Correct"
status = "="
num_verified += 1

if sys.stdout.isatty():
editor(filename)
else:
tty.warn("Could not add new versions to {0}.".format(args.package))
msg = sha
status = "x"
failed = True

results.append("{0:{1}} {2} {3}".format(str(version), max_len, f"[{status}]", msg))

# Display table of checksum results.
tty.msg(f"Verified {num_verified} of {num_total}", "", *llnl.util.lang.elide_list(results), "")

# Terminate at the end of function to prevent additional output.
if failed:
print()
tty.die("Invalid checksums found.")
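
The verification table above pads the version column to the longest version string and tags each row with a one-character status. A toy rendering of the same format string with made-up data:

rows = [
    ("1.2.13", "=", "Correct"),
    ("1.2.12", "-", "No previous checksum"),
    ("1.2.11", "x", "deadbeef"),
]
max_len = max(len(v) for v, _, _ in rows)
for version, status, msg in rows:
    print("{0:{1}} {2} {3}".format(version, max_len, f"[{status}]", msg))
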

def add_versions_to_package(pkg: PackageBase, version_lines: str):
"""
Add checksummed versions to a package's instructions and open a user's
editor so they may double check the work of the function.

Args:
pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
version_lines (str): A string of rendered version lines.

"""
# Get filename and path for package
filename = spack.repo.PATH.filename_for_package_name(pkg.name)
num_versions_added = 0

version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')

# Split rendered version lines into tuple of (version, version_line)
# We reverse sort here to make sure the versions match the version_lines
new_versions = []
for ver_line in version_lines.split("\n"):
match = version_re.match(ver_line)
if match:
new_versions.append((Version(match.group(1)), ver_line))

with open(filename, "r+") as f:
contents = f.read()
split_contents = version_statement_re.split(contents)

for i, subsection in enumerate(split_contents):
# If there are no more versions to add we should exit
if len(new_versions) <= 0:
break

# Check if the section contains a version
contents_version = version_re.match(subsection)
if contents_version is not None:
parsed_version = Version(contents_version.group(1))

if parsed_version < new_versions[0][0]:
split_contents[i:i] = [new_versions.pop(0)[1], " # FIX ME", "\n"]
num_versions_added += 1

elif parsed_version == new_versions[0][0]:
new_versions.pop(0)

# Seek back to the start of the file so we can rewrite the file contents.
f.seek(0)
f.writelines("".join(split_contents))

tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
tty.msg(f"Open {filename} to review the additions.")

if sys.stdout.isatty():
editor(filename)
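
add_versions_to_package leans on a detail of re.split: when the pattern contains a capturing group, the matched delimiters (here, whole version(...) statements) are kept in the result list, so new lines can be spliced in just before the first older version. A self-contained demonstration on a toy package.py body:

import re

version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
contents = (
    'class Zlib(Package):\n'
    '    version("1.2.13", sha256="abc")\n'
    '    version("1.2.12", sha256="def")\n'
)
parts = version_statement_re.split(contents)
for part in parts:
    print(repr(part))  # plain text and version(...) statements alternate
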

@@ -18,6 +18,7 @@
import spack.environment as ev
import spack.hash_types as ht
import spack.mirror
import spack.util.gpg as gpg_util
import spack.util.url as url_util
import spack.util.web as web_util

@@ -155,11 +156,27 @@ def setup_parser(subparser):
help=spack.cmd.first_line(ci_reproduce.__doc__),
)
reproduce.add_argument("job_url", help="URL of job artifacts bundle")
reproduce.add_argument(
"--runtime",
help="Container runtime to use.",
default="docker",
choices=["docker", "podman"],
)
reproduce.add_argument(
"--working-dir",
help="where to unpack artifacts",
default=os.path.join(os.getcwd(), "ci_reproduction"),
)
reproduce.add_argument(
"-s", "--autostart", help="Run docker reproducer automatically", action="store_true"
)
gpg_group = reproduce.add_mutually_exclusive_group(required=False)
gpg_group.add_argument(
"--gpg-file", help="Path to public GPG key for validating binary cache installs"
)
gpg_group.add_argument(
"--gpg-url", help="URL to public GPG key for validating binary cache installs"
)

reproduce.set_defaults(func=ci_reproduce)

@@ -270,6 +287,17 @@ def ci_rebuild(args):
spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME")
shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL")
rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")

# If signing key was provided via "SPACK_SIGNING_KEY", then try to import it.
if signing_key:
spack_ci.import_signing_key(signing_key)

# Fail early if signing is required but we don't have a signing key
sign_binaries = require_signing is not None and require_signing.lower() == "true"
if sign_binaries and not spack_ci.can_sign_binaries():
gpg_util.list(False, True)
tty.die("SPACK_REQUIRE_SIGNING=True => spack must have exactly one signing key")

# Construct absolute paths relative to current $CI_PROJECT_DIR
ci_project_dir = os.environ.get("CI_PROJECT_DIR")

@@ -394,11 +422,6 @@ def ci_rebuild(args):
dst_file = os.path.join(repro_dir, file_name)
shutil.copyfile(src_file, dst_file)

# If signing key was provided via "SPACK_SIGNING_KEY", then try to
# import it.
if signing_key:
spack_ci.import_signing_key(signing_key)

# Write this job's spec json into the reproduction directory, and it will
# also be used in the generated "spack install" command to install the spec
tty.debug("job concrete spec path: {0}".format(job_spec_json_path))

@@ -655,7 +678,7 @@ def ci_rebuild(args):
input_spec=job_spec,
buildcache_mirror_url=buildcache_mirror_url,
pipeline_mirror_url=pipeline_mirror_url,
pr_pipeline=spack_is_pr_pipeline,
sign_binaries=spack_ci.can_sign_binaries(),
):
msg = tty.msg if result.success else tty.warn
msg(

@@ -699,7 +722,7 @@ def ci_rebuild(args):

\033[34mTo reproduce this build locally, run:

spack ci reproduce-build {0} [--working-dir <dir>]
spack ci reproduce-build {0} [--working-dir <dir>] [--autostart]

If this project does not have public pipelines, you will need to first:

@@ -725,8 +748,18 @@ def ci_reproduce(args):
"""
job_url = args.job_url
work_dir = args.working_dir
autostart = args.autostart
runtime = args.runtime

return spack_ci.reproduce_ci_job(job_url, work_dir)
# Allow passing GPG key for reproducing protected CI jobs
if args.gpg_file:
gpg_key_url = url_util.path_to_file_url(args.gpg_file)
elif args.gpg_url:
gpg_key_url = args.gpg_url
else:
gpg_key_url = None

return spack_ci.reproduce_ci_job(job_url, work_dir, autostart, gpg_key_url, runtime)


def ci(parser, args):

@@ -17,6 +17,7 @@
import spack.config
import spack.repo
import spack.stage
import spack.store
import spack.util.path
from spack.paths import lib_path, var_path

@@ -114,22 +115,18 @@ def clean(parser, args):
if args.stage:
tty.msg("Removing all temporary build stages")
spack.stage.purge()
# Temp directory where buildcaches are extracted
extract_tmp = os.path.join(spack.store.layout.root, ".tmp")
if os.path.exists(extract_tmp):
tty.debug("Removing {0}".format(extract_tmp))
shutil.rmtree(extract_tmp)

if args.downloads:
tty.msg("Removing cached downloads")
spack.caches.fetch_cache.destroy()
spack.caches.FETCH_CACHE.destroy()

if args.failures:
tty.msg("Removing install failure marks")
spack.installer.clear_failures()
spack.store.STORE.failure_tracker.clear_all()

if args.misc_cache:
tty.msg("Removing cached information on repositories")
spack.caches.misc_cache.destroy()
spack.caches.MISC_CACHE.destroy()

if args.python_cache:
tty.msg("Removing python cache files")

@@ -9,16 +9,11 @@
import re
import sys
from argparse import ArgumentParser, Namespace
from typing import IO, Any, Callable, Dict, Sequence, Set
from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.argparsewriter import (
ArgparseCompletionWriter,
ArgparseRstWriter,
ArgparseWriter,
Command,
)
from llnl.util.argparsewriter import ArgparseRstWriter, ArgparseWriter, Command
from llnl.util.tty.colify import colify

import spack.cmd

@@ -41,9 +36,15 @@
"bash": {
"aliases": True,
"format": "bash",
"header": os.path.join(spack.paths.share_path, "bash", "spack-completion.in"),
"header": os.path.join(spack.paths.share_path, "bash", "spack-completion.bash"),
"update": os.path.join(spack.paths.share_path, "spack-completion.bash"),
}
},
"fish": {
"aliases": True,
"format": "fish",
"header": os.path.join(spack.paths.share_path, "fish", "spack-completion.fish"),
"update": os.path.join(spack.paths.share_path, "spack-completion.fish"),
},
}

@@ -178,9 +179,63 @@ def format(self, cmd: Command) -> str:
}


class BashCompletionWriter(ArgparseCompletionWriter):
class BashCompletionWriter(ArgparseWriter):
"""Write argparse output as bash programmable tab completion."""

def format(self, cmd: Command) -> str:
"""Return the string representation of a single node in the parser tree.

Args:
cmd: Parsed information about a command or subcommand.

Returns:
String representation of this subcommand.
"""

assert cmd.optionals  # we should always at least have -h, --help
assert not (cmd.positionals and cmd.subcommands)  # one or the other

# We only care about the arguments/flags, not the help messages
positionals: Tuple[str, ...] = ()
if cmd.positionals:
positionals, _, _, _ = zip(*cmd.positionals)
optionals, _, _, _, _ = zip(*cmd.optionals)
subcommands: Tuple[str, ...] = ()
if cmd.subcommands:
_, subcommands, _ = zip(*cmd.subcommands)

# Flatten lists of lists
optionals = [x for xx in optionals for x in xx]

return (
self.start_function(cmd.prog)
+ self.body(positionals, optionals, subcommands)
+ self.end_function(cmd.prog)
)
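
The rewritten format() above unpacks the parsed argparse tuples with zip(*...) and then flattens the per-option flag lists. A tiny stand-alone example of both steps (toy tuples; the real ones come from ArgparseWriter):

optionals = [
    (["-h", "--help"], "help", "help", 0, "show this help message and exit"),
    (["-f", "--force"], "force", "force", 0, "overwrite tarball if it exists"),
]
flags, dests, _, _, _ = zip(*optionals)
print(flags)                            # (['-h', '--help'], ['-f', '--force'])
print([x for xx in flags for x in xx])  # ['-h', '--help', '-f', '--force']
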
def start_function(self, prog: str) -> str:
"""Return the syntax needed to begin a function definition.

Args:
prog: Program name.

Returns:
Function definition beginning.
"""
name = prog.replace("-", "_").replace(" ", "_")
return "\n_{0}() {{".format(name)

def end_function(self, prog: str) -> str:
"""Return the syntax needed to end a function definition.

Args:
prog: Program name

Returns:
Function definition ending.
"""
return "}\n"

def body(
self, positionals: Sequence[str], optionals: Sequence[str], subcommands: Sequence[str]
) -> str:

@@ -264,6 +319,396 @@ def subcommands(self, subcommands: Sequence[str]) -> str:
return 'SPACK_COMPREPLY="{0}"'.format(" ".join(subcommands))


# Map argument destination names to their complete commands
# Earlier items in the list have higher precedence
_dest_to_fish_complete = {
("activate", "view"): "-f -a '(__fish_complete_directories)'",
("bootstrap root", "path"): "-f -a '(__fish_complete_directories)'",
("mirror add", "mirror"): "-f",
("repo add", "path"): "-f -a '(__fish_complete_directories)'",
("test find", "filter"): "-f -a '(__fish_spack_tests)'",
("bootstrap", "name"): "-f -a '(__fish_spack_bootstrap_names)'",
("buildcache create", "key"): "-f -a '(__fish_spack_gpg_keys)'",
("build-env", r"spec \[--\].*"): "-f -a '(__fish_spack_build_env_spec)'",
("checksum", "package"): "-f -a '(__fish_spack_packages)'",
(
"checksum",
"versions",
): "-f -a '(__fish_spack_package_versions $__fish_spack_argparse_argv[1])'",
("config", "path"): "-f -a '(__fish_spack_colon_path)'",
("config", "section"): "-f -a '(__fish_spack_config_sections)'",
("develop", "specs?"): "-f -k -a '(__fish_spack_specs_or_id)'",
("diff", "specs?"): "-f -a '(__fish_spack_installed_specs)'",
("gpg sign", "output"): "-f -a '(__fish_complete_directories)'",
("gpg", "keys?"): "-f -a '(__fish_spack_gpg_keys)'",
("graph", "specs?"): "-f -k -a '(__fish_spack_specs_or_id)'",
("help", "help_command"): "-f -a '(__fish_spack_commands)'",
("list", "filter"): "-f -a '(__fish_spack_packages)'",
("mirror", "mirror"): "-f -a '(__fish_spack_mirrors)'",
("pkg", "package"): "-f -a '(__fish_spack_pkg_packages)'",
("remove", "specs?"): "-f -a '(__fish_spack_installed_specs)'",
("repo", "namespace_or_path"): "$__fish_spack_force_files -a '(__fish_spack_repos)'",
("restage", "specs?"): "-f -k -a '(__fish_spack_specs_or_id)'",
("rm", "specs?"): "-f -a '(__fish_spack_installed_specs)'",
("solve", "specs?"): "-f -k -a '(__fish_spack_specs_or_id)'",
("spec", "specs?"): "-f -k -a '(__fish_spack_specs_or_id)'",
("stage", "specs?"): "-f -k -a '(__fish_spack_specs_or_id)'",
("test-env", r"spec \[--\].*"): "-f -a '(__fish_spack_build_env_spec)'",
("test", r"\[?name.*"): "-f -a '(__fish_spack_tests)'",
("undevelop", "specs?"): "-f -k -a '(__fish_spack_specs_or_id)'",
("verify", "specs_or_files"): "$__fish_spack_force_files -a '(__fish_spack_installed_specs)'",
("view", "path"): "-f -a '(__fish_complete_directories)'",
("", "comment"): "-f",
("", "compiler_spec"): "-f -a '(__fish_spack_installed_compilers)'",
("", "config_scopes"): "-f -a '(__fish_complete_directories)'",
("", "extendable"): "-f -a '(__fish_spack_extensions)'",
("", "installed_specs?"): "-f -a '(__fish_spack_installed_specs)'",
("", "job_url"): "-f",
("", "location_env"): "-f -a '(__fish_complete_directories)'",
("", "pytest_args"): "-f -a '(__fish_spack_unit_tests)'",
("", "package_or_file"): "$__fish_spack_force_files -a '(__fish_spack_packages)'",
("", "package_or_user"): "-f -a '(__fish_spack_packages)'",
("", "package"): "-f -a '(__fish_spack_packages)'",
("", "PKG"): "-f -a '(__fish_spack_packages)'",
("", "prefix"): "-f -a '(__fish_complete_directories)'",
("", r"rev\d?"): "-f -a '(__fish_spack_git_rev)'",
("", "specs?"): "-f -k -a '(__fish_spack_specs)'",
("", "tags?"): "-f -a '(__fish_spack_tags)'",
("", "virtual_package"): "-f -a '(__fish_spack_providers)'",
("", "working_dir"): "-f -a '(__fish_complete_directories)'",
("", r"(\w*_)?env"): "-f -a '(__fish_spack_environments)'",
("", r"(\w*_)?dir(ectory)?"): "-f -a '(__fish_spack_environments)'",
("", r"(\w*_)?mirror_name"): "-f -a '(__fish_spack_mirrors)'",
}


def _fish_dest_get_complete(prog: str, dest: str) -> Optional[str]:
"""Map from subcommand to autocompletion argument.

Args:
prog: Program name.
dest: Destination.

Returns:
Autocompletion argument.
"""
s = prog.split(None, 1)
subcmd = s[1] if len(s) == 2 else ""

for (prog_key, pos_key), value in _dest_to_fish_complete.items():
if subcmd.startswith(prog_key) and re.match("^" + pos_key + "$", dest):
return value
return None
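
_fish_dest_get_complete returns the first table entry whose subcommand prefix and destination regex both match, which is why earlier entries take precedence. A runnable miniature with a two-entry table (the full table is the _dest_to_fish_complete dict above):

import re

table = {
    ("checksum", "package"): "-f -a '(__fish_spack_packages)'",
    ("", "specs?"): "-f -k -a '(__fish_spack_specs)'",
}

def lookup(prog, dest):
    s = prog.split(None, 1)
    subcmd = s[1] if len(s) == 2 else ""
    for (prog_key, pos_key), value in table.items():
        if subcmd.startswith(prog_key) and re.match("^" + pos_key + "$", dest):
            return value
    return None

print(lookup("spack checksum", "package"))  # -f -a '(__fish_spack_packages)'
print(lookup("spack install", "specs"))     # -f -k -a '(__fish_spack_specs)'
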

class FishCompletionWriter(ArgparseWriter):
"""Write argparse output as fish programmable tab completion."""

def format(self, cmd: Command) -> str:
"""Return the string representation of a single node in the parser tree.

Args:
cmd: Parsed information about a command or subcommand.

Returns:
String representation of a node.
"""
assert cmd.optionals  # we should always at least have -h, --help
assert not (cmd.positionals and cmd.subcommands)  # one or the other

# We also need help messages and how arguments are used
# So we pass everything to completion writer
positionals = cmd.positionals
optionals = cmd.optionals
subcommands = cmd.subcommands

return (
self.prog_comment(cmd.prog)
+ self.optspecs(cmd.prog, optionals)
+ self.complete(cmd.prog, positionals, optionals, subcommands)
)

def _quote(self, string: str) -> str:
"""Quote string and escape special characters if necessary.

Args:
string: Input string.

Returns:
Quoted string.
"""
# Goal here is to match fish_indent behavior

# Strings without spaces (or other special characters) do not need to be escaped
if not any([sub in string for sub in [" ", "'", '"']]):
return string

string = string.replace("'", r"\'")
return f"'{string}'"

def optspecs(
self,
prog: str,
optionals: List[Tuple[Sequence[str], List[str], str, Union[int, str, None], str]],
) -> str:
"""Read the optionals and return the command to set optspec.

Args:
prog: Program name.
optionals: List of optional arguments.

Returns:
Command to set optspec variable.
"""
# Variables of optspecs
optspec_var = "__fish_spack_optspecs_" + prog.replace(" ", "_").replace("-", "_")

if optionals is None:
return "set -g %s\n" % optspec_var

# Build optspec by iterating over options
args = []

for flags, dest, _, nargs, _ in optionals:
if len(flags) == 0:
continue

required = ""

# Because nargs '?' is treated differently in fish, we treat it as required.
# Because multi-argument options are not supported, we treat it like one argument.
required = "="
if nargs == 0:
required = ""

# Pair short options with long options

# We need to do this because fish doesn't support multiple short
# or long options.
# However, since we are pairing options only, this is fine

short = [f[1:] for f in flags if f.startswith("-") and len(f) == 2]
long = [f[2:] for f in flags if f.startswith("--")]

while len(short) > 0 and len(long) > 0:
arg = "%s/%s%s" % (short.pop(), long.pop(), required)
while len(short) > 0:
arg = "%s/%s" % (short.pop(), required)
while len(long) > 0:
arg = "%s%s" % (long.pop(), required)

args.append(arg)

# Even if there is no option, we still set variable.
# In fish such variable is an empty array, we use it to
# indicate that such subcommand exists.
args = " ".join(args)

return "set -g %s %s\n" % (optspec_var, args)

@staticmethod
def complete_head(
prog: str, index: Optional[int] = None, nargs: Optional[Union[int, str]] = None
) -> str:
"""Return the head of the completion command.

Args:
prog: Program name.
index: Index of positional argument.
nargs: Number of arguments.

Returns:
Head of the completion command.
"""
# Split command and subcommand
s = prog.split(None, 1)
subcmd = s[1] if len(s) == 2 else ""

if index is None:
return "complete -c %s -n '__fish_spack_using_command %s'" % (s[0], subcmd)
elif nargs in [argparse.ZERO_OR_MORE, argparse.ONE_OR_MORE, argparse.REMAINDER]:
head = "complete -c %s -n '__fish_spack_using_command_pos_remainder %d %s'"
else:
head = "complete -c %s -n '__fish_spack_using_command_pos %d %s'"
return head % (s[0], index, subcmd)
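
complete_head assembles the fish complete preamble from the program name and subcommand; a worked example of the index-less branch, following the code above:

prog = "spack buildcache push"
s = prog.split(None, 1)
subcmd = s[1] if len(s) == 2 else ""
print("complete -c %s -n '__fish_spack_using_command %s'" % (s[0], subcmd))
# -> complete -c spack -n '__fish_spack_using_command buildcache push'
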
def complete(
self,
prog: str,
positionals: List[Tuple[str, Optional[Iterable[Any]], Union[int, str, None], str]],
optionals: List[Tuple[Sequence[str], List[str], str, Union[int, str, None], str]],
subcommands: List[Tuple[ArgumentParser, str, str]],
) -> str:
"""Return all the completion commands.

Args:
prog: Program name.
positionals: List of positional arguments.
optionals: List of optional arguments.
subcommands: List of subcommand parsers.

Returns:
Completion command.
"""
commands = []

if positionals:
commands.append(self.positionals(prog, positionals))

if subcommands:
commands.append(self.subcommands(prog, subcommands))

if optionals:
commands.append(self.optionals(prog, optionals))

return "".join(commands)

def positionals(
self,
prog: str,
positionals: List[Tuple[str, Optional[Iterable[Any]], Union[int, str, None], str]],
) -> str:
"""Return the completion for positional arguments.

Args:
prog: Program name.
positionals: List of positional arguments.

Returns:
Completion command.
"""
commands = []

for idx, (args, choices, nargs, help) in enumerate(positionals):
# Make sure we always get same order of output
if isinstance(choices, dict):
choices = sorted(choices.keys())
elif isinstance(choices, (set, frozenset)):
choices = sorted(choices)

# Remove platform-specific choices to avoid hard-coding the platform.
if choices is not None:
valid_choices = []
for choice in choices:
if spack.platforms.host().name not in choice:
valid_choices.append(choice)
choices = valid_choices

head = self.complete_head(prog, idx, nargs)

if choices is not None:
# If there are choices, we provide a completion for all possible values.
commands.append(head + " -f -a %s" % self._quote(" ".join(choices)))
else:
# Otherwise, we try to find a predefined completion for it
value = _fish_dest_get_complete(prog, args)
if value is not None:
commands.append(head + " " + value)

return "\n".join(commands) + "\n"

def prog_comment(self, prog: str) -> str:
"""Return a comment line for the command.

Args:
prog: Program name.

Returns:
Comment line.
"""
return "\n# %s\n" % prog

def optionals(
self,
prog: str,
optionals: List[Tuple[Sequence[str], List[str], str, Union[int, str, None], str]],
) -> str:
"""Return the completion for optional arguments.

Args:
prog: Program name.
optionals: List of optional arguments.

Returns:
Completion command.
"""
commands = []
head = self.complete_head(prog)

for flags, dest, _, nargs, help in optionals:
# Make sure we always get same order of output
if isinstance(dest, dict):
dest = sorted(dest.keys())
elif isinstance(dest, (set, frozenset)):
dest = sorted(dest)

# Remove platform-specific choices to avoid hard-coding the platform.
if dest is not None:
valid_choices = []
for choice in dest:
if spack.platforms.host().name not in choice:
valid_choices.append(choice)
dest = valid_choices

# To provide a description for optionals, and also possible values,
# we need to use two separate complete commands.
# Otherwise, each option would have the same description.
prefix = head

# Add all flags to the completion
for f in flags:
if f.startswith("--"):
long = f[2:]
prefix += " -l %s" % long
elif f.startswith("-"):
short = f[1:]
assert len(short) == 1
prefix += " -s %s" % short

# Check if the option requires an argument.
# Currently multi-argument options are not supported, so we treat it like one argument.
if nargs != 0:
prefix += " -r"

if dest is not None:
# If there are choices, we provide a completion for all possible values.
commands.append(prefix + " -f -a %s" % self._quote(" ".join(dest)))
else:
# Otherwise, we try to find a predefined completion for it
value = _fish_dest_get_complete(prog, dest)
if value is not None:
commands.append(prefix + " " + value)

if help:
commands.append(prefix + " -d %s" % self._quote(help))

return "\n".join(commands) + "\n"

def subcommands(self, prog: str, subcommands: List[Tuple[ArgumentParser, str, str]]) -> str:
"""Return the completion for subcommands.

Args:
prog: Program name.
subcommands: List of subcommand parsers.

Returns:
Completion command.
"""
commands = []
head = self.complete_head(prog, 0)

for _, subcommand, help in subcommands:
command = head + " -f -a %s" % self._quote(subcommand)

if help is not None and len(help) > 0:
help = help.split("\n")[0]
command += " -d %s" % self._quote(help)

commands.append(command)

return "\n".join(commands) + "\n"


@formatter
def subcommands(args: Namespace, out: IO) -> None:
"""Hierarchical tree of subcommands.

@@ -371,6 +816,15 @@ def bash(args: Namespace, out: IO) -> None:
writer.write(parser)


@formatter
def fish(args, out):
parser = spack.main.make_argument_parser()
spack.main.add_all_commands(parser)

writer = FishCompletionWriter(parser.prog, out, args.aliases)
writer.write(parser)


def prepend_header(args: Namespace, out: IO) -> None:
"""Prepend header text at the beginning of a file.

||||
@@ -82,12 +82,12 @@ def _specs(self, **kwargs):
|
||||
|
||||
# return everything for an empty query.
|
||||
if not qspecs:
|
||||
return spack.store.db.query(**kwargs)
|
||||
return spack.store.STORE.db.query(**kwargs)
|
||||
|
||||
# Return only matching stuff otherwise.
|
||||
specs = {}
|
||||
for spec in qspecs:
|
||||
for s in spack.store.db.query(spec, **kwargs):
|
||||
for s in spack.store.STORE.db.query(spec, **kwargs):
|
||||
# This is fast for already-concrete specs
|
||||
specs[s.dag_hash()] = s
|
||||
|
||||
@@ -331,6 +331,17 @@ def tags():
    )


@arg
def namespaces():
    return Args(
        "-N",
        "--namespaces",
        action="store_true",
        default=False,
        help="show fully qualified package names",
    )


@arg
def jobs():
    return Args(
@@ -106,7 +106,7 @@ def emulate_env_utility(cmd_name, context, args):
    visitor = AreDepsInstalledVisitor(context=context)

    # Mass install check needs read transaction.
    with spack.store.db.read_transaction():
    with spack.store.STORE.db.read_transaction():
        traverse.traverse_breadth_first_with_visitor([spec], traverse.CoverNodesVisitor(visitor))

    if visitor.has_uninstalled_deps:
@@ -24,7 +24,6 @@ def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="compiler_command")

    scopes = spack.config.scopes()
    scopes_metavar = spack.config.scopes_metavar

    # Find
    find_parser = sp.add_parser(
@@ -36,7 +35,7 @@ def setup_parser(subparser):
    find_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope("compilers"),
        help="configuration scope to modify",
    )
@@ -50,7 +49,7 @@ def setup_parser(subparser):
    remove_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        metavar=spack.config.SCOPES_METAVAR,
        default=None,
        help="configuration scope to modify",
    )
@@ -60,7 +59,7 @@ def setup_parser(subparser):
    list_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_list_scope(),
        help="configuration scope to read from",
    )
@@ -71,7 +70,7 @@ def setup_parser(subparser):
    info_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_list_scope(),
        help="configuration scope to read from",
    )
@@ -93,7 +92,7 @@ def compiler_find(args):
    n = len(new_compilers)
    s = "s" if n > 1 else ""

    config = spack.config.config
    config = spack.config.CONFIG
    filename = config.get_config_filename(args.scope, "compilers")
    tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
    colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
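The --scope arguments above keep their choices list but now take the metavar from a module-level constant. A short argparse sketch of why metavar matters here; the constant's value is illustrative and may differ from Spack's real SCOPES_METAVAR string:

    import argparse

    SCOPES_METAVAR = "{defaults,system,site,user} or env:ENVIRONMENT"

    parser = argparse.ArgumentParser(prog="spack config")
    parser.add_argument(
        "--scope",
        choices=["defaults", "system", "site", "user"],  # values are still validated
        metavar=SCOPES_METAVAR,  # shown in --help instead of the raw choice list
        help="configuration scope to read/modify",
    )
    print(parser.format_help())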
@@ -13,12 +13,11 @@

def setup_parser(subparser):
    scopes = spack.config.scopes()
    scopes_metavar = spack.config.scopes_metavar

    subparser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        metavar=spack.config.SCOPES_METAVAR,
        help="configuration scope to read/modify",
    )


@@ -27,13 +27,12 @@

def setup_parser(subparser):
    scopes = spack.config.scopes()
    scopes_metavar = spack.config.scopes_metavar

    # User can only choose one
    subparser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        metavar=spack.config.SCOPES_METAVAR,
        help="configuration scope to read/modify",
    )
@@ -45,7 +44,7 @@ def setup_parser(subparser):
        help="configuration section to print\n\noptions: %(choices)s",
        nargs="?",
        metavar="section",
        choices=spack.config.section_schemas,
        choices=spack.config.SECTION_SCHEMAS,
    )

    blame_parser = sp.add_parser(
@@ -55,7 +54,7 @@ def setup_parser(subparser):
        "section",
        help="configuration section to print\n\noptions: %(choices)s",
        metavar="section",
        choices=spack.config.section_schemas,
        choices=spack.config.SECTION_SCHEMAS,
    )

    edit_parser = sp.add_parser("edit", help="edit configuration file")
@@ -64,7 +63,7 @@ def setup_parser(subparser):
        help="configuration section to edit\n\noptions: %(choices)s",
        metavar="section",
        nargs="?",
        choices=spack.config.section_schemas,
        choices=spack.config.SECTION_SCHEMAS,
    )
    edit_parser.add_argument(
        "--print-file", action="store_true", help="print the file name that would be edited"
@@ -146,10 +145,10 @@ def config_get(args):
    scope, section = _get_scope_and_section(args)

    if section is not None:
        spack.config.config.print_section(section)
        spack.config.CONFIG.print_section(section)

    elif scope and scope.startswith("env:"):
        config_file = spack.config.config.get_config_filename(scope, section)
        config_file = spack.config.CONFIG.get_config_filename(scope, section)
        if os.path.exists(config_file):
            with open(config_file) as f:
                print(f.read())
@@ -162,7 +161,7 @@ def config_get(args):

def config_blame(args):
    """Print out line-by-line blame of merged YAML."""
    spack.config.config.print_section(args.section, blame=True)
    spack.config.CONFIG.print_section(args.section, blame=True)


def config_edit(args):
@@ -181,7 +180,7 @@ def config_edit(args):
    scope, section = _get_scope_and_section(args)
    if not scope and not section:
        tty.die("`spack config edit` requires a section argument or an active environment.")
    config_file = spack.config.config.get_config_filename(scope, section)
    config_file = spack.config.CONFIG.get_config_filename(scope, section)

    if args.print_file:
        print(config_file)
@@ -194,7 +193,7 @@ def config_list(args):

    Used primarily for shell tab completion scripts.
    """
    print(" ".join(list(spack.config.section_schemas)))
    print(" ".join(list(spack.config.SECTION_SCHEMAS)))


def config_add(args):
@@ -251,19 +250,19 @@ def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):

def config_update(args):
    # Read the configuration files
    spack.config.config.get_config(args.section, scope=args.scope)
    spack.config.CONFIG.get_config(args.section, scope=args.scope)
    updates: List[spack.config.ConfigScope] = list(
        filter(
            lambda s: not isinstance(
                s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
            ),
            spack.config.config.format_updates[args.section],
            spack.config.CONFIG.format_updates[args.section],
        )
    )

    cannot_overwrite, skip_system_scope = [], False
    for scope in updates:
        cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
        cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
        can_be_updated = _can_update_config_file(scope, cfg_file)
        if not can_be_updated:
            if scope.name == "system":
@@ -302,7 +301,7 @@ def config_update(args):
            " the latest schema format:\n\n"
        )
        for scope in updates:
            cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
            cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
            msg += "\t[scope={0}, file={1}]\n".format(scope.name, cfg_file)
        msg += (
            "\nIf the configuration files are updated, versions of Spack "
@@ -325,7 +324,7 @@ def config_update(args):
        # Make a backup copy and rewrite the file
        bkp_file = cfg_file + ".bkp"
        shutil.copy(cfg_file, bkp_file)
        spack.config.config.update_config(args.section, data, scope=scope.name, force=True)
        spack.config.CONFIG.update_config(args.section, data, scope=scope.name, force=True)
        tty.msg(f'File "{cfg_file}" updated [backup={bkp_file}]')


@@ -337,13 +336,13 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):


def config_revert(args):
    scopes = [args.scope] if args.scope else [x.name for x in spack.config.config.file_scopes]
    scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]

    # Search for backup files in the configuration scopes
    Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])
    to_be_restored, cannot_overwrite = [], []
    for scope in scopes:
        cfg_file = spack.config.config.get_config_filename(scope, args.section)
        cfg_file = spack.config.CONFIG.get_config_filename(scope, args.section)
        bkp_file = cfg_file + ".bkp"

        # If the backup file doesn't exist, move on to the next scope
@@ -399,8 +398,8 @@ def config_prefer_upstream(args):
    if scope is None:
        scope = spack.config.default_modify_scope("packages")

    all_specs = set(spack.store.db.query(installed=True))
    local_specs = set(spack.store.db.query_local(installed=True))
    all_specs = set(spack.store.STORE.db.query(installed=True))
    local_specs = set(spack.store.STORE.db.query_local(installed=True))
    pref_specs = local_specs if args.local else all_specs - local_specs

    conflicting_variants = set()
@@ -457,7 +456,7 @@ def config_prefer_upstream(args):
    existing = spack.config.get("packages", scope=scope)
    new = spack.config.merge_yaml(existing, pkgs)
    spack.config.set("packages", new, scope)
    config_file = spack.config.config.get_config_filename(scope, section)
    config_file = spack.config.CONFIG.get_config_filename(scope, section)

    tty.msg("Updated config at {0}".format(config_file))
@@ -17,6 +17,7 @@
from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
from spack.util.editor import editor
from spack.util.executable import ProcessError, which
from spack.util.format import get_version_lines
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name

description = "create a new package file"
@@ -325,6 +326,7 @@ class PythonPackageTemplate(PackageTemplate):
    # FIXME: Add a build backend, usually defined in pyproject.toml. If no such file
    # exists, use setuptools.
    # depends_on("py-setuptools", type="build")
    # depends_on("py-hatchling", type="build")
    # depends_on("py-flit-core", type="build")
    # depends_on("py-poetry-core", type="build")

@@ -332,17 +334,11 @@ class PythonPackageTemplate(PackageTemplate):
    # depends_on("py-foo", type=("build", "run"))"""

    body_def = """\
    def global_options(self, spec, prefix):
        # FIXME: Add options to pass to setup.py
    def config_settings(self, spec, prefix):
        # FIXME: Add configuration settings to be passed to the build backend
        # FIXME: If not needed, delete this function
        options = []
        return options

    def install_options(self, spec, prefix):
        # FIXME: Add options to pass to setup.py install
        # FIXME: If not needed, delete this function
        options = []
        return options"""
        settings = {}
        return settings"""

    def __init__(self, name, url, *args, **kwargs):
        # If the user provided `--name py-numpy`, don't rename it py-py-numpy
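This template change tracks the move from setup.py-specific hooks (global_options/install_options) to a single config_settings hook that feeds the PEP 517 build backend. A hedged sketch of what a generated package might eventually fill in; the variant name and the settings key are invented for the example and depend entirely on the backend in use:

    def config_settings(self, spec, prefix):
        # Hypothetical settings passed through to the build backend
        settings = {}
        if spec.satisfies("+parallel"):
            settings["setup-args"] = "-Dparallel=true"
        return settings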
@@ -837,13 +833,15 @@ def get_versions(args, name):
        version = parse_version(args.url)
        url_dict = {version: args.url}

        versions = spack.stage.get_checksums_for_versions(
        version_hashes = spack.stage.get_checksums_for_versions(
            url_dict,
            name,
            first_stage_function=guesser,
            keep_stage=args.keep_stage,
            batch=(args.batch or len(url_dict) == 1),
        )

        versions = get_version_lines(version_hashes, url_dict)
    else:
        versions = unhashed_versions
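The hunk splits what used to be one call into two steps: compute checksums first, then render them as package-file lines. A stand-in sketch of the new flow; the real get_version_lines lives in spack.util.format and its exact output format may differ:

    version_hashes = {"1.2.3": "abc123"}  # as returned by get_checksums_for_versions
    url_dict = {"1.2.3": "https://example.com/pkg-1.2.3.tar.gz"}

    def get_version_lines(hashes, urls):
        # stand-in: one version() directive per checksummed version
        return "\n".join(f'    version("{v}", sha256="{h}")' for v, h in hashes.items())

    versions = get_version_lines(version_hashes, url_dict)
    print(versions)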
@@ -917,11 +915,11 @@ def get_repository(args, name):
        )
    else:
        if spec.namespace:
            repo = spack.repo.path.get_repo(spec.namespace, None)
            repo = spack.repo.PATH.get_repo(spec.namespace, None)
            if not repo:
                tty.die("Unknown namespace: '{0}'".format(spec.namespace))
        else:
            repo = spack.repo.path.first_repo()
            repo = spack.repo.PATH.first_repo()

    # Set the namespace on the spec if it's not there already
    if not spec.namespace:
@@ -60,16 +60,16 @@ def create_db_tarball(args):
    tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
    tarball_path = os.path.abspath(tarball_name)

    base = os.path.basename(str(spack.store.root))
    base = os.path.basename(str(spack.store.STORE.root))
    transform_args = []
    if "GNU" in tar("--version", output=str):
        transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
    else:
        transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]

    wd = os.path.dirname(str(spack.store.root))
    wd = os.path.dirname(str(spack.store.STORE.root))
    with working_dir(wd):
        files = [spack.store.db._index_path]
        files = [spack.store.STORE.db._index_path]
        files += glob("%s/*/*/*/.spack/spec.json" % base)
        files += glob("%s/*/*/*/.spack/spec.yaml" % base)
        files = [os.path.relpath(f) for f in files]
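The GNU/BSD split in this hunk exists because both tars can rename the archive's top-level directory, but they spell the flag differently. A small sketch of the same detection outside Spack's wrappers; the directory and tarball names are illustrative:

    import subprocess

    base = "opt-spack-store"
    tarball_name = "spack-db.example.tar.gz"

    out = subprocess.run(["tar", "--version"], capture_output=True, text=True).stdout
    if "GNU" in out:
        # GNU tar renames members with --transform (a sed expression)
        transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
    else:
        # BSD tar (e.g. on macOS) spells the same operation -s
        transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
    print(transform_args)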
@@ -60,7 +60,7 @@ def dependencies(parser, args):
        format_string = "{name}{@version}{%compiler}{/hash:7}"
        if sys.stdout.isatty():
            tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
        deps = spack.store.db.installed_relatives(
        deps = spack.store.STORE.db.installed_relatives(
            spec, "children", args.transitive, deptype=args.deptype
        )
        if deps:
@@ -47,14 +47,14 @@ def inverted_dependencies():
        actual dependents.
    """
    dag = {}
    for pkg_cls in spack.repo.path.all_package_classes():
    for pkg_cls in spack.repo.PATH.all_package_classes():
        dag.setdefault(pkg_cls.name, set())
        for dep in pkg_cls.dependencies:
            deps = [dep]

            # expand virtuals if necessary
            if spack.repo.path.is_virtual(dep):
                deps += [s.name for s in spack.repo.path.providers_for(dep)]
            if spack.repo.PATH.is_virtual(dep):
                deps += [s.name for s in spack.repo.PATH.providers_for(dep)]

            for d in deps:
                dag.setdefault(d, set()).add(pkg_cls.name)
@@ -96,7 +96,7 @@ def dependents(parser, args):
        format_string = "{name}{@version}{%compiler}{/hash:7}"
        if sys.stdout.isatty():
            tty.msg("Dependents of %s" % spec.cformat(format_string))
        deps = spack.store.db.installed_relatives(spec, "parents", args.transitive)
        deps = spack.store.STORE.db.installed_relatives(spec, "parents", args.transitive)
        if deps:
            spack.cmd.display_specs(deps, long=True)
        else:
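inverted_dependencies builds the reverse adjacency map of the package DAG: for each package it records who depends on it, expanding virtual dependencies to their providers. A self-contained toy version of the same inversion; the package data is made up:

    deps = {"hdf5": {"mpi", "zlib"}, "openmpi": set(), "zlib": set()}
    providers = {"mpi": {"openmpi"}}  # virtual name -> concrete providers

    dag = {}
    for pkg, pkg_deps in deps.items():
        dag.setdefault(pkg, set())
        for dep in pkg_deps:
            for d in providers.get(dep, {dep}):  # expand virtuals if necessary
                dag.setdefault(d, set()).add(pkg)

    print(dag)  # openmpi and zlib now list hdf5 as a dependent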
@@ -130,7 +130,7 @@ def deprecate(parser, args):
    already_deprecated = []
    already_deprecated_for = []
    for spec in all_deprecate:
        deprecated_for = spack.store.db.deprecator(spec)
        deprecated_for = spack.store.STORE.db.deprecator(spec)
        if deprecated_for:
            already_deprecated.append(spec)
            already_deprecated_for.append(deprecated_for)
@@ -98,7 +98,7 @@ def dev_build(self, args):
        tty.die("spack dev-build only takes one spec.")

    spec = specs[0]
    if not spack.repo.path.exists(spec.name):
    if not spack.repo.PATH.exists(spec.name):
        tty.die(
            "No package for '{0}' was found.".format(spec.name),
            " Use `spack create` to create a new package",

@@ -66,8 +66,7 @@ def develop(parser, args):
            # Both old syntax `spack develop pkg@x` and new syntax `spack develop pkg@=x`
            # are currently supported.
            spec = spack.spec.parse_with_version_concrete(entry["spec"])
            pkg_cls = spack.repo.path.get_pkg_class(spec.name)
            pkg_cls(spec).stage.steal_source(abspath)
            env.develop(spec=spec, path=path, clone=True)

        if not env.dev_specs:
            tty.warn("No develop specs to download")
@@ -31,9 +31,9 @@ def edit_package(name, repo_path, namespace):
    if repo_path:
        repo = spack.repo.Repo(repo_path)
    elif namespace:
        repo = spack.repo.path.get_repo(namespace)
        repo = spack.repo.PATH.get_repo(namespace)
    else:
        repo = spack.repo.path
        repo = spack.repo.PATH
    path = repo.filename_for_package_name(name)

    spec = Spec(name)
@@ -58,7 +58,7 @@ def extensions(parser, args):

    extendable_pkgs = []
    for name in spack.repo.all_package_names():
        pkg_cls = spack.repo.path.get_pkg_class(name)
        pkg_cls = spack.repo.PATH.get_pkg_class(name)
        if pkg_cls.extendable:
            extendable_pkgs.append(name)

@@ -81,7 +81,7 @@ def extensions(parser, args):

    if args.show in ("packages", "all"):
        # List package names of extensions
        extensions = spack.repo.path.extensions_for(spec)
        extensions = spack.repo.PATH.extensions_for(spec)
        if not extensions:
            tty.msg("%s has no extensions." % spec.cshort_spec)
        else:
@@ -91,7 +91,7 @@ def extensions(parser, args):

    if args.show in ("installed", "all"):
        # List specs of installed extensions.
        installed = [s.spec for s in spack.store.db.installed_extensions_for(spec)]
        installed = [s.spec for s in spack.store.STORE.db.installed_extensions_for(spec)]

        if args.show == "all":
            print
@@ -13,6 +13,7 @@
import spack
import spack.cmd
import spack.cmd.common.arguments
import spack.config
import spack.cray_manifest as cray_manifest
import spack.detection
import spack.error
@@ -27,7 +28,6 @@ def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="external_command")

    scopes = spack.config.scopes()
    scopes_metavar = spack.config.scopes_metavar

    find_parser = sp.add_parser("find", help="add external packages to packages.yaml")
    find_parser.add_argument(
@@ -47,7 +47,7 @@ def setup_parser(subparser):
    find_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope("packages"),
        help="configuration scope to modify",
    )
@@ -133,9 +133,9 @@ def external_find(args):

    # Add the packages that have been required explicitly
    if args.packages:
        pkg_cls_to_check = [spack.repo.path.get_pkg_class(pkg) for pkg in args.packages]
        pkg_cls_to_check = [spack.repo.PATH.get_pkg_class(pkg) for pkg in args.packages]
        if args.tags:
            allowed = set(spack.repo.path.packages_with_tags(*args.tags))
            allowed = set(spack.repo.PATH.packages_with_tags(*args.tags))
            pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]

    if args.tags and not pkg_cls_to_check:
@@ -144,15 +144,15 @@ def external_find(args):
        # Since tags are cached it's much faster to construct what we need
        # to search directly, rather than filtering after the fact
        pkg_cls_to_check = [
            spack.repo.path.get_pkg_class(pkg_name)
            spack.repo.PATH.get_pkg_class(pkg_name)
            for tag in args.tags
            for pkg_name in spack.repo.path.packages_with_tags(tag)
            for pkg_name in spack.repo.PATH.packages_with_tags(tag)
        ]
        pkg_cls_to_check = list(set(pkg_cls_to_check))

    # If the list of packages is empty, search for every possible package
    if not args.tags and not pkg_cls_to_check:
        pkg_cls_to_check = list(spack.repo.path.all_package_classes())
        pkg_cls_to_check = list(spack.repo.PATH.all_package_classes())

    # If the user specified any packages to exclude from external find, add them here
    if args.exclude:
@@ -165,7 +165,7 @@ def external_find(args):
        detected_packages, scope=args.scope, buildable=not args.not_buildable
    )
    if new_entries:
        path = spack.config.config.get_config_filename(args.scope, "packages")
        path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
        msg = "The following specs have been detected on this system and added to {0}"
        tty.msg(msg.format(path))
        spack.cmd.display_specs(new_entries)
@@ -239,7 +239,7 @@ def _collect_and_consume_cray_manifest_files(

def external_list(args):
    # Trigger a read of all packages, might take a long time.
    list(spack.repo.path.all_package_classes())
    list(spack.repo.PATH.all_package_classes())
    # Print all the detectable packages
    tty.msg("Detectable packages per repository")
    for namespace, pkgs in sorted(spack.package_base.detectable_packages.items()):
@@ -10,6 +10,7 @@
import spack.config
import spack.environment as ev
import spack.repo
import spack.traverse

description = "fetch archives for packages"
section = "build"
@@ -36,6 +37,12 @@ def setup_parser(subparser):


def fetch(parser, args):
    if args.no_checksum:
        spack.config.set("config:checksum", False, scope="command_line")

    if args.deprecated:
        spack.config.set("config:deprecated", True, scope="command_line")

    if args.specs:
        specs = spack.cmd.parse_specs(args.specs, concretize=True)
    else:
@@ -55,18 +62,17 @@ def fetch(parser, args):
        else:
            tty.die("fetch requires at least one spec argument")

    if args.no_checksum:
        spack.config.set("config:checksum", False, scope="command_line")
    if args.dependencies or args.missing:
        to_be_fetched = spack.traverse.traverse_nodes(specs, key=spack.traverse.by_dag_hash)
    else:
        to_be_fetched = specs

    if args.deprecated:
        spack.config.set("config:deprecated", True, scope="command_line")
    for spec in to_be_fetched:
        if args.missing and spec.installed:
            continue

    for spec in specs:
        if args.missing or args.dependencies:
            for s in spec.traverse(root=False):
                # Skip already-installed packages with --missing
                if args.missing and s.installed:
                    continue
        pkg = spec.package

                s.package.do_fetch()
        spec.package.do_fetch()
        pkg.stage.keep = True
        with pkg.stage:
            pkg.do_fetch()
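The rewritten fetch loop ends with the stage-keeping pattern (pkg.stage.keep = True; with pkg.stage: ...) that also appears in the patch and stage commands below. A minimal sketch of why the keep flag matters; FakeStage is a stand-in, not Spack's real stage class:

    class FakeStage:
        """Stand-in for pkg.stage; real stages manage a temporary build directory."""
        def __init__(self):
            self.keep = False
        def __enter__(self):
            return self
        def __exit__(self, *exc):
            # a real stage deletes its directory here unless keep is set
            print("keeping stage" if self.keep else "destroying stage")

    stage = FakeStage()
    stage.keep = True  # survive the context exit, like pkg.stage.keep = True above
    with stage:
        print("fetching sources into the stage")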
@@ -67,7 +67,7 @@ def setup_parser(subparser):
        help="do not group specs by arch/compiler",
    )

    arguments.add_common_arguments(subparser, ["long", "very_long", "tags"])
    arguments.add_common_arguments(subparser, ["long", "very_long", "tags", "namespaces"])

    subparser.add_argument(
        "-c",
@@ -140,9 +140,6 @@ def setup_parser(subparser):
    subparser.add_argument(
        "--only-deprecated", action="store_true", help="show only deprecated packages"
    )
    subparser.add_argument(
        "-N", "--namespace", action="store_true", help="show fully qualified package names"
    )

    subparser.add_argument("--start-date", help="earliest date of installation [YYYY-MM-DD]")
    subparser.add_argument("--end-date", help="latest date of installation [YYYY-MM-DD]")
@@ -230,7 +227,7 @@ def display_env(env, args, decorator, results):
        env.user_specs,
        root_args,
        decorator=lambda s, f: color.colorize("@*{%s}" % f),
        namespace=True,
        namespaces=True,
        show_flags=True,
        show_full_compiler=True,
        variants=True,
@@ -271,7 +268,7 @@ def find(parser, args):

    # If tags have been specified on the command line, filter by tags
    if args.tags:
        packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
        packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
        results = [x for x in results if x.name in packages_with_tags]

    if args.loaded:
@@ -20,7 +20,7 @@ def setup_parser(subparser):


def gc(parser, args):
    specs = spack.store.db.unused_specs
    specs = spack.store.STORE.db.unused_specs

    # Restrict garbage collection to the active environment
    # speculating over roots that are yet to be installed
@@ -63,7 +63,7 @@ def graph(parser, args):
        if env:
            specs = env.all_specs()
        else:
            specs = spack.store.db.query()
            specs = spack.store.STORE.db.query()

    else:
        specs = spack.cmd.parse_specs(args.specs, concretize=not args.static)
@@ -349,7 +349,7 @@ def print_virtuals(pkg):

def info(parser, args):
    spec = spack.spec.Spec(args.package)
    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
    pkg = pkg_cls(spec)

    # Output core package information

@@ -266,7 +266,7 @@ def require_user_confirmation_for_overwrite(concrete_specs, args):
    if args.yes_to_all:
        return

    installed = list(filter(lambda x: x, map(spack.store.db.query_one, concrete_specs)))
    installed = list(filter(lambda x: x, map(spack.store.STORE.db.query_one, concrete_specs)))
    display_args = {"long": True, "show_flags": True, "variants": True}

    if installed:
@@ -107,7 +107,7 @@ def match(p, f):
        if f.match(p):
            return True

        pkg_cls = spack.repo.path.get_pkg_class(p)
        pkg_cls = spack.repo.PATH.get_pkg_class(p)
        if pkg_cls.__doc__:
            return f.match(pkg_cls.__doc__)
        return False
@@ -159,7 +159,7 @@ def get_dependencies(pkg):
@formatter
def version_json(pkg_names, out):
    """Print all packages with their latest versions."""
    pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
    pkg_classes = [spack.repo.PATH.get_pkg_class(name) for name in pkg_names]

    out.write("[\n")

@@ -201,7 +201,7 @@ def html(pkg_names, out):
    """

    # Read in all packages
    pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
    pkg_classes = [spack.repo.PATH.get_pkg_class(name) for name in pkg_names]

    # Start at 2 because the title of the page from Sphinx is id1.
    span_id = 2
@@ -313,13 +313,13 @@ def list(parser, args):

    # If tags have been specified on the command line, filter by tags
    if args.tags:
        packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
        packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
        sorted_packages = [p for p in sorted_packages if p in packages_with_tags]

    if args.update:
        # change output stream if user asked for update
        if os.path.exists(args.update):
            if os.path.getmtime(args.update) > spack.repo.path.last_mtime():
            if os.path.getmtime(args.update) > spack.repo.PATH.last_mtime():
                tty.msg("File is up to date: %s" % args.update)
                return
@@ -101,7 +101,7 @@ def load(parser, args):
        )
        return 1

    with spack.store.db.read_transaction():
    with spack.store.STORE.db.read_transaction():
        if "dependencies" in args.things_to_load:
            include_roots = "package" in args.things_to_load
            specs = [

@@ -109,7 +109,7 @@ def location(parser, args):
        return

    if args.packages:
        print(spack.repo.path.first_repo().root)
        print(spack.repo.PATH.first_repo().root)
        return

    if args.stages:
@@ -135,7 +135,7 @@ def location(parser, args):

    # Package dir just needs the spec name
    if args.package_dir:
        print(spack.repo.path.dirname_for_package_name(spec.name))
        print(spack.repo.PATH.dirname_for_package_name(spec.name))
        return

    # Either concretize or filter from already concretized environment
@@ -54,11 +54,11 @@ def setup_parser(subparser):

def packages_to_maintainers(package_names=None):
    if not package_names:
        package_names = spack.repo.path.all_package_names()
        package_names = spack.repo.PATH.all_package_names()

    pkg_to_users = defaultdict(lambda: set())
    for name in package_names:
        cls = spack.repo.path.get_pkg_class(name)
        cls = spack.repo.PATH.get_pkg_class(name)
        for user in cls.maintainers:
            pkg_to_users[name].add(user)

@@ -67,8 +67,8 @@ def packages_to_maintainers(package_names=None):

def maintainers_to_packages(users=None):
    user_to_pkgs = defaultdict(lambda: [])
    for name in spack.repo.path.all_package_names():
        cls = spack.repo.path.get_pkg_class(name)
    for name in spack.repo.PATH.all_package_names():
        cls = spack.repo.PATH.get_pkg_class(name)
        for user in cls.maintainers:
            lower_users = [u.lower() for u in users]
            if not users or user.lower() in lower_users:
@@ -80,8 +80,8 @@ def maintainers_to_packages(users=None):
def maintained_packages():
    maintained = []
    unmaintained = []
    for name in spack.repo.path.all_package_names():
        cls = spack.repo.path.get_pkg_class(name)
    for name in spack.repo.PATH.all_package_names():
        cls = spack.repo.PATH.get_pkg_class(name)
        if cls.maintainers:
            maintained.append(name)
        else:
@@ -71,7 +71,7 @@ def find_matching_specs(specs, allow_multiple_matches=False):

    for spec in specs:
        install_query = [InstallStatuses.INSTALLED]
        matching = spack.store.db.query_local(spec, installed=install_query)
        matching = spack.store.STORE.db.query_local(spec, installed=install_query)
        # For each spec provided, make sure it refers to only one package.
        # Fail and ask user to be unambiguous if it doesn't
        if not allow_multiple_matches and len(matching) > 1:
@@ -102,7 +102,7 @@ def do_mark(specs, explicit):
        explicit (bool): whether to mark specs as explicitly installed
    """
    for spec in specs:
        spack.store.db.update_explicit(spec, explicit)
        spack.store.STORE.db.update_explicit(spec, explicit)


def mark_specs(args, specs):
@@ -90,7 +90,6 @@ def setup_parser(subparser):

    # used to construct scope arguments below
    scopes = spack.config.scopes()
    scopes_metavar = spack.config.scopes_metavar

    # Add
    add_parser = sp.add_parser("add", help=mirror_add.__doc__)
@@ -99,7 +98,7 @@ def setup_parser(subparser):
    add_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
@@ -119,7 +118,7 @@ def setup_parser(subparser):
    remove_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
@@ -138,7 +137,7 @@ def setup_parser(subparser):
    set_url_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
@@ -167,7 +166,7 @@ def setup_parser(subparser):
    set_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
@@ -178,7 +177,7 @@ def setup_parser(subparser):
    list_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_list_scope(),
        help="configuration scope to read from",
    )
@@ -253,12 +252,12 @@ def _configure_mirror(args):


def mirror_set(args):
    """Configure the connection details of a mirror"""
    """configure the connection details of a mirror"""
    _configure_mirror(args)


def mirror_set_url(args):
    """Change the URL of a mirror."""
    """change the URL of a mirror"""
    _configure_mirror(args)


@@ -444,7 +443,7 @@ def mirror_create(args):
    )

    # When no directory is provided, the source dir is used
    path = args.directory or spack.caches.fetch_cache_location()
    path = args.directory or spack.caches.FETCH_CACHE_location()

    if args.all and not ev.active_environment():
        create_mirror_for_all_specs(
@@ -474,7 +473,7 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
        path, skip_unstable_versions=skip_unstable_versions
    )
    for candidate in mirror_specs:
        pkg_cls = spack.repo.path.get_pkg_class(candidate.name)
        pkg_cls = spack.repo.PATH.get_pkg_class(candidate.name)
        pkg_obj = pkg_cls(spack.spec.Spec(candidate))
        mirror_stats.next_spec(pkg_obj.spec)
        spack.mirror.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
@@ -11,6 +11,7 @@
import sys

from llnl.util import filesystem, tty
from llnl.util.tty import color

import spack.cmd
import spack.cmd.common.arguments as arguments
@@ -308,7 +309,7 @@ def refresh(module_type, specs, args):

    # Skip unknown packages.
    writers = [
        cls(spec, args.module_set_name) for spec in specs if spack.repo.path.exists(spec.name)
        cls(spec, args.module_set_name) for spec in specs if spack.repo.PATH.exists(spec.name)
    ]

    # Filter excluded packages early
@@ -320,12 +321,13 @@ def refresh(module_type, specs, args):
            file2writer[item.layout.filename].append(item)

    if len(file2writer) != len(writers):
        spec_fmt_str = "{name}@={version}%{compiler}/{hash:7} {variants} arch={arch}"
        message = "Name clashes detected in module files:\n"
        for filename, writer_list in file2writer.items():
            if len(writer_list) > 1:
                message += "\nfile: {0}\n".format(filename)
                for x in writer_list:
                    message += "spec: {0}\n".format(x.spec.format())
                    message += "spec: {0}\n".format(x.spec.format(spec_fmt_str))
        tty.error(message)
        tty.error("Operation aborted")
        raise SystemExit(1)
@@ -347,14 +349,20 @@ def refresh(module_type, specs, args):
        spack.modules.common.generate_module_index(
            module_type_root, writers, overwrite=args.delete_tree
        )
    errors = []
    for x in writers:
        try:
            x.write(overwrite=True)
        except spack.error.SpackError as e:
            msg = f"{x.layout.filename}: {e.message}"
            errors.append(msg)
        except Exception as e:
            tty.debug(e)
            msg = "Could not write module file [{0}]"
            tty.warn(msg.format(x.layout.filename))
            tty.warn("\t--> {0} <--".format(str(e)))
            msg = f"{x.layout.filename}: {str(e)}"
            errors.append(msg)

    if errors:
        errors.insert(0, color.colorize("@*{some module files could not be written}"))
        tty.warn("\n".join(errors))


#: Dictionary populated with the list of sub-commands.
@@ -368,7 +376,9 @@ def refresh(module_type, specs, args):

def modules_cmd(parser, args, module_type, callbacks=callbacks):
    # Qualifiers to be used when querying the db for specs
    constraint_qualifiers = {"refresh": {"installed": True, "known": True}}
    constraint_qualifiers = {
        "refresh": {"installed": True, "known": lambda x: not spack.repo.PATH.exists(x)}
    }
    query_args = constraint_qualifiers.get(args.subparser_name, {})

    # Get the specs that match the query from the DB
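The refresh hunk replaces per-file warnings with a collect-then-report pattern: failures are gathered while the loop keeps writing the remaining module files, and a single summary warning is emitted at the end. A minimal sketch of the same shape with stand-in filenames and a stand-in failure:

    errors = []
    for filename in ["a.lua", "b.lua"]:  # stand-ins for writer layouts
        try:
            raise RuntimeError("disk full")  # stand-in for x.write(overwrite=True)
        except RuntimeError as e:
            errors.append(f"{filename}: {e}")

    if errors:
        errors.insert(0, "some module files could not be written")
        print("\n".join(errors))  # one warning at the end instead of many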
@@ -7,7 +7,11 @@

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.package_base
import spack.repo
import spack.traverse

description = "patch expanded archive sources in preparation for install"
section = "build"
@@ -21,7 +25,10 @@ def setup_parser(subparser):

def patch(parser, args):
    if not args.specs:
        tty.die("patch requires at least one spec argument")
        env = ev.active_environment()
        if not env:
            tty.die("`spack patch` requires a spec or an active environment")
        return _patch_env(env)

    if args.no_checksum:
        spack.config.set("config:checksum", False, scope="command_line")
@@ -29,6 +36,19 @@ def patch(parser, args):
    if args.deprecated:
        spack.config.set("config:deprecated", True, scope="command_line")

    specs = spack.cmd.parse_specs(args.specs, concretize=True)
    specs = spack.cmd.parse_specs(args.specs, concretize=False)
    for spec in specs:
        spec.package.do_patch()
        _patch(spack.cmd.matching_spec_from_env(spec).package)


def _patch_env(env: ev.Environment):
    tty.msg(f"Patching specs from environment {env.name}")
    for spec in spack.traverse.traverse_nodes(env.concrete_roots()):
        _patch(spec.package)


def _patch(pkg: spack.package_base.PackageBase):
    pkg.stage.keep = True
    with pkg.stage:
        pkg.do_patch()
    tty.msg(f"Patched {pkg.name} in {pkg.stage.path}")
@@ -143,7 +143,7 @@ def pkg_source(args):
        tty.die("spack pkg source requires exactly one spec")

    spec = specs[0]
    filename = spack.repo.path.filename_for_package_name(spec.name)
    filename = spack.repo.PATH.filename_for_package_name(spec.name)

    # regular source dump -- just get the package and print its contents
    if args.canonical:
@@ -184,7 +184,7 @@ def pkg_grep(args, unknown_args):
        grouper = lambda e: e[0] // 500

    # set up iterator and save the first group to ensure we don't end up with a group of size 1
    groups = itertools.groupby(enumerate(spack.repo.path.all_package_paths()), grouper)
    groups = itertools.groupby(enumerate(spack.repo.PATH.all_package_paths()), grouper)
    if not groups:
        return 0  # no packages to search
@@ -24,7 +24,7 @@ def setup_parser(subparser):


def providers(parser, args):
    valid_virtuals = sorted(spack.repo.path.provider_index.providers.keys())
    valid_virtuals = sorted(spack.repo.PATH.provider_index.providers.keys())

    buffer = io.StringIO()
    isatty = sys.stdout.isatty()
@@ -53,5 +53,5 @@ def providers(parser, args):
    for spec in specs:
        if sys.stdout.isatty():
            print("{0}:".format(spec))
        spack.cmd.display_specs(sorted(spack.repo.path.providers_for(spec)))
        spack.cmd.display_specs(sorted(spack.repo.PATH.providers_for(spec)))
        print("")
@@ -11,4 +11,4 @@


def reindex(parser, args):
    spack.store.store.reindex()
    spack.store.STORE.reindex()
@@ -20,7 +20,6 @@
def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="repo_command")
    scopes = spack.config.scopes()
    scopes_metavar = spack.config.scopes_metavar

    # Create
    create_parser = sp.add_parser("create", help=repo_create.__doc__)
@@ -45,7 +44,7 @@ def setup_parser(subparser):
    list_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_list_scope(),
        help="configuration scope to read from",
    )
@@ -56,7 +55,7 @@ def setup_parser(subparser):
    add_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
@@ -69,7 +68,7 @@ def setup_parser(subparser):
    remove_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=scopes_metavar,
        metavar=spack.config.SCOPES_METAVAR,
        default=spack.config.default_modify_scope(),
        help="configuration scope to modify",
    )
@@ -29,7 +29,7 @@ def setup_parser(subparser):

def _show_patch(sha256):
    """Show a record from the patch index."""
    patches = spack.repo.path.patch_index.index
    patches = spack.repo.PATH.patch_index.index
    data = patches.get(sha256)

    if not data:
@@ -47,7 +47,7 @@ def _show_patch(sha256):
            owner = rec["owner"]

            if "relative_path" in rec:
                pkg_dir = spack.repo.path.get_pkg_class(owner).package_dir
                pkg_dir = spack.repo.PATH.get_pkg_class(owner).package_dir
                path = os.path.join(pkg_dir, rec["relative_path"])
                print("  path: %s" % path)
            else:
@@ -60,7 +60,7 @@ def _show_patch(sha256):

def resource_list(args):
    """list all resources known to spack (currently just patches)"""
    patches = spack.repo.path.patch_index.index
    patches = spack.repo.PATH.patch_index.index
    for sha256 in patches:
        if args.only_hashes:
            print(sha256)
@@ -42,7 +42,7 @@ def setup_parser(subparser):
    )

    # Below are arguments w.r.t. spec display (like spack spec)
    arguments.add_common_arguments(subparser, ["long", "very_long"])
    arguments.add_common_arguments(subparser, ["long", "very_long", "namespaces"])

    install_status_group = subparser.add_mutually_exclusive_group()
    arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
@@ -73,13 +73,6 @@ def setup_parser(subparser):
        choices=["nodes", "edges", "paths"],
        help="how extensively to traverse the DAG (default: nodes)",
    )
    subparser.add_argument(
        "-N",
        "--namespaces",
        action="store_true",
        default=False,
        help="show fully qualified package names",
    )
    subparser.add_argument(
        "-t", "--types", action="store_true", default=False, help="show dependency types"
    )

@@ -29,7 +29,7 @@ def setup_parser(subparser):
    for further documentation regarding the spec syntax, see:
        spack help --spec
    """
    arguments.add_common_arguments(subparser, ["long", "very_long"])
    arguments.add_common_arguments(subparser, ["long", "very_long", "namespaces"])

    install_status_group = subparser.add_mutually_exclusive_group()
    arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
@@ -67,13 +67,6 @@ def setup_parser(subparser):
        choices=["nodes", "edges", "paths"],
        help="how extensively to traverse the DAG (default: nodes)",
    )
    subparser.add_argument(
        "-N",
        "--namespaces",
        action="store_true",
        default=False,
        help="show fully qualified package names",
    )
    subparser.add_argument(
        "-t", "--types", action="store_true", default=False, help="show dependency types"
    )
@@ -100,7 +93,7 @@ def spec(parser, args):
    # spec in the DAG. This avoids repeatedly querying the DB.
    tree_context = lang.nullcontext
    if args.install_status:
        tree_context = spack.store.db.read_transaction
        tree_context = spack.store.STORE.db.read_transaction

    # Use command line specified specs, otherwise try to use environment specs.
    if args.specs:
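These two hunks remove the same inline -N/--namespaces definition from spack solve and spack spec and replace it with the shared "namespaces" common argument added earlier in this comparison. A sketch of the deduplication idea; add_namespaces_arg is a stand-in for the @arg factory, not Spack's real API:

    import argparse

    def add_namespaces_arg(parser):
        parser.add_argument(
            "-N", "--namespaces", action="store_true", default=False,
            help="show fully qualified package names",
        )

    spec_parser = argparse.ArgumentParser(prog="spack spec")
    solve_parser = argparse.ArgumentParser(prog="spack solve")
    for p in (spec_parser, solve_parser):
        add_namespaces_arg(p)  # one definition replaces the duplicated blocks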
@@ -9,9 +9,12 @@

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.package_base
import spack.repo
import spack.stage
import spack.traverse

description = "expand downloaded archive in preparation for install"
section = "build"
@@ -27,24 +30,18 @@ def setup_parser(subparser):


def stage(parser, args):
    if not args.specs:
        env = ev.active_environment()
        if env:
            tty.msg("Staging specs from environment %s" % env.name)
            for spec in env.specs_by_hash.values():
                for dep in spec.traverse():
                    dep.package.do_stage()
                    tty.msg("Staged {0} in {1}".format(dep.package.name, dep.package.stage.path))
            return
        else:
            tty.die("`spack stage` requires a spec or an active environment")

    if args.no_checksum:
        spack.config.set("config:checksum", False, scope="command_line")

    if args.deprecated:
        spack.config.set("config:deprecated", True, scope="command_line")

    if not args.specs:
        env = ev.active_environment()
        if not env:
            tty.die("`spack stage` requires a spec or an active environment")
        return _stage_env(env)

    specs = spack.cmd.parse_specs(args.specs, concretize=False)

    # We temporarily modify the working directory when setting up a stage, so we need to
@@ -57,7 +54,24 @@ def stage(parser, args):

    for spec in specs:
        spec = spack.cmd.matching_spec_from_env(spec)
        pkg = spec.package

        if custom_path:
            spec.package.path = custom_path
        spec.package.do_stage()
        tty.msg("Staged {0} in {1}".format(spec.package.name, spec.package.stage.path))
            pkg.path = custom_path

        _stage(pkg)


def _stage_env(env: ev.Environment):
    tty.msg(f"Staging specs from environment {env.name}")
    for spec in spack.traverse.traverse_nodes(env.concrete_roots()):
        _stage(spec.package)


def _stage(pkg: spack.package_base.PackageBase):
    # Use a context manager to ensure we don't restage while an installation is in progress.
    # keep = True ensures that the stage is not removed after exiting the context manager.
    pkg.stage.keep = True
    with pkg.stage:
        pkg.do_stage()
    tty.msg(f"Staged {pkg.name} in {pkg.stage.path}")
@@ -68,7 +68,7 @@ def tags(parser, args):
        return

    # unique list of available tags
    available_tags = sorted(spack.repo.path.tag_index.keys())
    available_tags = sorted(spack.repo.PATH.tag_index.keys())
    if not available_tags:
        tty.msg("No tagged packages")
        return
@@ -174,7 +174,7 @@ def test_run(args):
    specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
    specs_to_test = []
    for spec in specs:
        matching = spack.store.db.query_local(spec, hashes=hashes, explicit=explicit)
        matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
        if spec and not matching:
            tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
            """
@@ -228,7 +228,7 @@ def create_reporter(args, specs_to_test, test_suite):

def test_list(args):
    """list installed packages with available tests"""
    tagged = set(spack.repo.path.packages_with_tags(*args.tag)) if args.tag else set()
    tagged = set(spack.repo.PATH.packages_with_tags(*args.tag)) if args.tag else set()

    def has_test_and_tags(pkg_class):
        tests = spack.install_test.test_functions(pkg_class)
@@ -237,7 +237,7 @@ def has_test_and_tags(pkg_class):
    if args.list_all:
        report_packages = [
            pkg_class.name
            for pkg_class in spack.repo.path.all_package_classes()
            for pkg_class in spack.repo.PATH.all_package_classes()
            if has_test_and_tags(pkg_class)
        ]

@@ -252,7 +252,7 @@ def has_test_and_tags(pkg_class):
    env = ev.active_environment()
    hashes = env.all_hashes() if env else None

    specs = spack.store.db.query(hashes=hashes)
    specs = spack.store.STORE.db.query(hashes=hashes)
    specs = list(filter(lambda s: has_test_and_tags(s.package_class), specs))

    spack.cmd.display_specs(specs, long=True)
@@ -329,7 +329,7 @@ def _report_suite_results(test_suite, args, constraints):
        qspecs = spack.cmd.parse_specs(constraints)
        specs = {}
        for spec in qspecs:
            for s in spack.store.db.query(spec, installed=True):
            for s in spack.store.STORE.db.query(spec, installed=True):
                specs[s.dag_hash()] = s
        specs = sorted(specs.values())
        test_specs = dict((test_suite.test_pkg_id(s), s) for s in test_suite.specs if s in specs)
@@ -103,7 +103,7 @@ def find_matching_specs(
    has_errors = False
    for spec in specs:
        install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
        matching = spack.store.db.query_local(
        matching = spack.store.STORE.db.query_local(
            spec, hashes=hashes, installed=install_query, origin=origin
        )
        # For each spec provided, make sure it refers to only one package.
@@ -139,7 +139,7 @@ def installed_dependents(specs: List[spack.spec.Spec]) -> List[spack.spec.Spec]:
    # input; in that case we return an empty list.

    def is_installed(spec):
        record = spack.store.db.query_local_by_spec_hash(spec.dag_hash())
        record = spack.store.STORE.db.query_local_by_spec_hash(spec.dag_hash())
        return record and record.installed

    specs = traverse.traverse_nodes(
@@ -209,12 +209,11 @@ def unit_test(parser, args, unknown_args):
    # mock configuration used by unit tests
    # Note: skip on windows here because for the moment,
    # clingo is wholly unsupported from bootstrap
    if sys.platform != "win32":
        with spack.bootstrap.ensure_bootstrap_configuration():
            spack.bootstrap.ensure_core_dependencies()
            if pytest is None:
                spack.bootstrap.ensure_environment_dependencies()
                import pytest
    with spack.bootstrap.ensure_bootstrap_configuration():
        spack.bootstrap.ensure_core_dependencies()
        if pytest is None:
            spack.bootstrap.ensure_environment_dependencies()
            import pytest

    if args.pytest_help:
        # make the pytest.main help output more accurate
@@ -71,7 +71,7 @@ def unload(parser, args):
            for spec in spack.cmd.parse_specs(args.specs)
        ]
    else:
        specs = spack.store.db.query(hashes=hashes)
        specs = spack.store.STORE.db.query(hashes=hashes)

    if not args.shell:
        specs_str = " ".join(args.specs) or "SPECS"
@@ -155,7 +155,7 @@ def url_list(args):
    urls = set()

    # Gather set of URLs from all packages
    for pkg_cls in spack.repo.path.all_package_classes():
    for pkg_cls in spack.repo.PATH.all_package_classes():
        url = getattr(pkg_cls, "url", None)
        urls = url_list_parsing(args, urls, url, pkg_cls)

@@ -192,7 +192,7 @@ def url_summary(args):
    tty.msg("Generating a summary of URL parsing in Spack...")

    # Loop through all packages
    for pkg_cls in spack.repo.path.all_package_classes():
    for pkg_cls in spack.repo.PATH.all_package_classes():
        urls = set()
        pkg = pkg_cls(spack.spec.Spec(pkg_cls.name))

@@ -336,7 +336,7 @@ def add(self, pkg_name, fetcher):
    version_stats = UrlStats()
    resource_stats = UrlStats()

    for pkg_cls in spack.repo.path.all_package_classes():
    for pkg_cls in spack.repo.PATH.all_package_classes():
        npkgs += 1

        for v in pkg_cls.versions:
@@ -71,7 +71,7 @@ def verify(parser, args):
    spec_args = spack.cmd.parse_specs(args.specs_or_files)

    if args.all:
        query = spack.store.db.query_local if local else spack.store.db.query
        query = spack.store.STORE.db.query_local if local else spack.store.STORE.db.query

    # construct spec list
    if spec_args:
@@ -45,7 +45,7 @@ def setup_parser(subparser):

def versions(parser, args):
    spec = spack.spec.Spec(args.package)
    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
    pkg = pkg_cls(spec)

    safe_versions = pkg.versions
@@ -70,7 +70,7 @@ def squash(matching_specs):
        return matching_in_view[0] if matching_in_view else matching_specs[0]

    # make the function always return a list to keep consistency between py2/3
    return list(map(squash, map(spack.store.db.query, specs)))
    return list(map(squash, map(spack.store.STORE.db.query, specs)))


def setup_parser(sp):
@@ -200,7 +200,7 @@ def view(parser, args):

    view = YamlFilesystemView(
        path,
        spack.store.layout,
        spack.store.STORE.layout,
        projections=ordered_projections,
        ignore_conflicts=getattr(args, "ignore_conflicts", False),
        link=link_fn,
@@ -135,7 +135,7 @@ def _init_compiler_config(*, scope):

def compiler_config_files():
    config_files = list()
    config = spack.config.config
    config = spack.config.CONFIG
    for scope in config.file_scopes:
        name = scope.name
        compiler_config = config.get("compilers", scope=name)
@@ -169,7 +169,7 @@ def remove_compiler_from_config(compiler_spec, scope=None):
    """
    candidate_scopes = [scope]
    if scope is None:
        candidate_scopes = spack.config.config.scopes.keys()
        candidate_scopes = spack.config.CONFIG.scopes.keys()

    removal_happened = False
    for current_scope in candidate_scopes:
@@ -523,7 +523,7 @@ def compiler_for_spec(compiler_spec, arch_spec):

@_auto_compiler_spec
def get_compiler_duplicates(compiler_spec, arch_spec):
    config = spack.config.config
    config = spack.config.CONFIG

    scope_to_compilers = {}
    for scope in config.scopes:
@@ -2,13 +2,9 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os.path
 import re
-import shutil

 import llnl.util.lang
-import llnl.util.tty as tty
-from llnl.util.symlink import symlink

 import spack.compiler
 import spack.compilers.clang

@@ -119,108 +115,3 @@ def c23_flag(self):
                 self, "the C23 standard", "c23_flag", "< 11.0.3"
             )
         return "-std=c2x"
-
-    def setup_custom_environment(self, pkg, env):
-        """Set the DEVELOPER_DIR environment for the Xcode toolchain.
-
-        On macOS, not all buildsystems support querying CC and CXX for the
-        compilers to use and instead query the Xcode toolchain for what
-        compiler to run. This side-steps the spack wrappers. In order to inject
-        spack into this setup, we need to copy (a subset of) Xcode.app and
-        replace the compiler executables with symlinks to the spack wrapper.
-        Currently, the stage is used to store the Xcode.app copies. We then set
-        the 'DEVELOPER_DIR' environment variables to cause the xcrun and
-        related tools to use this Xcode.app.
-        """
-        super().setup_custom_environment(pkg, env)
-
-        if not pkg.use_xcode:
-            # if we do it for all packages, we get into big troubles with MPI:
-            # filter_compilers(self) will use mockup XCode compilers on macOS
-            # with Clang. Those point to Spack's compiler wrappers and
-            # consequently render MPI non-functional outside of Spack.
-            return
-
-        # Use special XCode versions of compiler wrappers when using XCode
-        # Overwrites build_environment's setting of SPACK_CC and SPACK_CXX
-        xcrun = spack.util.executable.Executable("xcrun")
-        xcode_clang = xcrun("-f", "clang", output=str).strip()
-        xcode_clangpp = xcrun("-f", "clang++", output=str).strip()
-        env.set("SPACK_CC", xcode_clang, force=True)
-        env.set("SPACK_CXX", xcode_clangpp, force=True)
-
-        xcode_select = spack.util.executable.Executable("xcode-select")
-
-        # Get the path of the active developer directory
-        real_root = xcode_select("--print-path", output=str).strip()
-
-        # The path name can be used to determine whether the full Xcode suite
-        # or just the command-line tools are installed
-        if real_root.endswith("Developer"):
-            # The full Xcode suite is installed
-            pass
-        else:
-            if real_root.endswith("CommandLineTools"):
-                # Only the command-line tools are installed
-                msg = "It appears that you have the Xcode command-line tools "
-                msg += "but not the full Xcode suite installed.\n"
-
-            else:
-                # Xcode is not installed
-                msg = "It appears that you do not have Xcode installed.\n"
-
-            msg += "In order to use Spack to build the requested application, "
-            msg += "you need the full Xcode suite. It can be installed "
-            msg += "through the App Store. Make sure you launch the "
-            msg += "application and accept the license agreement.\n"
-
-            raise OSError(msg)
-
-        real_root = os.path.dirname(os.path.dirname(real_root))
-        developer_root = os.path.join(
-            spack.stage.get_stage_root(), "xcode-select", self.name, str(self.version)
-        )
-        xcode_link = os.path.join(developer_root, "Xcode.app")
-
-        if not os.path.exists(developer_root):
-            tty.warn(
-                "Copying Xcode from %s to %s in order to add spack "
-                "wrappers to it. Please do not interrupt." % (real_root, developer_root)
-            )
-
-            # We need to make a new Xcode.app instance, but with symlinks to
-            # the spack wrappers for the compilers it ships. This is necessary
-            # because some projects insist on just asking xcrun and related
-            # tools where the compiler runs. These tools are very hard to trick
-            # as they do realpath and end up ignoring the symlinks in a
-            # "softer" tree of nothing but symlinks in the right places.
-            shutil.copytree(
-                real_root,
-                developer_root,
-                symlinks=True,
-                ignore=shutil.ignore_patterns(
-                    "AppleTV*.platform",
-                    "Watch*.platform",
-                    "iPhone*.platform",
-                    "Documentation",
-                    "swift*",
-                ),
-            )
-
-        real_dirs = ["Toolchains/XcodeDefault.xctoolchain/usr/bin", "usr/bin"]
-
-        bins = ["c++", "c89", "c99", "cc", "clang", "clang++", "cpp"]
-
-        for real_dir in real_dirs:
-            dev_dir = os.path.join(developer_root, "Contents", "Developer", real_dir)
-            for fname in os.listdir(dev_dir):
-                if fname in bins:
-                    os.unlink(os.path.join(dev_dir, fname))
-                    symlink(
-                        os.path.join(spack.paths.build_env_path, "cc"),
-                        os.path.join(dev_dir, fname),
-                    )
-
-        symlink(developer_root, xcode_link)
-
-        env.set("DEVELOPER_DIR", xcode_link)
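The deleted method worked by copying a trimmed Xcode.app into the stage, replacing its compiler binaries with symlinks to Spack's wrappers, and pointing DEVELOPER_DIR at the copy so that xcrun resolved tools there. The environment mechanism it relied on can be exercised in isolation (macOS only; the DEVELOPER_DIR path below is illustrative):

    import os
    import subprocess

    # xcrun consults DEVELOPER_DIR when locating toolchain binaries, which is
    # what let the removed code interpose Spack's compiler wrappers.
    env = dict(os.environ, DEVELOPER_DIR="/tmp/stage/xcode-select/Xcode.app")
    clang_path = subprocess.run(
        ["xcrun", "-f", "clang"], env=env, capture_output=True, text=True
    ).stdout.strip()
    print(clang_path)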
@@ -4,8 +4,11 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import os
 import re
 from typing import List

+import llnl.util.lang
+
 import spack.compiler

@@ -32,7 +35,13 @@ class Nag(spack.compiler.Compiler):
     }

     version_argument = "-V"
-    version_regex = r"NAG Fortran Compiler Release ([0-9.]+)"
+
+    @classmethod
+    @llnl.util.lang.memoized
+    def extract_version_from_output(cls, output):
+        match = re.search(r"NAG Fortran Compiler Release (\d+).(\d+)\(.*\) Build (\d+)", output)
+        if match:
+            return ".".join(match.groups())

     @property
     def verbose_flag(self):
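The new classmethod folds the build number into the reported version, which the old single-group version_regex could not express. The regex can be checked standalone; the banner string below is illustrative, not taken from real nagfor output:

    import re

    # Same pattern as the new Nag.extract_version_from_output
    pattern = r"NAG Fortran Compiler Release (\d+).(\d+)\(.*\) Build (\d+)"
    banner = "NAG Fortran Compiler Release 7.1(Hanzomon) Build 7114"

    match = re.search(pattern, banner)
    if match:
        print(".".join(match.groups()))  # -> 7.1.7114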
@@ -28,6 +28,7 @@

 import spack.abi
 import spack.compilers
+import spack.config
 import spack.environment
 import spack.error
 import spack.platforms

@@ -37,7 +38,6 @@
 import spack.tengine
 import spack.util.path
 import spack.variant as vt
-from spack.config import config
 from spack.package_prefs import PackagePrefs, is_spec_buildable, spec_externals
 from spack.version import ClosedOpenRange, VersionList, ver

@@ -76,7 +76,7 @@ class Concretizer:

     def __init__(self, abstract_spec=None):
         if Concretizer.check_for_compiler_existence is None:
-            Concretizer.check_for_compiler_existence = not config.get(
+            Concretizer.check_for_compiler_existence = not spack.config.get(
                 "config:install_missing_compilers", False
             )
         self.abstract_spec = abstract_spec

@@ -113,7 +113,7 @@ def _valid_virtuals_and_externals(self, spec):
         pref_key = lambda spec: 0  # no-op pref key

         if spec.virtual:
-            candidates = spack.repo.path.providers_for(spec)
+            candidates = spack.repo.PATH.providers_for(spec)
             if not candidates:
                 raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)
@@ -47,6 +47,8 @@
 import spack.platforms
 import spack.schema
+import spack.schema.bootstrap
 import spack.schema.cdash
+import spack.schema.ci
 import spack.schema.compilers
 import spack.schema.concretizer
 import spack.schema.config
@@ -64,7 +66,7 @@
 from spack.util.cpus import cpus_available

 #: Dict from section names -> schema for that section
-section_schemas = {
+SECTION_SCHEMAS = {
     "compilers": spack.schema.compilers.schema,
     "concretizer": spack.schema.concretizer.schema,
     "mirrors": spack.schema.mirrors.schema,
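Only the name changes; SECTION_SCHEMAS still maps a section name to its jsonschema document, and every validate() call in this file looks the schema up by that key. A toy version of the lookup-then-validate pattern (the schema here is made up, not Spack's real config schema):

    import jsonschema  # Spack vendors this library

    SECTION_SCHEMAS = {
        "config": {"type": "object", "properties": {"debug": {"type": "boolean"}}}
    }

    def validate_section(section, data):
        # Mirrors ConfigScope.get_section: pick the schema by section name.
        jsonschema.validate(data, SECTION_SCHEMAS[section])

    validate_section("config", {"debug": False})  # passes silently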
@@ -80,16 +82,16 @@

 # Same as above, but including keys for environments
 # this allows us to unify config reading between configs and environments
-all_schemas = copy.deepcopy(section_schemas)
-all_schemas.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
+_ALL_SCHEMAS = copy.deepcopy(SECTION_SCHEMAS)
+_ALL_SCHEMAS.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})

 #: Path to the default configuration
-configuration_defaults_path = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
+CONFIGURATION_DEFAULTS_PATH = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))

 #: Hard-coded default values for some key configuration options.
 #: This ensures that Spack will still work even if config.yaml in
 #: the defaults scope is removed.
-config_defaults = {
+CONFIG_DEFAULTS = {
     "config": {
         "debug": False,
         "connect_timeout": 10,
@@ -105,10 +107,10 @@

 #: metavar to use for commands that accept scopes
 #: this is shorter and more readable than listing all choices
-scopes_metavar = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
+SCOPES_METAVAR = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"

 #: Base name for the (internal) overrides scope.
-overrides_base_name = "overrides-"
+_OVERRIDES_BASE_NAME = "overrides-"


 class ConfigScope:
@@ -134,7 +136,7 @@ def get_section_filename(self, section):
     def get_section(self, section):
         if section not in self.sections:
             path = self.get_section_filename(section)
-            schema = section_schemas[section]
+            schema = SECTION_SCHEMAS[section]
             data = read_config_file(path, schema)
             self.sections[section] = data
         return self.sections[section]

@@ -145,7 +147,7 @@ def _write_section(self, section):

         # We copy data here to avoid adding defaults at write time
         validate_data = copy.deepcopy(data)
-        validate(validate_data, section_schemas[section])
+        validate(validate_data, SECTION_SCHEMAS[section])

         try:
             mkdirp(self.path)

@@ -317,7 +319,7 @@ def __init__(self, name, data=None):
             data = InternalConfigScope._process_dict_keyname_overrides(data)
             for section in data:
                 dsec = data[section]
-                validate({section: dsec}, section_schemas[section])
+                validate({section: dsec}, SECTION_SCHEMAS[section])
                 self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)

     def get_section_filename(self, section):

@@ -333,7 +335,7 @@ def _write_section(self, section):
        """This only validates, as the data is already in memory."""
         data = self.get_section(section)
         if data is not None:
-            validate(data, section_schemas[section])
+            validate(data, SECTION_SCHEMAS[section])
             self.sections[section] = _mark_internal(data, self.name)

     def __repr__(self):

@@ -430,7 +432,7 @@ def file_scopes(self) -> List[ConfigScope]:
         return [
             s
             for s in self.scopes.values()
-            if (type(s) == ConfigScope or type(s) == SingleFileScope)
+            if (type(s) is ConfigScope or type(s) is SingleFileScope)
         ]

     def highest_precedence_scope(self) -> ConfigScope:
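The last hunk is a flake8 E721 fix rather than a rename: comparing classes with `is` is an identity check against the exact type, while `==` can be intercepted by a custom metaclass __eq__. The intent stays "exactly this class, not a subclass":

    class Base:
        pass

    class Child(Base):
        pass

    s = Child()
    print(type(s) is Child)     # True: identity against the exact class
    print(type(s) is Base)      # False: subclass instances do not match
    print(isinstance(s, Base))  # True: isinstance also accepts subclasses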
@@ -711,11 +713,11 @@ def override(path_or_scope, value=None):
    """
     if isinstance(path_or_scope, ConfigScope):
         overrides = path_or_scope
-        config.push_scope(path_or_scope)
+        CONFIG.push_scope(path_or_scope)
     else:
-        base_name = overrides_base_name
+        base_name = _OVERRIDES_BASE_NAME
         # Ensure the new override gets a unique scope name
-        current_overrides = [s.name for s in config.matching_scopes(r"^{0}".format(base_name))]
+        current_overrides = [s.name for s in CONFIG.matching_scopes(r"^{0}".format(base_name))]
         num_overrides = len(current_overrides)
         while True:
             scope_name = "{0}{1}".format(base_name, num_overrides)

@@ -725,19 +727,19 @@ def override(path_or_scope, value=None):
                 break

         overrides = InternalConfigScope(scope_name)
-        config.push_scope(overrides)
-        config.set(path_or_scope, value, scope=scope_name)
+        CONFIG.push_scope(overrides)
+        CONFIG.set(path_or_scope, value, scope=scope_name)

     try:
-        yield config
+        yield CONFIG
     finally:
-        scope = config.remove_scope(overrides.name)
+        scope = CONFIG.remove_scope(overrides.name)
         assert scope is overrides


 #: configuration scopes added on the command line
 #: set by ``spack.main.main()``.
-command_line_scopes: List[str] = []
+COMMAND_LINE_SCOPES: List[str] = []


 def _add_platform_scope(cfg, scope_type, name, path):
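override is a context manager, so only the Configuration that the temporary overrides-N scope gets pushed onto changes; call sites keep the same shape. Typical usage (assumes a Spack checkout):

    import spack.config

    # The override scope is popped and asserted on exit, even on error.
    with spack.config.override("config:debug", True):
        assert spack.config.get("config:debug") is True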
@@ -767,7 +769,7 @@ def _add_command_line_scopes(cfg, command_line_scopes):
         _add_platform_scope(cfg, ImmutableConfigScope, name, path)


-def _config():
+def create():
    """Singleton Configuration instance.

     This constructs one instance associated with this module and returns
@@ -781,14 +783,14 @@ def _config():
     cfg = Configuration()

     # first do the builtin, hardcoded defaults
-    builtin = InternalConfigScope("_builtin", config_defaults)
+    builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
     cfg.push_scope(builtin)

     # Builtin paths to configuration files in Spack
     configuration_paths = [
         # Default configuration scope is the lowest-level scope. These are
         # versioned with Spack and can be overridden by systems, sites or users
-        configuration_defaults_path
+        CONFIGURATION_DEFAULTS_PATH
     ]

     disable_local_config = "SPACK_DISABLE_LOCAL_CONFIG" in os.environ
@@ -815,7 +817,7 @@ def _config():
         _add_platform_scope(cfg, ConfigScope, name, path)

     # add command-line scopes
-    _add_command_line_scopes(cfg, command_line_scopes)
+    _add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)

     # we make a special scope for spack commands so that they can
     # override configuration options.
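create() (formerly _config()) assembles the singleton by pushing scopes from lowest to highest precedence, ending with the command-line scopes that win over everything else. The precedence model is simply "last pushed wins"; a toy illustration, not Spack's actual Configuration class:

    class ToyConfiguration:
        def __init__(self):
            self.scopes = []  # lowest precedence first

        def push_scope(self, name, data):
            self.scopes.append((name, data))

        def get(self, key, default=None):
            value = default
            for _, data in self.scopes:  # later scopes override earlier ones
                if key in data:
                    value = data[key]
            return value

    cfg = ToyConfiguration()
    cfg.push_scope("defaults", {"debug": False})
    cfg.push_scope("command_line", {"debug": True})
    assert cfg.get("debug") is True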
@@ -825,7 +827,7 @@


 #: This is the singleton configuration instance for Spack.
-config: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_config)
+CONFIG: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)


 def add_from_file(filename, scope=None):
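llnl.util.lang.Singleton is a lazy proxy: nothing is constructed at import time, and the factory (now the public create()) runs on first attribute access. A minimal sketch of the idea, not Spack's actual implementation (which also forwards special methods):

    class LazySingleton:
        """Build the wrapped object on first use, then forward attribute access."""

        def __init__(self, factory):
            self._factory = factory
            self._instance = None

        def __getattr__(self, name):  # called only for attributes not found here
            if self._instance is None:
                self._instance = self._factory()
            return getattr(self._instance, name)

    # CONFIG-style usage: the factory runs on the first CONFIG.<attr> access.
    CONFIG = LazySingleton(dict)
    CONFIG.update({"config": {"debug": False}})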
@@ -838,7 +840,7 @@ def add_from_file(filename, scope=None):
     # update all sections from config dict
     # We have to iterate on keys to keep overrides from the file
     for section in data.keys():
-        if section in section_schemas.keys():
+        if section in SECTION_SCHEMAS.keys():
             # Special handling for compiler scope difference
             # Has to be handled after we choose a section
             if scope is None:

@@ -849,7 +851,7 @@
             new = merge_yaml(existing, value)

             # We cannot call config.set directly (set is a type)
-            config.set(section, new, scope)
+            CONFIG.set(section, new, scope)


 def add(fullpath, scope=None):

@@ -897,12 +899,12 @@ def add(fullpath, scope=None):

     # merge value into existing
     new = merge_yaml(existing, value)
-    config.set(path, new, scope)
+    CONFIG.set(path, new, scope)


 def get(path, default=None, scope=None):
    """Module-level wrapper for ``Configuration.get()``."""
-    return config.get(path, default, scope)
+    return CONFIG.get(path, default, scope)


 def set(path, value, scope=None):

@@ -910,26 +912,26 @@ def set(path, value, scope=None):

     Accepts the path syntax described in ``get()``.
    """
-    return config.set(path, value, scope)
+    return CONFIG.set(path, value, scope)


 def add_default_platform_scope(platform):
     plat_name = os.path.join("defaults", platform)
-    plat_path = os.path.join(configuration_defaults_path[1], platform)
-    config.push_scope(ConfigScope(plat_name, plat_path))
+    plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
+    CONFIG.push_scope(ConfigScope(plat_name, plat_path))


 def scopes():
    """Convenience function to get list of configuration scopes."""
-    return config.scopes
+    return CONFIG.scopes


 def _validate_section_name(section):
    """Exit if the section is not a valid section."""
-    if section not in section_schemas:
+    if section not in SECTION_SCHEMAS:
         raise ConfigSectionError(
             "Invalid config section: '%s'. Options are: %s"
-            % (section, " ".join(section_schemas.keys()))
+            % (section, " ".join(SECTION_SCHEMAS.keys()))
         )

@@ -990,7 +992,7 @@ def read_config_file(filename, schema=None):
     if data:
         if not schema:
             key = next(iter(data))
-            schema = all_schemas[key]
+            schema = _ALL_SCHEMAS[key]
         validate(data, schema)
     return data

@@ -1089,7 +1091,7 @@ def get_valid_type(path):
         test_data = {component: test_data}

     try:
-        validate(test_data, section_schemas[section])
+        validate(test_data, SECTION_SCHEMAS[section])
     except (ConfigFormatError, AttributeError) as e:
         jsonschema_error = e.validation_error
         if jsonschema_error.validator == "type":

@@ -1278,9 +1280,9 @@ def default_modify_scope(section="config"):
     If this is not 'compilers', a general (non-platform) scope is used.
    """
     if section == "compilers":
-        return spack.config.config.highest_precedence_scope().name
+        return CONFIG.highest_precedence_scope().name
     else:
-        return spack.config.config.highest_precedence_non_platform_scope().name
+        return CONFIG.highest_precedence_non_platform_scope().name


 def default_list_scope():
@@ -1337,18 +1339,18 @@ def use_configuration(*scopes_or_paths):
     Returns:
         Configuration object associated with the scopes passed as arguments
    """
-    global config
+    global CONFIG

     # Normalize input and construct a Configuration object
     configuration = _config_from(scopes_or_paths)
-    config.clear_caches(), configuration.clear_caches()
+    CONFIG.clear_caches(), configuration.clear_caches()

-    saved_config, config = config, configuration
+    saved_config, CONFIG = CONFIG, configuration

     try:
         yield configuration
     finally:
-        config = saved_config
+        CONFIG = saved_config


 @llnl.util.lang.memoized
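Because use_configuration rebinds the module-level global, code that cached the old object in a local variable keeps seeing it; only access through spack.config picks up the swap. Sketch of use (assumes a Spack checkout; the directory must exist and its path is illustrative):

    import spack.config

    # Scopes may be ConfigScope objects or plain directory paths.
    with spack.config.use_configuration("/tmp/spack-test-config") as cfg:
        # Inside the block the module-level CONFIG is rebound to `cfg`.
        print(cfg.get("config:debug", default=False))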
Some files were not shown because too many files have changed in this diff.