Compare commits
533 commits: develop-20...develop-20
(Commit table: 533 rows of abbreviated SHA1 hashes, from 783bbdf2db through a8301709a8; the Author and Date columns were empty in the capture.)
.github/workflows/audit.yaml (2 changes, vendored)

@@ -22,7 +22,7 @@ jobs:
       matrix:
         operating_system: ["ubuntu-latest", "macos-latest"]
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
         with:
           python-version: ${{inputs.python_version}}
.github/workflows/bootstrap.yml (22 changes, vendored)

@@ -24,7 +24,7 @@ jobs:
             make patch unzip which xz python3 python3-devel tree \
             cmake bison bison-devel libstdc++-static
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
        with:
          fetch-depth: 0
      - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
            make patch unzip xz-utils python3 python3-dev tree \
            cmake bison
      - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
        with:
          fetch-depth: 0
      - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
            bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
            make patch unzip xz-utils python3 python3-dev tree
      - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
        with:
          fetch-depth: 0
      - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
            make patch unzip which xz python3 python3-devel tree \
            cmake bison
      - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
        with:
          fetch-depth: 0
      - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
        run: |
          brew install cmake bison@2.7 tree
      - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      - name: Bootstrap clingo
        run: |
          source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
        run: |
          brew install tree
      - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      - name: Bootstrap clingo
        run: |
          set -ex
@@ -204,7 +204,7 @@ jobs:
    runs-on: ubuntu-20.04
    steps:
      - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
        with:
          fetch-depth: 0
      - name: Setup repo
@@ -247,7 +247,7 @@ jobs:
            bzip2 curl file g++ gcc patchelf gfortran git gzip \
            make patch unzip xz-utils python3 python3-dev tree
      - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
        with:
          fetch-depth: 0
      - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
            make patch unzip xz-utils python3 python3-dev tree \
            gawk
      - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
        with:
          fetch-depth: 0
      - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
          # Remove GnuPG since we want to bootstrap it
          sudo rm -rf /usr/local/bin/gpg
      - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      - name: Bootstrap GnuPG
        run: |
          source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
          # Remove GnuPG since we want to bootstrap it
          sudo rm -rf /usr/local/bin/gpg
      - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      - name: Bootstrap GnuPG
        run: |
          source share/spack/setup-env.sh
.github/workflows/build-containers.yml (6 changes, vendored)

@@ -56,7 +56,7 @@ jobs:
    if: github.repository == 'spack/spack'
    steps:
      - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2

      - name: Set Container Tag Normal (Nightly)
        run: |
@@ -86,7 +86,7 @@ jobs:
          fi

      - name: Upload Dockerfile
-        uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
+        uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
        with:
          name: dockerfiles
          path: dockerfiles
@@ -95,7 +95,7 @@ jobs:
        uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1

      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@4c0219f9ac95b02789c1075625400b2acbff50b1 # @v1
+        uses: docker/setup-buildx-action@885d1462b80bc1c1c7f0b00334ad271f09369c55 # @v1

      - name: Log in to GitHub Container Registry
        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
.github/workflows/ci.yaml (2 changes, vendored)

@@ -35,7 +35,7 @@ jobs:
      core: ${{ steps.filter.outputs.core }}
      packages: ${{ steps.filter.outputs.packages }}
    steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
.github/workflows/nightly-win-builds.yml (2 changes, vendored)

@@ -14,7 +14,7 @@ jobs:
  build-paraview-deps:
    runs-on: windows-latest
    steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
        with:
          fetch-depth: 0
      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
.github/workflows/unit_tests.yaml (11 changes, vendored)

@@ -47,7 +47,7 @@ jobs:
            on_develop: false

    steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -94,7 +94,7 @@ jobs:
  shell:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -133,7 +133,7 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
      - name: Setup repo and non-root user
        run: |
          git --version
@@ -152,7 +152,7 @@ jobs:
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -165,6 +165,7 @@ jobs:
      - name: Install Python packages
        run: |
          pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
+          pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
@@ -186,7 +187,7 @@ jobs:
      matrix:
        python-version: ["3.10"]
    steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
.github/workflows/valid-style.yml (6 changes, vendored)

@@ -18,7 +18,7 @@ jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
        with:
          python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
  style:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -68,7 +68,7 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # @v2
      - name: Setup repo and non-root user
        run: |
          git --version
.github/workflows/windows_python.yml (9 changes, vendored)

@@ -15,7 +15,7 @@ jobs:
  unit-tests:
    runs-on: windows-latest
    steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
        with:
          fetch-depth: 0
      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -39,7 +39,7 @@ jobs:
  unit-tests-cmd:
    runs-on: windows-latest
    steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
        with:
          fetch-depth: 0
      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -63,7 +63,7 @@ jobs:
  build-abseil:
    runs-on: windows-latest
    steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
        with:
          fetch-depth: 0
      - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -75,6 +75,5 @@ jobs:
      - name: Build Test
        run: |
          spack compiler find
-          spack external find cmake
-          spack external find ninja
+          spack -d external find cmake ninja
          spack -d install abseil-cpp
SECURITY.md (32 changes)

@@ -2,24 +2,26 @@

 ## Supported Versions

-We provide security updates for the following releases.
+We provide security updates for `develop` and for the last two
+stable (`0.x`) release series of Spack. Security updates will be
+made available as patch (`0.x.1`, `0.x.2`, etc.) releases.

-| Version | Supported |
-| ------- | ------------------ |
-| develop | :white_check_mark: |
-| 0.19.x | :white_check_mark: |
-| 0.18.x | :white_check_mark: |
+For more on Spack's release structure, see
+[`README.md`](https://github.com/spack/spack#releases).

 ## Reporting a Vulnerability

-To report a vulnerability or other security
-issue, email maintainers@spack.io.
+You can report a vulnerability using GitHub's private reporting
+feature:

-You can expect to hear back within two days.
-If your security issue is accepted, we will do
-our best to release a fix within a week. If
-fixing the issue will take longer than this,
-we will discuss timeline options with you.
+1. Go to [github.com/spack/spack/security](https://github.com/spack/spack/security).
+2. Click "Report a vulnerability" in the upper right corner of that page.
+3. Fill out the form and submit your draft security advisory.
+
+More details are available in
+[GitHub's docs](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability).
+
+You can expect to hear back about security issues within two days.
+If your security issue is accepted, we will do our best to release
+a fix within a week. If fixing the issue will take longer than
+this, we will discuss timeline options with you.
@@ -14,7 +14,7 @@
 ::
 @echo off

-set spack=%SPACK_ROOT%\bin\spack
+set spack="%SPACK_ROOT%"\bin\spack

 ::#######################################################################
 :: This is a wrapper around the spack command that forwards calls to
@@ -51,65 +51,43 @@ setlocal enabledelayedexpansion
 :: subcommands will never start with '-'
 :: everything after the subcommand is an arg

 :: we cannot allow batch "for" loop to directly process CL args
 :: a number of batch reserved characters are commonly passed to
 :: spack and allowing batch's "for" method to process the raw inputs
 :: results in a large number of formatting issues
 :: instead, treat the entire CLI as one string
 :: and split by space manually
 :: capture cl args in variable named cl_args
 set cl_args=%*

 :process_cl_args
-rem tokens=1* returns the first processed token produced
-rem by tokenizing the input string cl_args on spaces into
-rem the named variable %%g
-rem While this make look like a for loop, it only
-rem executes a single time for each of the cl args
-rem the actual iterative loop is performed by the
-rem goto process_cl_args stanza
-rem we are simply leveraging the "for" method's string
-rem tokenization
-for /f "tokens=1*" %%g in ("%cl_args%") do (
-   set t=%%~g
-   rem remainder of string is composed into %%h
-   rem these are the cl args yet to be processed
-   rem assign cl_args var to only the args to be processed
-   rem effectively discarding the current arg %%g
-   rem this will be nul when we have no further tokens to process
-   set cl_args=%%h
-   rem process the first space delineated cl arg
-   rem of this iteration
-   if "!t:~0,1!" == "-" (
-      if defined _sp_subcommand (
-         rem We already have a subcommand, processing args now
-         if not defined _sp_args (
-            set "_sp_args=!t!"
-         ) else (
-            set "_sp_args=!_sp_args! !t!"
-         )
-      ) else (
-         if not defined _sp_flags (
-            set "_sp_flags=!t!"
-            shift
-         ) else (
-            set "_sp_flags=!_sp_flags! !t!"
-            shift
-         )
-      )
-   ) else if not defined _sp_subcommand (
-      set "_sp_subcommand=!t!"
-      shift
-   ) else (
-      if not defined _sp_args (
-         set "_sp_args=!t!"
-         shift
-      ) else (
-         set "_sp_args=!_sp_args! !t!"
-         shift
-      )
-   )
-)
+rem Set first cl argument (denoted by %1) to be processed
+set t=%1
+rem shift moves all cl positional arguments left by one
+rem meaning %2 is now %1, this allows us to iterate over each
+rem argument
+shift
+rem assign next "first" cl argument to cl_args, will be null when
+rem there are now further arguments to process
+set cl_args=%1
+if "!t:~0,1!" == "-" (
+   if defined _sp_subcommand (
+      rem We already have a subcommand, processing args now
+      if not defined _sp_args (
+         set "_sp_args=!t!"
+      ) else (
+         set "_sp_args=!_sp_args! !t!"
+      )
+   ) else (
+      if not defined _sp_flags (
+         set "_sp_flags=!t!"
+      ) else (
+         set "_sp_flags=!_sp_flags! !t!"
+      )
+   )
+) else if not defined _sp_subcommand (
+   set "_sp_subcommand=!t!"
+) else (
+   if not defined _sp_args (
+      set "_sp_args=!t!"
+   ) else (
+      set "_sp_args=!_sp_args! !t!"
+   )
+)
-rem if this is not nil, we have more tokens to process
+rem if this is not nu;ll, we have more tokens to process
 rem start above process again with remaining unprocessed cl args
 if defined cl_args goto :process_cl_args
@@ -39,12 +39,26 @@ function Read-SpackArgs {
     return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
 }

+function Set-SpackEnv {
+    # This method is responsible
+    # for processing the return from $(spack <command>)
+    # which are returned as System.Object[]'s containing
+    # a list of env commands
+    # Invoke-Expression can only handle one command at a time
+    # so we iterate over the list to invoke the env modification
+    # expressions one at a time
+    foreach($envop in $args[0]){
+        Invoke-Expression $envop
+    }
+}
+
+
 function Invoke-SpackCD {
     if (Compare-CommonArgs $SpackSubCommandArgs) {
-        python $Env:SPACK_ROOT/bin/spack cd -h
+        python "$Env:SPACK_ROOT/bin/spack" cd -h
     }
     else {
-        $LOC = $(python $Env:SPACK_ROOT/bin/spack location $SpackSubCommandArgs)
+        $LOC = $(python "$Env:SPACK_ROOT/bin/spack" location $SpackSubCommandArgs)
         if (($NULL -ne $LOC)){
             if ( Test-Path -Path $LOC){
                 Set-Location $LOC
@@ -61,7 +75,7 @@ function Invoke-SpackCD {

 function Invoke-SpackEnv {
     if (Compare-CommonArgs $SpackSubCommandArgs[0]) {
-        python $Env:SPACK_ROOT/bin/spack env -h
+        python "$Env:SPACK_ROOT/bin/spack" env -h
     }
     else {
         $SubCommandSubCommand = $SpackSubCommandArgs[0]
@@ -69,46 +83,46 @@ function Invoke-SpackEnv {
         switch ($SubCommandSubCommand) {
             "activate" {
                 if (Compare-CommonArgs $SubCommandSubCommandArgs) {
-                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
+                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
                 }
                 elseif ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
-                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
+                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
                 }
                 elseif (!$SubCommandSubCommandArgs) {
-                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
+                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
                 }
                 else {
-                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
-                    $ExecutionContext.InvokeCommand($SpackEnv)
+                    $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
+                    Set-SpackEnv $SpackEnv
                 }
             }
             "deactivate" {
                 if ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
-                    python $Env:SPACK_ROOT/bin/spack env deactivate $SubCommandSubCommandArgs
+                    python "$Env:SPACK_ROOT/bin/spack" env deactivate $SubCommandSubCommandArgs
                 }
                 elseif($SubCommandSubCommandArgs) {
-                    python $Env:SPACK_ROOT/bin/spack env deactivate -h
+                    python "$Env:SPACK_ROOT/bin/spack" env deactivate -h
                 }
                 else {
-                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env deactivate --pwsh)
-                    $ExecutionContext.InvokeCommand($SpackEnv)
+                    $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env deactivate "--pwsh")
+                    Set-SpackEnv $SpackEnv
                 }
             }
-            default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
+            default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
         }
     }
 }

 function Invoke-SpackLoad {
     if (Compare-CommonArgs $SpackSubCommandArgs) {
-        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
+        python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
     }
     elseif ([bool]($SpackSubCommandArgs.Where({($_ -eq "--pwsh") -or ($_ -eq "--list")}))) {
-        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
+        python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
     }
     else {
-        $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
-        $ExecutionContext.InvokeCommand($SpackEnv)
+        $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
+        Set-SpackEnv $SpackEnv
     }
 }

@@ -116,7 +130,7 @@ function Invoke-SpackLoad {
 $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs = Read-SpackArgs $args

 if (Compare-CommonArgs $SpackCMD_params) {
-    python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
+    python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
     exit $LASTEXITCODE
 }

@@ -128,5 +142,5 @@ switch($SpackSubCommand)
     "env" {Invoke-SpackEnv}
     "load" {Invoke-SpackLoad}
     "unload" {Invoke-SpackLoad}
-    default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
+    default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
 }
@@ -36,3 +36,9 @@ concretizer:
   # on each root spec, allowing different versions and variants of the same package in
   # an environment.
   unify: true
+  # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
+  duplicates:
+    # "none": allows a single node for any package in the DAG.
+    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
+    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
+    strategy: none
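The comments above document the three strategies shipped with this change. Assuming the new option behaves like other concretizer settings, it should be overridable from a higher-priority configuration scope; a minimal sketch (the scope file and chosen value are illustrative):

.. code-block:: yaml

   # e.g. in a user-scope concretizer.yaml (hypothetical override)
   concretizer:
     duplicates:
       # allow duplicated build-tool nodes such as cmake or py-setuptools
       strategy: minimal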
@@ -49,6 +49,7 @@ packages:
     pbs: [openpbs, torque]
     pil: [py-pillow]
     pkgconfig: [pkgconf, pkg-config]
+    qmake: [qt-base, qt]
     rpc: [libtirpc]
     scalapack: [netlib-scalapack, amdscalapack]
     sycl: [hipsycl]
@@ -59,6 +60,7 @@ packages:
     xxd: [xxd-standalone, vim]
     yacc: [bison, byacc]
     ziglang: [zig]
+    zlib-api: [zlib-ng+compat, zlib]
   permissions:
     read: world
     write: user
@@ -32,9 +32,14 @@ can't be found. You can readily check if any prerequisite for using Spack is mis

 Spack will take care of bootstrapping any missing dependency marked as [B]. Dependencies marked as [-] are instead required to be found on the system.

+   % echo $?
+   1
+
 In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
 are missing and it's giving detailed information on why they are needed and whether
-they can be bootstrapped. Running a command that concretize a spec, like:
+they can be bootstrapped. The return code of this command summarizes the results, if any
+dependencies are missing the return code is ``1``, otherwise ``0``. Running a command that
+concretizes a spec, like:

 .. code-block:: console

@@ -44,7 +49,7 @@ they can be bootstrapped. Running a command that concretize a spec, like:
    ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
    [ ... ]

-triggers the bootstrapping of clingo from pre-built binaries as expected.
+automatically triggers the bootstrapping of clingo from pre-built binaries as expected.

 Users can also bootstrap all the dependencies needed by Spack in a single command, which
 might be useful to setup containers or other similar environments:
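Because the check's return code distinguishes missing (``1``) from satisfied (``0``) prerequisites, it can gate automation; a minimal sketch (assuming the check shown above is the ``spack bootstrap status`` command):

.. code-block:: console

   $ spack bootstrap status && echo "prerequisites OK" || echo "some prerequisites are missing"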
@@ -9,9 +9,32 @@
 Bundle
 ------

-``BundlePackage`` represents a set of packages that are expected to work well
-together, such as a collection of commonly used software libraries. The
-associated software is specified as bundle dependencies.
+``BundlePackage`` represents a set of packages that are expected to work
+well together, such as a collection of commonly used software libraries.
+The associated software is specified as dependencies.
+
+If it makes sense, variants, conflicts, and requirements can be added to
+the package. :ref:`Variants <variants>` ensure that common build options
+are consistent across the packages supporting them. :ref:`Conflicts
+and requirements <packaging_conflicts>` prevent attempts to build with known
+bugs or limitations.
+
+For example, if ``MyBundlePackage`` is known to only build on ``linux``,
+it could use the ``require`` directive as follows:
+
+.. code-block:: python
+
+   require("platform=linux", msg="MyBundlePackage only builds on linux")
+
+Spack has a number of built-in bundle packages, such as:
+
+* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
+* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
+* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
+* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_
+
+where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
+``Libc`` is a virtual bundle package for the C standard library.


 ^^^^^^^^
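Putting the pieces from the updated documentation together, a minimal hypothetical bundle package (all names invented for illustration) might look like:

.. code-block:: python

   # Hypothetical bundle package; package and dependency names are invented.
   from spack.package import *


   class MySdk(BundlePackage):
       """Suite of (imaginary) simulation libraries that work well together."""

       homepage = "https://example.com/my-sdk"

       # Bundle packages have no source, so the version is a bare name.
       version("2023.09")

       variant("cuda", default=False, description="Use CUDA-enabled dependencies")

       # The bundled software is expressed purely as dependencies.
       depends_on("my-solver")
       depends_on("my-io-lib")
       depends_on("my-solver+cuda", when="+cuda")

       require("platform=linux", msg="MySdk only builds on linux")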
@@ -104,11 +104,13 @@ Clone `spack-configs <https://github.com/spack/spack-configs>`_ repo and activat

 `Intel oneAPI CPU environment <https://github.com/spack/spack-configs/blob/main/INTEL/CPU/spack.yaml>`_ contains applications tested and validated by Intel, this list is constantly extended. And currently it supports:

+- `Devito <https://www.devitoproject.org/>`_
 - `GROMACS <https://www.gromacs.org/>`_
 - `HPCG <https://www.hpcg-benchmark.org/>`_
 - `HPL <https://netlib.org/benchmark/hpl/>`_
 - `LAMMPS <https://www.lammps.org/#gsc.tab=0>`_
 - `OpenFOAM <https://www.openfoam.com/>`_
 - `Quantum Espresso <https://www.quantum-espresso.org/>`_
+- `STREAM <https://www.cs.virginia.edu/stream/>`_
 - `WRF <https://github.com/wrf-model/WRF>`_
@@ -32,7 +32,7 @@ By default, these phases run:

 .. code-block:: console

-   $ python configure.py --bindir ... --destdir ...
+   $ sip-build --verbose --target-dir ...
    $ make
    $ make install

@@ -41,30 +41,30 @@ By default, these phases run:
 Important files
 ^^^^^^^^^^^^^^^

-Each SIP package comes with a custom ``configure.py`` build script,
-written in Python. This script contains instructions to build the project.
+Each SIP package comes with a custom configuration file written in Python.
+For newer packages, this is called ``project.py``, while in older packages,
+it may be called ``configure.py``. This script contains instructions to build
+the project.

 ^^^^^^^^^^^^^^^^^^^^^^^^^
 Build system dependencies
 ^^^^^^^^^^^^^^^^^^^^^^^^^

-``SIPPackage`` requires several dependencies. Python is needed to run
-the ``configure.py`` build script, and to run the resulting Python
-libraries. Qt is needed to provide the ``qmake`` command. SIP is also
-needed to build the package. All of these dependencies are automatically
-added via the base class
+``SIPPackage`` requires several dependencies. Python and SIP are needed at build-time
+to run the aforementioned configure script. Python is also needed at run-time to
+actually use the installed Python library. And as we are building Python bindings
+for C/C++ libraries, Python is also needed as a link dependency. All of these
+dependencies are automatically added via the base class.

 .. code-block:: python

-   extends('python')
+   extends("python", type=("build", "link", "run"))
+   depends_on("py-sip", type="build")

-   depends_on('qt', type='build')
-
-   depends_on('py-sip', type='build')

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Passing arguments to ``configure.py``
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Passing arguments to ``sip-build``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

 Each phase comes with a ``<phase_args>`` function that can be used to pass
 arguments to that particular phase. For example, if you need to pass
@@ -73,10 +73,10 @@ arguments to the configure phase, you can use:
 .. code-block:: python

    def configure_args(self):
-       return ['--no-python-dbus']
+       return ["--no-python-dbus"]


-A list of valid options can be found by running ``python configure.py --help``.
+A list of valid options can be found by running ``sip-build --help``.

 ^^^^^^^
 Testing
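For orientation, a skeletal hypothetical SIP package based on the updated docs (the name, URL, and checksum are placeholders; the flag is the docs' own example):

.. code-block:: python

   # Hypothetical SIP-based package; all specifics are placeholders.
   from spack.package import *


   class PyMywidgets(SIPPackage):
       """Imaginary Python bindings for a C++ widget library."""

       homepage = "https://example.com/mywidgets"
       url = "https://example.com/mywidgets-1.0.tar.gz"

       version("1.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")

       # python and py-sip dependencies come from the SIPPackage base class.

       def configure_args(self):
           # extra flags forwarded to sip-build
           return ["--no-python-dbus"]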
lib/spack/docs/gpu_configuration.rst (new file, 113 lines)

@@ -0,0 +1,113 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

==========================
Using External GPU Support
==========================

Many packages come with a ``+cuda`` or ``+rocm`` variant. With no added
configuration Spack will download and install the needed components.
It may be preferable to use existing system support: the following sections
help with using a system installation of GPU libraries.

-----------------------------------
Using an External ROCm Installation
-----------------------------------

Spack breaks down ROCm into many separate component packages. The following
is an example ``packages.yaml`` that organizes a consistent set of ROCm
components for use by dependent packages:

.. code-block:: yaml

   packages:
     all:
       compiler: [rocmcc@=5.3.0]
       variants: amdgpu_target=gfx90a
     hip:
       buildable: false
       externals:
       - spec: hip@5.3.0
         prefix: /opt/rocm-5.3.0/hip
     hsa-rocr-dev:
       buildable: false
       externals:
       - spec: hsa-rocr-dev@5.3.0
         prefix: /opt/rocm-5.3.0/
     llvm-amdgpu:
       buildable: false
       externals:
       - spec: llvm-amdgpu@5.3.0
         prefix: /opt/rocm-5.3.0/llvm/
     comgr:
       buildable: false
       externals:
       - spec: comgr@5.3.0
         prefix: /opt/rocm-5.3.0/
     hipsparse:
       buildable: false
       externals:
       - spec: hipsparse@5.3.0
         prefix: /opt/rocm-5.3.0/
     hipblas:
       buildable: false
       externals:
       - spec: hipblas@5.3.0
         prefix: /opt/rocm-5.3.0/
     rocblas:
       buildable: false
       externals:
       - spec: rocblas@5.3.0
         prefix: /opt/rocm-5.3.0/
     rocprim:
       buildable: false
       externals:
       - spec: rocprim@5.3.0
         prefix: /opt/rocm-5.3.0/rocprim/

This is in combination with the following compiler definition:

.. code-block:: yaml

   compilers:
   - compiler:
       spec: rocmcc@=5.3.0
       paths:
         cc: /opt/rocm-5.3.0/bin/amdclang
         cxx: /opt/rocm-5.3.0/bin/amdclang++
         f77: null
         fc: /opt/rocm-5.3.0/bin/amdflang
       operating_system: rhel8
       target: x86_64

This includes the following considerations:

- Each of the listed externals specifies ``buildable: false`` to force Spack
  to use only the externals we defined.
- ``spack external find`` can automatically locate some of the ``hip``/``rocm``
  packages, but not all of them, and furthermore not in a manner that
  guarantees a complementary set if multiple ROCm installations are available.
- The ``prefix`` is the same for several components, but note that others
  require listing one of the subdirectories as a prefix.

-----------------------------------
Using an External CUDA Installation
-----------------------------------

CUDA is split into fewer components and is simpler to specify:

.. code-block:: yaml

   packages:
     all:
       variants:
       - cuda_arch=70
     cuda:
       buildable: false
       externals:
       - spec: cuda@11.0.2
         prefix: /opt/cuda/cuda-11.0.2/

where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.
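With a configuration like the above in place, a package requesting the matching variant should concretize against the external toolkit rather than a Spack-built one; a hypothetical check (the package name is illustrative):

.. code-block:: console

   $ spack spec mypackage +cuda   # cuda should resolve to the external cuda@11.0.2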
@@ -77,6 +77,7 @@ or refer to the full manual below.
    extensions
    pipelines
    signing
+   gpu_configuration

 .. toctree::
    :maxdepth: 2
@@ -363,6 +363,42 @@ one of these::
|
||||
If Spack finds none of these variables set, it will look for ``vim``, ``vi``, ``emacs``,
|
||||
``nano``, and ``notepad``, in that order.
|
||||
|
||||
^^^^^^^^^^^^^^^^^
|
||||
Bundling software
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you have a collection of software expected to work well together with
|
||||
no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
|
||||
Examples where bundle packages can be useful include defining suites of
|
||||
applications (e.g, `EcpProxyApps
|
||||
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
|
||||
(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
|
||||
and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).
|
||||
|
||||
These versioned packages primarily consist of dependencies on the associated
|
||||
software packages. They can include :ref:`variants <variants>` to ensure
|
||||
common build options are consistently applied to dependencies. Known build
|
||||
failures, such as not building on a platform or when certain compilers or
|
||||
variants are used, can be flagged with :ref:`conflicts <packaging_conflicts>`.
|
||||
Build requirements, such as only building with specific compilers, can similarly
|
||||
be flagged with :ref:`requires <packaging_conflicts>`.
|
||||
|
||||
The ``spack create --template bundle`` command will create a skeleton
|
||||
``BundlePackage`` ``package.py`` for you:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack create --template bundle --name coolsdk
|
||||
|
||||
Now you can fill in the basic package documentation, version(s), and software
|
||||
package dependencies along with any other relevant customizations.
|
||||
|
||||
.. note::
|
||||
|
||||
Remember that bundle packages have no software of their own so there
|
||||
is nothing to download.
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Non-downloadable software
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -610,7 +646,16 @@ add a line like this in the package class:
|
||||
version("8.2.0", md5="1c9f62f0778697a09d36121ead88e08e")
|
||||
version("8.1.2", md5="d47dd09ed7ae6e7fd6f9a816d7f5fdf6")
|
||||
|
||||
Versions should be listed in descending order, from newest to oldest.
|
||||
.. note::
|
||||
|
||||
By convention, we list versions in descending order, from newest to oldest.
|
||||
|
||||
.. note::
|
||||
|
||||
:ref:`Bundle packages <bundlepackage>` do not have source code so
|
||||
there is nothing to fetch. Consequently, their version directives
|
||||
consist solely of the version name (e.g., ``version("202309")``).
|
||||
|
||||
|
||||
^^^^^^^^^^^^^
|
||||
Date Versions
|
||||
@@ -2243,7 +2288,7 @@ looks like this:
|
||||
url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"
|
||||
|
||||
version("1.0.1h", md5="8d6d684a9430d5cc98a62a5d8fbda8cf")
|
||||
depends_on("zlib")
|
||||
depends_on("zlib-api")
|
||||
|
||||
parallel = False
|
||||
|
||||
@@ -2678,7 +2723,7 @@ Conflicts and requirements
|
||||
--------------------------
|
||||
|
||||
Sometimes packages have known bugs, or limitations, that would prevent them
|
||||
to build e.g. against other dependencies or with certain compilers. Spack
|
||||
from building e.g. against other dependencies or with certain compilers. Spack
|
||||
makes it possible to express such constraints with the ``conflicts`` directive.
|
||||
|
||||
Adding the following to a package:
|
||||
@@ -4773,17 +4818,17 @@ For example, running:
|
||||
|
||||
results in spack checking that the installation created the following **file**:
|
||||
|
||||
* ``self.prefix/bin/reframe``
|
||||
* ``self.prefix.bin.reframe``
|
||||
|
||||
and the following **directories**:
|
||||
|
||||
* ``self.prefix/bin``
|
||||
* ``self.prefix/config``
|
||||
* ``self.prefix/docs``
|
||||
* ``self.prefix/reframe``
|
||||
* ``self.prefix/tutorials``
|
||||
* ``self.prefix/unittests``
|
||||
* ``self.prefix/cscs-checks``
|
||||
* ``self.prefix.bin``
|
||||
* ``self.prefix.config``
|
||||
* ``self.prefix.docs``
|
||||
* ``self.prefix.reframe``
|
||||
* ``self.prefix.tutorials``
|
||||
* ``self.prefix.unittests``
|
||||
* ``self.prefix.cscs-checks``
|
||||
|
||||
If **any** of these paths are missing, then Spack considers the installation
|
||||
to have failed.
|
||||
@@ -4927,7 +4972,7 @@ installed executable. The check is implemented as follows:
|
||||
@on_package_attributes(run_tests=True)
|
||||
def check_list(self):
|
||||
with working_dir(self.stage.source_path):
|
||||
reframe = Executable(join_path(self.prefix, "bin", "reframe"))
|
||||
reframe = Executable(self.prefix.bin.reframe)
|
||||
reframe("-l")
|
||||
|
||||
.. warning::
|
||||
@@ -5147,8 +5192,8 @@ embedded test parts.
|
||||
for example in ["ex1", "ex2"]:
|
||||
with test_part(
|
||||
self,
|
||||
"test_example_{0}".format(example),
|
||||
purpose="run installed {0}".format(example),
|
||||
f"test_example_{example}",
|
||||
purpose=f"run installed {example}",
|
||||
):
|
||||
exe = which(join_path(self.prefix.bin, example))
|
||||
exe()
|
||||
@@ -5226,11 +5271,10 @@ Below illustrates using this feature to compile an example.
|
||||
...
|
||||
cxx = which(os.environ["CXX"])
|
||||
cxx(
|
||||
"-L{0}".format(self.prefix.lib),
|
||||
"-I{0}".format(self.prefix.include),
|
||||
"{0}.cpp".format(exe),
|
||||
"-o",
|
||||
exe
|
||||
f"-L{self.prefix.lib}",
|
||||
f"-I{self.prefix.include}",
|
||||
f"{exe}.cpp",
|
||||
"-o", exe
|
||||
)
|
||||
cxx_example = which(exe)
|
||||
cxx_example()
|
||||
@@ -5247,14 +5291,14 @@ Saving build-time files
|
||||
We highly recommend re-using build-time test sources and pared down
|
||||
input files for testing installed software. These files are easier
|
||||
to keep synchronized with software capabilities since they reside
|
||||
within the software's repository.
|
||||
|
||||
within the software's repository.
|
||||
|
||||
If that is not possible, you can add test-related files to the package
|
||||
repository (see :ref:`adding custom files <cache_custom_files>`). It
|
||||
will be important to maintain them so they work across listed or supported
|
||||
versions of the package.
|
||||
|
||||
You can use the ``cache_extra_test_sources`` method to copy directories
|
||||
You can use the ``cache_extra_test_sources`` helper to copy directories
|
||||
and or files from the source build stage directory to the package's
|
||||
installation directory.
|
||||
|
||||
@@ -5262,10 +5306,15 @@ The signature for ``cache_extra_test_sources`` is:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def cache_extra_test_sources(self, srcs):
|
||||
def cache_extra_test_sources(pkg, srcs):
|
||||
|
||||
where each argument has the following meaning:
|
||||
|
||||
* ``pkg`` is an instance of the package for the spec under test.
|
||||
|
||||
* ``srcs`` is a string *or* a list of strings corresponding to the
|
||||
paths of subdirectories and or files needed for stand-alone testing.
|
||||
|
||||
where ``srcs`` is a string *or* a list of strings corresponding to the
|
||||
paths of subdirectories and or files needed for stand-alone testing.
|
||||
The paths must be relative to the staged source directory. Contents of
|
||||
subdirectories and files are copied to a special test cache subdirectory
|
||||
of the installation prefix. They are automatically copied to the appropriate
|
||||
@@ -5286,21 +5335,18 @@ and using ``foo.c`` in a test method is illustrated below.
|
||||
srcs = ["tests",
|
||||
join_path("examples", "foo.c"),
|
||||
join_path("examples", "bar.c")]
|
||||
self.cache_extra_test_sources(srcs)
|
||||
cache_extra_test_sources(self, srcs)
|
||||
|
||||
def test_foo(self):
|
||||
exe = "foo"
|
||||
src_dir = join_path(
|
||||
self.test_suite.current_test_cache_dir, "examples"
|
||||
)
|
||||
src_dir = self.test_suite.current_test_cache_dir.examples
|
||||
with working_dir(src_dir):
|
||||
cc = which(os.environ["CC"])
|
||||
cc(
|
||||
"-L{0}".format(self.prefix.lib),
|
||||
"-I{0}".format(self.prefix.include),
|
||||
"{0}.c".format(exe),
|
||||
"-o",
|
||||
exe
|
||||
f"-L{self.prefix.lib}",
|
||||
f"-I{self.prefix.include}",
|
||||
f"{exe}.c",
|
||||
"-o", exe
|
||||
)
|
||||
foo = which(exe)
|
||||
foo()
|
||||
@@ -5326,9 +5372,9 @@ the files using the ``self.test_suite.current_test_cache_dir`` property.
|
||||
In our example above, test methods can use the following paths to reference
|
||||
the copy of each entry listed in ``srcs``, respectively:
|
||||
|
||||
* ``join_path(self.test_suite.current_test_cache_dir, "tests")``
|
||||
* ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
|
||||
* ``join_path(self.test_suite.current_test_cache_dir, "examples", "bar.c")``
|
||||
* ``self.test_suite.current_test_cache_dir.tests``
|
||||
* ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
|
||||
* ``join_path(self.test_suite.current_test_cache_dir.examples, "bar.c")``
|
||||
|
||||
.. admonition:: Library packages should build stand-alone tests
|
||||
|
||||
@@ -5347,7 +5393,7 @@ the copy of each entry listed in ``srcs``, respectively:
|
||||
If one or more of the copied files needs to be modified to reference
|
||||
the installed software, it is recommended that those changes be made
|
||||
to the cached files **once** in the ``copy_test_sources`` method and
|
||||
***after** the call to ``self.cache_extra_test_sources()``. This will
|
||||
***after** the call to ``cache_extra_test_sources()``. This will
|
||||
reduce the amount of unnecessary work in the test method **and** avoid
|
||||
problems testing in shared instances and facility deployments.
|
||||
|
||||
@@ -5394,7 +5440,7 @@ property as shown below.
|
||||
"""build and run custom-example"""
|
||||
data_dir = self.test_suite.current_test_data_dir
|
||||
exe = "custom-example"
|
||||
src = datadir.join("{0}.cpp".format(exe))
|
||||
src = datadir.join(f"{exe}.cpp")
|
||||
...
|
||||
# TODO: Build custom-example using src and exe
|
||||
...
|
||||
@@ -5410,7 +5456,7 @@ Reading expected output from a file
|
||||
|
||||
The helper function ``get_escaped_text_output`` is available for packages
|
||||
to retrieve and properly format the text from a file that contains the
|
||||
expected output from running an executable that may contain special
|
||||
expected output from running an executable that may contain special
|
||||
characters.
|
||||
|
||||
The signature for ``get_escaped_text_output`` is:
|
||||
@@ -5444,7 +5490,7 @@ added to the package's ``test`` subdirectory.
|
||||
db_filename, ".dump", output=str.split, error=str.split
|
||||
)
|
||||
for exp in expected:
|
||||
assert re.search(exp, out), "Expected '{0}' in output".format(exp)
|
||||
assert re.search(exp, out), f"Expected '{exp}' in output"
|
||||
|
||||
If the file was instead copied from the ``tests`` subdirectory of the staged
|
||||
source code, the path would be obtained as shown below.
|
||||
@@ -5457,7 +5503,7 @@ source code, the path would be obtained as shown below.
|
||||
db_filename = test_cache_dir.join("packages.db")
|
||||
|
||||
Alternatively, if the file was copied to the ``share/tests`` subdirectory
|
||||
as part of the installation process, the test could access the path as
|
||||
as part of the installation process, the test could access the path as
|
||||
follows:
|
||||
|
||||
.. code-block:: python
|
||||
@@ -5494,9 +5540,12 @@ Invoking the method is the equivalent of:

.. code-block:: python

    errors = []
    for check in expected:
        if not re.search(check, actual):
            raise RuntimeError("Expected '{0}' in output '{1}'".format(check, actual))
            errors.append(f"Expected '{check}' in output '{actual}'")
    if errors:
        raise RuntimeError("\n ".join(errors))
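Collecting the mismatches and raising once at the end means a single run
reports every failed check, rather than stopping at the first unmatched
pattern.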


.. _accessing-files:
@@ -5536,7 +5585,7 @@ repository, and installation.
     - ``self.test_suite.test_dir_for_spec(self.spec)``
   * - Current Spec's Build-time Files
     - ``self.test_suite.current_test_cache_dir``
     - ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
     - ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
   * - Current Spec's Custom Test Files
     - ``self.test_suite.current_test_data_dir``
     - ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
@@ -5551,7 +5600,7 @@ Inheriting stand-alone tests
Stand-alone tests defined in parent (e.g., :ref:`build-systems`) and
virtual (e.g., :ref:`virtual-dependencies`) packages are executed by
packages that inherit from or provide interface implementations for those
packages, respectively.

The table below summarizes the stand-alone tests that will be executed along
with those implemented in the package itself.
@@ -5621,7 +5670,7 @@ for ``openmpi``:
    SKIPPED: test_version_oshcc: oshcc is not installed
    ...
    ==> [2023-03-10-16:04:02.215227] Completed testing
    ==> [2023-03-10-16:04:02.215597]
    ======================== SUMMARY: openmpi-4.1.4-ubmrigj ========================
    Openmpi::test_bin_mpirun .. PASSED
    Openmpi::test_bin_ompi_info .. PASSED
@@ -6071,7 +6120,7 @@ in the extra attributes can implement this method like this:
    @classmethod
    def validate_detected_spec(cls, spec, extra_attributes):
        """Check that "compilers" is in the extra attributes."""
        msg = ("the extra attribute "compilers" must be set for "
        msg = ("the extra attribute 'compilers' must be set for "
               "the detected spec '{0}'".format(spec))
        assert "compilers" in extra_attributes, msg


@@ -1,13 +1,13 @@
sphinx==6.2.1
sphinx==7.2.5
sphinxcontrib-programoutput==0.17
sphinx_design==0.4.1
sphinx-rtd-theme==1.2.2
sphinx_design==0.5.0
sphinx-rtd-theme==1.3.0
python-levenshtein==0.21.1
docutils==0.18.1
pygments==2.15.1
urllib3==2.0.3
pytest==7.4.0
pygments==2.16.1
urllib3==2.0.4
pytest==7.4.2
isort==5.12.0
black==23.1.0
flake8==6.0.0
mypy==1.4.1
black==23.7.0
flake8==6.1.0
mypy==1.5.1

@@ -217,13 +217,7 @@ file would live in the ``build_cache`` directory of a binary mirror::
        "binary_cache_checksum": {
            "hash_algorithm": "sha256",
            "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
        },

        "buildinfo": {
            "relative_prefix":
            "linux-ubuntu18.04-haswell/gcc-7.5.0/zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow",
            "relative_rpaths": false
        }
    }
}

-----BEGIN PGP SIGNATURE-----

@@ -18,11 +18,13 @@
import sys
import tempfile
from contextlib import contextmanager
from itertools import accumulate
from typing import Callable, Iterable, List, Match, Optional, Tuple, Union

import llnl.util.symlink
from llnl.util import tty
from llnl.util.lang import dedupe, memoized
from llnl.util.symlink import islink, symlink
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink

from spack.util.executable import Executable, which
from spack.util.path import path_to_os_path, system_path_filter
@@ -101,7 +103,7 @@ def _nop(args, ns=None, follow_symlinks=None):
    pass

# follow symlinks (aka don't not follow symlinks)
follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
follow = follow_symlinks or not (islink(src) and islink(dst))
if follow:
    # use the real function if it exists
    def lookup(name):
@@ -169,7 +171,7 @@ def rename(src, dst):
    if sys.platform == "win32":
        # Windows path existence checks will sometimes fail on junctions/links/symlinks
        # so check for that case
        if os.path.exists(dst) or os.path.islink(dst):
        if os.path.exists(dst) or islink(dst):
            os.remove(dst)
    os.rename(src, dst)

@@ -566,7 +568,7 @@ def set_install_permissions(path):
    # If this points to a file maintained in a Spack prefix, it is assumed that
    # this function will be invoked on the target. If the file is outside a
    # Spack-maintained prefix, the permissions should not be modified.
    if os.path.islink(path):
    if islink(path):
        return
    if os.path.isdir(path):
        os.chmod(path, 0o755)
@@ -635,7 +637,7 @@ def chmod_x(entry, perms):
@system_path_filter
def copy_mode(src, dest):
    """Set the mode of dest to that of src unless it is a link."""
    if os.path.islink(dest):
    if islink(dest):
        return
    src_mode = os.stat(src).st_mode
    dest_mode = os.stat(dest).st_mode
@@ -721,26 +723,12 @@ def install(src, dest):
    copy(src, dest, _permissions=True)


@system_path_filter
def resolve_link_target_relative_to_the_link(link):
    """
    os.path.isdir uses os.path.exists, which for links will check
    the existence of the link target. If the link target is relative to
    the link, we need to construct a pathname that is valid from
    our cwd (which may not be the same as the link's directory)
    """
    target = os.readlink(link)
    if os.path.isabs(target):
        return target
    link_dir = os.path.dirname(os.path.abspath(link))
    return os.path.join(link_dir, target)


@system_path_filter
def copy_tree(
    src: str,
    dest: str,
    symlinks: bool = True,
    allow_broken_symlinks: bool = sys.platform != "win32",
    ignore: Optional[Callable[[str], bool]] = None,
    _permissions: bool = False,
):
@@ -763,6 +751,8 @@ def copy_tree(
        src (str): the directory to copy
        dest (str): the destination directory
        symlinks (bool): whether or not to preserve symlinks
        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
            On Windows, setting this to True will raise an exception. Defaults to true on unix.
        ignore (typing.Callable): function indicating which files to ignore
        _permissions (bool): for internal use only

@@ -770,6 +760,8 @@ def copy_tree(
        IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
    """
    if allow_broken_symlinks and sys.platform == "win32":
        raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
    if _permissions:
        tty.debug("Installing {0} to {1}".format(src, dest))
    else:
@@ -783,6 +775,11 @@ def copy_tree(
    if not files:
        raise IOError("No such file or directory: '{0}'".format(src))

    # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
    # all symlinks to this list while traversing the tree, then when finished, make all
    # symlinks at the end.
    links = []

    for src in files:
        abs_src = os.path.abspath(src)
        if not abs_src.endswith(os.path.sep):
@@ -805,7 +802,7 @@ def copy_tree(
            ignore=ignore,
            follow_nonexisting=True,
        ):
            if os.path.islink(s):
            if islink(s):
                link_target = resolve_link_target_relative_to_the_link(s)
                if symlinks:
                    target = os.readlink(s)
@@ -819,7 +816,9 @@ def escaped_path(path):
                        tty.debug("Redirecting link {0} to {1}".format(target, new_target))
                        target = new_target

                    symlink(target, d)
                    links.append((target, d, s))
                    continue

                elif os.path.isdir(link_target):
                    mkdirp(d)
                else:
@@ -834,9 +833,17 @@ def escaped_path(path):
                set_install_permissions(d)
                copy_mode(s, d)

    for target, d, s in links:
        symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
        if _permissions:
            set_install_permissions(d)
            copy_mode(s, d)


@system_path_filter
def install_tree(src, dest, symlinks=True, ignore=None):
def install_tree(
    src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
):
    """Recursively install an entire directory tree rooted at *src*.

    Same as :py:func:`copy_tree` with the addition of setting proper
@@ -847,12 +854,21 @@ def install_tree(src, dest, symlinks=True, ignore=None):
        dest (str): the destination directory
        symlinks (bool): whether or not to preserve symlinks
        ignore (typing.Callable): function indicating which files to ignore
        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
            On Windows, setting this to True will raise an exception.

    Raises:
        IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
    """
    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
    copy_tree(
        src,
        dest,
        symlinks=symlinks,
        allow_broken_symlinks=allow_broken_symlinks,
        ignore=ignore,
        _permissions=True,
    )


@system_path_filter
@@ -1256,7 +1272,12 @@ def traverse_tree(
    Keyword Arguments:
        order (str): Whether to do pre- or post-order traversal. Accepted
            values are 'pre' and 'post'
        ignore (typing.Callable): function indicating which files to ignore
        ignore (typing.Callable): function indicating which files to ignore. This will also
            ignore symlinks if they point to an ignored file (regardless of whether the symlink
            is explicitly ignored); note this only supports one layer of indirection (i.e. if
            you have x -> y -> z, and z is ignored but x/y are not, then y would be ignored
            but not x). To avoid this, make sure the ignore function ignores the symlink
            paths too.
        follow_nonexisting (bool): Whether to descend into directories in
            ``src`` that do not exist in ``dest``. Default is True
        follow_links (bool): Whether to descend into symlinks in ``src``
@@ -1283,11 +1304,24 @@ def traverse_tree(
        dest_child = os.path.join(dest_path, f)
        rel_child = os.path.join(rel_path, f)

        # If the source path is a link and the link's source is ignored, then ignore the link too,
        # but only do this if the ignore is defined.
        if ignore is not None:
            if islink(source_child) and not follow_links:
                target = readlink(source_child)
                all_parents = accumulate(target.split(os.sep), lambda x, y: os.path.join(x, y))
                if any(map(ignore, all_parents)):
                    tty.warn(
                        f"Skipping {source_path} because the source or a part of the source's "
                        f"path is included in the ignores."
                    )
                    continue

        # Treat as a directory
        # TODO: for symlinks, os.path.isdir looks for the link target. If the
        # target is relative to the link, then that may not resolve properly
        # relative to our cwd - see resolve_link_target_relative_to_the_link
        if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
        if os.path.isdir(source_child) and (follow_links or not islink(source_child)):
            # When follow_nonexisting isn't set, don't descend into dirs
            # in source that do not exist in dest
            if follow_nonexisting or os.path.exists(dest_child):
@@ -1313,7 +1347,11 @@ def traverse_tree(

def lexists_islink_isdir(path):
    """Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
    number of stat calls."""
    number of stat calls on unix. Use os.path and symlink.islink methods for windows."""
    if sys.platform == "win32":
        if not os.path.lexists(path):
            return False, False, False
        return os.path.lexists(path), islink(path), os.path.isdir(path)
    # First try to lstat, so we know if it's a link or not.
    try:
        lst = os.lstat(path)
@@ -1528,7 +1566,7 @@ def remove_if_dead_link(path):
    Parameters:
        path (str): The potential dead link
    """
    if os.path.islink(path) and not os.path.exists(path):
    if islink(path) and not os.path.exists(path):
        os.unlink(path)


@@ -1587,7 +1625,7 @@ def remove_linked_tree(path):
        kwargs["onerror"] = readonly_file_handler(ignore_errors=True)

    if os.path.exists(path):
        if os.path.islink(path):
        if islink(path):
            shutil.rmtree(os.path.realpath(path), **kwargs)
            os.unlink(path)
        else:
@@ -1754,9 +1792,14 @@ def find(root, files, recursive=True):
        files = [files]

    if recursive:
        return _find_recursive(root, files)
        tty.debug(f"Find (recursive): {root} {str(files)}")
        result = _find_recursive(root, files)
    else:
        return _find_non_recursive(root, files)
        tty.debug(f"Find (not recursive): {root} {str(files)}")
        result = _find_non_recursive(root, files)

    tty.debug(f"Find complete: {root} {str(files)}")
    return result


@system_path_filter
@@ -2688,7 +2731,7 @@ def remove_directory_contents(dir):
    """Remove all contents of a directory."""
    if os.path.exists(dir):
        for entry in [os.path.join(dir, entry) for entry in os.listdir(dir)]:
            if os.path.isfile(entry) or os.path.islink(entry):
            if os.path.isfile(entry) or islink(entry):
                os.unlink(entry)
            else:
                shutil.rmtree(entry)

@@ -143,7 +143,7 @@ def get_fh(self, path: str) -> IO:
    def release_by_stat(self, stat):
        key = (stat.st_dev, stat.st_ino, os.getpid())
        open_file = self._descriptors.get(key)
        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_inode
        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_ino

        open_file.refs -= 1
        if not open_file.refs:

@@ -2,77 +2,188 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import errno
import os
import re
import shutil
import subprocess
import sys
import tempfile
from os.path import exists, join

from llnl.util import lang
from llnl.util import lang, tty

from spack.error import SpackError
from spack.util.path import system_path_filter

if sys.platform == "win32":
    from win32file import CreateHardLink

is_windows = sys.platform == "win32"

def symlink(real_path, link_path):
    """
    Create a symbolic link.

    On Windows, use junctions if os.symlink fails.
def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
    """
    if sys.platform != "win32":
        os.symlink(real_path, link_path)
    elif _win32_can_symlink():
        # Windows requires target_is_directory=True when the target is a dir.
        os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
    else:
        try:
            # Try to use junctions
            _win32_junction(real_path, link_path)
        except OSError as e:
            if e.errno == errno.EEXIST:
                # EEXIST error indicates that file we're trying to "link"
                # is already present, don't bother trying to copy which will also fail
                # just raise
                raise
    Create a link.

    On non-Windows and Windows with System Administrator
    privileges this will be a normal symbolic link via
    os.symlink.

    On Windows without privileges the link will be a
    junction for a directory and a hardlink for a file.
    On Windows the various link types are:

    Symbolic Link: A link to a file or directory on the
    same or different volume (drive letter) or even to
    a remote file or directory (using UNC in its path).
    Need System Administrator privileges to make these.

    Hard Link: A link to a file on the same volume (drive
    letter) only. Every file (file's data) has at least 1
    hard link (file's name). But when this method creates
    a new hard link there will be 2. Deleting all hard
    links effectively deletes the file. Don't need System
    Administrator privileges.

    Junction: A link to a directory on the same or different
    volume (drive letter) but not to a remote directory. Don't
    need System Administrator privileges.

    Parameters:
        source_path (str): The real file or directory that the link points to.
            Must be absolute OR relative to the link.
        link_path (str): The path where the link will exist.
        allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
            doesn't exist. This will still raise an exception on Windows.
    """
    source_path = os.path.normpath(source_path)
    win_source_path = source_path
    link_path = os.path.normpath(link_path)

    # Never allow broken links on Windows.
    if sys.platform == "win32" and allow_broken_symlinks:
        raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")

    if not allow_broken_symlinks:
        # Perform basic checks to make sure symlinking will succeed
        if os.path.lexists(link_path):
            raise SymlinkError(f"Link path ({link_path}) already exists. Cannot create link.")

        if not os.path.exists(source_path):
            if os.path.isabs(source_path) and not allow_broken_symlinks:
                # An absolute source path that does not exist will result in a broken link.
                raise SymlinkError(
                    f"Source path ({source_path}) is absolute but does not exist. Resulting "
                    f"link would be broken so not making link."
                )
            else:
                # If all else fails, fall back to copying files
                shutil.copyfile(real_path, link_path)
            # os.symlink can create a link when the given source path is relative to
            # the link path. Emulate this behavior and check to see if the source exists
            # relative to the link path ahead of link creation to prevent broken
            # links from being made.
            link_parent_dir = os.path.dirname(link_path)
            relative_path = os.path.join(link_parent_dir, source_path)
            if os.path.exists(relative_path):
                # In order to work on windows, the source path needs to be modified to be
                # relative because hardlink/junction don't resolve relative paths the same
                # way as os.symlink. This is ignored on other operating systems.
                win_source_path = relative_path
            elif not allow_broken_symlinks:
                raise SymlinkError(
                    f"The source path ({source_path}) is not relative to the link path "
                    f"({link_path}). Resulting link would be broken so not making link."
                )

    # Create the symlink
    if sys.platform == "win32" and not _windows_can_symlink():
        _windows_create_link(win_source_path, link_path)
    else:
        os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))


def islink(path):
    return os.path.islink(path) or _win32_is_junction(path)
def islink(path: str) -> bool:
    """Override os.islink to give correct answer for spack logic.

    For Non-Windows: a link can be determined with the os.path.islink method.
    Windows-only methods will return false for other operating systems.

    For Windows: spack considers symlinks, hard links, and junctions to
    all be links, so if any of those are True, return True.

    Args:
        path (str): path to check if it is a link.

    Returns:
        bool - whether the path is any kind of link or not.
    """
    return any([os.path.islink(path), _windows_is_junction(path), _windows_is_hardlink(path)])
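
# Illustrative usage (not part of the diff): a directory junction created on
# Windows with "mklink /J C:\link C:\target" is invisible to os.path.islink(),
# but this islink() reports it as a link:
#
#   from llnl.util.symlink import islink
#   islink(r"C:\link")  # True for a symlink, hard link, or junction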


# '_win32' functions based on
# https://github.com/Erotemic/ubelt/blob/master/ubelt/util_links.py
def _win32_junction(path, link):
    # junctions require absolute paths
    if not os.path.isabs(link):
        link = os.path.abspath(link)
def _windows_is_hardlink(path: str) -> bool:
    """Determines if a path is a windows hard link. This is accomplished
    by looking at the number of links using os.stat. A non-hard-linked file
    will have a st_nlink value of 1, whereas a hard link will have a value
    larger than 1. Note that both the original and hard-linked file will
    return True because they share the same inode.

    # os.symlink will fail if link exists, emulate the behavior here
    if exists(link):
        raise OSError(errno.EEXIST, "File exists: %s -> %s" % (link, path))
    Args:
        path (str): Windows path to check for a hard link

    if not os.path.isabs(path):
        parent = os.path.join(link, os.pardir)
        path = os.path.join(parent, path)
        path = os.path.abspath(path)
    Returns:
        bool - Whether the path is a hard link or not.
    """
    if sys.platform != "win32" or os.path.islink(path) or not os.path.exists(path):
        return False

    CreateHardLink(link, path)
    return os.stat(path).st_nlink > 1


def _windows_is_junction(path: str) -> bool:
    """Determines if a path is a windows junction. A junction can be
    determined using a bitwise AND operation between the file's
    attribute bitmask and the known junction bitmask (0x400).

    Args:
        path (str): A non-file path

    Returns:
        bool - whether the path is a junction or not.
    """
    if sys.platform != "win32" or os.path.islink(path) or os.path.isfile(path):
        return False

    import ctypes.wintypes

    get_file_attributes = ctypes.windll.kernel32.GetFileAttributesW  # type: ignore[attr-defined]
    get_file_attributes.argtypes = (ctypes.wintypes.LPWSTR,)
    get_file_attributes.restype = ctypes.wintypes.DWORD

    invalid_file_attributes = 0xFFFFFFFF
    reparse_point = 0x400
    file_attr = get_file_attributes(str(path))

    if file_attr == invalid_file_attributes:
        return False

    return file_attr & reparse_point > 0


@lang.memoized
def _win32_can_symlink():
def _windows_can_symlink() -> bool:
    """
    Determines if windows is able to make a symlink depending on
    the system configuration and the level of the user's permissions.
    """
    if sys.platform != "win32":
        tty.warn("windows_can_symlink method can't be used on non-Windows OS.")
        return False

    tempdir = tempfile.mkdtemp()

    dpath = join(tempdir, "dpath")
    fpath = join(tempdir, "fpath.txt")
    dpath = os.path.join(tempdir, "dpath")
    fpath = os.path.join(tempdir, "fpath.txt")

    dlink = join(tempdir, "dlink")
    flink = join(tempdir, "flink.txt")
    dlink = os.path.join(tempdir, "dlink")
    flink = os.path.join(tempdir, "flink.txt")

    import llnl.util.filesystem as fs

@@ -96,24 +207,136 @@ def _win32_can_symlink():
    return can_symlink_directories and can_symlink_files


def _win32_is_junction(path):
def _windows_create_link(source: str, link: str):
    """
    Determines if a path is a win32 junction
    Attempts to create a Hard Link or Junction as an alternative
    to a symbolic link. This is called when symbolic links cannot
    be created.
    """
    if os.path.islink(path):
        return False
    if sys.platform != "win32":
        raise SymlinkError("windows_create_link method can't be used on non-Windows OS.")
    elif os.path.isdir(source):
        _windows_create_junction(source=source, link=link)
    elif os.path.isfile(source):
        _windows_create_hard_link(path=source, link=link)
    else:
        raise SymlinkError(
            f"Cannot create link from {source}. It is neither a file nor a directory."
        )

    if sys.platform == "win32":
        import ctypes.wintypes

        GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
        GetFileAttributes.argtypes = (ctypes.wintypes.LPWSTR,)
        GetFileAttributes.restype = ctypes.wintypes.DWORD
def _windows_create_junction(source: str, link: str):
    """Duly verify that the path and link are eligible to create a junction,
    then create the junction.
    """
    if sys.platform != "win32":
        raise SymlinkError("windows_create_junction method can't be used on non-Windows OS.")
    elif not os.path.exists(source):
        raise SymlinkError("Source path does not exist, cannot create a junction.")
    elif os.path.lexists(link):
        raise SymlinkError("Link path already exists, cannot create a junction.")
    elif not os.path.isdir(source):
        raise SymlinkError("Source path is not a directory, cannot create a junction.")

        INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
        FILE_ATTRIBUTE_REPARSE_POINT = 0x400
    import subprocess

        res = GetFileAttributes(path)
        return res != INVALID_FILE_ATTRIBUTES and bool(res & FILE_ATTRIBUTE_REPARSE_POINT)
    cmd = ["cmd", "/C", "mklink", "/J", link, source]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    tty.debug(out.decode())
    if proc.returncode != 0:
        err = err.decode()
        tty.error(err)
        raise SymlinkError("Make junction command returned a non-zero return code.", err)

    return False

def _windows_create_hard_link(path: str, link: str):
    """Duly verify that the path and link are eligible to create a hard
    link, then create the hard link.
    """
    if sys.platform != "win32":
        raise SymlinkError("windows_create_hard_link method can't be used on non-Windows OS.")
    elif not os.path.exists(path):
        raise SymlinkError(f"File path {path} does not exist. Cannot create hard link.")
    elif os.path.lexists(link):
        raise SymlinkError(f"Link path ({link}) already exists. Cannot create hard link.")
    elif not os.path.isfile(path):
        raise SymlinkError(f"File path ({link}) is not a file. Cannot create hard link.")
    else:
        tty.debug(f"Creating hard link {link} pointing to {path}")
        CreateHardLink(link, path)


def readlink(path: str):
    """Spack override of os.readlink that works cross-platform"""
    if _windows_is_hardlink(path):
        return _windows_read_hard_link(path)
    elif _windows_is_junction(path):
        return _windows_read_junction(path)
    else:
        return os.readlink(path)


def _windows_read_hard_link(link: str) -> str:
    """Find all of the files that point to the same inode as the link"""
    if sys.platform != "win32":
        raise SymlinkError("Can't read hard link on non-Windows OS.")
    link = os.path.abspath(link)
    fsutil_cmd = ["fsutil", "hardlink", "list", link]
    proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    out, err = proc.communicate()
    if proc.returncode != 0:
        raise SymlinkError(f"An error occurred while reading hard link: {err.decode()}")

    # fsutil response does not include the drive name, so append it back to each linked file.
    drive, link_tail = os.path.splitdrive(os.path.abspath(link))
    links = set([os.path.join(drive, p) for p in out.decode().splitlines()])
    links.remove(link)
    if len(links) == 1:
        return links.pop()
    elif len(links) > 1:
        # TODO: How best to handle the case where 3 or more paths point to a single inode?
        raise SymlinkError(f"Found multiple paths pointing to the same inode {links}")
    else:
        raise SymlinkError("Cannot determine hard link source path.")


def _windows_read_junction(link: str):
    """Find the path that a junction points to."""
    if sys.platform != "win32":
        raise SymlinkError("Can't read junction on non-Windows OS.")

    link = os.path.abspath(link)
    link_basename = os.path.basename(link)
    link_parent = os.path.dirname(link)
    fsutil_cmd = ["dir", "/a:l", link_parent]
    proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    out, err = proc.communicate()
    if proc.returncode != 0:
        raise SymlinkError(f"An error occurred while reading junction: {err.decode()}")
    matches = re.search(rf"<JUNCTION>\s+{link_basename} \[(.*)]", out.decode())
    if matches:
        return matches.group(1)
    else:
        raise SymlinkError("Could not find junction path.")


@system_path_filter
def resolve_link_target_relative_to_the_link(link):
    """
    os.path.isdir uses os.path.exists, which for links will check
    the existence of the link target. If the link target is relative to
    the link, we need to construct a pathname that is valid from
    our cwd (which may not be the same as the link's directory)
    """
    target = readlink(link)
    if os.path.isabs(target):
        return target
    link_dir = os.path.dirname(os.path.abspath(link))
    return os.path.join(link_dir, target)


class SymlinkError(SpackError):
    """Exception class for errors raised while creating symlinks,
    junctions and hard links
    """

@@ -12,6 +12,7 @@
import traceback
from datetime import datetime
from sys import platform as _platform
from typing import NoReturn

if _platform != "win32":
    import fcntl
@@ -244,7 +245,7 @@ def warn(message, *args, **kwargs):
    info("Warning: " + str(message), *args, **kwargs)


def die(message, *args, **kwargs):
def die(message, *args, **kwargs) -> NoReturn:
    kwargs.setdefault("countback", 4)
    error(message, *args, **kwargs)
    sys.exit(1)

@@ -780,7 +780,7 @@ def __enter__(self):
            raise RuntimeError("file argument must be set by __init__ ")

        # Open both write and reading on logfile
        if type(self.logfile) == io.StringIO:
        if isinstance(self.logfile, io.StringIO):
            self._ioflag = True
            # cannot have two streams on tempfile, so we must make our own
            sys.stdout = self.logfile

@@ -286,7 +286,7 @@ def _check_build_test_callbacks(pkgs, error_cls):
    """Ensure stand-alone test method is not included in build-time callbacks"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)

        # TODO (post-34236): "test*"->"test_*" once remove deprecated methods
@@ -312,7 +312,7 @@ def _check_patch_urls(pkgs, error_cls):

    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        for condition, patches in pkg_cls.patches.items():
            for patch in patches:
                if not isinstance(patch, spack.patch.UrlPatch):
@@ -342,7 +342,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
    errors = []
    for pkg_name in pkgs:
        name_definitions = collections.defaultdict(list)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

        for cls_item in inspect.getmro(pkg_cls):
            for name in RESERVED_NAMES:
@@ -383,7 +383,7 @@ def _ensure_packages_are_pickeleable(pkgs, error_cls):
    """Ensure that package objects are pickleable"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg = pkg_cls(spack.spec.Spec(pkg_name))
        try:
            pickle.dumps(pkg)
@@ -424,7 +424,7 @@ def _ensure_all_versions_can_produce_a_fetcher(pkgs, error_cls):
    """Ensure all versions in a package can produce a fetcher"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg = pkg_cls(spack.spec.Spec(pkg_name))
        try:
            spack.fetch_strategy.check_pkg_attributes(pkg)
@@ -449,7 +449,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
    ]
    for pkg_name in pkgs:
        details = []
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        with open(filename, "r") as package_file:
            for i, line in enumerate(package_file):
                pattern = next((r for r in fixme_regexes if r.search(line)), None)
@@ -461,7 +461,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
            error_msg = "Package '{}' contains boilerplate that need to be removed"
            errors.append(error_cls(error_msg.format(pkg_name), details))

        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        if not pkg_cls.__doc__:
            error_msg = "Package '{}' miss a docstring"
            errors.append(error_cls(error_msg.format(pkg_name), []))
@@ -474,7 +474,7 @@ def _ensure_all_packages_use_sha256_checksums(pkgs, error_cls):
    """Ensure no packages use md5 checksums"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        if pkg_cls.manual_download:
            continue

@@ -511,7 +511,7 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
    """Ensure that methods modifying the build environment are ported to builder classes."""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        buildsystem_variant, _ = pkg_cls.variants["build_system"]
        buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
        builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]
@@ -538,7 +538,7 @@ def _linting_package_file(pkgs, error_cls):
    """Check for correctness of links"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

        # Does the homepage have http, and if so, does https work?
        if pkg_cls.homepage.startswith("http://"):
@@ -562,7 +562,7 @@ def _unknown_variants_in_directives(pkgs, error_cls):
    """Report unknown or wrong variants in directives for this package"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

        # Check "conflicts" directive
        for conflict, triggers in pkg_cls.conflicts.items():
@@ -628,15 +628,15 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
    """Report unknown dependencies and wrong variants for dependencies"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        for dependency_name, dependency_data in pkg_cls.dependencies.items():
            # No need to analyze virtual packages
            if spack.repo.path.is_virtual(dependency_name):
            if spack.repo.PATH.is_virtual(dependency_name):
                continue

            try:
                dependency_pkg_cls = spack.repo.path.get_pkg_class(dependency_name)
                dependency_pkg_cls = spack.repo.PATH.get_pkg_class(dependency_name)
            except spack.repo.UnknownPackageError:
                # This dependency is completely missing, so report
                # and continue the analysis
@@ -675,7 +675,7 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
    """Ensures that variant defaults are present and parsable from cli"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        for variant_name, entry in pkg_cls.variants.items():
            variant, _ = entry
            default_is_parsable = (
@@ -709,18 +709,33 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
    return errors


@package_directives
def _ensure_variants_have_descriptions(pkgs, error_cls):
    """Ensures that all variants have a description."""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        for variant_name, entry in pkg_cls.variants.items():
            variant, _ = entry
            if not variant.description:
                error_msg = "Variant '{}' in package '{}' is missing a description"
                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))

    return errors


@package_directives
def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
    """Report if version constraints used in directives are not satisfiable"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        dependencies_to_check = []
        for dependency_name, dependency_data in pkg_cls.dependencies.items():
            # Skip virtual dependencies for the time being, check on
            # their versions can be added later
            if spack.repo.path.is_virtual(dependency_name):
            if spack.repo.PATH.is_virtual(dependency_name):
                continue

            dependencies_to_check.extend([edge.spec for edge in dependency_data.values()])
@@ -729,7 +744,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
    for s in dependencies_to_check:
        dependency_pkg_cls = None
        try:
            dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
            dependency_pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
            # Some packages have hacks that might cause failures on some platform
            # Allow to explicitly set conditions to skip version checks in that case
            skip_conditions = getattr(dependency_pkg_cls, "skip_version_audit", [])
@@ -772,7 +787,7 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
    except variant_exceptions as e:
        summary = pkg.name + ': wrong variant in "{0}" directive'
        summary = summary.format(directive)
        filename = spack.repo.path.filename_for_package_name(pkg.name)
        filename = spack.repo.PATH.filename_for_package_name(pkg.name)

        error_msg = str(e).strip()
        if isinstance(e, KeyError):

@@ -9,7 +9,6 @@
import io
import itertools
import json
import multiprocessing.pool
import os
import re
import shutil
@@ -49,9 +48,11 @@
import spack.util.gpg
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.timer as timer
import spack.util.url as url_util
import spack.util.web as web_util
from spack.caches import misc_cache_location
from spack.package_prefs import get_package_dir_permissions, get_package_group
from spack.relocate_text import utf8_paths_to_single_binary_regex
from spack.spec import Spec
from spack.stage import Stage
@@ -875,32 +876,18 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
        db: A spack database used for adding specs and then writing the index.
        temp_dir (str): Location to write index.json and hash for pushing
        concurrency (int): Number of parallel processes to use when fetching

    Return:
        None
    """
    for file in file_list:
        contents = read_method(file)
        # Need full spec.json name or this gets confused with index.json.
        if file.endswith(".json.sig"):
            specfile_json = Spec.extract_json_from_clearsig(contents)
            fetched_spec = Spec.from_dict(specfile_json)
        elif file.endswith(".json"):
            fetched_spec = Spec.from_json(contents)
        else:
            continue

    def _fetch_spec_from_mirror(spec_url):
        spec_file_contents = read_method(spec_url)

        if spec_file_contents:
            # Need full spec.json name or this gets confused with index.json.
            if spec_url.endswith(".json.sig"):
                specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
                return Spec.from_dict(specfile_json)
            if spec_url.endswith(".json"):
                return Spec.from_json(spec_file_contents)

    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
    try:
        fetched_specs = tp.map(
            llnl.util.lang.star(_fetch_spec_from_mirror), [(f,) for f in file_list]
        )
    finally:
        tp.terminate()
        tp.join()
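
    # Note (added for clarity): llnl.util.lang.star adapts a function of n
    # positional arguments to accept a single argument tuple, so
    # star(_fetch_spec_from_mirror)((url,)) == _fetch_spec_from_mirror(url),
    # matching the (f,) tuples handed to ThreadPool.map above.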

    for fetched_spec in fetched_specs:
        db.add(fetched_spec, None)
        db.mark(fetched_spec, "in_buildcache", True)

@@ -1312,15 +1299,7 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
    else:
        raise ValueError("{0} not a valid spec file type".format(spec_file))
    spec_dict["buildcache_layout_version"] = 1
    bchecksum = {}
    bchecksum["hash_algorithm"] = "sha256"
    bchecksum["hash"] = checksum
    spec_dict["binary_cache_checksum"] = bchecksum
    # Add original install prefix relative to layout root to spec.json.
    # This will be used to determine if the directory layout has changed.
    buildinfo = {}
    buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.STORE.layout.root)
    spec_dict["buildinfo"] = buildinfo
    spec_dict["binary_cache_checksum"] = {"hash_algorithm": "sha256", "hash": checksum}

    with open(specfile_path, "w") as outfile:
        # Note: when using gpg clear sign, we need to avoid long lines (19995 chars).
@@ -1799,16 +1778,46 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
    return tarfile_path


def extract_tarball(spec, download_result, unsigned=False, force=False):
def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
    """Strip the top-level directory `prefix` from the member names in a tarfile."""
    # Including trailing /, otherwise we end up with absolute paths.
    regex = re.compile(re.escape(prefix) + "/*")

    # Remove the top-level directory from the member (link)names.
    # Note: when a tarfile is created, relative in-prefix symlinks are
    # expanded to matching member names of tarfile entries. So, we have
    # to ensure that those are updated too.
    # Absolute symlinks are copied verbatim -- relocation should take care of
    # them.
    for m in tar.getmembers():
        result = regex.match(m.name)
        assert result is not None
        m.name = m.name[result.end() :]
        if m.linkname:
            result = regex.match(m.linkname)
            if result:
                m.linkname = m.linkname[result.end() :]


def extract_tarball(spec, download_result, unsigned=False, force=False, timer=timer.NULL_TIMER):
    """
    extract binary tarball for given package into install area
    """
    timer.start("extract")
    if os.path.exists(spec.prefix):
        if force:
            shutil.rmtree(spec.prefix)
        else:
            raise NoOverwriteException(str(spec.prefix))

    # Create the install prefix
    fsys.mkdirp(
        spec.prefix,
        mode=get_package_dir_permissions(spec),
        group=get_package_group(spec),
        default_perms="parents",
    )

    specfile_path = download_result["specfile_stage"].save_filename

    with open(specfile_path, "r") as inputfile:
@@ -1862,42 +1871,25 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
            tarfile_path, size, contents, "sha256", expected, local_checksum
        )

    new_relative_prefix = str(os.path.relpath(spec.prefix, spack.store.STORE.layout.root))
    # if the original relative prefix is in the spec file use it
    buildinfo = spec_dict.get("buildinfo", {})
    old_relative_prefix = buildinfo.get("relative_prefix", new_relative_prefix)
    rel = buildinfo.get("relative_rpaths")
    info = "old relative prefix %s\nnew relative prefix %s\nrelative rpaths %s"
    tty.debug(info % (old_relative_prefix, new_relative_prefix, rel), level=2)

    # Extract the tarball into the store root, presumably on the same filesystem.
    # The directory created is the base directory name of the old prefix.
    # Moving the old prefix name to the new prefix location should preserve
    # hard links and symbolic links.
    extract_tmp = os.path.join(spack.store.STORE.layout.root, ".tmp")
    mkdirp(extract_tmp)
    extracted_dir = os.path.join(extract_tmp, old_relative_prefix.split(os.path.sep)[-1])

    with closing(tarfile.open(tarfile_path, "r")) as tar:
        try:
            tar.extractall(path=extract_tmp)
        except Exception as e:
            _delete_staged_downloads(download_result)
            shutil.rmtree(extracted_dir)
            raise e
    try:
        shutil.move(extracted_dir, spec.prefix)
    except Exception as e:
        with closing(tarfile.open(tarfile_path, "r")) as tar:
            # Remove install prefix from tarfile to extract directly into spec.prefix
            _tar_strip_component(tar, prefix=_ensure_common_prefix(tar))
            tar.extractall(path=spec.prefix)
    except Exception:
        shutil.rmtree(spec.prefix, ignore_errors=True)
        _delete_staged_downloads(download_result)
        shutil.rmtree(extracted_dir)
        raise e
        raise

    os.remove(tarfile_path)
    os.remove(specfile_path)
    timer.stop("extract")

    timer.start("relocate")
    try:
        relocate_package(spec)
    except Exception as e:
        shutil.rmtree(spec.prefix)
        shutil.rmtree(spec.prefix, ignore_errors=True)
        raise e
    else:
        manifest_file = os.path.join(
@@ -1910,10 +1902,28 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
            tty.warn("No manifest file in tarball for spec %s" % spec_id)
    finally:
        if tmpdir:
            shutil.rmtree(tmpdir)
            shutil.rmtree(tmpdir, ignore_errors=True)
        if os.path.exists(filename):
            os.remove(filename)
        _delete_staged_downloads(download_result)
    timer.stop("relocate")


def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
    # Get the shortest length directory.
    common_prefix = min((e.name for e in tar.getmembers() if e.isdir()), key=len, default=None)

    if common_prefix is None:
        raise ValueError("Tarball does not contain a common prefix")

    # Validate that each file starts with the prefix
    for member in tar.getmembers():
        if not member.name.startswith(common_prefix):
            raise ValueError(
                f"Tarball contains file {member.name} outside of prefix {common_prefix}"
            )

    return common_prefix


def install_root_node(spec, unsigned=False, force=False, sha256=None):
@@ -2363,22 +2373,12 @@ def __init__(self, all_architectures):

        self.possible_specs = specs

    def __call__(self, spec, **kwargs):
    def __call__(self, spec: Spec, **kwargs):
        """
        Args:
            spec (str): The spec being searched for in its string representation or hash.
            spec: The spec being searched for
        """
        matches = []
        if spec.startswith("/"):
            # Matching a DAG hash
            query_hash = spec.replace("/", "")
            for candidate_spec in self.possible_specs:
                if candidate_spec.dag_hash().startswith(query_hash):
                    matches.append(candidate_spec)
        else:
            # Matching a spec constraint
            matches = [s for s in self.possible_specs if s.satisfies(spec)]
        return matches
        return [s for s in self.possible_specs if s.satisfies(spec)]


class FetchIndexError(Exception):

@@ -124,9 +124,9 @@ def _read_and_sanitize_configuration() -> Dict[str, Any]:
def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
    tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
    config_scopes: MutableSequence["spack.config.ConfigScope"] = [
        spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)
        spack.config.InternalConfigScope("_builtin", spack.config.CONFIG_DEFAULTS)
    ]
    configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
    configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
    for name, path in configuration_paths:
        platform = spack.platforms.host().name
        platform_scope = spack.config.ConfigScope(

@@ -476,15 +476,22 @@ def ensure_executables_in_path_or_raise(
def _add_externals_if_missing() -> None:
    search_list = [
        # clingo
        spack.repo.path.get_pkg_class("cmake"),
        spack.repo.path.get_pkg_class("bison"),
        "cmake",
        "bison",
        # GnuPG
        spack.repo.path.get_pkg_class("gawk"),
        "gawk",
        # develop deps
        "git",
    ]
    if IS_WINDOWS:
        search_list.append(spack.repo.path.get_pkg_class("winbison"))
    detected_packages = spack.detection.by_executable(search_list)
    spack.detection.update_configuration(detected_packages, scope="bootstrap")
        search_list.append("winbison")
    externals = spack.detection.by_path(search_list)
    # System git is typically deprecated, so mark as non-buildable to force it as external
    non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
    spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)
    spack.detection.update_configuration(
        non_buildable_externals, scope="bootstrap", buildable=False
    )


def clingo_root_spec() -> str:

@@ -15,14 +15,15 @@

from llnl.util import tty

import spack.build_environment
import spack.environment
import spack.tengine
import spack.util.cpus
import spack.util.executable
from spack.environment import depfile

from ._common import _root_spec
from .config import root_path, spec_for_current_python, store_path
from .core import _add_externals_if_missing


class BootstrapEnvironment(spack.environment.Environment):
@@ -136,7 +137,7 @@ def _install_with_depfile(self) -> None:
            "-C",
            str(self.environment_root()),
            "-j",
            str(spack.build_environment.determine_number_of_jobs(parallel=True)),
            str(spack.util.cpus.determine_number_of_jobs(parallel=True)),
            **kwargs,
        )

@@ -185,6 +186,7 @@ def pytest_root_spec() -> str:

def ensure_environment_dependencies() -> None:
    """Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
    _add_externals_if_missing()
    with BootstrapEnvironment() as env:
        env.update_installations()
        env.update_syspath_and_environ()

@@ -68,7 +68,7 @@
from spack.error import NoHeadersError, NoLibrariesError
from spack.install_test import spack_install_test_log
from spack.installer import InstallError
from spack.util.cpus import cpus_available
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import (
    SYSTEM_DIRS,
    EnvironmentModifications,
@@ -537,39 +537,6 @@ def update_compiler_args_for_dep(dep):
    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))


def determine_number_of_jobs(
    parallel=False, command_line=None, config_default=None, max_cpus=None
):
    """
    Packages that require sequential builds need 1 job. Otherwise we use the
    number of jobs set on the command line. If not set, then we use the config
    defaults (which is usually set through the builtin config scope), but we
    cap to the number of CPUs available to avoid oversubscription.

    Parameters:
        parallel (bool or None): true when package supports parallel builds
        command_line (int or None): command line override
        config_default (int or None): config default number of jobs
        max_cpus (int or None): maximum number of CPUs available. When None, this
            value is automatically determined.
    """
    if not parallel:
        return 1

    if command_line is None and "command_line" in spack.config.scopes():
        command_line = spack.config.get("config:build_jobs", scope="command_line")

    if command_line is not None:
        return command_line

    max_cpus = max_cpus or cpus_available()

    # in some rare cases _builtin config may not be set, so default to max 16
    config_default = config_default or spack.config.get("config:build_jobs", 16)

    return min(max_cpus, config_default)


def set_module_variables_for_package(pkg):
    """Populate the Python module of a package with some useful global names.
    This makes things easier for package writers.
@@ -1027,7 +994,7 @@ def get_cmake_prefix_path(pkg):
|
||||
|
||||
|
||||
def _setup_pkg_and_run(
|
||||
serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
|
||||
serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
|
||||
):
|
||||
context = kwargs.get("context", "build")
|
||||
|
||||
@@ -1048,12 +1015,12 @@ def _setup_pkg_and_run(
|
||||
pkg, dirty=kwargs.get("dirty", False), context=context
|
||||
)
|
||||
return_value = function(pkg, kwargs)
|
||||
child_pipe.send(return_value)
|
||||
write_pipe.send(return_value)
|
||||
|
||||
except StopPhase as e:
|
||||
# Do not create a full ChildError from this, it's not an error
|
||||
# it's a control statement.
|
||||
child_pipe.send(e)
|
||||
write_pipe.send(e)
|
||||
except BaseException:
|
||||
# catch ANYTHING that goes wrong in the child process
|
||||
exc_type, exc, tb = sys.exc_info()
|
||||
@@ -1102,10 +1069,10 @@ def _setup_pkg_and_run(
             context,
             package_context,
         )
-        child_pipe.send(ce)
+        write_pipe.send(ce)

     finally:
-        child_pipe.close()
+        write_pipe.close()
         if input_multiprocess_fd is not None:
             input_multiprocess_fd.close()

@@ -1149,7 +1116,7 @@ def child_fun():
     For more information on `multiprocessing` child process creation
     mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
     """
-    parent_pipe, child_pipe = multiprocessing.Pipe()
+    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
     input_multiprocess_fd = None
     jobserver_fd1 = None
     jobserver_fd2 = None
@@ -1174,7 +1141,7 @@ def child_fun():
                 serialized_pkg,
                 function,
                 kwargs,
-                child_pipe,
+                write_pipe,
                 input_multiprocess_fd,
                 jobserver_fd1,
                 jobserver_fd2,
@@ -1183,6 +1150,12 @@ def child_fun():

         p.start()

+        # We close the writable end of the pipe now to be sure that p is the
+        # only process which owns a handle for it. This ensures that when p
+        # closes its handle for the writable end, read_pipe.recv() will
+        # promptly report the readable end as being ready.
+        write_pipe.close()
+
     except InstallError as e:
         e.pkg = pkg
         raise
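The comment added above is the crux of the new pipe handling: once the parent drops its handle on the writable end, a child that dies without sending shows up as `EOFError` on the reader instead of a hang. A standalone sketch of that hand-off (plain `multiprocessing`, not Spack's API):

```python
import multiprocessing

def child(write_pipe):
    write_pipe.send("done")
    write_pipe.close()

if __name__ == "__main__":
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    p = multiprocessing.Process(target=child, args=(write_pipe,))
    p.start()
    # Drop the parent's copy of the writable end: the child now owns the
    # only handle, so recv() unblocks as soon as the child exits.
    write_pipe.close()
    try:
        print(read_pipe.recv())  # "done", or EOFError if the child died early
    except EOFError:
        p.join()
        print(f"child sent nothing (exitcode {p.exitcode})")
    p.join()
```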
@@ -1192,7 +1165,16 @@ def child_fun():
         if input_multiprocess_fd is not None:
             input_multiprocess_fd.close()

-    child_result = parent_pipe.recv()
+    def exitcode_msg(p):
+        typ = "exit" if p.exitcode >= 0 else "signal"
+        return f"{typ} {abs(p.exitcode)}"
+
+    try:
+        child_result = read_pipe.recv()
+    except EOFError:
+        p.join()
+        raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
+
     p.join()

     # If returns a StopPhase, raise it
@@ -1212,6 +1194,10 @@ def child_fun():
         child_result.print_context()
         raise child_result

+    # Fallback. Usually caught beforehand in EOFError above.
+    if p.exitcode != 0:
+        raise InstallError(f"The process failed unexpectedly ({exitcode_msg(p)})")
+
     return child_result

@@ -1256,9 +1242,8 @@ def make_stack(tb, stack=None):
         func = getattr(obj, tb.tb_frame.f_code.co_name, "")
         if func:
             typename, *_ = func.__qualname__.partition(".")
-
-        if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
-            break
+            if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
+                break
     else:
         return None

@@ -55,7 +55,8 @@ def flags_to_build_system_args(self, flags):
     setattr(self, "configure_flag_args", [])
     for flag, values in flags.items():
         if values:
-            values_str = "{0}={1}".format(flag.upper(), " ".join(values))
+            var_name = "LIBS" if flag == "ldlibs" else flag.upper()
+            values_str = "{0}={1}".format(var_name, " ".join(values))
             self.configure_flag_args.append(values_str)
     # Spack's fflags are meant for both F77 and FC, therefore we
     # additionally set FCFLAGS if required.
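The two added lines reroute `ldlibs` into the `LIBS` configure variable while every other flag keeps its upper-cased name. A quick illustration of what the loop now emits, with made-up flag values:

```python
# Plain-Python sketch of the loop above; the flags dict is illustrative.
flags = {"cflags": ["-O2"], "ldlibs": ["-lz"], "ldflags": []}
configure_flag_args = []
for flag, values in flags.items():
    if values:  # empty lists (ldflags here) are skipped
        var_name = "LIBS" if flag == "ldlibs" else flag.upper()
        configure_flag_args.append("{0}={1}".format(var_name, " ".join(values)))
print(configure_flag_args)  # ['CFLAGS=-O2', 'LIBS=-lz']
```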
@@ -162,17 +162,6 @@ def initconfig_compiler_entries(self):
             libs_string = libs_format_string.format(lang)
             entries.append(cmake_cache_string(libs_string, libs_flags))

-        # Set the generator in the cached config
-        if self.spec.satisfies("generator=make"):
-            entries.append(cmake_cache_string("CMAKE_GENERATOR", "Unix Makefiles"))
-        if self.spec.satisfies("generator=ninja"):
-            entries.append(cmake_cache_string("CMAKE_GENERATOR", "Ninja"))
-            entries.append(
-                cmake_cache_string(
-                    "CMAKE_MAKE_PROGRAM", "{0}/ninja".format(spec["ninja"].prefix.bin)
-                )
-            )
-
         return entries

     def initconfig_mpi_entries(self):

@@ -248,7 +248,8 @@ def std_cmake_args(self):
     @staticmethod
     def std_args(pkg, generator=None):
         """Computes the standard cmake arguments for a generic package"""
-        generator = generator or "Unix Makefiles"
+        default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
+        generator = generator or default_generator
         valid_primary_generators = ["Unix Makefiles", "Ninja"]
         primary_generator = _extract_primary_generator(generator)
         if primary_generator not in valid_primary_generators:
@@ -273,7 +274,6 @@ def std_args(pkg, generator=None):
             generator,
             define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
             define("CMAKE_BUILD_TYPE", build_type),
-            define("BUILD_TESTING", pkg.run_tests),
         ]

         # CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
@@ -450,7 +450,6 @@ def cmake_args(self):

         * CMAKE_INSTALL_PREFIX
         * CMAKE_BUILD_TYPE
-        * BUILD_TESTING

         which will be set automatically.
         """

@@ -154,7 +154,7 @@ def cuda_flags(arch_list):
     conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
     conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
     conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
-    conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10 target=x86_64:")
+    conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10.0 target=x86_64:")
     conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
     conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
     conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")

@@ -209,5 +209,5 @@ def install(self, pkg, spec, prefix):
     def check(self):
         """Search Meson-generated files for the target ``test`` and run it if found."""
         with fs.working_dir(self.build_directory):
-            self._if_ninja_target_execute("test")
-            self._if_ninja_target_execute("check")
+            self.pkg._if_ninja_target_execute("test")
+            self.pkg._if_ninja_target_execute("check")

@@ -95,7 +95,7 @@ def makefile_root(self):
         return self.stage.source_path

     @property
-    def nmakefile_name(self):
+    def makefile_name(self):
         """Name of the current makefile. This is currently an empty value.
         If a project defines this value, it will be used with the /f argument
         to provide nmake an explicit makefile. This is useful in scenarios where
@@ -126,8 +126,8 @@ def build(self, pkg, spec, prefix):
         """Run "nmake" on the build targets specified by the builder."""
         opts = self.std_nmake_args
         opts += self.nmake_args()
-        if self.nmakefile_name:
-            opts.append("/f {}".format(self.nmakefile_name))
+        if self.makefile_name:
+            opts.append("/F{}".format(self.makefile_name))
         with fs.working_dir(self.build_directory):
             inspect.getmodule(self.pkg).nmake(
                 *opts, *self.build_targets, ignore_quotes=self.ignore_quotes
@@ -139,8 +139,8 @@ def install(self, pkg, spec, prefix):
         opts = self.std_nmake_args
         opts += self.nmake_args()
         opts += self.nmake_install_args()
-        if self.nmakefile_name:
-            opts.append("/f {}".format(self.nmakefile_name))
+        if self.makefile_name:
+            opts.append("/F{}".format(self.makefile_name))
         opts.append(self.define("PREFIX", prefix))
         with fs.working_dir(self.build_directory):
             inspect.getmodule(self.pkg).nmake(

@@ -30,7 +30,7 @@


 class PythonExtension(spack.package_base.PackageBase):
-    maintainers("adamjstewart", "pradyunsg")
+    maintainers("adamjstewart")

     @property
     def import_modules(self):
@@ -201,7 +201,7 @@ def update_external_dependencies(self, extendee_spec=None):
         else:
             python = self.get_external_python_for_prefix()
             if not python.concrete:
-                repo = spack.repo.path.repo_for_pkg(python)
+                repo = spack.repo.PATH.repo_for_pkg(python)
                 python.namespace = repo.namespace

                 # Ensure architecture information is present
@@ -300,8 +300,8 @@ def get_external_python_for_prefix(self):
         if python_externals_configured:
             return python_externals_configured[0]

-        python_externals_detection = spack.detection.by_executable(
-            [spack.repo.path.get_pkg_class("python")], path_hints=[self.spec.external_path]
+        python_externals_detection = spack.detection.by_path(
+            ["python"], path_hints=[self.spec.external_path]
         )

         python_externals_detected = [

@@ -28,7 +28,7 @@ class QMakePackage(spack.package_base.PackageBase):

     build_system("qmake")

-    depends_on("qt", type="build", when="build_system=qmake")
+    depends_on("qmake", type="build", when="build_system=qmake")


 @spack.builder.builder("qmake")

@@ -10,9 +10,10 @@
 import llnl.util.tty as tty

 import spack.builder
-from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
+from spack.build_environment import SPACK_NO_PARALLEL_MAKE
 from spack.directives import build_system, extends, maintainers
 from spack.package_base import PackageBase
+from spack.util.cpus import determine_number_of_jobs
 from spack.util.environment import env_flag
 from spack.util.executable import Executable, ProcessError

@@ -92,7 +93,7 @@ def install(self, pkg, spec, prefix):
             "--copy",
             "-i",
             "-j",
-            str(determine_number_of_jobs(parallel)),
+            str(determine_number_of_jobs(parallel=parallel)),
             "--",
             os.getcwd(),
         ]

@@ -140,8 +140,6 @@ class ROCmPackage(PackageBase):
     depends_on("hsa-rocr-dev", when="+rocm")
     depends_on("hip +rocm", when="+rocm")

-    conflicts("^blt@:0.3.6", when="+rocm")
-
     # need amd gpu type for rocm builds
     conflicts("amdgpu_target=none", when="+rocm")

@@ -7,13 +7,14 @@
 import re

 import llnl.util.tty as tty
-from llnl.util.filesystem import find, join_path, working_dir
+from llnl.util.filesystem import find, working_dir

 import spack.builder
+import spack.install_test
 import spack.package_base
 from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
+from spack.util.executable import Executable

 from ._checks import BaseBuilder, execute_install_time_tests

@@ -39,9 +40,8 @@ class SIPPackage(spack.package_base.PackageBase):
     build_system("sip")

     with when("build_system=sip"):
-        extends("python")
-        depends_on("qt")
-        depends_on("py-sip")
+        extends("python", type=("build", "link", "run"))
+        depends_on("py-sip", type="build")

     @property
     def import_modules(self):
@@ -113,13 +113,13 @@ class SIPBuilder(BaseBuilder):
     * install

     The configure phase already adds a set of default flags. To see more
-    options, run ``python configure.py --help``.
+    options, run ``sip-build --help``.
     """

     phases = ("configure", "build", "install")

     #: Names associated with package methods in the old build-system format
-    legacy_methods = ("configure_file", "configure_args", "build_args", "install_args")
+    legacy_methods = ("configure_args", "build_args", "install_args")

     #: Names associated with package attributes in the old build-system format
     legacy_attributes = (
@@ -130,34 +130,17 @@ class SIPBuilder(BaseBuilder):
         "build_directory",
     )

-    def configure_file(self):
-        """Returns the name of the configure file to use."""
-        return "configure.py"
+    build_directory = "build"

     def configure(self, pkg, spec, prefix):
         """Configure the package."""
-        configure = self.configure_file()
-
-        args = self.configure_args()
+        # https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
+        args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
+        args.extend(self.configure_args())

-        args.extend(
-            [
-                "--verbose",
-                "--confirm-license",
-                "--qmake",
-                spec["qt"].prefix.bin.qmake,
-                "--sip",
-                spec["py-sip"].prefix.bin.sip,
-                "--sip-incdir",
-                join_path(spec["py-sip"].prefix, spec["python"].package.include),
-                "--bindir",
-                prefix.bin,
-                "--destdir",
-                inspect.getmodule(self.pkg).python_platlib,
-            ]
-        )
-
-        self.pkg.python(configure, *args)
+        sip_build = Executable(spec["py-sip"].prefix.bin.join("sip-build"))
+        sip_build(*args)

     def configure_args(self):
         """Arguments to pass to configure."""
@@ -167,7 +150,8 @@ def build(self, pkg, spec, prefix):
         """Build the package."""
         args = self.build_args()

-        inspect.getmodule(self.pkg).make(*args)
+        with working_dir(self.build_directory):
+            inspect.getmodule(self.pkg).make(*args)

     def build_args(self):
         """Arguments to pass to build."""
@@ -177,21 +161,11 @@ def install(self, pkg, spec, prefix):
         """Install the package."""
         args = self.install_args()

-        inspect.getmodule(self.pkg).make("install", parallel=False, *args)
+        with working_dir(self.build_directory):
+            inspect.getmodule(self.pkg).make("install", *args)

     def install_args(self):
         """Arguments to pass to install."""
         return []

     spack.builder.run_after("install")(execute_install_time_tests)
-
-    @spack.builder.run_after("install")
-    def extend_path_setup(self):
-        # See github issue #14121 and PR #15297
-        module = self.pkg.spec["py-sip"].variants["module"].value
-        if module != "sip":
-            module = module.split(".")[0]
-            with working_dir(inspect.getmodule(self.pkg).python_platlib):
-                with open(os.path.join(module, "__init__.py"), "a") as f:
-                    f.write("from pkgutil import extend_path\n")
-                    f.write("__path__ = extend_path(__path__, __name__)\n")

@@ -20,9 +20,9 @@


 def misc_cache_location():
-    """The ``misc_cache`` is Spack's cache for small data.
+    """The ``MISC_CACHE`` is Spack's cache for small data.

-    Currently the ``misc_cache`` stores indexes for virtual dependency
+    Currently the ``MISC_CACHE`` stores indexes for virtual dependency
     providers and for which packages provide which tags.
     """
     path = spack.config.get("config:misc_cache", spack.paths.default_misc_cache_path)
@@ -35,7 +35,7 @@ def _misc_cache():


 #: Spack's cache for small data
-misc_cache: Union[
+MISC_CACHE: Union[
     spack.util.file_cache.FileCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_misc_cache)

@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):


 #: Spack's local cache for downloaded source archives
-fetch_cache: Union[
+FETCH_CACHE: Union[
     spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_fetch_cache)

@@ -535,7 +535,7 @@ def __job_name(name, suffix=""):
    """Compute the name of a named job with appropriate suffix.
    Valid suffixes are either '-remove' or empty string or None
    """
-    assert type(name) == str
+    assert isinstance(name, str)

    jname = name
    if suffix:
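Several hunks in this range swap `type(x) == T` comparisons for `isinstance`. The difference matters for subclasses; a generic illustration (not Spack code):

```python
class JobName(str):
    """A str subclass, e.g. a tagged job name (hypothetical)."""

name = JobName("build-gcc")
print(type(name) == str)      # False: exact-type comparison rejects subclasses
print(isinstance(name, str))  # True: isinstance accepts str and its subclasses
```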
@@ -885,7 +885,7 @@ def generate_gitlab_ci_yaml(
     cli_scopes = [
         os.path.relpath(s.path, concrete_env_dir)
         for s in cfg.scopes().values()
-        if type(s) == cfg.ImmutableConfigScope
+        if isinstance(s, cfg.ImmutableConfigScope)
         and s.path not in env_includes
         and os.path.exists(s.path)
     ]
@@ -1504,7 +1504,7 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
         return

     try:
-        pkg_cls = spack.repo.path.get_pkg_class(job_spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
         job_pkg = pkg_cls(job_spec)
         tty.debug("job package: {0}".format(job_pkg))
     except AssertionError:
@@ -1690,7 +1690,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
     return True


-def reproduce_ci_job(url, work_dir):
+def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
     """Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
     attempt to setup an environment in which the failure can be reproduced
     locally. This entails the following:
@@ -1706,6 +1706,11 @@ def reproduce_ci_job(url, work_dir):
     work_dir = os.path.realpath(work_dir)
     download_and_extract_artifacts(url, work_dir)

+    gpg_path = None
+    if gpg_url:
+        gpg_path = web_util.fetch_url_text(gpg_url, dest_dir=os.path.join(work_dir, "_pgp"))
+        rel_gpg_path = gpg_path.replace(work_dir, "").lstrip(os.path.sep)
+
     lock_file = fs.find(work_dir, "spack.lock")[0]
     repro_lock_dir = os.path.dirname(lock_file)

@@ -1798,60 +1803,63 @@ def reproduce_ci_job(url, work_dir):
     # more faithful reproducer if everything appears to run in the same
     # absolute path used during the CI build.
     mount_as_dir = "/work"
+    mounted_workdir = "/reproducer"
     if repro_details:
         mount_as_dir = repro_details["ci_project_dir"]
         mounted_repro_dir = os.path.join(mount_as_dir, rel_repro_dir)
         mounted_env_dir = os.path.join(mount_as_dir, relative_concrete_env_dir)
+        if gpg_path:
+            mounted_gpg_path = os.path.join(mounted_workdir, rel_gpg_path)

-    # We will also try to clone spack from your local checkout and
-    # reproduce the state present during the CI build, and put that into
-    # the bind-mounted reproducer directory.
+    # We will also try to clone spack from your local checkout and
+    # reproduce the state present during the CI build, and put that into
+    # the bind-mounted reproducer directory.

-    # Regular expressions for parsing that HEAD commit. If the pipeline
-    # was on the gitlab spack mirror, it will have been a merge commit made by
-    # github and pushed by the sync script. If the pipeline was run on some
-    # environment repo, then the tested spack commit will likely have been
-    # a regular commit.
-    commit_1 = None
-    commit_2 = None
-    commit_regex = re.compile(r"commit\s+([^\s]+)")
-    merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")
+    # Regular expressions for parsing that HEAD commit. If the pipeline
+    # was on the gitlab spack mirror, it will have been a merge commit made by
+    # github and pushed by the sync script. If the pipeline was run on some
+    # environment repo, then the tested spack commit will likely have been
+    # a regular commit.
+    commit_1 = None
+    commit_2 = None
+    commit_regex = re.compile(r"commit\s+([^\s]+)")
+    merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")

-    # Try the more specific merge commit regex first
-    m = merge_commit_regex.search(spack_info)
+    # Try the more specific merge commit regex first
+    m = merge_commit_regex.search(spack_info)
     if m:
         # This was a merge commit and we captured the parents
         commit_1 = m.group(1)
         commit_2 = m.group(2)
     else:
         # Not a merge commit, just get the commit sha
         m = commit_regex.search(spack_info)
         if m:
             commit_1 = m.group(1)

-    setup_result = False
-    if commit_1:
-        if commit_2:
-            setup_result = setup_spack_repro_version(work_dir, commit_2, merge_commit=commit_1)
-        else:
-            setup_result = setup_spack_repro_version(work_dir, commit_1)
+    setup_result = False
+    if commit_1:
+        if commit_2:
+            setup_result = setup_spack_repro_version(work_dir, commit_2, merge_commit=commit_1)
+        else:
+            setup_result = setup_spack_repro_version(work_dir, commit_1)

-    if not setup_result:
-        setup_msg = """
-        This can happen if the spack you are using to run this command is not a git
-        repo, or if it is a git repo, but it does not have the commits needed to
-        recreate the tested merge commit. If you are trying to reproduce a spack
-        PR pipeline job failure, try fetching the latest develop commits from
-        mainline spack and make sure you have the most recent commit of the PR
-        branch in your local spack repo. Then run this command again.
-        Alternatively, you can also manually clone spack if you know the version
-        you want to test.
-        """
-        tty.error(
-            "Failed to automatically setup the tested version of spack "
-            "in your local reproduction directory."
-        )
-        print(setup_msg)
+    if not setup_result:
+        setup_msg = """
+        This can happen if the spack you are using to run this command is not a git
+        repo, or if it is a git repo, but it does not have the commits needed to
+        recreate the tested merge commit. If you are trying to reproduce a spack
+        PR pipeline job failure, try fetching the latest develop commits from
+        mainline spack and make sure you have the most recent commit of the PR
+        branch in your local spack repo. Then run this command again.
+        Alternatively, you can also manually clone spack if you know the version
+        you want to test.
+        """
+        tty.error(
+            "Failed to automatically setup the tested version of spack "
+            "in your local reproduction directory."
+        )
+        print(setup_msg)

     # In cases where CI build was run on a shell runner, it might be useful
     # to see what tags were applied to the job so the user knows what shell
@@ -1862,45 +1870,92 @@ def reproduce_ci_job(url, work_dir):
         job_tags = job_yaml["tags"]
         tty.msg("Job ran with the following tags: {0}".format(job_tags))

-    inst_list = []
+    entrypoint_script = [
+        ["git", "config", "--global", "--add", "safe.directory", mount_as_dir],
+        [".", os.path.join(mount_as_dir if job_image else work_dir, "share/spack/setup-env.sh")],
+        ["spack", "gpg", "trust", mounted_gpg_path if job_image else gpg_path] if gpg_path else [],
+        ["spack", "env", "activate", mounted_env_dir if job_image else repro_dir],
+        [os.path.join(mounted_repro_dir, "install.sh") if job_image else install_script],
+    ]
+
+    inst_list = []
     # Finally, print out some instructions to reproduce the build
     if job_image:
-        inst_list.append("\nRun the following command:\n\n")
-        inst_list.append(
-            " $ docker run --rm --name spack_reproducer -v {0}:{1}:Z -ti {2}\n".format(
-                work_dir, mount_as_dir, job_image
-            )
-        )
-        inst_list.append("\nOnce inside the container:\n\n")
+        # Allow interactive
+        entrypoint_script.extend(
+            [
+                [
+                    "echo",
+                    "Re-run install script using:\n\t{0}".format(
+                        os.path.join(mounted_repro_dir, "install.sh")
+                        if job_image
+                        else install_script
+                    ),
+                ],
+                # Allow interactive
+                ["exec", "$@"],
+            ]
+        )
+        process_command(
+            "entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False
+        )
+
+        docker_command = [
+            [
+                runtime,
+                "run",
+                "-i",
+                "-t",
+                "--rm",
+                "--name",
+                "spack_reproducer",
+                "-v",
+                ":".join([work_dir, mounted_workdir, "Z"]),
+                "-v",
+                ":".join(
+                    [
+                        os.path.join(work_dir, "jobs_scratch_dir"),
+                        os.path.join(mount_as_dir, "jobs_scratch_dir"),
+                        "Z",
+                    ]
+                ),
+                "-v",
+                ":".join([os.path.join(work_dir, "spack"), mount_as_dir, "Z"]),
+                "--entrypoint",
+                os.path.join(mounted_workdir, "entrypoint.sh"),
+                job_image,
+                "bash",
+            ]
+        ]
+        autostart = autostart and setup_result
+        process_command("start", docker_command, work_dir, run=autostart)
+
+        if not autostart:
+            inst_list.append("\nTo run the docker reproducer:\n\n")
+            inst_list.extend(
+                [
+                    " - Start the docker container install",
+                    " $ {0}/start.sh".format(work_dir),
+                ]
+            )
     else:
+        process_command("reproducer", entrypoint_script, work_dir, run=False)
+
         inst_list.append("\nOnce on the tagged runner:\n\n")
-        inst_list.extent(
+        inst_list.extend(
             [" - Run the reproducer script", " $ {0}/reproducer.sh".format(work_dir)]
         )

     if not setup_result:
-        inst_list.append("\n - Clone spack and acquire tested commit")
-        inst_list.append("\n {0}\n".format(spack_info))
-        inst_list.append("\n")
-        inst_list.append("\n Path to clone spack: {0}/spack\n\n".format(work_dir))
+        inst_list.append(" - Clone spack and acquire tested commit\n")
+        inst_list.append("{0}".format(spack_info))
+        spack_root = "<spack-clone-path>"
+    else:
+        spack_root = "{0}/spack".format(mount_as_dir)

     inst_list.append(" - Activate the environment\n\n")
+    inst_list.append(" $ source {0}/share/spack/setup-env.sh\n".format(spack_root))
     inst_list.append(
         " $ spack env activate --without-view {0}\n\n".format(
             mounted_env_dir if job_image else repro_dir
         )
     )
     inst_list.append(" - Run the install script\n\n")
     inst_list.append(
         " $ {0}\n".format(
             os.path.join(mounted_repro_dir, "install.sh") if job_image else install_script
         )
     )

-    print("".join(inst_list))
+    tty.msg("".join(inst_list))

-def process_command(name, commands, repro_dir):
+def process_command(name, commands, repro_dir, run=True, exit_on_failure=True):
     """
     Create a script for and run the command. Copy the script to the
     reproducibility directory.
@@ -1910,6 +1965,7 @@ def process_command(name, commands, repro_dir):
         commands (list): list of arguments for single command or list of lists of
             arguments for multiple commands. No shell escape is performed.
         repro_dir (str): Job reproducibility directory
+        run (bool): Run the script and return the exit code if True

     Returns: the exit code from processing the command
     """
@@ -1928,7 +1984,8 @@ def process_command(name, commands, repro_dir):
     with open(script, "w") as fd:
         fd.write("#!/bin/sh\n\n")
         fd.write("\n# spack {0} command\n".format(name))
-        fd.write("set -e\n")
+        if exit_on_failure:
+            fd.write("set -e\n")
         if os.environ.get("SPACK_VERBOSE_SCRIPT"):
             fd.write("set -x\n")
         fd.write(full_command)
@@ -1939,19 +1996,27 @@ def process_command(name, commands, repro_dir):

     copy_path = os.path.join(repro_dir, script)
     shutil.copyfile(script, copy_path)
     st = os.stat(copy_path)
     os.chmod(copy_path, st.st_mode | stat.S_IEXEC)

     # Run the generated install.sh shell script as if it were being run in
     # a login shell.
-    try:
-        cmd_process = subprocess.Popen(["/bin/sh", "./{0}".format(script)])
-        cmd_process.wait()
-        exit_code = cmd_process.returncode
-    except (ValueError, subprocess.CalledProcessError, OSError) as err:
-        tty.error("Encountered error running {0} script".format(name))
-        tty.error(err)
-        exit_code = 1
+    exit_code = None
+    if run:
+        try:
+            cmd_process = subprocess.Popen(["/bin/sh", "./{0}".format(script)])
+            cmd_process.wait()
+            exit_code = cmd_process.returncode
+        except (ValueError, subprocess.CalledProcessError, OSError) as err:
+            tty.error("Encountered error running {0} script".format(name))
+            tty.error(err)
+            exit_code = 1

-    tty.debug("spack {0} exited {1}".format(name, exit_code))
+        tty.debug("spack {0} exited {1}".format(name, exit_code))
+    else:
+        # Delete the script, it is copied to the destination dir
+        os.remove(script)
+
     return exit_code

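`process_command` now separates "write the script" from "run the script", which is what lets the reproducer generate `start.sh` and `entrypoint.sh` without executing them. A compact, self-contained sketch of that pattern; `write_and_maybe_run` and its arguments are invented for illustration:

```python
import os
import stat
import subprocess

def write_and_maybe_run(script, lines, run=True, exit_on_failure=True):
    with open(script, "w") as fd:
        fd.write("#!/bin/sh\n\n")
        if exit_on_failure:
            fd.write("set -e\n")
        fd.write("\n".join(lines) + "\n")
    st = os.stat(script)
    os.chmod(script, st.st_mode | stat.S_IEXEC)  # make the script executable
    if not run:
        return None  # caller only wanted the script on disk
    return subprocess.call(["/bin/sh", script])

# write_and_maybe_run("start.sh", ["echo hello"], run=False)  # generate only
```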
@@ -291,7 +291,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
     if len(matching_specs) <= 1:
         return

-    format_string = "{name}{@version}{%compiler}{arch=architecture}"
+    format_string = "{name}{@version}{%compiler.name}{@compiler.version}{arch=architecture}"
     args = ["%s matches multiple packages." % spec, "Matching packages:"]
     args += [
         colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
@@ -342,9 +342,9 @@ def iter_groups(specs, indent, all_headers):
             print()

             header = "%s{%s} / %s{%s}" % (
-                spack.spec.architecture_color,
+                spack.spec.ARCHITECTURE_COLOR,
                 architecture if architecture else "no arch",
-                spack.spec.compiler_color,
+                spack.spec.COMPILER_COLOR,
                 f"{compiler.display_str}" if compiler else "no compiler",
             )

@@ -383,7 +383,7 @@ def display_specs(specs, args=None, **kwargs):
         deps (bool): Display dependencies with specs
         long (bool): Display short hashes with specs
         very_long (bool): Display full hashes with specs (supersedes ``long``)
-        namespace (bool): Print namespaces along with names
+        namespaces (bool): Print namespaces along with names
         show_flags (bool): Show compiler flags with specs
         variants (bool): Show variants with specs
         indent (int): indent each line this much
@@ -407,7 +407,7 @@ def get_arg(name, default=None):
     paths = get_arg("paths", False)
     deps = get_arg("deps", False)
     hashes = get_arg("long", False)
-    namespace = get_arg("namespace", False)
+    namespaces = get_arg("namespaces", False)
     flags = get_arg("show_flags", False)
     full_compiler = get_arg("show_full_compiler", False)
     variants = get_arg("variants", False)
@@ -428,7 +428,7 @@ def get_arg(name, default=None):

     format_string = get_arg("format", None)
     if format_string is None:
-        nfmt = "{fullname}" if namespace else "{name}"
+        nfmt = "{fullname}" if namespaces else "{name}"
         ffmt = ""
         if full_compiler or flags:
             ffmt += "{%compiler.name}"
@@ -584,14 +584,14 @@ def require_active_env(cmd_name):

     if env:
         return env
-    else:
-        tty.die(
-            "`spack %s` requires an environment" % cmd_name,
-            "activate an environment first:",
-            " spack env activate ENV",
-            "or use:",
-            " spack -e ENV %s ..." % cmd_name,
-        )
+
+    tty.die(
+        "`spack %s` requires an environment" % cmd_name,
+        "activate an environment first:",
+        " spack env activate ENV",
+        "or use:",
+        " spack -e ENV %s ..." % cmd_name,
+    )


 def find_environment(args):

@@ -47,7 +47,7 @@ def configs(parser, args):


 def packages(parser, args):
-    pkgs = args.name or spack.repo.path.all_package_names()
+    pkgs = args.name or spack.repo.PATH.all_package_names()
     reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
     _process_reports(reports)

@@ -57,7 +57,7 @@ def packages_https(parser, args):
     if not args.check_all and not args.name:
         tty.die("Please specify one or more packages to audit, or --all.")

-    pkgs = args.name or spack.repo.path.all_package_names()
+    pkgs = args.name or spack.repo.PATH.all_package_names()
     reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
     _process_reports(reports)

@@ -126,7 +126,7 @@ def blame(parser, args):
         blame_file = path

     if not blame_file:
-        pkg_cls = spack.repo.path.get_pkg_class(args.package_or_file)
+        pkg_cls = spack.repo.PATH.get_pkg_class(args.package_or_file)
         blame_file = pkg_cls.module.__file__.rstrip("c")  # .pyc -> .py

     # get git blame for the package

@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os.path
 import shutil
+import sys
 import tempfile

 import llnl.util.filesystem
@@ -68,11 +69,10 @@

 def _add_scope_option(parser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
     parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )

@@ -169,7 +169,7 @@ def _reset(args):
     if not ok_to_continue:
         raise RuntimeError("Aborting")

-    for scope in spack.config.config.file_scopes:
+    for scope in spack.config.CONFIG.file_scopes:
         # The default scope should stay untouched
         if scope.name == "defaults":
             continue
@@ -186,7 +186,7 @@ def _reset(args):
     if os.path.exists(bootstrap_yaml):
         shutil.move(bootstrap_yaml, backup_file)

-    spack.config.config.clear_caches()
+    spack.config.CONFIG.clear_caches()


 def _root(args):
@@ -326,6 +326,7 @@ def _status(args):
     if missing:
         print(llnl.util.tty.color.colorize(legend))
         print()
+        sys.exit(1)


 def _add(args):

@@ -2,12 +2,14 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import argparse
 import glob
 import json
 import os
 import shutil
 import sys
 import tempfile
+from typing import List

 import llnl.util.tty as tty
 import llnl.util.tty.color as clr
@@ -18,7 +20,7 @@
 import spack.cmd.common.arguments as arguments
 import spack.config
 import spack.environment as ev
-import spack.hash_types as ht
+import spack.error
 import spack.mirror
 import spack.relocate
 import spack.repo
@@ -28,7 +30,6 @@
 import spack.util.url as url_util
 import spack.util.web as web_util
 from spack.cmd import display_specs
-from spack.error import SpecError
 from spack.spec import Spec, save_dependency_specfiles
 from spack.stage import Stage
 from spack.util.string import plural
@@ -38,8 +39,8 @@
 level = "long"


-def setup_parser(subparser):
-    setup_parser.parser = subparser
+def setup_parser(subparser: argparse.ArgumentParser):
+    setattr(setup_parser, "parser", subparser)
     subparsers = subparser.add_subparsers(help="buildcache sub-commands")

     push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
@@ -78,6 +79,11 @@ def setup_parser(subparser):
         "Alternatively, one can decide to build a cache for only the package or only the "
         "dependencies",
     )
+    push.add_argument(
+        "--fail-fast",
+        action="store_true",
+        help="stop pushing on first failure (default is best effort)",
+    )
     arguments.add_common_arguments(push, ["specs"])
     push.set_defaults(func=push_fn)

@@ -105,7 +111,7 @@ def setup_parser(subparser):
     install.set_defaults(func=install_fn)

     listcache = subparsers.add_parser("list", help=list_fn.__doc__)
-    arguments.add_common_arguments(listcache, ["long", "very_long"])
+    arguments.add_common_arguments(listcache, ["long", "very_long", "namespaces"])
     listcache.add_argument(
         "-v",
         "--variants",
|
||||
|
||||
# used to construct scope arguments below
|
||||
scopes = spack.config.scopes()
|
||||
scopes_metavar = spack.config.scopes_metavar
|
||||
|
||||
check.add_argument(
|
||||
"--scope",
|
||||
choices=scopes,
|
||||
metavar=scopes_metavar,
|
||||
metavar=spack.config.SCOPES_METAVAR,
|
||||
default=spack.config.default_modify_scope(),
|
||||
help="configuration scope containing mirrors to check",
|
||||
)
|
||||
|
||||
check.add_argument(
|
||||
"-s", "--spec", default=None, help="check single spec instead of release specs file"
|
||||
check_spec_or_specfile = check.add_mutually_exclusive_group(required=True)
|
||||
check_spec_or_specfile.add_argument(
|
||||
"-s", "--spec", help="check single spec instead of release specs file"
|
||||
)
|
||||
|
||||
check.add_argument(
|
||||
check_spec_or_specfile.add_argument(
|
||||
"--spec-file",
|
||||
default=None,
|
||||
help="check single spec from json or yaml file instead of release specs file",
|
||||
)
|
||||
|
||||
@@ -173,16 +176,19 @@ def setup_parser(subparser):
|
||||
|
||||
# Download tarball and specfile
|
||||
download = subparsers.add_parser("download", help=download_fn.__doc__)
|
||||
download.add_argument(
|
||||
"-s", "--spec", default=None, help="download built tarball for spec from mirror"
|
||||
download_spec_or_specfile = download.add_mutually_exclusive_group(required=True)
|
||||
download_spec_or_specfile.add_argument(
|
||||
"-s", "--spec", help="download built tarball for spec from mirror"
|
||||
)
|
||||
download_spec_or_specfile.add_argument(
|
||||
"--spec-file", help="download built tarball for spec (from json or yaml file) from mirror"
|
||||
)
|
||||
download.add_argument(
|
||||
"--spec-file",
|
||||
"-p",
|
||||
"--path",
|
||||
required=True,
|
||||
default=None,
|
||||
help="download built tarball for spec (from json or yaml file) from mirror",
|
||||
)
|
||||
download.add_argument(
|
||||
"-p", "--path", default=None, help="path to directory where tarball should be downloaded"
|
||||
help="path to directory where tarball should be downloaded",
|
||||
)
|
||||
download.set_defaults(func=download_fn)
|
||||
|
||||
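Several of these parsers now use `add_mutually_exclusive_group(required=True)` so that exactly one of `--spec` / `--spec-file` must be given, and argparse enforces it instead of hand-rolled checks in each command. A generic illustration of the pattern (the `prog` name is hypothetical):

```python
import argparse

parser = argparse.ArgumentParser(prog="buildcache-download")
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-s", "--spec", help="spec string")
group.add_argument("--spec-file", help="path to a spec json or yaml file")
parser.add_argument("-p", "--path", required=True, help="download directory")

args = parser.parse_args(["-s", "zlib", "-p", "/tmp"])
print(args.spec, args.path)   # "zlib /tmp"
# Passing both -s and --spec-file, or neither, makes parse_args() exit
# with a usage error, so the command body no longer needs those checks.
```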
@@ -190,32 +196,32 @@ def setup_parser(subparser):
     getbuildcachename = subparsers.add_parser(
         "get-buildcache-name", help=get_buildcache_name_fn.__doc__
     )
-    getbuildcachename.add_argument(
-        "-s", "--spec", default=None, help="spec string for which buildcache name is desired"
+    getbuildcachename_spec_or_specfile = getbuildcachename.add_mutually_exclusive_group(
+        required=True
     )
-    getbuildcachename.add_argument(
-        "--spec-file",
-        default=None,
-        help="path to spec json or yaml file for which buildcache name is desired",
+    getbuildcachename_spec_or_specfile.add_argument(
+        "-s", "--spec", help="spec string for which buildcache name is desired"
+    )
+    getbuildcachename_spec_or_specfile.add_argument(
+        "--spec-file", help="path to spec json or yaml file for which buildcache name is desired"
     )
     getbuildcachename.set_defaults(func=get_buildcache_name_fn)

     # Given the root spec, save the yaml of the dependent spec to a file
     savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
-    savespecfile.add_argument("--root-spec", default=None, help="root spec of dependent spec")
-    savespecfile.add_argument(
-        "--root-specfile",
-        default=None,
-        help="path to json or yaml file containing root spec of dependent spec",
+    savespecfile_spec_or_specfile = savespecfile.add_mutually_exclusive_group(required=True)
+    savespecfile_spec_or_specfile.add_argument("--root-spec", help="root spec of dependent spec")
+    savespecfile_spec_or_specfile.add_argument(
+        "--root-specfile", help="path to json or yaml file containing root spec of dependent spec"
     )
     savespecfile.add_argument(
         "-s",
         "--specs",
-        default=None,
+        required=True,
         help="list of dependent specs for which saved yaml is desired",
     )
     savespecfile.add_argument(
-        "--specfile-dir", default=None, help="path to directory where spec yamls should be saved"
+        "--specfile-dir", required=True, help="path to directory where spec yamls should be saved"
     )
     savespecfile.set_defaults(func=save_specfile_fn)

@@ -257,54 +263,24 @@ def setup_parser(subparser):
     update_index.set_defaults(func=update_index_fn)


-def _matching_specs(specs, spec_file):
-    """Return a list of matching specs read from either a spec file (JSON or YAML),
-    a query over the store or a query over the active environment.
-    """
-    env = ev.active_environment()
-    hashes = env.all_hashes() if env else None
-    if spec_file:
-        return spack.store.specfile_matches(spec_file, hashes=hashes)
-
-    if specs:
-        constraints = spack.cmd.parse_specs(specs)
-        return spack.store.find(constraints, hashes=hashes)
-
-    if env:
-        return [concrete for _, concrete in env.concretized_specs()]
-
-    tty.die(
-        "build cache file creation requires at least one"
-        " installed package spec, an active environment,"
-        " or else a path to a json or yaml file containing a spec"
-        " to install"
-    )
-
-
-def _concrete_spec_from_args(args):
-    spec_str, specfile_path = args.spec, args.spec_file
-
-    if not spec_str and not specfile_path:
-        tty.error("must provide either spec string or path to YAML or JSON specfile")
-        sys.exit(1)
-
-    if spec_str:
-        try:
-            constraints = spack.cmd.parse_specs(spec_str)
-            spec = spack.store.find(constraints)[0]
-            spec.concretize()
-        except SpecError as spec_error:
-            tty.error("Unable to concretize spec {0}".format(spec_str))
-            tty.debug(spec_error)
-            sys.exit(1)
-
-        return spec
-
-    return Spec.from_specfile(specfile_path)
+def _matching_specs(specs: List[Spec]) -> List[Spec]:
+    """Disambiguate specs and return a list of matching specs"""
+    return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]

 def push_fn(args):
     """create a binary package and push it to a mirror"""
+    if args.spec_file:
+        tty.warn(
+            "The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
+            "Use positional arguments instead."
+        )
+
+    if args.specs or args.spec_file:
+        specs = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
+    else:
+        specs = spack.cmd.require_active_env("buildcache push").all_specs()
+
     mirror = arguments.mirror_name_or_url(args.mirror)

     if args.allow_root:
@@ -315,7 +291,7 @@ def push_fn(args):
     url = mirror.push_url

     specs = bindist.specs_to_be_packaged(
-        _matching_specs(args.specs, args.spec_file),
+        specs,
         root="package" in args.things_to_install,
         dependencies="dependencies" in args.things_to_install,
     )
@@ -326,6 +302,7 @@ def push_fn(args):
     tty.info(f"Selected {len(specs)} specs to push to {url}")

     skipped = []
+    failed = []

     # tty printing
     color = clr.get_color_when()
@@ -356,11 +333,17 @@ def push_fn(args):
         except bindist.NoOverwriteException:
             skipped.append(format_spec(spec))

+        # Catch any other exception unless the fail fast option is set
+        except Exception as e:
+            if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
+                raise
+            failed.append((format_spec(spec), e))
+
     if skipped:
         if len(specs) == 1:
             tty.info("The spec is already in the buildcache. Use --force to overwrite it.")
         elif len(skipped) == len(specs):
-            tty.info("All specs are already in the buildcache. Use --force to overwite them.")
+            tty.info("All specs are already in the buildcache. Use --force to overwrite them.")
         else:
             tty.info(
                 "The following {} specs were skipped as they already exist in the buildcache:\n"
@@ -370,6 +353,17 @@ def push_fn(args):
                 )
             )

+    if failed:
+        if len(failed) == 1:
+            raise failed[0][1]
+
+        raise spack.error.SpackError(
+            f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
+            "\n".join(
+                elide_list([f" {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
+            ),
+        )
+

 def install_fn(args):
     """install from a binary package"""
@@ -423,16 +417,21 @@ def preview_fn(args):
 def check_fn(args):
     """check specs against remote binary mirror(s) to see if any need to be rebuilt

-    either a single spec from --spec, or else the full set of release specs. this command uses the
-    process exit code to indicate its result, specifically, if the exit code is non-zero, then at
-    least one of the indicated specs needs to be rebuilt
+    this command uses the process exit code to indicate its result, specifically, if the
+    exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
     """
-    if args.spec or args.spec_file:
-        specs = [_concrete_spec_from_args(args)]
+    if args.spec_file:
+        tty.warn(
+            "The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
+            "Use --spec instead."
+        )
+
+    specs = spack.cmd.parse_specs(args.spec or args.spec_file)
+
+    if specs:
+        specs = _matching_specs(specs, specs)
     else:
-        env = spack.cmd.require_active_env(cmd_name="buildcache")
-        env.concretize()
-        specs = env.all_specs()
+        specs = spack.cmd.require_active_env("buildcache check").all_specs()

     if not specs:
         tty.msg("No specs provided, exiting.")
@@ -462,26 +461,28 @@ def download_fn(args):
     code indicates that the command failed to download at least one of the required buildcache
     components
     """
-    if not args.spec and not args.spec_file:
-        tty.msg("No specs provided, exiting.")
-        return
+    if args.spec_file:
+        tty.warn(
+            "The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
+            "Use --spec instead."
+        )

-    if not args.path:
-        tty.msg("No download path provided, exiting")
-        return
+    specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))

-    spec = _concrete_spec_from_args(args)
-    result = bindist.download_single_spec(spec, args.path)
+    if len(specs) != 1:
+        tty.die("a single spec argument is required to download from a buildcache")

-    if not result:
+    if not bindist.download_single_spec(specs[0], args.path):
         sys.exit(1)


 def get_buildcache_name_fn(args):
     """get name (prefix) of buildcache entries for this spec"""
-    spec = _concrete_spec_from_args(args)
-    buildcache_name = bindist.tarball_name(spec, "")
-    print("{0}".format(buildcache_name))
+    tty.warn("This command is deprecated and will be removed in Spack 0.22.")
+    specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
+    if len(specs) != 1:
+        tty.die("a single spec argument is required to get buildcache name")
+    print(bindist.tarball_name(specs[0], ""))

 def save_specfile_fn(args):
@@ -491,29 +492,24 @@ def save_specfile_fn(args):
     successful. if any errors or exceptions are encountered, or if expected command-line arguments
     are not provided, then the exit code will be non-zero
     """
-    if not args.root_spec and not args.root_specfile:
-        tty.msg("No root spec provided, exiting.")
-        sys.exit(1)
-
-    if not args.specs:
-        tty.msg("No dependent specs provided, exiting.")
-        sys.exit(1)
-
-    if not args.specfile_dir:
-        tty.msg("No yaml directory provided, exiting.")
-        sys.exit(1)
-
     if args.root_specfile:
-        with open(args.root_specfile) as fd:
-            root_spec_as_json = fd.read()
-        spec_format = "yaml" if args.root_specfile.endswith("yaml") else "json"
-    else:
-        root_spec = Spec(args.root_spec)
-        root_spec.concretize()
-        root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
-        spec_format = "json"
+        tty.warn(
+            "The flag `--root-specfile` is deprecated and will be removed in Spack 0.22. "
+            "Use --root-spec instead."
+        )
+
+    specs = spack.cmd.parse_specs(args.root_spec or args.root_specfile)
+
+    if len(specs) != 1:
+        tty.die("a single spec argument is required to save specfile")
+
+    root = specs[0]
+
+    if not root.concrete:
+        root.concretize()

     save_dependency_specfiles(
-        root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format
+        root, args.specfile_dir, dependencies=spack.cmd.parse_specs(args.specs)
     )

@@ -4,18 +4,21 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import argparse
 import re
 import sys

-import llnl.util.tty as tty
+import llnl.util.lang
+from llnl.util import tty

 import spack.cmd
-import spack.cmd.common.arguments as arguments
 import spack.repo
 import spack.spec
 import spack.stage
 import spack.util.crypto
-from spack.package_base import deprecated_version, preferred_version
+from spack.cmd.common import arguments
+from spack.package_base import PackageBase, deprecated_version, preferred_version
 from spack.util.editor import editor
+from spack.util.format import get_version_lines
 from spack.util.naming import valid_fully_qualified_module_name
 from spack.version import Version

@@ -31,35 +34,38 @@ def setup_parser(subparser):
         default=False,
         help="don't clean up staging area when command completes",
     )
-    sp = subparser.add_mutually_exclusive_group()
-    sp.add_argument(
+    subparser.add_argument(
         "-b",
         "--batch",
         action="store_true",
         default=False,
         help="don't ask which versions to checksum",
     )
-    sp.add_argument(
+    subparser.add_argument(
         "-l",
         "--latest",
         action="store_true",
         default=False,
-        help="checksum the latest available version only",
+        help="checksum the latest available version",
     )
-    sp.add_argument(
+    subparser.add_argument(
         "-p",
         "--preferred",
         action="store_true",
         default=False,
-        help="checksum the preferred version only",
+        help="checksum the known Spack preferred version",
     )
-    subparser.add_argument(
+    modes_parser = subparser.add_mutually_exclusive_group()
+    modes_parser.add_argument(
         "-a",
         "--add-to-package",
         action="store_true",
         default=False,
         help="add new versions to package",
     )
+    modes_parser.add_argument(
+        "--verify", action="store_true", default=False, help="verify known package checksums"
+    )
     arguments.add_common_arguments(subparser, ["package"])
     subparser.add_argument(
         "versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
@@ -77,89 +83,174 @@ def checksum(parser, args):
         tty.die("`spack checksum` accepts package names, not URLs.")

     # Get the package we're going to generate checksums for
-    pkg_cls = spack.repo.path.get_pkg_class(args.package)
+    pkg_cls = spack.repo.PATH.get_pkg_class(args.package)
     pkg = pkg_cls(spack.spec.Spec(args.package))

     # Build a list of versions to checksum
-    versions = [Version(v) for v in args.versions]
-
-    # Define placeholder for remote versions.
-    # This'll help reduce redundant work if we need to check for the existence
-    # of remote versions more than once.
-    remote_versions = None
-
-    # Add latest version if requested
-    if args.latest:
-        remote_versions = pkg.fetch_remote_versions()
-        if len(remote_versions) > 0:
-            latest_version = sorted(remote_versions.keys(), reverse=True)[0]
-            versions.append(latest_version)
-
-    # Add preferred version if requested
-    if args.preferred:
-        versions.append(preferred_version(pkg))
+    if not args.versions and args.preferred:
+        versions = [preferred_version(pkg)]
+    else:
+        versions = [Version(v) for v in args.versions]

     # Store a dict of the form version -> URL
     url_dict = {}
-    for version in versions:
-        if deprecated_version(pkg, version):
-            tty.warn("Version {0} is deprecated".format(version))
-
-        url = pkg.find_valid_url_for_version(version)
-        if url is not None:
-            url_dict[version] = url
-            continue
-        # if we get here, it's because no valid url was provided by the package
-        # do expensive fallback to try to recover
-        if remote_versions is None:
-            remote_versions = pkg.fetch_remote_versions()
-        if version in remote_versions:
-            url_dict[version] = remote_versions[version]
-
-    if len(versions) <= 0:
-        if remote_versions is None:
-            remote_versions = pkg.fetch_remote_versions()
-        url_dict = remote_versions
+    if versions:
+        remote_versions = None
+        for version in versions:
+            if deprecated_version(pkg, version):
+                tty.warn(f"Version {version} is deprecated")
+
+            url = pkg.find_valid_url_for_version(version)
+            if url is not None:
+                url_dict[version] = url
+                continue
+            # if we get here, it's because no valid url was provided by the package
+            # do expensive fallback to try to recover
+            if remote_versions is None:
+                remote_versions = pkg.fetch_remote_versions()
+            if version in remote_versions:
+                url_dict[version] = remote_versions[version]
+    else:
+        url_dict = pkg.fetch_remote_versions()

     if not url_dict:
-        tty.die("Could not find any remote versions for {0}".format(pkg.name))
+        tty.die(f"Could not find any remote versions for {pkg.name}")

-    version_lines = spack.stage.get_checksums_for_versions(
+    # print an empty line to create a new output section block
+    print()
+
+    version_hashes = spack.stage.get_checksums_for_versions(
         url_dict,
         pkg.name,
         keep_stage=args.keep_stage,
-        batch=(args.batch or len(args.versions) > 0 or len(url_dict) == 1),
+        latest=args.latest,
+        batch=(args.batch or len(versions) > 0 or len(url_dict) == 1),
         fetch_options=pkg.fetch_options,
     )

+    if args.verify:
+        print_checksum_status(pkg, version_hashes)
+        sys.exit(0)
+
+    # convert dict into package.py version statements
+    version_lines = get_version_lines(version_hashes, url_dict)
     print()
     print(version_lines)
     print()

     if args.add_to_package:
-        filename = spack.repo.path.filename_for_package_name(pkg.name)
-        # Make sure we also have a newline after the last version
-        versions = [v + "\n" for v in version_lines.splitlines()]
-        versions.append("\n")
-        # We need to insert the versions in reversed order
-        versions.reverse()
-        versions.append(" # FIXME: Added by `spack checksum`\n")
-        version_line = None
-
-        with open(filename, "r") as f:
-            lines = f.readlines()
-            for i in range(len(lines)):
-                # Black is drunk, so this is what it looks like for now
-                # See https://github.com/psf/black/issues/2156 for more information
-                if lines[i].startswith(" # FIXME: Added by `spack checksum`") or lines[
-                    i
-                ].startswith(" version("):
-                    version_line = i
-                    break
-
-        if version_line is not None:
-            for v in versions:
-                lines.insert(version_line, v)
-
-            with open(filename, "w") as f:
-                f.writelines(lines)
-
-            msg = "opening editor to verify"
-
-            if not sys.stdout.isatty():
-                msg = "please verify"
-
-            tty.info(
-                "Added {0} new versions to {1}, "
-                "{2}.".format(len(versions) - 2, args.package, msg)
-            )
-
-            if sys.stdout.isatty():
-                editor(filename)
-        else:
-            tty.warn("Could not add new versions to {0}.".format(args.package))
+        add_versions_to_package(pkg, version_lines)
+
+
+def print_checksum_status(pkg: PackageBase, version_hashes: dict):
+    """
+    Verify checksums present in version_hashes against those present
+    in the package's instructions.
+
+    Args:
+        pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
+        version_hashes (dict): A dictionary of the form: version -> checksum.
+
+    """
+    results = []
+    num_verified = 0
+    failed = False
+
+    max_len = max(len(str(v)) for v in version_hashes)
+    num_total = len(version_hashes)
+
+    for version, sha in version_hashes.items():
+        if version not in pkg.versions:
+            msg = "No previous checksum"
+            status = "-"
+
+        elif sha == pkg.versions[version]["sha256"]:
+            msg = "Correct"
+            status = "="
+            num_verified += 1
+
+        else:
+            msg = sha
+            status = "x"
+            failed = True
+
+        results.append("{0:{1}} {2} {3}".format(str(version), max_len, f"[{status}]", msg))
+
+    # Display table of checksum results.
+    tty.msg(f"Verified {num_verified} of {num_total}", "", *llnl.util.lang.elide_list(results), "")
+
+    # Terminate at the end of function to prevent additional output.
+    if failed:
+        print()
+        tty.die("Invalid checksums found.")
def add_versions_to_package(pkg: PackageBase, version_lines: str):
|
||||
"""
|
||||
Add checksumed versions to a package's instructions and open a user's
|
||||
editor so they may double check the work of the function.
|
||||
|
||||
Args:
|
||||
pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
|
||||
version_lines (str): A string of rendered version lines.
|
||||
|
||||
"""
|
||||
# Get filename and path for package
|
||||
filename = spack.repo.PATH.filename_for_package_name(pkg.name)
|
||||
num_versions_added = 0
|
||||
|
||||
version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
|
||||
version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')
|
||||
|
||||
# Split rendered version lines into tuple of (version, version_line)
|
||||
# We reverse sort here to make sure the versions match the version_lines
|
||||
new_versions = []
|
||||
for ver_line in version_lines.split("\n"):
|
||||
match = version_re.match(ver_line)
|
||||
if match:
|
||||
new_versions.append((Version(match.group(1)), ver_line))
|
||||
|
||||
with open(filename, "r+") as f:
|
||||
contents = f.read()
|
||||
split_contents = version_statement_re.split(contents)
|
||||
|
||||
for i, subsection in enumerate(split_contents):
|
||||
# If there are no more versions to add we should exit
|
||||
if len(new_versions) <= 0:
|
||||
break
|
||||
|
||||
# Check if the section contains a version
|
||||
contents_version = version_re.match(subsection)
|
||||
if contents_version is not None:
|
||||
parsed_version = Version(contents_version.group(1))
|
||||
|
||||
if parsed_version < new_versions[0][0]:
|
||||
split_contents[i:i] = [new_versions.pop(0)[1], " # FIX ME", "\n"]
|
||||
num_versions_added += 1
|
||||
|
||||
elif parsed_version == new_versions[0][0]:
|
||||
new_versions.pop(0)
|
||||
|
||||
# Seek back to the start of the file so we can rewrite the file contents.
|
||||
f.seek(0)
|
||||
f.writelines("".join(split_contents))
|
||||
|
||||
tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
|
||||
tty.msg(f"Open {filename} to review the additions.")
|
||||
|
||||
if sys.stdout.isatty():
|
||||
editor(filename)
|
||||
|
||||
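The insertion logic in `add_versions_to_package` hinges on `re.split` with a capturing group: because the `version(...)` pattern is parenthesized, `re.split` keeps every matched version statement as its own list element, which is what allows new lines to be spliced in front of the first older version. A minimal standalone sketch of that mechanism, using the same two regexes on a hypothetical package body:

import re

# Same patterns as add_versions_to_package above.
version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')

# Hypothetical package.py contents, for illustration only.
contents = """class Zlib(Package):
    version("1.2.13", sha256="abc")
    version("1.2.12", sha256="def")
"""

# The capturing group makes re.split keep each version() statement as an element.
for part in version_statement_re.split(contents):
    match = version_re.match(part)
    print(repr(part), "->", match.group(1) if match else None)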
@@ -19,6 +19,7 @@
 import spack.hash_types as ht
 import spack.mirror
+import spack.util.gpg as gpg_util
+import spack.util.timer as timer
 import spack.util.url as url_util
 import spack.util.web as web_util

@@ -156,11 +157,27 @@ def setup_parser(subparser):
         help=spack.cmd.first_line(ci_reproduce.__doc__),
     )
     reproduce.add_argument("job_url", help="URL of job artifacts bundle")
+    reproduce.add_argument(
+        "--runtime",
+        help="Container runtime to use.",
+        default="docker",
+        choices=["docker", "podman"],
+    )
     reproduce.add_argument(
         "--working-dir",
         help="where to unpack artifacts",
         default=os.path.join(os.getcwd(), "ci_reproduction"),
     )
+    reproduce.add_argument(
+        "-s", "--autostart", help="Run docker reproducer automatically", action="store_true"
+    )
+    gpg_group = reproduce.add_mutually_exclusive_group(required=False)
+    gpg_group.add_argument(
+        "--gpg-file", help="Path to public GPG key for validating binary cache installs"
+    )
+    gpg_group.add_argument(
+        "--gpg-url", help="URL to public GPG key for validating binary cache installs"
+    )

     reproduce.set_defaults(func=ci_reproduce)

@@ -237,6 +254,8 @@ def ci_rebuild(args):
     check a single spec against the remote mirror, and rebuild it from source if the mirror does
     not contain the hash
     """
+    rebuild_timer = timer.Timer()
+
     env = spack.cmd.require_active_env(cmd_name="ci rebuild")

     # Make sure the environment is "gitlab-enabled", or else there's nothing

@@ -273,6 +292,10 @@ def ci_rebuild(args):
     rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
     require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")

+    # If signing key was provided via "SPACK_SIGNING_KEY", then try to import it.
+    if signing_key:
+        spack_ci.import_signing_key(signing_key)
+
     # Fail early if signing is required but we don't have a signing key
     sign_binaries = require_signing is not None and require_signing.lower() == "true"
     if sign_binaries and not spack_ci.can_sign_binaries():

@@ -402,11 +425,6 @@ def ci_rebuild(args):
         dst_file = os.path.join(repro_dir, file_name)
         shutil.copyfile(src_file, dst_file)

-    # If signing key was provided via "SPACK_SIGNING_KEY", then try to
-    # import it.
-    if signing_key:
-        spack_ci.import_signing_key(signing_key)
-
     # Write this job's spec json into the reproduction directory, and it will
     # also be used in the generated "spack install" command to install the spec
     tty.debug("job concrete spec path: {0}".format(job_spec_json_path))

@@ -663,7 +681,7 @@ def ci_rebuild(args):
         input_spec=job_spec,
         buildcache_mirror_url=buildcache_mirror_url,
         pipeline_mirror_url=pipeline_mirror_url,
-        sign_binaries=sign_binaries,
+        sign_binaries=spack_ci.can_sign_binaries(),
     ):
         msg = tty.msg if result.success else tty.warn
         msg(

@@ -707,7 +725,7 @@ def ci_rebuild(args):

 \033[34mTo reproduce this build locally, run:

-    spack ci reproduce-build {0} [--working-dir <dir>]
+    spack ci reproduce-build {0} [--working-dir <dir>] [--autostart]

 If this project does not have public pipelines, you will need to first:

@@ -721,6 +739,14 @@ def ci_rebuild(args):

     print(reproduce_msg)

+    rebuild_timer.stop()
+    try:
+        with open("install_timers.json", "w") as timelog:
+            extra_attributes = {"name": ".ci-rebuild"}
+            rebuild_timer.write_json(timelog, extra_attributes=extra_attributes)
+    except Exception as e:
+        tty.debug(str(e))
+
     # Tie job success/failure to the success/failure of building the spec
     return install_exit_code

@@ -733,8 +759,18 @@ def ci_reproduce(args):
     """
     job_url = args.job_url
     work_dir = args.working_dir
+    autostart = args.autostart
+    runtime = args.runtime

-    return spack_ci.reproduce_ci_job(job_url, work_dir)
+    # Allow passing GPG key for reproducing protected CI jobs
+    if args.gpg_file:
+        gpg_key_url = url_util.path_to_file_url(args.gpg_file)
+    elif args.gpg_url:
+        gpg_key_url = args.gpg_url
+    else:
+        gpg_key_url = None
+
+    return spack_ci.reproduce_ci_job(job_url, work_dir, autostart, gpg_key_url, runtime)


 def ci(parser, args):
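The `--gpg-file`/`--gpg-url` pair above is mutually exclusive, and `ci_reproduce` collapses the two into a single key URL, converting a local path with `url_util.path_to_file_url`. A hedged sketch of the same normalization using only the standard library (`pathlib.Path.as_uri` standing in for Spack's helper):

import argparse
import pathlib

parser = argparse.ArgumentParser("reproduce")
gpg_group = parser.add_mutually_exclusive_group(required=False)
gpg_group.add_argument("--gpg-file")
gpg_group.add_argument("--gpg-url")

args = parser.parse_args(["--gpg-file", "/tmp/key.pub"])

# Normalize both options to a single URL, as ci_reproduce does above;
# pathlib.Path.as_uri stands in for url_util.path_to_file_url here.
if args.gpg_file:
    gpg_key_url = pathlib.Path(args.gpg_file).as_uri()
elif args.gpg_url:
    gpg_key_url = args.gpg_url
else:
    gpg_key_url = None

print(gpg_key_url)  # file:///tmp/key.pub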
@@ -17,6 +17,7 @@
 import spack.config
 import spack.repo
 import spack.stage
+import spack.store
 import spack.util.path
 from spack.paths import lib_path, var_path

@@ -114,22 +115,18 @@ def clean(parser, args):
     if args.stage:
         tty.msg("Removing all temporary build stages")
         spack.stage.purge()
+        # Temp directory where buildcaches are extracted
+        extract_tmp = os.path.join(spack.store.STORE.layout.root, ".tmp")
+        if os.path.exists(extract_tmp):
+            tty.debug("Removing {0}".format(extract_tmp))
+            shutil.rmtree(extract_tmp)

     if args.downloads:
         tty.msg("Removing cached downloads")
-        spack.caches.fetch_cache.destroy()
+        spack.caches.FETCH_CACHE.destroy()

     if args.failures:
         tty.msg("Removing install failure marks")
-        spack.installer.clear_failures()
+        spack.store.STORE.failure_tracker.clear_all()

     if args.misc_cache:
         tty.msg("Removing cached information on repositories")
-        spack.caches.misc_cache.destroy()
+        spack.caches.MISC_CACHE.destroy()

     if args.python_cache:
         tty.msg("Removing python cache files")
@@ -36,13 +36,13 @@
     "bash": {
         "aliases": True,
         "format": "bash",
-        "header": os.path.join(spack.paths.share_path, "bash", "spack-completion.in"),
+        "header": os.path.join(spack.paths.share_path, "bash", "spack-completion.bash"),
         "update": os.path.join(spack.paths.share_path, "spack-completion.bash"),
     },
     "fish": {
         "aliases": True,
         "format": "fish",
-        "header": os.path.join(spack.paths.share_path, "fish", "spack-completion.in"),
+        "header": os.path.join(spack.paths.share_path, "fish", "spack-completion.fish"),
        "update": os.path.join(spack.paths.share_path, "spack-completion.fish"),
     },
 }

@@ -812,6 +812,9 @@ def bash(args: Namespace, out: IO) -> None:
     parser = spack.main.make_argument_parser()
     spack.main.add_all_commands(parser)

+    aliases = ";".join(f"{key}:{val}" for key, val in spack.main.aliases.items())
+    out.write(f'SPACK_ALIASES="{aliases}"\n\n')
+
     writer = BashCompletionWriter(parser.prog, out, args.aliases)
     writer.write(parser)
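The `SPACK_ALIASES` line written into the completion header serializes the alias table as `name:expansion` pairs joined with semicolons, which a shell script can split back apart. A small round-trip sketch (the alias table here is a made-up example, not Spack's actual defaults):

aliases = {"rm": "remove", "ls": "list"}

# Serialize the same way the bash() writer above does.
serialized = ";".join(f"{key}:{val}" for key, val in aliases.items())
print(f'SPACK_ALIASES="{serialized}"')  # SPACK_ALIASES="rm:remove;ls:list"

# A completion script can split the pairs back apart.
recovered = dict(entry.split(":", 1) for entry in serialized.split(";"))
assert recovered == aliases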
@@ -331,6 +331,17 @@ def tags():
     )


+@arg
+def namespaces():
+    return Args(
+        "-N",
+        "--namespaces",
+        action="store_true",
+        default=False,
+        help="show fully qualified package names",
+    )
+
+
 @arg
 def jobs():
     return Args(
@@ -24,7 +24,6 @@ def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="compiler_command")

     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     # Find
     find_parser = sp.add_parser(

@@ -36,7 +35,7 @@ def setup_parser(subparser):
     find_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope("compilers"),
         help="configuration scope to modify",
     )

@@ -50,7 +49,7 @@ def setup_parser(subparser):
     remove_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=None,
         help="configuration scope to modify",
     )

@@ -60,7 +59,7 @@ def setup_parser(subparser):
     list_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )

@@ -71,7 +70,7 @@ def setup_parser(subparser):
     info_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )

@@ -93,7 +92,7 @@ def compiler_find(args):
     n = len(new_compilers)
     s = "s" if n > 1 else ""

-    config = spack.config.config
+    config = spack.config.CONFIG
     filename = config.get_config_filename(args.scope, "compilers")
     tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
     colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)

@@ -186,7 +185,7 @@ def compiler_list(args):
             os_str = os
             if target:
                 os_str += "-%s" % target
-        cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
+        cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
         tty.hline(colorize(cname), char="-")
         colify(reversed(sorted(c.spec.display_str for c in compilers)))
@@ -13,12 +13,11 @@

 def setup_parser(subparser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     subparser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )
@@ -27,13 +27,12 @@

 def setup_parser(subparser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     # User can only choose one
     subparser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )

@@ -45,7 +44,7 @@ def setup_parser(subparser):
         help="configuration section to print\n\noptions: %(choices)s",
         nargs="?",
         metavar="section",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )

     blame_parser = sp.add_parser(

@@ -55,7 +54,7 @@ def setup_parser(subparser):
         "section",
         help="configuration section to print\n\noptions: %(choices)s",
         metavar="section",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )

     edit_parser = sp.add_parser("edit", help="edit configuration file")

@@ -64,7 +63,7 @@ def setup_parser(subparser):
         help="configuration section to edit\n\noptions: %(choices)s",
         metavar="section",
         nargs="?",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )
     edit_parser.add_argument(
         "--print-file", action="store_true", help="print the file name that would be edited"

@@ -146,10 +145,10 @@ def config_get(args):
     scope, section = _get_scope_and_section(args)

     if section is not None:
-        spack.config.config.print_section(section)
+        spack.config.CONFIG.print_section(section)

     elif scope and scope.startswith("env:"):
-        config_file = spack.config.config.get_config_filename(scope, section)
+        config_file = spack.config.CONFIG.get_config_filename(scope, section)
         if os.path.exists(config_file):
             with open(config_file) as f:
                 print(f.read())

@@ -162,7 +161,7 @@ def config_get(args):

 def config_blame(args):
     """Print out line-by-line blame of merged YAML."""
-    spack.config.config.print_section(args.section, blame=True)
+    spack.config.CONFIG.print_section(args.section, blame=True)


 def config_edit(args):

@@ -181,7 +180,7 @@ def config_edit(args):
     scope, section = _get_scope_and_section(args)
     if not scope and not section:
         tty.die("`spack config edit` requires a section argument or an active environment.")
-    config_file = spack.config.config.get_config_filename(scope, section)
+    config_file = spack.config.CONFIG.get_config_filename(scope, section)

     if args.print_file:
         print(config_file)

@@ -194,7 +193,7 @@ def config_list(args):

     Used primarily for shell tab completion scripts.
     """
-    print(" ".join(list(spack.config.section_schemas)))
+    print(" ".join(list(spack.config.SECTION_SCHEMAS)))


 def config_add(args):

@@ -251,19 +250,19 @@ def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):

 def config_update(args):
     # Read the configuration files
-    spack.config.config.get_config(args.section, scope=args.scope)
+    spack.config.CONFIG.get_config(args.section, scope=args.scope)
     updates: List[spack.config.ConfigScope] = list(
         filter(
             lambda s: not isinstance(
                 s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
             ),
-            spack.config.config.format_updates[args.section],
+            spack.config.CONFIG.format_updates[args.section],
         )
     )

     cannot_overwrite, skip_system_scope = [], False
     for scope in updates:
-        cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
         can_be_updated = _can_update_config_file(scope, cfg_file)
         if not can_be_updated:
             if scope.name == "system":

@@ -302,7 +301,7 @@ def config_update(args):
         " the latest schema format:\n\n"
     )
     for scope in updates:
-        cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
         msg += "\t[scope={0}, file={1}]\n".format(scope.name, cfg_file)
     msg += (
         "\nIf the configuration files are updated, versions of Spack "

@@ -325,7 +324,7 @@ def config_update(args):
     # Make a backup copy and rewrite the file
     bkp_file = cfg_file + ".bkp"
     shutil.copy(cfg_file, bkp_file)
-    spack.config.config.update_config(args.section, data, scope=scope.name, force=True)
+    spack.config.CONFIG.update_config(args.section, data, scope=scope.name, force=True)
     tty.msg(f'File "{cfg_file}" updated [backup={bkp_file}]')

@@ -337,13 +336,13 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):


 def config_revert(args):
-    scopes = [args.scope] if args.scope else [x.name for x in spack.config.config.file_scopes]
+    scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]

     # Search for backup files in the configuration scopes
     Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])
     to_be_restored, cannot_overwrite = [], []
     for scope in scopes:
-        cfg_file = spack.config.config.get_config_filename(scope, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope, args.section)
         bkp_file = cfg_file + ".bkp"

         # If the backup file doesn't exist, move on to the next scope

@@ -457,7 +456,7 @@ def config_prefer_upstream(args):
     existing = spack.config.get("packages", scope=scope)
     new = spack.config.merge_yaml(existing, pkgs)
     spack.config.set("packages", new, scope)
-    config_file = spack.config.config.get_config_filename(scope, section)
+    config_file = spack.config.CONFIG.get_config_filename(scope, section)

     tty.msg("Updated config at {0}".format(config_file))
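Throughout `config_update`, only the accessor spelling changes (`spack.config.config` becomes `spack.config.CONFIG`); the logic of skipping scopes whose files must not be rewritten is unchanged. A toy sketch of that `isinstance` filter, with stand-in classes rather than Spack's real scope types:

class ConfigScope:
    def __init__(self, name):
        self.name = name

class InternalConfigScope(ConfigScope): ...
class ImmutableConfigScope(ConfigScope): ...

scopes = [ConfigScope("user"), InternalConfigScope("command_line"), ImmutableConfigScope("system")]

# Same shape as the filter in config_update: drop internal and immutable scopes.
updates = [s for s in scopes if not isinstance(s, (InternalConfigScope, ImmutableConfigScope))]
print([s.name for s in updates])  # ['user']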
@@ -17,6 +17,7 @@
 from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
 from spack.util.editor import editor
 from spack.util.executable import ProcessError, which
+from spack.util.format import get_version_lines
 from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name

 description = "create a new package file"

@@ -832,13 +833,15 @@ def get_versions(args, name):
         version = parse_version(args.url)
         url_dict = {version: args.url}

-        versions = spack.stage.get_checksums_for_versions(
+        version_hashes = spack.stage.get_checksums_for_versions(
             url_dict,
             name,
             first_stage_function=guesser,
             keep_stage=args.keep_stage,
             batch=(args.batch or len(url_dict) == 1),
         )
+
+        versions = get_version_lines(version_hashes, url_dict)
     else:
         versions = unhashed_versions

@@ -912,11 +915,11 @@ def get_repository(args, name):
         )
     else:
         if spec.namespace:
-            repo = spack.repo.path.get_repo(spec.namespace, None)
+            repo = spack.repo.PATH.get_repo(spec.namespace, None)
             if not repo:
                 tty.die("Unknown namespace: '{0}'".format(spec.namespace))
         else:
-            repo = spack.repo.path.first_repo()
+            repo = spack.repo.PATH.first_repo()

     # Set the namespace on the spec if it's not there already
     if not spec.namespace:
@@ -47,14 +47,14 @@ def inverted_dependencies():
     actual dependents.
     """
     dag = {}
-    for pkg_cls in spack.repo.path.all_package_classes():
+    for pkg_cls in spack.repo.PATH.all_package_classes():
         dag.setdefault(pkg_cls.name, set())
         for dep in pkg_cls.dependencies:
             deps = [dep]

             # expand virtuals if necessary
-            if spack.repo.path.is_virtual(dep):
-                deps += [s.name for s in spack.repo.path.providers_for(dep)]
+            if spack.repo.PATH.is_virtual(dep):
+                deps += [s.name for s in spack.repo.PATH.providers_for(dep)]

             for d in deps:
                 dag.setdefault(d, set()).add(pkg_cls.name)
@@ -98,7 +98,7 @@ def dev_build(self, args):
         tty.die("spack dev-build only takes one spec.")

     spec = specs[0]
-    if not spack.repo.path.exists(spec.name):
+    if not spack.repo.PATH.exists(spec.name):
         tty.die(
             "No package for '{0}' was found.".format(spec.name),
             "  Use `spack create` to create a new package",
@@ -31,9 +31,9 @@ def edit_package(name, repo_path, namespace):
     if repo_path:
         repo = spack.repo.Repo(repo_path)
     elif namespace:
-        repo = spack.repo.path.get_repo(namespace)
+        repo = spack.repo.PATH.get_repo(namespace)
     else:
-        repo = spack.repo.path
+        repo = spack.repo.PATH
     path = repo.filename_for_package_name(name)

     spec = Spec(name)
@@ -239,6 +239,13 @@ def env_deactivate_setup_parser(subparser):
         const="bat",
         help="print bat commands to activate the environment",
     )
+    shells.add_argument(
+        "--pwsh",
+        action="store_const",
+        dest="shell",
+        const="pwsh",
+        help="print pwsh commands to activate the environment",
+    )


 def env_deactivate(args):
@@ -58,7 +58,7 @@ def extensions(parser, args):

     extendable_pkgs = []
     for name in spack.repo.all_package_names():
-        pkg_cls = spack.repo.path.get_pkg_class(name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(name)
         if pkg_cls.extendable:
             extendable_pkgs.append(name)

@@ -81,7 +81,7 @@ def extensions(parser, args):

     if args.show in ("packages", "all"):
         # List package names of extensions
-        extensions = spack.repo.path.extensions_for(spec)
+        extensions = spack.repo.PATH.extensions_for(spec)
         if not extensions:
             tty.msg("%s has no extensions." % spec.cshort_spec)
         else:
@@ -6,6 +6,7 @@
 import errno
 import os
 import sys
+from typing import List, Optional

 import llnl.util.tty as tty
 import llnl.util.tty.colify as colify

@@ -13,6 +14,7 @@
 import spack
 import spack.cmd
+import spack.cmd.common.arguments
 import spack.config
 import spack.cray_manifest as cray_manifest
 import spack.detection
 import spack.error

@@ -27,7 +29,6 @@ def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="external_command")

     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     find_parser = sp.add_parser("find", help="add external packages to packages.yaml")
     find_parser.add_argument(

@@ -47,14 +48,14 @@ def setup_parser(subparser):
     find_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope("packages"),
         help="configuration scope to modify",
     )
     find_parser.add_argument(
         "--all", action="store_true", help="search for all packages that Spack knows about"
     )
-    spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags"])
+    spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags", "jobs"])
     find_parser.add_argument("packages", nargs=argparse.REMAINDER)
     find_parser.epilog = (
         'The search is by default on packages tagged with the "build-tools" or '

@@ -120,52 +121,29 @@ def external_find(args):
     else:
         tty.warn("Unable to read manifest, unexpected error: {0}".format(str(e)), skip_msg)

-    # If the user didn't specify anything, search for build tools by default
-    if not args.tags and not args.all and not args.packages:
-        args.tags = ["core-packages", "build-tools"]
-
-    # If the user specified both --all and --tag, then --all has precedence
-    if args.all and args.tags:
-        args.tags = []
-
-    # Construct the list of possible packages to be detected
-    pkg_cls_to_check = []
-
-    # Add the packages that have been required explicitly
-    if args.packages:
-        pkg_cls_to_check = [spack.repo.path.get_pkg_class(pkg) for pkg in args.packages]
-        if args.tags:
-            allowed = set(spack.repo.path.packages_with_tags(*args.tags))
-            pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]
-
-    if args.tags and not pkg_cls_to_check:
-        # If we arrived here we didn't have any explicit package passed
-        # as argument, which means to search all packages.
-        # Since tags are cached it's much faster to construct what we need
-        # to search directly, rather than filtering after the fact
-        pkg_cls_to_check = [
-            spack.repo.path.get_pkg_class(pkg_name)
-            for tag in args.tags
-            for pkg_name in spack.repo.path.packages_with_tags(tag)
-        ]
-        pkg_cls_to_check = list(set(pkg_cls_to_check))
-
-    # If the list of packages is empty, search for every possible package
-    if not args.tags and not pkg_cls_to_check:
-        pkg_cls_to_check = list(spack.repo.path.all_package_classes())
-
-    # If the user specified any packages to exclude from external find, add them here
-    if args.exclude:
-        pkg_cls_to_check = [pkg for pkg in pkg_cls_to_check if pkg.name not in args.exclude]
-
-    detected_packages = spack.detection.by_executable(pkg_cls_to_check, path_hints=args.path)
-    detected_packages.update(spack.detection.by_library(pkg_cls_to_check, path_hints=args.path))
+    # Outside the Cray manifest, the search is done by tag for performance reasons,
+    # since tags are cached.
+
+    # If the user specified both --all and --tag, then --all has precedence
+    if args.all or args.packages:
+        # Each detectable package has at least the detectable tag
+        args.tags = ["detectable"]
+    elif not args.tags:
+        # If the user didn't specify anything, search for build tools by default
+        args.tags = ["core-packages", "build-tools"]
+
+    candidate_packages = packages_to_search_for(
+        names=args.packages, tags=args.tags, exclude=args.exclude
+    )
+    detected_packages = spack.detection.by_path(
+        candidate_packages, path_hints=args.path, max_workers=args.jobs
+    )

     new_entries = spack.detection.update_configuration(
         detected_packages, scope=args.scope, buildable=not args.not_buildable
     )
     if new_entries:
-        path = spack.config.config.get_config_filename(args.scope, "packages")
+        path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
         msg = "The following specs have been detected on this system and added to {0}"
         tty.msg(msg.format(path))
         spack.cmd.display_specs(new_entries)

@@ -173,6 +151,19 @@ def external_find(args):
         tty.msg("No new external packages detected")


+def packages_to_search_for(
+    *, names: Optional[List[str]], tags: List[str], exclude: Optional[List[str]]
+):
+    result = []
+    for current_tag in tags:
+        result.extend(spack.repo.PATH.packages_with_tags(current_tag))
+    if names:
+        result = [x for x in result if x in names]
+    if exclude:
+        result = [x for x in result if x not in exclude]
+    return result
+
+
 def external_read_cray_manifest(args):
     _collect_and_consume_cray_manifest_files(
         manifest_file=args.file,

@@ -239,7 +230,7 @@ def _collect_and_consume_cray_manifest_files(

 def external_list(args):
     # Trigger a read of all packages, might take a long time.
-    list(spack.repo.path.all_package_classes())
+    list(spack.repo.PATH.all_package_classes())
     # Print all the detectable packages
     tty.msg("Detectable packages per repository")
     for namespace, pkgs in sorted(spack.package_base.detectable_packages.items()):
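The new `packages_to_search_for` builds candidates from the cached tag index first and only then narrows by explicit names and excludes, which is why `--all` and explicit packages are mapped to the `detectable` tag above. A self-contained sketch of the same filtering order against a hypothetical tag index:

from typing import List, Optional

# Hypothetical stand-in for spack.repo.PATH.packages_with_tags.
TAG_INDEX = {
    "build-tools": ["cmake", "gmake", "ninja"],
    "core-packages": ["gcc", "llvm"],
    "detectable": ["cmake", "gmake", "ninja", "gcc", "llvm"],
}


def packages_to_search_for(
    *, names: Optional[List[str]], tags: List[str], exclude: Optional[List[str]]
):
    result = []
    for current_tag in tags:
        result.extend(TAG_INDEX.get(current_tag, []))
    if names:
        result = [x for x in result if x in names]
    if exclude:
        result = [x for x in result if x not in exclude]
    return result


print(packages_to_search_for(names=None, tags=["build-tools"], exclude=["gmake"]))
# ['cmake', 'ninja']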
@@ -67,7 +67,7 @@ def setup_parser(subparser):
         help="do not group specs by arch/compiler",
     )

-    arguments.add_common_arguments(subparser, ["long", "very_long", "tags"])
+    arguments.add_common_arguments(subparser, ["long", "very_long", "tags", "namespaces"])

     subparser.add_argument(
         "-c",

@@ -140,9 +140,6 @@ def setup_parser(subparser):
     subparser.add_argument(
         "--only-deprecated", action="store_true", help="show only deprecated packages"
     )
-    subparser.add_argument(
-        "-N", "--namespace", action="store_true", help="show fully qualified package names"
-    )

     subparser.add_argument("--start-date", help="earliest date of installation [YYYY-MM-DD]")
     subparser.add_argument("--end-date", help="latest date of installation [YYYY-MM-DD]")

@@ -230,7 +227,7 @@ def display_env(env, args, decorator, results):
         env.user_specs,
         root_args,
         decorator=lambda s, f: color.colorize("@*{%s}" % f),
-        namespace=True,
+        namespaces=True,
         show_flags=True,
         show_full_compiler=True,
         variants=True,

@@ -271,7 +268,7 @@ def find(parser, args):

     # If tags have been specified on the command line, filter by tags
     if args.tags:
-        packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
+        packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
         results = [x for x in results if x.name in packages_with_tags]

     if args.loaded:
@@ -64,11 +64,11 @@ def section_title(s):


 def version(s):
-    return spack.spec.version_color + s + plain_format
+    return spack.spec.VERSION_COLOR + s + plain_format


 def variant(s):
-    return spack.spec.enabled_variant_color + s + plain_format
+    return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format


 class VariantFormatter:

@@ -349,7 +349,7 @@ def print_virtuals(pkg):

 def info(parser, args):
     spec = spack.spec.Spec(args.package)
-    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
     pkg = pkg_cls(spec)

     # Output core package information
@@ -107,7 +107,7 @@ def match(p, f):
         if f.match(p):
             return True

-        pkg_cls = spack.repo.path.get_pkg_class(p)
+        pkg_cls = spack.repo.PATH.get_pkg_class(p)
         if pkg_cls.__doc__:
             return f.match(pkg_cls.__doc__)
         return False

@@ -159,7 +159,7 @@ def get_dependencies(pkg):
 @formatter
 def version_json(pkg_names, out):
     """Print all packages with their latest versions."""
-    pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
+    pkg_classes = [spack.repo.PATH.get_pkg_class(name) for name in pkg_names]

     out.write("[\n")

@@ -201,7 +201,7 @@ def html(pkg_names, out):
     """

     # Read in all packages
-    pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
+    pkg_classes = [spack.repo.PATH.get_pkg_class(name) for name in pkg_names]

     # Start at 2 because the title of the page from Sphinx is id1.
     span_id = 2

@@ -313,13 +313,13 @@ def list(parser, args):

     # If tags have been specified on the command line, filter by tags
     if args.tags:
-        packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
+        packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
         sorted_packages = [p for p in sorted_packages if p in packages_with_tags]

     if args.update:
         # change output stream if user asked for update
         if os.path.exists(args.update):
-            if os.path.getmtime(args.update) > spack.repo.path.last_mtime():
+            if os.path.getmtime(args.update) > spack.repo.PATH.last_mtime():
                 tty.msg("File is up to date: %s" % args.update)
                 return
@@ -52,6 +52,13 @@ def setup_parser(subparser):
         const="bat",
         help="print bat commands to load the package",
     )
+    shells.add_argument(
+        "--pwsh",
+        action="store_const",
+        dest="shell",
+        const="pwsh",
+        help="print pwsh commands to load the package",
+    )

     subparser.add_argument(
         "--first",
@@ -109,7 +109,7 @@ def location(parser, args):
         return

     if args.packages:
-        print(spack.repo.path.first_repo().root)
+        print(spack.repo.PATH.first_repo().root)
         return

     if args.stages:

@@ -135,7 +135,7 @@ def location(parser, args):

     # Package dir just needs the spec name
     if args.package_dir:
-        print(spack.repo.path.dirname_for_package_name(spec.name))
+        print(spack.repo.PATH.dirname_for_package_name(spec.name))
         return

     # Either concretize or filter from already concretized environment
@@ -54,11 +54,11 @@ def setup_parser(subparser):

 def packages_to_maintainers(package_names=None):
     if not package_names:
-        package_names = spack.repo.path.all_package_names()
+        package_names = spack.repo.PATH.all_package_names()

     pkg_to_users = defaultdict(lambda: set())
     for name in package_names:
-        cls = spack.repo.path.get_pkg_class(name)
+        cls = spack.repo.PATH.get_pkg_class(name)
         for user in cls.maintainers:
             pkg_to_users[name].add(user)

@@ -67,8 +67,8 @@ def packages_to_maintainers(package_names=None):

 def maintainers_to_packages(users=None):
     user_to_pkgs = defaultdict(lambda: [])
-    for name in spack.repo.path.all_package_names():
-        cls = spack.repo.path.get_pkg_class(name)
+    for name in spack.repo.PATH.all_package_names():
+        cls = spack.repo.PATH.get_pkg_class(name)
         for user in cls.maintainers:
             lower_users = [u.lower() for u in users]
             if not users or user.lower() in lower_users:

@@ -80,8 +80,8 @@ def maintainers_to_packages(users=None):
 def maintained_packages():
     maintained = []
     unmaintained = []
-    for name in spack.repo.path.all_package_names():
-        cls = spack.repo.path.get_pkg_class(name)
+    for name in spack.repo.PATH.all_package_names():
+        cls = spack.repo.PATH.get_pkg_class(name)
         if cls.maintainers:
             maintained.append(name)
         else:
@@ -90,7 +90,6 @@ def setup_parser(subparser):

     # used to construct scope arguments below
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     # Add
     add_parser = sp.add_parser("add", help=mirror_add.__doc__)

@@ -99,7 +98,7 @@ def setup_parser(subparser):
     add_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )

@@ -119,7 +118,7 @@ def setup_parser(subparser):
     remove_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )

@@ -138,7 +137,7 @@ def setup_parser(subparser):
     set_url_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )

@@ -167,7 +166,7 @@ def setup_parser(subparser):
     set_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )

@@ -178,7 +177,7 @@ def setup_parser(subparser):
     list_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )

@@ -474,7 +473,7 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
         path, skip_unstable_versions=skip_unstable_versions
     )
     for candidate in mirror_specs:
-        pkg_cls = spack.repo.path.get_pkg_class(candidate.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(candidate.name)
         pkg_obj = pkg_cls(spack.spec.Spec(candidate))
         mirror_stats.next_spec(pkg_obj.spec)
         spack.mirror.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
@@ -309,7 +309,7 @@ def refresh(module_type, specs, args):

     # Skip unknown packages.
     writers = [
-        cls(spec, args.module_set_name) for spec in specs if spack.repo.path.exists(spec.name)
+        cls(spec, args.module_set_name) for spec in specs if spack.repo.PATH.exists(spec.name)
     ]

     # Filter excluded packages early

@@ -321,12 +321,13 @@ def refresh(module_type, specs, args):
             file2writer[item.layout.filename].append(item)

     if len(file2writer) != len(writers):
+        spec_fmt_str = "{name}@={version}%{compiler}/{hash:7} {variants} arch={arch}"
         message = "Name clashes detected in module files:\n"
         for filename, writer_list in file2writer.items():
             if len(writer_list) > 1:
                 message += "\nfile: {0}\n".format(filename)
                 for x in writer_list:
-                    message += "spec: {0}\n".format(x.spec.format())
+                    message += "spec: {0}\n".format(x.spec.format(spec_fmt_str))
         tty.error(message)
         tty.error("Operation aborted")
         raise SystemExit(1)

@@ -376,7 +377,7 @@ def refresh(module_type, specs, args):
 def modules_cmd(parser, args, module_type, callbacks=callbacks):
     # Qualifiers to be used when querying the db for specs
     constraint_qualifiers = {
-        "refresh": {"installed": True, "known": lambda x: not spack.repo.path.exists(x)}
+        "refresh": {"installed": True, "known": lambda x: not spack.repo.PATH.exists(x)}
     }
     query_args = constraint_qualifiers.get(args.subparser_name, {})
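The clash check in `refresh` groups writers by target filename and reports any filename claimed by more than one spec; the change above only makes the reported spec format more precise. A minimal sketch of that grouping, with plain tuples in place of module writers:

from collections import defaultdict

# Toy (filename, spec) pairs standing in for module writers.
writers = [
    ("gcc/12", "gcc@12.2 arch=linux"),
    ("gcc/12", "gcc@12.3 arch=linux"),
    ("cmake/3.26", "cmake@3.26 arch=linux"),
]

file2writer = defaultdict(list)
for filename, spec in writers:
    file2writer[filename].append(spec)

# Same clash test as refresh() above: fewer distinct filenames than writers.
if len(file2writer) != len(writers):
    for filename, specs in file2writer.items():
        if len(specs) > 1:
            print(f"file: {filename}")
            for s in specs:
                print(f"  spec: {s}")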
@@ -143,7 +143,7 @@ def pkg_source(args):
         tty.die("spack pkg source requires exactly one spec")

     spec = specs[0]
-    filename = spack.repo.path.filename_for_package_name(spec.name)
+    filename = spack.repo.PATH.filename_for_package_name(spec.name)

     # regular source dump -- just get the package and print its contents
     if args.canonical:

@@ -184,7 +184,7 @@ def pkg_grep(args, unknown_args):
     grouper = lambda e: e[0] // 500

     # set up iterator and save the first group to ensure we don't end up with a group of size 1
-    groups = itertools.groupby(enumerate(spack.repo.path.all_package_paths()), grouper)
+    groups = itertools.groupby(enumerate(spack.repo.PATH.all_package_paths()), grouper)
     if not groups:
         return 0  # no packages to search
@@ -24,7 +24,7 @@ def setup_parser(subparser):


 def providers(parser, args):
-    valid_virtuals = sorted(spack.repo.path.provider_index.providers.keys())
+    valid_virtuals = sorted(spack.repo.PATH.provider_index.providers.keys())

     buffer = io.StringIO()
     isatty = sys.stdout.isatty()

@@ -53,5 +53,5 @@ def providers(parser, args):
     for spec in specs:
         if sys.stdout.isatty():
             print("{0}:".format(spec))
-        spack.cmd.display_specs(sorted(spack.repo.path.providers_for(spec)))
+        spack.cmd.display_specs(sorted(spack.repo.PATH.providers_for(spec)))
         print("")
@@ -20,7 +20,6 @@
 def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="repo_command")
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     # Create
     create_parser = sp.add_parser("create", help=repo_create.__doc__)

@@ -45,7 +44,7 @@ def setup_parser(subparser):
     list_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )

@@ -56,7 +55,7 @@ def setup_parser(subparser):
     add_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )

@@ -69,7 +68,7 @@ def setup_parser(subparser):
     remove_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )
@@ -29,7 +29,7 @@ def setup_parser(subparser):

 def _show_patch(sha256):
     """Show a record from the patch index."""
-    patches = spack.repo.path.patch_index.index
+    patches = spack.repo.PATH.patch_index.index
     data = patches.get(sha256)

     if not data:

@@ -47,7 +47,7 @@ def _show_patch(sha256):
             owner = rec["owner"]

             if "relative_path" in rec:
-                pkg_dir = spack.repo.path.get_pkg_class(owner).package_dir
+                pkg_dir = spack.repo.PATH.get_pkg_class(owner).package_dir
                 path = os.path.join(pkg_dir, rec["relative_path"])
                 print("    path:    %s" % path)
             else:

@@ -60,7 +60,7 @@ def _show_patch(sha256):

 def resource_list(args):
     """list all resources known to spack (currently just patches)"""
-    patches = spack.repo.path.patch_index.index
+    patches = spack.repo.PATH.patch_index.index
     for sha256 in patches:
         if args.only_hashes:
             print(sha256)
@@ -42,7 +42,7 @@ def setup_parser(subparser):
     )

     # Below are arguments w.r.t. spec display (like spack spec)
-    arguments.add_common_arguments(subparser, ["long", "very_long"])
+    arguments.add_common_arguments(subparser, ["long", "very_long", "namespaces"])

     install_status_group = subparser.add_mutually_exclusive_group()
     arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])

@@ -73,13 +73,6 @@ def setup_parser(subparser):
         choices=["nodes", "edges", "paths"],
         help="how extensively to traverse the DAG (default: nodes)",
     )
-    subparser.add_argument(
-        "-N",
-        "--namespaces",
-        action="store_true",
-        default=False,
-        help="show fully qualified package names",
-    )
     subparser.add_argument(
         "-t", "--types", action="store_true", default=False, help="show dependency types"
     )

@@ -144,7 +137,7 @@ def solve(parser, args):
     # these are the same options as `spack spec`
     install_status_fn = spack.spec.Spec.install_status

-    fmt = spack.spec.display_format
+    fmt = spack.spec.DISPLAY_FORMAT
     if args.namespaces:
         fmt = "{namespace}." + fmt
@@ -29,7 +29,7 @@ def setup_parser(subparser):
     for further documentation regarding the spec syntax, see:
         spack help --spec
     """
-    arguments.add_common_arguments(subparser, ["long", "very_long"])
+    arguments.add_common_arguments(subparser, ["long", "very_long", "namespaces"])

     install_status_group = subparser.add_mutually_exclusive_group()
     arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])

@@ -67,13 +67,6 @@ def setup_parser(subparser):
         choices=["nodes", "edges", "paths"],
         help="how extensively to traverse the DAG (default: nodes)",
     )
-    subparser.add_argument(
-        "-N",
-        "--namespaces",
-        action="store_true",
-        default=False,
-        help="show fully qualified package names",
-    )
     subparser.add_argument(
         "-t", "--types", action="store_true", default=False, help="show dependency types"
     )

@@ -84,7 +77,7 @@ def setup_parser(subparser):
 def spec(parser, args):
     install_status_fn = spack.spec.Spec.install_status

-    fmt = spack.spec.display_format
+    fmt = spack.spec.DISPLAY_FORMAT
     if args.namespaces:
         fmt = "{namespace}." + fmt
@@ -68,7 +68,7 @@ def tags(parser, args):
         return

     # unique list of available tags
-    available_tags = sorted(spack.repo.path.tag_index.keys())
+    available_tags = sorted(spack.repo.PATH.tag_index.keys())
     if not available_tags:
         tty.msg("No tagged packages")
         return
@@ -228,7 +228,7 @@ def create_reporter(args, specs_to_test, test_suite):

 def test_list(args):
     """list installed packages with available tests"""
-    tagged = set(spack.repo.path.packages_with_tags(*args.tag)) if args.tag else set()
+    tagged = set(spack.repo.PATH.packages_with_tags(*args.tag)) if args.tag else set()

     def has_test_and_tags(pkg_class):
         tests = spack.install_test.test_functions(pkg_class)

@@ -237,7 +237,7 @@ def has_test_and_tags(pkg_class):
     if args.list_all:
         report_packages = [
             pkg_class.name
-            for pkg_class in spack.repo.path.all_package_classes()
+            for pkg_class in spack.repo.PATH.all_package_classes()
             if has_test_and_tags(pkg_class)
         ]
@@ -209,12 +209,11 @@ def unit_test(parser, args, unknown_args):
     # mock configuration used by unit tests
     # Note: skip on windows here because for the moment,
     # clingo is wholly unsupported from bootstrap
-    if sys.platform != "win32":
-        with spack.bootstrap.ensure_bootstrap_configuration():
-            spack.bootstrap.ensure_core_dependencies()
-            if pytest is None:
-                spack.bootstrap.ensure_environment_dependencies()
-                import pytest
+    with spack.bootstrap.ensure_bootstrap_configuration():
+        spack.bootstrap.ensure_core_dependencies()
+        if pytest is None:
+            spack.bootstrap.ensure_environment_dependencies()
+            import pytest

     if args.pytest_help:
         # make the pytest.main help output more accurate
@@ -51,6 +51,13 @@ def setup_parser(subparser):
         const="bat",
         help="print bat commands to load the package",
     )
+    shells.add_argument(
+        "--pwsh",
+        action="store_const",
+        dest="shell",
+        const="pwsh",
+        help="print pwsh commands to load the package",
+    )

     subparser.add_argument(
         "-a", "--all", action="store_true", help="unload all loaded Spack packages"
@@ -155,7 +155,7 @@ def url_list(args):
     urls = set()

     # Gather set of URLs from all packages
-    for pkg_cls in spack.repo.path.all_package_classes():
+    for pkg_cls in spack.repo.PATH.all_package_classes():
         url = getattr(pkg_cls, "url", None)
         urls = url_list_parsing(args, urls, url, pkg_cls)

@@ -192,7 +192,7 @@ def url_summary(args):
     tty.msg("Generating a summary of URL parsing in Spack...")

     # Loop through all packages
-    for pkg_cls in spack.repo.path.all_package_classes():
+    for pkg_cls in spack.repo.PATH.all_package_classes():
         urls = set()
         pkg = pkg_cls(spack.spec.Spec(pkg_cls.name))

@@ -336,7 +336,7 @@ def add(self, pkg_name, fetcher):
     version_stats = UrlStats()
     resource_stats = UrlStats()

-    for pkg_cls in spack.repo.path.all_package_classes():
+    for pkg_cls in spack.repo.PATH.all_package_classes():
         npkgs += 1

         for v in pkg_cls.versions:
@@ -45,7 +45,7 @@ def setup_parser(subparser):

 def versions(parser, args):
     spec = spack.spec.Spec(args.package)
-    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
     pkg = pkg_cls(spec)

     safe_versions = pkg.versions
@@ -135,7 +135,7 @@ def _init_compiler_config(*, scope):

 def compiler_config_files():
     config_files = list()
-    config = spack.config.config
+    config = spack.config.CONFIG
     for scope in config.file_scopes:
         name = scope.name
         compiler_config = config.get("compilers", scope=name)

@@ -169,7 +169,7 @@ def remove_compiler_from_config(compiler_spec, scope=None):
     """
     candidate_scopes = [scope]
     if scope is None:
-        candidate_scopes = spack.config.config.scopes.keys()
+        candidate_scopes = spack.config.CONFIG.scopes.keys()

     removal_happened = False
     for current_scope in candidate_scopes:

@@ -523,7 +523,7 @@ def compiler_for_spec(compiler_spec, arch_spec):

 @_auto_compiler_spec
 def get_compiler_duplicates(compiler_spec, arch_spec):
-    config = spack.config.config
+    config = spack.config.CONFIG

     scope_to_compilers = {}
     for scope in config.scopes:
@@ -2,13 +2,9 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os.path
 import re
-import shutil

 import llnl.util.lang
-import llnl.util.tty as tty
-from llnl.util.symlink import symlink

 import spack.compiler
 import spack.compilers.clang

@@ -119,108 +115,3 @@ def c23_flag(self):
                 self, "the C23 standard", "c23_flag", "< 11.0.3"
             )
         return "-std=c2x"
-
-    def setup_custom_environment(self, pkg, env):
-        """Set the DEVELOPER_DIR environment for the Xcode toolchain.
-
-        On macOS, not all buildsystems support querying CC and CXX for the
-        compilers to use and instead query the Xcode toolchain for what
-        compiler to run. This side-steps the spack wrappers. In order to inject
-        spack into this setup, we need to copy (a subset of) Xcode.app and
-        replace the compiler executables with symlinks to the spack wrapper.
-        Currently, the stage is used to store the Xcode.app copies. We then set
-        the 'DEVELOPER_DIR' environment variables to cause the xcrun and
-        related tools to use this Xcode.app.
-        """
-        super().setup_custom_environment(pkg, env)
-
-        if not pkg.use_xcode:
-            # if we do it for all packages, we get into big troubles with MPI:
-            # filter_compilers(self) will use mockup XCode compilers on macOS
-            # with Clang. Those point to Spack's compiler wrappers and
-            # consequently render MPI non-functional outside of Spack.
-            return
-
-        # Use special XCode versions of compiler wrappers when using XCode
-        # Overwrites build_environment's setting of SPACK_CC and SPACK_CXX
-        xcrun = spack.util.executable.Executable("xcrun")
-        xcode_clang = xcrun("-f", "clang", output=str).strip()
-        xcode_clangpp = xcrun("-f", "clang++", output=str).strip()
-        env.set("SPACK_CC", xcode_clang, force=True)
-        env.set("SPACK_CXX", xcode_clangpp, force=True)
-
-        xcode_select = spack.util.executable.Executable("xcode-select")
-
-        # Get the path of the active developer directory
-        real_root = xcode_select("--print-path", output=str).strip()
-
-        # The path name can be used to determine whether the full Xcode suite
-        # or just the command-line tools are installed
-        if real_root.endswith("Developer"):
-            # The full Xcode suite is installed
-            pass
-        else:
-            if real_root.endswith("CommandLineTools"):
-                # Only the command-line tools are installed
-                msg = "It appears that you have the Xcode command-line tools "
-                msg += "but not the full Xcode suite installed.\n"
-
-            else:
-                # Xcode is not installed
-                msg = "It appears that you do not have Xcode installed.\n"
-
-            msg += "In order to use Spack to build the requested application, "
-            msg += "you need the full Xcode suite. It can be installed "
-            msg += "through the App Store. Make sure you launch the "
-            msg += "application and accept the license agreement.\n"
-
-            raise OSError(msg)
-
-        real_root = os.path.dirname(os.path.dirname(real_root))
-        developer_root = os.path.join(
-            spack.stage.get_stage_root(), "xcode-select", self.name, str(self.version)
-        )
-        xcode_link = os.path.join(developer_root, "Xcode.app")
-
-        if not os.path.exists(developer_root):
-            tty.warn(
-                "Copying Xcode from %s to %s in order to add spack "
-                "wrappers to it. Please do not interrupt." % (real_root, developer_root)
-            )
-
-            # We need to make a new Xcode.app instance, but with symlinks to
-            # the spack wrappers for the compilers it ships. This is necessary
-            # because some projects insist on just asking xcrun and related
-            # tools where the compiler runs. These tools are very hard to trick
-            # as they do realpath and end up ignoring the symlinks in a
-            # "softer" tree of nothing but symlinks in the right places.
-            shutil.copytree(
-                real_root,
-                developer_root,
-                symlinks=True,
-                ignore=shutil.ignore_patterns(
-                    "AppleTV*.platform",
-                    "Watch*.platform",
-                    "iPhone*.platform",
-                    "Documentation",
-                    "swift*",
-                ),
-            )
-
-            real_dirs = ["Toolchains/XcodeDefault.xctoolchain/usr/bin", "usr/bin"]
-
-            bins = ["c++", "c89", "c99", "cc", "clang", "clang++", "cpp"]
-
-            for real_dir in real_dirs:
-                dev_dir = os.path.join(developer_root, "Contents", "Developer", real_dir)
-                for fname in os.listdir(dev_dir):
-                    if fname in bins:
-                        os.unlink(os.path.join(dev_dir, fname))
-                        symlink(
-                            os.path.join(spack.paths.build_env_path, "cc"),
-                            os.path.join(dev_dir, fname),
-                        )
-
-            symlink(developer_root, xcode_link)
-
-        env.set("DEVELOPER_DIR", xcode_link)
@@ -99,6 +99,28 @@ def cxx17_flag(self):
|
||||
else:
|
||||
return "-std=c++17"
|
||||
|
||||
@property
|
||||
def cxx20_flag(self):
|
||||
if self.real_version < Version("8.0"):
|
||||
raise spack.compiler.UnsupportedCompilerFlag(
|
||||
self, "the C++20 standard", "cxx20_flag", "< 8.0"
|
||||
)
|
||||
elif self.real_version < Version("11.0"):
|
||||
return "-std=c++2a"
|
||||
else:
|
||||
return "-std=c++20"
|
||||
|
||||
@property
|
||||
def cxx23_flag(self):
|
||||
if self.real_version < Version("11.0"):
|
||||
raise spack.compiler.UnsupportedCompilerFlag(
|
||||
self, "the C++23 standard", "cxx23_flag", "< 11.0"
|
||||
)
|
||||
elif self.real_version < Version("14.0"):
|
||||
return "-std=c++2b"
|
||||
else:
|
||||
return "-std=c++23"

    @property
    def c99_flag(self):
        if self.real_version < Version("4.5"):

@@ -29,6 +29,90 @@
}


class CmdCall:
    """Compose a call to `cmd` for an ordered series of cmd commands/scripts"""

    def __init__(self, *cmds):
        if not cmds:
            raise RuntimeError(
                """Attempting to run commands from CMD without specifying commands.
                Please add commands to be run."""
            )
        self._cmds = cmds

    def __call__(self):
        out = subprocess.check_output(self.cmd_line, stderr=subprocess.STDOUT)  # novermin
        return out.decode("utf-16le", errors="replace")  # novermin

    @property
    def cmd_line(self):
        base_call = "cmd /u /c "
        commands = " && ".join([x.command_str() for x in self._cmds])
        # If multiple commands are being invoked by a single subshell
        # they must be encapsulated by a double quote. Always double
        # quote to be sure of proper handling.
        # cmd will properly resolve nested double quotes as needed.
        #
        # `set` writes out the active env to the subshell stdout,
        # and in this context we are always trying to obtain env
        # state, so it should always be appended.
        return base_call + f'"{commands} && set"'


class VarsInvocation:
    def __init__(self, script):
        self._script = script

    def command_str(self):
        return f'"{self._script}"'

    @property
    def script(self):
        return self._script


class VCVarsInvocation(VarsInvocation):
    def __init__(self, script, arch, msvc_version):
        super(VCVarsInvocation, self).__init__(script)
        self._arch = arch
        self._msvc_version = msvc_version

    @property
    def sdk_ver(self):
        """Accessor for Windows SDK version property

        Note: This property may not be set by
        the calling context, and as such this property will
        return an empty string.

        This property will ONLY be set if the SDK package
        is a dependency somewhere in the Spack DAG of the package
        for which we are constructing an MSVC compiler env.
        Otherwise this property should be unset to allow the VCVARS
        script to use its internal heuristics to determine the
        appropriate SDK version.
        """
        if getattr(self, "_sdk_ver", None):
            return self._sdk_ver + ".0"
        return ""

    @sdk_ver.setter
    def sdk_ver(self, val):
        self._sdk_ver = val

    @property
    def arch(self):
        return self._arch

    @property
    def vcvars_ver(self):
        return f"-vcvars_ver={self._msvc_version}"

    def command_str(self):
        script = super(VCVarsInvocation, self).command_str()
        return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"


def get_valid_fortran_pth(comp_ver):
    cl_ver = str(comp_ver)
    sort_fn = lambda fc_ver: StrictVersion(fc_ver)

@@ -75,22 +159,48 @@ class Msvc(Compiler):
    # file based on compiler executable path.

    def __init__(self, *args, **kwargs):
        new_pth = [pth if pth else get_valid_fortran_pth(args[0].version) for pth in args[3]]
        args[3][:] = new_pth
        # This positional argument "paths" is later parsed and processed by the base class
        # via the call to `super` later in this method
        paths = args[3]
        # This positional argument "cspec" is also parsed and handled by the base class
        # constructor
        cspec = args[0]
        new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
        paths[:] = new_pth
        super().__init__(*args, **kwargs)
        if os.getenv("ONEAPI_ROOT"):
            # To use the MSVC compilers, VCVARS must be invoked.
            # VCVARS is located at a fixed location, referenceable
            # idiomatically by the following relative path from the
            # compiler.
            # Spack first finds the compilers via VSWHERE
            # and stores their path, but their respective VCVARS
            # file must be invoked before usage.
            env_cmds = []
            compiler_root = os.path.join(self.cc, "../../../../../../..")
            vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
            # get current platform architecture and format for vcvars argument
            arch = spack.platforms.real_host().default.lower()
            arch = arch.replace("-", "_")
            self.vcvars_call = VCVarsInvocation(vcvars_script_path, arch, self.msvc_version)
            env_cmds.append(self.vcvars_call)
            # Below is a check for a valid fortran path:
            # paths has c, cxx, fc, and f77 paths in that order;
            # paths[2] refers to the fc path and is a generic check
            # for a fortran compiler
            if paths[2]:
                # If this is found, it sets all the vars
                self.setvarsfile = os.path.join(os.getenv("ONEAPI_ROOT"), "setvars.bat")
            else:
                # To use the MSVC compilers, VCVARS must be invoked.
                # VCVARS is located at a fixed location, referenceable
                # idiomatically by the following relative path from the
                # compiler.
                # Spack first finds the compilers via VSWHERE
                # and stores their path, but their respective VCVARS
                # file must be invoked before usage.
                self.setvarsfile = os.path.abspath(os.path.join(self.cc, "../../../../../../.."))
                self.setvarsfile = os.path.join(self.setvarsfile, "Auxiliary", "Build", "vcvars64.bat")
            oneapi_root = os.getenv("ONEAPI_ROOT")
            oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
            oneapi_version_setvars = os.path.join(
                oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
            )
            # order matters here, the specific version env must be invoked first,
            # otherwise it will be ignored if the root setvars sets up the oneapi
            # env first
            env_cmds.extend(
                [VarsInvocation(oneapi_version_setvars), VarsInvocation(oneapi_root_setvars)]
            )
        self.msvc_compiler_environment = CmdCall(*env_cmds)

    @property
    def msvc_version(self):

@@ -119,16 +229,30 @@ def platform_toolset_ver(self):
        """
        return self.msvc_version[:2].joined.string[:3]

    @property
    def cl_version(self):
        """Cl toolset version"""
    def _compiler_version(self, compiler):
        """Returns the version object for a given compiler"""
        # ignore_errors is True here because ifx returns a non-zero
        # exit code when it is not given an input file
        return Version(
            re.search(
                Msvc.version_regex,
                spack.compiler.get_compiler_version_output(self.cc, version_arg=None),
                spack.compiler.get_compiler_version_output(
                    compiler, version_arg=None, ignore_errors=True
                ),
            ).group(1)
        )

    @property
    def cl_version(self):
        """Cl toolset version"""
        return self._compiler_version(self.cc)

    @property
    def ifx_version(self):
        """Ifx compiler version associated with this version of MSVC"""
        return self._compiler_version(self.fc)

    @property
    def vs_root(self):
        # The MSVC install root is located at a fixed level above the compiler
@@ -146,27 +270,12 @@ def setup_custom_environment(self, pkg, env):
        # output, sort into dictionary, use that to make the build
        # environment.

        # get current platform architecture and format for vcvars argument
        arch = spack.platforms.real_host().default.lower()
        arch = arch.replace("-", "_")
        # vcvars can target specific sdk versions, force it to pick up concretized sdk
        # version, if needed by spec
        sdk_ver = (
            ""
            if "win-sdk" not in pkg.spec or pkg.name == "win-sdk"
            else pkg.spec["win-sdk"].version.string + ".0"
        )
        # provide vcvars with msvc version selected by concretization,
        # not whatever it happens to pick up on the system (highest available version)
        out = subprocess.check_output(  # novermin
            'cmd /u /c "{}" {} {} {} && set'.format(
                self.setvarsfile, arch, sdk_ver, "-vcvars_ver=%s" % self.msvc_version
            ),
            stderr=subprocess.STDOUT,
        )
        if sys.version_info[0] >= 3:
            out = out.decode("utf-16le", errors="replace")  # novermin
        if pkg.name != "win-sdk" and "win-sdk" in pkg.spec:
            self.vcvars_call.sdk_ver = pkg.spec["win-sdk"].version.string

        out = self.msvc_compiler_environment()
        int_env = dict(
            (key, value)
            for key, _, value in (line.partition("=") for line in out.splitlines())

@@ -28,6 +28,7 @@

import spack.abi
import spack.compilers
import spack.config
import spack.environment
import spack.error
import spack.platforms

@@ -37,7 +38,6 @@
import spack.tengine
import spack.util.path
import spack.variant as vt
from spack.config import config
from spack.package_prefs import PackagePrefs, is_spec_buildable, spec_externals
from spack.version import ClosedOpenRange, VersionList, ver

@@ -76,7 +76,7 @@ class Concretizer:

    def __init__(self, abstract_spec=None):
        if Concretizer.check_for_compiler_existence is None:
            Concretizer.check_for_compiler_existence = not config.get(
            Concretizer.check_for_compiler_existence = not spack.config.get(
                "config:install_missing_compilers", False
            )
        self.abstract_spec = abstract_spec

@@ -113,7 +113,7 @@ def _valid_virtuals_and_externals(self, spec):
        pref_key = lambda spec: 0  # no-op pref key

        if spec.virtual:
            candidates = spack.repo.path.providers_for(spec)
            candidates = spack.repo.PATH.providers_for(spec)
            if not candidates:
                raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)


@@ -47,6 +47,8 @@
import spack.platforms
import spack.schema
import spack.schema.bootstrap
import spack.schema.cdash
import spack.schema.ci
import spack.schema.compilers
import spack.schema.concretizer
import spack.schema.config

@@ -64,7 +66,7 @@
from spack.util.cpus import cpus_available

#: Dict from section names -> schema for that section
section_schemas = {
SECTION_SCHEMAS = {
    "compilers": spack.schema.compilers.schema,
    "concretizer": spack.schema.concretizer.schema,
    "mirrors": spack.schema.mirrors.schema,

@@ -80,16 +82,16 @@

# Same as above, but including keys for environments
# this allows us to unify config reading between configs and environments
all_schemas = copy.deepcopy(section_schemas)
all_schemas.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
_ALL_SCHEMAS = copy.deepcopy(SECTION_SCHEMAS)
_ALL_SCHEMAS.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})

#: Path to the default configuration
configuration_defaults_path = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
CONFIGURATION_DEFAULTS_PATH = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))

#: Hard-coded default values for some key configuration options.
#: This ensures that Spack will still work even if config.yaml in
#: the defaults scope is removed.
config_defaults = {
CONFIG_DEFAULTS = {
    "config": {
        "debug": False,
        "connect_timeout": 10,

@@ -105,10 +107,10 @@

#: metavar to use for commands that accept scopes
#: this is shorter and more readable than listing all choices
scopes_metavar = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
SCOPES_METAVAR = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"

#: Base name for the (internal) overrides scope.
overrides_base_name = "overrides-"
_OVERRIDES_BASE_NAME = "overrides-"


class ConfigScope:

@@ -134,7 +136,7 @@ def get_section_filename(self, section):
    def get_section(self, section):
        if section not in self.sections:
            path = self.get_section_filename(section)
            schema = section_schemas[section]
            schema = SECTION_SCHEMAS[section]
            data = read_config_file(path, schema)
            self.sections[section] = data
        return self.sections[section]

@@ -145,7 +147,7 @@ def _write_section(self, section):

        # We copy data here to avoid adding defaults at write time
        validate_data = copy.deepcopy(data)
        validate(validate_data, section_schemas[section])
        validate(validate_data, SECTION_SCHEMAS[section])

        try:
            mkdirp(self.path)

@@ -317,7 +319,7 @@ def __init__(self, name, data=None):
            data = InternalConfigScope._process_dict_keyname_overrides(data)
            for section in data:
                dsec = data[section]
                validate({section: dsec}, section_schemas[section])
                validate({section: dsec}, SECTION_SCHEMAS[section])
                self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)

    def get_section_filename(self, section):

@@ -333,7 +335,7 @@ def _write_section(self, section):
        """This only validates, as the data is already in memory."""
        data = self.get_section(section)
        if data is not None:
            validate(data, section_schemas[section])
            validate(data, SECTION_SCHEMAS[section])
            self.sections[section] = _mark_internal(data, self.name)

    def __repr__(self):

@@ -430,7 +432,7 @@ def file_scopes(self) -> List[ConfigScope]:
        return [
            s
            for s in self.scopes.values()
            if (type(s) == ConfigScope or type(s) == SingleFileScope)
            if (type(s) is ConfigScope or type(s) is SingleFileScope)
        ]

    def highest_precedence_scope(self) -> ConfigScope:

@@ -711,11 +713,11 @@ def override(path_or_scope, value=None):
    """
    if isinstance(path_or_scope, ConfigScope):
        overrides = path_or_scope
        config.push_scope(path_or_scope)
        CONFIG.push_scope(path_or_scope)
    else:
        base_name = overrides_base_name
        base_name = _OVERRIDES_BASE_NAME
        # Ensure the new override gets a unique scope name
        current_overrides = [s.name for s in config.matching_scopes(r"^{0}".format(base_name))]
        current_overrides = [s.name for s in CONFIG.matching_scopes(r"^{0}".format(base_name))]
        num_overrides = len(current_overrides)
        while True:
            scope_name = "{0}{1}".format(base_name, num_overrides)

@@ -725,19 +727,19 @@ def override(path_or_scope, value=None):
                break

        overrides = InternalConfigScope(scope_name)
        config.push_scope(overrides)
        config.set(path_or_scope, value, scope=scope_name)
        CONFIG.push_scope(overrides)
        CONFIG.set(path_or_scope, value, scope=scope_name)

    try:
        yield config
        yield CONFIG
    finally:
        scope = config.remove_scope(overrides.name)
        scope = CONFIG.remove_scope(overrides.name)
        assert scope is overrides
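`override` pushes a temporary `InternalConfigScope` with a unique `overrides-N` name, yields the global configuration, and guarantees the scope is popped again on exit. A minimal usage sketch (the option and value are arbitrary examples):

```python
import spack.config

# Temporarily force a config value; the override scope is removed on exit.
with spack.config.override("config:debug", True):
    assert spack.config.get("config:debug") is True
```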


#: configuration scopes added on the command line
#: set by ``spack.main.main()``.
command_line_scopes: List[str] = []
COMMAND_LINE_SCOPES: List[str] = []


def _add_platform_scope(cfg, scope_type, name, path):

@@ -781,14 +783,14 @@ def create():
    cfg = Configuration()

    # first do the builtin, hardcoded defaults
    builtin = InternalConfigScope("_builtin", config_defaults)
    builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
    cfg.push_scope(builtin)

    # Builtin paths to configuration files in Spack
    configuration_paths = [
        # Default configuration scope is the lowest-level scope. These are
        # versioned with Spack and can be overridden by systems, sites or users
        configuration_defaults_path
        CONFIGURATION_DEFAULTS_PATH
    ]

    disable_local_config = "SPACK_DISABLE_LOCAL_CONFIG" in os.environ

@@ -815,7 +817,7 @@ def create():
        _add_platform_scope(cfg, ConfigScope, name, path)

    # add command-line scopes
    _add_command_line_scopes(cfg, command_line_scopes)
    _add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)

    # we make a special scope for spack commands so that they can
    # override configuration options.

@@ -825,7 +827,7 @@ def create():


#: This is the singleton configuration instance for Spack.
config: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
CONFIG: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)


def add_from_file(filename, scope=None):

@@ -838,7 +840,7 @@ def add_from_file(filename, scope=None):
    # update all sections from config dict
    # We have to iterate on keys to keep overrides from the file
    for section in data.keys():
        if section in section_schemas.keys():
        if section in SECTION_SCHEMAS.keys():
            # Special handling for compiler scope difference
            # Has to be handled after we choose a section
            if scope is None:

@@ -849,18 +851,18 @@ def add_from_file(filename, scope=None):
            new = merge_yaml(existing, value)

            # We cannot call config.set directly (set is a type)
            config.set(section, new, scope)
            CONFIG.set(section, new, scope)


def add(fullpath, scope=None):
    """Add the given configuration to the specified config scope.
    Add accepts a path. If you want to add from a filename, use add_from_file"""

    components = process_config_path(fullpath)

    has_existing_value = True
    path = ""
    override = False
    value = syaml.load_config(components[-1])
    for idx, name in enumerate(components[:-1]):
        # First handle double colons in constructing path
        colon = "::" if override else ":" if path else ""

@@ -881,14 +883,14 @@ def add(fullpath, scope=None):
            existing = get_valid_type(path)

            # construct value from this point down
            value = syaml.load_config(components[-1])
            for component in reversed(components[idx + 1 : -1]):
                value = {component: value}
            break

    if override:
        path += "::"

    if has_existing_value:
        path, _, value = fullpath.rpartition(":")
        value = syaml.load_config(value)
        existing = get(path, scope=scope)

        # append values to lists

@@ -897,12 +899,12 @@ def add(fullpath, scope=None):

    # merge value into existing
    new = merge_yaml(existing, value)
    config.set(path, new, scope)
    CONFIG.set(path, new, scope)


def get(path, default=None, scope=None):
    """Module-level wrapper for ``Configuration.get()``."""
    return config.get(path, default, scope)
    return CONFIG.get(path, default, scope)


def set(path, value, scope=None):

@@ -910,26 +912,26 @@ def set(path, value, scope=None):

    Accepts the path syntax described in ``get()``.
    """
    return config.set(path, value, scope)
    return CONFIG.set(path, value, scope)


def add_default_platform_scope(platform):
    plat_name = os.path.join("defaults", platform)
    plat_path = os.path.join(configuration_defaults_path[1], platform)
    config.push_scope(ConfigScope(plat_name, plat_path))
    plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
    CONFIG.push_scope(ConfigScope(plat_name, plat_path))


def scopes():
    """Convenience function to get the list of configuration scopes."""
    return config.scopes
    return CONFIG.scopes


def _validate_section_name(section):
    """Exit if the section is not a valid section."""
    if section not in section_schemas:
    if section not in SECTION_SCHEMAS:
        raise ConfigSectionError(
            "Invalid config section: '%s'. Options are: %s"
            % (section, " ".join(section_schemas.keys()))
            % (section, " ".join(SECTION_SCHEMAS.keys()))
        )


@@ -990,7 +992,7 @@ def read_config_file(filename, schema=None):
    if data:
        if not schema:
            key = next(iter(data))
            schema = all_schemas[key]
            schema = _ALL_SCHEMAS[key]
        validate(data, schema)
    return data

@@ -1089,7 +1091,7 @@ def get_valid_type(path):
        test_data = {component: test_data}

    try:
        validate(test_data, section_schemas[section])
        validate(test_data, SECTION_SCHEMAS[section])
    except (ConfigFormatError, AttributeError) as e:
        jsonschema_error = e.validation_error
        if jsonschema_error.validator == "type":

@@ -1229,11 +1231,17 @@ def they_are(t):
    return copy.copy(source)


#
# Process a path argument to config.set() that may contain overrides ('::' or
# trailing ':')
#
def process_config_path(path):
    """Process a path argument to config.set() that may contain overrides ('::' or
    trailing ':')

    Note: quoted value path components will be processed as a single value (escaping colons);
    quoted path components outside of the value will be considered ill-formed and will
    raise.
    e.g. `this:is:a:path:'value:with:colon'` will yield:

        [this, is, a, path, value:with:colon]
    """
    result = []
    if path.startswith(":"):
        raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".format(path), "")

@@ -1261,6 +1269,17 @@ def process_config_path(path):
            front.append = True

        result.append(front)

        quote = "['\"]"
        not_quote = "[^'\"]"

        if re.match(f"^{quote}", path):
            m = re.match(rf"^({quote}{not_quote}+{quote})$", path)
            if not m:
                raise ValueError("Quotes indicate value, but there are additional path entries")
            result.append(m.group(1))
            break

    return result
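The quote handling means a trailing quoted component is treated as a single value even when it contains colons, while a quoted component followed by further path entries raises. A quick sketch of the behavior the docstring describes:

```python
# Illustrative: the trailing quoted component survives as one entry
# rather than being split on its internal colons (cf. the docstring above).
components = process_config_path("this:is:a:path:'value:with:colon'")
assert components[:4] == ["this", "is", "a", "path"]
assert components[-1].strip("'\"") == "value:with:colon"
```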


@@ -1278,9 +1297,9 @@ def default_modify_scope(section="config"):
    If this is not 'compilers', a general (non-platform) scope is used.
    """
    if section == "compilers":
        return spack.config.config.highest_precedence_scope().name
        return CONFIG.highest_precedence_scope().name
    else:
        return spack.config.config.highest_precedence_non_platform_scope().name
        return CONFIG.highest_precedence_non_platform_scope().name


def default_list_scope():

@@ -1337,18 +1356,18 @@ def use_configuration(*scopes_or_paths):
    Returns:
        Configuration object associated with the scopes passed as arguments
    """
    global config
    global CONFIG

    # Normalize input and construct a Configuration object
    configuration = _config_from(scopes_or_paths)
    config.clear_caches(), configuration.clear_caches()
    CONFIG.clear_caches(), configuration.clear_caches()

    saved_config, config = config, configuration
    saved_config, CONFIG = CONFIG, configuration

    try:
        yield configuration
    finally:
        config = saved_config
        CONFIG = saved_config


@llnl.util.lang.memoized

@@ -5,8 +5,8 @@
"""Writers for different kinds of recipes and related
convenience functions.
"""
import collections
import copy
from collections import namedtuple
from typing import Optional

import spack.environment as ev

@@ -159,13 +159,13 @@ def depfile(self):
    @tengine.context_property
    def run(self):
        """Information related to the run image."""
        Run = collections.namedtuple("Run", ["image"])
        Run = namedtuple("Run", ["image"])
        return Run(image=self.final_image)

    @tengine.context_property
    def build(self):
        """Information related to the build image."""
        Build = collections.namedtuple("Build", ["image"])
        Build = namedtuple("Build", ["image"])
        return Build(image=self.build_image)

    @tengine.context_property

@@ -176,12 +176,13 @@ def strip(self):

    @tengine.context_property
    def paths(self):
        """Important paths in the image"""
        Paths = collections.namedtuple("Paths", ["environment", "store", "hidden_view", "view"])
        Paths = namedtuple("Paths", ["environment", "store", "view_parent", "view", "former_view"])
        return Paths(
            environment="/opt/spack-environment",
            store="/opt/software",
            hidden_view="/opt/._view",
            view="/opt/view",
            view_parent="/opt/views",
            view="/opt/views/view",
            former_view="/opt/view",  # /opt/view -> /opt/views/view for backward compatibility
        )

    @tengine.context_property

@@ -257,7 +258,7 @@ def _package_info_from(self, package_list):

        update, install, clean = commands_for(os_pkg_manager)

        Packages = collections.namedtuple("Packages", ["update", "install", "list", "clean"])
        Packages = namedtuple("Packages", ["update", "install", "list", "clean"])
        return Packages(update=update, install=install, list=package_list, clean=clean)

    def _os_pkg_manager(self):

@@ -273,7 +274,7 @@ def _os_pkg_manager(self):

    @tengine.context_property
    def extra_instructions(self):
        Extras = collections.namedtuple("Extra", ["build", "final"])
        Extras = namedtuple("Extra", ["build", "final"])
        extras = self.container_config.get("extra_instructions", {})
        build, final = extras.get("build", None), extras.get("final", None)
        return Extras(build=build, final=final)

@@ -295,7 +296,7 @@ def bootstrap(self):
        context = {"bootstrap": {"image": self.bootstrap_image, "spack_checkout": command}}
        bootstrap_recipe = env.get_template(template_path).render(**context)

        Bootstrap = collections.namedtuple("Bootstrap", ["image", "recipe"])
        Bootstrap = namedtuple("Bootstrap", ["image", "recipe"])
        return Bootstrap(image=self.bootstrap_image, recipe=bootstrap_recipe)

    @tengine.context_property

@@ -303,7 +304,7 @@ def render_phase(self):
        render_bootstrap = bool(self.bootstrap_image)
        render_build = not (self.last_phase == "bootstrap")
        render_final = self.last_phase in (None, "final")
        Render = collections.namedtuple("Render", ["bootstrap", "build", "final"])
        Render = namedtuple("Render", ["bootstrap", "build", "final"])
        return Render(bootstrap=render_bootstrap, build=render_build, final=render_final)

    def __call__(self):

@@ -90,7 +90,7 @@ def spec_from_entry(entry):
        name=entry["name"], version=entry["version"], compiler=compiler_str, arch=arch_str
    )

    pkg_cls = spack.repo.path.get_pkg_class(entry["name"])
    pkg_cls = spack.repo.PATH.get_pkg_class(entry["name"])

    if "parameters" in entry:
        variant_strs = list()

@@ -21,10 +21,11 @@
import contextlib
import datetime
import os
import pathlib
import socket
import sys
import time
from typing import Dict, List, NamedTuple, Set, Type, Union
from typing import Any, Callable, Dict, Generator, List, NamedTuple, Set, Type, Union

try:
    import uuid

@@ -141,22 +142,23 @@ class InstallStatuses:
    def canonicalize(cls, query_arg):
        if query_arg is True:
            return [cls.INSTALLED]
        elif query_arg is False:
        if query_arg is False:
            return [cls.MISSING]
        elif query_arg is any:
        if query_arg is any:
            return [cls.INSTALLED, cls.DEPRECATED, cls.MISSING]
        elif isinstance(query_arg, InstallStatus):
        if isinstance(query_arg, InstallStatus):
            return [query_arg]
        else:
            try:  # Try block catches if it is not an iterable at all
                if any(type(x) != InstallStatus for x in query_arg):
                    raise TypeError
            except TypeError:
                raise TypeError(
                    "installation query must be `any`, boolean, "
                    "InstallStatus, or iterable of InstallStatus"
                )
            return query_arg
        try:
            statuses = list(query_arg)
            if all(isinstance(x, InstallStatus) for x in statuses):
                return statuses
        except TypeError:
            pass

        raise TypeError(
            "installation query must be `any`, boolean, "
            "InstallStatus, or iterable of InstallStatus"
        )
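`canonicalize` normalizes the various accepted query forms into a flat list of statuses: booleans map to installed/missing, the `any` builtin expands to all three states, and anything else must already be a status or an iterable of statuses. A few illustrative calls (names as in the class above):

```python
# Illustrative expectations for the rewritten canonicalize():
assert InstallStatuses.canonicalize(True) == [InstallStatuses.INSTALLED]
assert InstallStatuses.canonicalize(any) == [
    InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED, InstallStatuses.MISSING
]
# A non-status iterable (or a plain string) falls through to the TypeError.
```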


class InstallRecord:

@@ -306,15 +308,16 @@ def __reduce__(self):

    """

#: Data class to configure locks in Database objects
#:
#: Args:
#:     enable (bool): whether to enable locks or not.
#:     database_timeout (int or None): timeout for the database lock
#:     package_timeout (int or None): timeout for the package lock


class LockConfiguration(NamedTuple):
    """Data class to configure locks in Database objects

    Args:
        enable: whether to enable locks or not.
        database_timeout: timeout for the database lock
        package_timeout: timeout for the package lock
    """

    enable: bool
    database_timeout: Optional[int]
    package_timeout: Optional[int]
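Moving from module-level comments to a typed `NamedTuple` makes the lock settings explicit and checkable at construction time. A minimal sketch of creating one (values arbitrary):

```python
# Arbitrary example values; None disables the corresponding timeout.
lock_cfg = LockConfiguration(enable=True, database_timeout=120, package_timeout=None)
assert lock_cfg.database_timeout == 120
```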

@@ -348,13 +351,230 @@ def lock_configuration(configuration):
    )


def prefix_lock_path(root_dir: Union[str, pathlib.Path]) -> pathlib.Path:
    """Returns the path of the prefix lock file, given the root directory.

    Args:
        root_dir: root directory containing the database directory
    """
    return pathlib.Path(root_dir) / _DB_DIRNAME / "prefix_lock"


def failures_lock_path(root_dir: Union[str, pathlib.Path]) -> pathlib.Path:
    """Returns the path of the failures lock file, given the root directory.

    Args:
        root_dir: root directory containing the database directory
    """
    return pathlib.Path(root_dir) / _DB_DIRNAME / "prefix_failures"


class SpecLocker:
    """Manages acquiring and releasing read or write locks on concrete specs."""

    def __init__(self, lock_path: Union[str, pathlib.Path], default_timeout: Optional[float]):
        self.lock_path = pathlib.Path(lock_path)
        self.default_timeout = default_timeout

        # Maps (spec.dag_hash(), spec.name) to the corresponding lock object
        self.locks: Dict[Tuple[str, str], lk.Lock] = {}

    def lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
        """Returns a lock on a concrete spec.

        The lock is a byte range lock on the nth byte of a file.

        The lock file is ``self.lock_path``.

        n is the sys.maxsize-bit prefix of the DAG hash. This makes the likelihood of
        collision very low AND gives us readers-writer lock semantics with just a single
        lockfile, so no cleanup is required.
        """
        assert spec.concrete, "cannot lock a non-concrete spec"
        timeout = timeout or self.default_timeout
        key = self._lock_key(spec)

        if key not in self.locks:
            self.locks[key] = self.raw_lock(spec, timeout=timeout)
        else:
            self.locks[key].default_timeout = timeout

        return self.locks[key]

    def raw_lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
        """Returns a raw lock for a Spec, but doesn't keep track of it."""
        return lk.Lock(
            str(self.lock_path),
            start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
            length=1,
            default_timeout=timeout,
            desc=spec.name,
        )
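The byte-range trick: each concrete spec hashes to a (practically) distinct byte offset in one shared lock file, so per-spec readers-writer locking needs no per-spec lock files. A rough sketch of the offset computation, using a hypothetical stand-in for `spec.dag_hash_bit_prefix` and assuming a hex DAG hash:

```python
import sys

def byte_offset(dag_hash_hex: str, bits: int = sys.maxsize.bit_length()) -> int:
    """Hypothetical stand-in for spec.dag_hash_bit_prefix(): interpret the
    first `bits` bits of the hash as an integer byte offset into the file."""
    hex_digits = (bits + 3) // 4
    return int(dag_hash_hex[:hex_digits], 16) >> (hex_digits * 4 - bits)

# Two different hashes land on (almost certainly) different offsets:
print(byte_offset("abcdef0123456789deadbeef"), byte_offset("123456789abcdef0cafebabe"))
```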

    def has_lock(self, spec: "spack.spec.Spec") -> bool:
        """Returns True if the spec is already managed by this spec locker"""
        return self._lock_key(spec) in self.locks

    def _lock_key(self, spec: "spack.spec.Spec") -> Tuple[str, str]:
        return (spec.dag_hash(), spec.name)

    @contextlib.contextmanager
    def write_lock(self, spec: "spack.spec.Spec") -> Generator["SpecLocker", None, None]:
        lock = self.lock(spec)
        lock.acquire_write()

        try:
            yield self
        except lk.LockError:
            # This addresses the case where a nested lock attempt fails inside
            # of this context manager
            raise
        except (Exception, KeyboardInterrupt):
            lock.release_write()
            raise
        else:
            lock.release_write()

    def clear(self, spec: "spack.spec.Spec") -> Tuple[bool, Optional[lk.Lock]]:
        key = self._lock_key(spec)
        lock = self.locks.pop(key, None)
        return bool(lock), lock

    def clear_all(self, clear_fn: Optional[Callable[[lk.Lock], Any]] = None) -> None:
        if clear_fn is not None:
            for lock in self.locks.values():
                clear_fn(lock)
        self.locks.clear()


class FailureTracker:
    """Tracks installation failures.

    Prefix failure marking takes the form of a byte range lock on the nth
    byte of a file for coordinating between concurrent parallel build
    processes and a persistent file, named with the full hash and
    containing the spec, in a subdirectory of the database to enable
    persistence across overlapping but separate related build processes.

    The failure lock file lives alongside the install DB.

    ``n`` is the sys.maxsize-bit prefix of the associated DAG hash to make
    the likelihood of collision very low with no cleanup required.
    """

    def __init__(self, root_dir: Union[str, pathlib.Path], default_timeout: Optional[float]):
        #: Ensure a persistent location for dealing with parallel installation
        #: failures (e.g., across near-concurrent processes).
        self.dir = pathlib.Path(root_dir) / _DB_DIRNAME / "failures"
        self.dir.mkdir(parents=True, exist_ok=True)

        self.locker = SpecLocker(failures_lock_path(root_dir), default_timeout=default_timeout)

    def clear(self, spec: "spack.spec.Spec", force: bool = False) -> None:
        """Removes any persistent and cached failure tracking for the spec.

        see `mark()`.

        Args:
            spec: the spec whose failure indicators are being removed
            force: True if the failure information should be cleared when a failure lock
                exists for the file, or False if the failure should not be cleared (e.g.,
                it may be associated with a concurrent build)
        """
        locked = self.lock_taken(spec)
        if locked and not force:
            tty.msg(f"Retaining failure marking for {spec.name} due to lock")
            return

        if locked:
            tty.warn(f"Removing failure marking despite lock for {spec.name}")

        succeeded, lock = self.locker.clear(spec)
        if succeeded and lock is not None:
            lock.release_write()

        if self.persistent_mark(spec):
            path = self._path(spec)
            tty.debug(f"Removing failure marking for {spec.name}")
            try:
                path.unlink()
            except OSError as err:
                tty.warn(
                    f"Unable to remove failure marking for {spec.name} ({str(path)}): {str(err)}"
                )

    def clear_all(self) -> None:
        """Force remove install failure tracking files."""
        tty.debug("Releasing prefix failure locks")
        self.locker.clear_all(
            clear_fn=lambda x: x.release_write() if x.is_write_locked() else True
        )

        tty.debug("Removing prefix failure tracking files")
        try:
            for fail_mark in os.listdir(str(self.dir)):
                try:
                    (self.dir / fail_mark).unlink()
                except OSError as exc:
                    tty.warn(f"Unable to remove failure marking file {fail_mark}: {str(exc)}")
        except OSError as exc:
            tty.warn(f"Unable to remove failure marking files: {str(exc)}")

    def mark(self, spec: "spack.spec.Spec") -> lk.Lock:
        """Marks a spec as failing to install.

        Args:
            spec: spec that failed to install
        """
        # Dump the spec to the failure file for (manual) debugging purposes
        path = self._path(spec)
        path.write_text(spec.to_json())

        # Also ensure a failure lock is taken to prevent cleanup removal
        # of failure status information during a concurrent parallel build.
        if not self.locker.has_lock(spec):
            try:
                mark = self.locker.lock(spec)
                mark.acquire_write()
            except lk.LockTimeoutError:
                # Unlikely that another process failed to install at the same
                # time but log it anyway.
                tty.debug(f"PID {os.getpid()} failed to mark install failure for {spec.name}")
                tty.warn(f"Unable to mark {spec.name} as failed.")

        return self.locker.lock(spec)

    def has_failed(self, spec: "spack.spec.Spec") -> bool:
        """Return True if the spec is marked as failed."""
        # The failure was detected in this process.
        if self.locker.has_lock(spec):
            return True

        # The failure was detected by a concurrent process (e.g., an srun),
        # which is expected to be holding a write lock if that is the case.
        if self.lock_taken(spec):
            return True

        # Determine if the spec may have been marked as failed by a separate
        # spack build process running concurrently.
        return self.persistent_mark(spec)

    def lock_taken(self, spec: "spack.spec.Spec") -> bool:
        """Return True if another process has a failure lock on the spec."""
        check = self.locker.raw_lock(spec)
        return check.is_write_locked()

    def persistent_mark(self, spec: "spack.spec.Spec") -> bool:
        """Determine if the spec has a persistent failure marking."""
        return self._path(spec).exists()

    def _path(self, spec: "spack.spec.Spec") -> pathlib.Path:
        """Return the path to the spec's failure file, which may not exist."""
        assert spec.concrete, "concrete spec required for failure path"
        return self.dir / f"{spec.name}-{spec.dag_hash()}"


class Database:
    #: Per-process lock objects for each install prefix
    _prefix_locks: Dict[str, lk.Lock] = {}

    #: Per-process failure (lock) objects for each install prefix
    _prefix_failures: Dict[str, lk.Lock] = {}

    #: Fields written for each install record
    record_fields: Tuple[str, ...] = DEFAULT_INSTALL_RECORD_FIELDS

@@ -392,24 +612,10 @@ def __init__(
        self._verifier_path = os.path.join(self.database_directory, "index_verifier")
        self._lock_path = os.path.join(self.database_directory, "lock")

        # This is for other classes to use to lock prefix directories.
        self.prefix_lock_path = os.path.join(self.database_directory, "prefix_lock")

        # Ensure a persistent location for dealing with parallel installation
        # failures (e.g., across near-concurrent processes).
        self._failure_dir = os.path.join(self.database_directory, "failures")

        # Support special locks for handling parallel installation failures
        # of a spec.
        self.prefix_fail_path = os.path.join(self.database_directory, "prefix_failures")

        # Create needed directories and files
        if not is_upstream and not os.path.exists(self.database_directory):
            fs.mkdirp(self.database_directory)

        if not is_upstream and not os.path.exists(self._failure_dir):
            fs.mkdirp(self._failure_dir)

        self.is_upstream = is_upstream
        self.last_seen_verifier = ""
        # Failed write transactions (interrupted by exceptions) will alert

@@ -423,15 +629,7 @@ def __init__(

        # initialize rest of state.
        self.db_lock_timeout = lock_cfg.database_timeout
        self.package_lock_timeout = lock_cfg.package_timeout

        tty.debug("DATABASE LOCK TIMEOUT: {0}s".format(str(self.db_lock_timeout)))
        timeout_format_str = (
            "{0}s".format(str(self.package_lock_timeout))
            if self.package_lock_timeout
            else "No timeout"
        )
        tty.debug("PACKAGE LOCK TIMEOUT: {0}".format(str(timeout_format_str)))

        self.lock: Union[ForbiddenLock, lk.Lock]
        if self.is_upstream:

@@ -471,212 +669,6 @@ def read_transaction(self):
        """Get a read lock context manager for use in a `with` block."""
        return self._read_transaction_impl(self.lock, acquire=self._read)

    def _failed_spec_path(self, spec):
        """Return the path to the spec's failure file, which may not exist."""
        if not spec.concrete:
            raise ValueError("Concrete spec required for failure path for {0}".format(spec.name))

        return os.path.join(self._failure_dir, "{0}-{1}".format(spec.name, spec.dag_hash()))

    def clear_all_failures(self) -> None:
        """Force remove install failure tracking files."""
        tty.debug("Releasing prefix failure locks")
        for pkg_id in list(self._prefix_failures.keys()):
            lock = self._prefix_failures.pop(pkg_id, None)
            if lock:
                lock.release_write()

        # Remove all failure markings (aka files)
        tty.debug("Removing prefix failure tracking files")
        for fail_mark in os.listdir(self._failure_dir):
            try:
                os.remove(os.path.join(self._failure_dir, fail_mark))
            except OSError as exc:
                tty.warn(
                    "Unable to remove failure marking file {0}: {1}".format(fail_mark, str(exc))
                )

    def clear_failure(self, spec: "spack.spec.Spec", force: bool = False) -> None:
        """
        Remove any persistent and cached failure tracking for the spec.

        see `mark_failed()`.

        Args:
            spec: the spec whose failure indicators are being removed
            force: True if the failure information should be cleared when a prefix failure
                lock exists for the file, or False if the failure should not be cleared (e.g.,
                it may be associated with a concurrent build)
        """
        failure_locked = self.prefix_failure_locked(spec)
        if failure_locked and not force:
            tty.msg("Retaining failure marking for {0} due to lock".format(spec.name))
            return

        if failure_locked:
            tty.warn("Removing failure marking despite lock for {0}".format(spec.name))

        lock = self._prefix_failures.pop(spec.prefix, None)
        if lock:
            lock.release_write()

        if self.prefix_failure_marked(spec):
            try:
                path = self._failed_spec_path(spec)
                tty.debug("Removing failure marking for {0}".format(spec.name))
                os.remove(path)
            except OSError as err:
                tty.warn(
                    "Unable to remove failure marking for {0} ({1}): {2}".format(
                        spec.name, path, str(err)
                    )
                )

    def mark_failed(self, spec: "spack.spec.Spec") -> lk.Lock:
        """
        Mark a spec as failing to install.

        Prefix failure marking takes the form of a byte range lock on the nth
        byte of a file for coordinating between concurrent parallel build
        processes and a persistent file, named with the full hash and
        containing the spec, in a subdirectory of the database to enable
        persistence across overlapping but separate related build processes.

        The failure lock file, ``spack.store.STORE.db.prefix_failures``, lives
        alongside the install DB. ``n`` is the sys.maxsize-bit prefix of the
        associated DAG hash to make the likelihood of collision very low with
        no cleanup required.
        """
        # Dump the spec to the failure file for (manual) debugging purposes
        path = self._failed_spec_path(spec)
        with open(path, "w") as f:
            spec.to_json(f)

        # Also ensure a failure lock is taken to prevent cleanup removal
        # of failure status information during a concurrent parallel build.
        err = "Unable to mark {0.name} as failed."

        prefix = spec.prefix
        if prefix not in self._prefix_failures:
            mark = lk.Lock(
                self.prefix_fail_path,
                start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
                length=1,
                default_timeout=self.package_lock_timeout,
                desc=spec.name,
            )

            try:
                mark.acquire_write()
            except lk.LockTimeoutError:
                # Unlikely that another process failed to install at the same
                # time but log it anyway.
                tty.debug(
                    "PID {0} failed to mark install failure for {1}".format(os.getpid(), spec.name)
                )
                tty.warn(err.format(spec))

            # Whether we or another process marked it as a failure, track it
            # as such locally.
            self._prefix_failures[prefix] = mark

        return self._prefix_failures[prefix]

    def prefix_failed(self, spec: "spack.spec.Spec") -> bool:
        """Return True if the prefix (installation) is marked as failed."""
        # The failure was detected in this process.
        if spec.prefix in self._prefix_failures:
            return True

        # The failure was detected by a concurrent process (e.g., an srun),
        # which is expected to be holding a write lock if that is the case.
        if self.prefix_failure_locked(spec):
            return True

        # Determine if the spec may have been marked as failed by a separate
        # spack build process running concurrently.
        return self.prefix_failure_marked(spec)

    def prefix_failure_locked(self, spec: "spack.spec.Spec") -> bool:
        """Return True if a process has a failure lock on the spec."""
        check = lk.Lock(
            self.prefix_fail_path,
            start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
            length=1,
            default_timeout=self.package_lock_timeout,
            desc=spec.name,
        )

        return check.is_write_locked()

    def prefix_failure_marked(self, spec: "spack.spec.Spec") -> bool:
        """Determine if the spec has a persistent failure marking."""
        return os.path.exists(self._failed_spec_path(spec))

    def prefix_lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
        """Get a lock on a particular spec's installation directory.

        NOTE: The installation directory **does not** need to exist.

        Prefix lock is a byte range lock on the nth byte of a file.

        The lock file is ``spack.store.STORE.db.prefix_lock`` -- the DB
        tells us what to call it and it lives alongside the install DB.

        n is the sys.maxsize-bit prefix of the DAG hash. This makes the
        likelihood of collision very low AND it gives us readers-writer
        lock semantics with just a single lockfile, so no cleanup is
        required.
        """
        timeout = timeout or self.package_lock_timeout
        prefix = spec.prefix
        if prefix not in self._prefix_locks:
            self._prefix_locks[prefix] = lk.Lock(
                self.prefix_lock_path,
                start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
                length=1,
                default_timeout=timeout,
                desc=spec.name,
            )
        elif timeout != self._prefix_locks[prefix].default_timeout:
            self._prefix_locks[prefix].default_timeout = timeout

        return self._prefix_locks[prefix]

    @contextlib.contextmanager
    def prefix_read_lock(self, spec):
        prefix_lock = self.prefix_lock(spec)
        prefix_lock.acquire_read()

        try:
            yield self
        except lk.LockError:
            # This addresses the case where a nested lock attempt fails inside
            # of this context manager
            raise
        except (Exception, KeyboardInterrupt):
            prefix_lock.release_read()
            raise
        else:
            prefix_lock.release_read()

    @contextlib.contextmanager
    def prefix_write_lock(self, spec):
        prefix_lock = self.prefix_lock(spec)
        prefix_lock.acquire_write()

        try:
            yield self
        except lk.LockError:
            # This addresses the case where a nested lock attempt fails inside
            # of this context manager
            raise
        except (Exception, KeyboardInterrupt):
            prefix_lock.release_write()
            raise
        else:
            prefix_lock.release_write()

    def _write_to_file(self, stream):
        """Write out the database in JSON format to the stream passed
        as argument.
@@ -3,12 +3,11 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from .common import DetectedPackage, executable_prefix, update_configuration
from .path import by_executable, by_library, executables_in_path
from .path import by_path, executables_in_path

__all__ = [
    "DetectedPackage",
    "by_library",
    "by_executable",
    "by_path",
    "executables_in_path",
    "executable_prefix",
    "update_configuration",

@@ -13,13 +13,13 @@
The module also contains other functions that might be useful across different
detection mechanisms.
"""
import collections
import glob
import itertools
import os
import os.path
import re
import sys
from typing import Dict, List, NamedTuple, Optional, Set, Tuple, Union

import llnl.util.tty

@@ -29,12 +29,28 @@
import spack.util.spack_yaml
import spack.util.windows_registry

#: Information on a package that has been detected
DetectedPackage = collections.namedtuple("DetectedPackage", ["spec", "prefix"])

class DetectedPackage(NamedTuple):
    """Information on a package that has been detected."""

    #: Spec that was detected
    spec: spack.spec.Spec
    #: Prefix of the spec
    prefix: str

    def __reduce__(self):
        return DetectedPackage.restore, (str(self.spec), self.prefix, self.spec.extra_attributes)

    @staticmethod
    def restore(
        spec_str: str, prefix: str, extra_attributes: Optional[Dict[str, str]]
    ) -> "DetectedPackage":
        spec = spack.spec.Spec.from_detection(spec_str=spec_str, extra_attributes=extra_attributes)
        return DetectedPackage(spec=spec, prefix=prefix)
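Defining `__reduce__` keeps `DetectedPackage` picklable across processes: the spec is serialized as a string plus its extra attributes, then rebuilt through `restore` via `Spec.from_detection`. A sketch of the round-trip this enables (the spec string is an arbitrary example):

```python
import pickle

# Hypothetical detected package; "cmake@3.27.0" is just an example spec string.
original = DetectedPackage(spec=spack.spec.Spec("cmake@3.27.0"), prefix="/usr")
clone = pickle.loads(pickle.dumps(original))  # goes through __reduce__/restore
assert str(clone.spec) == str(original.spec) and clone.prefix == original.prefix
```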
|
||||
|
||||
|
||||
def _externals_in_packages_yaml():
|
||||
"""Return all the specs mentioned as externals in packages.yaml"""
|
||||
def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:
|
||||
"""Returns all the specs mentioned as externals in packages.yaml"""
|
||||
packages_yaml = spack.config.get("packages")
|
||||
already_defined_specs = set()
|
||||
for pkg_name, package_configuration in packages_yaml.items():
|
||||
@@ -43,7 +59,12 @@ def _externals_in_packages_yaml():
|
||||
return already_defined_specs
|
||||
|
||||
|
||||
def _pkg_config_dict(external_pkg_entries):
|
||||
ExternalEntryType = Union[str, Dict[str, str]]
|
||||
|
||||
|
||||
def _pkg_config_dict(
|
||||
external_pkg_entries: List[DetectedPackage],
|
||||
) -> Dict[str, Union[bool, List[Dict[str, ExternalEntryType]]]]:
|
||||
"""Generate a package specific config dict according to the packages.yaml schema.
|
||||
|
||||
This does not generate the entire packages.yaml. For example, given some
|
||||
@@ -65,7 +86,10 @@ def _pkg_config_dict(external_pkg_entries):
|
||||
if not _spec_is_valid(e.spec):
|
||||
continue
|
||||
|
||||
external_items = [("spec", str(e.spec)), ("prefix", e.prefix)]
|
||||
external_items: List[Tuple[str, ExternalEntryType]] = [
|
||||
("spec", str(e.spec)),
|
||||
("prefix", e.prefix),
|
||||
]
|
||||
if e.spec.external_modules:
|
||||
external_items.append(("modules", e.spec.external_modules))
|
||||
|
||||
@@ -83,15 +107,14 @@ def _pkg_config_dict(external_pkg_entries):
|
||||
return pkg_dict
|
||||
|
||||
|
||||
def _spec_is_valid(spec):
|
||||
def _spec_is_valid(spec: spack.spec.Spec) -> bool:
|
||||
try:
|
||||
str(spec)
|
||||
except spack.error.SpackError:
|
||||
# It is assumed here that we can at least extract the package name from
|
||||
# the spec so we can look up the implementation of
|
||||
# determine_spec_details
|
||||
msg = "Constructed spec for {0} does not have a string representation"
|
||||
llnl.util.tty.warn(msg.format(spec.name))
|
||||
# It is assumed here that we can at least extract the package name from the spec so we
|
||||
# can look up the implementation of determine_spec_details
|
||||
msg = f"Constructed spec for {spec.name} does not have a string representation"
|
||||
llnl.util.tty.warn(msg)
|
||||
return False
|
||||
|
||||
try:
|
||||
@@ -106,7 +129,7 @@ def _spec_is_valid(spec):
|
||||
return True
|
||||
|
||||
|
||||
def path_to_dict(search_paths):
|
||||
def path_to_dict(search_paths: List[str]):
|
||||
"""Return dictionary[fullpath]: basename from list of paths"""
|
||||
path_to_lib = {}
|
||||
# Reverse order of search directories so that a lib in the first
|
||||
@@ -124,7 +147,7 @@ def path_to_dict(search_paths):
|
||||
return path_to_lib
|
||||
|
||||
|
||||
def is_executable(file_path):
|
||||
def is_executable(file_path: str) -> bool:
|
||||
"""Return True if the path passed as argument is that of an executable"""
|
||||
return os.path.isfile(file_path) and os.access(file_path, os.X_OK)
|
||||
|
||||
@@ -146,7 +169,7 @@ def _convert_to_iterable(single_val_or_multiple):
|
||||
return [x]
|
||||
|
||||
|
||||
def executable_prefix(executable_dir):
|
||||
def executable_prefix(executable_dir: str) -> str:
|
||||
"""Given a directory where an executable is found, guess the prefix
|
||||
(i.e. the "root" directory of that installation) and return it.
|
||||
|
||||
@@ -167,12 +190,12 @@ def executable_prefix(executable_dir):
|
||||
return os.sep.join(components[:idx])
|
||||
|
||||
|
||||
def library_prefix(library_dir):
|
||||
"""Given a directory where an library is found, guess the prefix
|
||||
def library_prefix(library_dir: str) -> str:
|
||||
"""Given a directory where a library is found, guess the prefix
|
||||
(i.e. the "root" directory of that installation) and return it.
|
||||
|
||||
Args:
|
||||
library_dir: directory where an library is found
|
||||
library_dir: directory where a library is found
|
||||
"""
|
||||
# Given a prefix where an library is found, assuming that prefix
|
||||
# contains /lib/ or /lib64/, strip off the 'lib' or 'lib64' directory
|
||||
@@ -195,13 +218,17 @@ def library_prefix(library_dir):
|
||||
return library_dir
-def update_configuration(detected_packages, scope=None, buildable=True):
+def update_configuration(
+    detected_packages: Dict[str, List[DetectedPackage]],
+    scope: Optional[str] = None,
+    buildable: bool = True,
+) -> List[spack.spec.Spec]:
     """Add the packages passed as arguments to packages.yaml

     Args:
-        detected_packages (list): list of DetectedPackage objects to be added
-        scope (str): configuration scope where to add the detected packages
-        buildable (bool): whether the detected packages are buildable or not
+        detected_packages: list of DetectedPackage objects to be added
+        scope: configuration scope where to add the detected packages
+        buildable: whether the detected packages are buildable or not
     """
     predefined_external_specs = _externals_in_packages_yaml()
     pkg_to_cfg, all_new_specs = {}, []
@@ -209,7 +236,10 @@ def update_configuration(detected_packages, scope=None, buildable=True):
         new_entries = [e for e in entries if (e.spec not in predefined_external_specs)]

         pkg_config = _pkg_config_dict(new_entries)
-        all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in pkg_config.get("externals", [])])
+        external_entries = pkg_config.get("externals", [])
+        assert not isinstance(external_entries, bool), "unexpected value for external entry"
+
+        all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in external_entries])
         if buildable is False:
             pkg_config["buildable"] = False
         pkg_to_cfg[package_name] = pkg_config
@@ -222,16 +252,19 @@ def update_configuration(detected_packages, scope=None, buildable=True):
     return all_new_specs
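Reviewer note: the new assert narrows the config value's type (which, presumably for the
type checker, also admits booleans such as a bare "buildable" flag) before iterating. For
reference, the per-package dictionary assembled here mirrors a packages.yaml entry; a
hypothetical example of the shape, with illustrative values:

pkg_config = {
    "externals": [{"spec": "cmake@3.27.4", "prefix": "/usr"}],
    "buildable": False,  # only present when buildable=False was requested
}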
-def _windows_drive():
-    """Return Windows drive string extracted from PROGRAMFILES
-    env var, which is garunteed to be defined for all logins"""
-    drive = re.match(r"([a-zA-Z]:)", os.environ["PROGRAMFILES"]).group(1)
-    return drive
+def _windows_drive() -> str:
+    """Return Windows drive string extracted from the PROGRAMFILES environment variable,
+    which is guaranteed to be defined for all logins.
+    """
+    match = re.match(r"([a-zA-Z]:)", os.environ["PROGRAMFILES"])
+    if match is None:
+        raise RuntimeError("cannot read the PROGRAMFILES environment variable")
+    return match.group(1)
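Reviewer note: this rewrite is the usual Optional-handling fix: re.match returns None on
failure, so the old one-liner could raise a confusing AttributeError. The same guard
pattern in isolation (hypothetical helper name):

import re

def drive_letter(path: str) -> str:
    match = re.match(r"([a-zA-Z]:)", path)
    if match is None:
        # Fail with an explicit error instead of AttributeError on NoneType.
        raise ValueError(f"no drive letter found in {path!r}")
    return match.group(1)

# drive_letter("C:\\Program Files") -> "C:"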
 class WindowsCompilerExternalPaths:
     @staticmethod
-    def find_windows_compiler_root_paths():
+    def find_windows_compiler_root_paths() -> List[str]:
         """Helper for Windows compiler installation root discovery

         At the moment simply returns location of VS install paths from VSWhere
@@ -239,7 +272,7 @@ def find_windows_compiler_root_paths():
         return list(winOs.WindowsOs.vs_install_paths)

     @staticmethod
-    def find_windows_compiler_cmake_paths():
+    def find_windows_compiler_cmake_paths() -> List[str]:
         """Semi hard-coded search path for cmake bundled with MSVC"""
         return [
             os.path.join(
@@ -249,7 +282,7 @@ def find_windows_compiler_cmake_paths():
         ]

     @staticmethod
-    def find_windows_compiler_ninja_paths():
+    def find_windows_compiler_ninja_paths() -> List[str]:
         """Semi hard-coded search heuristic for locating ninja bundled with MSVC"""
         return [
             os.path.join(path, "Common7", "IDE", "CommonExtensions", "Microsoft", "CMake", "Ninja")
@@ -257,7 +290,7 @@ def find_windows_compiler_ninja_paths():
         ]

     @staticmethod
-    def find_windows_compiler_bundled_packages():
+    def find_windows_compiler_bundled_packages() -> List[str]:
         """Return all MSVC compiler bundled packages"""
         return (
             WindowsCompilerExternalPaths.find_windows_compiler_cmake_paths()
@@ -266,14 +299,15 @@ def find_windows_compiler_bundled_packages():


 class WindowsKitExternalPaths:
     plat_major_ver = None
     if sys.platform == "win32":
         plat_major_ver = str(winOs.windows_version()[0])

     @staticmethod
-    def find_windows_kit_roots():
+    def find_windows_kit_roots() -> Optional[str]:
         """Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\"""
         if sys.platform != "win32":
-            return []
+            return None
         program_files = os.environ["PROGRAMFILES(x86)"]
         kit_base = os.path.join(
             program_files, "Windows Kits", WindowsKitExternalPaths.plat_major_ver
@@ -281,21 +315,23 @@ def find_windows_kit_roots():
         return kit_base

     @staticmethod
-    def find_windows_kit_bin_paths(kit_base=None):
+    def find_windows_kit_bin_paths(kit_base: Optional[str] = None) -> List[str]:
         """Returns Windows kit bin directory per version"""
         kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
+        assert kit_base is not None, "unexpected value for kit_base"
         kit_bin = os.path.join(kit_base, "bin")
         return glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\"))

     @staticmethod
-    def find_windows_kit_lib_paths(kit_base=None):
+    def find_windows_kit_lib_paths(kit_base: Optional[str] = None) -> List[str]:
         """Returns Windows kit lib directory per version"""
         kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
+        assert kit_base is not None, "unexpected value for kit_base"
         kit_lib = os.path.join(kit_base, "Lib")
         return glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\"))

     @staticmethod
-    def find_windows_driver_development_kit_paths():
+    def find_windows_driver_development_kit_paths() -> List[str]:
         """Provides a list of all installation paths
         for the WDK by version and architecture
         """
@@ -303,7 +339,7 @@ def find_windows_driver_development_kit_paths():
         return WindowsKitExternalPaths.find_windows_kit_lib_paths(wdk_content_root)

     @staticmethod
-    def find_windows_kit_reg_installed_roots_paths():
+    def find_windows_kit_reg_installed_roots_paths() -> List[str]:
         reg = spack.util.windows_registry.WindowsRegistryView(
             "SOFTWARE\\Microsoft\\Windows Kits\\Installed Roots",
             root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE,
@@ -316,7 +352,7 @@ def find_windows_kit_reg_installed_roots_paths():
         )

     @staticmethod
-    def find_windows_kit_reg_sdk_paths():
+    def find_windows_kit_reg_sdk_paths() -> List[str]:
         reg = spack.util.windows_registry.WindowsRegistryView(
             "SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v%s.0"
             % WindowsKitExternalPaths.plat_major_ver,
@@ -330,7 +366,7 @@ def find_windows_kit_reg_sdk_paths():
         )
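Reviewer note: the kit helpers rely on glob patterns such as "[0-9]*" plus a trailing
path separator, which restricts matches to version-numbered subdirectories. A small
stand-alone illustration (hypothetical helper name, assuming a Windows-style kit layout):

import glob
import os

def versioned_bin_dirs(kit_base: str):
    # Matches e.g. <kit_base>\bin\10.0.22621.0\x64\ and skips plain files,
    # because the trailing separator in the pattern only matches directories.
    return glob.glob(os.path.join(kit_base, "bin", "[0-9]*", "*" + os.sep))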
-def find_win32_additional_install_paths():
+def find_win32_additional_install_paths() -> List[str]:
     """Not all programs on Windows live on the PATH
     Return a list of other potential install locations.
     """
@@ -357,13 +393,12 @@ def find_win32_additional_install_paths():
     return windows_search_ext


-def compute_windows_program_path_for_package(pkg):
-    """Given a package, attempt to compute its Windows
-    program files location, return list of best guesses
+def compute_windows_program_path_for_package(pkg: "spack.package_base.PackageBase") -> List[str]:
+    """Given a package, attempts to compute its Windows program files location,
+    and returns the list of best guesses.

     Args:
-        pkg (spack.package_base.PackageBase): package for which
-            Program Files location is to be computed
+        pkg: package for which Program Files location is to be computed
     """
     if sys.platform != "win32":
         return []
@@ -378,7 +413,7 @@ def compute_windows_program_path_for_package(pkg):
     ]


-def compute_windows_user_path_for_package(pkg):
+def compute_windows_user_path_for_package(pkg: "spack.package_base.PackageBase") -> List[str]:
     """Given a package attempt to compute its user scoped
     install location, return list of potential locations based
     on common heuristics. For more info on Windows user specific
@@ -6,11 +6,13 @@
 and running executables.
 """
 import collections
+import concurrent.futures
 import os
 import os.path
 import re
 import sys
+import warnings
 from typing import Dict, List, Optional, Set, Tuple

 import llnl.util.filesystem
 import llnl.util.tty
@@ -18,7 +20,7 @@
 import spack.util.environment
 import spack.util.ld_so_conf

-from .common import (  # find_windows_compiler_bundled_packages,
+from .common import (
     DetectedPackage,
     WindowsCompilerExternalPaths,
     WindowsKitExternalPaths,
@@ -31,8 +33,13 @@
     path_to_dict,
 )

+#: Timeout used for package detection (seconds)
+DETECTION_TIMEOUT = 60
+if sys.platform == "win32":
+    DETECTION_TIMEOUT = 120

-def common_windows_package_paths():

+def common_windows_package_paths() -> List[str]:
     paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages()
     paths.extend(find_win32_additional_install_paths())
     paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
@@ -41,7 +48,7 @@ def common_windows_package_paths():
     return paths
-def executables_in_path(path_hints):
+def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
     """Get the paths of all executables available from the current PATH.

     For convenience, this is constructed as a dictionary where the keys are
@@ -52,7 +59,7 @@ def executables_in_path(path_hints):
     assumed there are two different instances of the executable.

     Args:
-        path_hints (list): list of paths to be searched. If None the list will be
+        path_hints: list of paths to be searched. If None the list will be
             constructed based on the PATH environment variable.
     """
     if sys.platform == "win32":
@@ -61,7 +68,9 @@ def executables_in_path(path_hints):
     return path_to_dict(search_paths)
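Reviewer note: the Dict[str, str] annotation makes the contract explicit: keys are
absolute paths, values are basenames. A hedged sketch of how the finders consume such a
mapping (illustrative pattern and names, not the exact call sites):

import re

def matching_paths(path_to_name, pattern):
    compiled = re.compile(pattern)
    # Match on the basename, but return the full path for later prefix guessing.
    return [path for path, name in path_to_name.items() if compiled.search(name)]

# matching_paths({"/usr/bin/gcc-12": "gcc-12"}, r"gcc") -> ["/usr/bin/gcc-12"]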
-def libraries_in_ld_and_system_library_path(path_hints=None):
+def libraries_in_ld_and_system_library_path(
+    path_hints: Optional[List[str]] = None,
+) -> Dict[str, str]:
     """Get the paths of all libraries available from LD_LIBRARY_PATH,
     LIBRARY_PATH, DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and
     standard system library paths.
@@ -74,7 +83,7 @@ def libraries_in_ld_and_system_library_path(path_hints=None):
     assumed there are two different instances of the library.

     Args:
-        path_hints (list): list of paths to be searched. If None the list will be
+        path_hints: list of paths to be searched. If None the list will be
             constructed based on the set of LD_LIBRARY_PATH, LIBRARY_PATH,
             DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment
             variables as well as the standard system library paths.
@@ -90,7 +99,7 @@ def libraries_in_ld_and_system_library_path(path_hints=None):
     return path_to_dict(search_paths)


-def libraries_in_windows_paths(path_hints):
+def libraries_in_windows_paths(path_hints: List[str]) -> Dict[str, str]:
     path_hints.extend(spack.util.environment.get_path("PATH"))
     search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
     # on Windows, some libraries (.dlls) are found in the bin directory or sometimes
@@ -106,218 +115,250 @@ def libraries_in_windows_paths(path_hints):
     return path_to_dict(search_paths)
-def _group_by_prefix(paths):
+def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]:
     groups = collections.defaultdict(set)
     for p in paths:
         groups[os.path.dirname(p)].add(p)
-    return groups.items()
+    return groups
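Reviewer note: returning the defaultdict instead of dict_items lets the new callers
write sorted(_group_by_prefix(...).items()) explicitly. Illustrative behavior of the
same grouping in isolation:

import collections
import os

def group_by_prefix_sketch(paths):
    # Bucket each file path under its containing directory.
    groups = collections.defaultdict(set)
    for p in paths:
        groups[os.path.dirname(p)].add(p)
    return groups

# group_by_prefix_sketch({"/usr/bin/gcc", "/usr/bin/g++", "/opt/bin/gcc"})
# -> {"/usr/bin": {"/usr/bin/gcc", "/usr/bin/g++"}, "/opt/bin": {"/opt/bin/gcc"}}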
-# TODO consolidate this with by_executable
-# Packages should be able to define both .libraries and .executables in the future
-# determine_spec_details should get all relevant libraries and executables in one call
-def by_library(packages_to_check, path_hints=None):
-    # Techniques for finding libraries is determined on a per recipe basis in
-    # the determine_version class method. Some packages will extract the
-    # version number from a shared libraries filename.
-    # Other libraries could use the strings function to extract it as described
-    # in https://unix.stackexchange.com/questions/58846/viewing-linux-library-executable-version-info
-    """Return the list of packages that have been detected on the system,
-    searching by LD_LIBRARY_PATH, LIBRARY_PATH, DYLD_LIBRARY_PATH,
-    DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths.
+class Finder:
+    """Inspects the file-system looking for packages. Guesses places where to look using PATH."""

-    Args:
-        packages_to_check (list): list of packages to be detected
-        path_hints (list): list of paths to be searched. If None the list will be
-            constructed based on the LD_LIBRARY_PATH, LIBRARY_PATH,
-            DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH environment variables
-            and standard system library paths.
-    """
-    # If no path hints from command line, intialize to empty list so
-    # we can add default hints on a per package basis
-    path_hints = [] if path_hints is None else path_hints
+    def path_hints(
+        self, *, pkg: "spack.package_base.PackageBase", initial_guess: Optional[List[str]] = None
+    ) -> List[str]:
+        """Returns the list of paths to be searched.

-    lib_pattern_to_pkgs = collections.defaultdict(list)
-    for pkg in packages_to_check:
-        if hasattr(pkg, "libraries"):
-            for lib in pkg.libraries:
-                lib_pattern_to_pkgs[lib].append(pkg)
-        path_hints.extend(compute_windows_user_path_for_package(pkg))
-        path_hints.extend(compute_windows_program_path_for_package(pkg))
+        Args:
+            pkg: package being detected
+            initial_guess: initial list of paths from caller
+        """
+        result = initial_guess or []
+        result.extend(compute_windows_user_path_for_package(pkg))
+        result.extend(compute_windows_program_path_for_package(pkg))
+        return result

-    path_to_lib_name = (
-        libraries_in_ld_and_system_library_path(path_hints=path_hints)
-        if sys.platform != "win32"
-        else libraries_in_windows_paths(path_hints)
-    )
+    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
+        """Returns the list of patterns used to match candidate files.

-    pkg_to_found_libs = collections.defaultdict(set)
-    for lib_pattern, pkgs in lib_pattern_to_pkgs.items():
-        compiled_re = re.compile(lib_pattern)
-        for path, lib in path_to_lib_name.items():
-            if compiled_re.search(lib):
-                for pkg in pkgs:
-                    pkg_to_found_libs[pkg].add(path)
+        Args:
+            pkg: package being detected
+        """
+        raise NotImplementedError("must be implemented by derived classes")

-    pkg_to_entries = collections.defaultdict(list)
-    resolved_specs = {}  # spec -> lib found for the spec
+    def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
+        """Returns a list of candidate files found on the system.

-    for pkg, libs in pkg_to_found_libs.items():
+        Args:
+            patterns: search patterns to be used for matching files
+            paths: paths where to search for files
+        """
+        raise NotImplementedError("must be implemented by derived classes")
+
+    def prefix_from_path(self, *, path: str) -> str:
+        """Given a path where a file was found, returns the corresponding prefix.
+
+        Args:
+            path: path of a detected file
+        """
+        raise NotImplementedError("must be implemented by derived classes")
+
+    def detect_specs(
+        self, *, pkg: "spack.package_base.PackageBase", paths: List[str]
+    ) -> List[DetectedPackage]:
+        """Given a list of files matching the search patterns, returns a list of detected specs.
+
+        Args:
+            pkg: package being detected
+            paths: files matching the package search patterns
+        """
         if not hasattr(pkg, "determine_spec_details"):
-            llnl.util.tty.warn(
-                "{0} must define 'determine_spec_details' in order"
-                " for Spack to detect externally-provided instances"
-                " of the package.".format(pkg.name)
+            warnings.warn(
+                f"{pkg.name} must define 'determine_spec_details' in order"
+                f" for Spack to detect externally-provided instances"
+                f" of the package."
             )
-            continue
+            return []

-        for prefix, libs_in_prefix in sorted(_group_by_prefix(libs)):
-            try:
-                specs = _convert_to_iterable(pkg.determine_spec_details(prefix, libs_in_prefix))
-            except Exception as e:
-                specs = []
-                msg = 'error detecting "{0}" from prefix {1} [{2}]'
-                warnings.warn(msg.format(pkg.name, prefix, str(e)))
-
-            if not specs:
-                llnl.util.tty.debug(
-                    "The following libraries in {0} were decidedly not "
-                    "part of the package {1}: {2}".format(
-                        prefix, pkg.name, ", ".join(_convert_to_iterable(libs_in_prefix))
-                    )
-                )
-
-            for spec in specs:
-                pkg_prefix = library_prefix(prefix)
-
-                if not pkg_prefix:
-                    msg = "no lib/ or lib64/ dir found in {0}. Cannot "
-                    "add it as a Spack package"
-                    llnl.util.tty.debug(msg.format(prefix))
-                    continue
-
-                if spec in resolved_specs:
-                    prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))
-
-                    llnl.util.tty.debug(
-                        "Libraries in {0} and {1} are both associated"
-                        " with the same spec {2}".format(prefix, prior_prefix, str(spec))
-                    )
-                    continue
-                else:
-                    resolved_specs[spec] = prefix
-
-                try:
-                    spec.validate_detection()
-                except Exception as e:
-                    msg = (
-                        '"{0}" has been detected on the system but will '
-                        "not be added to packages.yaml [reason={1}]"
-                    )
-                    llnl.util.tty.warn(msg.format(spec, str(e)))
-                    continue
-
-                if spec.external_path:
-                    pkg_prefix = spec.external_path
-
-                pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))
-
-    return pkg_to_entries
-
-
-def by_executable(packages_to_check, path_hints=None):
-    """Return the list of packages that have been detected on the system,
-    searching by path.
-
-    Args:
-        packages_to_check (list): list of package classes to be detected
-        path_hints (list): list of paths to be searched. If None the list will be
-            constructed based on the PATH environment variable.
-    """
-    path_hints = spack.util.environment.get_path("PATH") if path_hints is None else path_hints
-    exe_pattern_to_pkgs = collections.defaultdict(list)
-    for pkg in packages_to_check:
-        if hasattr(pkg, "executables"):
-            for exe in pkg.platform_executables():
-                exe_pattern_to_pkgs[exe].append(pkg)
-        # Add Windows specific, package related paths to the search paths
-        path_hints.extend(compute_windows_user_path_for_package(pkg))
-        path_hints.extend(compute_windows_program_path_for_package(pkg))
-
-    path_to_exe_name = executables_in_path(path_hints=path_hints)
-    pkg_to_found_exes = collections.defaultdict(set)
-    for exe_pattern, pkgs in exe_pattern_to_pkgs.items():
-        compiled_re = re.compile(exe_pattern)
-        for path, exe in path_to_exe_name.items():
-            if compiled_re.search(exe):
-                for pkg in pkgs:
-                    pkg_to_found_exes[pkg].add(path)
-
-    pkg_to_entries = collections.defaultdict(list)
-    resolved_specs = {}  # spec -> exe found for the spec
-
-    for pkg, exes in pkg_to_found_exes.items():
-        if not hasattr(pkg, "determine_spec_details"):
-            llnl.util.tty.warn(
-                "{0} must define 'determine_spec_details' in order"
-                " for Spack to detect externally-provided instances"
-                " of the package.".format(pkg.name)
-            )
-            continue
-
-        for prefix, exes_in_prefix in sorted(_group_by_prefix(exes)):
+        result = []
+        for candidate_path, items_in_prefix in sorted(_group_by_prefix(set(paths)).items()):
             # TODO: multiple instances of a package can live in the same
             # prefix, and a package implementation can return multiple specs
             # for one prefix, but without additional details (e.g. about the
             # naming scheme which differentiates them), the spec won't be
             # usable.
             try:
-                specs = _convert_to_iterable(pkg.determine_spec_details(prefix, exes_in_prefix))
+                specs = _convert_to_iterable(
+                    pkg.determine_spec_details(candidate_path, items_in_prefix)
+                )
             except Exception as e:
                 specs = []
-                msg = 'error detecting "{0}" from prefix {1} [{2}]'
-                warnings.warn(msg.format(pkg.name, prefix, str(e)))
-
-            if not specs:
-                llnl.util.tty.debug(
-                    "The following executables in {0} were decidedly not "
-                    "part of the package {1}: {2}".format(
-                        prefix, pkg.name, ", ".join(_convert_to_iterable(exes_in_prefix))
-                    )
-                )
+                warnings.warn(
+                    f'error detecting "{pkg.name}" from prefix {candidate_path} [{str(e)}]'
+                )

-            for spec in specs:
-                pkg_prefix = executable_prefix(prefix)
+            if not specs:
+                files = ", ".join(_convert_to_iterable(items_in_prefix))
+                llnl.util.tty.debug(
+                    f"The following files in {candidate_path} were decidedly not "
+                    f"part of the package {pkg.name}: {files}"
+                )

-                if not pkg_prefix:
-                    msg = "no bin/ dir found in {0}. Cannot add it as a Spack package"
-                    llnl.util.tty.debug(msg.format(prefix))
+            resolved_specs: Dict[spack.spec.Spec, str] = {}  # spec -> exe found for the spec
+            for spec in specs:
+                prefix = self.prefix_from_path(path=candidate_path)
+                if not prefix:
                     continue

                 if spec in resolved_specs:
                     prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))

                     llnl.util.tty.debug(
-                        "Executables in {0} and {1} are both associated"
-                        " with the same spec {2}".format(prefix, prior_prefix, str(spec))
+                        f"Files in {candidate_path} and {prior_prefix} are both associated"
+                        f" with the same spec {str(spec)}"
                     )
                     continue
-                else:
-                    resolved_specs[spec] = prefix

+                resolved_specs[spec] = candidate_path
                 try:
                     spec.validate_detection()
                 except Exception as e:
                     msg = (
-                        '"{0}" has been detected on the system but will '
-                        "not be added to packages.yaml [reason={1}]"
+                        f'"{spec}" has been detected on the system but will '
+                        f"not be added to packages.yaml [reason={str(e)}]"
                     )
-                    llnl.util.tty.warn(msg.format(spec, str(e)))
+                    warnings.warn(msg)
                     continue

                 if spec.external_path:
-                    pkg_prefix = spec.external_path
+                    prefix = spec.external_path

-                pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))
+                result.append(DetectedPackage(spec=spec, prefix=prefix))

-    return pkg_to_entries
+        return result

+    def find(
+        self, *, pkg_name: str, initial_guess: Optional[List[str]] = None
+    ) -> List[DetectedPackage]:
+        """For a given package, returns a list of detected specs.
+
+        Args:
+            pkg_name: package being detected
+            initial_guess: initial list of paths to search from the caller
+        """
+        import spack.repo
+
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        patterns = self.search_patterns(pkg=pkg_cls)
+        if not patterns:
+            return []
+        path_hints = self.path_hints(pkg=pkg_cls, initial_guess=initial_guess)
+        candidates = self.candidate_files(patterns=patterns, paths=path_hints)
+        result = self.detect_specs(pkg=pkg_cls, paths=candidates)
+        return result
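Reviewer note: Finder is a template-method base class: find() wires together
search_patterns, path_hints, candidate_files and prefix_from_path, and only the hooks
vary per strategy. A minimal hypothetical subclass to show the extension point (not
part of this diff; the "headers" attribute is invented for illustration):

class HeaderFinder(Finder):
    """Hypothetical finder matching header files under include/ directories."""

    def search_patterns(self, *, pkg):
        # e.g. a package could hypothetically declare pkg.headers = [r"mpi\.h"]
        return getattr(pkg, "headers", [])

    def candidate_files(self, *, patterns, paths):
        import os
        import re

        compiled = [re.compile(p) for p in patterns]
        found = []
        for directory in paths:
            if not os.path.isdir(directory):
                continue
            for name in os.listdir(directory):
                if any(c.search(name) for c in compiled):
                    found.append(os.path.join(directory, name))
        return found

    def prefix_from_path(self, *, path):
        import os

        # Strip a trailing .../include/<file> to guess the installation root.
        include_dir = os.path.dirname(path)
        if os.path.basename(include_dir) == "include":
            return os.path.dirname(include_dir)
        return ""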
+class ExecutablesFinder(Finder):
+    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
+        result = []
+        if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
+            result = pkg.platform_executables()
+        return result
+
+    def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
+        executables_by_path = executables_in_path(path_hints=paths)
+        patterns = [re.compile(x) for x in patterns]
+        result = []
+        for compiled_re in patterns:
+            for path, exe in executables_by_path.items():
+                if compiled_re.search(exe):
+                    result.append(path)
+        return list(sorted(set(result)))
+
+    def prefix_from_path(self, *, path: str) -> str:
+        result = executable_prefix(path)
+        if not result:
+            msg = f"no bin/ dir found in {path}. Cannot add it as a Spack package"
+            llnl.util.tty.debug(msg)
+        return result
+class LibrariesFinder(Finder):
+    """Finds libraries on the system, searching by LD_LIBRARY_PATH, LIBRARY_PATH,
+    DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths
+    """
+
+    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
+        result = []
+        if hasattr(pkg, "libraries"):
+            result = pkg.libraries
+        return result
+
+    def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
+        libraries_by_path = (
+            libraries_in_ld_and_system_library_path(path_hints=paths)
+            if sys.platform != "win32"
+            else libraries_in_windows_paths(paths)
+        )
+        patterns = [re.compile(x) for x in patterns]
+        result = []
+        for compiled_re in patterns:
+            for path, exe in libraries_by_path.items():
+                if compiled_re.search(exe):
+                    result.append(path)
+        return result
+
+    def prefix_from_path(self, *, path: str) -> str:
+        result = library_prefix(path)
+        if not result:
+            msg = f"no lib/ or lib64/ dir found in {path}. Cannot add it as a Spack package"
+            llnl.util.tty.debug(msg)
+        return result
+def by_path(
+    packages_to_search: List[str],
+    *,
+    path_hints: Optional[List[str]] = None,
+    max_workers: Optional[int] = None,
+) -> Dict[str, List[DetectedPackage]]:
+    """Return the list of packages that have been detected on the system,
+    searching by path.
+
+    Args:
+        packages_to_search: list of package classes to be detected
+        path_hints: initial list of paths to be searched
+    """
+    # TODO: Packages should be able to define both .libraries and .executables in the future
+    # TODO: determine_spec_details should get all relevant libraries and executables in one call
+    executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()
+
+    executables_path_guess = (
+        spack.util.environment.get_path("PATH") if path_hints is None else path_hints
+    )
+    libraries_path_guess = [] if path_hints is None else path_hints
+    detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}
+
+    result = collections.defaultdict(list)
+    with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
+        for pkg in packages_to_search:
+            executable_future = executor.submit(
+                executables_finder.find, pkg_name=pkg, initial_guess=executables_path_guess
+            )
+            library_future = executor.submit(
+                libraries_finder.find, pkg_name=pkg, initial_guess=libraries_path_guess
+            )
+            detected_specs_by_package[pkg] = executable_future, library_future
+
+        for pkg_name, futures in detected_specs_by_package.items():
+            for future in futures:
+                try:
+                    detected = future.result(timeout=DETECTION_TIMEOUT)
+                    if detected:
+                        result[pkg_name].extend(detected)
+                except Exception:
+                    llnl.util.tty.debug(
+                        f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"
+                    )
+
+    return result
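Reviewer note: the detection work is fanned out to a process pool and collected with a
per-future timeout, so one slow package probe does not stall the whole scan. One caveat
worth keeping in mind: future.result(timeout=...) only bounds the wait, it does not kill
the worker process. The pattern in isolation (hypothetical task names; submitted
callables must be picklable, i.e. defined at module level):

import concurrent.futures

def run_with_timeout(tasks, timeout_s=60, max_workers=None):
    # tasks: mapping of name -> zero-argument callable
    results = {}
    with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
        futures = {name: executor.submit(fn) for name, fn in tasks.items()}
        for name, future in futures.items():
            try:
                results[name] = future.result(timeout=timeout_s)
            except Exception:
                results[name] = None  # timed out or raised; skip this task
    return results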
@@ -33,7 +33,7 @@ class OpenMpi(Package):
 import functools
 import os.path
 import re
-from typing import List, Optional, Set, Union
+from typing import Any, Callable, List, Optional, Set, Tuple, Union

 import llnl.util.lang
 import llnl.util.tty.color
@@ -42,6 +42,7 @@ class OpenMpi(Package):
 import spack.patch
 import spack.spec
 import spack.url
+import spack.util.crypto
 import spack.variant
 from spack.dependency import Dependency, canonical_deptype, default_deptype
 from spack.fetch_strategy import from_kwargs
@@ -407,10 +408,7 @@ def version(

 def _execute_version(pkg, ver, **kwargs):
     if (
-        any(
-            s in kwargs
-            for s in ("sha256", "sha384", "sha512", "md5", "sha1", "sha224", "checksum")
-        )
+        (any(s in kwargs for s in spack.util.crypto.hashes) or "checksum" in kwargs)
         and hasattr(pkg, "has_code")
         and not pkg.has_code
     ):
@@ -520,7 +518,8 @@ def _execute_conflicts(pkg):

         # Save in a list the conflicts and the associated custom messages
         when_spec_list = pkg.conflicts.setdefault(conflict_spec, [])
-        when_spec_list.append((when_spec, msg))
+        msg_with_name = f"{pkg.name}: {msg}" if msg is not None else msg
+        when_spec_list.append((when_spec, msg_with_name))

     return _execute_conflicts
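Reviewer note: prefixing the stored message with the package name means downstream error
output can show which package a conflict came from without extra bookkeeping. The
transformation in isolation (hypothetical helper name):

def with_pkg_name(pkg_name, msg):
    # None is preserved so "no custom message" stays distinguishable.
    return f"{pkg_name}: {msg}" if msg is not None else msg

# with_pkg_name("openmpi", "requires a newer compiler") -> "openmpi: requires a newer compiler"
# with_pkg_name("openmpi", None) -> None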
@@ -663,39 +662,35 @@ def _execute_patch(pkg_or_dep):

 @directive("variants")
 def variant(
-    name,
-    default=None,
-    description="",
-    values=None,
-    multi=None,
-    validator=None,
-    when=None,
-    sticky=False,
+    name: str,
+    default: Optional[Any] = None,
+    description: str = "",
+    values: Optional[Union[collections.abc.Sequence, Callable[[Any], bool]]] = None,
+    multi: Optional[bool] = None,
+    validator: Optional[Callable[[str, str, Tuple[Any, ...]], None]] = None,
+    when: Optional[Union[str, bool]] = None,
+    sticky: bool = False,
 ):
-    """Define a variant for the package. Packager can specify a default
-    value as well as a text description.
+    """Define a variant for the package.
+
+    Packager can specify a default value as well as a text description.

     Args:
-        name (str): name of the variant
-        default (str or bool): default value for the variant, if not
-            specified otherwise the default will be False for a boolean
-            variant and 'nothing' for a multi-valued variant
-        description (str): description of the purpose of the variant
-        values (tuple or typing.Callable): either a tuple of strings containing the
-            allowed values, or a callable accepting one value and returning
-            True if it is valid
-        multi (bool): if False only one value per spec is allowed for
-            this variant
-        validator (typing.Callable): optional group validator to enforce additional
-            logic. It receives the package name, the variant name and a tuple
-            of values and should raise an instance of SpackError if the group
-            doesn't meet the additional constraints
-        when (spack.spec.Spec, bool): optional condition on which the
-            variant applies
-        sticky (bool): the variant should not be changed by the concretizer to
-            find a valid concrete spec.
+        name: Name of the variant
+        default: Default value for the variant, if not specified otherwise the default will be
+            False for a boolean variant and 'nothing' for a multi-valued variant
+        description: Description of the purpose of the variant
+        values: Either a tuple of strings containing the allowed values, or a callable accepting
+            one value and returning True if it is valid
+        multi: If False only one value per spec is allowed for this variant
+        validator: Optional group validator to enforce additional logic. It receives the package
+            name, the variant name and a tuple of values and should raise an instance of SpackError
+            if the group doesn't meet the additional constraints
+        when: Optional condition on which the variant applies
+        sticky: The variant should not be changed by the concretizer to find a valid concrete spec

     Raises:
-        DirectiveError: if arguments passed to the directive are invalid
+        DirectiveError: If arguments passed to the directive are invalid
     """

     def format_error(msg, pkg):
@@ -763,7 +758,7 @@ def _execute_variant(pkg):
         when_spec = make_when_spec(when)
         when_specs = [when_spec]

-        if not re.match(spack.spec.identifier_re, name):
+        if not re.match(spack.spec.IDENTIFIER_RE, name):
             directive = "variant"
             msg = "Invalid variant name in {0}: '{1}'"
             raise DirectiveError(directive, msg.format(pkg.name, name))
@@ -900,7 +895,8 @@ def _execute_requires(pkg):

         # Save in a list the requirements and the associated custom messages
         when_spec_list = pkg.requirements.setdefault(tuple(requirement_specs), [])
-        when_spec_list.append((when_spec, policy, msg))
+        msg_with_name = f"{pkg.name}: {msg}" if msg is not None else msg
+        when_spec_list.append((when_spec, policy, msg_with_name))

     return _execute_requires
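Reviewer note: the annotated variant() signature keeps the call surface recipe authors
already use. A typical use inside a package recipe, matching the documented parameters
(package and values shown are illustrative, not from this diff):

from spack.package import *

class Example(Package):
    variant("shared", default=True, description="Build shared libraries")
    variant(
        "schedulers",
        values=("slurm", "pbs", "none"),
        default="none",
        multi=True,
        description="List of schedulers to support",
    )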
@@ -11,6 +11,7 @@
 import shutil
 import sys
 from contextlib import contextmanager
+from pathlib import Path

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -104,7 +105,7 @@ def relative_path_for_spec(self, spec):

         projection = spack.projections.get_projection(self.projections, spec)
         path = spec.format(projection)
-        return path
+        return str(Path(path))

     def write_spec(self, spec, path):
         """Write a spec out to a file."""
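Reviewer note: round-tripping the formatted projection through pathlib.Path normalizes
the separators for the host platform, which appears to be the point of this change for
Windows. Illustrative behavior (the projection string is hypothetical):

from pathlib import Path

path = "windows10-x86_64/msvc-19.29/zlib-1.2.13-abcdef"
print(str(Path(path)))  # on Windows: windows10-x86_64\msvc-19.29\zlib-1.2.13-abcdef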
Some files were not shown because too many files have changed in this diff.