Compare commits

428 commits: develop-20 ... develop-20

| SHA1 |
|---|
| 36852fe348 |
| 8914d26867 |
| fdea5e7624 |
| ca1e4d54b5 |
| 656528bbbb |
| 4d42e9d1f3 |
| d058c1d649 |
| 43854fc2ec |
| 6a2149df6e |
| af38d097ac |
| e67dca73d1 |
| 2e6ed1e707 |
| 53d2ffaf83 |
| a95e061fed |
| e01b9b38ef |
| eac15badd3 |
| 806b8aa966 |
| 9e5ca525f7 |
| 5ea4322f88 |
| 4ca2d8bc19 |
| e0059ef961 |
| 7d9fad9576 |
| 553277a84f |
| 00a3ebd0bb |
| ffc9060e11 |
| 31d5f56913 |
| bfdebae831 |
| aa83fa44e1 |
| e56291dd45 |
| 2f52545214 |
| 5090023e3a |
| d355880110 |
| 1a0434b808 |
| c3eec8a36f |
| 25b8cf93d2 |
| 34ff7605e6 |
| e026fd3613 |
| 3f5f4cfe26 |
| 74fe9ccef3 |
| fd5a8b2075 |
| 33793445cf |
| f4a144c8ac |
| 6c439ec022 |
| 209409189a |
| ff900566e0 |
| a954a0bb9f |
| c21e00f504 |
| 9ae1317e79 |
| 9f1a30d3b5 |
| 1340995249 |
| afebc11742 |
| 34e9fc612c |
| 1d8ff7f742 |
| 0e27f05611 |
| 19aaa97ff2 |
| 990309355f |
| 2cb66e6e44 |
| cfaade098a |
| ed65532e27 |
| 696d4a1b85 |
| 8def75b414 |
| 5389db821d |
| 0d5ae3a809 |
| b61ad8d2a8 |
| b35db020eb |
| ca1d15101e |
| c9ec5fb9ac |
| 71abb8c7f0 |
| 4dafae8d17 |
| b2b00df5cc |
| 114e5d4767 |
| fd70e7fb31 |
| 77760c8ea4 |
| 737a6dcc73 |
| 3826fe3765 |
| edb11941b2 |
| 1bd58a8026 |
| f8e0c8caed |
| d0412c1578 |
| ec500adb50 |
| 30f5c74614 |
| 713eb210ac |
| a022e45866 |
| 82685a68d9 |
| b19691d503 |
| 54ea860b37 |
| fb598baa53 |
| 02763e967a |
| 2846be315b |
| 4818b75814 |
| b613bf3855 |
| 3347372a7b |
| c417a77a19 |
| 90d0d0176c |
| 72b9f89504 |
| a89f1b1bf4 |
| c6e26251a1 |
| 190a1bf523 |
| e381e166ec |
| 2f145b2684 |
| 4c7748e954 |
| 86485dea14 |
| 00f8f5898a |
| f41d7a89f3 |
| 4f07205c63 |
| 08f9c7670e |
| b451791336 |
| 47f176d635 |
| b6ae751657 |
| 9bb5cffc73 |
| 135b44ca59 |
| d3aca68e8f |
| fb83f8ef31 |
| f69c18a922 |
| b95a9d2e47 |
| def4d19980 |
| 1db91e0ccd |
| 34ebe7f53c |
| d07d5410f3 |
| 1db73eb1f2 |
| 2da34de519 |
| d237430f47 |
| 3f0adae9ef |
| 3b4d7bf119 |
| b3087b32c6 |
| ad9c90cb2e |
| 1b0e113a9d |
| 6df5738482 |
| 927d831612 |
| 3f3c75e56a |
| 9733bb3da8 |
| 1de5117ef1 |
| cf8f44ae5a |
| 006e69265e |
| eaec3062a1 |
| d5eb5106b0 |
| 9f8edbf6bf |
| a4301badef |
| 4565811556 |
| b94d54e4d9 |
| a410b22098 |
| c1a73878ea |
| ae553051c8 |
| b94e22b284 |
| e25dcf73cd |
| b7cc4bd247 |
| 22c95923e3 |
| c050b99a06 |
| 60f82685ae |
| 27ab53b68a |
| 907a80ca71 |
| a53cc93016 |
| 6ad0dc3722 |
| 87d4bdaa02 |
| 36394aab2f |
| 358947fc03 |
| 477a3c0ef6 |
| c6c5e11353 |
| 29e2997bd5 |
| 41bd6a75d5 |
| 0976ad3184 |
| fc1d9ba550 |
| 61f0088a27 |
| c202a045e6 |
| 843e1e80f0 |
| 643c028308 |
| d823037c40 |
| 4d945be955 |
| a4ac3f2767 |
| 6e31676b29 |
| 1fff0241f2 |
| a2a52dfb21 |
| f0ed159a1b |
| 9bf7fa0067 |
| fbaea0336e |
| 1673d3e322 |
| c7cca3aa8d |
| da46b63a34 |
| c882214273 |
| 2bacab0402 |
| 0681d9a157 |
| 887847610e |
| 282a01ef76 |
| 151c551781 |
| abbd1abc1a |
| 49c505cc14 |
| 237a56a305 |
| 7e7e6c2797 |
| e67c61aac0 |
| 1b1ed1b1fa |
| ec0e51316b |
| 533821e46f |
| 6c5d125cb0 |
| 668fb1201f |
| f7918fd8ab |
| fc1996e0fa |
| ed3aaafd73 |
| 63bb2c9bad |
| a67455707a |
| 09ca71dbe0 |
| ea082539e4 |
| 143146f4f3 |
| ee6ae402aa |
| 0b26b26821 |
| c764f9b1ab |
| db19d83ea7 |
| 24256be6d6 |
| 633723236e |
| 381f31e69e |
| 9438cac219 |
| 85cf66f650 |
| f3c080e546 |
| 37634f8b08 |
| 2ae8bbce9e |
| b8bfaf65bf |
| 7968cb7fa2 |
| ebc2efdfd2 |
| ff07fd5ccb |
| 3f83ef6566 |
| 554ce7f063 |
| 23963779f4 |
| 45c5af10c3 |
| 532a37e7ba |
| aeb9a92845 |
| a3c7ad7669 |
| b99288dcae |
| 01b7cc5106 |
| f5888d8127 |
| 77c838ca93 |
| 11e538d962 |
| 7d444038ee |
| c24471834b |
| b1e33ae37b |
| c36617f9da |
| deadb64206 |
| 9eaa88e467 |
| bd58801415 |
| 548a9de671 |
| 8e7c53a8ba |
| 5e630174a1 |
| 175a65dfba |
| 39d4c402d5 |
| e51748ee8f |
| f9457fa80b |
| 4cc2ca3e2e |
| 3843001004 |
| e24bb5dd1c |
| f6013114eb |
| bdca875eb3 |
| af8c392de2 |
| 9aa3b4619b |
| 3d733da70a |
| cda99b792c |
| 9834bad82e |
| 3453259c98 |
| ee243b84eb |
| 5080e2cb45 |
| f42ef7aea7 |
| 41793673d9 |
| 8b6a6982ee |
| ee74ca6391 |
| 7165e70186 |
| 97d632a161 |
| 571919992d |
| 99112ad2ad |
| 75c70c395d |
| 960bdfe612 |
| 97892bda18 |
| cead6ef98d |
| 5d70c0f100 |
| 361632fc4b |
| 6576655137 |
| feb26efecd |
| 4752d1cde3 |
| a07afa6e1a |
| 7327d2913a |
| 8f8a1f7f52 |
| eb8d836e76 |
| bad8495e16 |
| 43de7f4881 |
| 84585ac575 |
| 86f9d3865b |
| 834e7b2b0a |
| c14f23ddaa |
| 49f3681a12 |
| 19e1d10cdf |
| 7caf2a512d |
| 1f6e3cc8cb |
| 169c4245e0 |
| ee1982010f |
| dd396c4a76 |
| 235802013d |
| e1f07e98ae |
| 60aee6f535 |
| 2510dc9e6e |
| 5607dd259b |
| 7bd5d1fd3c |
| f6104cc3cb |
| b54d286b4a |
| ea9c488897 |
| ba1d295023 |
| 27f04b3544 |
| 8cd9497522 |
| ef544a3b6d |
| 4eed832653 |
| 5996aaa4e3 |
| 4957607005 |
| 78bca131fb |
| 045c5cea53 |
| ea256145d9 |
| 0b2098850c |
| d2df0a29ce |
| 92e9daec9b |
| d65437114a |
| 9a6e98e729 |
| 6515c16432 |
| 2826ab36f0 |
| c035512930 |
| cfadba47d3 |
| 95391dfe94 |
| 382ba99631 |
| f8e25c79bf |
| 93b54b79d3 |
| ff30efcebc |
| 54514682d4 |
| 92a75717f0 |
| b9be8e883e |
| da838a7d10 |
| 85e5fb9ab7 |
| c0c300d773 |
| 6e933ac7df |
| be679759be |
| eace479b1e |
| 2069a42ba3 |
| 41d2161b5b |
| ba936574fc |
| c0d0603baa |
| edbf12cfa8 |
| 11b3dac705 |
| a7a5a994dc |
| 8a9a24ce1e |
| f54974d66e |
| 0b4631a774 |
| e7fa6d99bf |
| 03c0d74139 |
| a14f4b5a02 |
| 3be565f49e |
| df2938dfcf |
| 5d8482598b |
| f079e7fc34 |
| 3369acc050 |
| 26f4fc0f34 |
| 8c0551c1c0 |
| 59866cdb11 |
| 8d2a32f66d |
| b28ae67369 |
| b8590fbd05 |
| 9343b9524f |
| bb0cec1530 |
| d4f41b51f4 |
| 347acf3cc6 |
| 65224ad6bc |
| 784d56ce05 |
| b30523fdd8 |
| b46e098696 |
| 20a7622602 |
| d25f1059dd |
| 9394fa403e |
| 679c6a606d |
| 27f378601e |
| 832ddbdf6d |
| 0286455e1d |
| 4baf489460 |
| 56c7921430 |
| c2288af55c |
| 39cd2f3754 |
| a941ab4acb |
| 048cc711d6 |
| 63a5cf78ac |
| bef03b9588 |
| 2859f0a7e1 |
| c1b084d754 |
| a8301709a8 |
| ad0b70a64a |
| b5444e4304 |
| e1d5d34b56 |
| e5b4607548 |
| ecdd8e035c |
| 9d9e4a52f5 |
| 9e0629213c |
| 51fa4e5fc4 |
| 8908b7584e |
| 936c6045fc |
| ca2e9cf090 |
| 288b3c3ec2 |
| 31bb259a12 |
| a74dd96773 |
| 9594fb47e1 |
| 62cfe1ab47 |
| 8c417b3ccc |
| 52c0127fc7 |
| 2c74b433aa |
| 8bdfaf4ae5 |
| eb19f59fb1 |
| 856834537a |
| fba019f0be |
| cc0ac7093b |
| 711d67090a |
| 5ce667de6a |
| a77e6ea639 |
| 3a661803ce |
| c6ed2227f2 |
| bd9f8ba094 |
| c7b849bdee |
| 3f4012cf44 |
| e3b2e5b2cd |
| b2ed10dd83 |
| 1c3dc0bd5f |
| f28a2ccee2 |
| 12d86ffb6a |
| fde1954c13 |
| 3ad65bbfc1 |
| f017f586df |
| 399271832b |
| 4bcceddba9 |
| 0fff219aa4 |
| ac3c0a4347 |
| cc2fa9895e |
.github/workflows/unit_tests.yaml (vendored)
@@ -165,6 +165,7 @@ jobs:
    - name: Install Python packages
      run: |
        pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
        pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
@@ -51,65 +51,43 @@ setlocal enabledelayedexpansion
:: subcommands will never start with '-'
:: everything after the subcommand is an arg

:: we cannot allow batch "for" loop to directly process CL args
:: a number of batch reserved characters are commonly passed to
:: spack and allowing batch's "for" method to process the raw inputs
:: results in a large number of formatting issues
:: instead, treat the entire CLI as one string
:: and split by space manually
:: capture cl args in variable named cl_args
set cl_args=%*

:process_cl_args
rem tokens=1* returns the first processed token produced
rem by tokenizing the input string cl_args on spaces into
rem the named variable %%g
rem While this may look like a for loop, it only
rem executes a single time for each of the cl args
rem the actual iterative loop is performed by the
rem goto process_cl_args stanza
rem we are simply leveraging the "for" method's string
rem tokenization
for /f "tokens=1*" %%g in ("%cl_args%") do (
set t=%%~g
rem remainder of string is composed into %%h
rem these are the cl args yet to be processed
rem assign cl_args var to only the args to be processed
rem effectively discarding the current arg %%g
rem this will be nul when we have no further tokens to process
set cl_args=%%h
rem process the first space delineated cl arg
rem of this iteration
if "!t:~0,1!" == "-" (
if defined _sp_subcommand (
rem We already have a subcommand, processing args now
if not defined _sp_args (
set "_sp_args=!t!"
) else (
set "_sp_args=!_sp_args! !t!"
)
) else (
if not defined _sp_flags (
set "_sp_flags=!t!"
shift
) else (
set "_sp_flags=!_sp_flags! !t!"
shift
)
)
) else if not defined _sp_subcommand (
set "_sp_subcommand=!t!"
shift
) else (
rem Set first cl argument (denoted by %1) to be processed
set t=%1
rem shift moves all cl positional arguments left by one
rem meaning %2 is now %1, this allows us to iterate over each
rem argument
shift
rem assign next "first" cl argument to cl_args, will be null when
rem there are no further arguments to process
set cl_args=%1
if "!t:~0,1!" == "-" (
if defined _sp_subcommand (
rem We already have a subcommand, processing args now
if not defined _sp_args (
set "_sp_args=!t!"
shift
) else (
set "_sp_args=!_sp_args! !t!"
shift
)
) else (
if not defined _sp_flags (
set "_sp_flags=!t!"
) else (
set "_sp_flags=!_sp_flags! !t!"
)
)
) else if not defined _sp_subcommand (
set "_sp_subcommand=!t!"
) else (
if not defined _sp_args (
set "_sp_args=!t!"
) else (
set "_sp_args=!_sp_args! !t!"
)
)
rem if this is not nil, we have more tokens to process

rem if this is not null, we have more tokens to process
rem start above process again with remaining unprocessed cl args
if defined cl_args goto :process_cl_args
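For readers following this change, both the old and the new batch code implement the same splitting rule: leading ``-`` tokens before the first bare word are global flags, the first bare word is the subcommand, and everything after it is an argument. A minimal Python sketch of that rule, for illustration only (the function name is hypothetical):

.. code-block:: python

   def split_cli(tokens):
       """Split a command line into (flags, subcommand, args)."""
       flags, subcommand, args = [], None, []
       for t in tokens:
           if t.startswith("-") and subcommand is None:
               flags.append(t)  # global flag before the subcommand
           elif subcommand is None:
               subcommand = t  # first bare word is the subcommand
           else:
               args.append(t)  # everything afterwards is an arg
       return flags, subcommand, args

   print(split_cli(["-d", "install", "--verbose", "zlib"]))
   # (['-d'], 'install', ['--verbose', 'zlib'])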
@@ -36,3 +36,9 @@ concretizer:
  # on each root spec, allowing different versions and variants of the same package in
  # an environment.
  unify: true
  # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
  duplicates:
    # "none": allows a single node for any package in the DAG.
    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
    strategy: none
@@ -49,6 +49,7 @@ packages:
    pbs: [openpbs, torque]
    pil: [py-pillow]
    pkgconfig: [pkgconf, pkg-config]
    qmake: [qt-base, qt]
    rpc: [libtirpc]
    scalapack: [netlib-scalapack, amdscalapack]
    sycl: [hipsycl]
@@ -59,6 +60,7 @@ packages:
    xxd: [xxd-standalone, vim]
    yacc: [bison, byacc]
    ziglang: [zig]
    zlib-api: [zlib-ng+compat, zlib]
  permissions:
    read: world
    write: user
@@ -143,7 +143,7 @@ and then install from it exclusively, you would do:

   $ spack mirror add E4S https://cache.e4s.io
   $ spack buildcache keys --install --trust
   $ spack install --use-buildache only <package>
   $ spack install --use-buildcache only <package>

We use ``--install`` and ``--trust`` to say that we are installing keys to our
keyring, and trusting all downloaded keys.
@@ -32,9 +32,14 @@ can't be found. You can readily check if any prerequisite for using Spack is mis

Spack will take care of bootstrapping any missing dependency marked as [B]. Dependencies marked as [-] are instead required to be found on the system.

   % echo $?
   1

In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
are missing and it's giving detailed information on why they are needed and whether
they can be bootstrapped. Running a command that concretize a spec, like:
they can be bootstrapped. The return code of this command summarizes the results, if any
dependencies are missing the return code is ``1``, otherwise ``0``. Running a command that
concretizes a spec, like:

.. code-block:: console

@@ -44,7 +49,7 @@ they can be bootstrapped. Running a command that concretize a spec, like:
   ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
   [ ... ]

triggers the bootstrapping of clingo from pre-built binaries as expected.
automatically triggers the bootstrapping of clingo from pre-built binaries as expected.

Users can also bootstrap all the dependencies needed by Spack in a single command, which
might be useful to setup containers or other similar environments:
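Since the new wording documents the exit code, the check is easy to script; a small sketch (the ``spack bootstrap status`` subcommand name is assumed here, following the surrounding docs):

.. code-block:: python

   import subprocess

   # Per the docs above: exit code 1 means some dependency is missing, 0 means all found.
   result = subprocess.run(["spack", "bootstrap", "status"])
   print("all good" if result.returncode == 0 else "missing bootstrap dependencies")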
@@ -104,11 +104,13 @@ Clone `spack-configs <https://github.com/spack/spack-configs>`_ repo and activat

`Intel oneAPI CPU environment <https://github.com/spack/spack-configs/blob/main/INTEL/CPU/spack.yaml>`_ contains applications tested and validated by Intel, this list is constantly extended. And currently it supports:

- `Devito <https://www.devitoproject.org/>`_
- `GROMACS <https://www.gromacs.org/>`_
- `HPCG <https://www.hpcg-benchmark.org/>`_
- `HPL <https://netlib.org/benchmark/hpl/>`_
- `LAMMPS <https://www.lammps.org/#gsc.tab=0>`_
- `OpenFOAM <https://www.openfoam.com/>`_
- `Quantum Espresso <https://www.quantum-espresso.org/>`_
- `STREAM <https://www.cs.virginia.edu/stream/>`_
- `WRF <https://github.com/wrf-model/WRF>`_
@@ -32,7 +32,7 @@ By default, these phases run:

.. code-block:: console

   $ python configure.py --bindir ... --destdir ...
   $ sip-build --verbose --target-dir ...
   $ make
   $ make install

@@ -41,30 +41,30 @@ By default, these phases run:
Important files
^^^^^^^^^^^^^^^

Each SIP package comes with a custom ``configure.py`` build script,
written in Python. This script contains instructions to build the project.
Each SIP package comes with a custom configuration file written in Python.
For newer packages, this is called ``project.py``, while in older packages,
it may be called ``configure.py``. This script contains instructions to build
the project.

^^^^^^^^^^^^^^^^^^^^^^^^^
Build system dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^

``SIPPackage`` requires several dependencies. Python is needed to run
the ``configure.py`` build script, and to run the resulting Python
libraries. Qt is needed to provide the ``qmake`` command. SIP is also
needed to build the package. All of these dependencies are automatically
added via the base class
``SIPPackage`` requires several dependencies. Python and SIP are needed at build-time
to run the aforementioned configure script. Python is also needed at run-time to
actually use the installed Python library. And as we are building Python bindings
for C/C++ libraries, Python is also needed as a link dependency. All of these
dependencies are automatically added via the base class.

.. code-block:: python

   extends('python')
   extends("python", type=("build", "link", "run"))
   depends_on("py-sip", type="build")

   depends_on('qt', type='build')

   depends_on('py-sip', type='build')

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to ``configure.py``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to ``sip-build``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Each phase comes with a ``<phase_args>`` function that can be used to pass
arguments to that particular phase. For example, if you need to pass
@@ -73,10 +73,10 @@ arguments to the configure phase, you can use:
.. code-block:: python

   def configure_args(self):
       return ['--no-python-dbus']
       return ["--no-python-dbus"]


A list of valid options can be found by running ``python configure.py --help``.
A list of valid options can be found by running ``sip-build --help``.

^^^^^^^
Testing
@@ -2243,7 +2243,7 @@ looks like this:
    url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"

    version("1.0.1h", md5="8d6d684a9430d5cc98a62a5d8fbda8cf")
    depends_on("zlib")
    depends_on("zlib-api")

    parallel = False
@@ -4773,17 +4773,17 @@ For example, running:

results in spack checking that the installation created the following **file**:

* ``self.prefix/bin/reframe``
* ``self.prefix.bin.reframe``

and the following **directories**:

* ``self.prefix/bin``
* ``self.prefix/config``
* ``self.prefix/docs``
* ``self.prefix/reframe``
* ``self.prefix/tutorials``
* ``self.prefix/unittests``
* ``self.prefix/cscs-checks``
* ``self.prefix.bin``
* ``self.prefix.config``
* ``self.prefix.docs``
* ``self.prefix.reframe``
* ``self.prefix.tutorials``
* ``self.prefix.unittests``
* ``self.prefix.cscs-checks``

If **any** of these paths are missing, then Spack considers the installation
to have failed.
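The dotted ``self.prefix.bin.reframe`` spelling in the new lines works because Spack's prefix objects turn attribute access into path joins; a simplified sketch of the idea (not Spack's actual class):

.. code-block:: python

   import os

   class Prefix(str):
       """String subclass where attribute access appends a path component."""

       def __getattr__(self, attr):
           return Prefix(os.path.join(self, attr))

   prefix = Prefix("/opt/reframe")
   print(prefix.bin.reframe)  # /opt/reframe/bin/reframe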
@@ -4927,7 +4927,7 @@ installed executable. The check is implemented as follows:

   @on_package_attributes(run_tests=True)
   def check_list(self):
       with working_dir(self.stage.source_path):
           reframe = Executable(join_path(self.prefix, "bin", "reframe"))
           reframe = Executable(self.prefix.bin.reframe)
           reframe("-l")

.. warning::
@@ -5147,8 +5147,8 @@ embedded test parts.

   for example in ["ex1", "ex2"]:
       with test_part(
           self,
           "test_example_{0}".format(example),
           purpose="run installed {0}".format(example),
           f"test_example_{example}",
           purpose=f"run installed {example}",
       ):
           exe = which(join_path(self.prefix.bin, example))
           exe()
@@ -5226,11 +5226,10 @@ Below illustrates using this feature to compile an example.

   ...
   cxx = which(os.environ["CXX"])
   cxx(
       "-L{0}".format(self.prefix.lib),
       "-I{0}".format(self.prefix.include),
       "{0}.cpp".format(exe),
       "-o",
       exe
       f"-L{self.prefix.lib}",
       f"-I{self.prefix.include}",
       f"{exe}.cpp",
       "-o", exe
   )
   cxx_example = which(exe)
   cxx_example()
@@ -5254,7 +5253,7 @@ Saving build-time files
will be important to maintain them so they work across listed or supported
versions of the package.

You can use the ``cache_extra_test_sources`` method to copy directories
You can use the ``cache_extra_test_sources`` helper to copy directories
and or files from the source build stage directory to the package's
installation directory.
@@ -5262,10 +5261,15 @@ The signature for ``cache_extra_test_sources`` is:

.. code-block:: python

   def cache_extra_test_sources(self, srcs):
   def cache_extra_test_sources(pkg, srcs):

where each argument has the following meaning:

* ``pkg`` is an instance of the package for the spec under test.

* ``srcs`` is a string *or* a list of strings corresponding to the
  paths of subdirectories and or files needed for stand-alone testing.

where ``srcs`` is a string *or* a list of strings corresponding to the
paths of subdirectories and or files needed for stand-alone testing.
The paths must be relative to the staged source directory. Contents of
subdirectories and files are copied to a special test cache subdirectory
of the installation prefix. They are automatically copied to the appropriate
@@ -5286,21 +5290,18 @@ and using ``foo.c`` in a test method is illustrated below.

       srcs = ["tests",
               join_path("examples", "foo.c"),
               join_path("examples", "bar.c")]
       self.cache_extra_test_sources(srcs)
       cache_extra_test_sources(self, srcs)

   def test_foo(self):
       exe = "foo"
       src_dir = join_path(
           self.test_suite.current_test_cache_dir, "examples"
       )
       src_dir = self.test_suite.current_test_cache_dir.examples
       with working_dir(src_dir):
           cc = which(os.environ["CC"])
           cc(
               "-L{0}".format(self.prefix.lib),
               "-I{0}".format(self.prefix.include),
               "{0}.c".format(exe),
               "-o",
               exe
               f"-L{self.prefix.lib}",
               f"-I{self.prefix.include}",
               f"{exe}.c",
               "-o", exe
           )
           foo = which(exe)
           foo()
@@ -5326,9 +5327,9 @@ the files using the ``self.test_suite.current_test_cache_dir`` property.
In our example above, test methods can use the following paths to reference
the copy of each entry listed in ``srcs``, respectively:

* ``join_path(self.test_suite.current_test_cache_dir, "tests")``
* ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
* ``join_path(self.test_suite.current_test_cache_dir, "examples", "bar.c")``
* ``self.test_suite.current_test_cache_dir.tests``
* ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
* ``join_path(self.test_suite.current_test_cache_dir.examples, "bar.c")``

.. admonition:: Library packages should build stand-alone tests
@@ -5347,7 +5348,7 @@ the copy of each entry listed in ``srcs``, respectively:
   If one or more of the copied files needs to be modified to reference
   the installed software, it is recommended that those changes be made
   to the cached files **once** in the ``copy_test_sources`` method and
   ***after** the call to ``self.cache_extra_test_sources()``. This will
   ***after** the call to ``cache_extra_test_sources()``. This will
   reduce the amount of unnecessary work in the test method **and** avoid
   problems testing in shared instances and facility deployments.
@@ -5394,7 +5395,7 @@ property as shown below.
       """build and run custom-example"""
       data_dir = self.test_suite.current_test_data_dir
       exe = "custom-example"
       src = datadir.join("{0}.cpp".format(exe))
       src = datadir.join(f"{exe}.cpp")
       ...
       # TODO: Build custom-example using src and exe
       ...
@@ -5444,7 +5445,7 @@ added to the package's ``test`` subdirectory.
           db_filename, ".dump", output=str.split, error=str.split
       )
       for exp in expected:
           assert re.search(exp, out), "Expected '{0}' in output".format(exp)
           assert re.search(exp, out), f"Expected '{exp}' in output"

If the file was instead copied from the ``tests`` subdirectory of the staged
source code, the path would be obtained as shown below.
@@ -5494,9 +5495,12 @@ Invoking the method is the equivalent of:

.. code-block:: python

   errors = []
   for check in expected:
       if not re.search(check, actual):
           raise RuntimeError("Expected '{0}' in output '{1}'".format(check, actual))
           errors.append(f"Expected '{check}' in output '{actual}'")
   if errors:
       raise RuntimeError("\n ".join(errors))


.. _accessing-files:
@@ -5536,7 +5540,7 @@ repository, and installation.
     - ``self.test_suite.test_dir_for_spec(self.spec)``
   * - Current Spec's Build-time Files
     - ``self.test_suite.current_test_cache_dir``
     - ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
     - ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
   * - Current Spec's Custom Test Files
     - ``self.test_suite.current_test_data_dir``
     - ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
@@ -6071,7 +6075,7 @@ in the extra attributes can implement this method like this:
    @classmethod
    def validate_detected_spec(cls, spec, extra_attributes):
        """Check that "compilers" is in the extra attributes."""
        msg = ("the extra attribute "compilers" must be set for "
        msg = ("the extra attribute 'compilers' must be set for "
               "the detected spec '{0}'".format(spec))
        assert "compilers" in extra_attributes, msg
@@ -1,13 +1,13 @@
sphinx==6.2.1
sphinx==7.2.3
sphinxcontrib-programoutput==0.17
sphinx_design==0.4.1
sphinx-rtd-theme==1.2.2
sphinx_design==0.5.0
sphinx-rtd-theme==1.3.0
python-levenshtein==0.21.1
docutils==0.18.1
pygments==2.15.1
urllib3==2.0.3
pygments==2.16.1
urllib3==2.0.4
pytest==7.4.0
isort==5.12.0
black==23.1.0
flake8==6.0.0
mypy==1.4.1
black==23.7.0
flake8==6.1.0
mypy==1.5.1
@@ -217,13 +217,7 @@ file would live in the ``build_cache`` directory of a binary mirror::

    "binary_cache_checksum": {
        "hash_algorithm": "sha256",
        "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
    },

    "buildinfo": {
        "relative_prefix":
        "linux-ubuntu18.04-haswell/gcc-7.5.0/zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow",
        "relative_rpaths": false
    }
  }
}

-----BEGIN PGP SIGNATURE-----
@@ -1754,9 +1754,14 @@ def find(root, files, recursive=True):
        files = [files]

    if recursive:
        return _find_recursive(root, files)
        tty.debug(f"Find (recursive): {root} {str(files)}")
        result = _find_recursive(root, files)
    else:
        return _find_non_recursive(root, files)
        tty.debug(f"Find (not recursive): {root} {str(files)}")
        result = _find_non_recursive(root, files)

    tty.debug(f"Find complete: {root} {str(files)}")
    return result


@system_path_filter
@@ -143,7 +143,7 @@ def get_fh(self, path: str) -> IO:
    def release_by_stat(self, stat):
        key = (stat.st_dev, stat.st_ino, os.getpid())
        open_file = self._descriptors.get(key)
        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_inode
        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_ino

        open_file.refs -= 1
        if not open_file.refs:
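The corrected message uses ``st_ino``: ``os.stat_result`` has no ``st_inode`` attribute, so the old assertion would itself raise ``AttributeError`` whenever it fired (the message expression is only evaluated on failure). The key construction in isolation, as a runnable sketch:

.. code-block:: python

   import os

   def descriptor_key(path):
       # (device, inode, pid) identifies an open file per process;
       # note the attribute is st_ino, not st_inode.
       st = os.stat(path)
       return (st.st_dev, st.st_ino, os.getpid())

   print(descriptor_key(__file__))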
@@ -12,6 +12,7 @@
import traceback
from datetime import datetime
from sys import platform as _platform
from typing import NoReturn

if _platform != "win32":
    import fcntl
@@ -244,7 +245,7 @@ def warn(message, *args, **kwargs):
    info("Warning: " + str(message), *args, **kwargs)


def die(message, *args, **kwargs):
def die(message, *args, **kwargs) -> NoReturn:
    kwargs.setdefault("countback", 4)
    error(message, *args, **kwargs)
    sys.exit(1)
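Annotating ``die`` with ``NoReturn`` tells type checkers that the call never returns, which improves narrowing in callers; a short self-contained example:

.. code-block:: python

   import sys
   from typing import NoReturn, Optional

   def die(message: str) -> NoReturn:
       print("Error: " + message, file=sys.stderr)
       sys.exit(1)

   def first_line(text: Optional[str]) -> str:
       if text is None:
           die("no text given")
       return text.splitlines()[0]  # checkers know text is a str here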
@@ -780,7 +780,7 @@ def __enter__(self):
            raise RuntimeError("file argument must be set by __init__ ")

        # Open both write and reading on logfile
        if type(self.logfile) == io.StringIO:
        if isinstance(self.logfile, io.StringIO):
            self._ioflag = True
            # cannot have two streams on tempfile, so we must make our own
            sys.stdout = self.logfile
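``isinstance`` is preferred over ``type(...) ==`` because it also accepts subclasses (and is what linters such as flake8 flag with E721); compare:

.. code-block:: python

   import io

   class RecordingStringIO(io.StringIO):
       """A hypothetical StringIO subclass."""

   buf = RecordingStringIO()
   print(type(buf) == io.StringIO)      # False: exact type comparison
   print(isinstance(buf, io.StringIO))  # True: subclasses count too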
@@ -286,7 +286,7 @@ def _check_build_test_callbacks(pkgs, error_cls):
    """Ensure stand-alone test method is not included in build-time callbacks"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)

        # TODO (post-34236): "test*"->"test_*" once remove deprecated methods
@@ -312,7 +312,7 @@ def _check_patch_urls(pkgs, error_cls):

    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        for condition, patches in pkg_cls.patches.items():
            for patch in patches:
                if not isinstance(patch, spack.patch.UrlPatch):
@@ -342,7 +342,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
    errors = []
    for pkg_name in pkgs:
        name_definitions = collections.defaultdict(list)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

        for cls_item in inspect.getmro(pkg_cls):
            for name in RESERVED_NAMES:
@@ -383,7 +383,7 @@ def _ensure_packages_are_pickeleable(pkgs, error_cls):
    """Ensure that package objects are pickleable"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg = pkg_cls(spack.spec.Spec(pkg_name))
        try:
            pickle.dumps(pkg)
@@ -424,7 +424,7 @@ def _ensure_all_versions_can_produce_a_fetcher(pkgs, error_cls):
    """Ensure all versions in a package can produce a fetcher"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg = pkg_cls(spack.spec.Spec(pkg_name))
        try:
            spack.fetch_strategy.check_pkg_attributes(pkg)
@@ -449,7 +449,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
    ]
    for pkg_name in pkgs:
        details = []
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        with open(filename, "r") as package_file:
            for i, line in enumerate(package_file):
                pattern = next((r for r in fixme_regexes if r.search(line)), None)
@@ -461,7 +461,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
            error_msg = "Package '{}' contains boilerplate that need to be removed"
            errors.append(error_cls(error_msg.format(pkg_name), details))

        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        if not pkg_cls.__doc__:
            error_msg = "Package '{}' miss a docstring"
            errors.append(error_cls(error_msg.format(pkg_name), []))
@@ -474,7 +474,7 @@ def _ensure_all_packages_use_sha256_checksums(pkgs, error_cls):
    """Ensure no packages use md5 checksums"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        if pkg_cls.manual_download:
            continue

@@ -511,7 +511,7 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
    """Ensure that methods modifying the build environment are ported to builder classes."""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        buildsystem_variant, _ = pkg_cls.variants["build_system"]
        buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
        builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]
@@ -538,7 +538,7 @@ def _linting_package_file(pkgs, error_cls):
    """Check for correctness of links"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

        # Does the homepage have http, and if so, does https work?
        if pkg_cls.homepage.startswith("http://"):
@@ -562,7 +562,7 @@ def _unknown_variants_in_directives(pkgs, error_cls):
    """Report unknown or wrong variants in directives for this package"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

        # Check "conflicts" directive
        for conflict, triggers in pkg_cls.conflicts.items():
@@ -628,15 +628,15 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
    """Report unknown dependencies and wrong variants for dependencies"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        for dependency_name, dependency_data in pkg_cls.dependencies.items():
            # No need to analyze virtual packages
            if spack.repo.path.is_virtual(dependency_name):
            if spack.repo.PATH.is_virtual(dependency_name):
                continue

            try:
                dependency_pkg_cls = spack.repo.path.get_pkg_class(dependency_name)
                dependency_pkg_cls = spack.repo.PATH.get_pkg_class(dependency_name)
            except spack.repo.UnknownPackageError:
                # This dependency is completely missing, so report
                # and continue the analysis
@@ -675,7 +675,7 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
    """Ensures that variant defaults are present and parsable from cli"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        for variant_name, entry in pkg_cls.variants.items():
            variant, _ = entry
            default_is_parsable = (
@@ -709,18 +709,33 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
    return errors


@package_directives
def _ensure_variants_have_descriptions(pkgs, error_cls):
    """Ensures that all variants have a description."""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        for variant_name, entry in pkg_cls.variants.items():
            variant, _ = entry
            if not variant.description:
                error_msg = "Variant '{}' in package '{}' is missing a description"
                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))

    return errors


@package_directives
def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
    """Report if version constraints used in directives are not satisfiable"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        dependencies_to_check = []
        for dependency_name, dependency_data in pkg_cls.dependencies.items():
            # Skip virtual dependencies for the time being, check on
            # their versions can be added later
            if spack.repo.path.is_virtual(dependency_name):
            if spack.repo.PATH.is_virtual(dependency_name):
                continue

            dependencies_to_check.extend([edge.spec for edge in dependency_data.values()])
@@ -729,7 +744,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
    for s in dependencies_to_check:
        dependency_pkg_cls = None
        try:
            dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
            dependency_pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
            # Some packages have hacks that might cause failures on some platform
            # Allow to explicitly set conditions to skip version checks in that case
            skip_conditions = getattr(dependency_pkg_cls, "skip_version_audit", [])
@@ -772,7 +787,7 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
    except variant_exceptions as e:
        summary = pkg.name + ': wrong variant in "{0}" directive'
        summary = summary.format(directive)
        filename = spack.repo.path.filename_for_package_name(pkg.name)
        filename = spack.repo.PATH.filename_for_package_name(pkg.name)

        error_msg = str(e).strip()
        if isinstance(e, KeyError):
@@ -9,7 +9,6 @@
import io
import itertools
import json
import multiprocessing.pool
import os
import re
import shutil
@@ -52,6 +51,7 @@
import spack.util.url as url_util
import spack.util.web as web_util
from spack.caches import misc_cache_location
from spack.package_prefs import get_package_dir_permissions, get_package_group
from spack.relocate_text import utf8_paths_to_single_binary_regex
from spack.spec import Spec
from spack.stage import Stage
@@ -875,32 +875,18 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
        db: A spack database used for adding specs and then writing the index.
        temp_dir (str): Location to write index.json and hash for pushing
        concurrency (int): Number of parallel processes to use when fetching

    Return:
        None
    """
    for file in file_list:
        contents = read_method(file)
        # Need full spec.json name or this gets confused with index.json.
        if file.endswith(".json.sig"):
            specfile_json = Spec.extract_json_from_clearsig(contents)
            fetched_spec = Spec.from_dict(specfile_json)
        elif file.endswith(".json"):
            fetched_spec = Spec.from_json(contents)
        else:
            continue

    def _fetch_spec_from_mirror(spec_url):
        spec_file_contents = read_method(spec_url)

        if spec_file_contents:
            # Need full spec.json name or this gets confused with index.json.
            if spec_url.endswith(".json.sig"):
                specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
                return Spec.from_dict(specfile_json)
            if spec_url.endswith(".json"):
                return Spec.from_json(spec_file_contents)

    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
    try:
        fetched_specs = tp.map(
            llnl.util.lang.star(_fetch_spec_from_mirror), [(f,) for f in file_list]
        )
    finally:
        tp.terminate()
        tp.join()

    for fetched_spec in fetched_specs:
        db.add(fetched_spec, None)
        db.mark(fetched_spec, "in_buildcache", True)
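The rewritten function fetches spec files concurrently with a thread pool instead of a serial loop. The general shape of that pattern, as a runnable sketch with a stand-in fetch function:

.. code-block:: python

   import multiprocessing.pool

   def fetch(url):
       # Stand-in for read_method(spec_url) in the code above.
       return f"contents of {url}"

   urls = [f"https://mirror.invalid/spec{i}.json" for i in range(8)]

   tp = multiprocessing.pool.ThreadPool(processes=4)
   try:
       contents = tp.map(fetch, urls)
   finally:
       tp.terminate()
       tp.join()

   print(len(contents))  # 8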
@@ -1208,9 +1194,17 @@ def tar_add_metadata(tar: tarfile.TarFile, path: str, data: dict):
    tar.addfile(deterministic_tarinfo(tarinfo), io.BytesIO(bstring))


def _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo):
def deterministic_tarinfo_without_buildinfo(tarinfo: tarfile.TarInfo):
    """Skip buildinfo file when creating a tarball, and normalize other tarinfo fields."""
    if tarinfo.name.endswith("/.spack/binary_distribution"):
        return None

    return deterministic_tarinfo(tarinfo)


def _do_create_tarball(tarfile_path: str, binaries_dir: str, pkg_dir: str, buildinfo: dict):
    with gzip_compressed_tarfile(tarfile_path) as tar:
        tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo)
        tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo_without_buildinfo)
        tar_add_metadata(tar, buildinfo_file_name(pkg_dir), buildinfo)
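The new helper relies on the ``filter`` argument of ``tarfile.TarFile.add``: the callable may rewrite each ``TarInfo`` or drop the entry entirely by returning ``None``. A minimal sketch of that mechanism (assumes a ``pkg_dir`` directory exists on disk):

.. code-block:: python

   import tarfile

   def normalize(tarinfo: tarfile.TarInfo):
       if tarinfo.name.endswith(".log"):  # drop unwanted entries
           return None
       tarinfo.uid = tarinfo.gid = 0      # normalize fields that vary per build
       tarinfo.uname = tarinfo.gname = ""
       tarinfo.mtime = 0
       return tarinfo

   with tarfile.open("pkg.tar.gz", "w:gz") as tar:
       tar.add("pkg_dir", arcname="pkg", filter=normalize)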
@@ -1304,15 +1298,7 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
    else:
        raise ValueError("{0} not a valid spec file type".format(spec_file))
    spec_dict["buildcache_layout_version"] = 1
    bchecksum = {}
    bchecksum["hash_algorithm"] = "sha256"
    bchecksum["hash"] = checksum
    spec_dict["binary_cache_checksum"] = bchecksum
    # Add original install prefix relative to layout root to spec.json.
    # This will be used to determine is the directory layout has changed.
    buildinfo = {}
    buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.STORE.layout.root)
    spec_dict["buildinfo"] = buildinfo
    spec_dict["binary_cache_checksum"] = {"hash_algorithm": "sha256", "hash": checksum}

    with open(specfile_path, "w") as outfile:
        # Note: when using gpg clear sign, we need to avoid long lines (19995 chars).
@@ -1791,6 +1777,27 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
    return tarfile_path


def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
    """Strip the top-level directory `prefix` from the member names in a tarfile."""
    # Including trailing /, otherwise we end up with absolute paths.
    regex = re.compile(re.escape(prefix) + "/*")

    # Remove the top-level directory from the member (link)names.
    # Note: when a tarfile is created, relative in-prefix symlinks are
    # expanded to matching member names of tarfile entries. So, we have
    # to ensure that those are updated too.
    # Absolute symlinks are copied verbatim -- relocation should take care of
    # them.
    for m in tar.getmembers():
        result = regex.match(m.name)
        assert result is not None
        m.name = m.name[result.end() :]
        if m.linkname:
            result = regex.match(m.linkname)
            if result:
                m.linkname = m.linkname[result.end() :]


def extract_tarball(spec, download_result, unsigned=False, force=False):
    """
    extract binary tarball for given package into install area
@@ -1801,6 +1808,14 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
        else:
            raise NoOverwriteException(str(spec.prefix))

    # Create the install prefix
    fsys.mkdirp(
        spec.prefix,
        mode=get_package_dir_permissions(spec),
        group=get_package_group(spec),
        default_perms="parents",
    )

    specfile_path = download_result["specfile_stage"].save_filename

    with open(specfile_path, "r") as inputfile:
@@ -1854,42 +1869,23 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
            tarfile_path, size, contents, "sha256", expected, local_checksum
        )

    new_relative_prefix = str(os.path.relpath(spec.prefix, spack.store.STORE.layout.root))
    # if the original relative prefix is in the spec file use it
    buildinfo = spec_dict.get("buildinfo", {})
    old_relative_prefix = buildinfo.get("relative_prefix", new_relative_prefix)
    rel = buildinfo.get("relative_rpaths")
    info = "old relative prefix %s\nnew relative prefix %s\nrelative rpaths %s"
    tty.debug(info % (old_relative_prefix, new_relative_prefix, rel), level=2)

    # Extract the tarball into the store root, presumably on the same filesystem.
    # The directory created is the base directory name of the old prefix.
    # Moving the old prefix name to the new prefix location should preserve
    # hard links and symbolic links.
    extract_tmp = os.path.join(spack.store.STORE.layout.root, ".tmp")
    mkdirp(extract_tmp)
    extracted_dir = os.path.join(extract_tmp, old_relative_prefix.split(os.path.sep)[-1])

    with closing(tarfile.open(tarfile_path, "r")) as tar:
        try:
            tar.extractall(path=extract_tmp)
        except Exception as e:
            _delete_staged_downloads(download_result)
            shutil.rmtree(extracted_dir)
            raise e
    try:
        shutil.move(extracted_dir, spec.prefix)
    except Exception as e:
        with closing(tarfile.open(tarfile_path, "r")) as tar:
            # Remove install prefix from tarfile to extract directly into spec.prefix
            _tar_strip_component(tar, prefix=_ensure_common_prefix(tar))
            tar.extractall(path=spec.prefix)
    except Exception:
        shutil.rmtree(spec.prefix, ignore_errors=True)
        _delete_staged_downloads(download_result)
        shutil.rmtree(extracted_dir)
        raise e
        raise

    os.remove(tarfile_path)
    os.remove(specfile_path)

    try:
        relocate_package(spec)
    except Exception as e:
        shutil.rmtree(spec.prefix)
        shutil.rmtree(spec.prefix, ignore_errors=True)
        raise e
    else:
        manifest_file = os.path.join(
@@ -1902,12 +1898,29 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
            tty.warn("No manifest file in tarball for spec %s" % spec_id)
    finally:
        if tmpdir:
            shutil.rmtree(tmpdir)
            shutil.rmtree(tmpdir, ignore_errors=True)
        if os.path.exists(filename):
            os.remove(filename)
        _delete_staged_downloads(download_result)


def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
    # Get the shortest length directory.
    common_prefix = min((e.name for e in tar.getmembers() if e.isdir()), key=len, default=None)

    if common_prefix is None:
        raise ValueError("Tarball does not contain a common prefix")

    # Validate that each file starts with the prefix
    for member in tar.getmembers():
        if not member.name.startswith(common_prefix):
            raise ValueError(
                f"Tarball contains file {member.name} outside of prefix {common_prefix}"
            )

    return common_prefix


def install_root_node(spec, unsigned=False, force=False, sha256=None):
    """Install the root node of a concrete spec from a buildcache.
@@ -2355,22 +2368,12 @@ def __init__(self, all_architectures):

        self.possible_specs = specs

    def __call__(self, spec, **kwargs):
    def __call__(self, spec: Spec, **kwargs):
        """
        Args:
            spec (str): The spec being searched for in its string representation or hash.
            spec: The spec being searched for
        """
        matches = []
        if spec.startswith("/"):
            # Matching a DAG hash
            query_hash = spec.replace("/", "")
            for candidate_spec in self.possible_specs:
                if candidate_spec.dag_hash().startswith(query_hash):
                    matches.append(candidate_spec)
        else:
            # Matching a spec constraint
            matches = [s for s in self.possible_specs if s.satisfies(spec)]
        return matches
        return [s for s in self.possible_specs if s.satisfies(spec)]


class FetchIndexError(Exception):
@@ -124,9 +124,9 @@ def _read_and_sanitize_configuration() -> Dict[str, Any]:
def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
    tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
    config_scopes: MutableSequence["spack.config.ConfigScope"] = [
        spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)
        spack.config.InternalConfigScope("_builtin", spack.config.CONFIG_DEFAULTS)
    ]
    configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
    configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
    for name, path in configuration_paths:
        platform = spack.platforms.host().name
        platform_scope = spack.config.ConfigScope(
@@ -476,15 +476,22 @@ def ensure_executables_in_path_or_raise(
def _add_externals_if_missing() -> None:
    search_list = [
        # clingo
        spack.repo.path.get_pkg_class("cmake"),
        spack.repo.path.get_pkg_class("bison"),
        spack.repo.PATH.get_pkg_class("cmake"),
        spack.repo.PATH.get_pkg_class("bison"),
        # GnuPG
        spack.repo.path.get_pkg_class("gawk"),
        spack.repo.PATH.get_pkg_class("gawk"),
        # develop deps
        spack.repo.PATH.get_pkg_class("git"),
    ]
    if IS_WINDOWS:
        search_list.append(spack.repo.path.get_pkg_class("winbison"))
    detected_packages = spack.detection.by_executable(search_list)
    spack.detection.update_configuration(detected_packages, scope="bootstrap")
        search_list.append(spack.repo.PATH.get_pkg_class("winbison"))
    externals = spack.detection.by_executable(search_list)
    # System git is typically deprecated, so mark as non-buildable to force it as external
    non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
    spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)
    spack.detection.update_configuration(
        non_buildable_externals, scope="bootstrap", buildable=False
    )


def clingo_root_spec() -> str:
@@ -23,6 +23,7 @@

from ._common import _root_spec
from .config import root_path, spec_for_current_python, store_path
from .core import _add_externals_if_missing


class BootstrapEnvironment(spack.environment.Environment):
@@ -185,6 +186,7 @@ def pytest_root_spec() -> str:

def ensure_environment_dependencies() -> None:
    """Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
    _add_externals_if_missing()
    with BootstrapEnvironment() as env:
        env.update_installations()
        env.update_syspath_and_environ()
@@ -1027,7 +1027,7 @@ def get_cmake_prefix_path(pkg):


def _setup_pkg_and_run(
    serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
):
    context = kwargs.get("context", "build")

@@ -1048,12 +1048,12 @@ def _setup_pkg_and_run(
            pkg, dirty=kwargs.get("dirty", False), context=context
        )
        return_value = function(pkg, kwargs)
        child_pipe.send(return_value)
        write_pipe.send(return_value)

    except StopPhase as e:
        # Do not create a full ChildError from this, it's not an error
        # it's a control statement.
        child_pipe.send(e)
        write_pipe.send(e)
    except BaseException:
        # catch ANYTHING that goes wrong in the child process
        exc_type, exc, tb = sys.exc_info()
@@ -1102,10 +1102,10 @@ def _setup_pkg_and_run(
            context,
            package_context,
        )
        child_pipe.send(ce)
        write_pipe.send(ce)

    finally:
        child_pipe.close()
        write_pipe.close()
        if input_multiprocess_fd is not None:
            input_multiprocess_fd.close()
@@ -1149,7 +1149,7 @@ def child_fun():
    For more information on `multiprocessing` child process creation
    mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
    """
    parent_pipe, child_pipe = multiprocessing.Pipe()
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    input_multiprocess_fd = None
    jobserver_fd1 = None
    jobserver_fd2 = None
@@ -1174,7 +1174,7 @@ def child_fun():
                serialized_pkg,
                function,
                kwargs,
                child_pipe,
                write_pipe,
                input_multiprocess_fd,
                jobserver_fd1,
                jobserver_fd2,
@@ -1183,6 +1183,12 @@ def child_fun():

        p.start()

        # We close the writable end of the pipe now to be sure that p is the
        # only process which owns a handle for it. This ensures that when p
        # closes its handle for the writable end, read_pipe.recv() will
        # promptly report the readable end as being ready.
        write_pipe.close()

    except InstallError as e:
        e.pkg = pkg
        raise
@@ -1192,7 +1198,16 @@ def child_fun():
        if input_multiprocess_fd is not None:
            input_multiprocess_fd.close()

    child_result = parent_pipe.recv()
    def exitcode_msg(p):
        typ = "exit" if p.exitcode >= 0 else "signal"
        return f"{typ} {abs(p.exitcode)}"

    try:
        child_result = read_pipe.recv()
    except EOFError:
        p.join()
        raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")

    p.join()

    # If returns a StopPhase, raise it
@@ -1212,6 +1227,10 @@ def child_fun():
        child_result.print_context()
        raise child_result

    # Fallback. Usually caught beforehand in EOFError above.
    if p.exitcode != 0:
        raise InstallError(f"The process failed unexpectedly ({exitcode_msg(p)})")

    return child_result
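The pipe handling above is a standard multiprocessing pattern: the parent keeps only the read end and closes its copy of the write end right after ``start()``, so that an ``EOFError`` from ``recv()`` reliably signals a child that died without sending a result, instead of blocking forever. Condensed to its essentials:

.. code-block:: python

   import multiprocessing

   def worker(write_pipe):
       write_pipe.send("result")
       write_pipe.close()

   if __name__ == "__main__":
       read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
       p = multiprocessing.Process(target=worker, args=(write_pipe,))
       p.start()
       write_pipe.close()  # child now holds the only writable handle
       try:
           result = read_pipe.recv()
       except EOFError:
           p.join()
           raise RuntimeError(f"child exited without a result (exitcode {p.exitcode})")
       p.join()
       print(result)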
@@ -1256,9 +1275,8 @@ def make_stack(tb, stack=None):
        func = getattr(obj, tb.tb_frame.f_code.co_name, "")
        if func:
            typename, *_ = func.__qualname__.partition(".")

        if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
            break
            if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
                break
    else:
        return None
@@ -55,7 +55,8 @@ def flags_to_build_system_args(self, flags):
    setattr(self, "configure_flag_args", [])
    for flag, values in flags.items():
        if values:
            values_str = "{0}={1}".format(flag.upper(), " ".join(values))
            var_name = "LIBS" if flag == "ldlibs" else flag.upper()
            values_str = "{0}={1}".format(var_name, " ".join(values))
            self.configure_flag_args.append(values_str)
    # Spack's fflags are meant for both F77 and FC, therefore we
    # additionaly set FCFLAGS if required.
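The fix maps Spack's ``ldlibs`` flag group onto the ``LIBS`` variable that Autotools configure scripts read, instead of a simple uppercase of the flag name; the mapping in isolation:

.. code-block:: python

   def configure_variable(flag: str) -> str:
       # configure reads extra link libraries from LIBS, not LDLIBS.
       return "LIBS" if flag == "ldlibs" else flag.upper()

   flags = {"cflags": ["-O2"], "ldlibs": ["-lpthread"]}
   print(["{}={}".format(configure_variable(f), " ".join(v)) for f, v in flags.items() if v])
   # ['CFLAGS=-O2', 'LIBS=-lpthread']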
@@ -162,17 +162,6 @@ def initconfig_compiler_entries(self):
            libs_string = libs_format_string.format(lang)
            entries.append(cmake_cache_string(libs_string, libs_flags))

        # Set the generator in the cached config
        if self.spec.satisfies("generator=make"):
            entries.append(cmake_cache_string("CMAKE_GENERATOR", "Unix Makefiles"))
        if self.spec.satisfies("generator=ninja"):
            entries.append(cmake_cache_string("CMAKE_GENERATOR", "Ninja"))
            entries.append(
                cmake_cache_string(
                    "CMAKE_MAKE_PROGRAM", "{0}/ninja".format(spec["ninja"].prefix.bin)
                )
            )

        return entries

    def initconfig_mpi_entries(self):
@@ -248,7 +248,8 @@ def std_cmake_args(self):
|
||||
@staticmethod
|
||||
def std_args(pkg, generator=None):
|
||||
"""Computes the standard cmake arguments for a generic package"""
|
||||
generator = generator or "Unix Makefiles"
|
||||
default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
|
||||
generator = generator or default_generator
|
||||
valid_primary_generators = ["Unix Makefiles", "Ninja"]
|
||||
primary_generator = _extract_primary_generator(generator)
|
||||
if primary_generator not in valid_primary_generators:
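An illustration of the platform-dependent default added here, with a simplified stand-in for _extract_primary_generator (the real helper also handles compound generator strings):

    import sys

    def pick_generator(generator=None):
        default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
        generator = generator or default_generator
        if generator not in ("Unix Makefiles", "Ninja"):
            raise ValueError("unsupported CMake generator: " + generator)
        return generator

    print(pick_generator())  # "Unix Makefiles" on Linux/macOS, "Ninja" on Windows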

@@ -209,5 +209,5 @@ def install(self, pkg, spec, prefix):
def check(self):
"""Search Meson-generated files for the target ``test`` and run it if found."""
with fs.working_dir(self.build_directory):
self._if_ninja_target_execute("test")
self._if_ninja_target_execute("check")
self.pkg._if_ninja_target_execute("test")
self.pkg._if_ninja_target_execute("check")

@@ -30,7 +30,7 @@


class PythonExtension(spack.package_base.PackageBase):
maintainers("adamjstewart", "pradyunsg")
maintainers("adamjstewart")

@property
def import_modules(self):
@@ -201,7 +201,7 @@ def update_external_dependencies(self, extendee_spec=None):
else:
python = self.get_external_python_for_prefix()
if not python.concrete:
repo = spack.repo.path.repo_for_pkg(python)
repo = spack.repo.PATH.repo_for_pkg(python)
python.namespace = repo.namespace

# Ensure architecture information is present
@@ -301,7 +301,7 @@ def get_external_python_for_prefix(self):
return python_externals_configured[0]

python_externals_detection = spack.detection.by_executable(
[spack.repo.path.get_pkg_class("python")], path_hints=[self.spec.external_path]
[spack.repo.PATH.get_pkg_class("python")], path_hints=[self.spec.external_path]
)

python_externals_detected = [

@@ -28,7 +28,7 @@ class QMakePackage(spack.package_base.PackageBase):

build_system("qmake")

depends_on("qt", type="build", when="build_system=qmake")
depends_on("qmake", type="build", when="build_system=qmake")


@spack.builder.builder("qmake")

@@ -140,8 +140,6 @@ class ROCmPackage(PackageBase):
depends_on("hsa-rocr-dev", when="+rocm")
depends_on("hip +rocm", when="+rocm")

conflicts("^blt@:0.3.6", when="+rocm")

# need amd gpu type for rocm builds
conflicts("amdgpu_target=none", when="+rocm")


@@ -7,13 +7,14 @@
import re

import llnl.util.tty as tty
from llnl.util.filesystem import find, join_path, working_dir
from llnl.util.filesystem import find, working_dir

import spack.builder
import spack.install_test
import spack.package_base
from spack.directives import build_system, depends_on, extends
from spack.multimethod import when
from spack.util.executable import Executable

from ._checks import BaseBuilder, execute_install_time_tests

@@ -39,9 +40,8 @@ class SIPPackage(spack.package_base.PackageBase):
build_system("sip")

with when("build_system=sip"):
extends("python")
depends_on("qt")
depends_on("py-sip")
extends("python", type=("build", "link", "run"))
depends_on("py-sip", type="build")

@property
def import_modules(self):
@@ -113,13 +113,13 @@ class SIPBuilder(BaseBuilder):
* install

The configure phase already adds a set of default flags. To see more
options, run ``python configure.py --help``.
options, run ``sip-build --help``.
"""

phases = ("configure", "build", "install")

#: Names associated with package methods in the old build-system format
legacy_methods = ("configure_file", "configure_args", "build_args", "install_args")
legacy_methods = ("configure_args", "build_args", "install_args")

#: Names associated with package attributes in the old build-system format
legacy_attributes = (
@@ -130,34 +130,17 @@ class SIPBuilder(BaseBuilder):
"build_directory",
)

def configure_file(self):
"""Returns the name of the configure file to use."""
return "configure.py"
build_directory = "build"

def configure(self, pkg, spec, prefix):
"""Configure the package."""
configure = self.configure_file()

args = self.configure_args()
# https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
args.extend(self.configure_args())

args.extend(
[
"--verbose",
"--confirm-license",
"--qmake",
spec["qt"].prefix.bin.qmake,
"--sip",
spec["py-sip"].prefix.bin.sip,
"--sip-incdir",
join_path(spec["py-sip"].prefix, spec["python"].package.include),
"--bindir",
prefix.bin,
"--destdir",
inspect.getmodule(self.pkg).python_platlib,
]
)

self.pkg.python(configure, *args)
sip_build = Executable(spec["py-sip"].prefix.bin.join("sip-build"))
sip_build(*args)

def configure_args(self):
"""Arguments to pass to configure."""
@@ -167,7 +150,8 @@ def build(self, pkg, spec, prefix):
"""Build the package."""
args = self.build_args()

inspect.getmodule(self.pkg).make(*args)
with working_dir(self.build_directory):
inspect.getmodule(self.pkg).make(*args)

def build_args(self):
"""Arguments to pass to build."""
@@ -177,21 +161,11 @@ def install(self, pkg, spec, prefix):
"""Install the package."""
args = self.install_args()

inspect.getmodule(self.pkg).make("install", parallel=False, *args)
with working_dir(self.build_directory):
inspect.getmodule(self.pkg).make("install", *args)

def install_args(self):
"""Arguments to pass to install."""
return []

spack.builder.run_after("install")(execute_install_time_tests)

@spack.builder.run_after("install")
def extend_path_setup(self):
# See github issue #14121 and PR #15297
module = self.pkg.spec["py-sip"].variants["module"].value
if module != "sip":
module = module.split(".")[0]
with working_dir(inspect.getmodule(self.pkg).python_platlib):
with open(os.path.join(module, "__init__.py"), "a") as f:
f.write("from pkgutil import extend_path\n")
f.write("__path__ = extend_path(__path__, __name__)\n")

@@ -20,9 +20,9 @@


def misc_cache_location():
"""The ``misc_cache`` is Spack's cache for small data.
"""The ``MISC_CACHE`` is Spack's cache for small data.

Currently the ``misc_cache`` stores indexes for virtual dependency
Currently the ``MISC_CACHE`` stores indexes for virtual dependency
providers and for which packages provide which tags.
"""
path = spack.config.get("config:misc_cache", spack.paths.default_misc_cache_path)
@@ -35,7 +35,7 @@ def _misc_cache():


#: Spack's cache for small data
misc_cache: Union[
MISC_CACHE: Union[
spack.util.file_cache.FileCache, llnl.util.lang.Singleton
] = llnl.util.lang.Singleton(_misc_cache)

@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):


#: Spack's local cache for downloaded source archives
fetch_cache: Union[
FETCH_CACHE: Union[
spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
] = llnl.util.lang.Singleton(_fetch_cache)
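A hedged illustration of the lazy-singleton idea behind MISC_CACHE and FETCH_CACHE — not the actual llnl.util.lang.Singleton implementation, just the pattern: the factory runs only on first attribute access.

    class Singleton:
        def __init__(self, factory):
            self._factory = factory
            self._instance = None

        def __getattr__(self, name):
            # Only called for attributes not found on the wrapper itself.
            if self._instance is None:
                self._instance = self._factory()
            return getattr(self._instance, name)

    class _Cache:
        def __init__(self):
            print("building cache")  # runs once, lazily
            self.hits = 0

    CACHE = Singleton(_Cache)
    print(CACHE.hits)  # the factory fires here, on first access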

@@ -535,7 +535,7 @@ def __job_name(name, suffix=""):
"""Compute the name of a named job with appropriate suffix.
Valid suffixes are either '-remove' or empty string or None
"""
assert type(name) == str
assert isinstance(name, str)

jname = name
if suffix:
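Why the hunk replaces type(name) == str with isinstance: a short demonstration with a hypothetical str subclass, which the exact-type check wrongly rejects.

    class JobName(str):
        pass

    name = JobName("build-job")
    print(type(name) == str)      # False -- exact-type check rejects subclasses
    print(isinstance(name, str))  # True  -- isinstance accepts them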
@@ -885,7 +885,7 @@ def generate_gitlab_ci_yaml(
cli_scopes = [
os.path.relpath(s.path, concrete_env_dir)
for s in cfg.scopes().values()
if type(s) == cfg.ImmutableConfigScope
if isinstance(s, cfg.ImmutableConfigScope)
and s.path not in env_includes
and os.path.exists(s.path)
]
@@ -1278,6 +1278,7 @@ def main_script_replacements(cmd):
"SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None",
"SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
"SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
}

if remote_mirror_override:
@@ -1287,9 +1288,6 @@ def main_script_replacements(cmd):
if spack_stack_name:
output_object["variables"]["SPACK_CI_STACK_NAME"] = spack_stack_name

# Ensure the child pipeline always runs
output_object["workflow"] = {"rules": [{"when": "always"}]}

if spack_buildcache_copy:
# Write out the file describing specs that should be copied
copy_specs_dir = os.path.join(pipeline_artifacts_dir, "specs_to_copy")
@@ -1305,21 +1303,17 @@ def main_script_replacements(cmd):
with open(copy_specs_file, "w") as fd:
fd.write(json.dumps(buildcache_copies))

sorted_output = {}
for output_key, output_value in sorted(output_object.items()):
sorted_output[output_key] = output_value

# TODO(opadron): remove this or refactor
if run_optimizer:
import spack.ci_optimization as ci_opt

sorted_output = ci_opt.optimizer(sorted_output)
output_object = ci_opt.optimizer(output_object)

# TODO(opadron): remove this or refactor
if use_dependencies:
import spack.ci_needs_workaround as cinw

sorted_output = cinw.needs_to_dependencies(sorted_output)
output_object = cinw.needs_to_dependencies(output_object)
else:
# No jobs were generated
noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
@@ -1330,10 +1324,17 @@ def main_script_replacements(cmd):
noop_job["script"] = [
'echo "copy-only pipelines are not supported with deprecated ci configs"'
]
sorted_output = {"unsupported-copy": noop_job}
output_object = {"unsupported-copy": noop_job}
else:
tty.debug("No specs to rebuild, generating no-op job")
sorted_output = {"no-specs-to-rebuild": noop_job}
output_object = {"no-specs-to-rebuild": noop_job}

# Ensure the child pipeline always runs
output_object["workflow"] = {"rules": [{"when": "always"}]}

sorted_output = {}
for output_key, output_value in sorted(output_object.items()):
sorted_output[output_key] = output_value

if known_broken_specs_encountered:
tty.error("This pipeline generated hashes known to be broken on develop:")
@@ -1503,7 +1504,7 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
return

try:
pkg_cls = spack.repo.path.get_pkg_class(job_spec.name)
pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
job_pkg = pkg_cls(job_spec)
tty.debug("job package: {0}".format(job_pkg))
except AssertionError:
@@ -1689,7 +1690,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
return True


def reproduce_ci_job(url, work_dir):
def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
"""Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
attempt to setup an environment in which the failure can be reproduced
locally. This entails the following:
@@ -1705,6 +1706,11 @@ def reproduce_ci_job(url, work_dir):
work_dir = os.path.realpath(work_dir)
download_and_extract_artifacts(url, work_dir)

gpg_path = None
if gpg_url:
gpg_path = web_util.fetch_url_text(gpg_url, dest_dir=os.path.join(work_dir, "_pgp"))
rel_gpg_path = gpg_path.replace(work_dir, "").lstrip(os.path.sep)

lock_file = fs.find(work_dir, "spack.lock")[0]
repro_lock_dir = os.path.dirname(lock_file)

@@ -1797,60 +1803,63 @@ def reproduce_ci_job(url, work_dir):
# more faithful reproducer if everything appears to run in the same
# absolute path used during the CI build.
mount_as_dir = "/work"
mounted_workdir = "/reproducer"
if repro_details:
mount_as_dir = repro_details["ci_project_dir"]
mounted_repro_dir = os.path.join(mount_as_dir, rel_repro_dir)
mounted_env_dir = os.path.join(mount_as_dir, relative_concrete_env_dir)
if gpg_path:
mounted_gpg_path = os.path.join(mounted_workdir, rel_gpg_path)

# We will also try to clone spack from your local checkout and
# reproduce the state present during the CI build, and put that into
# the bind-mounted reproducer directory.
# We will also try to clone spack from your local checkout and
# reproduce the state present during the CI build, and put that into
# the bind-mounted reproducer directory.

# Regular expressions for parsing that HEAD commit. If the pipeline
# was on the gitlab spack mirror, it will have been a merge commit made by
# github and pushed by the sync script. If the pipeline was run on some
# environment repo, then the tested spack commit will likely have been
# a regular commit.
commit_1 = None
commit_2 = None
commit_regex = re.compile(r"commit\s+([^\s]+)")
merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")
# Regular expressions for parsing that HEAD commit. If the pipeline
# was on the gitlab spack mirror, it will have been a merge commit made by
# github and pushed by the sync script. If the pipeline was run on some
# environment repo, then the tested spack commit will likely have been
# a regular commit.
commit_1 = None
commit_2 = None
commit_regex = re.compile(r"commit\s+([^\s]+)")
merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")

# Try the more specific merge commit regex first
m = merge_commit_regex.search(spack_info)
# Try the more specific merge commit regex first
m = merge_commit_regex.search(spack_info)
if m:
# This was a merge commit and we captured the parents
commit_1 = m.group(1)
commit_2 = m.group(2)
else:
# Not a merge commit, just get the commit sha
m = commit_regex.search(spack_info)
if m:
# This was a merge commit and we captured the parents
commit_1 = m.group(1)
commit_2 = m.group(2)

setup_result = False
if commit_1:
if commit_2:
setup_result = setup_spack_repro_version(work_dir, commit_2, merge_commit=commit_1)
else:
# Not a merge commit, just get the commit sha
m = commit_regex.search(spack_info)
if m:
commit_1 = m.group(1)
setup_result = setup_spack_repro_version(work_dir, commit_1)

setup_result = False
if commit_1:
if commit_2:
setup_result = setup_spack_repro_version(work_dir, commit_2, merge_commit=commit_1)
else:
setup_result = setup_spack_repro_version(work_dir, commit_1)

if not setup_result:
setup_msg = """
This can happen if the spack you are using to run this command is not a git
repo, or if it is a git repo, but it does not have the commits needed to
recreate the tested merge commit. If you are trying to reproduce a spack
PR pipeline job failure, try fetching the latest develop commits from
mainline spack and make sure you have the most recent commit of the PR
branch in your local spack repo. Then run this command again.
Alternatively, you can also manually clone spack if you know the version
you want to test.
"""
tty.error(
"Failed to automatically setup the tested version of spack "
"in your local reproduction directory."
)
print(setup_msg)
if not setup_result:
setup_msg = """
This can happen if the spack you are using to run this command is not a git
repo, or if it is a git repo, but it does not have the commits needed to
recreate the tested merge commit. If you are trying to reproduce a spack
PR pipeline job failure, try fetching the latest develop commits from
mainline spack and make sure you have the most recent commit of the PR
branch in your local spack repo. Then run this command again.
Alternatively, you can also manually clone spack if you know the version
you want to test.
"""
tty.error(
"Failed to automatically setup the tested version of spack "
"in your local reproduction directory."
)
print(setup_msg)
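A small standalone demo of the commit-parsing regexes above, run against a made-up spack_info blob (the real text comes from the downloaded job artifacts):

    import re

    spack_info = "Merge 1234abc into 5678def"  # illustrative input
    commit_regex = re.compile(r"commit\s+([^\s]+)")
    merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")

    commit_1 = commit_2 = None
    m = merge_commit_regex.search(spack_info)
    if m:
        # Merge commit: both parents were captured
        commit_1, commit_2 = m.group(1), m.group(2)
    else:
        m = commit_regex.search(spack_info)
        if m:
            commit_1 = m.group(1)
    print(commit_1, commit_2)  # 1234abc 5678def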

# In cases where CI build was run on a shell runner, it might be useful
# to see what tags were applied to the job so the user knows what shell
@@ -1861,45 +1870,92 @@ def reproduce_ci_job(url, work_dir):
job_tags = job_yaml["tags"]
tty.msg("Job ran with the following tags: {0}".format(job_tags))

inst_list = []
entrypoint_script = [
["git", "config", "--global", "--add", "safe.directory", mount_as_dir],
[".", os.path.join(mount_as_dir if job_image else work_dir, "share/spack/setup-env.sh")],
["spack", "gpg", "trust", mounted_gpg_path if job_image else gpg_path] if gpg_path else [],
["spack", "env", "activate", mounted_env_dir if job_image else repro_dir],
[os.path.join(mounted_repro_dir, "install.sh") if job_image else install_script],
]

inst_list = []
# Finally, print out some instructions to reproduce the build
if job_image:
inst_list.append("\nRun the following command:\n\n")
inst_list.append(
" $ docker run --rm --name spack_reproducer -v {0}:{1}:Z -ti {2}\n".format(
work_dir, mount_as_dir, job_image
)
# Allow interactive
entrypoint_script.extend(
[
[
"echo",
"Re-run install script using:\n\t{0}".format(
os.path.join(mounted_repro_dir, "install.sh")
if job_image
else install_script
),
],
# Allow interactive
["exec", "$@"],
]
)
inst_list.append("\nOnce inside the container:\n\n")
process_command(
"entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False
)

docker_command = [
[
runtime,
"run",
"-i",
"-t",
"--rm",
"--name",
"spack_reproducer",
"-v",
":".join([work_dir, mounted_workdir, "Z"]),
"-v",
":".join(
[
os.path.join(work_dir, "jobs_scratch_dir"),
os.path.join(mount_as_dir, "jobs_scratch_dir"),
"Z",
]
),
"-v",
":".join([os.path.join(work_dir, "spack"), mount_as_dir, "Z"]),
"--entrypoint",
os.path.join(mounted_workdir, "entrypoint.sh"),
job_image,
"bash",
]
]
autostart = autostart and setup_result
process_command("start", docker_command, work_dir, run=autostart)

if not autostart:
inst_list.append("\nTo run the docker reproducer:\n\n")
inst_list.extend(
[
" - Start the docker container install",
" $ {0}/start.sh".format(work_dir),
]
)
else:
process_command("reproducer", entrypoint_script, work_dir, run=False)

inst_list.append("\nOnce on the tagged runner:\n\n")
inst_list.extend(
[" - Run the reproducer script", " $ {0}/reproducer.sh".format(work_dir)]
)

if not setup_result:
inst_list.append(" - Clone spack and acquire tested commit\n")
inst_list.append("{0}".format(spack_info))
spack_root = "<spack-clone-path>"
else:
spack_root = "{0}/spack".format(mount_as_dir)
inst_list.append("\n - Clone spack and acquire tested commit")
inst_list.append("\n {0}\n".format(spack_info))
inst_list.append("\n")
inst_list.append("\n Path to clone spack: {0}/spack\n\n".format(work_dir))

inst_list.append(" - Activate the environment\n\n")
inst_list.append(" $ source {0}/share/spack/setup-env.sh\n".format(spack_root))
inst_list.append(
" $ spack env activate --without-view {0}\n\n".format(
mounted_env_dir if job_image else repro_dir
)
)
inst_list.append(" - Run the install script\n\n")
inst_list.append(
" $ {0}\n".format(
os.path.join(mounted_repro_dir, "install.sh") if job_image else install_script
)
)

print("".join(inst_list))
tty.msg("".join(inst_list))


def process_command(name, commands, repro_dir):
def process_command(name, commands, repro_dir, run=True, exit_on_failure=True):
"""
Create a script for and run the command. Copy the script to the
reproducibility directory.
@@ -1909,6 +1965,7 @@ def process_command(name, commands, repro_dir):
commands (list): list of arguments for single command or list of lists of
arguments for multiple commands. No shell escape is performed.
repro_dir (str): Job reproducibility directory
run (bool): Run the script and return the exit code if True

Returns: the exit code from processing the command
"""
@@ -1927,7 +1984,8 @@ def process_command(name, commands, repro_dir):
with open(script, "w") as fd:
fd.write("#!/bin/sh\n\n")
fd.write("\n# spack {0} command\n".format(name))
fd.write("set -e\n")
if exit_on_failure:
fd.write("set -e\n")
if os.environ.get("SPACK_VERBOSE_SCRIPT"):
fd.write("set -x\n")
fd.write(full_command)
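A sketch of the optional set -e behavior process_command gains here: the generated script aborts on first failure only when exit_on_failure is requested. Paths and the command below are placeholders.

    import os

    def write_script(path, command, name="demo", exit_on_failure=True):
        with open(path, "w") as fd:
            fd.write("#!/bin/sh\n\n")
            fd.write("\n# spack {0} command\n".format(name))
            if exit_on_failure:
                fd.write("set -e\n")
            if os.environ.get("SPACK_VERBOSE_SCRIPT"):
                fd.write("set -x\n")
            fd.write(command + "\n")

    write_script("/tmp/demo_install.sh", "echo hello")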
@@ -1938,28 +1996,36 @@ def process_command(name, commands, repro_dir):

copy_path = os.path.join(repro_dir, script)
shutil.copyfile(script, copy_path)
st = os.stat(copy_path)
os.chmod(copy_path, st.st_mode | stat.S_IEXEC)

# Run the generated install.sh shell script as if it were being run in
# a login shell.
try:
cmd_process = subprocess.Popen(["/bin/sh", "./{0}".format(script)])
cmd_process.wait()
exit_code = cmd_process.returncode
except (ValueError, subprocess.CalledProcessError, OSError) as err:
tty.error("Encountered error running {0} script".format(name))
tty.error(err)
exit_code = 1
exit_code = None
if run:
try:
cmd_process = subprocess.Popen(["/bin/sh", "./{0}".format(script)])
cmd_process.wait()
exit_code = cmd_process.returncode
except (ValueError, subprocess.CalledProcessError, OSError) as err:
tty.error("Encountered error running {0} script".format(name))
tty.error(err)
exit_code = 1

tty.debug("spack {0} exited {1}".format(name, exit_code))
else:
# Delete the script, it is copied to the destination dir
os.remove(script)

tty.debug("spack {0} exited {1}".format(name, exit_code))
return exit_code


def create_buildcache(
input_spec: spack.spec.Spec,
*,
pr_pipeline: bool,
pipeline_mirror_url: Optional[str] = None,
buildcache_mirror_url: Optional[str] = None,
sign_binaries: bool = False,
) -> List[PushResult]:
"""Create the buildcache at the provided mirror(s).

@@ -1967,12 +2033,10 @@ def create_buildcache(
input_spec: Installed spec to package and push
buildcache_mirror_url: URL for the buildcache mirror
pipeline_mirror_url: URL for the pipeline mirror
pr_pipeline: True if the CI job is for a PR
sign_binaries: Whether or not to sign buildcache entry

Returns: A list of PushResults, indicating success or failure.
"""
sign_binaries = pr_pipeline is False and can_sign_binaries()

results = []

# Create buildcache in either the main remote mirror, or in the

@@ -291,7 +291,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
if len(matching_specs) <= 1:
return

format_string = "{name}{@version}{%compiler}{arch=architecture}"
format_string = "{name}{@version}{%compiler.name}{@compiler.version}{arch=architecture}"
args = ["%s matches multiple packages." % spec, "Matching packages:"]
args += [
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
@@ -342,9 +342,9 @@ def iter_groups(specs, indent, all_headers):
print()

header = "%s{%s} / %s{%s}" % (
spack.spec.architecture_color,
spack.spec.ARCHITECTURE_COLOR,
architecture if architecture else "no arch",
spack.spec.compiler_color,
spack.spec.COMPILER_COLOR,
f"{compiler.display_str}" if compiler else "no compiler",
)

@@ -383,7 +383,7 @@ def display_specs(specs, args=None, **kwargs):
deps (bool): Display dependencies with specs
long (bool): Display short hashes with specs
very_long (bool): Display full hashes with specs (supersedes ``long``)
namespace (bool): Print namespaces along with names
namespaces (bool): Print namespaces along with names
show_flags (bool): Show compiler flags with specs
variants (bool): Show variants with specs
indent (int): indent each line this much
@@ -407,7 +407,7 @@ def get_arg(name, default=None):
paths = get_arg("paths", False)
deps = get_arg("deps", False)
hashes = get_arg("long", False)
namespace = get_arg("namespace", False)
namespaces = get_arg("namespaces", False)
flags = get_arg("show_flags", False)
full_compiler = get_arg("show_full_compiler", False)
variants = get_arg("variants", False)
@@ -428,7 +428,7 @@ def get_arg(name, default=None):

format_string = get_arg("format", None)
if format_string is None:
nfmt = "{fullname}" if namespace else "{name}"
nfmt = "{fullname}" if namespaces else "{name}"
ffmt = ""
if full_compiler or flags:
ffmt += "{%compiler.name}"
@@ -584,14 +584,14 @@ def require_active_env(cmd_name):

if env:
return env
else:
tty.die(
"`spack %s` requires an environment" % cmd_name,
"activate an environment first:",
" spack env activate ENV",
"or use:",
" spack -e ENV %s ..." % cmd_name,
)

tty.die(
"`spack %s` requires an environment" % cmd_name,
"activate an environment first:",
" spack env activate ENV",
"or use:",
" spack -e ENV %s ..." % cmd_name,
)


def find_environment(args):

@@ -47,7 +47,7 @@ def configs(parser, args):


def packages(parser, args):
pkgs = args.name or spack.repo.path.all_package_names()
pkgs = args.name or spack.repo.PATH.all_package_names()
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
_process_reports(reports)

@@ -57,7 +57,7 @@ def packages_https(parser, args):
if not args.check_all and not args.name:
tty.die("Please specify one or more packages to audit, or --all.")

pkgs = args.name or spack.repo.path.all_package_names()
pkgs = args.name or spack.repo.PATH.all_package_names()
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
_process_reports(reports)


@@ -126,7 +126,7 @@ def blame(parser, args):
blame_file = path

if not blame_file:
pkg_cls = spack.repo.path.get_pkg_class(args.package_or_file)
pkg_cls = spack.repo.PATH.get_pkg_class(args.package_or_file)
blame_file = pkg_cls.module.__file__.rstrip("c")  # .pyc -> .py

# get git blame for the package

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
import shutil
import sys
import tempfile

import llnl.util.filesystem
@@ -68,11 +69,10 @@

def _add_scope_option(parser):
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
parser.add_argument(
"--scope",
choices=scopes,
metavar=scopes_metavar,
metavar=spack.config.SCOPES_METAVAR,
help="configuration scope to read/modify",
)

@@ -169,7 +169,7 @@ def _reset(args):
if not ok_to_continue:
raise RuntimeError("Aborting")

for scope in spack.config.config.file_scopes:
for scope in spack.config.CONFIG.file_scopes:
# The default scope should stay untouched
if scope.name == "defaults":
continue
@@ -186,7 +186,7 @@ def _reset(args):
if os.path.exists(bootstrap_yaml):
shutil.move(bootstrap_yaml, backup_file)

spack.config.config.clear_caches()
spack.config.CONFIG.clear_caches()


def _root(args):
@@ -326,6 +326,7 @@ def _status(args):
if missing:
print(llnl.util.tty.color.colorize(legend))
print()
sys.exit(1)


def _add(args):

@@ -2,12 +2,14 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import glob
import json
import os
import shutil
import sys
import tempfile
from typing import List

import llnl.util.tty as tty
import llnl.util.tty.color as clr
@@ -18,7 +20,7 @@
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.hash_types as ht
import spack.error
import spack.mirror
import spack.relocate
import spack.repo
@@ -28,7 +30,6 @@
import spack.util.url as url_util
import spack.util.web as web_util
from spack.cmd import display_specs
from spack.error import SpecError
from spack.spec import Spec, save_dependency_specfiles
from spack.stage import Stage
from spack.util.string import plural
@@ -38,8 +39,8 @@
level = "long"


def setup_parser(subparser):
setup_parser.parser = subparser
def setup_parser(subparser: argparse.ArgumentParser):
setattr(setup_parser, "parser", subparser)
subparsers = subparser.add_subparsers(help="buildcache sub-commands")

push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
@@ -78,6 +79,11 @@ def setup_parser(subparser):
"Alternatively, one can decide to build a cache for only the package or only the "
"dependencies",
)
push.add_argument(
"--fail-fast",
action="store_true",
help="stop pushing on first failure (default is best effort)",
)
arguments.add_common_arguments(push, ["specs"])
push.set_defaults(func=push_fn)

@@ -105,7 +111,7 @@ def setup_parser(subparser):
install.set_defaults(func=install_fn)

listcache = subparsers.add_parser("list", help=list_fn.__doc__)
arguments.add_common_arguments(listcache, ["long", "very_long"])
arguments.add_common_arguments(listcache, ["long", "very_long", "namespaces"])
listcache.add_argument(
"-v",
"--variants",
@@ -149,23 +155,20 @@ def setup_parser(subparser):

# used to construct scope arguments below
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar

check.add_argument(
"--scope",
choices=scopes,
metavar=scopes_metavar,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope containing mirrors to check",
)

check.add_argument(
"-s", "--spec", default=None, help="check single spec instead of release specs file"
check_spec_or_specfile = check.add_mutually_exclusive_group(required=True)
check_spec_or_specfile.add_argument(
"-s", "--spec", help="check single spec instead of release specs file"
)

check.add_argument(
check_spec_or_specfile.add_argument(
"--spec-file",
default=None,
help="check single spec from json or yaml file instead of release specs file",
)
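The argparse pattern applied throughout this file, shown in isolation: a mutually exclusive group created with required=True forces exactly one of --spec/--spec-file.

    import argparse

    parser = argparse.ArgumentParser()
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("-s", "--spec", help="check a single spec")
    group.add_argument("--spec-file", help="check a spec from a json or yaml file")

    args = parser.parse_args(["--spec", "zlib@1.2.13"])
    print(args.spec, args.spec_file)  # zlib@1.2.13 None
    # parser.parse_args([]) or passing both flags would exit with an error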

@@ -173,16 +176,19 @@ def setup_parser(subparser):

# Download tarball and specfile
download = subparsers.add_parser("download", help=download_fn.__doc__)
download.add_argument(
"-s", "--spec", default=None, help="download built tarball for spec from mirror"
download_spec_or_specfile = download.add_mutually_exclusive_group(required=True)
download_spec_or_specfile.add_argument(
"-s", "--spec", help="download built tarball for spec from mirror"
)
download_spec_or_specfile.add_argument(
"--spec-file", help="download built tarball for spec (from json or yaml file) from mirror"
)
download.add_argument(
"--spec-file",
"-p",
"--path",
required=True,
default=None,
help="download built tarball for spec (from json or yaml file) from mirror",
)
download.add_argument(
"-p", "--path", default=None, help="path to directory where tarball should be downloaded"
help="path to directory where tarball should be downloaded",
)
download.set_defaults(func=download_fn)

@@ -190,32 +196,32 @@ def setup_parser(subparser):
getbuildcachename = subparsers.add_parser(
"get-buildcache-name", help=get_buildcache_name_fn.__doc__
)
getbuildcachename.add_argument(
"-s", "--spec", default=None, help="spec string for which buildcache name is desired"
getbuildcachename_spec_or_specfile = getbuildcachename.add_mutually_exclusive_group(
required=True
)
getbuildcachename.add_argument(
"--spec-file",
default=None,
help="path to spec json or yaml file for which buildcache name is desired",
getbuildcachename_spec_or_specfile.add_argument(
"-s", "--spec", help="spec string for which buildcache name is desired"
)
getbuildcachename_spec_or_specfile.add_argument(
"--spec-file", help="path to spec json or yaml file for which buildcache name is desired"
)
getbuildcachename.set_defaults(func=get_buildcache_name_fn)

# Given the root spec, save the yaml of the dependent spec to a file
savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
savespecfile.add_argument("--root-spec", default=None, help="root spec of dependent spec")
savespecfile.add_argument(
"--root-specfile",
default=None,
help="path to json or yaml file containing root spec of dependent spec",
savespecfile_spec_or_specfile = savespecfile.add_mutually_exclusive_group(required=True)
savespecfile_spec_or_specfile.add_argument("--root-spec", help="root spec of dependent spec")
savespecfile_spec_or_specfile.add_argument(
"--root-specfile", help="path to json or yaml file containing root spec of dependent spec"
)
savespecfile.add_argument(
"-s",
"--specs",
default=None,
required=True,
help="list of dependent specs for which saved yaml is desired",
)
savespecfile.add_argument(
"--specfile-dir", default=None, help="path to directory where spec yamls should be saved"
"--specfile-dir", required=True, help="path to directory where spec yamls should be saved"
)
savespecfile.set_defaults(func=save_specfile_fn)

@@ -257,54 +263,24 @@ def setup_parser(subparser):
update_index.set_defaults(func=update_index_fn)


def _matching_specs(specs, spec_file):
"""Return a list of matching specs read from either a spec file (JSON or YAML),
a query over the store or a query over the active environment.
"""
env = ev.active_environment()
hashes = env.all_hashes() if env else None
if spec_file:
return spack.store.specfile_matches(spec_file, hashes=hashes)

if specs:
constraints = spack.cmd.parse_specs(specs)
return spack.store.find(constraints, hashes=hashes)

if env:
return [concrete for _, concrete in env.concretized_specs()]

tty.die(
"build cache file creation requires at least one"
" installed package spec, an active environment,"
" or else a path to a json or yaml file containing a spec"
" to install"
)


def _concrete_spec_from_args(args):
spec_str, specfile_path = args.spec, args.spec_file

if not spec_str and not specfile_path:
tty.error("must provide either spec string or path to YAML or JSON specfile")
sys.exit(1)

if spec_str:
try:
constraints = spack.cmd.parse_specs(spec_str)
spec = spack.store.find(constraints)[0]
spec.concretize()
except SpecError as spec_error:
tty.error("Unable to concretize spec {0}".format(spec_str))
tty.debug(spec_error)
sys.exit(1)

return spec

return Spec.from_specfile(specfile_path)
def _matching_specs(specs: List[Spec]) -> List[Spec]:
"""Disambiguate specs and return a list of matching specs"""
return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]


def push_fn(args):
"""create a binary package and push it to a mirror"""
if args.spec_file:
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
"Use positional arguments instead."
)

if args.specs or args.spec_file:
specs = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
else:
specs = spack.cmd.require_active_env("buildcache push").all_specs()

mirror = arguments.mirror_name_or_url(args.mirror)

if args.allow_root:

@@ -315,7 +291,7 @@ def push_fn(args):
url = mirror.push_url

specs = bindist.specs_to_be_packaged(
_matching_specs(args.specs, args.spec_file),
specs,
root="package" in args.things_to_install,
dependencies="dependencies" in args.things_to_install,
)
@@ -326,6 +302,7 @@ def push_fn(args):
tty.info(f"Selected {len(specs)} specs to push to {url}")

skipped = []
failed = []

# tty printing
color = clr.get_color_when()
@@ -356,11 +333,17 @@ def push_fn(args):
except bindist.NoOverwriteException:
skipped.append(format_spec(spec))

# Catch any other exception unless the fail fast option is set
except Exception as e:
if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
raise
failed.append((format_spec(spec), e))

if skipped:
if len(specs) == 1:
tty.info("The spec is already in the buildcache. Use --force to overwrite it.")
elif len(skipped) == len(specs):
tty.info("All specs are already in the buildcache. Use --force to overwite them.")
tty.info("All specs are already in the buildcache. Use --force to overwrite them.")
else:
tty.info(
"The following {} specs were skipped as they already exist in the buildcache:\n"
@@ -370,6 +353,17 @@ def push_fn(args):
)
)

if failed:
if len(failed) == 1:
raise failed[0][1]

raise spack.error.SpackError(
f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
"\n".join(
elide_list([f"  {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
),
)


def install_fn(args):
"""install from a binary package"""
@@ -423,16 +417,21 @@ def preview_fn(args):
def check_fn(args):
"""check specs against remote binary mirror(s) to see if any need to be rebuilt

either a single spec from --spec, or else the full set of release specs. this command uses the
process exit code to indicate its result, specifically, if the exit code is non-zero, then at
least one of the indicated specs needs to be rebuilt
this command uses the process exit code to indicate its result, specifically, if the
exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
"""
if args.spec or args.spec_file:
specs = [_concrete_spec_from_args(args)]
if args.spec_file:
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
"Use --spec instead."
)

specs = spack.cmd.parse_specs(args.spec or args.spec_file)

if specs:
specs = _matching_specs(specs, specs)
else:
env = spack.cmd.require_active_env(cmd_name="buildcache")
env.concretize()
specs = env.all_specs()
specs = spack.cmd.require_active_env("buildcache check").all_specs()

if not specs:
tty.msg("No specs provided, exiting.")
@@ -462,26 +461,28 @@ def download_fn(args):
code indicates that the command failed to download at least one of the required buildcache
components
"""
if not args.spec and not args.spec_file:
tty.msg("No specs provided, exiting.")
return
if args.spec_file:
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
"Use --spec instead."
)

if not args.path:
tty.msg("No download path provided, exiting")
return
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))

spec = _concrete_spec_from_args(args)
result = bindist.download_single_spec(spec, args.path)
if len(specs) != 1:
tty.die("a single spec argument is required to download from a buildcache")

if not result:
if not bindist.download_single_spec(specs[0], args.path):
sys.exit(1)


def get_buildcache_name_fn(args):
"""get name (prefix) of buildcache entries for this spec"""
spec = _concrete_spec_from_args(args)
buildcache_name = bindist.tarball_name(spec, "")
print("{0}".format(buildcache_name))
tty.warn("This command is deprecated and will be removed in Spack 0.22.")
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
if len(specs) != 1:
tty.die("a single spec argument is required to get buildcache name")
print(bindist.tarball_name(specs[0], ""))


def save_specfile_fn(args):
@@ -491,29 +492,24 @@ def save_specfile_fn(args):
successful. if any errors or exceptions are encountered, or if expected command-line arguments
are not provided, then the exit code will be non-zero
"""
if not args.root_spec and not args.root_specfile:
tty.msg("No root spec provided, exiting.")
sys.exit(1)

if not args.specs:
tty.msg("No dependent specs provided, exiting.")
sys.exit(1)

if not args.specfile_dir:
tty.msg("No yaml directory provided, exiting.")
sys.exit(1)

if args.root_specfile:
with open(args.root_specfile) as fd:
root_spec_as_json = fd.read()
spec_format = "yaml" if args.root_specfile.endswith("yaml") else "json"
else:
root_spec = Spec(args.root_spec)
root_spec.concretize()
root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
spec_format = "json"
tty.warn(
"The flag `--root-specfile` is deprecated and will be removed in Spack 0.22. "
"Use --root-spec instead."
)

specs = spack.cmd.parse_specs(args.root_spec or args.root_specfile)

if len(specs) != 1:
tty.die("a single spec argument is required to save specfile")

root = specs[0]

if not root.concrete:
root.concretize()

save_dependency_specfiles(
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format
root, args.specfile_dir, dependencies=spack.cmd.parse_specs(args.specs)
)


@@ -4,18 +4,21 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import argparse
import re
import sys

import llnl.util.tty as tty
import llnl.util.lang
from llnl.util import tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.repo
import spack.spec
import spack.stage
import spack.util.crypto
from spack.package_base import deprecated_version, preferred_version
from spack.cmd.common import arguments
from spack.package_base import PackageBase, deprecated_version, preferred_version
from spack.util.editor import editor
from spack.util.format import get_version_lines
from spack.util.naming import valid_fully_qualified_module_name
from spack.version import Version

@@ -31,35 +34,38 @@ def setup_parser(subparser):
default=False,
help="don't clean up staging area when command completes",
)
sp = subparser.add_mutually_exclusive_group()
sp.add_argument(
subparser.add_argument(
"-b",
"--batch",
action="store_true",
default=False,
help="don't ask which versions to checksum",
)
sp.add_argument(
subparser.add_argument(
"-l",
"--latest",
action="store_true",
default=False,
help="checksum the latest available version only",
help="checksum the latest available version",
)
sp.add_argument(
subparser.add_argument(
"-p",
"--preferred",
action="store_true",
default=False,
help="checksum the preferred version only",
help="checksum the known Spack preferred version",
)
subparser.add_argument(
modes_parser = subparser.add_mutually_exclusive_group()
modes_parser.add_argument(
"-a",
"--add-to-package",
action="store_true",
default=False,
help="add new versions to package",
)
modes_parser.add_argument(
"--verify", action="store_true", default=False, help="verify known package checksums"
)
arguments.add_common_arguments(subparser, ["package"])
subparser.add_argument(
"versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
@@ -77,89 +83,174 @@ def checksum(parser, args):
tty.die("`spack checksum` accepts package names, not URLs.")

# Get the package we're going to generate checksums for
pkg_cls = spack.repo.path.get_pkg_class(args.package)
pkg_cls = spack.repo.PATH.get_pkg_class(args.package)
pkg = pkg_cls(spack.spec.Spec(args.package))

# Build a list of versions to checksum
versions = [Version(v) for v in args.versions]

# Define placeholder for remote versions.
# This'll help reduce redundant work if we need to check for the existence
# of remote versions more than once.
remote_versions = None

# Add latest version if requested
if args.latest:
remote_versions = pkg.fetch_remote_versions()
if len(remote_versions) > 0:
latest_version = sorted(remote_versions.keys(), reverse=True)[0]
versions.append(latest_version)

# Add preferred version if requested
if args.preferred:
versions.append(preferred_version(pkg))

# Store a dict of the form version -> URL
url_dict = {}
if not args.versions and args.preferred:
versions = [preferred_version(pkg)]
else:
versions = [Version(v) for v in args.versions]

if versions:
remote_versions = None
for version in versions:
if deprecated_version(pkg, version):
tty.warn("Version {0} is deprecated".format(version))
for version in versions:
if deprecated_version(pkg, version):
tty.warn(f"Version {version} is deprecated")

url = pkg.find_valid_url_for_version(version)
if url is not None:
url_dict[version] = url
continue
# if we get here, it's because no valid url was provided by the package
# do expensive fallback to try to recover
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions()
if version in remote_versions:
url_dict[version] = remote_versions[version]
else:
url_dict = pkg.fetch_remote_versions()
url = pkg.find_valid_url_for_version(version)
if url is not None:
url_dict[version] = url
continue
# if we get here, it's because no valid url was provided by the package
# do expensive fallback to try to recover
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions()
if version in remote_versions:
url_dict[version] = remote_versions[version]

if len(versions) <= 0:
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions()
url_dict = remote_versions

if not url_dict:
tty.die("Could not find any remote versions for {0}".format(pkg.name))
tty.die(f"Could not find any remote versions for {pkg.name}")

version_lines = spack.stage.get_checksums_for_versions(
# print an empty line to create a new output section block
print()

version_hashes = spack.stage.get_checksums_for_versions(
url_dict,
pkg.name,
keep_stage=args.keep_stage,
batch=(args.batch or len(args.versions) > 0 or len(url_dict) == 1),
latest=args.latest,
batch=(args.batch or len(versions) > 0 or len(url_dict) == 1),
fetch_options=pkg.fetch_options,
)

if args.verify:
print_checksum_status(pkg, version_hashes)
sys.exit(0)

# convert dict into package.py version statements
version_lines = get_version_lines(version_hashes, url_dict)
print()
print(version_lines)
print()

if args.add_to_package:
filename = spack.repo.path.filename_for_package_name(pkg.name)
# Make sure we also have a newline after the last version
versions = [v + "\n" for v in version_lines.splitlines()]
versions.append("\n")
# We need to insert the versions in reversed order
versions.reverse()
versions.append("    # FIXME: Added by `spack checksum`\n")
version_line = None
add_versions_to_package(pkg, version_lines)

with open(filename, "r") as f:
lines = f.readlines()
for i in range(len(lines)):
# Black is drunk, so this is what it looks like for now
# See https://github.com/psf/black/issues/2156 for more information
if lines[i].startswith("    # FIXME: Added by `spack checksum`") or lines[
i
].startswith("    version("):
version_line = i
break

if version_line is not None:
for v in versions:
lines.insert(version_line, v)
def print_checksum_status(pkg: PackageBase, version_hashes: dict):
"""
Verify checksums present in version_hashes against those present
in the package's instructions.

with open(filename, "w") as f:
f.writelines(lines)
Args:
pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
version_hashes (dict): A dictionary of the form: version -> checksum.

msg = "opening editor to verify"
"""
results = []
num_verified = 0
failed = False

if not sys.stdout.isatty():
msg = "please verify"
max_len = max(len(str(v)) for v in version_hashes)
num_total = len(version_hashes)

tty.info(
"Added {0} new versions to {1}, "
"{2}.".format(len(versions) - 2, args.package, msg)
)
for version, sha in version_hashes.items():
if version not in pkg.versions:
msg = "No previous checksum"
status = "-"

elif sha == pkg.versions[version]["sha256"]:
msg = "Correct"
status = "="
num_verified += 1

if sys.stdout.isatty():
editor(filename)
else:
tty.warn("Could not add new versions to {0}.".format(args.package))
msg = sha
status = "x"
failed = True

results.append("{0:{1}}  {2} {3}".format(str(version), max_len, f"[{status}]", msg))

# Display table of checksum results.
tty.msg(f"Verified {num_verified} of {num_total}", "", *llnl.util.lang.elide_list(results), "")

# Terminate at the end of function to prevent additional output.
if failed:
print()
tty.die("Invalid checksums found.")


def add_versions_to_package(pkg: PackageBase, version_lines: str):
"""
Add checksummed versions to a package's instructions and open a user's
editor so they may double check the work of the function.

Args:
pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
version_lines (str): A string of rendered version lines.

"""
# Get filename and path for package
filename = spack.repo.PATH.filename_for_package_name(pkg.name)
num_versions_added = 0

version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')

# Split rendered version lines into tuple of (version, version_line)
# We reverse sort here to make sure the versions match the version_lines
new_versions = []
for ver_line in version_lines.split("\n"):
match = version_re.match(ver_line)
if match:
new_versions.append((Version(match.group(1)), ver_line))

with open(filename, "r+") as f:
contents = f.read()
split_contents = version_statement_re.split(contents)

for i, subsection in enumerate(split_contents):
# If there are no more versions to add we should exit
if len(new_versions) <= 0:
break

# Check if the section contains a version
contents_version = version_re.match(subsection)
if contents_version is not None:
parsed_version = Version(contents_version.group(1))

if parsed_version < new_versions[0][0]:
split_contents[i:i] = [new_versions.pop(0)[1], "  # FIX ME", "\n"]
num_versions_added += 1

elif parsed_version == new_versions[0][0]:
new_versions.pop(0)

# Seek back to the start of the file so we can rewrite the file contents.
f.seek(0)
f.writelines("".join(split_contents))

tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
tty.msg(f"Open {filename} to review the additions.")

if sys.stdout.isatty():
editor(filename)
|
||||
|
||||
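Note on the hunk above: add_versions_to_package depends on re.split keeping the matched version() statements in its output, which happens because the pattern is wrapped in a capturing group. A minimal, self-contained sketch of that behavior (the package text here is hypothetical):

    import re

    version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
    contents = (
        "class Zlib(Package):\n"
        '    version("1.2.13", sha256="aaaa")\n'
        '    version("1.2.12", sha256="bbbb")\n'
    )
    # With the capturing group, the separators (the version() statements)
    # are interleaved with the surrounding text instead of being dropped:
    parts = version_statement_re.split(contents)
    # parts == ['class Zlib(Package):\n',
    #           '    version("1.2.13", sha256="aaaa")', '\n',
    #           '    version("1.2.12", sha256="bbbb")', '\n']
    # so split_contents[i:i] = [...] can splice a new version() line in
    # front of the first statement whose parsed version is older.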
@@ -18,6 +18,7 @@
 import spack.environment as ev
 import spack.hash_types as ht
 import spack.mirror
+import spack.util.gpg as gpg_util
 import spack.util.url as url_util
 import spack.util.web as web_util

@@ -155,11 +156,27 @@ def setup_parser(subparser):
         help=spack.cmd.first_line(ci_reproduce.__doc__),
     )
     reproduce.add_argument("job_url", help="URL of job artifacts bundle")
+    reproduce.add_argument(
+        "--runtime",
+        help="Container runtime to use.",
+        default="docker",
+        choices=["docker", "podman"],
+    )
     reproduce.add_argument(
         "--working-dir",
         help="where to unpack artifacts",
         default=os.path.join(os.getcwd(), "ci_reproduction"),
     )
+    reproduce.add_argument(
+        "-s", "--autostart", help="Run docker reproducer automatically", action="store_true"
+    )
+    gpg_group = reproduce.add_mutually_exclusive_group(required=False)
+    gpg_group.add_argument(
+        "--gpg-file", help="Path to public GPG key for validating binary cache installs"
+    )
+    gpg_group.add_argument(
+        "--gpg-url", help="URL to public GPG key for validating binary cache installs"
+    )

     reproduce.set_defaults(func=ci_reproduce)

@@ -270,6 +287,17 @@ def ci_rebuild(args):
     spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME")
     shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL")
     rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
+    require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")
+
+    # If signing key was provided via "SPACK_SIGNING_KEY", then try to import it.
+    if signing_key:
+        spack_ci.import_signing_key(signing_key)
+
+    # Fail early if signing is required but we don't have a signing key
+    sign_binaries = require_signing is not None and require_signing.lower() == "true"
+    if sign_binaries and not spack_ci.can_sign_binaries():
+        gpg_util.list(False, True)
+        tty.die("SPACK_REQUIRE_SIGNING=True => spack must have exactly one signing key")

     # Construct absolute paths relative to current $CI_PROJECT_DIR
     ci_project_dir = os.environ.get("CI_PROJECT_DIR")

@@ -394,11 +422,6 @@ def ci_rebuild(args):
         dst_file = os.path.join(repro_dir, file_name)
         shutil.copyfile(src_file, dst_file)

-    # If signing key was provided via "SPACK_SIGNING_KEY", then try to
-    # import it.
-    if signing_key:
-        spack_ci.import_signing_key(signing_key)
-
     # Write this job's spec json into the reproduction directory, and it will
     # also be used in the generated "spack install" command to install the spec
     tty.debug("job concrete spec path: {0}".format(job_spec_json_path))

@@ -655,7 +678,7 @@ def ci_rebuild(args):
         input_spec=job_spec,
         buildcache_mirror_url=buildcache_mirror_url,
         pipeline_mirror_url=pipeline_mirror_url,
-        pr_pipeline=spack_is_pr_pipeline,
+        sign_binaries=spack_ci.can_sign_binaries(),
     ):
         msg = tty.msg if result.success else tty.warn
         msg(

@@ -699,7 +722,7 @@ def ci_rebuild(args):

 \033[34mTo reproduce this build locally, run:

-  spack ci reproduce-build {0} [--working-dir <dir>]
+  spack ci reproduce-build {0} [--working-dir <dir>] [--autostart]

 If this project does not have public pipelines, you will need to first:


@@ -725,8 +748,18 @@ def ci_reproduce(args):
     """
     job_url = args.job_url
     work_dir = args.working_dir
+    autostart = args.autostart
+    runtime = args.runtime

-    return spack_ci.reproduce_ci_job(job_url, work_dir)
+    # Allow passing GPG key for reprocuding protected CI jobs
+    if args.gpg_file:
+        gpg_key_url = url_util.path_to_file_url(args.gpg_file)
+    elif args.gpg_url:
+        gpg_key_url = args.gpg_url
+    else:
+        gpg_key_url = None
+
+    return spack_ci.reproduce_ci_job(job_url, work_dir, autostart, gpg_key_url, runtime)


 def ci(parser, args):
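Note on ci_reproduce above: the --gpg-file branch converts a local key path into a URL before handing it to reproduce_ci_job, so both GPG sources arrive as URLs. The exact helper lives in spack.util.url; a rough standard-library sketch of what path_to_file_url is assumed to do (an assumption, not Spack's implementation):

    import os
    import urllib.parse
    import urllib.request

    def path_to_file_url(path: str) -> str:
        # Absolutize first so a relative CLI argument still yields a
        # usable file:// URL.
        return urllib.parse.urljoin(
            "file:", urllib.request.pathname2url(os.path.abspath(path))
        )

    # path_to_file_url("key.pub") -> roughly "file:///<cwd>/key.pub"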
@@ -17,6 +17,7 @@
 import spack.config
 import spack.repo
 import spack.stage
+import spack.store
 import spack.util.path
 from spack.paths import lib_path, var_path

@@ -114,22 +115,18 @@ def clean(parser, args):
     if args.stage:
         tty.msg("Removing all temporary build stages")
         spack.stage.purge()
-        # Temp directory where buildcaches are extracted
-        extract_tmp = os.path.join(spack.store.STORE.layout.root, ".tmp")
-        if os.path.exists(extract_tmp):
-            tty.debug("Removing {0}".format(extract_tmp))
-            shutil.rmtree(extract_tmp)

     if args.downloads:
         tty.msg("Removing cached downloads")
-        spack.caches.fetch_cache.destroy()
+        spack.caches.FETCH_CACHE.destroy()

     if args.failures:
         tty.msg("Removing install failure marks")
-        spack.installer.clear_failures()
+        spack.store.STORE.failure_tracker.clear_all()

     if args.misc_cache:
         tty.msg("Removing cached information on repositories")
-        spack.caches.misc_cache.destroy()
+        spack.caches.MISC_CACHE.destroy()

     if args.python_cache:
         tty.msg("Removing python cache files")
@@ -36,13 +36,13 @@
     "bash": {
         "aliases": True,
         "format": "bash",
-        "header": os.path.join(spack.paths.share_path, "bash", "spack-completion.in"),
+        "header": os.path.join(spack.paths.share_path, "bash", "spack-completion.bash"),
         "update": os.path.join(spack.paths.share_path, "spack-completion.bash"),
     },
     "fish": {
         "aliases": True,
         "format": "fish",
-        "header": os.path.join(spack.paths.share_path, "fish", "spack-completion.in"),
+        "header": os.path.join(spack.paths.share_path, "fish", "spack-completion.fish"),
         "update": os.path.join(spack.paths.share_path, "spack-completion.fish"),
     },
 }
@@ -331,6 +331,17 @@ def tags():
     )


+@arg
+def namespaces():
+    return Args(
+        "-N",
+        "--namespaces",
+        action="store_true",
+        default=False,
+        help="show fully qualified package names",
+    )
+
+
 @arg
 def jobs():
     return Args(
@@ -24,7 +24,6 @@ def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="compiler_command")

     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     # Find
     find_parser = sp.add_parser(

@@ -36,7 +35,7 @@ def setup_parser(subparser):
     find_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope("compilers"),
         help="configuration scope to modify",
     )

@@ -50,7 +49,7 @@ def setup_parser(subparser):
     remove_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=None,
         help="configuration scope to modify",
     )

@@ -60,7 +59,7 @@ def setup_parser(subparser):
     list_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )

@@ -71,7 +70,7 @@ def setup_parser(subparser):
     info_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )

@@ -93,7 +92,7 @@ def compiler_find(args):
     n = len(new_compilers)
     s = "s" if n > 1 else ""

-    config = spack.config.config
+    config = spack.config.CONFIG
     filename = config.get_config_filename(args.scope, "compilers")
     tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
     colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)

@@ -186,7 +185,7 @@ def compiler_list(args):
         os_str = os
         if target:
             os_str += "-%s" % target
-        cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
+        cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
         tty.hline(colorize(cname), char="-")
         colify(reversed(sorted(c.spec.display_str for c in compilers)))
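Note on the --scope options above: each one pairs choices= with an explicit metavar. This relies on stock argparse behavior: without metavar the help output enumerates every generated scope name, while metavar substitutes the compact SCOPES_METAVAR placeholder and choices still validates input. A self-contained sketch (scope names are illustrative):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--scope",
        choices=["defaults", "system", "site", "user"],
        # Shown in --help instead of the full choices list; values are
        # still checked against choices.
        metavar="{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT",
        help="configuration scope to modify",
    )
    args = parser.parse_args(["--scope", "user"])
    assert args.scope == "user"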
@@ -13,12 +13,11 @@

 def setup_parser(subparser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     subparser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )


@@ -27,13 +27,12 @@

 def setup_parser(subparser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     # User can only choose one
     subparser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )

@@ -45,7 +44,7 @@ def setup_parser(subparser):
         help="configuration section to print\n\noptions: %(choices)s",
         nargs="?",
         metavar="section",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )

     blame_parser = sp.add_parser(

@@ -55,7 +54,7 @@ def setup_parser(subparser):
         "section",
         help="configuration section to print\n\noptions: %(choices)s",
         metavar="section",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )

     edit_parser = sp.add_parser("edit", help="edit configuration file")

@@ -64,7 +63,7 @@ def setup_parser(subparser):
         help="configuration section to edit\n\noptions: %(choices)s",
         metavar="section",
         nargs="?",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )
     edit_parser.add_argument(
         "--print-file", action="store_true", help="print the file name that would be edited"

@@ -146,10 +145,10 @@ def config_get(args):
     scope, section = _get_scope_and_section(args)

     if section is not None:
-        spack.config.config.print_section(section)
+        spack.config.CONFIG.print_section(section)

     elif scope and scope.startswith("env:"):
-        config_file = spack.config.config.get_config_filename(scope, section)
+        config_file = spack.config.CONFIG.get_config_filename(scope, section)
         if os.path.exists(config_file):
             with open(config_file) as f:
                 print(f.read())

@@ -162,7 +161,7 @@ def config_get(args):

 def config_blame(args):
     """Print out line-by-line blame of merged YAML."""
-    spack.config.config.print_section(args.section, blame=True)
+    spack.config.CONFIG.print_section(args.section, blame=True)


 def config_edit(args):

@@ -181,7 +180,7 @@ def config_edit(args):
     scope, section = _get_scope_and_section(args)
     if not scope and not section:
         tty.die("`spack config edit` requires a section argument or an active environment.")
-    config_file = spack.config.config.get_config_filename(scope, section)
+    config_file = spack.config.CONFIG.get_config_filename(scope, section)

     if args.print_file:
         print(config_file)

@@ -194,7 +193,7 @@ def config_list(args):

     Used primarily for shell tab completion scripts.
     """
-    print(" ".join(list(spack.config.section_schemas)))
+    print(" ".join(list(spack.config.SECTION_SCHEMAS)))


 def config_add(args):

@@ -251,19 +250,19 @@ def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):

 def config_update(args):
     # Read the configuration files
-    spack.config.config.get_config(args.section, scope=args.scope)
+    spack.config.CONFIG.get_config(args.section, scope=args.scope)
     updates: List[spack.config.ConfigScope] = list(
         filter(
             lambda s: not isinstance(
                 s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
             ),
-            spack.config.config.format_updates[args.section],
+            spack.config.CONFIG.format_updates[args.section],
         )
     )

     cannot_overwrite, skip_system_scope = [], False
     for scope in updates:
-        cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
         can_be_updated = _can_update_config_file(scope, cfg_file)
         if not can_be_updated:
             if scope.name == "system":

@@ -302,7 +301,7 @@ def config_update(args):
         " the latest schema format:\n\n"
     )
     for scope in updates:
-        cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
         msg += "\t[scope={0}, file={1}]\n".format(scope.name, cfg_file)
     msg += (
         "\nIf the configuration files are updated, versions of Spack "

@@ -325,7 +324,7 @@ def config_update(args):
         # Make a backup copy and rewrite the file
         bkp_file = cfg_file + ".bkp"
         shutil.copy(cfg_file, bkp_file)
-        spack.config.config.update_config(args.section, data, scope=scope.name, force=True)
+        spack.config.CONFIG.update_config(args.section, data, scope=scope.name, force=True)
         tty.msg(f'File "{cfg_file}" update [backup={bkp_file}]')

@@ -337,13 +336,13 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):


 def config_revert(args):
-    scopes = [args.scope] if args.scope else [x.name for x in spack.config.config.file_scopes]
+    scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]

     # Search for backup files in the configuration scopes
     Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])
     to_be_restored, cannot_overwrite = [], []
     for scope in scopes:
-        cfg_file = spack.config.config.get_config_filename(scope, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope, args.section)
         bkp_file = cfg_file + ".bkp"

         # If the backup files doesn't exist move to the next scope

@@ -457,7 +456,7 @@ def config_prefer_upstream(args):
         existing = spack.config.get("packages", scope=scope)
         new = spack.config.merge_yaml(existing, pkgs)
         spack.config.set("packages", new, scope)
-        config_file = spack.config.config.get_config_filename(scope, section)
+        config_file = spack.config.CONFIG.get_config_filename(scope, section)

         tty.msg("Updated config at {0}".format(config_file))
@@ -17,6 +17,7 @@
 from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
 from spack.util.editor import editor
 from spack.util.executable import ProcessError, which
+from spack.util.format import get_version_lines
 from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name

 description = "create a new package file"

@@ -832,13 +833,15 @@ def get_versions(args, name):
         version = parse_version(args.url)
         url_dict = {version: args.url}

-        versions = spack.stage.get_checksums_for_versions(
+        version_hashes = spack.stage.get_checksums_for_versions(
             url_dict,
             name,
             first_stage_function=guesser,
             keep_stage=args.keep_stage,
             batch=(args.batch or len(url_dict) == 1),
         )

+        versions = get_version_lines(version_hashes, url_dict)
     else:
         versions = unhashed_versions

@@ -912,11 +915,11 @@ def get_repository(args, name):
         )
     else:
         if spec.namespace:
-            repo = spack.repo.path.get_repo(spec.namespace, None)
+            repo = spack.repo.PATH.get_repo(spec.namespace, None)
             if not repo:
                 tty.die("Unknown namespace: '{0}'".format(spec.namespace))
         else:
-            repo = spack.repo.path.first_repo()
+            repo = spack.repo.PATH.first_repo()

     # Set the namespace on the spec if it's not there already
     if not spec.namespace:
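Note on get_versions above: checksumming and rendering are now two steps. get_checksums_for_versions returns a version -> sha256 mapping, and get_version_lines turns that mapping into the version() directives that the checksum machinery splices into package files. The rendered string is presumably a block of lines like the following (values illustrative only, not real checksums):

    version("1.2.13", sha256="<sha256 of the 1.2.13 archive>")
    version("1.2.12", sha256="<sha256 of the 1.2.12 archive>")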
@@ -47,14 +47,14 @@ def inverted_dependencies():
     actual dependents.
     """
     dag = {}
-    for pkg_cls in spack.repo.path.all_package_classes():
+    for pkg_cls in spack.repo.PATH.all_package_classes():
         dag.setdefault(pkg_cls.name, set())
         for dep in pkg_cls.dependencies:
             deps = [dep]

             # expand virtuals if necessary
-            if spack.repo.path.is_virtual(dep):
-                deps += [s.name for s in spack.repo.path.providers_for(dep)]
+            if spack.repo.PATH.is_virtual(dep):
+                deps += [s.name for s in spack.repo.PATH.providers_for(dep)]

             for d in deps:
                 dag.setdefault(d, set()).add(pkg_cls.name)

@@ -98,7 +98,7 @@ def dev_build(self, args):
         tty.die("spack dev-build only takes one spec.")

     spec = specs[0]
-    if not spack.repo.path.exists(spec.name):
+    if not spack.repo.PATH.exists(spec.name):
         tty.die(
             "No package for '{0}' was found.".format(spec.name),
             " Use `spack create` to create a new package",

@@ -31,9 +31,9 @@ def edit_package(name, repo_path, namespace):
     if repo_path:
         repo = spack.repo.Repo(repo_path)
     elif namespace:
-        repo = spack.repo.path.get_repo(namespace)
+        repo = spack.repo.PATH.get_repo(namespace)
     else:
-        repo = spack.repo.path
+        repo = spack.repo.PATH
     path = repo.filename_for_package_name(name)

     spec = Spec(name)

@@ -58,7 +58,7 @@ def extensions(parser, args):

     extendable_pkgs = []
     for name in spack.repo.all_package_names():
-        pkg_cls = spack.repo.path.get_pkg_class(name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(name)
         if pkg_cls.extendable:
             extendable_pkgs.append(name)

@@ -81,7 +81,7 @@ def extensions(parser, args):

     if args.show in ("packages", "all"):
         # List package names of extensions
-        extensions = spack.repo.path.extensions_for(spec)
+        extensions = spack.repo.PATH.extensions_for(spec)
         if not extensions:
             tty.msg("%s has no extensions." % spec.cshort_spec)
         else:
@@ -13,6 +13,7 @@
 import spack
 import spack.cmd
+import spack.cmd.common.arguments
 import spack.config
 import spack.cray_manifest as cray_manifest
 import spack.detection
 import spack.error

@@ -27,7 +28,6 @@ def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="external_command")

     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     find_parser = sp.add_parser("find", help="add external packages to packages.yaml")
     find_parser.add_argument(

@@ -47,7 +47,7 @@ def setup_parser(subparser):
     find_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope("packages"),
         help="configuration scope to modify",
     )

@@ -133,9 +133,9 @@ def external_find(args):

     # Add the packages that have been required explicitly
     if args.packages:
-        pkg_cls_to_check = [spack.repo.path.get_pkg_class(pkg) for pkg in args.packages]
+        pkg_cls_to_check = [spack.repo.PATH.get_pkg_class(pkg) for pkg in args.packages]
         if args.tags:
-            allowed = set(spack.repo.path.packages_with_tags(*args.tags))
+            allowed = set(spack.repo.PATH.packages_with_tags(*args.tags))
             pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]

     if args.tags and not pkg_cls_to_check:

@@ -144,15 +144,15 @@ def external_find(args):
         # Since tags are cached it's much faster to construct what we need
         # to search directly, rather than filtering after the fact
         pkg_cls_to_check = [
-            spack.repo.path.get_pkg_class(pkg_name)
+            spack.repo.PATH.get_pkg_class(pkg_name)
             for tag in args.tags
-            for pkg_name in spack.repo.path.packages_with_tags(tag)
+            for pkg_name in spack.repo.PATH.packages_with_tags(tag)
         ]
         pkg_cls_to_check = list(set(pkg_cls_to_check))

     # If the list of packages is empty, search for every possible package
     if not args.tags and not pkg_cls_to_check:
-        pkg_cls_to_check = list(spack.repo.path.all_package_classes())
+        pkg_cls_to_check = list(spack.repo.PATH.all_package_classes())

     # If the user specified any packages to exclude from external find, add them here
     if args.exclude:

@@ -165,7 +165,7 @@ def external_find(args):
         detected_packages, scope=args.scope, buildable=not args.not_buildable
     )
     if new_entries:
-        path = spack.config.config.get_config_filename(args.scope, "packages")
+        path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
         msg = "The following specs have been detected on this system and added to {0}"
         tty.msg(msg.format(path))
         spack.cmd.display_specs(new_entries)

@@ -239,7 +239,7 @@ def _collect_and_consume_cray_manifest_files(

 def external_list(args):
     # Trigger a read of all packages, might take a long time.
-    list(spack.repo.path.all_package_classes())
+    list(spack.repo.PATH.all_package_classes())
     # Print all the detectable packages
     tty.msg("Detectable packages per repository")
     for namespace, pkgs in sorted(spack.package_base.detectable_packages.items()):
@@ -67,7 +67,7 @@ def setup_parser(subparser):
         help="do not group specs by arch/compiler",
     )

-    arguments.add_common_arguments(subparser, ["long", "very_long", "tags"])
+    arguments.add_common_arguments(subparser, ["long", "very_long", "tags", "namespaces"])

     subparser.add_argument(
         "-c",

@@ -140,9 +140,6 @@ def setup_parser(subparser):
     subparser.add_argument(
         "--only-deprecated", action="store_true", help="show only deprecated packages"
     )
-    subparser.add_argument(
-        "-N", "--namespace", action="store_true", help="show fully qualified package names"
-    )

     subparser.add_argument("--start-date", help="earliest date of installation [YYYY-MM-DD]")
     subparser.add_argument("--end-date", help="latest date of installation [YYYY-MM-DD]")

@@ -230,7 +227,7 @@ def display_env(env, args, decorator, results):
         env.user_specs,
         root_args,
         decorator=lambda s, f: color.colorize("@*{%s}" % f),
-        namespace=True,
+        namespaces=True,
         show_flags=True,
         show_full_compiler=True,
         variants=True,

@@ -271,7 +268,7 @@ def find(parser, args):

     # If tags have been specified on the command line, filter by tags
     if args.tags:
-        packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
+        packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
         results = [x for x in results if x.name in packages_with_tags]

     if args.loaded:

@@ -64,11 +64,11 @@ def section_title(s):


 def version(s):
-    return spack.spec.version_color + s + plain_format
+    return spack.spec.VERSION_COLOR + s + plain_format


 def variant(s):
-    return spack.spec.enabled_variant_color + s + plain_format
+    return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format


 class VariantFormatter:

@@ -349,7 +349,7 @@ def print_virtuals(pkg):

 def info(parser, args):
     spec = spack.spec.Spec(args.package)
-    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
     pkg = pkg_cls(spec)

     # Output core package information
@@ -107,7 +107,7 @@ def match(p, f):
         if f.match(p):
             return True

-        pkg_cls = spack.repo.path.get_pkg_class(p)
+        pkg_cls = spack.repo.PATH.get_pkg_class(p)
         if pkg_cls.__doc__:
             return f.match(pkg_cls.__doc__)
         return False

@@ -159,7 +159,7 @@ def get_dependencies(pkg):
 @formatter
 def version_json(pkg_names, out):
     """Print all packages with their latest versions."""
-    pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
+    pkg_classes = [spack.repo.PATH.get_pkg_class(name) for name in pkg_names]

     out.write("[\n")

@@ -201,7 +201,7 @@ def html(pkg_names, out):
     """

     # Read in all packages
-    pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
+    pkg_classes = [spack.repo.PATH.get_pkg_class(name) for name in pkg_names]

     # Start at 2 because the title of the page from Sphinx is id1.
     span_id = 2

@@ -313,13 +313,13 @@ def list(parser, args):

     # If tags have been specified on the command line, filter by tags
     if args.tags:
-        packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
+        packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
         sorted_packages = [p for p in sorted_packages if p in packages_with_tags]

     if args.update:
         # change output stream if user asked for update
         if os.path.exists(args.update):
-            if os.path.getmtime(args.update) > spack.repo.path.last_mtime():
+            if os.path.getmtime(args.update) > spack.repo.PATH.last_mtime():
                 tty.msg("File is up to date: %s" % args.update)
                 return

@@ -109,7 +109,7 @@ def location(parser, args):
         return

     if args.packages:
-        print(spack.repo.path.first_repo().root)
+        print(spack.repo.PATH.first_repo().root)
         return

     if args.stages:

@@ -135,7 +135,7 @@ def location(parser, args):

     # Package dir just needs the spec name
     if args.package_dir:
-        print(spack.repo.path.dirname_for_package_name(spec.name))
+        print(spack.repo.PATH.dirname_for_package_name(spec.name))
         return

     # Either concretize or filter from already concretized environment
@@ -54,11 +54,11 @@ def setup_parser(subparser):

 def packages_to_maintainers(package_names=None):
     if not package_names:
-        package_names = spack.repo.path.all_package_names()
+        package_names = spack.repo.PATH.all_package_names()

     pkg_to_users = defaultdict(lambda: set())
     for name in package_names:
-        cls = spack.repo.path.get_pkg_class(name)
+        cls = spack.repo.PATH.get_pkg_class(name)
         for user in cls.maintainers:
             pkg_to_users[name].add(user)

@@ -67,8 +67,8 @@ def packages_to_maintainers(package_names=None):

 def maintainers_to_packages(users=None):
     user_to_pkgs = defaultdict(lambda: [])
-    for name in spack.repo.path.all_package_names():
-        cls = spack.repo.path.get_pkg_class(name)
+    for name in spack.repo.PATH.all_package_names():
+        cls = spack.repo.PATH.get_pkg_class(name)
         for user in cls.maintainers:
             lower_users = [u.lower() for u in users]
             if not users or user.lower() in lower_users:

@@ -80,8 +80,8 @@ def maintainers_to_packages(users=None):
 def maintained_packages():
     maintained = []
     unmaintained = []
-    for name in spack.repo.path.all_package_names():
-        cls = spack.repo.path.get_pkg_class(name)
+    for name in spack.repo.PATH.all_package_names():
+        cls = spack.repo.PATH.get_pkg_class(name)
         if cls.maintainers:
             maintained.append(name)
         else:
@@ -90,7 +90,6 @@ def setup_parser(subparser):

     # used to construct scope arguments below
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     # Add
     add_parser = sp.add_parser("add", help=mirror_add.__doc__)

@@ -99,7 +98,7 @@ def setup_parser(subparser):
     add_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )

@@ -119,7 +118,7 @@ def setup_parser(subparser):
     remove_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )

@@ -138,7 +137,7 @@ def setup_parser(subparser):
     set_url_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )

@@ -167,7 +166,7 @@ def setup_parser(subparser):
     set_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )

@@ -178,7 +177,7 @@ def setup_parser(subparser):
     list_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )

@@ -474,7 +473,7 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
         path, skip_unstable_versions=skip_unstable_versions
     )
     for candidate in mirror_specs:
-        pkg_cls = spack.repo.path.get_pkg_class(candidate.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(candidate.name)
         pkg_obj = pkg_cls(spack.spec.Spec(candidate))
         mirror_stats.next_spec(pkg_obj.spec)
         spack.mirror.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
@@ -309,7 +309,7 @@ def refresh(module_type, specs, args):

     # Skip unknown packages.
     writers = [
-        cls(spec, args.module_set_name) for spec in specs if spack.repo.path.exists(spec.name)
+        cls(spec, args.module_set_name) for spec in specs if spack.repo.PATH.exists(spec.name)
     ]

     # Filter excluded packages early

@@ -321,12 +321,13 @@ def refresh(module_type, specs, args):
             file2writer[item.layout.filename].append(item)

     if len(file2writer) != len(writers):
+        spec_fmt_str = "{name}@={version}%{compiler}/{hash:7} {variants} arch={arch}"
         message = "Name clashes detected in module files:\n"
         for filename, writer_list in file2writer.items():
             if len(writer_list) > 1:
                 message += "\nfile: {0}\n".format(filename)
                 for x in writer_list:
-                    message += "spec: {0}\n".format(x.spec.format())
+                    message += "spec: {0}\n".format(x.spec.format(spec_fmt_str))
         tty.error(message)
         tty.error("Operation aborted")
         raise SystemExit(1)

@@ -376,7 +377,7 @@ def refresh(module_type, specs, args):
 def modules_cmd(parser, args, module_type, callbacks=callbacks):
     # Qualifiers to be used when querying the db for specs
     constraint_qualifiers = {
-        "refresh": {"installed": True, "known": lambda x: not spack.repo.path.exists(x)}
+        "refresh": {"installed": True, "known": lambda x: not spack.repo.PATH.exists(x)}
     }
     query_args = constraint_qualifiers.get(args.subparser_name, {})
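Note on the name-clash report above: each clashing spec is now rendered with an explicit format string rather than Spec.format()'s default, so the report carries enough detail to tell colliding specs apart. With the hunk's format string, one report line might look roughly like this (illustrative output, not captured from a real run):

    spec_fmt_str = "{name}@={version}%{compiler}/{hash:7} {variants} arch={arch}"
    # e.g. "spec: zlib@=1.2.13%gcc@11.3.0/abcdefg ~shared arch=linux-ubuntu22.04-x86_64"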
@@ -143,7 +143,7 @@ def pkg_source(args):
         tty.die("spack pkg source requires exactly one spec")

     spec = specs[0]
-    filename = spack.repo.path.filename_for_package_name(spec.name)
+    filename = spack.repo.PATH.filename_for_package_name(spec.name)

     # regular source dump -- just get the package and print its contents
     if args.canonical:

@@ -184,7 +184,7 @@ def pkg_grep(args, unknown_args):
     grouper = lambda e: e[0] // 500

     # set up iterator and save the first group to ensure we don't end up with a group of size 1
-    groups = itertools.groupby(enumerate(spack.repo.path.all_package_paths()), grouper)
+    groups = itertools.groupby(enumerate(spack.repo.PATH.all_package_paths()), grouper)
     if not groups:
         return 0  # no packages to search

@@ -24,7 +24,7 @@ def setup_parser(subparser):


 def providers(parser, args):
-    valid_virtuals = sorted(spack.repo.path.provider_index.providers.keys())
+    valid_virtuals = sorted(spack.repo.PATH.provider_index.providers.keys())

     buffer = io.StringIO()
     isatty = sys.stdout.isatty()

@@ -53,5 +53,5 @@ def providers(parser, args):
     for spec in specs:
         if sys.stdout.isatty():
             print("{0}:".format(spec))
-        spack.cmd.display_specs(sorted(spack.repo.path.providers_for(spec)))
+        spack.cmd.display_specs(sorted(spack.repo.PATH.providers_for(spec)))
         print("")
@@ -20,7 +20,6 @@
 def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="repo_command")
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     # Create
     create_parser = sp.add_parser("create", help=repo_create.__doc__)

@@ -45,7 +44,7 @@ def setup_parser(subparser):
     list_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )

@@ -56,7 +55,7 @@ def setup_parser(subparser):
     add_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )

@@ -69,7 +68,7 @@ def setup_parser(subparser):
     remove_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope to modify",
     )

@@ -29,7 +29,7 @@ def setup_parser(subparser):

 def _show_patch(sha256):
     """Show a record from the patch index."""
-    patches = spack.repo.path.patch_index.index
+    patches = spack.repo.PATH.patch_index.index
     data = patches.get(sha256)

     if not data:

@@ -47,7 +47,7 @@ def _show_patch(sha256):
         owner = rec["owner"]

         if "relative_path" in rec:
-            pkg_dir = spack.repo.path.get_pkg_class(owner).package_dir
+            pkg_dir = spack.repo.PATH.get_pkg_class(owner).package_dir
             path = os.path.join(pkg_dir, rec["relative_path"])
             print("    path: %s" % path)
         else:

@@ -60,7 +60,7 @@ def _show_patch(sha256):

 def resource_list(args):
     """list all resources known to spack (currently just patches)"""
-    patches = spack.repo.path.patch_index.index
+    patches = spack.repo.PATH.patch_index.index
     for sha256 in patches:
         if args.only_hashes:
             print(sha256)
@@ -42,7 +42,7 @@ def setup_parser(subparser):
     )

     # Below are arguments w.r.t. spec display (like spack spec)
-    arguments.add_common_arguments(subparser, ["long", "very_long"])
+    arguments.add_common_arguments(subparser, ["long", "very_long", "namespaces"])

     install_status_group = subparser.add_mutually_exclusive_group()
     arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])

@@ -73,13 +73,6 @@ def setup_parser(subparser):
         choices=["nodes", "edges", "paths"],
         help="how extensively to traverse the DAG (default: nodes)",
     )
-    subparser.add_argument(
-        "-N",
-        "--namespaces",
-        action="store_true",
-        default=False,
-        help="show fully qualified package names",
-    )
     subparser.add_argument(
         "-t", "--types", action="store_true", default=False, help="show dependency types"
     )

@@ -144,7 +137,7 @@ def solve(parser, args):
     # these are the same options as `spack spec`
     install_status_fn = spack.spec.Spec.install_status

-    fmt = spack.spec.display_format
+    fmt = spack.spec.DISPLAY_FORMAT
     if args.namespaces:
         fmt = "{namespace}." + fmt

@@ -29,7 +29,7 @@ def setup_parser(subparser):
     for further documentation regarding the spec syntax, see:
         spack help --spec
     """
-    arguments.add_common_arguments(subparser, ["long", "very_long"])
+    arguments.add_common_arguments(subparser, ["long", "very_long", "namespaces"])

     install_status_group = subparser.add_mutually_exclusive_group()
     arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])

@@ -67,13 +67,6 @@ def setup_parser(subparser):
         choices=["nodes", "edges", "paths"],
         help="how extensively to traverse the DAG (default: nodes)",
     )
-    subparser.add_argument(
-        "-N",
-        "--namespaces",
-        action="store_true",
-        default=False,
-        help="show fully qualified package names",
-    )
     subparser.add_argument(
         "-t", "--types", action="store_true", default=False, help="show dependency types"
     )

@@ -84,7 +77,7 @@ def setup_parser(subparser):
 def spec(parser, args):
     install_status_fn = spack.spec.Spec.install_status

-    fmt = spack.spec.display_format
+    fmt = spack.spec.DISPLAY_FORMAT
     if args.namespaces:
         fmt = "{namespace}." + fmt
@@ -68,7 +68,7 @@ def tags(parser, args):
         return

     # unique list of available tags
-    available_tags = sorted(spack.repo.path.tag_index.keys())
+    available_tags = sorted(spack.repo.PATH.tag_index.keys())
     if not available_tags:
         tty.msg("No tagged packages")
         return

@@ -228,7 +228,7 @@ def create_reporter(args, specs_to_test, test_suite):

 def test_list(args):
     """list installed packages with available tests"""
-    tagged = set(spack.repo.path.packages_with_tags(*args.tag)) if args.tag else set()
+    tagged = set(spack.repo.PATH.packages_with_tags(*args.tag)) if args.tag else set()

     def has_test_and_tags(pkg_class):
         tests = spack.install_test.test_functions(pkg_class)

@@ -237,7 +237,7 @@ def has_test_and_tags(pkg_class):
     if args.list_all:
         report_packages = [
             pkg_class.name
-            for pkg_class in spack.repo.path.all_package_classes()
+            for pkg_class in spack.repo.PATH.all_package_classes()
             if has_test_and_tags(pkg_class)
         ]

@@ -209,12 +209,11 @@ def unit_test(parser, args, unknown_args):
     # mock configuration used by unit tests
     # Note: skip on windows here because for the moment,
    # clingo is wholly unsupported from bootstrap
-    if sys.platform != "win32":
-        with spack.bootstrap.ensure_bootstrap_configuration():
-            spack.bootstrap.ensure_core_dependencies()
-            if pytest is None:
-                spack.bootstrap.ensure_environment_dependencies()
-            import pytest
+    with spack.bootstrap.ensure_bootstrap_configuration():
+        spack.bootstrap.ensure_core_dependencies()
+        if pytest is None:
+            spack.bootstrap.ensure_environment_dependencies()
+        import pytest

     if args.pytest_help:
         # make the pytest.main help output more accurate
@@ -155,7 +155,7 @@ def url_list(args):
     urls = set()

     # Gather set of URLs from all packages
-    for pkg_cls in spack.repo.path.all_package_classes():
+    for pkg_cls in spack.repo.PATH.all_package_classes():
         url = getattr(pkg_cls, "url", None)
         urls = url_list_parsing(args, urls, url, pkg_cls)

@@ -192,7 +192,7 @@ def url_summary(args):
     tty.msg("Generating a summary of URL parsing in Spack...")

     # Loop through all packages
-    for pkg_cls in spack.repo.path.all_package_classes():
+    for pkg_cls in spack.repo.PATH.all_package_classes():
         urls = set()
         pkg = pkg_cls(spack.spec.Spec(pkg_cls.name))

@@ -336,7 +336,7 @@ def add(self, pkg_name, fetcher):
     version_stats = UrlStats()
     resource_stats = UrlStats()

-    for pkg_cls in spack.repo.path.all_package_classes():
+    for pkg_cls in spack.repo.PATH.all_package_classes():
         npkgs += 1

         for v in pkg_cls.versions:

@@ -45,7 +45,7 @@ def setup_parser(subparser):

 def versions(parser, args):
     spec = spack.spec.Spec(args.package)
-    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
     pkg = pkg_cls(spec)

     safe_versions = pkg.versions
@@ -135,7 +135,7 @@ def _init_compiler_config(*, scope):

 def compiler_config_files():
     config_files = list()
-    config = spack.config.config
+    config = spack.config.CONFIG
     for scope in config.file_scopes:
         name = scope.name
         compiler_config = config.get("compilers", scope=name)

@@ -169,7 +169,7 @@ def remove_compiler_from_config(compiler_spec, scope=None):
     """
     candidate_scopes = [scope]
     if scope is None:
-        candidate_scopes = spack.config.config.scopes.keys()
+        candidate_scopes = spack.config.CONFIG.scopes.keys()

     removal_happened = False
     for current_scope in candidate_scopes:

@@ -523,7 +523,7 @@ def compiler_for_spec(compiler_spec, arch_spec):

 @_auto_compiler_spec
 def get_compiler_duplicates(compiler_spec, arch_spec):
-    config = spack.config.config
+    config = spack.config.CONFIG

     scope_to_compilers = {}
     for scope in config.scopes:
@@ -2,13 +2,9 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os.path
 import re
-import shutil

 import llnl.util.lang
-import llnl.util.tty as tty
-from llnl.util.symlink import symlink

 import spack.compiler
 import spack.compilers.clang

@@ -119,108 +115,3 @@ def c23_flag(self):
                 self, "the C23 standard", "c23_flag", "< 11.0.3"
             )
         return "-std=c2x"
-
-    def setup_custom_environment(self, pkg, env):
-        """Set the DEVELOPER_DIR environment for the Xcode toolchain.
-
-        On macOS, not all buildsystems support querying CC and CXX for the
-        compilers to use and instead query the Xcode toolchain for what
-        compiler to run. This side-steps the spack wrappers. In order to inject
-        spack into this setup, we need to copy (a subset of) Xcode.app and
-        replace the compiler executables with symlinks to the spack wrapper.
-        Currently, the stage is used to store the Xcode.app copies. We then set
-        the 'DEVELOPER_DIR' environment variables to cause the xcrun and
-        related tools to use this Xcode.app.
-        """
-        super().setup_custom_environment(pkg, env)
-
-        if not pkg.use_xcode:
-            # if we do it for all packages, we get into big troubles with MPI:
-            # filter_compilers(self) will use mockup XCode compilers on macOS
-            # with Clang. Those point to Spack's compiler wrappers and
-            # consequently render MPI non-functional outside of Spack.
-            return
-
-        # Use special XCode versions of compiler wrappers when using XCode
-        # Overwrites build_environment's setting of SPACK_CC and SPACK_CXX
-        xcrun = spack.util.executable.Executable("xcrun")
-        xcode_clang = xcrun("-f", "clang", output=str).strip()
-        xcode_clangpp = xcrun("-f", "clang++", output=str).strip()
-        env.set("SPACK_CC", xcode_clang, force=True)
-        env.set("SPACK_CXX", xcode_clangpp, force=True)
-
-        xcode_select = spack.util.executable.Executable("xcode-select")
-
-        # Get the path of the active developer directory
-        real_root = xcode_select("--print-path", output=str).strip()
-
-        # The path name can be used to determine whether the full Xcode suite
-        # or just the command-line tools are installed
-        if real_root.endswith("Developer"):
-            # The full Xcode suite is installed
-            pass
-        else:
-            if real_root.endswith("CommandLineTools"):
-                # Only the command-line tools are installed
-                msg = "It appears that you have the Xcode command-line tools "
-                msg += "but not the full Xcode suite installed.\n"
-
-            else:
-                # Xcode is not installed
-                msg = "It appears that you do not have Xcode installed.\n"
-
-            msg += "In order to use Spack to build the requested application, "
-            msg += "you need the full Xcode suite. It can be installed "
-            msg += "through the App Store. Make sure you launch the "
-            msg += "application and accept the license agreement.\n"
-
-            raise OSError(msg)
-
-        real_root = os.path.dirname(os.path.dirname(real_root))
-        developer_root = os.path.join(
-            spack.stage.get_stage_root(), "xcode-select", self.name, str(self.version)
-        )
-        xcode_link = os.path.join(developer_root, "Xcode.app")
-
-        if not os.path.exists(developer_root):
-            tty.warn(
-                "Copying Xcode from %s to %s in order to add spack "
-                "wrappers to it. Please do not interrupt." % (real_root, developer_root)
-            )
-
-            # We need to make a new Xcode.app instance, but with symlinks to
-            # the spack wrappers for the compilers it ships. This is necessary
-            # because some projects insist on just asking xcrun and related
-            # tools where the compiler runs. These tools are very hard to trick
-            # as they do realpath and end up ignoring the symlinks in a
-            # "softer" tree of nothing but symlinks in the right places.
-            shutil.copytree(
-                real_root,
-                developer_root,
-                symlinks=True,
-                ignore=shutil.ignore_patterns(
-                    "AppleTV*.platform",
-                    "Watch*.platform",
-                    "iPhone*.platform",
-                    "Documentation",
-                    "swift*",
-                ),
-            )
-
-            real_dirs = ["Toolchains/XcodeDefault.xctoolchain/usr/bin", "usr/bin"]
-
-            bins = ["c++", "c89", "c99", "cc", "clang", "clang++", "cpp"]
-
-            for real_dir in real_dirs:
-                dev_dir = os.path.join(developer_root, "Contents", "Developer", real_dir)
-                for fname in os.listdir(dev_dir):
-                    if fname in bins:
-                        os.unlink(os.path.join(dev_dir, fname))
-                        symlink(
-                            os.path.join(spack.paths.build_env_path, "cc"),
-                            os.path.join(dev_dir, fname),
-                        )
-
-            symlink(developer_root, xcode_link)
-
-        env.set("DEVELOPER_DIR", xcode_link)
@@ -28,6 +28,7 @@

 import spack.abi
 import spack.compilers
+import spack.config
 import spack.environment
 import spack.error
 import spack.platforms

@@ -37,7 +38,6 @@
 import spack.tengine
 import spack.util.path
 import spack.variant as vt
-from spack.config import config
 from spack.package_prefs import PackagePrefs, is_spec_buildable, spec_externals
 from spack.version import ClosedOpenRange, VersionList, ver

@@ -76,7 +76,7 @@ class Concretizer:

     def __init__(self, abstract_spec=None):
         if Concretizer.check_for_compiler_existence is None:
-            Concretizer.check_for_compiler_existence = not config.get(
+            Concretizer.check_for_compiler_existence = not spack.config.get(
                 "config:install_missing_compilers", False
             )
         self.abstract_spec = abstract_spec

@@ -113,7 +113,7 @@ def _valid_virtuals_and_externals(self, spec):
         pref_key = lambda spec: 0  # no-op pref key

         if spec.virtual:
-            candidates = spack.repo.path.providers_for(spec)
+            candidates = spack.repo.PATH.providers_for(spec)
             if not candidates:
                 raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)
@@ -47,6 +47,8 @@
 import spack.platforms
 import spack.schema
+import spack.schema.bootstrap
+import spack.schema.cdash
 import spack.schema.ci
 import spack.schema.compilers
 import spack.schema.concretizer
 import spack.schema.config

@@ -64,7 +66,7 @@
 from spack.util.cpus import cpus_available

 #: Dict from section names -> schema for that section
-section_schemas = {
+SECTION_SCHEMAS = {
     "compilers": spack.schema.compilers.schema,
     "concretizer": spack.schema.concretizer.schema,
     "mirrors": spack.schema.mirrors.schema,

@@ -80,16 +82,16 @@

 # Same as above, but including keys for environments
 # this allows us to unify config reading between configs and environments
-all_schemas = copy.deepcopy(section_schemas)
-all_schemas.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
+_ALL_SCHEMAS = copy.deepcopy(SECTION_SCHEMAS)
+_ALL_SCHEMAS.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})

 #: Path to the default configuration
-configuration_defaults_path = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
+CONFIGURATION_DEFAULTS_PATH = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))

 #: Hard-coded default values for some key configuration options.
 #: This ensures that Spack will still work even if config.yaml in
 #: the defaults scope is removed.
-config_defaults = {
+CONFIG_DEFAULTS = {
     "config": {
         "debug": False,
         "connect_timeout": 10,

@@ -105,10 +107,10 @@

 #: metavar to use for commands that accept scopes
 #: this is shorter and more readable than listing all choices
-scopes_metavar = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
+SCOPES_METAVAR = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"

 #: Base name for the (internal) overrides scope.
-overrides_base_name = "overrides-"
+_OVERRIDES_BASE_NAME = "overrides-"


 class ConfigScope:

@@ -134,7 +136,7 @@ def get_section_filename(self, section):
     def get_section(self, section):
         if section not in self.sections:
             path = self.get_section_filename(section)
-            schema = section_schemas[section]
+            schema = SECTION_SCHEMAS[section]
             data = read_config_file(path, schema)
             self.sections[section] = data
         return self.sections[section]

@@ -145,7 +147,7 @@ def _write_section(self, section):

         # We copy data here to avoid adding defaults at write time
         validate_data = copy.deepcopy(data)
-        validate(validate_data, section_schemas[section])
+        validate(validate_data, SECTION_SCHEMAS[section])

         try:
             mkdirp(self.path)

@@ -317,7 +319,7 @@ def __init__(self, name, data=None):
         data = InternalConfigScope._process_dict_keyname_overrides(data)
         for section in data:
             dsec = data[section]
-            validate({section: dsec}, section_schemas[section])
+            validate({section: dsec}, SECTION_SCHEMAS[section])
             self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)

     def get_section_filename(self, section):

@@ -333,7 +335,7 @@ def _write_section(self, section):
         """This only validates, as the data is already in memory."""
         data = self.get_section(section)
         if data is not None:
-            validate(data, section_schemas[section])
+            validate(data, SECTION_SCHEMAS[section])
             self.sections[section] = _mark_internal(data, self.name)

     def __repr__(self):

@@ -430,7 +432,7 @@ def file_scopes(self) -> List[ConfigScope]:
         return [
             s
             for s in self.scopes.values()
-            if (type(s) == ConfigScope or type(s) == SingleFileScope)
+            if (type(s) is ConfigScope or type(s) is SingleFileScope)
         ]

     def highest_precedence_scope(self) -> ConfigScope:
@@ -711,11 +713,11 @@ def override(path_or_scope, value=None):
     """
     if isinstance(path_or_scope, ConfigScope):
         overrides = path_or_scope
-        config.push_scope(path_or_scope)
+        CONFIG.push_scope(path_or_scope)
     else:
-        base_name = overrides_base_name
+        base_name = _OVERRIDES_BASE_NAME
         # Ensure the new override gets a unique scope name
-        current_overrides = [s.name for s in config.matching_scopes(r"^{0}".format(base_name))]
+        current_overrides = [s.name for s in CONFIG.matching_scopes(r"^{0}".format(base_name))]
         num_overrides = len(current_overrides)
         while True:
             scope_name = "{0}{1}".format(base_name, num_overrides)

@@ -725,19 +727,19 @@ def override(path_or_scope, value=None):
                 break

         overrides = InternalConfigScope(scope_name)
-        config.push_scope(overrides)
-        config.set(path_or_scope, value, scope=scope_name)
+        CONFIG.push_scope(overrides)
+        CONFIG.set(path_or_scope, value, scope=scope_name)

     try:
-        yield config
+        yield CONFIG
     finally:
-        scope = config.remove_scope(overrides.name)
+        scope = CONFIG.remove_scope(overrides.name)
         assert scope is overrides


 #: configuration scopes added on the command line
 #: set by ``spack.main.main()``.
-command_line_scopes: List[str] = []
+COMMAND_LINE_SCOPES: List[str] = []


 def _add_platform_scope(cfg, scope_type, name, path):
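Note on override() above: it is a context manager, so the temporary scope it pushes is always popped again, and the closing assert documents that nothing else may remove scopes while an override is active. A typical usage sketch:

    import spack.config

    with spack.config.override("config:debug", True):
        # Code in this block sees the overridden value...
        assert spack.config.get("config:debug") is True
    # ...and the temporary "overrides-N" scope is removed on exit.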
@@ -781,14 +783,14 @@ def create():
     cfg = Configuration()

     # first do the builtin, hardcoded defaults
-    builtin = InternalConfigScope("_builtin", config_defaults)
+    builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
     cfg.push_scope(builtin)

     # Builtin paths to configuration files in Spack
     configuration_paths = [
         # Default configuration scope is the lowest-level scope. These are
         # versioned with Spack and can be overridden by systems, sites or users
-        configuration_defaults_path
+        CONFIGURATION_DEFAULTS_PATH
     ]

     disable_local_config = "SPACK_DISABLE_LOCAL_CONFIG" in os.environ

@@ -815,7 +817,7 @@ def create():
         _add_platform_scope(cfg, ConfigScope, name, path)

     # add command-line scopes
-    _add_command_line_scopes(cfg, command_line_scopes)
+    _add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)

     # we make a special scope for spack commands so that they can
     # override configuration options.

@@ -825,7 +827,7 @@ def create():


 #: This is the singleton configuration instance for Spack.
-config: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
+CONFIG: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)

def add_from_file(filename, scope=None):
|
||||
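[Reviewer note] Only the name of the module-level singleton changes (`config` to `CONFIG`); the lazily-initialized `Singleton(create)` behavior is identical, and the module-level wrappers below keep delegating to it. A sketch:

```python
import spack.config

# Both calls hit the same lazily-created Configuration instance.
via_wrapper = spack.config.get("config:install_tree")
via_singleton = spack.config.CONFIG.get("config:install_tree")
assert via_wrapper == via_singleton
```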
@@ -838,7 +840,7 @@ def add_from_file(filename, scope=None):
     # update all sections from config dict
     # We have to iterate on keys to keep overrides from the file
     for section in data.keys():
-        if section in section_schemas.keys():
+        if section in SECTION_SCHEMAS.keys():
             # Special handling for compiler scope difference
             # Has to be handled after we choose a section
             if scope is None:
@@ -849,7 +851,7 @@ def add_from_file(filename, scope=None):
             new = merge_yaml(existing, value)

             # We cannot call config.set directly (set is a type)
-            config.set(section, new, scope)
+            CONFIG.set(section, new, scope)


 def add(fullpath, scope=None):
@@ -897,12 +899,12 @@ def add(fullpath, scope=None):

     # merge value into existing
     new = merge_yaml(existing, value)
-    config.set(path, new, scope)
+    CONFIG.set(path, new, scope)


 def get(path, default=None, scope=None):
     """Module-level wrapper for ``Configuration.get()``."""
-    return config.get(path, default, scope)
+    return CONFIG.get(path, default, scope)


 def set(path, value, scope=None):
@@ -910,26 +912,26 @@ def set(path, value, scope=None):

     Accepts the path syntax described in ``get()``.
     """
-    return config.set(path, value, scope)
+    return CONFIG.set(path, value, scope)


 def add_default_platform_scope(platform):
     plat_name = os.path.join("defaults", platform)
-    plat_path = os.path.join(configuration_defaults_path[1], platform)
-    config.push_scope(ConfigScope(plat_name, plat_path))
+    plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
+    CONFIG.push_scope(ConfigScope(plat_name, plat_path))


 def scopes():
     """Convenience function to get list of configuration scopes."""
-    return config.scopes
+    return CONFIG.scopes


 def _validate_section_name(section):
     """Exit if the section is not a valid section."""
-    if section not in section_schemas:
+    if section not in SECTION_SCHEMAS:
         raise ConfigSectionError(
             "Invalid config section: '%s'. Options are: %s"
-            % (section, " ".join(section_schemas.keys()))
+            % (section, " ".join(SECTION_SCHEMAS.keys()))
         )


@@ -990,7 +992,7 @@ def read_config_file(filename, schema=None):
     if data:
         if not schema:
             key = next(iter(data))
-            schema = all_schemas[key]
+            schema = _ALL_SCHEMAS[key]
         validate(data, schema)
     return data

@@ -1089,7 +1091,7 @@ def get_valid_type(path):
         test_data = {component: test_data}

     try:
-        validate(test_data, section_schemas[section])
+        validate(test_data, SECTION_SCHEMAS[section])
     except (ConfigFormatError, AttributeError) as e:
         jsonschema_error = e.validation_error
         if jsonschema_error.validator == "type":
@@ -1278,9 +1280,9 @@ def default_modify_scope(section="config"):
     If this is not 'compilers', a general (non-platform) scope is used.
     """
     if section == "compilers":
-        return spack.config.config.highest_precedence_scope().name
+        return CONFIG.highest_precedence_scope().name
     else:
-        return spack.config.config.highest_precedence_non_platform_scope().name
+        return CONFIG.highest_precedence_non_platform_scope().name


 def default_list_scope():
@@ -1337,18 +1339,18 @@ def use_configuration(*scopes_or_paths):
     Returns:
         Configuration object associated with the scopes passed as arguments
     """
-    global config
+    global CONFIG

     # Normalize input and construct a Configuration object
     configuration = _config_from(scopes_or_paths)
-    config.clear_caches(), configuration.clear_caches()
+    CONFIG.clear_caches(), configuration.clear_caches()

-    saved_config, config = config, configuration
+    saved_config, CONFIG = CONFIG, configuration

     try:
         yield configuration
     finally:
-        config = saved_config
+        CONFIG = saved_config


 @llnl.util.lang.memoized
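[Reviewer note] `use_configuration` swaps the global `CONFIG` for the duration of a block and restores it afterwards; a sketch, with a hypothetical scratch directory as the single scope:

```python
import spack.config

# The directory is illustrative; any ConfigScope or path is accepted.
with spack.config.use_configuration("/tmp/spack-scratch-config") as cfg:
    cfg.set("config:build_jobs", 2)  # affects only the swapped-in config
    assert spack.config.get("config:build_jobs") == 2
# the previous global configuration is back here
```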
@@ -5,8 +5,8 @@
 """Writers for different kind of recipes and related
 convenience functions.
 """
-import collections
 import copy
+from collections import namedtuple
 from typing import Optional

 import spack.environment as ev
@@ -159,13 +159,13 @@ def depfile(self):
     @tengine.context_property
     def run(self):
         """Information related to the run image."""
-        Run = collections.namedtuple("Run", ["image"])
+        Run = namedtuple("Run", ["image"])
         return Run(image=self.final_image)

     @tengine.context_property
     def build(self):
         """Information related to the build image."""
-        Build = collections.namedtuple("Build", ["image"])
+        Build = namedtuple("Build", ["image"])
         return Build(image=self.build_image)

     @tengine.context_property
@@ -176,12 +176,13 @@ def strip(self):
     @tengine.context_property
     def paths(self):
         """Important paths in the image"""
-        Paths = collections.namedtuple("Paths", ["environment", "store", "hidden_view", "view"])
+        Paths = namedtuple("Paths", ["environment", "store", "view_parent", "view", "former_view"])
         return Paths(
             environment="/opt/spack-environment",
             store="/opt/software",
-            hidden_view="/opt/._view",
-            view="/opt/view",
+            view_parent="/opt/views",
+            view="/opt/views/view",
+            former_view="/opt/view",  # /opt/view -> /opt/views/view for backward compatibility
         )

     @tengine.context_property
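[Reviewer note] The view moves from `/opt/view` to `/opt/views/view`, with `former_view` kept for backward compatibility. A hypothetical reconstruction of what the generated recipe effectively does with these paths (the exact mechanism lives in the templates, which are not part of this compare):

```python
import os

view_parent = "/opt/views"
view = "/opt/views/view"
former_view = "/opt/view"

# Old location becomes a symlink to the new canonical view directory.
os.makedirs(view_parent, exist_ok=True)
if not os.path.lexists(former_view):
    os.symlink(view, former_view)  # /opt/view -> /opt/views/view
```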
@@ -257,7 +258,7 @@ def _package_info_from(self, package_list):

         update, install, clean = commands_for(os_pkg_manager)

-        Packages = collections.namedtuple("Packages", ["update", "install", "list", "clean"])
+        Packages = namedtuple("Packages", ["update", "install", "list", "clean"])
         return Packages(update=update, install=install, list=package_list, clean=clean)

     def _os_pkg_manager(self):
@@ -273,7 +274,7 @@ def _os_pkg_manager(self):

     @tengine.context_property
     def extra_instructions(self):
-        Extras = collections.namedtuple("Extra", ["build", "final"])
+        Extras = namedtuple("Extra", ["build", "final"])
         extras = self.container_config.get("extra_instructions", {})
         build, final = extras.get("build", None), extras.get("final", None)
         return Extras(build=build, final=final)
@@ -295,7 +296,7 @@ def bootstrap(self):
         context = {"bootstrap": {"image": self.bootstrap_image, "spack_checkout": command}}
         bootstrap_recipe = env.get_template(template_path).render(**context)

-        Bootstrap = collections.namedtuple("Bootstrap", ["image", "recipe"])
+        Bootstrap = namedtuple("Bootstrap", ["image", "recipe"])
         return Bootstrap(image=self.bootstrap_image, recipe=bootstrap_recipe)

     @tengine.context_property
@@ -303,7 +304,7 @@ def render_phase(self):
         render_bootstrap = bool(self.bootstrap_image)
         render_build = not (self.last_phase == "bootstrap")
         render_final = self.last_phase in (None, "final")
-        Render = collections.namedtuple("Render", ["bootstrap", "build", "final"])
+        Render = namedtuple("Render", ["bootstrap", "build", "final"])
         return Render(bootstrap=render_bootstrap, build=render_build, final=render_final)

     def __call__(self):

@@ -90,7 +90,7 @@ def spec_from_entry(entry):
         name=entry["name"], version=entry["version"], compiler=compiler_str, arch=arch_str
     )

-    pkg_cls = spack.repo.path.get_pkg_class(entry["name"])
+    pkg_cls = spack.repo.PATH.get_pkg_class(entry["name"])

     if "parameters" in entry:
         variant_strs = list()
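[Reviewer note] `spack.repo.path` to `spack.repo.PATH` recurs through the rest of this compare; it is a pure rename of the global `RepoPath` singleton. Usage is unchanged (package name chosen for illustration):

```python
import spack.repo

pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
print(pkg_cls.name)  # -> "zlib"
```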
@@ -21,10 +21,11 @@
 import contextlib
 import datetime
 import os
+import pathlib
 import socket
 import sys
 import time
-from typing import Dict, List, NamedTuple, Set, Type, Union
+from typing import Any, Callable, Dict, Generator, List, NamedTuple, Set, Type, Union

 try:
     import uuid
@@ -141,22 +142,23 @@ class InstallStatuses:
     def canonicalize(cls, query_arg):
         if query_arg is True:
             return [cls.INSTALLED]
-        elif query_arg is False:
+        if query_arg is False:
             return [cls.MISSING]
-        elif query_arg is any:
+        if query_arg is any:
             return [cls.INSTALLED, cls.DEPRECATED, cls.MISSING]
-        elif isinstance(query_arg, InstallStatus):
+        if isinstance(query_arg, InstallStatus):
             return [query_arg]
-        else:
-            try:  # Try block catches if it is not an iterable at all
-                if any(type(x) != InstallStatus for x in query_arg):
-                    raise TypeError
-            except TypeError:
-                raise TypeError(
-                    "installation query must be `any`, boolean, "
-                    "InstallStatus, or iterable of InstallStatus"
-                )
-            return query_arg
+        try:
+            statuses = list(query_arg)
+            if all(isinstance(x, InstallStatus) for x in statuses):
+                return statuses
+        except TypeError:
+            pass
+
+        raise TypeError(
+            "installation query must be `any`, boolean, "
+            "InstallStatus, or iterable of InstallStatus"
+        )


 class InstallRecord:
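[Reviewer note] The rewrite replaces the `elif` chain and the look-before-you-leap iteration test with early returns plus EAFP. A minimal self-contained sketch of the resulting contract (the enum is a stand-in for the real `InstallStatus`):

```python
import enum

class InstallStatus(enum.Enum):  # stand-in for spack.database.InstallStatus
    INSTALLED = "installed"
    MISSING = "missing"

def canonicalize(query_arg):
    if query_arg is True:
        return [InstallStatus.INSTALLED]
    if query_arg is False:
        return [InstallStatus.MISSING]
    try:
        statuses = list(query_arg)  # raises TypeError if not iterable
        if all(isinstance(x, InstallStatus) for x in statuses):
            return statuses
    except TypeError:
        pass
    raise TypeError("installation query must be boolean or iterable of InstallStatus")

print(canonicalize([InstallStatus.MISSING]))  # [<InstallStatus.MISSING: 'missing'>]
```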
@@ -306,15 +308,16 @@ def __reduce__(self):

     """

-#: Data class to configure locks in Database objects
-#:
-#: Args:
-#:     enable (bool): whether to enable locks or not.
-#:     database_timeout (int or None): timeout for the database lock
-#:     package_timeout (int or None): timeout for the package lock


 class LockConfiguration(NamedTuple):
+    """Data class to configure locks in Database objects
+
+    Args:
+        enable: whether to enable locks or not.
+        database_timeout: timeout for the database lock
+        package_timeout: timeout for the package lock
+    """

     enable: bool
     database_timeout: Optional[int]
     package_timeout: Optional[int]
@@ -348,13 +351,230 @@ def lock_configuration(configuration):
     )


+def prefix_lock_path(root_dir: Union[str, pathlib.Path]) -> pathlib.Path:
+    """Returns the path of the prefix lock file, given the root directory.
+
+    Args:
+        root_dir: root directory containing the database directory
+    """
+    return pathlib.Path(root_dir) / _DB_DIRNAME / "prefix_lock"
+
+
+def failures_lock_path(root_dir: Union[str, pathlib.Path]) -> pathlib.Path:
+    """Returns the path of the failures lock file, given the root directory.
+
+    Args:
+        root_dir: root directory containing the database directory
+    """
+    return pathlib.Path(root_dir) / _DB_DIRNAME / "prefix_failures"
+
+
+class SpecLocker:
+    """Manages acquiring and releasing read or write locks on concrete specs."""
+
+    def __init__(self, lock_path: Union[str, pathlib.Path], default_timeout: Optional[float]):
+        self.lock_path = pathlib.Path(lock_path)
+        self.default_timeout = default_timeout
+
+        # Maps (spec.dag_hash(), spec.name) to the corresponding lock object
+        self.locks: Dict[Tuple[str, str], lk.Lock] = {}
+
+    def lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
+        """Returns a lock on a concrete spec.
+
+        The lock is a byte range lock on the nth byte of a file.
+
+        The lock file is ``self.lock_path``.
+
+        n is the sys.maxsize-bit prefix of the DAG hash. This makes the likelihood of collision
+        very low AND it gives us readers-writer lock semantics with just a single lockfile, so
+        no cleanup required.
+        """
+        assert spec.concrete, "cannot lock a non-concrete spec"
+        timeout = timeout or self.default_timeout
+        key = self._lock_key(spec)
+
+        if key not in self.locks:
+            self.locks[key] = self.raw_lock(spec, timeout=timeout)
+        else:
+            self.locks[key].default_timeout = timeout
+
+        return self.locks[key]
+
+    def raw_lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
+        """Returns a raw lock for a Spec, but doesn't keep track of it."""
+        return lk.Lock(
+            str(self.lock_path),
+            start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
+            length=1,
+            default_timeout=timeout,
+            desc=spec.name,
+        )
+
+    def has_lock(self, spec: "spack.spec.Spec") -> bool:
+        """Returns True if the spec is already managed by this spec locker"""
+        return self._lock_key(spec) in self.locks
+
+    def _lock_key(self, spec: "spack.spec.Spec") -> Tuple[str, str]:
+        return (spec.dag_hash(), spec.name)
+
+    @contextlib.contextmanager
+    def write_lock(self, spec: "spack.spec.Spec") -> Generator["SpecLocker", None, None]:
+        lock = self.lock(spec)
+        lock.acquire_write()
+
+        try:
+            yield self
+        except lk.LockError:
+            # This addresses the case where a nested lock attempt fails inside
+            # of this context manager
+            raise
+        except (Exception, KeyboardInterrupt):
+            lock.release_write()
+            raise
+        else:
+            lock.release_write()
+
+    def clear(self, spec: "spack.spec.Spec") -> Tuple[bool, Optional[lk.Lock]]:
+        key = self._lock_key(spec)
+        lock = self.locks.pop(key, None)
+        return bool(lock), lock
+
+    def clear_all(self, clear_fn: Optional[Callable[[lk.Lock], Any]] = None) -> None:
+        if clear_fn is not None:
+            for lock in self.locks.values():
+                clear_fn(lock)
+        self.locks.clear()
+
+
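[Reviewer note] A usage sketch for the new `SpecLocker`; later hunks in this compare wire an instance up as `spack.store.STORE.prefix_locker` (the spec here is illustrative and requires a working Spack):

```python
import spack.spec
import spack.store

spec = spack.spec.Spec("zlib").concretized()

# write_lock() gives exclusive access to the spec's byte range in the
# shared lock file; it is released on normal exit or on error.
with spack.store.STORE.prefix_locker.write_lock(spec):
    print(f"holding write lock for {spec.name}-{spec.dag_hash()}")
```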
+class FailureTracker:
+    """Tracks installation failures.
+
+    Prefix failure marking takes the form of a byte range lock on the nth
+    byte of a file for coordinating between concurrent parallel build
+    processes and a persistent file, named with the full hash and
+    containing the spec, in a subdirectory of the database to enable
+    persistence across overlapping but separate related build processes.
+
+    The failure lock file lives alongside the install DB.
+
+    ``n`` is the sys.maxsize-bit prefix of the associated DAG hash to make
+    the likelihood of collision very low with no cleanup required.
+    """
+
+    def __init__(self, root_dir: Union[str, pathlib.Path], default_timeout: Optional[float]):
+        #: Ensure a persistent location for dealing with parallel installation
+        #: failures (e.g., across near-concurrent processes).
+        self.dir = pathlib.Path(root_dir) / _DB_DIRNAME / "failures"
+        self.dir.mkdir(parents=True, exist_ok=True)
+
+        self.locker = SpecLocker(failures_lock_path(root_dir), default_timeout=default_timeout)
+
+    def clear(self, spec: "spack.spec.Spec", force: bool = False) -> None:
+        """Removes any persistent and cached failure tracking for the spec.
+
+        see `mark()`.
+
+        Args:
+            spec: the spec whose failure indicators are being removed
+            force: True if the failure information should be cleared when a failure lock
+                exists for the file, or False if the failure should not be cleared (e.g.,
+                it may be associated with a concurrent build)
+        """
+        locked = self.lock_taken(spec)
+        if locked and not force:
+            tty.msg(f"Retaining failure marking for {spec.name} due to lock")
+            return
+
+        if locked:
+            tty.warn(f"Removing failure marking despite lock for {spec.name}")
+
+        succeeded, lock = self.locker.clear(spec)
+        if succeeded and lock is not None:
+            lock.release_write()
+
+        if self.persistent_mark(spec):
+            path = self._path(spec)
+            tty.debug(f"Removing failure marking for {spec.name}")
+            try:
+                path.unlink()
+            except OSError as err:
+                tty.warn(
+                    f"Unable to remove failure marking for {spec.name} ({str(path)}): {str(err)}"
+                )
+
+    def clear_all(self) -> None:
+        """Force remove install failure tracking files."""
+        tty.debug("Releasing prefix failure locks")
+        self.locker.clear_all(
+            clear_fn=lambda x: x.release_write() if x.is_write_locked() else True
+        )
+
+        tty.debug("Removing prefix failure tracking files")
+        try:
+            for fail_mark in os.listdir(str(self.dir)):
+                try:
+                    (self.dir / fail_mark).unlink()
+                except OSError as exc:
+                    tty.warn(f"Unable to remove failure marking file {fail_mark}: {str(exc)}")
+        except OSError as exc:
+            tty.warn(f"Unable to remove failure marking files: {str(exc)}")
+
+    def mark(self, spec: "spack.spec.Spec") -> lk.Lock:
+        """Marks a spec as failing to install.
+
+        Args:
+            spec: spec that failed to install
+        """
+        # Dump the spec to the failure file for (manual) debugging purposes
+        path = self._path(spec)
+        path.write_text(spec.to_json())
+
+        # Also ensure a failure lock is taken to prevent cleanup removal
+        # of failure status information during a concurrent parallel build.
+        if not self.locker.has_lock(spec):
+            try:
+                mark = self.locker.lock(spec)
+                mark.acquire_write()
+            except lk.LockTimeoutError:
+                # Unlikely that another process failed to install at the same
+                # time but log it anyway.
+                tty.debug(f"PID {os.getpid()} failed to mark install failure for {spec.name}")
+                tty.warn(f"Unable to mark {spec.name} as failed.")
+
+        return self.locker.lock(spec)
+
+    def has_failed(self, spec: "spack.spec.Spec") -> bool:
+        """Return True if the spec is marked as failed."""
+        # The failure was detected in this process.
+        if self.locker.has_lock(spec):
+            return True
+
+        # The failure was detected by a concurrent process (e.g., an srun),
+        # which is expected to be holding a write lock if that is the case.
+        if self.lock_taken(spec):
+            return True
+
+        # Determine if the spec may have been marked as failed by a separate
+        # spack build process running concurrently.
+        return self.persistent_mark(spec)
+
+    def lock_taken(self, spec: "spack.spec.Spec") -> bool:
+        """Return True if another process has a failure lock on the spec."""
+        check = self.locker.raw_lock(spec)
+        return check.is_write_locked()
+
+    def persistent_mark(self, spec: "spack.spec.Spec") -> bool:
+        """Determine if the spec has a persistent failure marking."""
+        return self._path(spec).exists()
+
+    def _path(self, spec: "spack.spec.Spec") -> pathlib.Path:
+        """Return the path to the spec's failure file, which may not exist."""
+        assert spec.concrete, "concrete spec required for failure path"
+        return self.dir / f"{spec.name}-{spec.dag_hash()}"
+
+
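[Reviewer note] `FailureTracker` bundles the persistent failure files with a `SpecLocker` over the failures lock file; later hunks expose it as `spack.store.STORE.failure_tracker`. A sketch (spec illustrative):

```python
import spack.spec
import spack.store

spec = spack.spec.Spec("zlib").concretized()
tracker = spack.store.STORE.failure_tracker

# mark() writes <name>-<dag_hash> under the failures dir and takes a
# write lock so concurrent builds cannot clear the marking prematurely.
tracker.mark(spec)
assert tracker.has_failed(spec)

# force=True removes the marking even though this process holds the lock.
tracker.clear(spec, force=True)
```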
 class Database:
-    #: Per-process lock objects for each install prefix
-    _prefix_locks: Dict[str, lk.Lock] = {}
-
-    #: Per-process failure (lock) objects for each install prefix
-    _prefix_failures: Dict[str, lk.Lock] = {}
-
     #: Fields written for each install record
     record_fields: Tuple[str, ...] = DEFAULT_INSTALL_RECORD_FIELDS

@@ -392,24 +612,10 @@ def __init__(
         self._verifier_path = os.path.join(self.database_directory, "index_verifier")
         self._lock_path = os.path.join(self.database_directory, "lock")

-        # This is for other classes to use to lock prefix directories.
-        self.prefix_lock_path = os.path.join(self.database_directory, "prefix_lock")
-
-        # Ensure a persistent location for dealing with parallel installation
-        # failures (e.g., across near-concurrent processes).
-        self._failure_dir = os.path.join(self.database_directory, "failures")
-
-        # Support special locks for handling parallel installation failures
-        # of a spec.
-        self.prefix_fail_path = os.path.join(self.database_directory, "prefix_failures")
-
         # Create needed directories and files
         if not is_upstream and not os.path.exists(self.database_directory):
             fs.mkdirp(self.database_directory)

-        if not is_upstream and not os.path.exists(self._failure_dir):
-            fs.mkdirp(self._failure_dir)
-
         self.is_upstream = is_upstream
         self.last_seen_verifier = ""
         # Failed write transactions (interrupted by exceptions) will alert
@@ -423,15 +629,7 @@ def __init__(

         # initialize rest of state.
         self.db_lock_timeout = lock_cfg.database_timeout
-        self.package_lock_timeout = lock_cfg.package_timeout

         tty.debug("DATABASE LOCK TIMEOUT: {0}s".format(str(self.db_lock_timeout)))
-        timeout_format_str = (
-            "{0}s".format(str(self.package_lock_timeout))
-            if self.package_lock_timeout
-            else "No timeout"
-        )
-        tty.debug("PACKAGE LOCK TIMEOUT: {0}".format(str(timeout_format_str)))

         self.lock: Union[ForbiddenLock, lk.Lock]
         if self.is_upstream:
@@ -471,212 +669,6 @@ def read_transaction(self):
         """Get a read lock context manager for use in a `with` block."""
         return self._read_transaction_impl(self.lock, acquire=self._read)
-    def _failed_spec_path(self, spec):
-        """Return the path to the spec's failure file, which may not exist."""
-        if not spec.concrete:
-            raise ValueError("Concrete spec required for failure path for {0}".format(spec.name))
-
-        return os.path.join(self._failure_dir, "{0}-{1}".format(spec.name, spec.dag_hash()))
-
-    def clear_all_failures(self) -> None:
-        """Force remove install failure tracking files."""
-        tty.debug("Releasing prefix failure locks")
-        for pkg_id in list(self._prefix_failures.keys()):
-            lock = self._prefix_failures.pop(pkg_id, None)
-            if lock:
-                lock.release_write()
-
-        # Remove all failure markings (aka files)
-        tty.debug("Removing prefix failure tracking files")
-        for fail_mark in os.listdir(self._failure_dir):
-            try:
-                os.remove(os.path.join(self._failure_dir, fail_mark))
-            except OSError as exc:
-                tty.warn(
-                    "Unable to remove failure marking file {0}: {1}".format(fail_mark, str(exc))
-                )
-
-    def clear_failure(self, spec: "spack.spec.Spec", force: bool = False) -> None:
-        """
-        Remove any persistent and cached failure tracking for the spec.
-
-        see `mark_failed()`.
-
-        Args:
-            spec: the spec whose failure indicators are being removed
-            force: True if the failure information should be cleared when a prefix failure
-                lock exists for the file, or False if the failure should not be cleared (e.g.,
-                it may be associated with a concurrent build)
-        """
-        failure_locked = self.prefix_failure_locked(spec)
-        if failure_locked and not force:
-            tty.msg("Retaining failure marking for {0} due to lock".format(spec.name))
-            return
-
-        if failure_locked:
-            tty.warn("Removing failure marking despite lock for {0}".format(spec.name))
-
-        lock = self._prefix_failures.pop(spec.prefix, None)
-        if lock:
-            lock.release_write()
-
-        if self.prefix_failure_marked(spec):
-            try:
-                path = self._failed_spec_path(spec)
-                tty.debug("Removing failure marking for {0}".format(spec.name))
-                os.remove(path)
-            except OSError as err:
-                tty.warn(
-                    "Unable to remove failure marking for {0} ({1}): {2}".format(
-                        spec.name, path, str(err)
-                    )
-                )
-
-    def mark_failed(self, spec: "spack.spec.Spec") -> lk.Lock:
-        """
-        Mark a spec as failing to install.
-
-        Prefix failure marking takes the form of a byte range lock on the nth
-        byte of a file for coordinating between concurrent parallel build
-        processes and a persistent file, named with the full hash and
-        containing the spec, in a subdirectory of the database to enable
-        persistence across overlapping but separate related build processes.
-
-        The failure lock file, ``spack.store.STORE.db.prefix_failures``, lives
-        alongside the install DB. ``n`` is the sys.maxsize-bit prefix of the
-        associated DAG hash to make the likelihood of collision very low with
-        no cleanup required.
-        """
-        # Dump the spec to the failure file for (manual) debugging purposes
-        path = self._failed_spec_path(spec)
-        with open(path, "w") as f:
-            spec.to_json(f)
-
-        # Also ensure a failure lock is taken to prevent cleanup removal
-        # of failure status information during a concurrent parallel build.
-        err = "Unable to mark {0.name} as failed."
-
-        prefix = spec.prefix
-        if prefix not in self._prefix_failures:
-            mark = lk.Lock(
-                self.prefix_fail_path,
-                start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
-                length=1,
-                default_timeout=self.package_lock_timeout,
-                desc=spec.name,
-            )
-
-            try:
-                mark.acquire_write()
-            except lk.LockTimeoutError:
-                # Unlikely that another process failed to install at the same
-                # time but log it anyway.
-                tty.debug(
-                    "PID {0} failed to mark install failure for {1}".format(os.getpid(), spec.name)
-                )
-                tty.warn(err.format(spec))
-
-        # Whether we or another process marked it as a failure, track it
-        # as such locally.
-        self._prefix_failures[prefix] = mark
-
-        return self._prefix_failures[prefix]
-
-    def prefix_failed(self, spec: "spack.spec.Spec") -> bool:
-        """Return True if the prefix (installation) is marked as failed."""
-        # The failure was detected in this process.
-        if spec.prefix in self._prefix_failures:
-            return True
-
-        # The failure was detected by a concurrent process (e.g., an srun),
-        # which is expected to be holding a write lock if that is the case.
-        if self.prefix_failure_locked(spec):
-            return True
-
-        # Determine if the spec may have been marked as failed by a separate
-        # spack build process running concurrently.
-        return self.prefix_failure_marked(spec)
-
-    def prefix_failure_locked(self, spec: "spack.spec.Spec") -> bool:
-        """Return True if a process has a failure lock on the spec."""
-        check = lk.Lock(
-            self.prefix_fail_path,
-            start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
-            length=1,
-            default_timeout=self.package_lock_timeout,
-            desc=spec.name,
-        )
-
-        return check.is_write_locked()
-
-    def prefix_failure_marked(self, spec: "spack.spec.Spec") -> bool:
-        """Determine if the spec has a persistent failure marking."""
-        return os.path.exists(self._failed_spec_path(spec))
-
-    def prefix_lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
-        """Get a lock on a particular spec's installation directory.
-
-        NOTE: The installation directory **does not** need to exist.
-
-        Prefix lock is a byte range lock on the nth byte of a file.
-
-        The lock file is ``spack.store.STORE.db.prefix_lock`` -- the DB
-        tells us what to call it and it lives alongside the install DB.
-
-        n is the sys.maxsize-bit prefix of the DAG hash. This makes
-        likelihood of collision is very low AND it gives us
-        readers-writer lock semantics with just a single lockfile, so no
-        cleanup required.
-        """
-        timeout = timeout or self.package_lock_timeout
-        prefix = spec.prefix
-        if prefix not in self._prefix_locks:
-            self._prefix_locks[prefix] = lk.Lock(
-                self.prefix_lock_path,
-                start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
-                length=1,
-                default_timeout=timeout,
-                desc=spec.name,
-            )
-        elif timeout != self._prefix_locks[prefix].default_timeout:
-            self._prefix_locks[prefix].default_timeout = timeout
-
-        return self._prefix_locks[prefix]
-
-    @contextlib.contextmanager
-    def prefix_read_lock(self, spec):
-        prefix_lock = self.prefix_lock(spec)
-        prefix_lock.acquire_read()
-
-        try:
-            yield self
-        except lk.LockError:
-            # This addresses the case where a nested lock attempt fails inside
-            # of this context manager
-            raise
-        except (Exception, KeyboardInterrupt):
-            prefix_lock.release_read()
-            raise
-        else:
-            prefix_lock.release_read()
-
-    @contextlib.contextmanager
-    def prefix_write_lock(self, spec):
-        prefix_lock = self.prefix_lock(spec)
-        prefix_lock.acquire_write()
-
-        try:
-            yield self
-        except lk.LockError:
-            # This addresses the case where a nested lock attempt fails inside
-            # of this context manager
-            raise
-        except (Exception, KeyboardInterrupt):
-            prefix_lock.release_write()
-            raise
-        else:
-            prefix_lock.release_write()
-
     def _write_to_file(self, stream):
         """Write out the database in JSON format to the stream passed
         as argument.

@@ -33,7 +33,7 @@ class OpenMpi(Package):
 import functools
 import os.path
 import re
-from typing import List, Optional, Set, Union
+from typing import Any, Callable, List, Optional, Set, Tuple, Union

 import llnl.util.lang
 import llnl.util.tty.color
@@ -520,7 +520,8 @@ def _execute_conflicts(pkg):

         # Save in a list the conflicts and the associated custom messages
         when_spec_list = pkg.conflicts.setdefault(conflict_spec, [])
-        when_spec_list.append((when_spec, msg))
+        msg_with_name = f"{pkg.name}: {msg}" if msg is not None else msg
+        when_spec_list.append((when_spec, msg_with_name))

     return _execute_conflicts

@@ -663,39 +664,35 @@ def _execute_patch(pkg_or_dep):

 @directive("variants")
 def variant(
-    name,
-    default=None,
-    description="",
-    values=None,
-    multi=None,
-    validator=None,
-    when=None,
-    sticky=False,
+    name: str,
+    default: Optional[Any] = None,
+    description: str = "",
+    values: Optional[Union[collections.abc.Sequence, Callable[[Any], bool]]] = None,
+    multi: Optional[bool] = None,
+    validator: Optional[Callable[[str, str, Tuple[Any, ...]], None]] = None,
+    when: Optional[Union[str, bool]] = None,
+    sticky: bool = False,
 ):
-    """Define a variant for the package. Packager can specify a default
-    value as well as a text description.
+    """Define a variant for the package.
+
+    Packager can specify a default value as well as a text description.

     Args:
-        name (str): name of the variant
-        default (str or bool): default value for the variant, if not
-            specified otherwise the default will be False for a boolean
-            variant and 'nothing' for a multi-valued variant
-        description (str): description of the purpose of the variant
-        values (tuple or typing.Callable): either a tuple of strings containing the
-            allowed values, or a callable accepting one value and returning
-            True if it is valid
-        multi (bool): if False only one value per spec is allowed for
-            this variant
-        validator (typing.Callable): optional group validator to enforce additional
-            logic. It receives the package name, the variant name and a tuple
-            of values and should raise an instance of SpackError if the group
-            doesn't meet the additional constraints
-        when (spack.spec.Spec, bool): optional condition on which the
-            variant applies
-        sticky (bool): the variant should not be changed by the concretizer to
-            find a valid concrete spec.
+        name: Name of the variant
+        default: Default value for the variant, if not specified otherwise the default will be
+            False for a boolean variant and 'nothing' for a multi-valued variant
+        description: Description of the purpose of the variant
+        values: Either a tuple of strings containing the allowed values, or a callable accepting
+            one value and returning True if it is valid
+        multi: If False only one value per spec is allowed for this variant
+        validator: Optional group validator to enforce additional logic. It receives the package
+            name, the variant name and a tuple of values and should raise an instance of SpackError
+            if the group doesn't meet the additional constraints
+        when: Optional condition on which the variant applies
+        sticky: The variant should not be changed by the concretizer to find a valid concrete spec

     Raises:
-        DirectiveError: if arguments passed to the directive are invalid
+        DirectiveError: If arguments passed to the directive are invalid
     """

     def format_error(msg, pkg):
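[Reviewer note] The `variant` directive is now fully type-annotated but behaves as before; a short recipe sketch (package and values are hypothetical):

```python
from spack.package import *


class Example(Package):  # hypothetical package
    """Example recipe exercising the annotated variant() signature."""

    # Boolean variant; default would be False if left unspecified.
    variant("shared", default=True, description="Build shared libraries")

    # Multi-valued variant constrained to an allowed value set.
    variant(
        "cuda_arch",
        values=("70", "80", "90"),
        multi=True,
        description="CUDA architectures to target",
    )
```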
@@ -763,7 +760,7 @@ def _execute_variant(pkg):
         when_spec = make_when_spec(when)
         when_specs = [when_spec]

-        if not re.match(spack.spec.identifier_re, name):
+        if not re.match(spack.spec.IDENTIFIER_RE, name):
             directive = "variant"
             msg = "Invalid variant name in {0}: '{1}'"
             raise DirectiveError(directive, msg.format(pkg.name, name))
@@ -900,7 +897,8 @@ def _execute_requires(pkg):

     # Save in a list the requirements and the associated custom messages
     when_spec_list = pkg.requirements.setdefault(tuple(requirement_specs), [])
-    when_spec_list.append((when_spec, policy, msg))
+    msg_with_name = f"{pkg.name}: {msg}" if msg is not None else msg
+    when_spec_list.append((when_spec, policy, msg_with_name))

     return _execute_requires

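[Reviewer note] Both `conflicts()` and `requires()` now prefix custom messages with the package name, so solver errors identify their source; a `None` message stays `None` rather than becoming `"pkg: None"`. In isolation:

```python
pkg_name = "foo"  # hypothetical package
msg = "needs a C++17 compiler"

msg_with_name = f"{pkg_name}: {msg}" if msg is not None else msg
print(msg_with_name)  # -> "foo: needs a C++17 compiler"
```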
@@ -11,6 +11,7 @@
 import shutil
 import sys
 from contextlib import contextmanager
+from pathlib import Path

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -104,7 +105,7 @@ def relative_path_for_spec(self, spec):

         projection = spack.projections.get_projection(self.projections, spec)
         path = spec.format(projection)
-        return path
+        return str(Path(path))

     def write_spec(self, spec, path):
         """Write a spec out to a file."""

@@ -16,7 +16,7 @@
 import urllib.parse
 import urllib.request
 import warnings
-from typing import Any, Dict, List, Optional, Set, Tuple, Union
+from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -203,7 +203,7 @@ def activate(env, use_env_repo=False):
     env.store_token = spack.store.reinitialize()

     if use_env_repo:
-        spack.repo.path.put_first(env.repo)
+        spack.repo.PATH.put_first(env.repo)

     tty.debug("Using environment '%s'" % env.name)

@@ -227,7 +227,7 @@ def deactivate():

     # use _repo so we only remove if a repo was actually constructed
     if _active_environment._repo:
-        spack.repo.path.remove(_active_environment._repo)
+        spack.repo.PATH.remove(_active_environment._repo)

     tty.debug("Deactivated environment '%s'" % _active_environment.name)

@@ -1084,8 +1084,8 @@ def add(self, user_spec, list_name=user_speclist_name):
         if list_name == user_speclist_name:
             if spec.anonymous:
                 raise SpackEnvironmentError("cannot add anonymous specs to an environment")
-            elif not spack.repo.path.exists(spec.name) and not spec.abstract_hash:
-                virtuals = spack.repo.path.provider_index.providers.keys()
+            elif not spack.repo.PATH.exists(spec.name) and not spec.abstract_hash:
+                virtuals = spack.repo.PATH.provider_index.providers.keys()
                 if spec.name not in virtuals:
                     msg = "no such package: %s" % spec.name
                     raise SpackEnvironmentError(msg)
@@ -1262,7 +1262,7 @@ def develop(self, spec: Spec, path: str, clone: bool = False) -> bool:
         # better if we can create the `source_path` directly into its final
         # destination.
         abspath = spack.util.path.canonicalize_path(path, default_wd=self.path)
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
         # We construct a package class ourselves, rather than asking for
         # Spec.package, since Spec only allows this when it is concrete
         package = pkg_cls(spec)
@@ -1490,7 +1490,7 @@ def _concretize_separately(self, tests=False):
         # for a write lock. We do this indirectly by retrieving the
         # provider index, which should in turn trigger the update of
         # all the indexes if there's any need for that.
-        _ = spack.repo.path.provider_index
+        _ = spack.repo.PATH.provider_index

         # Ensure we have compilers in compilers.yaml to avoid that
         # processes try to write the config file in parallel
@@ -1921,16 +1921,17 @@ def install_specs(self, specs=None, **install_args):
                     "Could not install log links for {0}: {1}".format(spec.name, str(e))
                 )

-    def all_specs(self):
-        """Return all specs, even those a user spec would shadow."""
-        roots = [self.specs_by_hash[h] for h in self.concretized_order]
-        specs = [s for s in traverse.traverse_nodes(roots, key=traverse.by_dag_hash)]
-        specs.sort()
-        return specs
+    def all_specs_generator(self) -> Iterable[Spec]:
+        """Returns a generator for all concrete specs"""
+        return traverse.traverse_nodes(self.concrete_roots(), key=traverse.by_dag_hash)
+
+    def all_specs(self) -> List[Spec]:
+        """Returns a list of all concrete specs"""
+        return list(self.all_specs_generator())

     def all_hashes(self):
         """Return hashes of all specs."""
-        return [s.dag_hash() for s in self.all_specs()]
+        return [s.dag_hash() for s in self.all_specs_generator()]

     def roots(self):
         """Specs explicitly requested by the user *in this environment*.
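[Reviewer note] For large environments the new generator avoids materializing (and sorting) the full spec list; a usage sketch (requires an activated environment):

```python
import spack.environment as ev

env = ev.active_environment()
if env:
    # Lazily walk every concrete spec, deduplicated by DAG hash.
    for spec in env.all_specs_generator():
        print(spec.format("{name}{@version}"))
```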
@@ -1993,14 +1994,10 @@ def get_one_by_hash(self, dag_hash):

     def all_matching_specs(self, *specs: spack.spec.Spec) -> List[Spec]:
         """Returns all concretized specs in the environment satisfying any of the input specs"""
-        # Look up abstract hashes ahead of time, to avoid O(n^2) traversal.
-        specs = [s.lookup_hash() for s in specs]
-
-        # Avoid double lookup by directly calling _satisfies.
         return [
             s
             for s in traverse.traverse_nodes(self.concrete_roots(), key=traverse.by_dag_hash)
-            if any(s._satisfies(t) for t in specs)
+            if any(s.satisfies(t) for t in specs)
         ]

     @spack.repo.autospec
@@ -2061,7 +2058,7 @@ def matching_spec(self, spec):
         # If multiple root specs match, it is assumed that the abstract
         # spec will most-succinctly summarize the difference between them
         # (and the user can enter one of these to disambiguate)
-        fmt_str = "{hash:7} " + spack.spec.default_format
+        fmt_str = "{hash:7} " + spack.spec.DEFAULT_FORMAT
         color = clr.get_color_when()
         match_strings = [
             f"Root spec {abstract.format(color=color)}\n  {concrete.format(fmt_str, color=color)}"
@@ -2279,7 +2276,7 @@ def _add_to_environment_repository(self, spec_node: Spec) -> None:
         repository = spack.repo.create_or_construct(repository_dir, spec_node.namespace)
         pkg_dir = repository.dirname_for_package_name(spec_node.name)
         fs.mkdirp(pkg_dir)
-        spack.repo.path.dump_provenance(spec_node, pkg_dir)
+        spack.repo.PATH.dump_provenance(spec_node, pkg_dir)

     def manifest_uptodate_or_warn(self):
         """Emits a warning if the manifest file is not up-to-date."""
@@ -2369,7 +2366,7 @@ def display_specs(concretized_specs):
     def _tree_to_display(spec):
         return spec.tree(
             recurse_dependencies=True,
-            format=spack.spec.display_format,
+            format=spack.spec.DISPLAY_FORMAT,
             status_fn=spack.spec.Spec.install_status,
             hashlen=7,
             hashes=True,
@@ -2447,13 +2444,13 @@ def make_repo_path(root):
 def prepare_config_scope(env):
     """Add env's scope to the global configuration search path."""
     for scope in env.config_scopes():
-        spack.config.config.push_scope(scope)
+        spack.config.CONFIG.push_scope(scope)


 def deactivate_config_scope(env):
     """Remove any scopes from env from the global config path."""
     for scope in env.config_scopes():
-        spack.config.config.remove_scope(scope.name)
+        spack.config.CONFIG.remove_scope(scope.name)


 def manifest_file(env_name_or_dir):

@@ -45,6 +45,7 @@
 import spack.util.url as url_util
 import spack.util.web as web_util
 import spack.version
+import spack.version.git_ref_lookup
 from spack.util.compression import decompressor_for, extension_from_path
 from spack.util.executable import CommandNotFoundError, which
 from spack.util.string import comma_and, quote
@@ -1540,7 +1541,7 @@ def for_package_version(pkg, version=None):
             f"Cannot fetch git version for {pkg.name}. Package has no 'git' attribute"
         )
     # Populate the version with comparisons to other commits
-    version.attach_git_lookup_from_package(pkg.name)
+    version.attach_lookup(spack.version.git_ref_lookup.GitRefLookup(pkg.name))

     # For GitVersion, we have no way to determine whether a ref is a branch or tag
     # Fortunately, we handle branches and tags identically, except tags are
@@ -590,9 +590,9 @@ def print_status(self, *specs, **kwargs):
             print()

             header = "%s{%s} / %s{%s}" % (
-                spack.spec.architecture_color,
+                spack.spec.ARCHITECTURE_COLOR,
                 architecture,
-                spack.spec.compiler_color,
+                spack.spec.COMPILER_COLOR,
                 compiler,
             )
             tty.hline(colorize(header), char="-")

@@ -535,7 +535,7 @@ def edge_entry(self, edge):

 def _static_edges(specs, deptype):
     for spec in specs:
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
         possible = pkg_cls.possible_dependencies(expand_virtuals=True, deptype=deptype)

         for parent_name, dependencies in possible.items():

@@ -49,7 +49,7 @@ def __call__(self, spec):


 def _content_hash_override(spec):
-    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
     pkg = pkg_cls(spec)
     return pkg.content_hash()


@@ -1147,12 +1147,12 @@ def write_test_result(self, spec, result):
     def write_reproducibility_data(self):
         for spec in self.specs:
             repo_cache_path = self.stage.repo.join(spec.name)
-            spack.repo.path.dump_provenance(spec, repo_cache_path)
+            spack.repo.PATH.dump_provenance(spec, repo_cache_path)
             for vspec in spec.package.virtuals_provided:
                 repo_cache_path = self.stage.repo.join(vspec.name)
                 if not os.path.exists(repo_cache_path):
                     try:
-                        spack.repo.path.dump_provenance(vspec, repo_cache_path)
+                        spack.repo.PATH.dump_provenance(vspec, repo_cache_path)
                     except spack.repo.UnknownPackageError:
                         pass  # not all virtuals have package files

@@ -519,13 +519,6 @@ def _try_install_from_binary_cache(
     )


-def clear_failures() -> None:
-    """
-    Remove all failure tracking markers for the Spack instance.
-    """
-    spack.store.STORE.db.clear_all_failures()
-
-
 def combine_phase_logs(phase_log_files: List[str], log_path: str) -> None:
     """
     Read set or list of logs and combine them into one file.
@@ -597,7 +590,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
         # Get the location of the package in the dest repo.
         dest_pkg_dir = repo.dirname_for_package_name(node.name)
         if node is spec:
-            spack.repo.path.dump_provenance(node, dest_pkg_dir)
+            spack.repo.PATH.dump_provenance(node, dest_pkg_dir)
         elif source_pkg_dir:
             fs.install_tree(source_pkg_dir, dest_pkg_dir)

@@ -1126,15 +1119,13 @@ class PackageInstaller:
     instance.
     """

-    def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]] = []):
+    def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]] = []) -> None:
         """Initialize the installer.

         Args:
             installs (list): list of tuples, where each
                 tuple consists of a package (PackageBase) and its associated
                 install arguments (dict)
-        Return:
-            PackageInstaller: instance
         """
         # List of build requests
         self.build_requests = [BuildRequest(pkg, install_args) for pkg, install_args in installs]
@@ -1287,7 +1278,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
         dep_id = package_id(dep_pkg)

         # Check for failure since a prefix lock is not required
-        if spack.store.STORE.db.prefix_failed(dep):
+        if spack.store.STORE.failure_tracker.has_failed(dep):
             action = "'spack install' the dependency"
             msg = "{0} is marked as an install failure: {1}".format(dep_id, action)
             raise InstallError(err.format(request.pkg_id, msg), pkg=dep_pkg)
@@ -1502,7 +1493,7 @@ def _ensure_locked(
         if lock is None:
             tty.debug(msg.format("Acquiring", desc, pkg_id, pretty_seconds(timeout or 0)))
             op = "acquire"
-            lock = spack.store.STORE.db.prefix_lock(pkg.spec, timeout)
+            lock = spack.store.STORE.prefix_locker.lock(pkg.spec, timeout)
             if timeout != lock.default_timeout:
                 tty.warn(
                     "Expected prefix lock timeout {0}, not {1}".format(
@@ -1627,12 +1618,12 @@ def _add_tasks(self, request: BuildRequest, all_deps):
                 # Clear any persistent failure markings _unless_ they are
                 # associated with another process in this parallel build
                 # of the spec.
-                spack.store.STORE.db.clear_failure(dep, force=False)
+                spack.store.STORE.failure_tracker.clear(dep, force=False)

         install_package = request.install_args.get("install_package")
         if install_package and request.pkg_id not in self.build_tasks:
             # Be sure to clear any previous failure
-            spack.store.STORE.db.clear_failure(request.spec, force=True)
+            spack.store.STORE.failure_tracker.clear(request.spec, force=True)

             # If not installing dependencies, then determine their
             # installation status before proceeding
@@ -1888,7 +1879,7 @@ def _update_failed(
         err = "" if exc is None else ": {0}".format(str(exc))
         tty.debug("Flagging {0} as failed{1}".format(pkg_id, err))
         if mark:
-            self.failed[pkg_id] = spack.store.STORE.db.mark_failed(task.pkg.spec)
+            self.failed[pkg_id] = spack.store.STORE.failure_tracker.mark(task.pkg.spec)
         else:
             self.failed[pkg_id] = None
         task.status = STATUS_FAILED
@@ -2074,7 +2065,7 @@ def install(self) -> None:

             # Flag a failed spec. Do not need an (install) prefix lock since
             # assume using a separate (failed) prefix lock file.
-            if pkg_id in self.failed or spack.store.STORE.db.prefix_failed(spec):
+            if pkg_id in self.failed or spack.store.STORE.failure_tracker.has_failed(spec):
                 term_status.clear()
                 tty.warn("{0} failed to install".format(pkg_id))
                 self._update_failed(task)

@@ -602,10 +602,10 @@ def setup_main_options(args):

         key = syaml.syaml_str("repos")
         key.override = True
-        spack.config.config.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
+        spack.config.CONFIG.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
             [(key, [spack.paths.mock_packages_path])]
         )
-        spack.repo.path = spack.repo.create(spack.config.config)
+        spack.repo.PATH = spack.repo.create(spack.config.CONFIG)

     # If the user asked for it, don't check ssl certs.
     if args.insecure:
@@ -930,7 +930,7 @@ def _main(argv=None):

     # make spack.config aware of any command line configuration scopes
     if args.config_scopes:
-        spack.config.command_line_scopes = args.config_scopes
+        spack.config.COMMAND_LINE_SCOPES = args.config_scopes

     # ensure options on spack command come before everything
     setup_main_options(args)

@@ -442,7 +442,7 @@ def mirror_archive_paths(fetcher, per_package_ref, spec=None):
     storage path of the resource associated with the specified ``fetcher``."""
     ext = None
     if spec:
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
         versions = pkg_cls.versions.get(spec.version, {})
         ext = versions.get("extension", None)
     # If the spec does not explicitly specify an extension (the default case),
@@ -474,7 +474,7 @@ def get_all_versions(specs):
     """
     version_specs = []
     for spec in specs:
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
         # Skip any package that has no known versions.
         if not pkg_cls.versions:
             tty.msg("No safe (checksummed) versions for package %s" % pkg_cls.name)

@@ -833,7 +833,7 @@ def ensure_modules_are_enabled_or_warn():
         return

     # Check if we have custom TCL module sections
-    for scope in spack.config.config.file_scopes:
+    for scope in spack.config.CONFIG.file_scopes:
         # Skip default configuration
         if scope.name.startswith("default"):
             continue

@@ -143,7 +143,7 @@ def hierarchy_tokens(self):

     # Check if all the tokens in the hierarchy are virtual specs.
     # If not warn the user and raise an error.
-    not_virtual = [t for t in tokens if t != "compiler" and not spack.repo.path.is_virtual(t)]
+    not_virtual = [t for t in tokens if t != "compiler" and not spack.repo.PATH.is_virtual(t)]
     if not_virtual:
         msg = "Non-virtual specs in 'hierarchy' list for lmod: {0}\n"
         msg += "Please check the 'modules.yaml' configuration files"

@@ -236,7 +236,7 @@ def install(self, prefix):

         # Create a multimethod with this name if there is not one already
         original_method = MultiMethodMeta._locals.get(method.__name__)
-        if not type(original_method) == SpecMultiMethod:
+        if not isinstance(original_method, SpecMultiMethod):
            original_method = SpecMultiMethod(original_method)

         if self.spec is not None:

@@ -514,6 +514,10 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     # These are default values for instance variables.
     #

+    # Declare versions dictionary as placeholder for values.
+    # This allows analysis tools to correctly interpret the class attributes.
+    versions: dict
+
     #: By default, packages are not virtual
     #: Virtual packages override this attribute
     virtual = False
@@ -528,10 +532,6 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     #: By default do not run tests within package's install()
     run_tests = False

-    # FIXME: this is a bad object-oriented design, should be moved to Clang.
-    #: By default do not setup mockup XCode on macOS with Clang
-    use_xcode = False
-
     #: Keep -Werror flags, matches config:flags:keep_werror to override config
     # NOTE: should be type Optional[Literal['all', 'specific', 'none']] in 3.8+
     keep_werror: Optional[str] = None
@@ -665,7 +665,7 @@ def __init__(self, spec):
         self.win_rpath = fsys.WindowsSimulatedRPath(self)

         if self.is_extension:
-            pkg_cls = spack.repo.path.get_pkg_class(self.extendee_spec.name)
+            pkg_cls = spack.repo.PATH.get_pkg_class(self.extendee_spec.name)
             pkg_cls(self.extendee_spec)._check_extendable()

         super().__init__()
@@ -728,11 +728,11 @@ def possible_dependencies(
                 continue

             # expand virtuals if enabled, otherwise just stop at virtuals
-            if spack.repo.path.is_virtual(name):
+            if spack.repo.PATH.is_virtual(name):
                 if virtuals is not None:
                     virtuals.add(name)
                 if expand_virtuals:
-                    providers = spack.repo.path.providers_for(name)
+                    providers = spack.repo.PATH.providers_for(name)
                     dep_names = [spec.name for spec in providers]
                 else:
                     visited.setdefault(cls.name, set()).add(name)
@@ -756,7 +756,7 @@ def possible_dependencies(
                 continue

             try:
-                dep_cls = spack.repo.path.get_pkg_class(dep_name)
+                dep_cls = spack.repo.PATH.get_pkg_class(dep_name)
             except spack.repo.UnknownPackageError:
                 # log unknown packages
                 missing.setdefault(cls.name, set()).add(dep_name)
@@ -2209,7 +2209,7 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
     pkg = None

     # Pre-uninstall hook runs first.
-    with spack.store.STORE.db.prefix_write_lock(spec):
+    with spack.store.STORE.prefix_locker.write_lock(spec):
         if pkg is not None:
             try:
                 spack.hooks.pre_uninstall(spec)
@@ -2459,8 +2459,8 @@ def possible_dependencies(*pkg_or_spec, **kwargs):
         if not isinstance(pos, spack.spec.Spec):
             pos = spack.spec.Spec(pos)

-        if spack.repo.path.is_virtual(pos.name):
-            packages.extend(p.package_class for p in spack.repo.path.providers_for(pos.name))
+        if spack.repo.PATH.is_virtual(pos.name):
+            packages.extend(p.package_class for p in spack.repo.PATH.providers_for(pos.name))
             continue
         else:
             packages.append(pos.package_class)

@@ -147,7 +147,7 @@ def preferred_variants(cls, pkg_name):
     variants = " ".join(variants)

     # Only return variants that are actually supported by the package
-    pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+    pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
     spec = spack.spec.Spec("%s %s" % (pkg_name, variants))
     return dict(
         (name, variant) for name, variant in spec.variants.items() if name in pkg_cls.variants
@@ -162,7 +162,7 @@ def spec_externals(spec):
     from spack.util.module_cmd import path_from_modules  # noqa: F401

     def _package(maybe_abstract_spec):
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
         return pkg_cls(maybe_abstract_spec)

     allpkgs = spack.config.get("packages")
@@ -199,7 +199,7 @@ def is_spec_buildable(spec):
     so_far = all_buildable  # the default "so far"

     def _package(s):
-        pkg_cls = spack.repo.path.get_pkg_class(s.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
         return pkg_cls(s)

     # check whether any providers for this package override the default

@@ -288,9 +288,6 @@ def next_spec(
                 )
                 raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)

-            if root_spec.concrete:
-                raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
-
             root_spec._add_dependency(dependency, deptypes=(), virtuals=())

         else:
@@ -306,13 +303,12 @@ def all_specs(self) -> List[spack.spec.Spec]:
 class SpecNodeParser:
     """Parse a single spec node from a stream of tokens"""

-    __slots__ = "ctx", "has_compiler", "has_version", "has_hash"
+    __slots__ = "ctx", "has_compiler", "has_version"

     def __init__(self, ctx):
         self.ctx = ctx
         self.has_compiler = False
         self.has_version = False
-        self.has_hash = False

     def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:
         """Parse a single spec node from a stream of tokens
@@ -343,7 +339,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac

         while True:
             if self.ctx.accept(TokenType.COMPILER):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 if self.has_compiler:
                     raise spack.spec.DuplicateCompilerSpecError(
                         f"{initial_spec} cannot have multiple compilers"
@@ -353,7 +348,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
                 initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
                 self.has_compiler = True
             elif self.ctx.accept(TokenType.COMPILER_AND_VERSION):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 if self.has_compiler:
                     raise spack.spec.DuplicateCompilerSpecError(
                         f"{initial_spec} cannot have multiple compilers"
@@ -367,7 +361,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
             elif self.ctx.accept(TokenType.VERSION) or self.ctx.accept(
                 TokenType.VERSION_HASH_PAIR
             ):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 if self.has_version:
                     raise spack.spec.MultipleVersionError(
                         f"{initial_spec} cannot have multiple versions"
@@ -378,25 +371,21 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
                     initial_spec.attach_git_version_lookup()
                 self.has_version = True
             elif self.ctx.accept(TokenType.BOOL_VARIANT):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 variant_value = self.ctx.current_token.value[0] == "+"
                 initial_spec._add_flag(
                     self.ctx.current_token.value[1:].strip(), variant_value, propagate=False
                 )
             elif self.ctx.accept(TokenType.PROPAGATED_BOOL_VARIANT):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 variant_value = self.ctx.current_token.value[0:2] == "++"
                 initial_spec._add_flag(
                     self.ctx.current_token.value[2:].strip(), variant_value, propagate=True
                 )
             elif self.ctx.accept(TokenType.KEY_VALUE_PAIR):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 name, value = self.ctx.current_token.value.split("=", maxsplit=1)
                 name = name.strip("'\" ")
                 value = value.strip("'\" ")
                 initial_spec._add_flag(name, value, propagate=False)
             elif self.ctx.accept(TokenType.PROPAGATED_KEY_VALUE_PAIR):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 name, value = self.ctx.current_token.value.split("==", maxsplit=1)
                 name = name.strip("'\" ")
                 value = value.strip("'\" ")
@@ -411,12 +400,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac

         return initial_spec

-    def hash_not_parsed_or_raise(self, spec, addition):
-        if not self.has_hash:
-            return
-
-        raise spack.spec.RedundantSpecError(spec, addition)
-

 class FileParser:
     """Parse a single spec from a JSON or YAML file"""

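Note: the key-value branches above are self-contained string handling, so they are easy to check in isolation. A standalone sketch of what the KEY_VALUE_PAIR branch does to a token; the token text here is made up:

    # Split on the first "=", then strip quotes and spaces from both sides,
    # exactly as the parser branch above does.
    token_value = "cflags='-O2 -g'"
    name, value = token_value.split("=", maxsplit=1)
    name = name.strip("'\" ")
    value = value.strip("'\" ")
    assert (name, value) == ("cflags", "-O2 -g")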
@@ -238,7 +238,7 @@ def to_dict(self):

 def from_dict(dictionary, repository=None):
     """Create a patch from json dictionary."""
-    repository = repository or spack.repo.path
+    repository = repository or spack.repo.PATH
     owner = dictionary.get("owner")
     if "owner" not in dictionary:
         raise ValueError("Invalid patch dictionary: %s" % dictionary)

@@ -10,11 +10,12 @@
 dependencies.
 """
 import os
+from pathlib import PurePath

 import llnl.util.filesystem

 #: This file lives in $prefix/lib/spack/spack/__file__
-prefix = llnl.util.filesystem.ancestor(__file__, 4)
+prefix = str(PurePath(llnl.util.filesystem.ancestor(__file__, 4)))

 #: synonym for prefix
 spack_root = prefix
@@ -88,7 +89,7 @@ def _get_user_cache_path():
     return os.path.expanduser(os.getenv("SPACK_USER_CACHE_PATH") or "~%s.spack" % os.sep)


-user_cache_path = _get_user_cache_path()
+user_cache_path = str(PurePath(_get_user_cache_path()))

 #: junit, cdash, etc. reports about builds
 reports_path = os.path.join(user_cache_path, "reports")
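Note: round-tripping through PurePath normalizes the path string for the host platform: duplicate and trailing separators are collapsed, and on Windows forward slashes become backslashes. A quick stdlib-only illustration (the path is made up):

    from pathlib import PurePath

    # POSIX result: "lib/spack/spack"; Windows result: "lib\\spack\\spack"
    print(str(PurePath("lib//spack/", "spack")))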
@@ -64,7 +64,7 @@ def use_platform(new_platform):
     host = _PickleableCallable(new_platform)

     # Clear configuration and compiler caches
-    spack.config.config.clear_caches()
+    spack.config.CONFIG.clear_caches()
     spack.compilers._cache_config_files = []

     yield new_platform
@@ -73,5 +73,5 @@ def use_platform(new_platform):
         host = original_host_fn

         # Clear configuration and compiler caches
-        spack.config.config.clear_caches()
+        spack.config.CONFIG.clear_caches()
         spack.compilers._cache_config_files = []

@@ -24,7 +24,7 @@
 import traceback
 import types
 import uuid
-from typing import Dict, Union
+from typing import Any, Dict, List, Union

 import llnl.util.filesystem as fs
 import llnl.util.lang
@@ -149,7 +149,7 @@ def compute_loader(self, fullname):

         # If it's a module in some repo, or if it is the repo's
         # namespace, let the repo handle it.
-        for repo in path.repos:
+        for repo in PATH.repos:
             # We are using the namespace of the repo and the repo contains the package
             if namespace == repo.full_namespace:
                 # With 2 nested conditionals we can call "repo.real_name" only once
@@ -163,7 +163,7 @@ def compute_loader(self, fullname):

         # No repo provides the namespace, but it is a valid prefix of
         # something in the RepoPath.
-        if path.by_namespace.is_prefix(fullname):
+        if PATH.by_namespace.is_prefix(fullname):
             return SpackNamespaceLoader()

         return None
@@ -184,9 +184,9 @@ def compute_loader(self, fullname):
 def packages_path():
     """Get the test repo if it is active, otherwise the builtin repo."""
     try:
-        return spack.repo.path.get_repo("builtin.mock").packages_path
+        return spack.repo.PATH.get_repo("builtin.mock").packages_path
     except spack.repo.UnknownNamespaceError:
-        return spack.repo.path.get_repo("builtin").packages_path
+        return spack.repo.PATH.get_repo("builtin").packages_path


 class GitExe:
@@ -282,7 +282,7 @@ def add_package_to_git_stage(packages):
     git = GitExe()

     for pkg_name in packages:
-        filename = spack.repo.path.filename_for_package_name(pkg_name)
+        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
         if not os.path.isfile(filename):
             tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)

@@ -387,7 +387,7 @@ def _create_new_cache(self) -> Dict[str, os.stat_result]:

             # Warn about invalid names that look like packages.
             if not nm.valid_module_name(pkg_name):
-                if not pkg_name.startswith("."):
+                if not pkg_name.startswith(".") and pkg_name != "repo.yaml":
                     tty.warn(
                         'Skipping package at {0}. "{1}" is not '
                         "a valid Spack module name.".format(pkg_dir, pkg_name)
@@ -422,7 +422,7 @@ def _create_new_cache(self) -> Dict[str, os.stat_result]:
     def last_mtime(self):
         return max(sinfo.st_mtime for sinfo in self._packages_to_stats.values())

-    def modified_since(self, since):
+    def modified_since(self, since: float) -> List[str]:
         return [name for name, sinfo in self._packages_to_stats.items() if sinfo.st_mtime > since]

     def __getitem__(self, item):
@@ -548,35 +548,34 @@ class RepoIndex:
     when they're needed.

     ``Indexers`` should be added to the ``RepoIndex`` using
-    ``add_index(name, indexer)``, and they should support the interface
+    ``add_indexer(name, indexer)``, and they should support the interface
     defined by ``Indexer``, so that the ``RepoIndex`` can read, generate,
     and update stored indices.

-    Generated indexes are accessed by name via ``__getitem__()``.
-
-    """
+    Generated indexes are accessed by name via ``__getitem__()``."""

-    def __init__(self, package_checker, namespace, cache):
+    def __init__(
+        self,
+        package_checker: FastPackageChecker,
+        namespace: str,
+        cache: spack.util.file_cache.FileCache,
+    ):
         self.checker = package_checker
         self.packages_path = self.checker.packages_path
         if sys.platform == "win32":
             self.packages_path = spack.util.path.convert_to_posix_path(self.packages_path)
         self.namespace = namespace

-        self.indexers = {}
-        self.indexes = {}
+        self.indexers: Dict[str, Indexer] = {}
+        self.indexes: Dict[str, Any] = {}
         self.cache = cache

-    def add_indexer(self, name, indexer):
+    def add_indexer(self, name: str, indexer: Indexer):
         """Add an indexer to the repo index.

         Arguments:
-            name (str): name of this indexer
-
-            indexer (object): an object that supports create(), read(),
-                write(), and get_index() operations
-
-        """
+            name: name of this indexer
+            indexer: object implementing the ``Indexer`` interface"""
         self.indexers[name] = indexer

     def __getitem__(self, name):
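Note: the docstring above describes the ``Indexer`` contract (create(), read(), write(), get_index(), plus the update() call used when packages change). A minimal sketch of an object satisfying that contract; the class, its dict payload, and the exact stream arguments are illustrative assumptions, not Spack code:

    import json

    class CountingIndexer:
        """Toy indexer that counts how often each package is updated."""

        def __init__(self):
            self.index = {}

        def create(self):
            self.index = {}

        def read(self, stream):
            self.index = json.load(stream)

        def update(self, namespaced_pkg_name):
            self.index[namespaced_pkg_name] = self.index.get(namespaced_pkg_name, 0) + 1

        def write(self, stream):
            json.dump(self.index, stream)

        def get_index(self):
            return self.index

    # Hypothetical registration on an existing RepoIndex instance:
    # repo_index.add_indexer("counts", CountingIndexer())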
@@ -597,17 +596,15 @@ def _build_all_indexes(self):
         because the main bottleneck here is loading all the packages. It
         can take tens of seconds to regenerate sequentially, and we'd
         rather only pay that cost once rather than on several
-        invocations.
-
-        """
+        invocations."""
         for name, indexer in self.indexers.items():
             self.indexes[name] = self._build_index(name, indexer)

-    def _build_index(self, name, indexer):
+    def _build_index(self, name: str, indexer: Indexer):
         """Determine which packages need an update, and update indexes."""

         # Filename of the provider index cache (we assume they're all json)
-        cache_filename = "{0}/{1}-index.json".format(name, self.namespace)
+        cache_filename = f"{name}/{self.namespace}-index.json"

         # Compute which packages needs to be updated in the cache
         index_mtime = self.cache.mtime(cache_filename)
@@ -631,8 +628,7 @@ def _build_index(self, name, indexer):
             needs_update = self.checker.modified_since(new_index_mtime)

             for pkg_name in needs_update:
-                namespaced_name = "%s.%s" % (self.namespace, pkg_name)
-                indexer.update(namespaced_name)
+                indexer.update(f"{self.namespace}.{pkg_name}")

             indexer.write(new)

@@ -651,7 +647,7 @@ class RepoPath:
     """

     def __init__(self, *repos, **kwargs):
-        cache = kwargs.get("cache", spack.caches.misc_cache)
+        cache = kwargs.get("cache", spack.caches.MISC_CACHE)
         self.repos = []
         self.by_namespace = nm.NamespaceTrie()

@@ -970,7 +966,7 @@ def check(condition, msg):

         # Indexes for this repository, computed lazily
         self._repo_index = None
-        self._cache = cache or spack.caches.misc_cache
+        self._cache = cache or spack.caches.MISC_CACHE

     def real_name(self, import_name):
         """Allow users to import Spack packages using Python identifiers.
@@ -1361,7 +1357,7 @@ def create_or_construct(path, namespace=None):

 def _path(configuration=None):
     """Get the singleton RepoPath instance for Spack."""
-    configuration = configuration or spack.config.config
+    configuration = configuration or spack.config.CONFIG
     return create(configuration=configuration)


@@ -1378,7 +1374,7 @@ def create(configuration):


 #: Singleton repo path instance
-path: Union[RepoPath, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_path)
+PATH: Union[RepoPath, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_path)

 # Add the finder to sys.meta_path
 REPOS_FINDER = ReposFinder()
@@ -1387,7 +1383,7 @@ def create(configuration):

 def all_package_names(include_virtuals=False):
     """Convenience wrapper around ``spack.repo.all_package_names()``."""
-    return path.all_package_names(include_virtuals)
+    return PATH.all_package_names(include_virtuals)


 @contextlib.contextmanager
|
||||
Returns:
|
||||
Corresponding RepoPath object
|
||||
"""
|
||||
global path
|
||||
global PATH
|
||||
# TODO (Python 2.7): remove this kwargs on deprecation of Python 2.7 support
|
||||
override = kwargs.get("override", True)
|
||||
paths = [getattr(x, "root", x) for x in paths_and_repos]
|
||||
scope_name = "use-repo-{}".format(uuid.uuid4())
|
||||
repos_key = "repos:" if override else "repos"
|
||||
spack.config.config.push_scope(
|
||||
spack.config.CONFIG.push_scope(
|
||||
spack.config.InternalConfigScope(name=scope_name, data={repos_key: paths})
|
||||
)
|
||||
path, saved = create(configuration=spack.config.config), path
|
||||
PATH, saved = create(configuration=spack.config.CONFIG), PATH
|
||||
try:
|
||||
yield path
|
||||
yield PATH
|
||||
finally:
|
||||
spack.config.config.remove_scope(scope_name=scope_name)
|
||||
path = saved
|
||||
spack.config.CONFIG.remove_scope(scope_name=scope_name)
|
||||
PATH = saved
|
||||
|
||||
|
||||
class MockRepositoryBuilder:
|
||||
|
||||
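Note: ``use_repositories`` swaps the global PATH singleton for the duration of the block and restores it afterwards. A sketch of typical use, with a placeholder repository root:

    import spack.repo

    with spack.repo.use_repositories("/path/to/extra/repo") as repo_path:
        # repo_path is the temporary RepoPath; the previous global PATH
        # is restored when the block exits.
        print(repo_path.all_package_names())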
@@ -6,7 +6,7 @@
 """Schema for a buildcache spec.yaml file

 .. literalinclude:: _spack_root/lib/spack/spack/schema/buildcache_spec.py
-   :lines: 14-
+   :lines: 13-
 """
 import spack.schema.spec

@@ -16,15 +16,8 @@
     "type": "object",
     "additionalProperties": False,
     "properties": {
-        "buildinfo": {
-            "type": "object",
-            "additionalProperties": False,
-            "required": ["relative_prefix"],
-            "properties": {
-                "relative_prefix": {"type": "string"},
-                "relative_rpaths": {"type": "boolean"},
-            },
-        },
+        # `buildinfo` is no longer needed as of Spack 0.21
+        "buildinfo": {"type": "object"},
         "spec": {
             "type": "object",
             "additionalProperties": True,
@@ -28,6 +28,12 @@
             "unify": {
                 "oneOf": [{"type": "boolean"}, {"type": "string", "enum": ["when_possible"]}]
             },
+            "duplicates": {
+                "type": "object",
+                "properties": {
+                    "strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
+                },
+            },
         },
     }
 }
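Note: the new ``duplicates`` property accepts a ``strategy`` of "none", "minimal", or "full". A sketch of validating a config fragment against it, assuming this hunk is from spack/schema/concretizer.py and that the module exposes a top-level ``schema`` dict like other Spack schema modules do:

    import jsonschema  # third-party validator, used here only for illustration
    import spack.schema.concretizer

    data = {"concretizer": {"duplicates": {"strategy": "minimal"}}}
    jsonschema.validate(data, spack.schema.concretizer.schema)  # raises on bad values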
(Two file diffs were suppressed because they are too large, and some files are not shown because too many files changed in this compare.)