Compare commits: develop-20...develop-20

385 commits
| Author | SHA1 | Date |
|---|---|---|
| | d367b4285a | |
| | 260e735425 | |
| | ca872f9c34 | |
| | b72a268bc5 | |
| | 818195a3bd | |
| | 679d41ea66 | |
| | 86216cc36e | |
| | ecb7ad493f | |
| | fb1e81657c | |
| | 34e4c62e8c | |
| | acb02326aa | |
| | c1756257c2 | |
| | 1ee7c735ec | |
| | 22deed708e | |
| | 6693dc5eb8 | |
| | 396f219011 | |
| | a3ecd7efed | |
| | ae5511afd6 | |
| | 78fe2c63fa | |
| | f4f396745e | |
| | f3c6d892b1 | |
| | 2f5988cec7 | |
| | 44922f734d | |
| | 144e657c58 | |
| | 6f48fe2b6f | |
| | fcd03adc02 | |
| | 0620b954be | |
| | 6174b829f7 | |
| | 0d4b1c6a73 | |
| | fb9797bd67 | |
| | 4eee3c12c1 | |
| | 3e5f9a2138 | |
| | 8295a45999 | |
| | 5138c71d34 | |
| | eef9939c21 | |
| | ffddaabaa0 | |
| | f664d1edaa | |
| | 6d5d1562bd | |
| | 70c71e8f93 | |
| | d9d1eb24f9 | |
| | cef59ad0bf | |
| | a1e117a98b | |
| | cb855d5ffd | |
| | 3866ff0096 | |
| | 6dc167e43d | |
| | 0fd085be8e | |
| | 74fba221f1 | |
| | deeeb86067 | |
| | 98daf5b7ec | |
| | 8a3d98b632 | |
| | 0cc945b367 | |
| | e732155e8c | |
| | c07fb833a9 | |
| | 7d566b481f | |
| | a72e5e762e | |
| | 0eb22ef770 | |
| | 95f78440f1 | |
| | 74a51aba50 | |
| | b370ecfbda | |
| | 04d55b7600 | |
| | d695438851 | |
| | f0447d63ad | |
| | e8a7a04f14 | |
| | 23316f0352 | |
| | b3433cb872 | |
| | 349ba83bc6 | |
| | ecfd9ef12b | |
| | 4502351659 | |
| | 8a08f09ac0 | |
| | 60ecd0374e | |
| | 52ccee79d8 | |
| | 7f0f1b63d6 | |
| | b65f1f22ec | |
| | e9efa1df75 | |
| | 884a5b8b07 | |
| | 91d674f5d0 | |
| | 76fbb8cd8f | |
| | 0f3f2a8024 | |
| | 5a5f774369 | |
| | f5212ae139 | |
| | 4b618704bf | |
| | 46285d9725 | |
| | 36852fe348 | |
| | 8914d26867 | |
| | fdea5e7624 | |
| | ca1e4d54b5 | |
| | 656528bbbb | |
| | 4d42e9d1f3 | |
| | d058c1d649 | |
| | 43854fc2ec | |
| | 6a2149df6e | |
| | af38d097ac | |
| | e67dca73d1 | |
| | 2e6ed1e707 | |
| | 53d2ffaf83 | |
| | a95e061fed | |
| | e01b9b38ef | |
| | eac15badd3 | |
| | 806b8aa966 | |
| | 9e5ca525f7 | |
| | 5ea4322f88 | |
| | 4ca2d8bc19 | |
| | e0059ef961 | |
| | 7d9fad9576 | |
| | 553277a84f | |
| | 00a3ebd0bb | |
| | ffc9060e11 | |
| | 31d5f56913 | |
| | bfdebae831 | |
| | aa83fa44e1 | |
| | e56291dd45 | |
| | 2f52545214 | |
| | 5090023e3a | |
| | d355880110 | |
| | 1a0434b808 | |
| | c3eec8a36f | |
| | 25b8cf93d2 | |
| | 34ff7605e6 | |
| | e026fd3613 | |
| | 3f5f4cfe26 | |
| | 74fe9ccef3 | |
| | fd5a8b2075 | |
| | 33793445cf | |
| | f4a144c8ac | |
| | 6c439ec022 | |
| | 209409189a | |
| | ff900566e0 | |
| | a954a0bb9f | |
| | c21e00f504 | |
| | 9ae1317e79 | |
| | 9f1a30d3b5 | |
| | 1340995249 | |
| | afebc11742 | |
| | 34e9fc612c | |
| | 1d8ff7f742 | |
| | 0e27f05611 | |
| | 19aaa97ff2 | |
| | 990309355f | |
| | 2cb66e6e44 | |
| | cfaade098a | |
| | ed65532e27 | |
| | 696d4a1b85 | |
| | 8def75b414 | |
| | 5389db821d | |
| | 0d5ae3a809 | |
| | b61ad8d2a8 | |
| | b35db020eb | |
| | ca1d15101e | |
| | c9ec5fb9ac | |
| | 71abb8c7f0 | |
| | 4dafae8d17 | |
| | b2b00df5cc | |
| | 114e5d4767 | |
| | fd70e7fb31 | |
| | 77760c8ea4 | |
| | 737a6dcc73 | |
| | 3826fe3765 | |
| | edb11941b2 | |
| | 1bd58a8026 | |
| | f8e0c8caed | |
| | d0412c1578 | |
| | ec500adb50 | |
| | 30f5c74614 | |
| | 713eb210ac | |
| | a022e45866 | |
| | 82685a68d9 | |
| | b19691d503 | |
| | 54ea860b37 | |
| | fb598baa53 | |
| | 02763e967a | |
| | 2846be315b | |
| | 4818b75814 | |
| | b613bf3855 | |
| | 3347372a7b | |
| | c417a77a19 | |
| | 90d0d0176c | |
| | 72b9f89504 | |
| | a89f1b1bf4 | |
| | c6e26251a1 | |
| | 190a1bf523 | |
| | e381e166ec | |
| | 2f145b2684 | |
| | 4c7748e954 | |
| | 86485dea14 | |
| | 00f8f5898a | |
| | f41d7a89f3 | |
| | 4f07205c63 | |
| | 08f9c7670e | |
| | b451791336 | |
| | 47f176d635 | |
| | b6ae751657 | |
| | 9bb5cffc73 | |
| | 135b44ca59 | |
| | d3aca68e8f | |
| | fb83f8ef31 | |
| | f69c18a922 | |
| | b95a9d2e47 | |
| | def4d19980 | |
| | 1db91e0ccd | |
| | 34ebe7f53c | |
| | d07d5410f3 | |
| | 1db73eb1f2 | |
| | 2da34de519 | |
| | d237430f47 | |
| | 3f0adae9ef | |
| | 3b4d7bf119 | |
| | b3087b32c6 | |
| | ad9c90cb2e | |
| | 1b0e113a9d | |
| | 6df5738482 | |
| | 927d831612 | |
| | 3f3c75e56a | |
| | 9733bb3da8 | |
| | 1de5117ef1 | |
| | cf8f44ae5a | |
| | 006e69265e | |
| | eaec3062a1 | |
| | d5eb5106b0 | |
| | 9f8edbf6bf | |
| | a4301badef | |
| | 4565811556 | |
| | b94d54e4d9 | |
| | a410b22098 | |
| | c1a73878ea | |
| | ae553051c8 | |
| | b94e22b284 | |
| | e25dcf73cd | |
| | b7cc4bd247 | |
| | 22c95923e3 | |
| | c050b99a06 | |
| | 60f82685ae | |
| | 27ab53b68a | |
| | 907a80ca71 | |
| | a53cc93016 | |
| | 6ad0dc3722 | |
| | 87d4bdaa02 | |
| | 36394aab2f | |
| | 358947fc03 | |
| | 477a3c0ef6 | |
| | c6c5e11353 | |
| | 29e2997bd5 | |
| | 41bd6a75d5 | |
| | 0976ad3184 | |
| | fc1d9ba550 | |
| | 61f0088a27 | |
| | c202a045e6 | |
| | 843e1e80f0 | |
| | 643c028308 | |
| | d823037c40 | |
| | 4d945be955 | |
| | a4ac3f2767 | |
| | 6e31676b29 | |
| | 1fff0241f2 | |
| | a2a52dfb21 | |
| | f0ed159a1b | |
| | 9bf7fa0067 | |
| | fbaea0336e | |
| | 1673d3e322 | |
| | c7cca3aa8d | |
| | da46b63a34 | |
| | c882214273 | |
| | 2bacab0402 | |
| | 0681d9a157 | |
| | 887847610e | |
| | 282a01ef76 | |
| | 151c551781 | |
| | abbd1abc1a | |
| | 49c505cc14 | |
| | 237a56a305 | |
| | 7e7e6c2797 | |
| | e67c61aac0 | |
| | 1b1ed1b1fa | |
| | ec0e51316b | |
| | 533821e46f | |
| | 6c5d125cb0 | |
| | 668fb1201f | |
| | f7918fd8ab | |
| | fc1996e0fa | |
| | ed3aaafd73 | |
| | 63bb2c9bad | |
| | a67455707a | |
| | 09ca71dbe0 | |
| | ea082539e4 | |
| | 143146f4f3 | |
| | ee6ae402aa | |
| | 0b26b26821 | |
| | c764f9b1ab | |
| | db19d83ea7 | |
| | 24256be6d6 | |
| | 633723236e | |
| | 381f31e69e | |
| | 9438cac219 | |
| | 85cf66f650 | |
| | f3c080e546 | |
| | 37634f8b08 | |
| | 2ae8bbce9e | |
| | b8bfaf65bf | |
| | 7968cb7fa2 | |
| | ebc2efdfd2 | |
| | ff07fd5ccb | |
| | 3f83ef6566 | |
| | 554ce7f063 | |
| | 23963779f4 | |
| | 45c5af10c3 | |
| | 532a37e7ba | |
| | aeb9a92845 | |
| | a3c7ad7669 | |
| | b99288dcae | |
| | 01b7cc5106 | |
| | f5888d8127 | |
| | 77c838ca93 | |
| | 11e538d962 | |
| | 7d444038ee | |
| | c24471834b | |
| | b1e33ae37b | |
| | c36617f9da | |
| | deadb64206 | |
| | 9eaa88e467 | |
| | bd58801415 | |
| | 548a9de671 | |
| | 8e7c53a8ba | |
| | 5e630174a1 | |
| | 175a65dfba | |
| | 39d4c402d5 | |
| | e51748ee8f | |
| | f9457fa80b | |
| | 4cc2ca3e2e | |
| | 3843001004 | |
| | e24bb5dd1c | |
| | f6013114eb | |
| | bdca875eb3 | |
| | af8c392de2 | |
| | 9aa3b4619b | |
| | 3d733da70a | |
| | cda99b792c | |
| | 9834bad82e | |
| | 3453259c98 | |
| | ee243b84eb | |
| | 5080e2cb45 | |
| | f42ef7aea7 | |
| | 41793673d9 | |
| | 8b6a6982ee | |
| | ee74ca6391 | |
| | 7165e70186 | |
| | 97d632a161 | |
| | 571919992d | |
| | 99112ad2ad | |
| | 75c70c395d | |
| | 960bdfe612 | |
| | 97892bda18 | |
| | cead6ef98d | |
| | 5d70c0f100 | |
| | 361632fc4b | |
| | 6576655137 | |
| | feb26efecd | |
| | 4752d1cde3 | |
| | a07afa6e1a | |
| | 7327d2913a | |
| | 8f8a1f7f52 | |
| | eb8d836e76 | |
| | bad8495e16 | |
| | 43de7f4881 | |
| | 84585ac575 | |
| | 86f9d3865b | |
| | 834e7b2b0a | |
| | c14f23ddaa | |
| | 49f3681a12 | |
| | 19e1d10cdf | |
| | 7caf2a512d | |
| | 1f6e3cc8cb | |
| | 169c4245e0 | |
| | ee1982010f | |
| | dd396c4a76 | |
| | 235802013d | |
| | e1f07e98ae | |
| | 60aee6f535 | |
| | 2510dc9e6e | |
| | 5607dd259b | |
| | 7bd5d1fd3c | |
| | f6104cc3cb | |
| | b54d286b4a | |
| | ea9c488897 | |
| | ba1d295023 | |
| | 27f04b3544 | |
| | 8cd9497522 | |
**.github/workflows/audit.yaml** (vendored, 2 changes)

```diff
@@ -22,7 +22,7 @@ jobs:
       matrix:
         operating_system: ["ubuntu-latest", "macos-latest"]
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
         with:
           python-version: ${{inputs.python_version}}
```
**.github/workflows/bootstrap.yml** (vendored, 22 changes)

```diff
@@ -24,7 +24,7 @@ jobs:
             make patch unzip which xz python3 python3-devel tree \
             cmake bison bison-devel libstdc++-static
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
             make patch unzip xz-utils python3 python3-dev tree \
             cmake bison
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
             bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
             make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
             make patch unzip which xz python3 python3-devel tree \
             cmake bison
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
         run: |
           brew install cmake bison@2.7 tree
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
         run: |
           brew install tree
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
       - name: Bootstrap clingo
         run: |
           set -ex
@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -247,7 +247,7 @@ jobs:
             bzip2 curl file g++ gcc patchelf gfortran git gzip \
             make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
             make patch unzip xz-utils python3 python3-dev tree \
             gawk
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
```
**.github/workflows/build-containers.yml** (vendored, 4 changes)

```diff
@@ -56,7 +56,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+        uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2

       - name: Set Container Tag Normal (Nightly)
         run: |
@@ -95,7 +95,7 @@ jobs:
         uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@4c0219f9ac95b02789c1075625400b2acbff50b1 # @v1
+        uses: docker/setup-buildx-action@885d1462b80bc1c1c7f0b00334ad271f09369c55 # @v1

       - name: Log in to GitHub Container Registry
         uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
```
**.github/workflows/ci.yaml** (vendored, 2 changes)

```diff
@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
```
**.github/workflows/nightly-win-builds.yml** (vendored, 2 changes)

```diff
@@ -14,7 +14,7 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
        with:
          fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
```
**.github/workflows/unit_tests.yaml** (vendored, 11 changes)

```diff
@@ -47,7 +47,7 @@ jobs:
         on_develop: false

     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
        with:
          fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -94,7 +94,7 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
        with:
          fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -133,7 +133,7 @@ jobs:
           dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
       - name: Setup repo and non-root user
         run: |
           git --version
@@ -152,7 +152,7 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
        with:
          fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -165,6 +165,7 @@ jobs:
       - name: Install Python packages
         run: |
           pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
+          pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
       - name: Setup git configuration
         run: |
           # Need this for the git tests to succeed.
@@ -186,7 +187,7 @@ jobs:
       matrix:
         python-version: ["3.10"]
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
        with:
          fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
```
**.github/workflows/valid-style.yml** (vendored, 6 changes)

```diff
@@ -18,7 +18,7 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
        with:
          python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
        with:
          fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
@@ -68,7 +68,7 @@ jobs:
           dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744 # @v2
       - name: Setup repo and non-root user
         run: |
           git --version
```
**.github/workflows/windows_python.yml** (vendored, 6 changes)

```diff
@@ -15,7 +15,7 @@ jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
        with:
          fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -39,7 +39,7 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
        with:
          fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
@@ -63,7 +63,7 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      - uses: actions/checkout@f43a0e5ff2bd294095638e18286ca9a3d1956744
        with:
          fetch-depth: 0
       - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
```
**SECURITY.md** (32 changes)

```diff
@@ -2,24 +2,26 @@

 ## Supported Versions

-We provide security updates for the following releases.
+We provide security updates for `develop` and for the last two
+stable (`0.x`) release series of Spack. Security updates will be
+made available as patch (`0.x.1`, `0.x.2`, etc.) releases.

 For more on Spack's release structure, see
 [`README.md`](https://github.com/spack/spack#releases).

-| Version | Supported |
-| ------- | ------------------ |
-| develop | :white_check_mark: |
-| 0.19.x  | :white_check_mark: |
-| 0.18.x  | :white_check_mark: |
-
 ## Reporting a Vulnerability

-To report a vulnerability or other security
-issue, email maintainers@spack.io.
+You can report a vulnerability using GitHub's private reporting
+feature:

-You can expect to hear back within two days.
-If your security issue is accepted, we will do
-our best to release a fix within a week. If
-fixing the issue will take longer than this,
-we will discuss timeline options with you.
+1. Go to [github.com/spack/spack/security](https://github.com/spack/spack/security).
+2. Click "Report a vulnerability" in the upper right corner of that page.
+3. Fill out the form and submit your draft security advisory.
+
+More details are available in
+[GitHub's docs](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability).
+
+You can expect to hear back about security issues within two days.
+If your security issue is accepted, we will do our best to release
+a fix within a week. If fixing the issue will take longer than
+this, we will discuss timeline options with you.
```
```diff
@@ -51,65 +51,43 @@ setlocal enabledelayedexpansion
 :: subcommands will never start with '-'
 :: everything after the subcommand is an arg

-:: we cannot allow batch "for" loop to directly process CL args
-:: a number of batch reserved characters are commonly passed to
-:: spack and allowing batch's "for" method to process the raw inputs
-:: results in a large number of formatting issues
-:: instead, treat the entire CLI as one string
-:: and split by space manually
-:: capture cl args in variable named cl_args
-set cl_args=%*
-
 :process_cl_args
-rem tokens=1* returns the first processed token produced
-rem by tokenizing the input string cl_args on spaces into
-rem the named variable %%g
-rem While this make look like a for loop, it only
-rem executes a single time for each of the cl args
-rem the actual iterative loop is performed by the
-rem goto process_cl_args stanza
-rem we are simply leveraging the "for" method's string
-rem tokenization
-for /f "tokens=1*" %%g in ("%cl_args%") do (
-    set t=%%~g
-    rem remainder of string is composed into %%h
-    rem these are the cl args yet to be processed
-    rem assign cl_args var to only the args to be processed
-    rem effectively discarding the current arg %%g
-    rem this will be nul when we have no further tokens to process
-    set cl_args=%%h
-    rem process the first space delineated cl arg
-    rem of this iteration
-    if "!t:~0,1!" == "-" (
-        if defined _sp_subcommand (
-            rem We already have a subcommand, processing args now
-            if not defined _sp_args (
-                set "_sp_args=!t!"
-            ) else (
-                set "_sp_args=!_sp_args! !t!"
-            )
-        ) else (
-            if not defined _sp_flags (
-                set "_sp_flags=!t!"
-                shift
-            ) else (
-                set "_sp_flags=!_sp_flags! !t!"
-                shift
-            )
-        )
-    ) else if not defined _sp_subcommand (
-        set "_sp_subcommand=!t!"
-        shift
-    ) else (
-        if not defined _sp_args (
-            set "_sp_args=!t!"
-            shift
-        ) else (
-            set "_sp_args=!_sp_args! !t!"
-            shift
-        )
-    )
-)
-rem if this is not nil, we have more tokens to process
+rem Set first cl argument (denoted by %1) to be processed
+set t=%1
+rem shift moves all cl positional arguments left by one
+rem meaning %2 is now %1, this allows us to iterate over each
+rem argument
+shift
+rem assign next "first" cl argument to cl_args, will be null when
+rem there are now further arguments to process
+set cl_args=%1
+if "!t:~0,1!" == "-" (
+    if defined _sp_subcommand (
+        rem We already have a subcommand, processing args now
+        if not defined _sp_args (
+            set "_sp_args=!t!"
+        ) else (
+            set "_sp_args=!_sp_args! !t!"
+        )
+    ) else (
+        if not defined _sp_flags (
+            set "_sp_flags=!t!"
+        ) else (
+            set "_sp_flags=!_sp_flags! !t!"
+        )
+    )
+) else if not defined _sp_subcommand (
+    set "_sp_subcommand=!t!"
+) else (
+    if not defined _sp_args (
+        set "_sp_args=!t!"
+    ) else (
+        set "_sp_args=!_sp_args! !t!"
+    )
+)
+
+rem if this is not nu;ll, we have more tokens to process
 rem start above process again with remaining unprocessed cl args
 if defined cl_args goto :process_cl_args
```
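The partitioning rule this batch code implements (leading `-` tokens before the subcommand are global flags, the first non-flag token is the subcommand, everything after that is a subcommand argument) is easier to follow outside batch syntax. A minimal Python sketch of the same rule, for illustration only; the function name is hypothetical and not part of Spack:

```python
# Sketch of the arg-partitioning rule from the batch script above:
# '-' tokens seen before the subcommand are global flags, the first
# non-flag token becomes the subcommand, and all later tokens (flag
# or not) are subcommand arguments. Illustrative only.
from typing import List, Optional, Tuple


def partition_args(argv: List[str]) -> Tuple[List[str], Optional[str], List[str]]:
    flags: List[str] = []              # corresponds to _sp_flags
    subcommand: Optional[str] = None   # corresponds to _sp_subcommand
    args: List[str] = []               # corresponds to _sp_args
    for token in argv:
        if token.startswith("-"):
            # After the subcommand, '-' tokens are its args, not global flags.
            (args if subcommand else flags).append(token)
        elif subcommand is None:
            subcommand = token
        else:
            args.append(token)
    return flags, subcommand, args


print(partition_args(["-d", "install", "--verbose", "zlib"]))
# (['-d'], 'install', ['--verbose', 'zlib'])
```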
```diff
@@ -39,6 +39,20 @@ function Read-SpackArgs {
     return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
 }

+function Set-SpackEnv {
+    # This method is responsible
+    # for processing the return from $(spack <command>)
+    # which are returned as System.Object[]'s containing
+    # a list of env commands
+    # Invoke-Expression can only handle one command at a time
+    # so we iterate over the list to invoke the env modification
+    # expressions one at a time
+    foreach($envop in $args[0]){
+        Invoke-Expression $envop
+    }
+}
+
+
 function Invoke-SpackCD {
     if (Compare-CommonArgs $SpackSubCommandArgs) {
         python $Env:SPACK_ROOT/bin/spack cd -h
@@ -79,7 +93,7 @@ function Invoke-SpackEnv {
         }
         else {
             $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
-            $ExecutionContext.InvokeCommand($SpackEnv)
+            Set-SpackEnv $SpackEnv
         }
     }
     "deactivate" {
@@ -90,8 +104,8 @@ function Invoke-SpackEnv {
             python $Env:SPACK_ROOT/bin/spack env deactivate -h
         }
         else {
-            $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env deactivate --pwsh)
-            $ExecutionContext.InvokeCommand($SpackEnv)
+            $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env deactivate "--pwsh")
+            Set-SpackEnv $SpackEnv
         }
     }
     default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
@@ -107,8 +121,9 @@ function Invoke-SpackLoad {
         python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
     }
     else {
+        # python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs
         $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
-        $ExecutionContext.InvokeCommand($SpackEnv)
+        Set-SpackEnv $SpackEnv
     }
 }
```
```diff
@@ -36,3 +36,9 @@ concretizer:
   # on each root spec, allowing different versions and variants of the same package in
   # an environment.
   unify: true
+  # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
+  duplicates:
+    # "none": allows a single node for any package in the DAG.
+    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
+    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
+    strategy: none
```
```diff
@@ -60,6 +60,7 @@ packages:
     xxd: [xxd-standalone, vim]
     yacc: [bison, byacc]
     ziglang: [zig]
+    zlib-api: [zlib-ng+compat, zlib]
   permissions:
     read: world
     write: user
```
```diff
@@ -32,9 +32,14 @@ can't be found. You can readily check if any prerequisite for using Spack is missing

 Spack will take care of bootstrapping any missing dependency marked as [B]. Dependencies marked as [-] are instead required to be found on the system.

+   % echo $?
+   1
+
 In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
 are missing and it's giving detailed information on why they are needed and whether
-they can be bootstrapped. Running a command that concretize a spec, like:
+they can be bootstrapped. The return code of this command summarizes the results, if any
+dependencies are missing the return code is ``1``, otherwise ``0``. Running a command that
+concretizes a spec, like:

 .. code-block:: console
@@ -44,7 +49,7 @@ they can be bootstrapped. Running a command that concretize a spec, like:
    ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
    [ ... ]

-triggers the bootstrapping of clingo from pre-built binaries as expected.
+automatically triggers the bootstrapping of clingo from pre-built binaries as expected.

 Users can also bootstrap all the dependencies needed by Spack in a single command, which
 might be useful to setup containers or other similar environments:
```
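The added paragraph documents an exit-code contract for the bootstrap status check, which makes it scriptable. A minimal sketch, assuming only that `spack` is on `PATH`:

```python
# Scripting against the documented exit-code contract of
# `spack bootstrap status`: returncode 1 when dependencies are
# missing, 0 otherwise. Sketch only; assumes `spack` is on PATH.
import subprocess

result = subprocess.run(
    ["spack", "bootstrap", "status"], capture_output=True, text=True
)
if result.returncode == 1:
    print("missing bootstrap dependencies:")
    print(result.stdout)
else:
    print("all bootstrap dependencies are ready")
```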
```diff
@@ -104,11 +104,13 @@ Clone `spack-configs <https://github.com/spack/spack-configs>`_ repo and activate

 `Intel oneAPI CPU environment <https://github.com/spack/spack-configs/blob/main/INTEL/CPU/spack.yaml>`_ contains applications tested and validated by Intel, this list is constantly extended. And currently it supports:

 - `Devito <https://www.devitoproject.org/>`_
 - `GROMACS <https://www.gromacs.org/>`_
 - `HPCG <https://www.hpcg-benchmark.org/>`_
 - `HPL <https://netlib.org/benchmark/hpl/>`_
 - `LAMMPS <https://www.lammps.org/#gsc.tab=0>`_
 - `OpenFOAM <https://www.openfoam.com/>`_
 - `Quantum Espresso <https://www.quantum-espresso.org/>`_
 - `STREAM <https://www.cs.virginia.edu/stream/>`_
 - `WRF <https://github.com/wrf-model/WRF>`_
```
**lib/spack/docs/gpu_configuration.rst** (new file, 113 lines added)

```rst
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

==========================
Using External GPU Support
==========================

Many packages come with a ``+cuda`` or ``+rocm`` variant. With no added
configuration Spack will download and install the needed components.
It may be preferable to use existing system support: the following sections
help with using a system installation of GPU libraries.

-----------------------------------
Using an External ROCm Installation
-----------------------------------

Spack breaks down ROCm into many separate component packages. The following
is an example ``packages.yaml`` that organizes a consistent set of ROCm
components for use by dependent packages:

.. code-block:: yaml

   packages:
     all:
       compiler: [rocmcc@=5.3.0]
       variants: amdgpu_target=gfx90a
     hip:
       buildable: false
       externals:
       - spec: hip@5.3.0
         prefix: /opt/rocm-5.3.0/hip
     hsa-rocr-dev:
       buildable: false
       externals:
       - spec: hsa-rocr-dev@5.3.0
         prefix: /opt/rocm-5.3.0/
     llvm-amdgpu:
       buildable: false
       externals:
       - spec: llvm-amdgpu@5.3.0
         prefix: /opt/rocm-5.3.0/llvm/
     comgr:
       buildable: false
       externals:
       - spec: comgr@5.3.0
         prefix: /opt/rocm-5.3.0/
     hipsparse:
       buildable: false
       externals:
       - spec: hipsparse@5.3.0
         prefix: /opt/rocm-5.3.0/
     hipblas:
       buildable: false
       externals:
       - spec: hipblas@5.3.0
         prefix: /opt/rocm-5.3.0/
     rocblas:
       buildable: false
       externals:
       - spec: rocblas@5.3.0
         prefix: /opt/rocm-5.3.0/
     rocprim:
       buildable: false
       externals:
       - spec: rocprim@5.3.0
         prefix: /opt/rocm-5.3.0/rocprim/

This is in combination with the following compiler definition:

.. code-block:: yaml

   compilers:
   - compiler:
       spec: rocmcc@=5.3.0
       paths:
         cc: /opt/rocm-5.3.0/bin/amdclang
         cxx: /opt/rocm-5.3.0/bin/amdclang++
         f77: null
         fc: /opt/rocm-5.3.0/bin/amdflang
       operating_system: rhel8
       target: x86_64

This includes the following considerations:

- Each of the listed externals specifies ``buildable: false`` to force Spack
  to use only the externals we defined.
- ``spack external find`` can automatically locate some of the ``hip``/``rocm``
  packages, but not all of them, and furthermore not in a manner that
  guarantees a complementary set if multiple ROCm installations are available.
- The ``prefix`` is the same for several components, but note that others
  require listing one of the subdirectories as a prefix.

-----------------------------------
Using an External CUDA Installation
-----------------------------------

CUDA is split into fewer components and is simpler to specify:

.. code-block:: yaml

   packages:
     all:
       variants:
       - cuda_arch=70
     cuda:
       buildable: false
       externals:
       - spec: cuda@11.0.2
         prefix: /opt/cuda/cuda-11.0.2/

where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.
```
```diff
@@ -77,6 +77,7 @@ or refer to the full manual below.
    extensions
    pipelines
    signing
+   gpu_configuration

 .. toctree::
    :maxdepth: 2
```
```diff
@@ -2243,7 +2243,7 @@ looks like this:
     url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"

     version("1.0.1h", md5="8d6d684a9430d5cc98a62a5d8fbda8cf")
-    depends_on("zlib")
+    depends_on("zlib-api")

     parallel = False
```
```diff
@@ -4773,17 +4773,17 @@ For example, running:

 results in spack checking that the installation created the following **file**:

-* ``self.prefix/bin/reframe``
+* ``self.prefix.bin.reframe``

 and the following **directories**:

-* ``self.prefix/bin``
-* ``self.prefix/config``
-* ``self.prefix/docs``
-* ``self.prefix/reframe``
-* ``self.prefix/tutorials``
-* ``self.prefix/unittests``
-* ``self.prefix/cscs-checks``
+* ``self.prefix.bin``
+* ``self.prefix.config``
+* ``self.prefix.docs``
+* ``self.prefix.reframe``
+* ``self.prefix.tutorials``
+* ``self.prefix.unittests``
+* ``self.prefix.cscs-checks``

 If **any** of these paths are missing, then Spack considers the installation
 to have failed.
```
```diff
@@ -4927,7 +4927,7 @@ installed executable. The check is implemented as follows:

    @on_package_attributes(run_tests=True)
    def check_list(self):
        with working_dir(self.stage.source_path):
-           reframe = Executable(join_path(self.prefix, "bin", "reframe"))
+           reframe = Executable(self.prefix.bin.reframe)
           reframe("-l")

 .. warning::
```
```diff
@@ -5147,8 +5147,8 @@ embedded test parts.
        for example in ["ex1", "ex2"]:
            with test_part(
                self,
-               "test_example_{0}".format(example),
-               purpose="run installed {0}".format(example),
+               f"test_example_{example}",
+               purpose=f"run installed {example}",
            ):
                exe = which(join_path(self.prefix.bin, example))
                exe()
```
```diff
@@ -5226,11 +5226,10 @@ Below illustrates using this feature to compile an example.
        ...
        cxx = which(os.environ["CXX"])
        cxx(
-           "-L{0}".format(self.prefix.lib),
-           "-I{0}".format(self.prefix.include),
-           "{0}.cpp".format(exe),
-           "-o",
-           exe
+           f"-L{self.prefix.lib}",
+           f"-I{self.prefix.include}",
+           f"{exe}.cpp",
+           "-o", exe
        )
        cxx_example = which(exe)
        cxx_example()
```
```diff
@@ -5254,7 +5253,7 @@ Saving build-time files
 will be important to maintain them so they work across listed or supported
 versions of the package.

-You can use the ``cache_extra_test_sources`` method to copy directories
+You can use the ``cache_extra_test_sources`` helper to copy directories
 and or files from the source build stage directory to the package's
 installation directory.
```
```diff
@@ -5262,10 +5261,15 @@ The signature for ``cache_extra_test_sources`` is:

 .. code-block:: python

-   def cache_extra_test_sources(self, srcs):
+   def cache_extra_test_sources(pkg, srcs):

-where ``srcs`` is a string *or* a list of strings corresponding to the
-paths of subdirectories and or files needed for stand-alone testing.
+where each argument has the following meaning:
+
+* ``pkg`` is an instance of the package for the spec under test.
+
+* ``srcs`` is a string *or* a list of strings corresponding to the
+  paths of subdirectories and or files needed for stand-alone testing.
+
 The paths must be relative to the staged source directory. Contents of
 subdirectories and files are copied to a special test cache subdirectory
 of the installation prefix. They are automatically copied to the appropriate
```
```diff
@@ -5286,21 +5290,18 @@ and using ``foo.c`` in a test method is illustrated below.
        srcs = ["tests",
                join_path("examples", "foo.c"),
                join_path("examples", "bar.c")]
-       self.cache_extra_test_sources(srcs)
+       cache_extra_test_sources(self, srcs)

    def test_foo(self):
        exe = "foo"
-       src_dir = join_path(
-           self.test_suite.current_test_cache_dir, "examples"
-       )
+       src_dir = self.test_suite.current_test_cache_dir.examples
        with working_dir(src_dir):
            cc = which(os.environ["CC"])
            cc(
-               "-L{0}".format(self.prefix.lib),
-               "-I{0}".format(self.prefix.include),
-               "{0}.c".format(exe),
-               "-o",
-               exe
+               f"-L{self.prefix.lib}",
+               f"-I{self.prefix.include}",
+               f"{exe}.c",
+               "-o", exe
            )
            foo = which(exe)
            foo()
```
```diff
@@ -5326,9 +5327,9 @@ the files using the ``self.test_suite.current_test_cache_dir`` property.
 In our example above, test methods can use the following paths to reference
 the copy of each entry listed in ``srcs``, respectively:

-* ``join_path(self.test_suite.current_test_cache_dir, "tests")``
-* ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
-* ``join_path(self.test_suite.current_test_cache_dir, "examples", "bar.c")``
+* ``self.test_suite.current_test_cache_dir.tests``
+* ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
+* ``join_path(self.test_suite.current_test_cache_dir.examples, "bar.c")``

 .. admonition:: Library packages should build stand-alone tests
```
```diff
@@ -5347,7 +5348,7 @@ the copy of each entry listed in ``srcs``, respectively:
    If one or more of the copied files needs to be modified to reference
    the installed software, it is recommended that those changes be made
    to the cached files **once** in the ``copy_test_sources`` method and
-   ***after** the call to ``self.cache_extra_test_sources()``. This will
+   ***after** the call to ``cache_extra_test_sources()``. This will
    reduce the amount of unnecessary work in the test method **and** avoid
    problems testing in shared instances and facility deployments.
```
```diff
@@ -5394,7 +5395,7 @@ property as shown below.
        """build and run custom-example"""
        data_dir = self.test_suite.current_test_data_dir
        exe = "custom-example"
-       src = datadir.join("{0}.cpp".format(exe))
+       src = datadir.join(f"{exe}.cpp")
        ...
        # TODO: Build custom-example using src and exe
        ...
```
```diff
@@ -5444,7 +5445,7 @@ added to the package's ``test`` subdirectory.
            db_filename, ".dump", output=str.split, error=str.split
        )
        for exp in expected:
-           assert re.search(exp, out), "Expected '{0}' in output".format(exp)
+           assert re.search(exp, out), f"Expected '{exp}' in output"

 If the file was instead copied from the ``tests`` subdirectory of the staged
 source code, the path would be obtained as shown below.
```
```diff
@@ -5494,9 +5495,12 @@ Invoking the method is the equivalent of:

 .. code-block:: python

+   errors = []
    for check in expected:
        if not re.search(check, actual):
-           raise RuntimeError("Expected '{0}' in output '{1}'".format(check, actual))
+           errors.append(f"Expected '{check}' in output '{actual}'")
+   if errors:
+       raise RuntimeError("\n ".join(errors))


 .. _accessing-files:
```
```diff
@@ -5536,7 +5540,7 @@ repository, and installation.
      - ``self.test_suite.test_dir_for_spec(self.spec)``
    * - Current Spec's Build-time Files
      - ``self.test_suite.current_test_cache_dir``
-     - ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
+     - ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
    * - Current Spec's Custom Test Files
      - ``self.test_suite.current_test_data_dir``
      - ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
```
```diff
@@ -6071,7 +6075,7 @@ in the extra attributes can implement this method like this:
     @classmethod
     def validate_detected_spec(cls, spec, extra_attributes):
         """Check that "compilers" is in the extra attributes."""
-        msg = ("the extra attribute "compilers" must be set for "
+        msg = ("the extra attribute 'compilers' must be set for "
                "the detected spec '{0}'".format(spec))
         assert "compilers" in extra_attributes, msg
```
```diff
@@ -1,13 +1,13 @@
-sphinx==6.2.1
+sphinx==7.2.5
 sphinxcontrib-programoutput==0.17
-sphinx_design==0.4.1
-sphinx-rtd-theme==1.2.2
+sphinx_design==0.5.0
+sphinx-rtd-theme==1.3.0
 python-levenshtein==0.21.1
 docutils==0.18.1
-pygments==2.15.1
-urllib3==2.0.3
+pygments==2.16.1
+urllib3==2.0.4
 pytest==7.4.0
 isort==5.12.0
-black==23.1.0
-flake8==6.0.0
-mypy==1.4.1
+black==23.7.0
+flake8==6.1.0
+mypy==1.5.1
```
```diff
@@ -18,11 +18,13 @@
 import sys
 import tempfile
 from contextlib import contextmanager
+from itertools import accumulate
 from typing import Callable, Iterable, List, Match, Optional, Tuple, Union

+import llnl.util.symlink
 from llnl.util import tty
 from llnl.util.lang import dedupe, memoized
-from llnl.util.symlink import islink, symlink
+from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink

 from spack.util.executable import Executable, which
 from spack.util.path import path_to_os_path, system_path_filter
@@ -101,7 +103,7 @@ def _nop(args, ns=None, follow_symlinks=None):
         pass

     # follow symlinks (aka don't not follow symlinks)
-    follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
+    follow = follow_symlinks or not (islink(src) and islink(dst))
     if follow:
         # use the real function if it exists
         def lookup(name):
@@ -169,7 +171,7 @@ def rename(src, dst):
     if sys.platform == "win32":
         # Windows path existence checks will sometimes fail on junctions/links/symlinks
         # so check for that case
-        if os.path.exists(dst) or os.path.islink(dst):
+        if os.path.exists(dst) or islink(dst):
             os.remove(dst)
     os.rename(src, dst)
@@ -566,7 +568,7 @@ def set_install_permissions(path):
     # If this points to a file maintained in a Spack prefix, it is assumed that
     # this function will be invoked on the target. If the file is outside a
     # Spack-maintained prefix, the permissions should not be modified.
-    if os.path.islink(path):
+    if islink(path):
         return
     if os.path.isdir(path):
         os.chmod(path, 0o755)
@@ -635,7 +637,7 @@ def chmod_x(entry, perms):
 @system_path_filter
 def copy_mode(src, dest):
     """Set the mode of dest to that of src unless it is a link."""
-    if os.path.islink(dest):
+    if islink(dest):
         return
     src_mode = os.stat(src).st_mode
     dest_mode = os.stat(dest).st_mode
@@ -721,26 +723,12 @@ def install(src, dest):
     copy(src, dest, _permissions=True)


-@system_path_filter
-def resolve_link_target_relative_to_the_link(link):
-    """
-    os.path.isdir uses os.path.exists, which for links will check
-    the existence of the link target. If the link target is relative to
-    the link, we need to construct a pathname that is valid from
-    our cwd (which may not be the same as the link's directory)
-    """
-    target = os.readlink(link)
-    if os.path.isabs(target):
-        return target
-    link_dir = os.path.dirname(os.path.abspath(link))
-    return os.path.join(link_dir, target)
-
-
 @system_path_filter
 def copy_tree(
     src: str,
     dest: str,
     symlinks: bool = True,
+    allow_broken_symlinks: bool = sys.platform != "win32",
     ignore: Optional[Callable[[str], bool]] = None,
     _permissions: bool = False,
 ):
```
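The hunk above removes `resolve_link_target_relative_to_the_link` from this file; per the changed import line earlier in the diff, it now comes from `llnl.util.symlink`. The reason the helper exists is worth a short sketch: `os.readlink` returns a relative target verbatim, and that string only resolves correctly from the link's own directory, not from an arbitrary working directory. A self-contained illustration using only the standard library:

```python
# Why a relative link target must be resolved against the link's own
# directory: os.readlink returns the target string verbatim, and a
# relative target is only meaningful from the directory holding the
# link. Standalone illustration; not Spack code.
import os
import tempfile

with tempfile.TemporaryDirectory() as root:
    os.makedirs(os.path.join(root, "sub"))
    target = os.path.join(root, "sub", "data.txt")
    open(target, "w").close()
    link = os.path.join(root, "sub", "alias")
    os.symlink("data.txt", link)  # relative target

    raw = os.readlink(link)
    print(raw)                  # 'data.txt'
    print(os.path.exists(raw))  # False, unless cwd happens to be root/sub

    # What resolve_link_target_relative_to_the_link computes:
    resolved = os.path.join(os.path.dirname(os.path.abspath(link)), raw)
    print(os.path.exists(resolved))  # True
```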
```diff
@@ -763,6 +751,8 @@ def copy_tree(
         src (str): the directory to copy
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
+        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
+            On Windows, setting this to True will raise an exception. Defaults to true on unix.
         ignore (typing.Callable): function indicating which files to ignore
         _permissions (bool): for internal use only
@@ -770,6 +760,8 @@ def copy_tree(
         IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
+    if allow_broken_symlinks and sys.platform == "win32":
+        raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
     if _permissions:
         tty.debug("Installing {0} to {1}".format(src, dest))
     else:
@@ -783,6 +775,11 @@ def copy_tree(
     if not files:
         raise IOError("No such file or directory: '{0}'".format(src))

+    # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
+    # all symlinks to this list while traversing the tree, then when finished, make all
+    # symlinks at the end.
+    links = []
+
     for src in files:
         abs_src = os.path.abspath(src)
         if not abs_src.endswith(os.path.sep):
@@ -805,7 +802,7 @@ def copy_tree(
             ignore=ignore,
             follow_nonexisting=True,
         ):
-            if os.path.islink(s):
+            if islink(s):
                 link_target = resolve_link_target_relative_to_the_link(s)
                 if symlinks:
                     target = os.readlink(s)
@@ -819,7 +816,9 @@ def escaped_path(path):
                     tty.debug("Redirecting link {0} to {1}".format(target, new_target))
                     target = new_target

-                symlink(target, d)
+                links.append((target, d, s))
+                continue
+
             elif os.path.isdir(link_target):
                 mkdirp(d)
             else:
@@ -834,9 +833,17 @@ def escaped_path(path):
             set_install_permissions(d)
             copy_mode(s, d)

+    for target, d, s in links:
+        symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
+        if _permissions:
+            set_install_permissions(d)
+            copy_mode(s, d)
+

 @system_path_filter
-def install_tree(src, dest, symlinks=True, ignore=None):
+def install_tree(
+    src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
+):
     """Recursively install an entire directory tree rooted at *src*.

     Same as :py:func:`copy_tree` with the addition of setting proper
@@ -847,12 +854,21 @@ def install_tree(src, dest, symlinks=True, ignore=None):
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
         ignore (typing.Callable): function indicating which files to ignore
+        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
+            On Windows, setting this to True will raise an exception.

     Raises:
         IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
-    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
+    copy_tree(
+        src,
+        dest,
+        symlinks=symlinks,
+        allow_broken_symlinks=allow_broken_symlinks,
+        ignore=ignore,
+        _permissions=True,
+    )
@@ -1256,7 +1272,12 @@ def traverse_tree(
     Keyword Arguments:
         order (str): Whether to do pre- or post-order traversal. Accepted
             values are 'pre' and 'post'
-        ignore (typing.Callable): function indicating which files to ignore
+        ignore (typing.Callable): function indicating which files to ignore. This will also
+            ignore symlinks if they point to an ignored file (regardless of whether the symlink
+            is explicitly ignored); note this only supports one layer of indirection (i.e. if
+            you have x -> y -> z, and z is ignored but x/y are not, then y would be ignored
+            but not x). To avoid this, make sure the ignore function also ignores the symlink
+            paths too.
         follow_nonexisting (bool): Whether to descend into directories in
             ``src`` that do not exit in ``dest``. Default is True
         follow_links (bool): Whether to descend into symlinks in ``src``
@@ -1283,11 +1304,24 @@ def traverse_tree(
         dest_child = os.path.join(dest_path, f)
         rel_child = os.path.join(rel_path, f)

+        # If the source path is a link and the link's source is ignored, then ignore the link too,
+        # but only do this if the ignore is defined.
+        if ignore is not None:
+            if islink(source_child) and not follow_links:
+                target = readlink(source_child)
+                all_parents = accumulate(target.split(os.sep), lambda x, y: os.path.join(x, y))
+                if any(map(ignore, all_parents)):
+                    tty.warn(
+                        f"Skipping {source_path} because the source or a part of the source's "
+                        f"path is included in the ignores."
+                    )
+                    continue
+
         # Treat as a directory
         # TODO: for symlinks, os.path.isdir looks for the link target. If the
         # target is relative to the link, then that may not resolve properly
         # relative to our cwd - see resolve_link_target_relative_to_the_link
-        if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
+        if os.path.isdir(source_child) and (follow_links or not islink(source_child)):
             # When follow_nonexisting isn't set, don't descend into dirs
             # in source that do not exist in dest
             if follow_nonexisting or os.path.exists(dest_child):
```
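As the expanded docstring warns, the new `ignore` handling only catches one layer of symlink indirection, so an ignore callable should match symlink paths as well as their targets. A hedged sketch of such a callable; the pattern and names are illustrative, not from Spack, which accepts any `Callable[[str], bool]` here:

```python
# Sketch of an ignore callable that covers both a path pattern and the
# symlink names that point into it, following the docstring's advice.
import fnmatch
import os


def make_ignore(*patterns: str):
    def ignore(path: str) -> bool:
        name = os.path.basename(path)
        return any(fnmatch.fnmatch(name, pat) for pat in patterns)

    return ignore


# Ignore the build directory itself and any 'latest-build' symlinks to it.
ignore = make_ignore("build", "latest-build")
print(ignore("src/build"))         # True
print(ignore("src/latest-build"))  # True
print(ignore("src/main.c"))        # False
```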
```diff
@@ -1313,7 +1347,11 @@ def traverse_tree(
 def lexists_islink_isdir(path):
     """Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
-    number of stat calls."""
+    number of stat calls on unix. Use os.path and symlink.islink methods for windows."""
+    if sys.platform == "win32":
+        if not os.path.lexists(path):
+            return False, False, False
+        return os.path.lexists(path), islink(path), os.path.isdir(path)
     # First try to lstat, so we know if it's a link or not.
     try:
         lst = os.lstat(path)
@@ -1528,7 +1566,7 @@ def remove_if_dead_link(path):
     Parameters:
         path (str): The potential dead link
     """
-    if os.path.islink(path) and not os.path.exists(path):
+    if islink(path) and not os.path.exists(path):
         os.unlink(path)
@@ -1587,7 +1625,7 @@ def remove_linked_tree(path):
         kwargs["onerror"] = readonly_file_handler(ignore_errors=True)

     if os.path.exists(path):
-        if os.path.islink(path):
+        if islink(path):
             shutil.rmtree(os.path.realpath(path), **kwargs)
             os.unlink(path)
         else:
@@ -1754,9 +1792,14 @@ def find(root, files, recursive=True):
         files = [files]

     if recursive:
-        return _find_recursive(root, files)
+        tty.debug(f"Find (recursive): {root} {str(files)}")
+        result = _find_recursive(root, files)
     else:
-        return _find_non_recursive(root, files)
+        tty.debug(f"Find (not recursive): {root} {str(files)}")
+        result = _find_non_recursive(root, files)
+
+    tty.debug(f"Find complete: {root} {str(files)}")
+    return result
@@ -2688,7 +2731,7 @@ def remove_directory_contents(dir):
     """Remove all contents of a directory."""
     if os.path.exists(dir):
         for entry in [os.path.join(dir, entry) for entry in os.listdir(dir)]:
-            if os.path.isfile(entry) or os.path.islink(entry):
+            if os.path.isfile(entry) or islink(entry):
                 os.unlink(entry)
             else:
                 shutil.rmtree(entry)
```
```diff
@@ -143,7 +143,7 @@ def get_fh(self, path: str) -> IO:
     def release_by_stat(self, stat):
         key = (stat.st_dev, stat.st_ino, os.getpid())
         open_file = self._descriptors.get(key)
-        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_inode
+        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_ino

         open_file.refs -= 1
         if not open_file.refs:
```
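The fix above replaces `stat.st_inode`, an attribute that does not exist on `os.stat_result`, with the correct `st_ino`. The `(st_dev, st_ino, pid)` tuple works as a descriptor key because device plus inode identifies the underlying file independently of the path used to open it; a quick standard-library check:

```python
# (st_dev, st_ino) identifies the same underlying file across different
# paths (e.g. via a hard link), which is why it is a safe key for a
# descriptor table. os.stat_result has st_ino; st_inode does not exist.
import os
import tempfile

with tempfile.TemporaryDirectory() as d:
    a = os.path.join(d, "a")
    b = os.path.join(d, "b")
    open(a, "w").close()
    os.link(a, b)  # hard link: two names, one file

    sa, sb = os.stat(a), os.stat(b)
    print((sa.st_dev, sa.st_ino) == (sb.st_dev, sb.st_ino))  # True
    print(hasattr(sa, "st_inode"))  # False -- the bug this change fixes
```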
@@ -2,77 +2,188 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import errno
 import os
+import re
 import shutil
+import subprocess
 import sys
 import tempfile
-from os.path import exists, join

-from llnl.util import lang
+from llnl.util import lang, tty
+
+from spack.error import SpackError
+from spack.util.path import system_path_filter

 if sys.platform == "win32":
     from win32file import CreateHardLink

+is_windows = sys.platform == "win32"
+

-def symlink(real_path, link_path):
-    """
-    Create a symbolic link.
-
-    On Windows, use junctions if os.symlink fails.
-    """
-    if sys.platform != "win32":
-        os.symlink(real_path, link_path)
-    elif _win32_can_symlink():
-        # Windows requires target_is_directory=True when the target is a dir.
-        os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
-    else:
-        try:
-            # Try to use junctions
-            _win32_junction(real_path, link_path)
-        except OSError as e:
-            if e.errno == errno.EEXIST:
-                # EEXIST error indicates that file we're trying to "link"
-                # is already present, don't bother trying to copy which will also fail
-                # just raise
-                raise
-            else:
-                # If all else fails, fall back to copying files
-                shutil.copyfile(real_path, link_path)
+def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
+    """
+    Create a link.
+
+    On non-Windows and Windows with System Administrator
+    privileges this will be a normal symbolic link via
+    os.symlink.
+
+    On Windows without privileges the link will be a
+    junction for a directory and a hardlink for a file.
+    On Windows the various link types are:
+
+    Symbolic Link: A link to a file or directory on the
+    same or different volume (drive letter) or even to
+    a remote file or directory (using UNC in its path).
+    Need System Administrator privileges to make these.
+
+    Hard Link: A link to a file on the same volume (drive
+    letter) only. Every file (file's data) has at least 1
+    hard link (file's name). But when this method creates
+    a new hard link there will be 2. Deleting all hard
+    links effectively deletes the file. Don't need System
+    Administrator privileges.
+
+    Junction: A link to a directory on the same or different
+    volume (drive letter) but not to a remote directory. Don't
+    need System Administrator privileges.
+
+    Parameters:
+        source_path (str): The real file or directory that the link points to.
+            Must be absolute OR relative to the link.
+        link_path (str): The path where the link will exist.
+        allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the
+            source_path doesn't exist. This will still raise an exception on Windows.
+    """
+    source_path = os.path.normpath(source_path)
+    win_source_path = source_path
+    link_path = os.path.normpath(link_path)
+
+    # Never allow broken links on Windows.
+    if sys.platform == "win32" and allow_broken_symlinks:
+        raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")
+
+    if not allow_broken_symlinks:
+        # Perform basic checks to make sure symlinking will succeed
+        if os.path.lexists(link_path):
+            raise SymlinkError(f"Link path ({link_path}) already exists. Cannot create link.")
+
+        if not os.path.exists(source_path):
+            if os.path.isabs(source_path) and not allow_broken_symlinks:
+                # An absolute source path that does not exist will result in a broken link.
+                raise SymlinkError(
+                    f"Source path ({source_path}) is absolute but does not exist. Resulting "
+                    f"link would be broken so not making link."
+                )
+
+            # os.symlink can create a link when the given source path is relative to
+            # the link path. Emulate this behavior and check to see if the source exists
+            # relative to the link path ahead of link creation to prevent broken
+            # links from being made.
+            link_parent_dir = os.path.dirname(link_path)
+            relative_path = os.path.join(link_parent_dir, source_path)
+            if os.path.exists(relative_path):
+                # In order to work on windows, the source path needs to be modified to be
+                # relative because hardlink/junction don't resolve relative paths the same
+                # way as os.symlink. This is ignored on other operating systems.
+                win_source_path = relative_path
+            elif not allow_broken_symlinks:
+                raise SymlinkError(
+                    f"The source path ({source_path}) is not relative to the link path "
+                    f"({link_path}). Resulting link would be broken so not making link."
+                )
+
+    # Create the symlink
+    if sys.platform == "win32" and not _windows_can_symlink():
+        _windows_create_link(win_source_path, link_path)
+    else:
+        os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))
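Reviewer note: a minimal usage sketch of the new `symlink` API (the temporary directory and file names are hypothetical; assumes the module path `llnl.util.symlink` shown in this diff):

```python
import os
import tempfile

from llnl.util.symlink import islink, symlink

with tempfile.TemporaryDirectory() as d:
    target = os.path.join(d, "target.txt")
    with open(target, "w") as f:
        f.write("data")
    # The docstring allows a source given relative to the link itself.
    symlink("target.txt", os.path.join(d, "alias.txt"))
    assert islink(os.path.join(d, "alias.txt"))
```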
-def islink(path):
-    return os.path.islink(path) or _win32_is_junction(path)
+def islink(path: str) -> bool:
+    """Override os.islink to give correct answer for spack logic.
+
+    For Non-Windows: a link can be determined with the os.path.islink method.
+    Windows-only methods will return false for other operating systems.
+
+    For Windows: spack considers symlinks, hard links, and junctions to
+    all be links, so if any of those are True, return True.
+
+    Args:
+        path (str): path to check if it is a link.
+
+    Returns:
+        bool - whether the path is any kind of link or not.
+    """
+    return any([os.path.islink(path), _windows_is_junction(path), _windows_is_hardlink(path)])


-# '_win32' functions based on
-# https://github.com/Erotemic/ubelt/blob/master/ubelt/util_links.py
-def _win32_junction(path, link):
-    # junctions require absolute paths
-    if not os.path.isabs(link):
-        link = os.path.abspath(link)
-
-    # os.symlink will fail if link exists, emulate the behavior here
-    if exists(link):
-        raise OSError(errno.EEXIST, "File exists: %s -> %s" % (link, path))
-
-    if not os.path.isabs(path):
-        parent = os.path.join(link, os.pardir)
-        path = os.path.join(parent, path)
-        path = os.path.abspath(path)
-
-    CreateHardLink(link, path)
+def _windows_is_hardlink(path: str) -> bool:
+    """Determines if a path is a windows hard link. This is accomplished
+    by looking at the number of links using os.stat. A non-hard-linked file
+    will have a st_nlink value of 1, whereas a hard link will have a value
+    larger than 1. Note that both the original and hard-linked file will
+    return True because they share the same inode.
+
+    Args:
+        path (str): Windows path to check for a hard link
+
+    Returns:
+        bool - Whether the path is a hard link or not.
+    """
+    if sys.platform != "win32" or os.path.islink(path) or not os.path.exists(path):
+        return False
+
+    return os.stat(path).st_nlink > 1
+
+
+def _windows_is_junction(path: str) -> bool:
+    """Determines if a path is a windows junction. A junction can be
+    determined using a bitwise AND operation between the file's
+    attribute bitmask and the known junction bitmask (0x400).
+
+    Args:
+        path (str): A non-file path
+
+    Returns:
+        bool - whether the path is a junction or not.
+    """
+    if sys.platform != "win32" or os.path.islink(path) or os.path.isfile(path):
+        return False
+
+    import ctypes.wintypes
+
+    get_file_attributes = ctypes.windll.kernel32.GetFileAttributesW  # type: ignore[attr-defined]
+    get_file_attributes.argtypes = (ctypes.wintypes.LPWSTR,)
+    get_file_attributes.restype = ctypes.wintypes.DWORD
+
+    invalid_file_attributes = 0xFFFFFFFF
+    reparse_point = 0x400
+    file_attr = get_file_attributes(str(path))
+
+    if file_attr == invalid_file_attributes:
+        return False
+
+    return file_attr & reparse_point > 0


 @lang.memoized
-def _win32_can_symlink():
+def _windows_can_symlink() -> bool:
+    """
+    Determines if windows is able to make a symlink depending on
+    the system configuration and the level of the user's permissions.
+    """
+    if sys.platform != "win32":
+        tty.warn("windows_can_symlink method can't be used on non-Windows OS.")
+        return False
+
     tempdir = tempfile.mkdtemp()

-    dpath = join(tempdir, "dpath")
-    fpath = join(tempdir, "fpath.txt")
+    dpath = os.path.join(tempdir, "dpath")
+    fpath = os.path.join(tempdir, "fpath.txt")

-    dlink = join(tempdir, "dlink")
-    flink = join(tempdir, "flink.txt")
+    dlink = os.path.join(tempdir, "dlink")
+    flink = os.path.join(tempdir, "flink.txt")

     import llnl.util.filesystem as fs
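Reviewer note: the `st_nlink` check in `_windows_is_hardlink` can be exercised on any platform; a minimal sketch (file names are hypothetical):

```python
import os
import tempfile

# st_nlink counts directory entries referring to one inode: after os.link
# there are two names for the same file, so both report st_nlink == 2.
with tempfile.TemporaryDirectory() as d:
    original = os.path.join(d, "original.txt")
    alias = os.path.join(d, "alias.txt")
    with open(original, "w") as f:
        f.write("data")
    os.link(original, alias)
    assert os.stat(original).st_nlink == 2
    assert os.stat(alias).st_nlink == 2
```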
@@ -96,24 +207,136 @@ def _win32_can_symlink():
     return can_symlink_directories and can_symlink_files


-def _win32_is_junction(path):
-    """
-    Determines if a path is a win32 junction
-    """
-    if os.path.islink(path):
-        return False
-
-    if sys.platform == "win32":
-        import ctypes.wintypes
-
-        GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
-        GetFileAttributes.argtypes = (ctypes.wintypes.LPWSTR,)
-        GetFileAttributes.restype = ctypes.wintypes.DWORD
-
-        INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
-        FILE_ATTRIBUTE_REPARSE_POINT = 0x400
-
-        res = GetFileAttributes(path)
-        return res != INVALID_FILE_ATTRIBUTES and bool(res & FILE_ATTRIBUTE_REPARSE_POINT)
-
-    return False
+def _windows_create_link(source: str, link: str):
+    """
+    Attempts to create a Hard Link or Junction as an alternative
+    to a symbolic link. This is called when symbolic links cannot
+    be created.
+    """
+    if sys.platform != "win32":
+        raise SymlinkError("windows_create_link method can't be used on non-Windows OS.")
+    elif os.path.isdir(source):
+        _windows_create_junction(source=source, link=link)
+    elif os.path.isfile(source):
+        _windows_create_hard_link(path=source, link=link)
+    else:
+        raise SymlinkError(
+            f"Cannot create link from {source}. It is neither a file nor a directory."
+        )
+
+
+def _windows_create_junction(source: str, link: str):
+    """Duly verify that the path and link are eligible to create a junction,
+    then create the junction.
+    """
+    if sys.platform != "win32":
+        raise SymlinkError("windows_create_junction method can't be used on non-Windows OS.")
+    elif not os.path.exists(source):
+        raise SymlinkError("Source path does not exist, cannot create a junction.")
+    elif os.path.lexists(link):
+        raise SymlinkError("Link path already exists, cannot create a junction.")
+    elif not os.path.isdir(source):
+        raise SymlinkError("Source path is not a directory, cannot create a junction.")
+
+    import subprocess
+
+    cmd = ["cmd", "/C", "mklink", "/J", link, source]
+    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    out, err = proc.communicate()
+    tty.debug(out.decode())
+    if proc.returncode != 0:
+        err = err.decode()
+        tty.error(err)
+        raise SymlinkError("Make junction command returned a non-zero return code.", err)
+
+
+def _windows_create_hard_link(path: str, link: str):
+    """Duly verify that the path and link are eligible to create a hard
+    link, then create the hard link.
+    """
+    if sys.platform != "win32":
+        raise SymlinkError("windows_create_hard_link method can't be used on non-Windows OS.")
+    elif not os.path.exists(path):
+        raise SymlinkError(f"File path {path} does not exist. Cannot create hard link.")
+    elif os.path.lexists(link):
+        raise SymlinkError(f"Link path ({link}) already exists. Cannot create hard link.")
+    elif not os.path.isfile(path):
+        raise SymlinkError(f"File path ({path}) is not a file. Cannot create hard link.")
+    else:
+        tty.debug(f"Creating hard link {link} pointing to {path}")
+        CreateHardLink(link, path)
+
+
+def readlink(path: str):
+    """Spack utility override of the os.readlink method to work cross-platform"""
+    if _windows_is_hardlink(path):
+        return _windows_read_hard_link(path)
+    elif _windows_is_junction(path):
+        return _windows_read_junction(path)
+    else:
+        return os.readlink(path)
+
+
+def _windows_read_hard_link(link: str) -> str:
+    """Find all of the files that point to the same inode as the link"""
+    if sys.platform != "win32":
+        raise SymlinkError("Can't read hard link on non-Windows OS.")
+    link = os.path.abspath(link)
+    fsutil_cmd = ["fsutil", "hardlink", "list", link]
+    proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+    out, err = proc.communicate()
+    if proc.returncode != 0:
+        raise SymlinkError(f"An error occurred while reading hard link: {err.decode()}")
+
+    # fsutil response does not include the drive name, so append it back to each linked file.
+    drive, link_tail = os.path.splitdrive(os.path.abspath(link))
+    links = set([os.path.join(drive, p) for p in out.decode().splitlines()])
+    links.remove(link)
+    if len(links) == 1:
+        return links.pop()
+    elif len(links) > 1:
+        # TODO: How best to handle the case where 3 or more paths point to a single inode?
+        raise SymlinkError(f"Found multiple paths pointing to the same inode {links}")
+    else:
+        raise SymlinkError("Cannot determine hard link source path.")
+
+
+def _windows_read_junction(link: str):
+    """Find the path that a junction points to."""
+    if sys.platform != "win32":
+        raise SymlinkError("Can't read junction on non-Windows OS.")
+
+    link = os.path.abspath(link)
+    link_basename = os.path.basename(link)
+    link_parent = os.path.dirname(link)
+    fsutil_cmd = ["dir", "/a:l", link_parent]
+    proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+    out, err = proc.communicate()
+    if proc.returncode != 0:
+        raise SymlinkError(f"An error occurred while reading junction: {err.decode()}")
+    matches = re.search(rf"<JUNCTION>\s+{link_basename} \[(.*)]", out.decode())
+    if matches:
+        return matches.group(1)
+    else:
+        raise SymlinkError("Could not find junction path.")
+
+
+@system_path_filter
+def resolve_link_target_relative_to_the_link(link):
+    """
+    os.path.isdir uses os.path.exists, which for links will check
+    the existence of the link target. If the link target is relative to
+    the link, we need to construct a pathname that is valid from
+    our cwd (which may not be the same as the link's directory)
+    """
+    target = readlink(link)
+    if os.path.isabs(target):
+        return target
+    link_dir = os.path.dirname(os.path.abspath(link))
+    return os.path.join(link_dir, target)
+
+
+class SymlinkError(SpackError):
+    """Exception class for errors raised while creating symlinks,
+    junctions and hard links
+    """
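Reviewer note: `_windows_read_junction` scrapes `dir /a:l` output; a minimal sketch of what the regex matches (the sample listing line is hypothetical):

```python
import re

# Hypothetical `dir /a:l` output line on Windows; the bracketed path is
# the junction target that the regex above extracts.
sample = r"05/30/2023  10:12 AM    <JUNCTION>     dlink [C:\spack\real-dir]"
match = re.search(r"<JUNCTION>\s+dlink \[(.*)]", sample)
assert match and match.group(1) == r"C:\spack\real-dir"
```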
@@ -780,7 +780,7 @@ def __enter__(self):
             raise RuntimeError("file argument must be set by __init__ ")

         # Open both write and reading on logfile
-        if type(self.logfile) == io.StringIO:
+        if isinstance(self.logfile, io.StringIO):
             self._ioflag = True
             # cannot have two streams on tempfile, so we must make our own
             sys.stdout = self.logfile
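Reviewer note: the `type(...) ==` to `isinstance` changes throughout this PR also accept subclasses; a minimal sketch:

```python
import io

class TaggedStringIO(io.StringIO):
    """Hypothetical subclass, used only to illustrate the difference."""

buf = TaggedStringIO()
assert type(buf) is not io.StringIO  # exact-type check rejects subclasses
assert isinstance(buf, io.StringIO)  # isinstance accepts them
```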
@@ -286,7 +286,7 @@ def _check_build_test_callbacks(pkgs, error_cls):
     """Ensure stand-alone test method is not included in build-time callbacks"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)

         # TODO (post-34236): "test*"->"test_*" once remove deprecated methods
@@ -312,7 +312,7 @@ def _check_patch_urls(pkgs, error_cls):

     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         for condition, patches in pkg_cls.patches.items():
             for patch in patches:
                 if not isinstance(patch, spack.patch.UrlPatch):
@@ -342,7 +342,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
     errors = []
     for pkg_name in pkgs:
         name_definitions = collections.defaultdict(list)
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

         for cls_item in inspect.getmro(pkg_cls):
             for name in RESERVED_NAMES:
@@ -383,7 +383,7 @@ def _ensure_packages_are_pickeleable(pkgs, error_cls):
     """Ensure that package objects are pickleable"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         pkg = pkg_cls(spack.spec.Spec(pkg_name))
         try:
             pickle.dumps(pkg)
@@ -424,7 +424,7 @@ def _ensure_all_versions_can_produce_a_fetcher(pkgs, error_cls):
     """Ensure all versions in a package can produce a fetcher"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         pkg = pkg_cls(spack.spec.Spec(pkg_name))
         try:
             spack.fetch_strategy.check_pkg_attributes(pkg)
@@ -449,7 +449,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
     ]
     for pkg_name in pkgs:
         details = []
-        filename = spack.repo.path.filename_for_package_name(pkg_name)
+        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
         with open(filename, "r") as package_file:
             for i, line in enumerate(package_file):
                 pattern = next((r for r in fixme_regexes if r.search(line)), None)
@@ -461,7 +461,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
             error_msg = "Package '{}' contains boilerplate that need to be removed"
             errors.append(error_cls(error_msg.format(pkg_name), details))

-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         if not pkg_cls.__doc__:
             error_msg = "Package '{}' miss a docstring"
             errors.append(error_cls(error_msg.format(pkg_name), []))
@@ -474,7 +474,7 @@ def _ensure_all_packages_use_sha256_checksums(pkgs, error_cls):
     """Ensure no packages use md5 checksums"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         if pkg_cls.manual_download:
             continue

@@ -511,7 +511,7 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
     """Ensure that methods modifying the build environment are ported to builder classes."""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         buildsystem_variant, _ = pkg_cls.variants["build_system"]
         buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
         builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]
@@ -538,7 +538,7 @@ def _linting_package_file(pkgs, error_cls):
     """Check for correctness of links"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

         # Does the homepage have http, and if so, does https work?
         if pkg_cls.homepage.startswith("http://"):
@@ -562,7 +562,7 @@ def _unknown_variants_in_directives(pkgs, error_cls):
     """Report unknown or wrong variants in directives for this package"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

         # Check "conflicts" directive
         for conflict, triggers in pkg_cls.conflicts.items():
@@ -628,15 +628,15 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
     """Report unknown dependencies and wrong variants for dependencies"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
-        filename = spack.repo.path.filename_for_package_name(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
         for dependency_name, dependency_data in pkg_cls.dependencies.items():
             # No need to analyze virtual packages
-            if spack.repo.path.is_virtual(dependency_name):
+            if spack.repo.PATH.is_virtual(dependency_name):
                 continue

             try:
-                dependency_pkg_cls = spack.repo.path.get_pkg_class(dependency_name)
+                dependency_pkg_cls = spack.repo.PATH.get_pkg_class(dependency_name)
             except spack.repo.UnknownPackageError:
                 # This dependency is completely missing, so report
                 # and continue the analysis
@@ -675,7 +675,7 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
     """Ensures that variant defaults are present and parsable from cli"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         for variant_name, entry in pkg_cls.variants.items():
             variant, _ = entry
             default_is_parsable = (
@@ -709,18 +709,33 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
     return errors


+@package_directives
+def _ensure_variants_have_descriptions(pkgs, error_cls):
+    """Ensures that all variants have a description."""
+    errors = []
+    for pkg_name in pkgs:
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        for variant_name, entry in pkg_cls.variants.items():
+            variant, _ = entry
+            if not variant.description:
+                error_msg = "Variant '{}' in package '{}' is missing a description"
+                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
+
+    return errors
+
+
 @package_directives
 def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
     """Report if version constraints used in directives are not satisfiable"""
     errors = []
     for pkg_name in pkgs:
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
-        filename = spack.repo.path.filename_for_package_name(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
+        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
         dependencies_to_check = []
         for dependency_name, dependency_data in pkg_cls.dependencies.items():
             # Skip virtual dependencies for the time being, check on
             # their versions can be added later
-            if spack.repo.path.is_virtual(dependency_name):
+            if spack.repo.PATH.is_virtual(dependency_name):
                 continue

             dependencies_to_check.extend([edge.spec for edge in dependency_data.values()])
@@ -729,7 +744,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
     for s in dependencies_to_check:
         dependency_pkg_cls = None
         try:
-            dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
+            dependency_pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
             # Some packages have hacks that might cause failures on some platform
             # Allow to explicitly set conditions to skip version checks in that case
             skip_conditions = getattr(dependency_pkg_cls, "skip_version_audit", [])
@@ -772,7 +787,7 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
     except variant_exceptions as e:
         summary = pkg.name + ': wrong variant in "{0}" directive'
         summary = summary.format(directive)
-        filename = spack.repo.path.filename_for_package_name(pkg.name)
+        filename = spack.repo.PATH.filename_for_package_name(pkg.name)

         error_msg = str(e).strip()
         if isinstance(e, KeyError):
@@ -9,7 +9,6 @@
 import io
 import itertools
 import json
 import multiprocessing.pool
 import os
 import re
 import shutil
@@ -876,32 +875,18 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
         db: A spack database used for adding specs and then writing the index.
         temp_dir (str): Location to write index.json and hash for pushing
         concurrency (int): Number of parallel processes to use when fetching

     Return:
         None
     """
-    for file in file_list:
-        contents = read_method(file)
-        # Need full spec.json name or this gets confused with index.json.
-        if file.endswith(".json.sig"):
-            specfile_json = Spec.extract_json_from_clearsig(contents)
-            fetched_spec = Spec.from_dict(specfile_json)
-        elif file.endswith(".json"):
-            fetched_spec = Spec.from_json(contents)
-        else:
-            continue
+    def _fetch_spec_from_mirror(spec_url):
+        spec_file_contents = read_method(spec_url)
+
+        if spec_file_contents:
+            # Need full spec.json name or this gets confused with index.json.
+            if spec_url.endswith(".json.sig"):
+                specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
+                return Spec.from_dict(specfile_json)
+            if spec_url.endswith(".json"):
+                return Spec.from_json(spec_file_contents)
+
+    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
+    try:
+        fetched_specs = tp.map(
+            llnl.util.lang.star(_fetch_spec_from_mirror), [(f,) for f in file_list]
+        )
+    finally:
+        tp.terminate()
+        tp.join()

+    for fetched_spec in fetched_specs:
         db.add(fetched_spec, None)
         db.mark(fetched_spec, "in_buildcache", True)
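Reviewer note: a sketch of the thread-pool fetch pattern above, with a local stand-in for `llnl.util.lang.star` (its presumed behavior: adapt `f(a, b)` to `f((a, b))`):

```python
import multiprocessing.pool

def star(func):
    # Presumed equivalent of llnl.util.lang.star: unpack a tuple of args.
    return lambda args: func(*args)

def fetch(url):
    return f"fetched:{url}"

tp = multiprocessing.pool.ThreadPool(processes=4)
try:
    results = tp.map(star(fetch), [(u,) for u in ("a", "b", "c")])
finally:
    tp.terminate()
    tp.join()
assert results == ["fetched:a", "fetched:b", "fetched:c"]
```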
@@ -2383,22 +2368,12 @@ def __init__(self, all_architectures):

         self.possible_specs = specs

-    def __call__(self, spec, **kwargs):
+    def __call__(self, spec: Spec, **kwargs):
         """
         Args:
-            spec (str): The spec being searched for in its string representation or hash.
+            spec: The spec being searched for
         """
-        matches = []
-        if spec.startswith("/"):
-            # Matching a DAG hash
-            query_hash = spec.replace("/", "")
-            for candidate_spec in self.possible_specs:
-                if candidate_spec.dag_hash().startswith(query_hash):
-                    matches.append(candidate_spec)
-        else:
-            # Matching a spec constraint
-            matches = [s for s in self.possible_specs if s.satisfies(spec)]
-        return matches
+        return [s for s in self.possible_specs if s.satisfies(spec)]


 class FetchIndexError(Exception):
@@ -124,9 +124,9 @@ def _read_and_sanitize_configuration() -> Dict[str, Any]:
 def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
     tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
     config_scopes: MutableSequence["spack.config.ConfigScope"] = [
-        spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)
+        spack.config.InternalConfigScope("_builtin", spack.config.CONFIG_DEFAULTS)
     ]
-    configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
+    configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
     for name, path in configuration_paths:
         platform = spack.platforms.host().name
         platform_scope = spack.config.ConfigScope(
@@ -476,15 +476,22 @@ def ensure_executables_in_path_or_raise(
 def _add_externals_if_missing() -> None:
     search_list = [
         # clingo
-        spack.repo.path.get_pkg_class("cmake"),
-        spack.repo.path.get_pkg_class("bison"),
+        spack.repo.PATH.get_pkg_class("cmake"),
+        spack.repo.PATH.get_pkg_class("bison"),
         # GnuPG
-        spack.repo.path.get_pkg_class("gawk"),
+        spack.repo.PATH.get_pkg_class("gawk"),
+        # develop deps
+        spack.repo.PATH.get_pkg_class("git"),
     ]
     if IS_WINDOWS:
-        search_list.append(spack.repo.path.get_pkg_class("winbison"))
-    detected_packages = spack.detection.by_executable(search_list)
-    spack.detection.update_configuration(detected_packages, scope="bootstrap")
+        search_list.append(spack.repo.PATH.get_pkg_class("winbison"))
+    externals = spack.detection.by_executable(search_list)
+    # System git is typically deprecated, so mark as non-buildable to force it as external
+    non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
+    spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)
+    spack.detection.update_configuration(
+        non_buildable_externals, scope="bootstrap", buildable=False
+    )


 def clingo_root_spec() -> str:
@@ -23,6 +23,7 @@

 from ._common import _root_spec
 from .config import root_path, spec_for_current_python, store_path
+from .core import _add_externals_if_missing


 class BootstrapEnvironment(spack.environment.Environment):
@@ -185,6 +186,7 @@ def pytest_root_spec() -> str:

 def ensure_environment_dependencies() -> None:
     """Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
+    _add_externals_if_missing()
     with BootstrapEnvironment() as env:
         env.update_installations()
         env.update_syspath_and_environ()
@@ -1027,7 +1027,7 @@ def get_cmake_prefix_path(pkg):


 def _setup_pkg_and_run(
-    serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
+    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
 ):
     context = kwargs.get("context", "build")

@@ -1048,12 +1048,12 @@ def _setup_pkg_and_run(
             pkg, dirty=kwargs.get("dirty", False), context=context
         )
         return_value = function(pkg, kwargs)
-        child_pipe.send(return_value)
+        write_pipe.send(return_value)

     except StopPhase as e:
         # Do not create a full ChildError from this, it's not an error
         # it's a control statement.
-        child_pipe.send(e)
+        write_pipe.send(e)
     except BaseException:
         # catch ANYTHING that goes wrong in the child process
         exc_type, exc, tb = sys.exc_info()
@@ -1102,10 +1102,10 @@ def _setup_pkg_and_run(
             context,
             package_context,
         )
-        child_pipe.send(ce)
+        write_pipe.send(ce)

     finally:
-        child_pipe.close()
+        write_pipe.close()
         if input_multiprocess_fd is not None:
             input_multiprocess_fd.close()

@@ -1149,7 +1149,7 @@ def child_fun():
     For more information on `multiprocessing` child process creation
     mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
     """
-    parent_pipe, child_pipe = multiprocessing.Pipe()
+    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
     input_multiprocess_fd = None
     jobserver_fd1 = None
     jobserver_fd2 = None
@@ -1174,7 +1174,7 @@ def child_fun():
             serialized_pkg,
             function,
             kwargs,
-            child_pipe,
+            write_pipe,
             input_multiprocess_fd,
             jobserver_fd1,
             jobserver_fd2,
@@ -1183,6 +1183,12 @@ def child_fun():

         p.start()

+        # We close the writable end of the pipe now to be sure that p is the
+        # only process which owns a handle for it. This ensures that when p
+        # closes its handle for the writable end, read_pipe.recv() will
+        # promptly report the readable end as being ready.
+        write_pipe.close()
+
     except InstallError as e:
         e.pkg = pkg
         raise
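Reviewer note: a self-contained sketch of the pipe-ownership pattern introduced here, standard library only:

```python
import multiprocessing

def child(write_pipe):
    write_pipe.send("ok")
    write_pipe.close()

if __name__ == "__main__":
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    p = multiprocessing.Process(target=child, args=(write_pipe,))
    p.start()
    # Drop the parent's handle on the writable end: if the child dies before
    # sending, recv() then raises EOFError promptly instead of blocking forever.
    write_pipe.close()
    try:
        print(read_pipe.recv())
    except EOFError:
        print("child produced no result")
    p.join()
```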
@@ -1192,7 +1198,16 @@ def child_fun():
     if input_multiprocess_fd is not None:
         input_multiprocess_fd.close()

-    child_result = parent_pipe.recv()
+    def exitcode_msg(p):
+        typ = "exit" if p.exitcode >= 0 else "signal"
+        return f"{typ} {abs(p.exitcode)}"
+
+    try:
+        child_result = read_pipe.recv()
+    except EOFError:
+        p.join()
+        raise InstallError(f"The process has stopped unexpectedly ({exitcode_msg(p)})")
+
     p.join()

     # If returns a StopPhase, raise it
@@ -1212,6 +1227,10 @@ def child_fun():
         child_result.print_context()
         raise child_result

+    # Fallback. Usually caught beforehand in EOFError above.
+    if p.exitcode != 0:
+        raise InstallError(f"The process failed unexpectedly ({exitcode_msg(p)})")
+
     return child_result
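Reviewer note: `exitcode_msg` relies on the `multiprocessing` convention that a child killed by signal N reports `exitcode == -N`; a minimal sketch with a hypothetical stand-in for a finished process:

```python
class FakeProcess:
    """Hypothetical stand-in for a finished multiprocessing.Process."""
    exitcode = -9

def exitcode_msg(p):
    typ = "exit" if p.exitcode >= 0 else "signal"
    return f"{typ} {abs(p.exitcode)}"

assert exitcode_msg(FakeProcess()) == "signal 9"
```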
@@ -1256,9 +1275,8 @@ def make_stack(tb, stack=None):
             func = getattr(obj, tb.tb_frame.f_code.co_name, "")
             if func:
                 typename, *_ = func.__qualname__.partition(".")
-
-        if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
-            break
+                if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
+                    break
     else:
         return None
@@ -55,7 +55,8 @@ def flags_to_build_system_args(self, flags):
         setattr(self, "configure_flag_args", [])
         for flag, values in flags.items():
             if values:
-                values_str = "{0}={1}".format(flag.upper(), " ".join(values))
+                var_name = "LIBS" if flag == "ldlibs" else flag.upper()
+                values_str = "{0}={1}".format(var_name, " ".join(values))
                 self.configure_flag_args.append(values_str)
         # Spack's fflags are meant for both F77 and FC, therefore we
         # additionally set FCFLAGS if required.
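Reviewer note: the mapping change above routes `ldlibs` to the `LIBS` configure variable instead of upper-casing the flag name; a minimal sketch of the resulting arguments:

```python
flags = {"cflags": ["-O2"], "ldlibs": ["-lblas"]}
args = []
for flag, values in flags.items():
    var_name = "LIBS" if flag == "ldlibs" else flag.upper()
    args.append("{0}={1}".format(var_name, " ".join(values)))
assert args == ["CFLAGS=-O2", "LIBS=-lblas"]
```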
@@ -248,7 +248,8 @@ def std_cmake_args(self):
     @staticmethod
     def std_args(pkg, generator=None):
         """Computes the standard cmake arguments for a generic package"""
-        generator = generator or "Unix Makefiles"
+        default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
+        generator = generator or default_generator
         valid_primary_generators = ["Unix Makefiles", "Ninja"]
         primary_generator = _extract_primary_generator(generator)
         if primary_generator not in valid_primary_generators:
@@ -209,5 +209,5 @@ def install(self, pkg, spec, prefix):
     def check(self):
         """Search Meson-generated files for the target ``test`` and run it if found."""
         with fs.working_dir(self.build_directory):
-            self._if_ninja_target_execute("test")
-            self._if_ninja_target_execute("check")
+            self.pkg._if_ninja_target_execute("test")
+            self.pkg._if_ninja_target_execute("check")
@@ -30,7 +30,7 @@


 class PythonExtension(spack.package_base.PackageBase):
-    maintainers("adamjstewart", "pradyunsg")
+    maintainers("adamjstewart")

     @property
     def import_modules(self):
@@ -201,7 +201,7 @@ def update_external_dependencies(self, extendee_spec=None):
         else:
             python = self.get_external_python_for_prefix()
             if not python.concrete:
-                repo = spack.repo.path.repo_for_pkg(python)
+                repo = spack.repo.PATH.repo_for_pkg(python)
                 python.namespace = repo.namespace

                 # Ensure architecture information is present
@@ -301,7 +301,7 @@ def get_external_python_for_prefix(self):
             return python_externals_configured[0]

         python_externals_detection = spack.detection.by_executable(
-            [spack.repo.path.get_pkg_class("python")], path_hints=[self.spec.external_path]
+            [spack.repo.PATH.get_pkg_class("python")], path_hints=[self.spec.external_path]
         )

         python_externals_detected = [
@@ -140,8 +140,6 @@ class ROCmPackage(PackageBase):
     depends_on("hsa-rocr-dev", when="+rocm")
     depends_on("hip +rocm", when="+rocm")

-    conflicts("^blt@:0.3.6", when="+rocm")
-
     # need amd gpu type for rocm builds
     conflicts("amdgpu_target=none", when="+rocm")
@@ -20,9 +20,9 @@


 def misc_cache_location():
-    """The ``misc_cache`` is Spack's cache for small data.
+    """The ``MISC_CACHE`` is Spack's cache for small data.

-    Currently the ``misc_cache`` stores indexes for virtual dependency
+    Currently the ``MISC_CACHE`` stores indexes for virtual dependency
     providers and for which packages provide which tags.
     """
     path = spack.config.get("config:misc_cache", spack.paths.default_misc_cache_path)
@@ -35,7 +35,7 @@ def _misc_cache():


 #: Spack's cache for small data
-misc_cache: Union[
+MISC_CACHE: Union[
     spack.util.file_cache.FileCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_misc_cache)
@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):


 #: Spack's local cache for downloaded source archives
-fetch_cache: Union[
+FETCH_CACHE: Union[
     spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
 ] = llnl.util.lang.Singleton(_fetch_cache)
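Reviewer note: a minimal sketch of the lazy-singleton pattern behind `FETCH_CACHE`/`MISC_CACHE` (presumed behavior of `llnl.util.lang.Singleton`: the factory runs on first attribute access):

```python
class LazySingleton:
    """Hypothetical equivalent of llnl.util.lang.Singleton."""

    def __init__(self, factory):
        self._factory = factory
        self._instance = None

    def __getattr__(self, name):
        # Build the real object the first time any attribute is touched.
        if self._instance is None:
            self._instance = self._factory()
        return getattr(self._instance, name)
```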
@@ -535,7 +535,7 @@ def __job_name(name, suffix=""):
     """Compute the name of a named job with appropriate suffix.
     Valid suffixes are either '-remove' or empty string or None
     """
-    assert type(name) == str
+    assert isinstance(name, str)

     jname = name
     if suffix:
@@ -885,7 +885,7 @@ def generate_gitlab_ci_yaml(
     cli_scopes = [
         os.path.relpath(s.path, concrete_env_dir)
         for s in cfg.scopes().values()
-        if type(s) == cfg.ImmutableConfigScope
+        if isinstance(s, cfg.ImmutableConfigScope)
         and s.path not in env_includes
         and os.path.exists(s.path)
     ]
@@ -1504,7 +1504,7 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
         return

     try:
-        pkg_cls = spack.repo.path.get_pkg_class(job_spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
         job_pkg = pkg_cls(job_spec)
         tty.debug("job package: {0}".format(job_pkg))
     except AssertionError:
@@ -291,7 +291,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
     if len(matching_specs) <= 1:
         return

-    format_string = "{name}{@version}{%compiler}{arch=architecture}"
+    format_string = "{name}{@version}{%compiler.name}{@compiler.version}{arch=architecture}"
     args = ["%s matches multiple packages." % spec, "Matching packages:"]
     args += [
         colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
@@ -342,9 +342,9 @@ def iter_groups(specs, indent, all_headers):
             print()

             header = "%s{%s} / %s{%s}" % (
-                spack.spec.architecture_color,
+                spack.spec.ARCHITECTURE_COLOR,
                 architecture if architecture else "no arch",
-                spack.spec.compiler_color,
+                spack.spec.COMPILER_COLOR,
                 f"{compiler.display_str}" if compiler else "no compiler",
             )
@@ -47,7 +47,7 @@ def configs(parser, args):


 def packages(parser, args):
-    pkgs = args.name or spack.repo.path.all_package_names()
+    pkgs = args.name or spack.repo.PATH.all_package_names()
     reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
     _process_reports(reports)

@@ -57,7 +57,7 @@ def packages_https(parser, args):
     if not args.check_all and not args.name:
         tty.die("Please specify one or more packages to audit, or --all.")

-    pkgs = args.name or spack.repo.path.all_package_names()
+    pkgs = args.name or spack.repo.PATH.all_package_names()
     reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
     _process_reports(reports)
@@ -126,7 +126,7 @@ def blame(parser, args):
         blame_file = path

     if not blame_file:
-        pkg_cls = spack.repo.path.get_pkg_class(args.package_or_file)
+        pkg_cls = spack.repo.PATH.get_pkg_class(args.package_or_file)
         blame_file = pkg_cls.module.__file__.rstrip("c")  # .pyc -> .py

     # get git blame for the package
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os.path
 import shutil
+import sys
 import tempfile

 import llnl.util.filesystem
@@ -68,11 +69,10 @@
 def _add_scope_option(parser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar
     parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )

@@ -169,7 +169,7 @@ def _reset(args):
     if not ok_to_continue:
         raise RuntimeError("Aborting")

-    for scope in spack.config.config.file_scopes:
+    for scope in spack.config.CONFIG.file_scopes:
         # The default scope should stay untouched
         if scope.name == "defaults":
             continue
@@ -186,7 +186,7 @@ def _reset(args):
     if os.path.exists(bootstrap_yaml):
         shutil.move(bootstrap_yaml, backup_file)

-    spack.config.config.clear_caches()
+    spack.config.CONFIG.clear_caches()


 def _root(args):
@@ -326,6 +326,7 @@ def _status(args):
     if missing:
         print(llnl.util.tty.color.colorize(legend))
         print()
+        sys.exit(1)


 def _add(args):
@@ -20,6 +20,7 @@
 import spack.cmd.common.arguments as arguments
 import spack.config
 import spack.environment as ev
+import spack.error
 import spack.mirror
 import spack.relocate
 import spack.repo
@@ -78,6 +79,11 @@ def setup_parser(subparser: argparse.ArgumentParser):
         "Alternatively, one can decide to build a cache for only the package or only the "
         "dependencies",
     )
+    push.add_argument(
+        "--fail-fast",
+        action="store_true",
+        help="stop pushing on first failure (default is best effort)",
+    )
     arguments.add_common_arguments(push, ["specs"])
     push.set_defaults(func=push_fn)

@@ -149,12 +155,11 @@ def setup_parser(subparser: argparse.ArgumentParser):

     # used to construct scope arguments below
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     check.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope(),
         help="configuration scope containing mirrors to check",
     )
@@ -297,6 +302,7 @@ def push_fn(args):
     tty.info(f"Selected {len(specs)} specs to push to {url}")

     skipped = []
+    failed = []

     # tty printing
     color = clr.get_color_when()
@@ -327,11 +333,17 @@ def push_fn(args):
         except bindist.NoOverwriteException:
             skipped.append(format_spec(spec))

+        # Catch any other exception unless the fail fast option is set
+        except Exception as e:
+            if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
+                raise
+            failed.append((format_spec(spec), e))
+
     if skipped:
         if len(specs) == 1:
             tty.info("The spec is already in the buildcache. Use --force to overwrite it.")
         elif len(skipped) == len(specs):
-            tty.info("All specs are already in the buildcache. Use --force to overwite them.")
+            tty.info("All specs are already in the buildcache. Use --force to overwrite them.")
         else:
             tty.info(
                 "The following {} specs were skipped as they already exist in the buildcache:\n"
@@ -341,6 +353,17 @@ def push_fn(args):
                 )
             )

+    if failed:
+        if len(failed) == 1:
+            raise failed[0][1]
+
+        raise spack.error.SpackError(
+            f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
+            "\n".join(
+                elide_list([f"  {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
+            ),
+        )
+

 def install_fn(args):
     """install from a binary package"""
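Reviewer note: a minimal sketch of the best-effort push behavior added above — collect per-item failures and raise one aggregate error at the end unless `--fail-fast` is set (names and errors are hypothetical):

```python
def push_all(items, fail_fast=False):
    failed = []
    for item in items:
        try:
            if item == "bad":
                raise ValueError("cannot push")
        except Exception as e:
            if fail_fast:
                raise  # stop on first failure
            failed.append((item, e))
    if failed:
        raise RuntimeError(f"{len(failed)} push(es) failed: {failed}")

push_all(["ok-1", "ok-2"])  # succeeds; with "bad" included it aggregates
```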
@@ -83,7 +83,7 @@ def checksum(parser, args):
         tty.die("`spack checksum` accepts package names, not URLs.")

     # Get the package we're going to generate checksums for
-    pkg_cls = spack.repo.path.get_pkg_class(args.package)
+    pkg_cls = spack.repo.PATH.get_pkg_class(args.package)
     pkg = pkg_cls(spack.spec.Spec(args.package))

     # Build a list of versions to checksum
@@ -210,7 +210,7 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str):

     """
     # Get filename and path for package
-    filename = spack.repo.path.filename_for_package_name(pkg.name)
+    filename = spack.repo.PATH.filename_for_package_name(pkg.name)
     num_versions_added = 0

     version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
@@ -289,6 +289,10 @@ def ci_rebuild(args):
     rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
     require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")

+    # If signing key was provided via "SPACK_SIGNING_KEY", then try to import it.
+    if signing_key:
+        spack_ci.import_signing_key(signing_key)
+
     # Fail early if signing is required but we don't have a signing key
     sign_binaries = require_signing is not None and require_signing.lower() == "true"
     if sign_binaries and not spack_ci.can_sign_binaries():
@@ -418,11 +422,6 @@ def ci_rebuild(args):
         dst_file = os.path.join(repro_dir, file_name)
         shutil.copyfile(src_file, dst_file)

-    # If signing key was provided via "SPACK_SIGNING_KEY", then try to
-    # import it.
-    if signing_key:
-        spack_ci.import_signing_key(signing_key)
-
     # Write this job's spec json into the reproduction directory, and it will
     # also be used in the generated "spack install" command to install the spec
     tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
@@ -679,7 +678,7 @@ def ci_rebuild(args):
         input_spec=job_spec,
         buildcache_mirror_url=buildcache_mirror_url,
         pipeline_mirror_url=pipeline_mirror_url,
-        sign_binaries=sign_binaries,
+        sign_binaries=spack_ci.can_sign_binaries(),
     ):
         msg = tty.msg if result.success else tty.warn
         msg(
@@ -17,6 +17,7 @@
 import spack.config
 import spack.repo
 import spack.stage
+import spack.store
 import spack.util.path
 from spack.paths import lib_path, var_path

@@ -117,15 +118,15 @@ def clean(parser, args):

     if args.downloads:
         tty.msg("Removing cached downloads")
-        spack.caches.fetch_cache.destroy()
+        spack.caches.FETCH_CACHE.destroy()

     if args.failures:
         tty.msg("Removing install failure marks")
-        spack.installer.clear_failures()
+        spack.store.STORE.failure_tracker.clear_all()

     if args.misc_cache:
         tty.msg("Removing cached information on repositories")
-        spack.caches.misc_cache.destroy()
+        spack.caches.MISC_CACHE.destroy()

     if args.python_cache:
         tty.msg("Removing python cache files")
@@ -36,13 +36,13 @@
     "bash": {
         "aliases": True,
         "format": "bash",
-        "header": os.path.join(spack.paths.share_path, "bash", "spack-completion.in"),
+        "header": os.path.join(spack.paths.share_path, "bash", "spack-completion.bash"),
         "update": os.path.join(spack.paths.share_path, "spack-completion.bash"),
     },
     "fish": {
         "aliases": True,
         "format": "fish",
-        "header": os.path.join(spack.paths.share_path, "fish", "spack-completion.in"),
+        "header": os.path.join(spack.paths.share_path, "fish", "spack-completion.fish"),
         "update": os.path.join(spack.paths.share_path, "spack-completion.fish"),
     },
 }
@@ -812,6 +812,9 @@ def bash(args: Namespace, out: IO) -> None:
     parser = spack.main.make_argument_parser()
     spack.main.add_all_commands(parser)

+    aliases = ";".join(f"{key}:{val}" for key, val in spack.main.aliases.items())
+    out.write(f'SPACK_ALIASES="{aliases}"\n\n')
+
     writer = BashCompletionWriter(parser.prog, out, args.aliases)
     writer.write(parser)
@@ -24,7 +24,6 @@ def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="compiler_command")

     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     # Find
     find_parser = sp.add_parser(
@@ -36,7 +35,7 @@ def setup_parser(subparser):
     find_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_modify_scope("compilers"),
         help="configuration scope to modify",
     )
@@ -50,7 +49,7 @@ def setup_parser(subparser):
     remove_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=None,
         help="configuration scope to modify",
     )
@@ -60,7 +59,7 @@ def setup_parser(subparser):
     list_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )
@@ -71,7 +70,7 @@ def setup_parser(subparser):
     info_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         default=spack.config.default_list_scope(),
         help="configuration scope to read from",
     )
@@ -93,7 +92,7 @@ def compiler_find(args):
     n = len(new_compilers)
     s = "s" if n > 1 else ""

-    config = spack.config.config
+    config = spack.config.CONFIG
     filename = config.get_config_filename(args.scope, "compilers")
     tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
     colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
@@ -186,7 +185,7 @@ def compiler_list(args):
         os_str = os
         if target:
             os_str += "-%s" % target
-        cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
+        cname = "%s{%s} %s" % (spack.spec.COMPILER_COLOR, name, os_str)
         tty.hline(colorize(cname), char="-")
         colify(reversed(sorted(c.spec.display_str for c in compilers)))
@@ -13,12 +13,11 @@

 def setup_parser(subparser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     subparser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )
@@ -27,13 +27,12 @@

 def setup_parser(subparser):
     scopes = spack.config.scopes()
-    scopes_metavar = spack.config.scopes_metavar

     # User can only choose one
     subparser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=scopes_metavar,
+        metavar=spack.config.SCOPES_METAVAR,
         help="configuration scope to read/modify",
     )

@@ -45,7 +44,7 @@ def setup_parser(subparser):
         help="configuration section to print\n\noptions: %(choices)s",
         nargs="?",
         metavar="section",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )

     blame_parser = sp.add_parser(
@@ -55,7 +54,7 @@ def setup_parser(subparser):
         "section",
         help="configuration section to print\n\noptions: %(choices)s",
         metavar="section",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )

     edit_parser = sp.add_parser("edit", help="edit configuration file")
@@ -64,7 +63,7 @@ def setup_parser(subparser):
         help="configuration section to edit\n\noptions: %(choices)s",
         metavar="section",
         nargs="?",
-        choices=spack.config.section_schemas,
+        choices=spack.config.SECTION_SCHEMAS,
     )
     edit_parser.add_argument(
         "--print-file", action="store_true", help="print the file name that would be edited"
     )
@@ -146,10 +145,10 @@ def config_get(args):
     scope, section = _get_scope_and_section(args)

     if section is not None:
-        spack.config.config.print_section(section)
+        spack.config.CONFIG.print_section(section)

     elif scope and scope.startswith("env:"):
-        config_file = spack.config.config.get_config_filename(scope, section)
+        config_file = spack.config.CONFIG.get_config_filename(scope, section)
         if os.path.exists(config_file):
             with open(config_file) as f:
                 print(f.read())
@@ -162,7 +161,7 @@ def config_get(args):

 def config_blame(args):
     """Print out line-by-line blame of merged YAML."""
-    spack.config.config.print_section(args.section, blame=True)
+    spack.config.CONFIG.print_section(args.section, blame=True)


 def config_edit(args):
@@ -181,7 +180,7 @@ def config_edit(args):
     scope, section = _get_scope_and_section(args)
     if not scope and not section:
         tty.die("`spack config edit` requires a section argument or an active environment.")
-    config_file = spack.config.config.get_config_filename(scope, section)
+    config_file = spack.config.CONFIG.get_config_filename(scope, section)

     if args.print_file:
         print(config_file)
@@ -194,7 +193,7 @@ def config_list(args):

     Used primarily for shell tab completion scripts.
     """
-    print(" ".join(list(spack.config.section_schemas)))
+    print(" ".join(list(spack.config.SECTION_SCHEMAS)))


 def config_add(args):
@@ -251,19 +250,19 @@ def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):

 def config_update(args):
     # Read the configuration files
-    spack.config.config.get_config(args.section, scope=args.scope)
+    spack.config.CONFIG.get_config(args.section, scope=args.scope)
     updates: List[spack.config.ConfigScope] = list(
         filter(
             lambda s: not isinstance(
                 s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
             ),
-            spack.config.config.format_updates[args.section],
+            spack.config.CONFIG.format_updates[args.section],
         )
     )

     cannot_overwrite, skip_system_scope = [], False
     for scope in updates:
-        cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
         can_be_updated = _can_update_config_file(scope, cfg_file)
         if not can_be_updated:
             if scope.name == "system":
@@ -302,7 +301,7 @@ def config_update(args):
         " the latest schema format:\n\n"
     )
     for scope in updates:
-        cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
         msg += "\t[scope={0}, file={1}]\n".format(scope.name, cfg_file)
     msg += (
         "\nIf the configuration files are updated, versions of Spack "
@@ -325,7 +324,7 @@ def config_update(args):
     # Make a backup copy and rewrite the file
     bkp_file = cfg_file + ".bkp"
     shutil.copy(cfg_file, bkp_file)
-    spack.config.config.update_config(args.section, data, scope=scope.name, force=True)
+    spack.config.CONFIG.update_config(args.section, data, scope=scope.name, force=True)
     tty.msg(f'File "{cfg_file}" update [backup={bkp_file}]')


@@ -337,13 +336,13 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):


 def config_revert(args):
-    scopes = [args.scope] if args.scope else [x.name for x in spack.config.config.file_scopes]
+    scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]

     # Search for backup files in the configuration scopes
     Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])
     to_be_restored, cannot_overwrite = [], []
     for scope in scopes:
-        cfg_file = spack.config.config.get_config_filename(scope, args.section)
+        cfg_file = spack.config.CONFIG.get_config_filename(scope, args.section)
         bkp_file = cfg_file + ".bkp"

         # If the backup files doesn't exist move to the next scope
@@ -457,7 +456,7 @@ def config_prefer_upstream(args):
     existing = spack.config.get("packages", scope=scope)
     new = spack.config.merge_yaml(existing, pkgs)
     spack.config.set("packages", new, scope)
-    config_file = spack.config.config.get_config_filename(scope, section)
+    config_file = spack.config.CONFIG.get_config_filename(scope, section)

     tty.msg("Updated config at {0}".format(config_file))
@@ -915,11 +915,11 @@ def get_repository(args, name):
         )
     else:
         if spec.namespace:
-            repo = spack.repo.path.get_repo(spec.namespace, None)
+            repo = spack.repo.PATH.get_repo(spec.namespace, None)
             if not repo:
                 tty.die("Unknown namespace: '{0}'".format(spec.namespace))
         else:
-            repo = spack.repo.path.first_repo()
+            repo = spack.repo.PATH.first_repo()

     # Set the namespace on the spec if it's not there already
     if not spec.namespace:
@@ -47,14 +47,14 @@ def inverted_dependencies():
|
||||
actual dependents.
|
||||
"""
|
||||
dag = {}
|
||||
for pkg_cls in spack.repo.path.all_package_classes():
|
||||
for pkg_cls in spack.repo.PATH.all_package_classes():
|
||||
dag.setdefault(pkg_cls.name, set())
|
||||
for dep in pkg_cls.dependencies:
|
||||
deps = [dep]
|
||||
|
||||
# expand virtuals if necessary
|
||||
if spack.repo.path.is_virtual(dep):
|
||||
deps += [s.name for s in spack.repo.path.providers_for(dep)]
|
||||
if spack.repo.PATH.is_virtual(dep):
|
||||
deps += [s.name for s in spack.repo.PATH.providers_for(dep)]
|
||||
|
||||
for d in deps:
|
||||
dag.setdefault(d, set()).add(pkg_cls.name)
|
||||
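The inversion above is just a reversed adjacency map. A minimal standalone sketch of the same idea, using hypothetical package data rather than Spack's repo API:

    # hypothetical forward map: package -> set of its dependencies
    deps = {"cmake": set(), "mpi": set(), "hdf5": {"cmake", "mpi"}, "netcdf-c": {"hdf5"}}

    dependents = {}
    for pkg, pkg_deps in deps.items():
        dependents.setdefault(pkg, set())  # every package gets an entry
        for dep in pkg_deps:
            # record pkg as a dependent of each of its dependencies
            dependents.setdefault(dep, set()).add(pkg)

    print(sorted(dependents["hdf5"]))  # ['netcdf-c']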

@@ -98,7 +98,7 @@ def dev_build(self, args):
tty.die("spack dev-build only takes one spec.")

spec = specs[0]
if not spack.repo.path.exists(spec.name):
if not spack.repo.PATH.exists(spec.name):
tty.die(
"No package for '{0}' was found.".format(spec.name),
" Use `spack create` to create a new package",

@@ -31,9 +31,9 @@ def edit_package(name, repo_path, namespace):
if repo_path:
repo = spack.repo.Repo(repo_path)
elif namespace:
repo = spack.repo.path.get_repo(namespace)
repo = spack.repo.PATH.get_repo(namespace)
else:
repo = spack.repo.path
repo = spack.repo.PATH
path = repo.filename_for_package_name(name)

spec = Spec(name)

@@ -239,6 +239,13 @@ def env_deactivate_setup_parser(subparser):
const="bat",
help="print bat commands to activate the environment",
)
shells.add_argument(
"--pwsh",
action="store_const",
dest="shell",
const="pwsh",
help="print pwsh commands to activate the environment",
)


def env_deactivate(args):

@@ -58,7 +58,7 @@ def extensions(parser, args):

extendable_pkgs = []
for name in spack.repo.all_package_names():
pkg_cls = spack.repo.path.get_pkg_class(name)
pkg_cls = spack.repo.PATH.get_pkg_class(name)
if pkg_cls.extendable:
extendable_pkgs.append(name)

@@ -81,7 +81,7 @@ def extensions(parser, args):

if args.show in ("packages", "all"):
# List package names of extensions
extensions = spack.repo.path.extensions_for(spec)
extensions = spack.repo.PATH.extensions_for(spec)
if not extensions:
tty.msg("%s has no extensions." % spec.cshort_spec)
else:

@@ -13,6 +13,7 @@
import spack
import spack.cmd
import spack.cmd.common.arguments
import spack.config
import spack.cray_manifest as cray_manifest
import spack.detection
import spack.error

@@ -27,7 +28,6 @@ def setup_parser(subparser):
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="external_command")

scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar

find_parser = sp.add_parser("find", help="add external packages to packages.yaml")
find_parser.add_argument(

@@ -47,7 +47,7 @@ def setup_parser(subparser):
find_parser.add_argument(
"--scope",
choices=scopes,
metavar=scopes_metavar,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope("packages"),
help="configuration scope to modify",
)

@@ -133,9 +133,9 @@ def external_find(args):

# Add the packages that have been required explicitly
if args.packages:
pkg_cls_to_check = [spack.repo.path.get_pkg_class(pkg) for pkg in args.packages]
pkg_cls_to_check = [spack.repo.PATH.get_pkg_class(pkg) for pkg in args.packages]
if args.tags:
allowed = set(spack.repo.path.packages_with_tags(*args.tags))
allowed = set(spack.repo.PATH.packages_with_tags(*args.tags))
pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]

if args.tags and not pkg_cls_to_check:

@@ -144,15 +144,15 @@ def external_find(args):
# Since tags are cached it's much faster to construct what we need
# to search directly, rather than filtering after the fact
pkg_cls_to_check = [
spack.repo.path.get_pkg_class(pkg_name)
spack.repo.PATH.get_pkg_class(pkg_name)
for tag in args.tags
for pkg_name in spack.repo.path.packages_with_tags(tag)
for pkg_name in spack.repo.PATH.packages_with_tags(tag)
]
pkg_cls_to_check = list(set(pkg_cls_to_check))

# If the list of packages is empty, search for every possible package
if not args.tags and not pkg_cls_to_check:
pkg_cls_to_check = list(spack.repo.path.all_package_classes())
pkg_cls_to_check = list(spack.repo.PATH.all_package_classes())

# If the user specified any packages to exclude from external find, add them here
if args.exclude:

@@ -165,7 +165,7 @@ def external_find(args):
detected_packages, scope=args.scope, buildable=not args.not_buildable
)
if new_entries:
path = spack.config.config.get_config_filename(args.scope, "packages")
path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
msg = "The following specs have been detected on this system and added to {0}"
tty.msg(msg.format(path))
spack.cmd.display_specs(new_entries)

@@ -239,7 +239,7 @@ def _collect_and_consume_cray_manifest_files(

def external_list(args):
# Trigger a read of all packages, might take a long time.
list(spack.repo.path.all_package_classes())
list(spack.repo.PATH.all_package_classes())
# Print all the detectable packages
tty.msg("Detectable packages per repository")
for namespace, pkgs in sorted(spack.package_base.detectable_packages.items()):

@@ -268,7 +268,7 @@ def find(parser, args):

# If tags have been specified on the command line, filter by tags
if args.tags:
packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
results = [x for x in results if x.name in packages_with_tags]

if args.loaded:

@@ -64,11 +64,11 @@ def section_title(s):


def version(s):
return spack.spec.version_color + s + plain_format
return spack.spec.VERSION_COLOR + s + plain_format


def variant(s):
return spack.spec.enabled_variant_color + s + plain_format
return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format


class VariantFormatter:

@@ -349,7 +349,7 @@ def print_virtuals(pkg):

def info(parser, args):
spec = spack.spec.Spec(args.package)
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
pkg = pkg_cls(spec)

# Output core package information

@@ -107,7 +107,7 @@ def match(p, f):
if f.match(p):
return True

pkg_cls = spack.repo.path.get_pkg_class(p)
pkg_cls = spack.repo.PATH.get_pkg_class(p)
if pkg_cls.__doc__:
return f.match(pkg_cls.__doc__)
return False

@@ -159,7 +159,7 @@ def get_dependencies(pkg):
@formatter
def version_json(pkg_names, out):
"""Print all packages with their latest versions."""
pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
pkg_classes = [spack.repo.PATH.get_pkg_class(name) for name in pkg_names]

out.write("[\n")

@@ -201,7 +201,7 @@ def html(pkg_names, out):
"""

# Read in all packages
pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
pkg_classes = [spack.repo.PATH.get_pkg_class(name) for name in pkg_names]

# Start at 2 because the title of the page from Sphinx is id1.
span_id = 2

@@ -313,13 +313,13 @@ def list(parser, args):

# If tags have been specified on the command line, filter by tags
if args.tags:
packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
sorted_packages = [p for p in sorted_packages if p in packages_with_tags]

if args.update:
# change output stream if user asked for update
if os.path.exists(args.update):
if os.path.getmtime(args.update) > spack.repo.path.last_mtime():
if os.path.getmtime(args.update) > spack.repo.PATH.last_mtime():
tty.msg("File is up to date: %s" % args.update)
return

@@ -52,6 +52,13 @@ def setup_parser(subparser):
const="bat",
help="print bat commands to load the package",
)
shells.add_argument(
"--pwsh",
action="store_const",
dest="shell",
const="pwsh",
help="print pwsh commands to load the package",
)

subparser.add_argument(
"--first",

@@ -109,7 +109,7 @@ def location(parser, args):
return

if args.packages:
print(spack.repo.path.first_repo().root)
print(spack.repo.PATH.first_repo().root)
return

if args.stages:

@@ -135,7 +135,7 @@ def location(parser, args):

# Package dir just needs the spec name
if args.package_dir:
print(spack.repo.path.dirname_for_package_name(spec.name))
print(spack.repo.PATH.dirname_for_package_name(spec.name))
return

# Either concretize or filter from already concretized environment

@@ -54,11 +54,11 @@ def setup_parser(subparser):

def packages_to_maintainers(package_names=None):
if not package_names:
package_names = spack.repo.path.all_package_names()
package_names = spack.repo.PATH.all_package_names()

pkg_to_users = defaultdict(lambda: set())
for name in package_names:
cls = spack.repo.path.get_pkg_class(name)
cls = spack.repo.PATH.get_pkg_class(name)
for user in cls.maintainers:
pkg_to_users[name].add(user)

@@ -67,8 +67,8 @@ def packages_to_maintainers(package_names=None):

def maintainers_to_packages(users=None):
user_to_pkgs = defaultdict(lambda: [])
for name in spack.repo.path.all_package_names():
cls = spack.repo.path.get_pkg_class(name)
for name in spack.repo.PATH.all_package_names():
cls = spack.repo.PATH.get_pkg_class(name)
for user in cls.maintainers:
lower_users = [u.lower() for u in users]
if not users or user.lower() in lower_users:

@@ -80,8 +80,8 @@ def maintainers_to_packages(users=None):
def maintained_packages():
maintained = []
unmaintained = []
for name in spack.repo.path.all_package_names():
cls = spack.repo.path.get_pkg_class(name)
for name in spack.repo.PATH.all_package_names():
cls = spack.repo.PATH.get_pkg_class(name)
if cls.maintainers:
maintained.append(name)
else:

@@ -90,7 +90,6 @@ def setup_parser(subparser):

# used to construct scope arguments below
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar

# Add
add_parser = sp.add_parser("add", help=mirror_add.__doc__)

@@ -99,7 +98,7 @@ def setup_parser(subparser):
add_parser.add_argument(
"--scope",
choices=scopes,
metavar=scopes_metavar,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)

@@ -119,7 +118,7 @@ def setup_parser(subparser):
remove_parser.add_argument(
"--scope",
choices=scopes,
metavar=scopes_metavar,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)

@@ -138,7 +137,7 @@ def setup_parser(subparser):
set_url_parser.add_argument(
"--scope",
choices=scopes,
metavar=scopes_metavar,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)

@@ -167,7 +166,7 @@ def setup_parser(subparser):
set_parser.add_argument(
"--scope",
choices=scopes,
metavar=scopes_metavar,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)

@@ -178,7 +177,7 @@ def setup_parser(subparser):
list_parser.add_argument(
"--scope",
choices=scopes,
metavar=scopes_metavar,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_list_scope(),
help="configuration scope to read from",
)

@@ -474,7 +473,7 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
path, skip_unstable_versions=skip_unstable_versions
)
for candidate in mirror_specs:
pkg_cls = spack.repo.path.get_pkg_class(candidate.name)
pkg_cls = spack.repo.PATH.get_pkg_class(candidate.name)
pkg_obj = pkg_cls(spack.spec.Spec(candidate))
mirror_stats.next_spec(pkg_obj.spec)
spack.mirror.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)

@@ -309,7 +309,7 @@ def refresh(module_type, specs, args):

# Skip unknown packages.
writers = [
cls(spec, args.module_set_name) for spec in specs if spack.repo.path.exists(spec.name)
cls(spec, args.module_set_name) for spec in specs if spack.repo.PATH.exists(spec.name)
]

# Filter excluded packages early

@@ -321,12 +321,13 @@ def refresh(module_type, specs, args):
file2writer[item.layout.filename].append(item)

if len(file2writer) != len(writers):
spec_fmt_str = "{name}@={version}%{compiler}/{hash:7} {variants} arch={arch}"
message = "Name clashes detected in module files:\n"
for filename, writer_list in file2writer.items():
if len(writer_list) > 1:
message += "\nfile: {0}\n".format(filename)
for x in writer_list:
message += "spec: {0}\n".format(x.spec.format())
message += "spec: {0}\n".format(x.spec.format(spec_fmt_str))
tty.error(message)
tty.error("Operation aborted")
raise SystemExit(1)
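For context, the clash detection above groups module writers by target filename and reports any file claimed by more than one spec; the new `spec_fmt_str` only changes how the clashing specs are printed. A minimal sketch of the grouping idiom, with hypothetical spec/filename pairs:

    import collections

    writers = [
        ("openmpi@4.1.5", "mpi/openmpi"),
        ("openmpi@4.1.6", "mpi/openmpi"),
        ("zlib@1.3", "zlib"),
    ]
    file2writer = collections.defaultdict(list)
    for spec, filename in writers:
        file2writer[filename].append(spec)

    clashes = {f: specs for f, specs in file2writer.items() if len(specs) > 1}
    print(clashes)  # {'mpi/openmpi': ['openmpi@4.1.5', 'openmpi@4.1.6']}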
@@ -376,7 +377,7 @@ def refresh(module_type, specs, args):
def modules_cmd(parser, args, module_type, callbacks=callbacks):
# Qualifiers to be used when querying the db for specs
constraint_qualifiers = {
"refresh": {"installed": True, "known": lambda x: not spack.repo.path.exists(x)}
"refresh": {"installed": True, "known": lambda x: not spack.repo.PATH.exists(x)}
}
query_args = constraint_qualifiers.get(args.subparser_name, {})

@@ -143,7 +143,7 @@ def pkg_source(args):
tty.die("spack pkg source requires exactly one spec")

spec = specs[0]
filename = spack.repo.path.filename_for_package_name(spec.name)
filename = spack.repo.PATH.filename_for_package_name(spec.name)

# regular source dump -- just get the package and print its contents
if args.canonical:

@@ -184,7 +184,7 @@ def pkg_grep(args, unknown_args):
grouper = lambda e: e[0] // 500

# set up iterator and save the first group to ensure we don't end up with a group of size 1
groups = itertools.groupby(enumerate(spack.repo.path.all_package_paths()), grouper)
groups = itertools.groupby(enumerate(spack.repo.PATH.all_package_paths()), grouper)
if not groups:
return 0 # no packages to search
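The `e[0] // 500` grouper batches the enumerated package paths into chunks of 500, so each grep invocation receives a bounded argument list. The same idiom in isolation, with hypothetical paths:

    import itertools

    paths = [f"pkg-{i}/package.py" for i in range(1200)]  # hypothetical paths
    grouper = lambda e: e[0] // 500  # e is an (index, path) pair from enumerate()

    for key, group in itertools.groupby(enumerate(paths), grouper):
        batch = [path for _, path in group]
        print(key, len(batch))  # prints: 0 500, then 1 500, then 2 200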

@@ -24,7 +24,7 @@ def setup_parser(subparser):

def providers(parser, args):
valid_virtuals = sorted(spack.repo.path.provider_index.providers.keys())
valid_virtuals = sorted(spack.repo.PATH.provider_index.providers.keys())

buffer = io.StringIO()
isatty = sys.stdout.isatty()

@@ -53,5 +53,5 @@ def providers(parser, args):
for spec in specs:
if sys.stdout.isatty():
print("{0}:".format(spec))
spack.cmd.display_specs(sorted(spack.repo.path.providers_for(spec)))
spack.cmd.display_specs(sorted(spack.repo.PATH.providers_for(spec)))
print("")

@@ -20,7 +20,6 @@
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="repo_command")
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar

# Create
create_parser = sp.add_parser("create", help=repo_create.__doc__)

@@ -45,7 +44,7 @@ def setup_parser(subparser):
list_parser.add_argument(
"--scope",
choices=scopes,
metavar=scopes_metavar,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_list_scope(),
help="configuration scope to read from",
)

@@ -56,7 +55,7 @@ def setup_parser(subparser):
add_parser.add_argument(
"--scope",
choices=scopes,
metavar=scopes_metavar,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)

@@ -69,7 +68,7 @@ def setup_parser(subparser):
remove_parser.add_argument(
"--scope",
choices=scopes,
metavar=scopes_metavar,
metavar=spack.config.SCOPES_METAVAR,
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)

@@ -29,7 +29,7 @@ def setup_parser(subparser):

def _show_patch(sha256):
"""Show a record from the patch index."""
patches = spack.repo.path.patch_index.index
patches = spack.repo.PATH.patch_index.index
data = patches.get(sha256)

if not data:

@@ -47,7 +47,7 @@ def _show_patch(sha256):
owner = rec["owner"]

if "relative_path" in rec:
pkg_dir = spack.repo.path.get_pkg_class(owner).package_dir
pkg_dir = spack.repo.PATH.get_pkg_class(owner).package_dir
path = os.path.join(pkg_dir, rec["relative_path"])
print(" path: %s" % path)
else:

@@ -60,7 +60,7 @@ def _show_patch(sha256):

def resource_list(args):
"""list all resources known to spack (currently just patches)"""
patches = spack.repo.path.patch_index.index
patches = spack.repo.PATH.patch_index.index
for sha256 in patches:
if args.only_hashes:
print(sha256)

@@ -137,7 +137,7 @@ def solve(parser, args):
# these are the same options as `spack spec`
install_status_fn = spack.spec.Spec.install_status

fmt = spack.spec.display_format
fmt = spack.spec.DISPLAY_FORMAT
if args.namespaces:
fmt = "{namespace}." + fmt

@@ -77,7 +77,7 @@ def setup_parser(subparser):
def spec(parser, args):
install_status_fn = spack.spec.Spec.install_status

fmt = spack.spec.display_format
fmt = spack.spec.DISPLAY_FORMAT
if args.namespaces:
fmt = "{namespace}." + fmt

@@ -68,7 +68,7 @@ def tags(parser, args):
return

# unique list of available tags
available_tags = sorted(spack.repo.path.tag_index.keys())
available_tags = sorted(spack.repo.PATH.tag_index.keys())
if not available_tags:
tty.msg("No tagged packages")
return

@@ -228,7 +228,7 @@ def create_reporter(args, specs_to_test, test_suite):

def test_list(args):
"""list installed packages with available tests"""
tagged = set(spack.repo.path.packages_with_tags(*args.tag)) if args.tag else set()
tagged = set(spack.repo.PATH.packages_with_tags(*args.tag)) if args.tag else set()

def has_test_and_tags(pkg_class):
tests = spack.install_test.test_functions(pkg_class)

@@ -237,7 +237,7 @@ def has_test_and_tags(pkg_class):
if args.list_all:
report_packages = [
pkg_class.name
for pkg_class in spack.repo.path.all_package_classes()
for pkg_class in spack.repo.PATH.all_package_classes()
if has_test_and_tags(pkg_class)
]

@@ -209,12 +209,11 @@ def unit_test(parser, args, unknown_args):
# mock configuration used by unit tests
# Note: skip on windows here because for the moment,
# clingo is wholly unsupported from bootstrap
if sys.platform != "win32":
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_core_dependencies()
if pytest is None:
spack.bootstrap.ensure_environment_dependencies()
import pytest
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_core_dependencies()
if pytest is None:
spack.bootstrap.ensure_environment_dependencies()
import pytest

if args.pytest_help:
# make the pytest.main help output more accurate

@@ -51,6 +51,13 @@ def setup_parser(subparser):
const="bat",
help="print bat commands to load the package",
)
shells.add_argument(
"--pwsh",
action="store_const",
dest="shell",
const="pwsh",
help="print pwsh commands to load the package",
)

subparser.add_argument(
"-a", "--all", action="store_true", help="unload all loaded Spack packages"

@@ -155,7 +155,7 @@ def url_list(args):
urls = set()

# Gather set of URLs from all packages
for pkg_cls in spack.repo.path.all_package_classes():
for pkg_cls in spack.repo.PATH.all_package_classes():
url = getattr(pkg_cls, "url", None)
urls = url_list_parsing(args, urls, url, pkg_cls)

@@ -192,7 +192,7 @@ def url_summary(args):
tty.msg("Generating a summary of URL parsing in Spack...")

# Loop through all packages
for pkg_cls in spack.repo.path.all_package_classes():
for pkg_cls in spack.repo.PATH.all_package_classes():
urls = set()
pkg = pkg_cls(spack.spec.Spec(pkg_cls.name))

@@ -336,7 +336,7 @@ def add(self, pkg_name, fetcher):
version_stats = UrlStats()
resource_stats = UrlStats()

for pkg_cls in spack.repo.path.all_package_classes():
for pkg_cls in spack.repo.PATH.all_package_classes():
npkgs += 1

for v in pkg_cls.versions:

@@ -45,7 +45,7 @@ def setup_parser(subparser):

def versions(parser, args):
spec = spack.spec.Spec(args.package)
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
pkg = pkg_cls(spec)

safe_versions = pkg.versions

@@ -135,7 +135,7 @@ def _init_compiler_config(*, scope):

def compiler_config_files():
config_files = list()
config = spack.config.config
config = spack.config.CONFIG
for scope in config.file_scopes:
name = scope.name
compiler_config = config.get("compilers", scope=name)

@@ -169,7 +169,7 @@ def remove_compiler_from_config(compiler_spec, scope=None):
"""
candidate_scopes = [scope]
if scope is None:
candidate_scopes = spack.config.config.scopes.keys()
candidate_scopes = spack.config.CONFIG.scopes.keys()

removal_happened = False
for current_scope in candidate_scopes:

@@ -523,7 +523,7 @@ def compiler_for_spec(compiler_spec, arch_spec):

@_auto_compiler_spec
def get_compiler_duplicates(compiler_spec, arch_spec):
config = spack.config.config
config = spack.config.CONFIG

scope_to_compilers = {}
for scope in config.scopes:

@@ -29,6 +29,90 @@
}


class CmdCall:
"""Compose a call to `cmd` for an ordered series of cmd commands/scripts"""

def __init__(self, *cmds):
if not cmds:
raise RuntimeError(
"""Attempting to run commands from CMD without specifying commands.
Please add commands to be run."""
)
self._cmds = cmds

def __call__(self):
out = subprocess.check_output(self.cmd_line, stderr=subprocess.STDOUT) # novermin
return out.decode("utf-16le", errors="replace") # novermin

@property
def cmd_line(self):
base_call = "cmd /u /c "
commands = " && ".join([x.command_str() for x in self._cmds])
# If multiple commands are being invoked by a single subshell
# they must be encapsulated by a double quote. Always double
# quote to be sure of proper handling
# cmd will properly resolve nested double quotes as needed
#
# `set` writes out the active env to the subshell stdout,
# and in this context we are always trying to obtain env
# state so it should always be appended
return base_call + f'"{commands} && set"'


class VarsInvocation:
def __init__(self, script):
self._script = script

def command_str(self):
return f'"{self._script}"'

@property
def script(self):
return self._script


class VCVarsInvocation(VarsInvocation):
def __init__(self, script, arch, msvc_version):
super(VCVarsInvocation, self).__init__(script)
self._arch = arch
self._msvc_version = msvc_version

@property
def sdk_ver(self):
"""Accessor for Windows SDK version property

Note: This property may not be set by
the calling context and as such this property will
return an empty string

This property will ONLY be set if the SDK package
is a dependency somewhere in the Spack DAG of the package
for which we are constructing an MSVC compiler env.
Otherwise this property should be unset to allow the VCVARS
script to use its internal heuristics to determine appropriate
SDK version
"""
if getattr(self, "_sdk_ver", None):
return self._sdk_ver + ".0"
return ""

@sdk_ver.setter
def sdk_ver(self, val):
self._sdk_ver = val

@property
def arch(self):
return self._arch

@property
def vcvars_ver(self):
return f"-vcvars_ver={self._msvc_version}"

def command_str(self):
script = super(VCVarsInvocation, self).command_str()
return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"
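To illustrate how `CmdCall.cmd_line` composes its invocation (all values below are hypothetical; on Windows the output of the trailing `set` would then be parsed into an environment dict):

    # what command_str() would produce for a single vcvars invocation
    script = r'"C:\VS\VC\Auxiliary\Build\vcvars64.bat"'  # hypothetical path
    arch, sdk_ver, vcvars_ver = "amd64", "", "-vcvars_ver=14.29"
    command = f"{script} {arch} {sdk_ver} {vcvars_ver}"

    # cmd_line double quotes the joined commands and appends `set` so the
    # subshell dumps the resulting environment to stdout
    cmd_line = 'cmd /u /c ' + f'"{command} && set"'
    print(cmd_line)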

def get_valid_fortran_pth(comp_ver):
cl_ver = str(comp_ver)
sort_fn = lambda fc_ver: StrictVersion(fc_ver)

@@ -75,22 +159,48 @@ class Msvc(Compiler):
# file based on compiler executable path.

def __init__(self, *args, **kwargs):
new_pth = [pth if pth else get_valid_fortran_pth(args[0].version) for pth in args[3]]
args[3][:] = new_pth
# This positional argument "paths" is later parsed and processed by the base class
# via the call to `super` later in this method
paths = args[3]
# This positional argument "cspec" is also parsed and handled by the base class
# constructor
cspec = args[0]
new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
paths[:] = new_pth
super().__init__(*args, **kwargs)
if os.getenv("ONEAPI_ROOT"):
# To use the MSVC compilers, VCVARS must be invoked
# VCVARS is located at a fixed location, referenceable
# idiomatically by the following relative path from the
# compiler.
# Spack first finds the compilers via VSWHERE
# and stores their path, but their respective VCVARS
# file must be invoked before usage.
env_cmds = []
compiler_root = os.path.join(self.cc, "../../../../../../..")
vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
# get current platform architecture and format for vcvars argument
arch = spack.platforms.real_host().default.lower()
arch = arch.replace("-", "_")
self.vcvars_call = VCVarsInvocation(vcvars_script_path, arch, self.msvc_version)
env_cmds.append(self.vcvars_call)
# Below is a check for a valid fortran path
# paths has c, cxx, fc, and f77 paths in that order
# paths[2] refers to the fc path and is a generic check
# for a fortran compiler
if paths[2]:
# If this is found, it sets all the vars
self.setvarsfile = os.path.join(os.getenv("ONEAPI_ROOT"), "setvars.bat")
else:
# To use the MSVC compilers, VCVARS must be invoked
# VCVARS is located at a fixed location, referenceable
# idiomatically by the following relative path from the
# compiler.
# Spack first finds the compilers via VSWHERE
# and stores their path, but their respective VCVARS
# file must be invoked before usage.
self.setvarsfile = os.path.abspath(os.path.join(self.cc, "../../../../../../.."))
self.setvarsfile = os.path.join(self.setvarsfile, "Auxiliary", "Build", "vcvars64.bat")
oneapi_root = os.getenv("ONEAPI_ROOT")
oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
oneapi_version_setvars = os.path.join(
oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
)
# order matters here, the specific version env must be invoked first,
# otherwise it will be ignored if the root setvars sets up the oneapi
# env first
env_cmds.extend(
[VarsInvocation(oneapi_version_setvars), VarsInvocation(oneapi_root_setvars)]
)
self.msvc_compiler_environment = CmdCall(*env_cmds)

@property
def msvc_version(self):

@@ -119,16 +229,30 @@ def platform_toolset_ver(self):
"""
return self.msvc_version[:2].joined.string[:3]

@property
def cl_version(self):
"""Cl toolset version"""
def _compiler_version(self, compiler):
"""Returns version object for given compiler"""
# ignore_errors below is true here due to ifx's
# non-zero return code if it is not provided
# an input file
return Version(
re.search(
Msvc.version_regex,
spack.compiler.get_compiler_version_output(self.cc, version_arg=None),
spack.compiler.get_compiler_version_output(
compiler, version_arg=None, ignore_errors=True
),
).group(1)
)

@property
def cl_version(self):
"""Cl toolset version"""
return self._compiler_version(self.cc)

@property
def ifx_version(self):
"""Ifx compiler version associated with this version of MSVC"""
return self._compiler_version(self.fc)

@property
def vs_root(self):
# The MSVC install root is located at a fixed level above the compiler

@@ -146,27 +270,12 @@ def setup_custom_environment(self, pkg, env):
# output, sort into dictionary, use that to make the build
# environment.

# get current platform architecture and format for vcvars argument
arch = spack.platforms.real_host().default.lower()
arch = arch.replace("-", "_")
# vcvars can target specific sdk versions, force it to pick up concretized sdk
# version, if needed by spec
sdk_ver = (
""
if "win-sdk" not in pkg.spec or pkg.name == "win-sdk"
else pkg.spec["win-sdk"].version.string + ".0"
)
# provide vcvars with msvc version selected by concretization,
# not whatever it happens to pick up on the system (highest available version)
out = subprocess.check_output( # novermin
'cmd /u /c "{}" {} {} {} && set'.format(
self.setvarsfile, arch, sdk_ver, "-vcvars_ver=%s" % self.msvc_version
),
stderr=subprocess.STDOUT,
)
if sys.version_info[0] >= 3:
out = out.decode("utf-16le", errors="replace") # novermin
if pkg.name != "win-sdk" and "win-sdk" in pkg.spec:
self.vcvars_call.sdk_ver = pkg.spec["win-sdk"].version.string

out = self.msvc_compiler_environment()
int_env = dict(
(key, value)
for key, _, value in (line.partition("=") for line in out.splitlines())

@@ -28,6 +28,7 @@

import spack.abi
import spack.compilers
import spack.config
import spack.environment
import spack.error
import spack.platforms

@@ -37,7 +38,6 @@
import spack.tengine
import spack.util.path
import spack.variant as vt
from spack.config import config
from spack.package_prefs import PackagePrefs, is_spec_buildable, spec_externals
from spack.version import ClosedOpenRange, VersionList, ver

@@ -76,7 +76,7 @@ class Concretizer:

def __init__(self, abstract_spec=None):
if Concretizer.check_for_compiler_existence is None:
Concretizer.check_for_compiler_existence = not config.get(
Concretizer.check_for_compiler_existence = not spack.config.get(
"config:install_missing_compilers", False
)
self.abstract_spec = abstract_spec

@@ -113,7 +113,7 @@ def _valid_virtuals_and_externals(self, spec):
pref_key = lambda spec: 0 # no-op pref key

if spec.virtual:
candidates = spack.repo.path.providers_for(spec)
candidates = spack.repo.PATH.providers_for(spec)
if not candidates:
raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)

@@ -47,6 +47,8 @@
import spack.platforms
import spack.schema
import spack.schema.bootstrap
import spack.schema.cdash
import spack.schema.ci
import spack.schema.compilers
import spack.schema.concretizer
import spack.schema.config

@@ -64,7 +66,7 @@
from spack.util.cpus import cpus_available

#: Dict from section names -> schema for that section
section_schemas = {
SECTION_SCHEMAS = {
"compilers": spack.schema.compilers.schema,
"concretizer": spack.schema.concretizer.schema,
"mirrors": spack.schema.mirrors.schema,

@@ -80,16 +82,16 @@

# Same as above, but including keys for environments
# this allows us to unify config reading between configs and environments
all_schemas = copy.deepcopy(section_schemas)
all_schemas.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})
_ALL_SCHEMAS = copy.deepcopy(SECTION_SCHEMAS)
_ALL_SCHEMAS.update({spack.schema.env.TOP_LEVEL_KEY: spack.schema.env.schema})

#: Path to the default configuration
configuration_defaults_path = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
CONFIGURATION_DEFAULTS_PATH = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))

#: Hard-coded default values for some key configuration options.
#: This ensures that Spack will still work even if config.yaml in
#: the defaults scope is removed.
config_defaults = {
CONFIG_DEFAULTS = {
"config": {
"debug": False,
"connect_timeout": 10,

@@ -105,10 +107,10 @@

#: metavar to use for commands that accept scopes
#: this is shorter and more readable than listing all choices
scopes_metavar = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
SCOPES_METAVAR = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"

#: Base name for the (internal) overrides scope.
overrides_base_name = "overrides-"
_OVERRIDES_BASE_NAME = "overrides-"


class ConfigScope:

@@ -134,7 +136,7 @@ def get_section_filename(self, section):
def get_section(self, section):
if section not in self.sections:
path = self.get_section_filename(section)
schema = section_schemas[section]
schema = SECTION_SCHEMAS[section]
data = read_config_file(path, schema)
self.sections[section] = data
return self.sections[section]

@@ -145,7 +147,7 @@ def _write_section(self, section):

# We copy data here to avoid adding defaults at write time
validate_data = copy.deepcopy(data)
validate(validate_data, section_schemas[section])
validate(validate_data, SECTION_SCHEMAS[section])

try:
mkdirp(self.path)

@@ -317,7 +319,7 @@ def __init__(self, name, data=None):
data = InternalConfigScope._process_dict_keyname_overrides(data)
for section in data:
dsec = data[section]
validate({section: dsec}, section_schemas[section])
validate({section: dsec}, SECTION_SCHEMAS[section])
self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)

def get_section_filename(self, section):

@@ -333,7 +335,7 @@ def _write_section(self, section):
"""This only validates, as the data is already in memory."""
data = self.get_section(section)
if data is not None:
validate(data, section_schemas[section])
validate(data, SECTION_SCHEMAS[section])
self.sections[section] = _mark_internal(data, self.name)

def __repr__(self):

@@ -430,7 +432,7 @@ def file_scopes(self) -> List[ConfigScope]:
return [
s
for s in self.scopes.values()
if (type(s) == ConfigScope or type(s) == SingleFileScope)
if (type(s) is ConfigScope or type(s) is SingleFileScope)
]

def highest_precedence_scope(self) -> ConfigScope:

@@ -711,11 +713,11 @@ def override(path_or_scope, value=None):
"""
if isinstance(path_or_scope, ConfigScope):
overrides = path_or_scope
config.push_scope(path_or_scope)
CONFIG.push_scope(path_or_scope)
else:
base_name = overrides_base_name
base_name = _OVERRIDES_BASE_NAME
# Ensure the new override gets a unique scope name
current_overrides = [s.name for s in config.matching_scopes(r"^{0}".format(base_name))]
current_overrides = [s.name for s in CONFIG.matching_scopes(r"^{0}".format(base_name))]
num_overrides = len(current_overrides)
while True:
scope_name = "{0}{1}".format(base_name, num_overrides)

@@ -725,19 +727,19 @@ def override(path_or_scope, value=None):
break

overrides = InternalConfigScope(scope_name)
config.push_scope(overrides)
config.set(path_or_scope, value, scope=scope_name)
CONFIG.push_scope(overrides)
CONFIG.set(path_or_scope, value, scope=scope_name)

try:
yield config
yield CONFIG
finally:
scope = config.remove_scope(overrides.name)
scope = CONFIG.remove_scope(overrides.name)
assert scope is overrides
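A hedged usage sketch of the context manager after the rename (the path form pushes a temporary `InternalConfigScope` that is removed again on exit):

    import spack.config

    with spack.config.override("config:debug", True):
        assert spack.config.get("config:debug") is True  # temporary scope active
    # on exit the overrides scope is popped and the previous value is visible again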

#: configuration scopes added on the command line
#: set by ``spack.main.main()``.
command_line_scopes: List[str] = []
COMMAND_LINE_SCOPES: List[str] = []


def _add_platform_scope(cfg, scope_type, name, path):

@@ -781,14 +783,14 @@ def create():
cfg = Configuration()

# first do the builtin, hardcoded defaults
builtin = InternalConfigScope("_builtin", config_defaults)
builtin = InternalConfigScope("_builtin", CONFIG_DEFAULTS)
cfg.push_scope(builtin)

# Builtin paths to configuration files in Spack
configuration_paths = [
# Default configuration scope is the lowest-level scope. These are
# versioned with Spack and can be overridden by systems, sites or users
configuration_defaults_path
CONFIGURATION_DEFAULTS_PATH
]

disable_local_config = "SPACK_DISABLE_LOCAL_CONFIG" in os.environ

@@ -815,7 +817,7 @@ def create():
_add_platform_scope(cfg, ConfigScope, name, path)

# add command-line scopes
_add_command_line_scopes(cfg, command_line_scopes)
_add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)

# we make a special scope for spack commands so that they can
# override configuration options.

@@ -825,7 +827,7 @@ def create():


#: This is the singleton configuration instance for Spack.
config: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
CONFIG: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)


def add_from_file(filename, scope=None):

@@ -838,7 +840,7 @@ def add_from_file(filename, scope=None):
# update all sections from config dict
# We have to iterate on keys to keep overrides from the file
for section in data.keys():
if section in section_schemas.keys():
if section in SECTION_SCHEMAS.keys():
# Special handling for compiler scope difference
# Has to be handled after we choose a section
if scope is None:

@@ -849,7 +851,7 @@ def add_from_file(filename, scope=None):
new = merge_yaml(existing, value)

# We cannot call config.set directly (set is a type)
config.set(section, new, scope)
CONFIG.set(section, new, scope)


def add(fullpath, scope=None):

@@ -861,6 +863,7 @@ def add(fullpath, scope=None):
has_existing_value = True
path = ""
override = False
value = syaml.load_config(components[-1])
for idx, name in enumerate(components[:-1]):
# First handle double colons in constructing path
colon = "::" if override else ":" if path else ""

@@ -881,14 +884,14 @@ def add(fullpath, scope=None):
existing = get_valid_type(path)

# construct value from this point down
value = syaml.load_config(components[-1])
for component in reversed(components[idx + 1 : -1]):
value = {component: value}
break

if override:
path += "::"

if has_existing_value:
path, _, value = fullpath.rpartition(":")
value = syaml.load_config(value)
existing = get(path, scope=scope)

# append values to lists

@@ -897,12 +900,12 @@ def add(fullpath, scope=None):

# merge value into existing
new = merge_yaml(existing, value)
config.set(path, new, scope)
CONFIG.set(path, new, scope)


def get(path, default=None, scope=None):
"""Module-level wrapper for ``Configuration.get()``."""
return config.get(path, default, scope)
return CONFIG.get(path, default, scope)


def set(path, value, scope=None):

@@ -910,26 +913,26 @@ def set(path, value, scope=None):

Accepts the path syntax described in ``get()``.
"""
return config.set(path, value, scope)
return CONFIG.set(path, value, scope)


def add_default_platform_scope(platform):
plat_name = os.path.join("defaults", platform)
plat_path = os.path.join(configuration_defaults_path[1], platform)
config.push_scope(ConfigScope(plat_name, plat_path))
plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
CONFIG.push_scope(ConfigScope(plat_name, plat_path))


def scopes():
"""Convenience function to get list of configuration scopes."""
return config.scopes
return CONFIG.scopes


def _validate_section_name(section):
"""Exit if the section is not a valid section."""
if section not in section_schemas:
if section not in SECTION_SCHEMAS:
raise ConfigSectionError(
"Invalid config section: '%s'. Options are: %s"
% (section, " ".join(section_schemas.keys()))
% (section, " ".join(SECTION_SCHEMAS.keys()))
)

@@ -990,7 +993,7 @@ def read_config_file(filename, schema=None):
if data:
if not schema:
key = next(iter(data))
schema = all_schemas[key]
schema = _ALL_SCHEMAS[key]
validate(data, schema)
return data

@@ -1089,7 +1092,7 @@ def get_valid_type(path):
test_data = {component: test_data}

try:
validate(test_data, section_schemas[section])
validate(test_data, SECTION_SCHEMAS[section])
except (ConfigFormatError, AttributeError) as e:
jsonschema_error = e.validation_error
if jsonschema_error.validator == "type":

@@ -1229,11 +1232,17 @@ def they_are(t):
return copy.copy(source)


#
# Process a path argument to config.set() that may contain overrides ('::' or
# trailing ':')
#
def process_config_path(path):
"""Process a path argument to config.set() that may contain overrides ('::' or
trailing ':')

Note: quoted value path components will be processed as a single value (escaping colons);
quoted path components outside of the value will be considered ill-formed and will
raise.
e.g. `this:is:a:path:'value:with:colon'` will yield:

[this, is, a, path, value:with:colon]
"""
result = []
if path.startswith(":"):
raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".format(path), "")

@@ -1260,6 +1269,16 @@ def process_config_path(path):
front = syaml.syaml_str(front)
front.append = True

quote = "['\"]"
not_quote = "[^'\"]"

if re.match(f"^{quote}", path):
m = re.match(rf"^{quote}({not_quote}+){quote}$", path)
if not m:
raise ValueError("Quotes indicate value, but there are additional path entries")
result.append(m.group(1))
break

result.append(front)
return result
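The quote handling hinges on the two small character classes above. A standalone sketch of the match performed on a quoted final component:

    import re

    quote, not_quote = "['\"]", "[^'\"]"
    segment = "'value:with:colon'"  # a quoted value component
    m = re.match(rf"^{quote}({not_quote}+){quote}$", segment)
    print(m.group(1))  # value:with:colon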

@@ -1278,9 +1297,9 @@ def default_modify_scope(section="config"):
If this is not 'compilers', a general (non-platform) scope is used.
"""
if section == "compilers":
return spack.config.config.highest_precedence_scope().name
return CONFIG.highest_precedence_scope().name
else:
return spack.config.config.highest_precedence_non_platform_scope().name
return CONFIG.highest_precedence_non_platform_scope().name


def default_list_scope():

@@ -1337,18 +1356,18 @@ def use_configuration(*scopes_or_paths):
Returns:
Configuration object associated with the scopes passed as arguments
"""
global config
global CONFIG

# Normalize input and construct a Configuration object
configuration = _config_from(scopes_or_paths)
config.clear_caches(), configuration.clear_caches()
CONFIG.clear_caches(), configuration.clear_caches()

saved_config, config = config, configuration
saved_config, CONFIG = CONFIG, configuration

try:
yield configuration
finally:
config = saved_config
CONFIG = saved_config
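A hedged usage sketch (the scope path is hypothetical): `use_configuration` swaps the global `CONFIG` for one built from the given scopes and restores the previous instance afterwards:

    import spack.config

    with spack.config.use_configuration("/tmp/site-scope") as cfg:
        timeout = cfg.get("config:connect_timeout", 10)  # reads resolve here
    # the previous global CONFIG is restored on exit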

@llnl.util.lang.memoized

@@ -5,8 +5,8 @@
"""Writers for different kinds of recipes and related
convenience functions.
"""
import collections
import copy
from collections import namedtuple
from typing import Optional

import spack.environment as ev

@@ -159,13 +159,13 @@ def depfile(self):
@tengine.context_property
def run(self):
"""Information related to the run image."""
Run = collections.namedtuple("Run", ["image"])
Run = namedtuple("Run", ["image"])
return Run(image=self.final_image)

@tengine.context_property
def build(self):
"""Information related to the build image."""
Build = collections.namedtuple("Build", ["image"])
Build = namedtuple("Build", ["image"])
return Build(image=self.build_image)

@tengine.context_property

@@ -176,12 +176,13 @@ def strip(self):
@tengine.context_property
def paths(self):
"""Important paths in the image"""
Paths = collections.namedtuple("Paths", ["environment", "store", "hidden_view", "view"])
Paths = namedtuple("Paths", ["environment", "store", "view_parent", "view", "former_view"])
return Paths(
environment="/opt/spack-environment",
store="/opt/software",
hidden_view="/opt/._view",
view="/opt/view",
view_parent="/opt/views",
view="/opt/views/view",
former_view="/opt/view", # /opt/view -> /opt/views/view for backward compatibility
)

@tengine.context_property

@@ -257,7 +258,7 @@ def _package_info_from(self, package_list):

update, install, clean = commands_for(os_pkg_manager)

Packages = collections.namedtuple("Packages", ["update", "install", "list", "clean"])
Packages = namedtuple("Packages", ["update", "install", "list", "clean"])
return Packages(update=update, install=install, list=package_list, clean=clean)

def _os_pkg_manager(self):

@@ -273,7 +274,7 @@ def _os_pkg_manager(self):

@tengine.context_property
def extra_instructions(self):
Extras = collections.namedtuple("Extra", ["build", "final"])
Extras = namedtuple("Extra", ["build", "final"])
extras = self.container_config.get("extra_instructions", {})
build, final = extras.get("build", None), extras.get("final", None)
return Extras(build=build, final=final)

@@ -295,7 +296,7 @@ def bootstrap(self):
context = {"bootstrap": {"image": self.bootstrap_image, "spack_checkout": command}}
bootstrap_recipe = env.get_template(template_path).render(**context)

Bootstrap = collections.namedtuple("Bootstrap", ["image", "recipe"])
Bootstrap = namedtuple("Bootstrap", ["image", "recipe"])
return Bootstrap(image=self.bootstrap_image, recipe=bootstrap_recipe)

@tengine.context_property

@@ -303,7 +304,7 @@ def render_phase(self):
render_bootstrap = bool(self.bootstrap_image)
render_build = not (self.last_phase == "bootstrap")
render_final = self.last_phase in (None, "final")
Render = collections.namedtuple("Render", ["bootstrap", "build", "final"])
Render = namedtuple("Render", ["bootstrap", "build", "final"])
return Render(bootstrap=render_bootstrap, build=render_build, final=render_final)

def __call__(self):

@@ -90,7 +90,7 @@ def spec_from_entry(entry):
name=entry["name"], version=entry["version"], compiler=compiler_str, arch=arch_str
)

pkg_cls = spack.repo.path.get_pkg_class(entry["name"])
pkg_cls = spack.repo.PATH.get_pkg_class(entry["name"])

if "parameters" in entry:
variant_strs = list()

@@ -21,10 +21,11 @@
import contextlib
import datetime
import os
import pathlib
import socket
import sys
import time
from typing import Dict, List, NamedTuple, Set, Type, Union
from typing import Any, Callable, Dict, Generator, List, NamedTuple, Set, Type, Union

try:
import uuid

@@ -141,22 +142,23 @@ class InstallStatuses:
def canonicalize(cls, query_arg):
if query_arg is True:
return [cls.INSTALLED]
elif query_arg is False:
if query_arg is False:
return [cls.MISSING]
elif query_arg is any:
if query_arg is any:
return [cls.INSTALLED, cls.DEPRECATED, cls.MISSING]
elif isinstance(query_arg, InstallStatus):
if isinstance(query_arg, InstallStatus):
return [query_arg]
else:
try: # Try block catches if it is not an iterable at all
if any(type(x) != InstallStatus for x in query_arg):
raise TypeError
except TypeError:
raise TypeError(
"installation query must be `any`, boolean, "
"InstallStatus, or iterable of InstallStatus"
)
return query_arg
try:
statuses = list(query_arg)
if all(isinstance(x, InstallStatus) for x in statuses):
return statuses
except TypeError:
pass

raise TypeError(
"installation query must be `any`, boolean, "
"InstallStatus, or iterable of InstallStatus"
)
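The refactor replaces the `elif` chain with early returns and an explicit iterable check. A self-contained sketch of the resulting behavior, using a stand-in enum rather than the real class:

    from enum import Enum

    class InstallStatus(Enum):  # stand-in for Spack's InstallStatus
        INSTALLED = "installed"
        DEPRECATED = "deprecated"
        MISSING = "missing"

    def canonicalize(query_arg):
        if query_arg is True:
            return [InstallStatus.INSTALLED]
        if query_arg is False:
            return [InstallStatus.MISSING]
        if query_arg is any:
            return [InstallStatus.INSTALLED, InstallStatus.DEPRECATED, InstallStatus.MISSING]
        if isinstance(query_arg, InstallStatus):
            return [query_arg]
        try:
            statuses = list(query_arg)  # raises TypeError if not iterable
            if all(isinstance(x, InstallStatus) for x in statuses):
                return statuses
        except TypeError:
            pass
        raise TypeError("query must be `any`, boolean, InstallStatus, or iterable of InstallStatus")

    assert canonicalize(True) == [InstallStatus.INSTALLED]
    assert canonicalize([InstallStatus.MISSING]) == [InstallStatus.MISSING]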

class InstallRecord:

@@ -306,15 +308,16 @@ def __reduce__(self):

"""

#: Data class to configure locks in Database objects
#:
#: Args:
#: enable (bool): whether to enable locks or not.
#: database_timeout (int or None): timeout for the database lock
#: package_timeout (int or None): timeout for the package lock


class LockConfiguration(NamedTuple):
"""Data class to configure locks in Database objects

Args:
enable: whether to enable locks or not.
database_timeout: timeout for the database lock
package_timeout: timeout for the package lock
"""

enable: bool
database_timeout: Optional[int]
package_timeout: Optional[int]

@@ -348,13 +351,230 @@ def lock_configuration(configuration):
)


def prefix_lock_path(root_dir: Union[str, pathlib.Path]) -> pathlib.Path:
"""Returns the path of the prefix lock file, given the root directory.

Args:
root_dir: root directory containing the database directory
"""
return pathlib.Path(root_dir) / _DB_DIRNAME / "prefix_lock"


def failures_lock_path(root_dir: Union[str, pathlib.Path]) -> pathlib.Path:
"""Returns the path of the failures lock file, given the root directory.

Args:
root_dir: root directory containing the database directory
"""
return pathlib.Path(root_dir) / _DB_DIRNAME / "prefix_failures"


class SpecLocker:
"""Manages acquiring and releasing read or write locks on concrete specs."""

def __init__(self, lock_path: Union[str, pathlib.Path], default_timeout: Optional[float]):
self.lock_path = pathlib.Path(lock_path)
self.default_timeout = default_timeout

# Maps (spec.dag_hash(), spec.name) to the corresponding lock object
self.locks: Dict[Tuple[str, str], lk.Lock] = {}

def lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
"""Returns a lock on a concrete spec.

The lock is a byte range lock on the nth byte of a file.

The lock file is ``self.lock_path``.

n is the sys.maxsize-bit prefix of the DAG hash. This makes the likelihood of collision
very low AND gives us readers-writer lock semantics with just a single lockfile, so
no cleanup is required.
"""
assert spec.concrete, "cannot lock a non-concrete spec"
timeout = timeout or self.default_timeout
key = self._lock_key(spec)

if key not in self.locks:
self.locks[key] = self.raw_lock(spec, timeout=timeout)
else:
self.locks[key].default_timeout = timeout

return self.locks[key]

def raw_lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
"""Returns a raw lock for a Spec, but doesn't keep track of it."""
return lk.Lock(
str(self.lock_path),
start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
length=1,
default_timeout=timeout,
desc=spec.name,
)
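The byte-range scheme gives every concrete spec its own lock without creating per-spec files: each DAG hash maps to a distinct one-byte range in a single shared file. One plausible way to derive such an offset (Spack's actual `dag_hash_bit_prefix` may differ in detail):

    import sys

    def lock_offset(dag_hash_hex: str) -> int:
        # keep only the leading sys.maxsize-bit prefix of the hash as an integer
        nbits = sys.maxsize.bit_length()  # 63 on typical 64-bit platforms
        excess = max(0, len(dag_hash_hex) * 4 - nbits)
        return int(dag_hash_hex, 16) >> excess

    # different hashes land on (almost certainly) different byte offsets
    print(lock_offset("abcdef0123456789" * 2))
    print(lock_offset("123456789abcdef0" * 2))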
|
||||
def has_lock(self, spec: "spack.spec.Spec") -> bool:
|
||||
"""Returns True if the spec is already managed by this spec locker"""
|
||||
return self._lock_key(spec) in self.locks
|
||||
|
||||
def _lock_key(self, spec: "spack.spec.Spec") -> Tuple[str, str]:
|
||||
return (spec.dag_hash(), spec.name)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def write_lock(self, spec: "spack.spec.Spec") -> Generator["SpecLocker", None, None]:
|
||||
lock = self.lock(spec)
|
||||
lock.acquire_write()
|
||||
|
||||
try:
|
||||
yield self
|
||||
except lk.LockError:
|
||||
# This addresses the case where a nested lock attempt fails inside
|
||||
# of this context manager
|
||||
raise
|
||||
except (Exception, KeyboardInterrupt):
|
||||
lock.release_write()
|
||||
raise
|
||||
else:
|
||||
lock.release_write()
|
||||
|
||||
def clear(self, spec: "spack.spec.Spec") -> Tuple[bool, Optional[lk.Lock]]:
|
||||
key = self._lock_key(spec)
|
||||
lock = self.locks.pop(key, None)
|
||||
return bool(lock), lock
|
||||
|
||||
def clear_all(self, clear_fn: Optional[Callable[[lk.Lock], Any]] = None) -> None:
|
||||
if clear_fn is not None:
|
||||
for lock in self.locks.values():
|
||||
clear_fn(lock)
|
||||
self.locks.clear()
|
||||
|
||||
|
||||
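The locking scheme above hinges on mapping each spec to a distinct byte offset in one shared lock file. A self-contained sketch of that idea, assuming a hex digest for simplicity (Spack's actual DAG hash is base32-encoded and ``dag_hash_bit_prefix``/``bit_length`` live in Spack's utility modules, so the details below are illustrative, not the real implementation):

    import sys

    def bit_length(n: int) -> int:
        # number of bits needed to represent n; 63 for sys.maxsize on 64-bit
        return n.bit_length()

    def hash_bit_prefix(hex_digest: str, bits: int) -> int:
        """Interpret the first `bits` bits of a hex digest as an integer offset."""
        hex_chars = (bits + 3) // 4                   # 4 bits per hex character
        prefix = int(hex_digest[:hex_chars], 16)      # take enough characters
        return prefix >> (hex_chars * 4 - bits)       # drop any excess low bits

    # Distinct specs land on distinct 1-byte ranges with overwhelming probability,
    # so one file provides per-spec readers-writer locks with no cleanup.
    offset = hash_bit_prefix("9f3c2d" * 12, bit_length(sys.maxsize))
    print(offset)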
+class FailureTracker:
+    """Tracks installation failures.
+
+    Prefix failure marking takes the form of a byte range lock on the nth
+    byte of a file for coordinating between concurrent parallel build
+    processes and a persistent file, named with the full hash and
+    containing the spec, in a subdirectory of the database to enable
+    persistence across overlapping but separate related build processes.
+
+    The failure lock file lives alongside the install DB.
+
+    ``n`` is the sys.maxsize-bit prefix of the associated DAG hash to make
+    the likelihood of collision very low with no cleanup required.
+    """
+
+    def __init__(self, root_dir: Union[str, pathlib.Path], default_timeout: Optional[float]):
+        #: Ensure a persistent location for dealing with parallel installation
+        #: failures (e.g., across near-concurrent processes).
+        self.dir = pathlib.Path(root_dir) / _DB_DIRNAME / "failures"
+        self.dir.mkdir(parents=True, exist_ok=True)
+
+        self.locker = SpecLocker(failures_lock_path(root_dir), default_timeout=default_timeout)
+
+    def clear(self, spec: "spack.spec.Spec", force: bool = False) -> None:
+        """Removes any persistent and cached failure tracking for the spec.
+
+        see `mark()`.
+
+        Args:
+            spec: the spec whose failure indicators are being removed
+            force: True if the failure information should be cleared when a failure lock
+                exists for the file, or False if the failure should not be cleared (e.g.,
+                it may be associated with a concurrent build)
+        """
+        locked = self.lock_taken(spec)
+        if locked and not force:
+            tty.msg(f"Retaining failure marking for {spec.name} due to lock")
+            return
+
+        if locked:
+            tty.warn(f"Removing failure marking despite lock for {spec.name}")
+
+        succeeded, lock = self.locker.clear(spec)
+        if succeeded and lock is not None:
+            lock.release_write()
+
+        if self.persistent_mark(spec):
+            path = self._path(spec)
+            tty.debug(f"Removing failure marking for {spec.name}")
+            try:
+                path.unlink()
+            except OSError as err:
+                tty.warn(
+                    f"Unable to remove failure marking for {spec.name} ({str(path)}): {str(err)}"
+                )
+
+    def clear_all(self) -> None:
+        """Force remove install failure tracking files."""
+        tty.debug("Releasing prefix failure locks")
+        self.locker.clear_all(
+            clear_fn=lambda x: x.release_write() if x.is_write_locked() else True
+        )
+
+        tty.debug("Removing prefix failure tracking files")
+        try:
+            for fail_mark in os.listdir(str(self.dir)):
+                try:
+                    (self.dir / fail_mark).unlink()
+                except OSError as exc:
+                    tty.warn(f"Unable to remove failure marking file {fail_mark}: {str(exc)}")
+        except OSError as exc:
+            tty.warn(f"Unable to remove failure marking files: {str(exc)}")
+
+    def mark(self, spec: "spack.spec.Spec") -> lk.Lock:
+        """Marks a spec as failing to install.
+
+        Args:
+            spec: spec that failed to install
+        """
+        # Dump the spec to the failure file for (manual) debugging purposes
+        path = self._path(spec)
+        path.write_text(spec.to_json())
+
+        # Also ensure a failure lock is taken to prevent cleanup removal
+        # of failure status information during a concurrent parallel build.
+        if not self.locker.has_lock(spec):
+            try:
+                mark = self.locker.lock(spec)
+                mark.acquire_write()
+            except lk.LockTimeoutError:
+                # Unlikely that another process failed to install at the same
+                # time but log it anyway.
+                tty.debug(f"PID {os.getpid()} failed to mark install failure for {spec.name}")
+                tty.warn(f"Unable to mark {spec.name} as failed.")
+
+        return self.locker.lock(spec)
+
+    def has_failed(self, spec: "spack.spec.Spec") -> bool:
+        """Return True if the spec is marked as failed."""
+        # The failure was detected in this process.
+        if self.locker.has_lock(spec):
+            return True
+
+        # The failure was detected by a concurrent process (e.g., an srun),
+        # which is expected to be holding a write lock if that is the case.
+        if self.lock_taken(spec):
+            return True
+
+        # Determine if the spec may have been marked as failed by a separate
+        # spack build process running concurrently.
+        return self.persistent_mark(spec)
+
+    def lock_taken(self, spec: "spack.spec.Spec") -> bool:
+        """Return True if another process has a failure lock on the spec."""
+        check = self.locker.raw_lock(spec)
+        return check.is_write_locked()
+
+    def persistent_mark(self, spec: "spack.spec.Spec") -> bool:
+        """Determine if the spec has a persistent failure marking."""
+        return self._path(spec).exists()
+
+    def _path(self, spec: "spack.spec.Spec") -> pathlib.Path:
+        """Return the path to the spec's failure file, which may not exist."""
+        assert spec.concrete, "concrete spec required for failure path"
+        return self.dir / f"{spec.name}-{spec.dag_hash()}"
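A sketch of the intended lifecycle of the new class, with a hypothetical tracker and an already-concrete spec (the names and argument values here are illustrative, not part of the diff):

    tracker = FailureTracker(root_dir="/opt/spack/store", default_timeout=3)

    tracker.mark(spec)        # writes the <name>-<dag_hash> marker file and takes a write lock
    tracker.has_failed(spec)  # True via a local lock, a concurrent process's lock, or the marker
    tracker.clear(spec)       # refuses while another process holds the lock (force=True overrides)
    tracker.clear_all()       # releases all held locks and deletes every marker file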
 class Database:
     #: Per-process lock objects for each install prefix
     _prefix_locks: Dict[str, lk.Lock] = {}

     #: Per-process failure (lock) objects for each install prefix
     _prefix_failures: Dict[str, lk.Lock] = {}

     #: Fields written for each install record
     record_fields: Tuple[str, ...] = DEFAULT_INSTALL_RECORD_FIELDS

@@ -392,24 +612,10 @@ def __init__(
         self._verifier_path = os.path.join(self.database_directory, "index_verifier")
         self._lock_path = os.path.join(self.database_directory, "lock")

-        # This is for other classes to use to lock prefix directories.
-        self.prefix_lock_path = os.path.join(self.database_directory, "prefix_lock")
-
-        # Ensure a persistent location for dealing with parallel installation
-        # failures (e.g., across near-concurrent processes).
-        self._failure_dir = os.path.join(self.database_directory, "failures")
-
-        # Support special locks for handling parallel installation failures
-        # of a spec.
-        self.prefix_fail_path = os.path.join(self.database_directory, "prefix_failures")
-
         # Create needed directories and files
         if not is_upstream and not os.path.exists(self.database_directory):
             fs.mkdirp(self.database_directory)

-        if not is_upstream and not os.path.exists(self._failure_dir):
-            fs.mkdirp(self._failure_dir)
-
         self.is_upstream = is_upstream
         self.last_seen_verifier = ""
         # Failed write transactions (interrupted by exceptions) will alert
@@ -423,15 +629,7 @@ def __init__(
         # initialize rest of state.
         self.db_lock_timeout = lock_cfg.database_timeout
-        self.package_lock_timeout = lock_cfg.package_timeout

         tty.debug("DATABASE LOCK TIMEOUT: {0}s".format(str(self.db_lock_timeout)))
-        timeout_format_str = (
-            "{0}s".format(str(self.package_lock_timeout))
-            if self.package_lock_timeout
-            else "No timeout"
-        )
-        tty.debug("PACKAGE LOCK TIMEOUT: {0}".format(str(timeout_format_str)))

         self.lock: Union[ForbiddenLock, lk.Lock]
         if self.is_upstream:
@@ -471,212 +669,6 @@ def read_transaction(self):
         """Get a read lock context manager for use in a `with` block."""
         return self._read_transaction_impl(self.lock, acquire=self._read)
-    def _failed_spec_path(self, spec):
-        """Return the path to the spec's failure file, which may not exist."""
-        if not spec.concrete:
-            raise ValueError("Concrete spec required for failure path for {0}".format(spec.name))
-
-        return os.path.join(self._failure_dir, "{0}-{1}".format(spec.name, spec.dag_hash()))
-
-    def clear_all_failures(self) -> None:
-        """Force remove install failure tracking files."""
-        tty.debug("Releasing prefix failure locks")
-        for pkg_id in list(self._prefix_failures.keys()):
-            lock = self._prefix_failures.pop(pkg_id, None)
-            if lock:
-                lock.release_write()
-
-        # Remove all failure markings (aka files)
-        tty.debug("Removing prefix failure tracking files")
-        for fail_mark in os.listdir(self._failure_dir):
-            try:
-                os.remove(os.path.join(self._failure_dir, fail_mark))
-            except OSError as exc:
-                tty.warn(
-                    "Unable to remove failure marking file {0}: {1}".format(fail_mark, str(exc))
-                )
-
-    def clear_failure(self, spec: "spack.spec.Spec", force: bool = False) -> None:
-        """
-        Remove any persistent and cached failure tracking for the spec.
-
-        see `mark_failed()`.
-
-        Args:
-            spec: the spec whose failure indicators are being removed
-            force: True if the failure information should be cleared when a prefix failure
-                lock exists for the file, or False if the failure should not be cleared (e.g.,
-                it may be associated with a concurrent build)
-        """
-        failure_locked = self.prefix_failure_locked(spec)
-        if failure_locked and not force:
-            tty.msg("Retaining failure marking for {0} due to lock".format(spec.name))
-            return
-
-        if failure_locked:
-            tty.warn("Removing failure marking despite lock for {0}".format(spec.name))
-
-        lock = self._prefix_failures.pop(spec.prefix, None)
-        if lock:
-            lock.release_write()
-
-        if self.prefix_failure_marked(spec):
-            try:
-                path = self._failed_spec_path(spec)
-                tty.debug("Removing failure marking for {0}".format(spec.name))
-                os.remove(path)
-            except OSError as err:
-                tty.warn(
-                    "Unable to remove failure marking for {0} ({1}): {2}".format(
-                        spec.name, path, str(err)
-                    )
-                )
-
-    def mark_failed(self, spec: "spack.spec.Spec") -> lk.Lock:
-        """
-        Mark a spec as failing to install.
-
-        Prefix failure marking takes the form of a byte range lock on the nth
-        byte of a file for coordinating between concurrent parallel build
-        processes and a persistent file, named with the full hash and
-        containing the spec, in a subdirectory of the database to enable
-        persistence across overlapping but separate related build processes.
-
-        The failure lock file, ``spack.store.STORE.db.prefix_failures``, lives
-        alongside the install DB. ``n`` is the sys.maxsize-bit prefix of the
-        associated DAG hash to make the likelihood of collision very low with
-        no cleanup required.
-        """
-        # Dump the spec to the failure file for (manual) debugging purposes
-        path = self._failed_spec_path(spec)
-        with open(path, "w") as f:
-            spec.to_json(f)
-
-        # Also ensure a failure lock is taken to prevent cleanup removal
-        # of failure status information during a concurrent parallel build.
-        err = "Unable to mark {0.name} as failed."
-
-        prefix = spec.prefix
-        if prefix not in self._prefix_failures:
-            mark = lk.Lock(
-                self.prefix_fail_path,
-                start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
-                length=1,
-                default_timeout=self.package_lock_timeout,
-                desc=spec.name,
-            )
-
-            try:
-                mark.acquire_write()
-            except lk.LockTimeoutError:
-                # Unlikely that another process failed to install at the same
-                # time but log it anyway.
-                tty.debug(
-                    "PID {0} failed to mark install failure for {1}".format(os.getpid(), spec.name)
-                )
-                tty.warn(err.format(spec))
-
-            # Whether we or another process marked it as a failure, track it
-            # as such locally.
-            self._prefix_failures[prefix] = mark
-
-        return self._prefix_failures[prefix]
-
-    def prefix_failed(self, spec: "spack.spec.Spec") -> bool:
-        """Return True if the prefix (installation) is marked as failed."""
-        # The failure was detected in this process.
-        if spec.prefix in self._prefix_failures:
-            return True
-
-        # The failure was detected by a concurrent process (e.g., an srun),
-        # which is expected to be holding a write lock if that is the case.
-        if self.prefix_failure_locked(spec):
-            return True
-
-        # Determine if the spec may have been marked as failed by a separate
-        # spack build process running concurrently.
-        return self.prefix_failure_marked(spec)
-
-    def prefix_failure_locked(self, spec: "spack.spec.Spec") -> bool:
-        """Return True if a process has a failure lock on the spec."""
-        check = lk.Lock(
-            self.prefix_fail_path,
-            start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
-            length=1,
-            default_timeout=self.package_lock_timeout,
-            desc=spec.name,
-        )
-
-        return check.is_write_locked()
-
-    def prefix_failure_marked(self, spec: "spack.spec.Spec") -> bool:
-        """Determine if the spec has a persistent failure marking."""
-        return os.path.exists(self._failed_spec_path(spec))
-
-    def prefix_lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
-        """Get a lock on a particular spec's installation directory.
-
-        NOTE: The installation directory **does not** need to exist.
-
-        Prefix lock is a byte range lock on the nth byte of a file.
-
-        The lock file is ``spack.store.STORE.db.prefix_lock`` -- the DB
-        tells us what to call it and it lives alongside the install DB.
-
-        n is the sys.maxsize-bit prefix of the DAG hash. This makes the
-        likelihood of collision very low AND gives us readers-writer lock
-        semantics with just a single lockfile, so no cleanup is required.
-        """
-        timeout = timeout or self.package_lock_timeout
-        prefix = spec.prefix
-        if prefix not in self._prefix_locks:
-            self._prefix_locks[prefix] = lk.Lock(
-                self.prefix_lock_path,
-                start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
-                length=1,
-                default_timeout=timeout,
-                desc=spec.name,
-            )
-        elif timeout != self._prefix_locks[prefix].default_timeout:
-            self._prefix_locks[prefix].default_timeout = timeout
-
-        return self._prefix_locks[prefix]
-
-    @contextlib.contextmanager
-    def prefix_read_lock(self, spec):
-        prefix_lock = self.prefix_lock(spec)
-        prefix_lock.acquire_read()
-
-        try:
-            yield self
-        except lk.LockError:
-            # This addresses the case where a nested lock attempt fails inside
-            # of this context manager
-            raise
-        except (Exception, KeyboardInterrupt):
-            prefix_lock.release_read()
-            raise
-        else:
-            prefix_lock.release_read()
-
-    @contextlib.contextmanager
-    def prefix_write_lock(self, spec):
-        prefix_lock = self.prefix_lock(spec)
-        prefix_lock.acquire_write()
-
-        try:
-            yield self
-        except lk.LockError:
-            # This addresses the case where a nested lock attempt fails inside
-            # of this context manager
-            raise
-        except (Exception, KeyboardInterrupt):
-            prefix_lock.release_write()
-            raise
-        else:
-            prefix_lock.release_write()
-
     def _write_to_file(self, stream):
         """Write out the database in JSON format to the stream passed
         as argument.
@@ -33,7 +33,7 @@ class OpenMpi(Package):
 import functools
 import os.path
 import re
-from typing import List, Optional, Set, Union
+from typing import Any, Callable, List, Optional, Set, Tuple, Union

 import llnl.util.lang
 import llnl.util.tty.color
@@ -42,6 +42,7 @@ class OpenMpi(Package):
 import spack.patch
 import spack.spec
 import spack.url
+import spack.util.crypto
 import spack.variant
 from spack.dependency import Dependency, canonical_deptype, default_deptype
 from spack.fetch_strategy import from_kwargs
@@ -407,10 +408,7 @@ def version(

 def _execute_version(pkg, ver, **kwargs):
     if (
-        any(
-            s in kwargs
-            for s in ("sha256", "sha384", "sha512", "md5", "sha1", "sha224", "checksum")
-        )
+        (any(s in kwargs for s in spack.util.crypto.hashes) or "checksum" in kwargs)
         and hasattr(pkg, "has_code")
         and not pkg.has_code
     ):
@@ -520,7 +518,8 @@ def _execute_conflicts(pkg):

         # Save in a list the conflicts and the associated custom messages
         when_spec_list = pkg.conflicts.setdefault(conflict_spec, [])
-        when_spec_list.append((when_spec, msg))
+        msg_with_name = f"{pkg.name}: {msg}" if msg is not None else msg
+        when_spec_list.append((when_spec, msg_with_name))

     return _execute_conflicts
@@ -663,39 +662,35 @@ def _execute_patch(pkg_or_dep):

 @directive("variants")
 def variant(
-    name,
-    default=None,
-    description="",
-    values=None,
-    multi=None,
-    validator=None,
-    when=None,
-    sticky=False,
+    name: str,
+    default: Optional[Any] = None,
+    description: str = "",
+    values: Optional[Union[collections.abc.Sequence, Callable[[Any], bool]]] = None,
+    multi: Optional[bool] = None,
+    validator: Optional[Callable[[str, str, Tuple[Any, ...]], None]] = None,
+    when: Optional[Union[str, bool]] = None,
+    sticky: bool = False,
 ):
-    """Define a variant for the package. Packager can specify a default
-    value as well as a text description.
+    """Define a variant for the package.
+
+    Packager can specify a default value as well as a text description.

     Args:
-        name (str): name of the variant
-        default (str or bool): default value for the variant, if not
-            specified otherwise the default will be False for a boolean
-            variant and 'nothing' for a multi-valued variant
-        description (str): description of the purpose of the variant
-        values (tuple or typing.Callable): either a tuple of strings containing the
-            allowed values, or a callable accepting one value and returning
-            True if it is valid
-        multi (bool): if False only one value per spec is allowed for
-            this variant
-        validator (typing.Callable): optional group validator to enforce additional
-            logic. It receives the package name, the variant name and a tuple
-            of values and should raise an instance of SpackError if the group
-            doesn't meet the additional constraints
-        when (spack.spec.Spec, bool): optional condition on which the
-            variant applies
-        sticky (bool): the variant should not be changed by the concretizer to
-            find a valid concrete spec.
+        name: Name of the variant
+        default: Default value for the variant, if not specified otherwise the default will be
+            False for a boolean variant and 'nothing' for a multi-valued variant
+        description: Description of the purpose of the variant
+        values: Either a tuple of strings containing the allowed values, or a callable accepting
+            one value and returning True if it is valid
+        multi: If False only one value per spec is allowed for this variant
+        validator: Optional group validator to enforce additional logic. It receives the package
+            name, the variant name and a tuple of values and should raise an instance of SpackError
+            if the group doesn't meet the additional constraints
+        when: Optional condition on which the variant applies
+        sticky: The variant should not be changed by the concretizer to find a valid concrete spec

     Raises:
-        DirectiveError: if arguments passed to the directive are invalid
+        DirectiveError: If arguments passed to the directive are invalid
     """

     def format_error(msg, pkg):
@@ -763,7 +758,7 @@ def _execute_variant(pkg):
         when_spec = make_when_spec(when)
         when_specs = [when_spec]

-        if not re.match(spack.spec.identifier_re, name):
+        if not re.match(spack.spec.IDENTIFIER_RE, name):
             directive = "variant"
             msg = "Invalid variant name in {0}: '{1}'"
             raise DirectiveError(directive, msg.format(pkg.name, name))
@@ -900,7 +895,8 @@ def _execute_requires(pkg):

         # Save in a list the requirements and the associated custom messages
         when_spec_list = pkg.requirements.setdefault(tuple(requirement_specs), [])
-        when_spec_list.append((when_spec, policy, msg))
+        msg_with_name = f"{pkg.name}: {msg}" if msg is not None else msg
+        when_spec_list.append((when_spec, policy, msg_with_name))

     return _execute_requires
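To make the newly annotated signature concrete, here is a hypothetical package using the directive. The call style is unchanged by this diff; the package and variant names are invented for the sketch, and it assumes a Spack environment where ``spack.package`` is importable:

    from spack.package import *

    class Libfoo(Package):
        """Hypothetical package illustrating the variant() directive."""

        homepage = "https://example.com/libfoo"
        url = "https://example.com/libfoo-1.0.tar.gz"

        variant("shared", default=True, description="Build shared libraries")
        variant(
            "backend",
            default="serial",
            values=("serial", "mpi"),   # tuple of allowed values, per `values` above
            multi=False,                # only one value per spec
            description="Communication backend",
            when="@1.0:",               # optional condition, per the `when` argument
        )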
@@ -11,6 +11,7 @@
 import shutil
 import sys
 from contextlib import contextmanager
+from pathlib import Path

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -104,7 +105,7 @@ def relative_path_for_spec(self, spec):

         projection = spack.projections.get_projection(self.projections, spec)
         path = spec.format(projection)
-        return path
+        return str(Path(path))

     def write_spec(self, spec, path):
         """Write a spec out to a file."""
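The ``str(Path(path))`` change normalizes the projection's separators for the platform the code runs on. A small runnable illustration, using PureWindowsPath so the effect is visible on any OS (the projection string is an invented example):

    from pathlib import Path, PureWindowsPath

    projection = "linux-ubuntu22.04-x86_64/gcc-12.2.0/zlib-1.2.13"
    print(str(Path(projection)))             # unchanged on POSIX systems
    print(str(PureWindowsPath(projection)))  # linux-ubuntu22.04-x86_64\gcc-12.2.0\zlib-1.2.13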
@@ -203,7 +203,7 @@ def activate(env, use_env_repo=False):
     env.store_token = spack.store.reinitialize()

     if use_env_repo:
-        spack.repo.path.put_first(env.repo)
+        spack.repo.PATH.put_first(env.repo)

     tty.debug("Using environment '%s'" % env.name)
@@ -227,7 +227,7 @@ def deactivate():

     # use _repo so we only remove if a repo was actually constructed
     if _active_environment._repo:
-        spack.repo.path.remove(_active_environment._repo)
+        spack.repo.PATH.remove(_active_environment._repo)

     tty.debug("Deactivated environment '%s'" % _active_environment.name)
@@ -1084,8 +1084,8 @@ def add(self, user_spec, list_name=user_speclist_name):
         if list_name == user_speclist_name:
             if spec.anonymous:
                 raise SpackEnvironmentError("cannot add anonymous specs to an environment")
-            elif not spack.repo.path.exists(spec.name) and not spec.abstract_hash:
-                virtuals = spack.repo.path.provider_index.providers.keys()
+            elif not spack.repo.PATH.exists(spec.name) and not spec.abstract_hash:
+                virtuals = spack.repo.PATH.provider_index.providers.keys()
                 if spec.name not in virtuals:
                     msg = "no such package: %s" % spec.name
                     raise SpackEnvironmentError(msg)
@@ -1262,7 +1262,7 @@ def develop(self, spec: Spec, path: str, clone: bool = False) -> bool:
         # better if we can create the `source_path` directly into its final
         # destination.
         abspath = spack.util.path.canonicalize_path(path, default_wd=self.path)
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
         # We construct a package class ourselves, rather than asking for
         # Spec.package, since Spec only allows this when it is concrete
         package = pkg_cls(spec)
@@ -1490,7 +1490,7 @@ def _concretize_separately(self, tests=False):
         # for a write lock. We do this indirectly by retrieving the
         # provider index, which should in turn trigger the update of
         # all the indexes if there's any need for that.
-        _ = spack.repo.path.provider_index
+        _ = spack.repo.PATH.provider_index

         # Ensure we have compilers in compilers.yaml to avoid that
         # processes try to write the config file in parallel
@@ -1994,14 +1994,10 @@ def get_one_by_hash(self, dag_hash):

     def all_matching_specs(self, *specs: spack.spec.Spec) -> List[Spec]:
         """Returns all concretized specs in the environment satisfying any of the input specs"""
-        # Look up abstract hashes ahead of time, to avoid O(n^2) traversal.
-        specs = [s.lookup_hash() for s in specs]
-
-        # Avoid double lookup by directly calling _satisfies.
         return [
             s
             for s in traverse.traverse_nodes(self.concrete_roots(), key=traverse.by_dag_hash)
-            if any(s._satisfies(t) for t in specs)
+            if any(s.satisfies(t) for t in specs)
         ]

     @spack.repo.autospec
@@ -2062,7 +2058,7 @@ def matching_spec(self, spec):
         # If multiple root specs match, it is assumed that the abstract
         # spec will most-succinctly summarize the difference between them
         # (and the user can enter one of these to disambiguate)
-        fmt_str = "{hash:7} " + spack.spec.default_format
+        fmt_str = "{hash:7} " + spack.spec.DEFAULT_FORMAT
         color = clr.get_color_when()
         match_strings = [
             f"Root spec {abstract.format(color=color)}\n {concrete.format(fmt_str, color=color)}"
@@ -2280,7 +2276,7 @@ def _add_to_environment_repository(self, spec_node: Spec) -> None:
         repository = spack.repo.create_or_construct(repository_dir, spec_node.namespace)
         pkg_dir = repository.dirname_for_package_name(spec_node.name)
         fs.mkdirp(pkg_dir)
-        spack.repo.path.dump_provenance(spec_node, pkg_dir)
+        spack.repo.PATH.dump_provenance(spec_node, pkg_dir)

     def manifest_uptodate_or_warn(self):
         """Emits a warning if the manifest file is not up-to-date."""
@@ -2370,7 +2366,7 @@ def display_specs(concretized_specs):
     def _tree_to_display(spec):
         return spec.tree(
             recurse_dependencies=True,
-            format=spack.spec.display_format,
+            format=spack.spec.DISPLAY_FORMAT,
             status_fn=spack.spec.Spec.install_status,
             hashlen=7,
             hashes=True,
@@ -2448,13 +2444,13 @@ def make_repo_path(root):
 def prepare_config_scope(env):
     """Add env's scope to the global configuration search path."""
     for scope in env.config_scopes():
-        spack.config.config.push_scope(scope)
+        spack.config.CONFIG.push_scope(scope)


 def deactivate_config_scope(env):
     """Remove any scopes from env from the global config path."""
     for scope in env.config_scopes():
-        spack.config.config.remove_scope(scope.name)
+        spack.config.CONFIG.remove_scope(scope.name)


 def manifest_file(env_name_or_dir):
@@ -2668,6 +2664,26 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
         self.yaml_content = with_defaults_added
         self.changed = False

+    def _all_matches(self, user_spec: str) -> List[str]:
+        """Maps the input string to all equivalent user specs in the manifest,
+        and returns them.
+
+        Args:
+            user_spec: user spec to be found
+
+        Raises:
+            ValueError: if no equivalent match is found
+        """
+        result = []
+        for yaml_spec_str in self.pristine_configuration["specs"]:
+            if Spec(yaml_spec_str) == Spec(user_spec):
+                result.append(yaml_spec_str)
+
+        if not result:
+            raise ValueError(f"cannot find a spec equivalent to {user_spec}")
+
+        return result

     def add_user_spec(self, user_spec: str) -> None:
         """Appends the user spec passed as input to the list of root specs.
@@ -2688,8 +2704,9 @@ def remove_user_spec(self, user_spec: str) -> None:
             SpackEnvironmentError: when the user spec is not in the list
         """
         try:
-            self.pristine_configuration["specs"].remove(user_spec)
-            self.configuration["specs"].remove(user_spec)
+            for key in self._all_matches(user_spec):
+                self.pristine_configuration["specs"].remove(key)
+                self.configuration["specs"].remove(key)
         except ValueError as e:
             msg = f"cannot remove {user_spec} from {self}, no such spec exists"
             raise SpackEnvironmentError(msg) from e
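Why ``_all_matches`` compares parsed specs rather than raw strings: textually different manifest entries can denote the same spec. A sketch, assuming a Spack checkout on ``sys.path`` (the spec strings are illustrative):

    from spack.spec import Spec

    # Both strings parse to the same abstract spec, so removing one form from
    # the manifest must also drop any other equivalent spelling.
    assert Spec("zlib @1.2.13") == Spec("zlib@1.2.13")
    assert Spec("zlib +shared") == Spec("zlib+shared")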
@@ -43,7 +43,7 @@ def activate_header(env, shell, prompt=None):
     # TODO: despacktivate
     # TODO: prompt
     elif shell == "pwsh":
-        cmds += "$Env:SPACK_ENV=%s\n" % env.path
+        cmds += "$Env:SPACK_ENV='%s'\n" % env.path
     else:
         if "color" in os.getenv("TERM", "") and prompt:
             prompt = colorize("@G{%s}" % prompt, color=True, enclose=True)
@@ -82,7 +82,7 @@ def deactivate_header(shell):
     # TODO: despacktivate
     # TODO: prompt
     elif shell == "pwsh":
-        cmds += "Remove-Item Env:SPACK_ENV"
+        cmds += "Set-Item -Path Env:SPACK_ENV\n"
     else:
         cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
         cmds += "unset SPACK_ENV; export SPACK_ENV;\n"
@@ -590,9 +590,9 @@ def print_status(self, *specs, **kwargs):
             print()

             header = "%s{%s} / %s{%s}" % (
-                spack.spec.architecture_color,
+                spack.spec.ARCHITECTURE_COLOR,
                 architecture,
-                spack.spec.compiler_color,
+                spack.spec.COMPILER_COLOR,
                 compiler,
             )
             tty.hline(colorize(header), char="-")
@@ -535,7 +535,7 @@ def edge_entry(self, edge):

 def _static_edges(specs, deptype):
     for spec in specs:
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
         possible = pkg_cls.possible_dependencies(expand_virtuals=True, deptype=deptype)

         for parent_name, dependencies in possible.items():
@@ -49,7 +49,7 @@ def __call__(self, spec):


 def _content_hash_override(spec):
-    pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
     pkg = pkg_cls(spec)
     return pkg.content_hash()
@@ -1147,12 +1147,12 @@ def write_test_result(self, spec, result):
     def write_reproducibility_data(self):
         for spec in self.specs:
             repo_cache_path = self.stage.repo.join(spec.name)
-            spack.repo.path.dump_provenance(spec, repo_cache_path)
+            spack.repo.PATH.dump_provenance(spec, repo_cache_path)
             for vspec in spec.package.virtuals_provided:
                 repo_cache_path = self.stage.repo.join(vspec.name)
                 if not os.path.exists(repo_cache_path):
                     try:
-                        spack.repo.path.dump_provenance(vspec, repo_cache_path)
+                        spack.repo.PATH.dump_provenance(vspec, repo_cache_path)
                     except spack.repo.UnknownPackageError:
                         pass  # not all virtuals have package files
@@ -519,13 +519,6 @@ def _try_install_from_binary_cache(
     )


-def clear_failures() -> None:
-    """
-    Remove all failure tracking markers for the Spack instance.
-    """
-    spack.store.STORE.db.clear_all_failures()
-
-
 def combine_phase_logs(phase_log_files: List[str], log_path: str) -> None:
     """
     Read set or list of logs and combine them into one file.
@@ -597,9 +590,11 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
         # Get the location of the package in the dest repo.
         dest_pkg_dir = repo.dirname_for_package_name(node.name)
         if node is spec:
-            spack.repo.path.dump_provenance(node, dest_pkg_dir)
+            spack.repo.PATH.dump_provenance(node, dest_pkg_dir)
         elif source_pkg_dir:
-            fs.install_tree(source_pkg_dir, dest_pkg_dir)
+            fs.install_tree(
+                source_pkg_dir, dest_pkg_dir, allow_broken_symlinks=(sys.platform != "win32")
+            )


 def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:
@@ -1126,15 +1121,13 @@ class PackageInstaller:
     instance.
     """

-    def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]] = []):
+    def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]] = []) -> None:
         """Initialize the installer.

         Args:
             installs (list): list of tuples, where each
                 tuple consists of a package (PackageBase) and its associated
                 install arguments (dict)
-        Return:
-            PackageInstaller: instance
         """
         # List of build requests
         self.build_requests = [BuildRequest(pkg, install_args) for pkg, install_args in installs]
@@ -1287,7 +1280,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
             dep_id = package_id(dep_pkg)

             # Check for failure since a prefix lock is not required
-            if spack.store.STORE.db.prefix_failed(dep):
+            if spack.store.STORE.failure_tracker.has_failed(dep):
                 action = "'spack install' the dependency"
                 msg = "{0} is marked as an install failure: {1}".format(dep_id, action)
                 raise InstallError(err.format(request.pkg_id, msg), pkg=dep_pkg)
@@ -1325,7 +1318,6 @@ def _prepare_for_install(self, task: BuildTask) -> None:
         """
         Check the database and leftover installation directories/files and
         prepare for a new install attempt for an uninstalled package.

         Preparation includes cleaning up installation and stage directories
         and ensuring the database is up-to-date.
@@ -1502,7 +1494,7 @@ def _ensure_locked(
         if lock is None:
             tty.debug(msg.format("Acquiring", desc, pkg_id, pretty_seconds(timeout or 0)))
             op = "acquire"
-            lock = spack.store.STORE.db.prefix_lock(pkg.spec, timeout)
+            lock = spack.store.STORE.prefix_locker.lock(pkg.spec, timeout)
         if timeout != lock.default_timeout:
             tty.warn(
                 "Expected prefix lock timeout {0}, not {1}".format(
@@ -1627,12 +1619,12 @@ def _add_tasks(self, request: BuildRequest, all_deps):
                 # Clear any persistent failure markings _unless_ they are
                 # associated with another process in this parallel build
                 # of the spec.
-                spack.store.STORE.db.clear_failure(dep, force=False)
+                spack.store.STORE.failure_tracker.clear(dep, force=False)

         install_package = request.install_args.get("install_package")
         if install_package and request.pkg_id not in self.build_tasks:
             # Be sure to clear any previous failure
-            spack.store.STORE.db.clear_failure(request.spec, force=True)
+            spack.store.STORE.failure_tracker.clear(request.spec, force=True)

             # If not installing dependencies, then determine their
             # installation status before proceeding
@@ -1888,7 +1880,7 @@ def _update_failed(
         err = "" if exc is None else ": {0}".format(str(exc))
         tty.debug("Flagging {0} as failed{1}".format(pkg_id, err))
         if mark:
-            self.failed[pkg_id] = spack.store.STORE.db.mark_failed(task.pkg.spec)
+            self.failed[pkg_id] = spack.store.STORE.failure_tracker.mark(task.pkg.spec)
         else:
             self.failed[pkg_id] = None
         task.status = STATUS_FAILED
@@ -2074,7 +2066,7 @@ def install(self) -> None:

             # Flag a failed spec. Do not need an (install) prefix lock since
             # assume using a separate (failed) prefix lock file.
-            if pkg_id in self.failed or spack.store.STORE.db.prefix_failed(spec):
+            if pkg_id in self.failed or spack.store.STORE.failure_tracker.has_failed(spec):
                 term_status.clear()
                 tty.warn("{0} failed to install".format(pkg_id))
                 self._update_failed(task)
@@ -2403,7 +2395,9 @@ def _install_source(self) -> None:
         src_target = os.path.join(pkg.spec.prefix, "share", pkg.name, "src")
         tty.debug("{0} Copying source to {1}".format(self.pre, src_target))

-        fs.install_tree(pkg.stage.source_path, src_target)
+        fs.install_tree(
+            pkg.stage.source_path, src_target, allow_broken_symlinks=(sys.platform != "win32")
+        )

     def _real_install(self) -> None:
         import spack.builder
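Taken together, the installer-side changes above replace the old ``Database`` failure and prefix-lock calls with the new ``FailureTracker`` and ``SpecLocker`` objects hung off ``spack.store.STORE``. The before/after call mapping, as it appears in this diff:

    spack.store.STORE.db.clear_failure(spec, force)  ->  spack.store.STORE.failure_tracker.clear(spec, force)
    spack.store.STORE.db.mark_failed(spec)           ->  spack.store.STORE.failure_tracker.mark(spec)
    spack.store.STORE.db.prefix_failed(spec)         ->  spack.store.STORE.failure_tracker.has_failed(spec)
    spack.store.STORE.db.prefix_lock(spec, timeout)  ->  spack.store.STORE.prefix_locker.lock(spec, timeout)
    spack.store.STORE.db.prefix_write_lock(spec)     ->  spack.store.STORE.prefix_locker.write_lock(spec)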
@@ -51,7 +51,7 @@
 stat_names = pstats.Stats.sort_arg_dict_default

 #: top-level aliases for Spack commands
-aliases = {"rm": "remove"}
+aliases = {"concretise": "concretize", "containerise": "containerize", "rm": "remove"}

 #: help levels in order of detail (i.e., number of commands shown)
 levels = ["short", "long"]
@@ -602,10 +602,10 @@ def setup_main_options(args):

         key = syaml.syaml_str("repos")
         key.override = True
-        spack.config.config.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
+        spack.config.CONFIG.scopes["command_line"].sections["repos"] = syaml.syaml_dict(
             [(key, [spack.paths.mock_packages_path])]
         )
-        spack.repo.path = spack.repo.create(spack.config.config)
+        spack.repo.PATH = spack.repo.create(spack.config.CONFIG)

     # If the user asked for it, don't check ssl certs.
     if args.insecure:
@@ -930,7 +930,7 @@ def _main(argv=None):

     # make spack.config aware of any command line configuration scopes
     if args.config_scopes:
-        spack.config.command_line_scopes = args.config_scopes
+        spack.config.COMMAND_LINE_SCOPES = args.config_scopes

     # ensure options on spack command come before everything
     setup_main_options(args)
@@ -442,7 +442,7 @@ def mirror_archive_paths(fetcher, per_package_ref, spec=None):
     storage path of the resource associated with the specified ``fetcher``."""
     ext = None
     if spec:
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
         versions = pkg_cls.versions.get(spec.version, {})
         ext = versions.get("extension", None)
     # If the spec does not explicitly specify an extension (the default case),
@@ -474,7 +474,7 @@ def get_all_versions(specs):
     """
     version_specs = []
     for spec in specs:
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
         # Skip any package that has no known versions.
         if not pkg_cls.versions:
             tty.msg("No safe (checksummed) versions for package %s" % pkg_cls.name)
@@ -178,7 +178,7 @@ def merge_config_rules(configuration, spec):
         if spec.satisfies(constraint):
             if hasattr(constraint, "override") and constraint.override:
                 spec_configuration = {}
-            update_dictionary_extending_lists(spec_configuration, action)
+            update_dictionary_extending_lists(spec_configuration, copy.deepcopy(action))

     # Transform keywords for dependencies or prerequisites into a list of spec
@@ -833,7 +833,7 @@ def ensure_modules_are_enabled_or_warn():
         return

     # Check if we have custom TCL module sections
-    for scope in spack.config.config.file_scopes:
+    for scope in spack.config.CONFIG.file_scopes:
         # Skip default configuration
         if scope.name.startswith("default"):
             continue
@@ -143,7 +143,7 @@ def hierarchy_tokens(self):

         # Check if all the tokens in the hierarchy are virtual specs.
         # If not warn the user and raise an error.
-        not_virtual = [t for t in tokens if t != "compiler" and not spack.repo.path.is_virtual(t)]
+        not_virtual = [t for t in tokens if t != "compiler" and not spack.repo.PATH.is_virtual(t)]
         if not_virtual:
             msg = "Non-virtual specs in 'hierarchy' list for lmod: {0}\n"
             msg += "Please check the 'modules.yaml' configuration files"
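Why the ``copy.deepcopy`` matters: without it, every spec matching a constraint would extend the same nested objects held by the shared configuration. A standalone illustration, using plain ``dict.update`` as a stand-in for ``update_dictionary_extending_lists``:

    import copy

    action = {"environment": {"prepend_path": {"PATH": "/a"}}}

    spec_configuration = {}
    spec_configuration.update(copy.deepcopy(action))       # merge an independent copy
    spec_configuration["environment"]["prepend_path"]["PATH"] = "/b"

    print(action["environment"]["prepend_path"])  # {'PATH': '/a'} -- original intact;
    # without deepcopy, the inner dict would be shared and the config mutated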
@@ -236,7 +236,7 @@ def install(self, prefix):

         # Create a multimethod with this name if there is not one already
         original_method = MultiMethodMeta._locals.get(method.__name__)
-        if not type(original_method) == SpecMultiMethod:
+        if not isinstance(original_method, SpecMultiMethod):
             original_method = SpecMultiMethod(original_method)

         if self.spec is not None:
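The ``isinstance`` swap is the usual Python idiom: ``type(x) == C`` rejects subclasses, while ``isinstance`` accepts them (and satisfies linters). A standalone illustration:

    class Base:
        pass

    class Sub(Base):
        pass

    obj = Sub()
    print(type(obj) == Base)      # False: exact-type comparison ignores inheritance
    print(isinstance(obj, Base))  # True: subclasses count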
@@ -665,7 +665,7 @@ def __init__(self, spec):
         self.win_rpath = fsys.WindowsSimulatedRPath(self)

         if self.is_extension:
-            pkg_cls = spack.repo.path.get_pkg_class(self.extendee_spec.name)
+            pkg_cls = spack.repo.PATH.get_pkg_class(self.extendee_spec.name)
             pkg_cls(self.extendee_spec)._check_extendable()

         super().__init__()
@@ -728,11 +728,11 @@ def possible_dependencies(
                 continue

             # expand virtuals if enabled, otherwise just stop at virtuals
-            if spack.repo.path.is_virtual(name):
+            if spack.repo.PATH.is_virtual(name):
                 if virtuals is not None:
                     virtuals.add(name)
                 if expand_virtuals:
-                    providers = spack.repo.path.providers_for(name)
+                    providers = spack.repo.PATH.providers_for(name)
                     dep_names = [spec.name for spec in providers]
                 else:
                     visited.setdefault(cls.name, set()).add(name)
@@ -756,7 +756,7 @@ def possible_dependencies(
                 continue

             try:
-                dep_cls = spack.repo.path.get_pkg_class(dep_name)
+                dep_cls = spack.repo.PATH.get_pkg_class(dep_name)
             except spack.repo.UnknownPackageError:
                 # log unknown packages
                 missing.setdefault(cls.name, set()).add(dep_name)
@@ -2209,7 +2209,7 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
             pkg = None

         # Pre-uninstall hook runs first.
-        with spack.store.STORE.db.prefix_write_lock(spec):
+        with spack.store.STORE.prefix_locker.write_lock(spec):
             if pkg is not None:
                 try:
                     spack.hooks.pre_uninstall(spec)
@@ -2459,8 +2459,8 @@ def possible_dependencies(*pkg_or_spec, **kwargs):
         if not isinstance(pos, spack.spec.Spec):
             pos = spack.spec.Spec(pos)

-        if spack.repo.path.is_virtual(pos.name):
-            packages.extend(p.package_class for p in spack.repo.path.providers_for(pos.name))
+        if spack.repo.PATH.is_virtual(pos.name):
+            packages.extend(p.package_class for p in spack.repo.PATH.providers_for(pos.name))
             continue
         else:
             packages.append(pos.package_class)
@@ -147,7 +147,7 @@ def preferred_variants(cls, pkg_name):
         variants = " ".join(variants)

         # Only return variants that are actually supported by the package
-        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         spec = spack.spec.Spec("%s %s" % (pkg_name, variants))
         return dict(
             (name, variant) for name, variant in spec.variants.items() if name in pkg_cls.variants
@@ -162,7 +162,7 @@ def spec_externals(spec):
     from spack.util.module_cmd import path_from_modules  # noqa: F401

     def _package(maybe_abstract_spec):
-        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
         return pkg_cls(maybe_abstract_spec)

     allpkgs = spack.config.get("packages")
@@ -199,7 +199,7 @@ def is_spec_buildable(spec):
     so_far = all_buildable  # the default "so far"

     def _package(s):
-        pkg_cls = spack.repo.path.get_pkg_class(s.name)
+        pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
         return pkg_cls(s)

     # check whether any providers for this package override the default
@@ -288,9 +288,6 @@ def next_spec(
                     )
                     raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)

-                if root_spec.concrete:
-                    raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))
-
                 root_spec._add_dependency(dependency, deptypes=(), virtuals=())

             else:
@@ -306,13 +303,12 @@ def all_specs(self) -> List[spack.spec.Spec]:
 class SpecNodeParser:
     """Parse a single spec node from a stream of tokens"""

-    __slots__ = "ctx", "has_compiler", "has_version", "has_hash"
+    __slots__ = "ctx", "has_compiler", "has_version"

     def __init__(self, ctx):
         self.ctx = ctx
         self.has_compiler = False
         self.has_version = False
-        self.has_hash = False

     def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spack.spec.Spec]:
         """Parse a single spec node from a stream of tokens
@@ -343,7 +339,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
         while True:
             if self.ctx.accept(TokenType.COMPILER):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 if self.has_compiler:
                     raise spack.spec.DuplicateCompilerSpecError(
                         f"{initial_spec} cannot have multiple compilers"
@@ -353,7 +348,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
                 initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
                 self.has_compiler = True
             elif self.ctx.accept(TokenType.COMPILER_AND_VERSION):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 if self.has_compiler:
                     raise spack.spec.DuplicateCompilerSpecError(
                         f"{initial_spec} cannot have multiple compilers"
@@ -367,7 +361,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
             elif self.ctx.accept(TokenType.VERSION) or self.ctx.accept(
                 TokenType.VERSION_HASH_PAIR
             ):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 if self.has_version:
                     raise spack.spec.MultipleVersionError(
                         f"{initial_spec} cannot have multiple versions"
@@ -378,25 +371,21 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac
                 initial_spec.attach_git_version_lookup()
                 self.has_version = True
             elif self.ctx.accept(TokenType.BOOL_VARIANT):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 variant_value = self.ctx.current_token.value[0] == "+"
                 initial_spec._add_flag(
                     self.ctx.current_token.value[1:].strip(), variant_value, propagate=False
                 )
             elif self.ctx.accept(TokenType.PROPAGATED_BOOL_VARIANT):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 variant_value = self.ctx.current_token.value[0:2] == "++"
                 initial_spec._add_flag(
                     self.ctx.current_token.value[2:].strip(), variant_value, propagate=True
                 )
             elif self.ctx.accept(TokenType.KEY_VALUE_PAIR):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 name, value = self.ctx.current_token.value.split("=", maxsplit=1)
                 name = name.strip("'\" ")
                 value = value.strip("'\" ")
                 initial_spec._add_flag(name, value, propagate=False)
             elif self.ctx.accept(TokenType.PROPAGATED_KEY_VALUE_PAIR):
-                self.hash_not_parsed_or_raise(initial_spec, self.ctx.current_token.value)
                 name, value = self.ctx.current_token.value.split("==", maxsplit=1)
                 name = name.strip("'\" ")
                 value = value.strip("'\" ")
@@ -411,12 +400,6 @@ def parse(self, initial_spec: Optional[spack.spec.Spec] = None) -> Optional[spac

         return initial_spec

-    def hash_not_parsed_or_raise(self, spec, addition):
-        if not self.has_hash:
-            return
-
-        raise spack.spec.RedundantSpecError(spec, addition)
-

 class FileParser:
     """Parse a single spec from a JSON or YAML file"""
@@ -238,7 +238,7 @@ def to_dict(self):

 def from_dict(dictionary, repository=None):
     """Create a patch from json dictionary."""
-    repository = repository or spack.repo.path
+    repository = repository or spack.repo.PATH
     owner = dictionary.get("owner")
     if "owner" not in dictionary:
         raise ValueError("Invalid patch dictionary: %s" % dictionary)
Some files were not shown because too many files have changed in this diff.