Compare commits

`fix/spack-...` → `develop-20...` (456 commits)

*(Commit table: only the abbreviated SHA1 column survived extraction; the Author, Date, and message cells rendered empty. The 456 hashes run from `9c5a70ab6c` at the top of the range through `859745f1a9` at the bottom.)*
**.github/workflows/audit.yaml** (38 changes)

```diff
@@ -17,33 +17,51 @@ concurrency:
 jobs:
   # Run audits on all the packages in the built-in repository
   package-audits:
-    runs-on: ${{ matrix.operating_system }}
+    runs-on: ${{ matrix.system.os }}
     strategy:
       matrix:
-        operating_system: ["ubuntu-latest", "macos-latest"]
+        system:
+          - { os: windows-latest, shell: 'powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}' }
+          - { os: ubuntu-latest, shell: bash }
+          - { os: macos-latest, shell: bash }
+    defaults:
+      run:
+        shell: ${{ matrix.system.shell }}
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
          python-version: ${{inputs.python_version}}
       - name: Install Python packages
         run: |
           pip install --upgrade pip setuptools pytest coverage[toml]
+      - name: Setup for Windows run
+        if: runner.os == 'Windows'
+        run: |
+          python -m pip install --upgrade pywin32
       - name: Package audits (with coverage)
-        if: ${{ inputs.with_coverage == 'true' }}
+        if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
         run: |
           . share/spack/setup-env.sh
           coverage run $(which spack) audit packages
           coverage run $(which spack) -d audit externals
           coverage combine
           coverage xml
       - name: Package audits (without coverage)
-        if: ${{ inputs.with_coverage == 'false' }}
+        if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
         run: |
           . share/spack/setup-env.sh
-          $(which spack) audit packages
-          $(which spack) audit externals
+          spack -d audit packages
+          spack -d audit externals
-      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
+      - name: Package audits (without coverage)
+        if: ${{ runner.os == 'Windows' }}
+        run: |
+          . share/spack/setup-env.sh
+          spack -d audit packages
+          ./share/spack/qa/validate_last_exit.ps1
+          spack -d audit externals
+          ./share/spack/qa/validate_last_exit.ps1
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         if: ${{ inputs.with_coverage == 'true' }}
         with:
           flags: unittests,audits
```
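The notable change above is the switch from a flat `operating_system` list to a matrix of objects, so each entry carries both a runner image and the default shell for that job's `run:` steps. A minimal standalone sketch of that pattern (the workflow name and echo step are hypothetical, not from this diff):

```yaml
# Minimal sketch of a matrix-of-objects with a per-entry default shell.
name: matrix-shell-demo
on: workflow_dispatch
jobs:
  demo:
    runs-on: ${{ matrix.system.os }}
    strategy:
      matrix:
        system:
          - { os: ubuntu-latest, shell: bash }
          - { os: windows-latest, shell: powershell }
    defaults:
      run:
        # Every run: step in this job uses the shell chosen by the matrix entry.
        shell: ${{ matrix.system.shell }}
    steps:
      - run: echo "running on ${{ matrix.system.os }}"
```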
**.github/workflows/bootstrap-test.sh** (5 changes)

```diff
@@ -1,7 +1,8 @@
 #!/bin/bash
-set -ex
+set -e
 source share/spack/setup-env.sh
+$PYTHON bin/spack bootstrap disable github-actions-v0.4
 $PYTHON bin/spack bootstrap disable spack-install
-$PYTHON bin/spack -d solve zlib
+$PYTHON bin/spack $SPACK_FLAGS solve zlib
 tree $BOOTSTRAP/store
 exit 0
```
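The script now takes its debug flags from `$SPACK_FLAGS` instead of hard-coding `-d`, and it continues to rely on `$PYTHON` and `$BOOTSTRAP` being provided by its caller. A hedged sketch of a calling step, modeled on the `spack-tmpconfig -b` invocation that appears in `bootstrap.yml` below; the step name and the env values shown are assumptions:

```yaml
# Hypothetical workflow step invoking the test script. bin/spack-tmpconfig
# is expected to provide BOOTSTRAP; PYTHON and SPACK_FLAGS are read by the
# script itself.
- name: Run bootstrap smoke test
  env:
    PYTHON: python3
    SPACK_FLAGS: -d
  run: ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
```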
**.github/workflows/bootstrap.yml** (346 changes)

```diff
@@ -13,118 +13,22 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  fedora-clingo-sources:
+  distros-clingo-sources:
     runs-on: ubuntu-latest
-    container: "fedora:latest"
+    container: ${{ matrix.image }}
+    strategy:
+      matrix:
+        image: ["fedora:latest", "opensuse/leap:latest"]
     steps:
-      - name: Install dependencies
+      - name: Setup Fedora
+        if: ${{ matrix.image == 'fedora:latest' }}
         run: |
           dnf install -y \
-            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
+            bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
             make patch unzip which xz python3 python3-devel tree \
             cmake bison bison-devel libstdc++-static
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-        with:
-          fetch-depth: 0
-      - name: Setup non-root user
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          useradd spack-test && mkdir -p ~spack-test
-          chown -R spack-test . ~spack-test
-      - name: Setup repo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          git --version
-          . .github/workflows/setup_git.sh
-      - name: Bootstrap clingo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
-          spack external find cmake bison
-          spack -d solve zlib
-          tree ~/.spack/bootstrap/store/
-
-  ubuntu-clingo-sources:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-      - name: Install dependencies
-        env:
-          DEBIAN_FRONTEND: noninteractive
-        run: |
-          apt-get update -y && apt-get upgrade -y
-          apt-get install -y \
-            bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
-            make patch unzip xz-utils python3 python3-dev tree \
-            cmake bison
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-        with:
-          fetch-depth: 0
-      - name: Setup non-root user
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          useradd spack-test && mkdir -p ~spack-test
-          chown -R spack-test . ~spack-test
-      - name: Setup repo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          git --version
-          . .github/workflows/setup_git.sh
-      - name: Bootstrap clingo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
-          spack external find cmake bison
-          spack -d solve zlib
-          tree ~/.spack/bootstrap/store/
-
-  ubuntu-clingo-binaries-and-patchelf:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-      - name: Install dependencies
-        env:
-          DEBIAN_FRONTEND: noninteractive
-        run: |
-          apt-get update -y && apt-get upgrade -y
-          apt-get install -y \
-            bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
-            make patch unzip xz-utils python3 python3-dev tree
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-        with:
-          fetch-depth: 0
-      - name: Setup non-root user
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          useradd spack-test && mkdir -p ~spack-test
-          chown -R spack-test . ~spack-test
-      - name: Setup repo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          git --version
-          . .github/workflows/setup_git.sh
-      - name: Bootstrap clingo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          source share/spack/setup-env.sh
-          spack -d solve zlib
-          tree ~/.spack/bootstrap/store/
-
-  opensuse-clingo-sources:
-    runs-on: ubuntu-latest
-    container: "opensuse/leap:latest"
-    steps:
-      - name: Install dependencies
+      - name: Setup OpenSUSE
+        if: ${{ matrix.image == 'opensuse/leap:latest' }}
         run: |
           # Harden CI by applying the workaround described here: https://www.suse.com/support/kb/doc/?id=000019505
           zypper update -y || zypper update -y
@@ -133,15 +37,9 @@
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
       - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
-      - name: Setup repo
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          git --version
-          . .github/workflows/setup_git.sh
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
@@ -151,77 +49,100 @@
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/
 
-  macos-clingo-sources:
-    runs-on: macos-latest
+  clingo-sources:
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      matrix:
+        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
     steps:
-      - name: Install dependencies
+      - name: Setup macOS
+        if: ${{ matrix.runner != 'ubuntu-latest' }}
         run: |
-          brew install cmake bison@2.7 tree
+          brew install cmake bison tree
       - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+        with:
+          fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: "3.12"
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
-          export PATH=/usr/local/opt/bison@2.7/bin:$PATH
           spack bootstrap disable github-actions-v0.5
           spack bootstrap disable github-actions-v0.4
           spack external find --not-buildable cmake bison
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/
 
-  macos-clingo-binaries:
-    runs-on: ${{ matrix.macos-version }}
+  gnupg-sources:
+    runs-on: ${{ matrix.runner }}
     strategy:
       matrix:
-        macos-version: ['macos-11', 'macos-12']
+        runner: [ 'macos-13', 'macos-14', "ubuntu-latest" ]
     steps:
-      - name: Install dependencies
+      - name: Setup macOS
+        if: ${{ matrix.runner != 'ubuntu-latest' }}
         run: |
-          brew install tree
+          brew install tree gawk
+          sudo rm -rf $(command -v gpg gpg2)
+      - name: Setup Ubuntu
+        if: ${{ matrix.runner == 'ubuntu-latest' }}
+        run: sudo rm -rf $(command -v gpg gpg2 patchelf)
       - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-      - name: Bootstrap clingo
-        run: |
-          set -ex
-          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
-            not_found=1
-            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
-            echo "Testing $ver_dir"
-            if [[ -d "$ver_dir" ]] ; then
-              if $ver_dir/python --version ; then
-                export PYTHON="$ver_dir/python"
-                not_found=0
-                old_path="$PATH"
-                export PATH="$ver_dir:$PATH"
-                ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
-                export PATH="$old_path"
-              fi
-            fi
-            # NOTE: test all pythons that exist, not all do on 12
-          done
-
-  ubuntu-clingo-binaries:
-    runs-on: ubuntu-20.04
-    steps:
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
-      - name: Setup repo
+      - name: Bootstrap GnuPG
         run: |
-          git --version
-          . .github/workflows/setup_git.sh
+          source share/spack/setup-env.sh
+          spack solve zlib
+          spack bootstrap disable github-actions-v0.5
+          spack bootstrap disable github-actions-v0.4
+          spack -d gpg list
+          tree ~/.spack/bootstrap/store/
+
+  from-binaries:
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      matrix:
+        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
+    steps:
+      - name: Setup macOS
+        if: ${{ matrix.runner != 'ubuntu-latest' }}
+        run: |
+          brew install tree
+          # Remove GnuPG since we want to bootstrap it
+          sudo rm -rf /usr/local/bin/gpg
+      - name: Setup Ubuntu
+        if: ${{ matrix.runner == 'ubuntu-latest' }}
+        run: |
+          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
+      - name: Checkout
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+        with:
+          python-version: |
+            3.8
+            3.9
+            3.10
+            3.11
+            3.12
+      - name: Set bootstrap sources
+        run: |
+          source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.4
+          spack bootstrap disable spack-install
       - name: Bootstrap clingo
         run: |
-          set -ex
-          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
+          set -e
+          for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
             not_found=1
             ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
-            echo "Testing $ver_dir"
             if [[ -d "$ver_dir" ]] ; then
+              echo "Testing $ver_dir"
               if $ver_dir/python --version ; then
                 export PYTHON="$ver_dir/python"
                 not_found=0
@@ -236,122 +157,9 @@
             exit 1
           fi
           done
 
-  ubuntu-gnupg-binaries:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-      - name: Install dependencies
-        env:
-          DEBIAN_FRONTEND: noninteractive
-        run: |
-          apt-get update -y && apt-get upgrade -y
-          apt-get install -y \
-            bzip2 curl file g++ gcc patchelf gfortran git gzip \
-            make patch unzip xz-utils python3 python3-dev tree
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-        with:
-          fetch-depth: 0
-      - name: Setup non-root user
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          useradd spack-test && mkdir -p ~spack-test
-          chown -R spack-test . ~spack-test
-      - name: Setup repo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          git --version
-          . .github/workflows/setup_git.sh
-      - name: Bootstrap GnuPG
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.4
-          spack bootstrap disable spack-install
-          spack -d gpg list
-          tree ~/.spack/bootstrap/store/
-
-  ubuntu-gnupg-sources:
-    runs-on: ubuntu-latest
-    container: "ubuntu:latest"
-    steps:
-      - name: Install dependencies
-        env:
-          DEBIAN_FRONTEND: noninteractive
-        run: |
-          apt-get update -y && apt-get upgrade -y
-          apt-get install -y \
-            bzip2 curl file g++ gcc patchelf gfortran git gzip \
-            make patch unzip xz-utils python3 python3-dev tree \
-            gawk
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-        with:
-          fetch-depth: 0
-      - name: Setup non-root user
-        run: |
-          # See [1] below
-          git config --global --add safe.directory /__w/spack/spack
-          useradd spack-test && mkdir -p ~spack-test
-          chown -R spack-test . ~spack-test
-      - name: Setup repo
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          git --version
-          . .github/workflows/setup_git.sh
-      - name: Bootstrap GnuPG
-        shell: runuser -u spack-test -- bash {0}
-        run: |
-          source share/spack/setup-env.sh
-          spack solve zlib
-          spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
-          spack -d gpg list
-          tree ~/.spack/bootstrap/store/
-
-  macos-gnupg-binaries:
-    runs-on: macos-latest
-    steps:
-      - name: Install dependencies
-        run: |
-          brew install tree
-          # Remove GnuPG since we want to bootstrap it
-          sudo rm -rf /usr/local/bin/gpg
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.4
-          spack bootstrap disable spack-install
           spack -d gpg list
           tree ~/.spack/bootstrap/store/
-
-  macos-gnupg-sources:
-    runs-on: macos-latest
-    steps:
-      - name: Install dependencies
-        run: |
-          brew install gawk tree
-          # Remove GnuPG since we want to bootstrap it
-          sudo rm -rf /usr/local/bin/gpg
-      - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-      - name: Bootstrap GnuPG
-        run: |
-          source share/spack/setup-env.sh
-          spack solve zlib
-          spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
-          spack -d gpg list
-          tree ~/.spack/bootstrap/store/
-
-
-# [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
-# introduce breaking behaviorso we have to set `safe.directory` in gitconfig ourselves.
-# See:
-# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
-# - https://github.com/actions/checkout/issues/760
-# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
```
**.github/workflows/build-containers.yml** (20 changes)

```diff
@@ -45,19 +45,18 @@
           [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
           [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
           [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
+          [ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
           [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
           [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
           [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
           [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
-          [fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
-          [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38'],
           [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
           [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
 
       - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
         id: docker_meta
@@ -89,9 +88,9 @@
           fi
 
       - name: Upload Dockerfile
-        uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
+        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
         with:
-          name: dockerfiles
+          name: dockerfiles_${{ matrix.dockerfile[0] }}
           path: dockerfiles
 
       - name: Set up QEMU
@@ -122,3 +121,14 @@
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.docker_meta.outputs.tags }}
           labels: ${{ steps.docker_meta.outputs.labels }}
+
+  merge-dockerfiles:
+    runs-on: ubuntu-latest
+    needs: deploy-images
+    steps:
+      - name: Merge Artifacts
+        uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
+        with:
+          name: dockerfiles
+          pattern: dockerfiles_*
+          delete-merged: true
```
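The per-matrix artifact names (`dockerfiles_<name>`) plus the new `merge-dockerfiles` job follow the upload-artifact v4 model, where two jobs can no longer upload to the same artifact name, so a merge step recombines them afterwards. A hedged sketch of a downstream consumer of the merged artifact (this job is hypothetical, not part of the diff):

```yaml
# Hypothetical job that downloads the single merged "dockerfiles" artifact
# produced by the merge-dockerfiles job above.
inspect-dockerfiles:
  runs-on: ubuntu-latest
  needs: merge-dockerfiles
  steps:
    - name: Download merged Dockerfiles
      uses: actions/download-artifact@v4
      with:
        name: dockerfiles
    - run: ls -R .
```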
**.github/workflows/ci.yaml** (2 changes)

```diff
@@ -36,7 +36,7 @@
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         if: ${{ github.event_name == 'push' }}
         with:
           fetch-depth: 0
```
**.github/workflows/nightly-win-builds.yml** (2 changes)

```diff
@@ -14,7 +14,7 @@
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
```
**.github/workflows/style/requirements.txt** (4 changes)

```diff
@@ -1,7 +1,7 @@
-black==24.4.0
+black==24.4.2
 clingo==5.7.1
 flake8==7.0.0
 isort==5.13.2
 mypy==1.8.0
-types-six==1.16.21.9
+types-six==1.16.21.20240513
 vermin==1.6.0
```
**.github/workflows/unit_tests.yaml** (20 changes)

```diff
@@ -51,7 +51,7 @@
             on_develop: false
 
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -91,7 +91,7 @@
           UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
         run: |
           share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         with:
           flags: unittests,linux,${{ matrix.concretizer }}
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -100,7 +100,7 @@
   shell:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -124,7 +124,7 @@
           COVERAGE: true
         run: |
           share/spack/qa/run-shell-tests
-      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         with:
           flags: shelltests,linux
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -141,7 +141,7 @@
           dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       - name: Setup repo and non-root user
         run: |
           git --version
@@ -160,7 +160,7 @@
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -185,7 +185,7 @@
           SPACK_TEST_SOLVER: clingo
         run: |
           share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         with:
           flags: unittests,linux,clingo
           token: ${{ secrets.CODECOV_TOKEN }}
@@ -195,10 +195,10 @@
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [macos-latest, macos-14]
+        os: [macos-13, macos-14]
         python-version: ["3.11"]
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -223,7 +223,7 @@
           $(which spack) solve zlib
           common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
           $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         with:
           flags: unittests,macos
           token: ${{ secrets.CODECOV_TOKEN }}
```
**.github/workflows/valid-style.yml** (6 changes)

```diff
@@ -18,7 +18,7 @@
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: '3.11'
@@ -35,7 +35,7 @@
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -70,7 +70,7 @@
           dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       - name: Setup repo and non-root user
         run: |
           git --version
```
|||||||
10
.github/workflows/windows_python.yml
vendored
10
.github/workflows/windows_python.yml
vendored
@@ -15,7 +15,7 @@ jobs:
|
|||||||
unit-tests:
|
unit-tests:
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||||
@@ -33,7 +33,7 @@ jobs:
|
|||||||
./share/spack/qa/validate_last_exit.ps1
|
./share/spack/qa/validate_last_exit.ps1
|
||||||
coverage combine -a
|
coverage combine -a
|
||||||
coverage xml
|
coverage xml
|
||||||
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
|
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
|
||||||
with:
|
with:
|
||||||
flags: unittests,windows
|
flags: unittests,windows
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
@@ -41,7 +41,7 @@ jobs:
|
|||||||
unit-tests-cmd:
|
unit-tests-cmd:
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||||
@@ -59,7 +59,7 @@ jobs:
|
|||||||
./share/spack/qa/validate_last_exit.ps1
|
./share/spack/qa/validate_last_exit.ps1
|
||||||
coverage combine -a
|
coverage combine -a
|
||||||
coverage xml
|
coverage xml
|
||||||
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
|
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
|
||||||
with:
|
with:
|
||||||
flags: unittests,windows
|
flags: unittests,windows
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
token: ${{ secrets.CODECOV_TOKEN }}
|
||||||
@@ -67,7 +67,7 @@ jobs:
|
|||||||
build-abseil:
|
build-abseil:
|
||||||
runs-on: windows-latest
|
runs-on: windows-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
|
||||||
with:
|
with:
|
||||||
fetch-depth: 0
|
fetch-depth: 0
|
||||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||||
|
|||||||
**CHANGELOG.md** (45 changes)

```diff
@@ -1,3 +1,48 @@
+# v0.21.2 (2024-03-01)
+
+## Bugfixes
+
+- Containerize: accommodate nested or pre-existing spack-env paths (#41558)
+- Fix setup-env script, when going back and forth between instances (#40924)
+- Fix using fully-qualified namespaces from root specs (#41957)
+- Fix a bug when a required provider is requested for multiple virtuals (#42088)
+- OCI buildcaches:
+  - only push in parallel when forking (#42143)
+  - use pickleable errors (#42160)
+- Fix using sticky variants in externals (#42253)
+- Fix a rare issue with conditional requirements and multi-valued variants (#42566)
+
+## Package updates
+- rust: add v1.75, rework a few variants (#41161,#41903)
+- py-transformers: add v4.35.2 (#41266)
+- mgard: fix OpenMP on AppleClang (#42933)
+
+# v0.21.1 (2024-01-11)
+
+## New features
+- Add support for reading buildcaches created by Spack v0.22 (#41773)
+
+## Bugfixes
+
+- spack graph: fix coloring with environments (#41240)
+- spack info: sort variants in --variants-by-name (#41389)
+- Spec.format: error on old style format strings (#41934)
+- ASP-based solver:
+  - fix infinite recursion when computing concretization errors (#41061)
+  - don't error for type mismatch on preferences (#41138)
+  - don't emit spurious debug output (#41218)
+- Improve the error message for deprecated preferences (#41075)
+- Fix MSVC preview version breaking clingo build on Windows (#41185)
+- Fix multi-word aliases (#41126)
+- Add a warning for unconfigured compiler (#41213)
+- environment: fix an issue with deconcretization/reconcretization of specs (#41294)
+- buildcache: don't error if a patch is missing, when installing from binaries (#41986)
+- Multiple improvements to unit-tests (#41215,#41369,#41495,#41359,#41361,#41345,#41342,#41308,#41226)
+
+## Package updates
+- root: add a webgui patch to address security issue (#41404)
+- BerkeleyGW: update source urls (#38218)
+
 # v0.21.0 (2023-11-11)
 
 `v0.21.0` is a major feature release.
```
*(file header lost in extraction; the hunk below edits the project README's description of Spack)*

```diff
@@ -32,7 +32,7 @@
 
 Spack is a multi-platform package manager that builds and installs
 multiple versions and configurations of software. It works on Linux,
-macOS, and many supercomputers. Spack is non-destructive: installing a
+macOS, Windows, and many supercomputers. Spack is non-destructive: installing a
 new version of a package does not break existing installations, so many
 configurations of the same package can coexist.
 
```
*(file header lost in extraction; the hunk below edits a PowerShell launcher script)*

```diff
@@ -144,3 +144,5 @@ switch($SpackSubCommand)
     "unload" {Invoke-SpackLoad}
     default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
 }
+
+exit $LASTEXITCODE
```
*(file header lost in extraction; the deleted file below is, per its own header comment, Spack's default `modules.yaml`)*

```diff
@@ -1,16 +0,0 @@
-# -------------------------------------------------------------------------
-# This is the default configuration for Spack's module file generation.
-#
-# Settings here are versioned with Spack and are intended to provide
-# sensible defaults out of the box. Spack maintainers should edit this
-# file to keep it current.
-#
-# Users can override these settings by editing the following files.
-#
-# Per-spack-instance settings (overrides defaults):
-#   $SPACK_ROOT/etc/spack/modules.yaml
-#
-# Per-user settings (overrides default and site settings):
-#   ~/.spack/modules.yaml
-# -------------------------------------------------------------------------
-modules: {}
```
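The deleted file was an empty placeholder (`modules: {}`); its header comment pointed users at `~/.spack/modules.yaml` for per-user overrides. A hedged sketch of such an override, with illustrative settings that are not part of this diff:

```yaml
# Hypothetical ~/.spack/modules.yaml, following the override path named in
# the deleted header comment. Enables tcl module generation and drops the
# hash suffix from module names; both settings are illustrative.
modules:
  default:
    enable:
    - tcl
    tcl:
      hash_length: 0
```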
*(file header lost in extraction; the hunks below edit the default `packages.yaml` provider preferences)*

```diff
@@ -18,6 +18,7 @@ packages:
     compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
     providers:
       awk: [gawk]
+      armci: [armcimpi]
       blas: [openblas, amdblis]
       D: [ldc]
       daal: [intel-oneapi-daal]
@@ -37,10 +38,9 @@
       lapack: [openblas, amdlibflame]
       libc: [glibc, musl]
       libgfortran: [ gcc-runtime ]
-      libglx: [mesa+glx, mesa18+glx]
+      libglx: [mesa+glx]
       libifcore: [ intel-oneapi-runtime ]
       libllvm: [llvm]
-      libosmesa: [mesa+osmesa, mesa18+osmesa]
       lua-lang: [lua, lua-luajit-openresty, lua-luajit]
       luajit: [lua-luajit-openresty, lua-luajit]
       mariadb-client: [mariadb-c-client, mariadb]
```
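The `providers` map above is Spack's default preference list for virtual packages; a user-level `packages.yaml` can override individual entries with the same syntax. A hedged sketch (the chosen providers are illustrative, not from this diff):

```yaml
# Hypothetical user-level packages.yaml overriding two defaults: prefer
# MPICH as the mpi provider and AMD BLIS as the blas provider.
packages:
  all:
    providers:
      mpi: [mpich, openmpi]
      blas: [amdblis, openblas]
```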
**lib/spack/docs/_templates/layout.html** (new file, 12 lines)

```diff
@@ -0,0 +1,12 @@
+{% extends "!layout.html" %}
+
+{%- block extrahead %}
+<!-- Google tag (gtag.js) -->
+<script async src="https://www.googletagmanager.com/gtag/js?id=G-S0PQ7WV75K"></script>
+<script>
+  window.dataLayer = window.dataLayer || [];
+  function gtag(){dataLayer.push(arguments);}
+  gtag('js', new Date());
+  gtag('config', 'G-S0PQ7WV75K');
+</script>
+{% endblock %}
```
@@ -865,7 +865,7 @@ There are several different ways to use Spack packages once you have
|
|||||||
installed them. As you've seen, spack packages are installed into long
|
installed them. As you've seen, spack packages are installed into long
|
||||||
paths with hashes, and you need a way to get them into your path. The
|
paths with hashes, and you need a way to get them into your path. The
|
||||||
easiest way is to use :ref:`spack load <cmd-spack-load>`, which is
|
easiest way is to use :ref:`spack load <cmd-spack-load>`, which is
|
||||||
described in the next section.
|
described in this section.
|
||||||
|
|
||||||
Some more advanced ways to use Spack packages include:
|
Some more advanced ways to use Spack packages include:
|
||||||
|
|
||||||
@@ -959,7 +959,86 @@ use ``spack find --loaded``.
|
|||||||
You can also use ``spack load --list`` to get the same output, but it
|
You can also use ``spack load --list`` to get the same output, but it
|
||||||
does not have the full set of query options that ``spack find`` offers.
|
does not have the full set of query options that ``spack find`` offers.
|
||||||
|
|
||||||
We'll learn more about Spack's spec syntax in the next section.
|
We'll learn more about Spack's spec syntax in :ref:`a later section <sec-specs>`.
|
||||||
|
|
||||||
|
|
||||||
|
.. _extensions:
|
||||||
|
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
Python packages and virtual environments
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
Spack can install a large number of Python packages. Their names are
|
||||||
|
typically prefixed with ``py-``. Installing and using them is no
|
||||||
|
different from any other package:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ spack install py-numpy
|
||||||
|
$ spack load py-numpy
|
||||||
|
$ python3
|
||||||
|
>>> import numpy
|
||||||
|
|
||||||
|
The ``spack load`` command sets the ``PATH`` variable so that the right Python
|
||||||
|
executable is used, and makes sure that ``numpy`` and its dependencies can be
|
||||||
|
located in the ``PYTHONPATH``.
|
||||||
|
|
||||||
|
Spack is different from other Python package managers in that it installs
|
||||||
|
every package into its *own* prefix. This is in contrast to ``pip``, which
|
||||||
|
installs all packages into the same prefix, be it in a virtual environment
|
||||||
|
or not.
|
||||||
|
|
||||||
|
For many users, **virtual environments** are more convenient than repeated
|
||||||
|
``spack load`` commands, particularly when working with multiple Python
|
||||||
|
packages. Fortunately Spack supports environments itself, which together
|
||||||
|
with a view are no different from Python virtual environments.
|
||||||
|
|
||||||
|
The recommended way of working with Python extensions such as ``py-numpy``
|
||||||
|
is through :ref:`Environments <environments>`. The following example creates
|
||||||
|
a Spack environment with ``numpy`` in the current working directory. It also
|
||||||
|
puts a filesystem view in ``./view``, which is a more traditional combined
|
||||||
|
prefix for all packages in the environment.
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ spack env create --with-view view --dir .
|
||||||
|
$ spack -e . add py-numpy
|
||||||
|
$ spack -e . concretize
|
||||||
|
$ spack -e . install
|
||||||
|
|
||||||
|
Now you can activate the environment and start using the packages:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ spack env activate .
|
||||||
|
$ python3
|
||||||
|
>>> import numpy
|
||||||
|
|
||||||
|
The environment view is also a virtual environment, which is useful if you are
|
||||||
|
sharing the environment with others who are unfamiliar with Spack. They can
|
||||||
|
either use the Python executable directly:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ ./view/bin/python3
|
||||||
|
>>> import numpy
|
||||||
|
|
||||||
|
or use the activation script:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ source ./view/bin/activate
|
||||||
|
$ python3
|
||||||
|
>>> import numpy
|
||||||
|
|
||||||
|
In general, there should not be much difference between ``spack env activate``
|
||||||
|
and using the virtual environment. The main advantage of ``spack env activate``
|
||||||
|
is that it knows about more packages than just Python packages, and it may set
|
||||||
|
additional runtime variables that are not covered by the virtual environment
|
||||||
|
activation script.
|
||||||
|
|
||||||
|
See :ref:`environments` for a more in-depth description of Spack
|
||||||
|
environments and customizations to views.
|
||||||
|
|
||||||
|
|
||||||
.. _sec-specs:
|
.. _sec-specs:
|
||||||
@@ -1354,22 +1433,12 @@ the reserved keywords ``platform``, ``os`` and ``target``:
|
|||||||
   $ spack install libelf os=ubuntu18.04
   $ spack install libelf target=broadwell

or together by using the reserved keyword ``arch``:

.. code-block:: console

   $ spack install libelf arch=cray-CNL10-haswell

Normally users don't have to bother specifying the architecture if they
are installing software for their current host, as in that case the
values will be detected automatically. If you need fine-grained control
over which packages use which targets (or over *all* packages' default
target), see :ref:`package-preferences`.

.. admonition:: Cray machines

   The situation is a little bit different for Cray machines, and a detailed
   explanation of how the architecture can be set on them can be found at
   :ref:`cray-support`.

.. _support-for-microarchitectures:

@@ -1705,165 +1774,6 @@ check only local packages (as opposed to those used transparently from

``upstream`` spack instances) and the ``-j,--json`` option to output
machine-readable JSON data for any errors.

-----------------------
Filesystem requirements
-----------------------

@@ -21,23 +21,86 @@ is the following:

Reuse already installed packages
--------------------------------

The ``reuse`` attribute controls how aggressively Spack reuses binary packages during concretization. The
attribute can either be a single value, or an object for more complex configurations.

In the former case ("single value") it allows Spack to:

1. Reuse installed packages and buildcaches for all the specs to be concretized, when ``true``
2. Reuse installed packages and buildcaches only for the dependencies of the root specs, when ``dependencies``
3. Disregard reusing installed packages and buildcaches, when ``false``
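
As a quick sketch of the single-value form, the setting can also be written to
a configuration scope from the command line (assuming a writable user scope):

.. code-block:: console

   % spack config add concretizer:reuse:dependencies
   % spack config get concretizer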

If finer control over which specs are reused is needed, the value of this attribute can be
an object, with the following keys:

1. ``roots``: if ``true`` root specs are reused, if ``false`` only dependencies of root specs are reused
2. ``from``: list of sources from which reused specs are taken

Each source in ``from`` is itself an object:

.. list-table:: Attributes for a source of reusable specs
   :header-rows: 1

   * - Attribute name
     - Description
   * - type (mandatory, string)
     - Can be ``local``, ``buildcache``, or ``external``
   * - include (optional, list of specs)
     - If present, reusable specs must match at least one of the constraints in the list
   * - exclude (optional, list of specs)
     - If present, reusable specs must not match any of the constraints in the list

For instance, the following configuration:

.. code-block:: yaml

   concretizer:
     reuse:
       roots: true
       from:
       - type: local
         include:
         - "%gcc"
         - "%clang"

tells the concretizer to reuse all specs compiled with either ``gcc`` or ``clang`` that are installed
in the local store. Any spec from remote buildcaches is disregarded.

To reduce the boilerplate in configuration files, default values for the ``include`` and
``exclude`` options can be pushed up one level:

.. code-block:: yaml

   concretizer:
     reuse:
       roots: true
       include:
       - "%gcc"
       from:
       - type: local
       - type: buildcache
       - type: local
         include:
         - "foo %oneapi"

In the example above we reuse all specs compiled with ``gcc`` from the local store
and remote buildcaches, and we also reuse ``foo %oneapi``. Note that the last source of
specs overrides the default ``include`` attribute.

For one-off concretizations, there are command line arguments for each of the simple "single value"
configurations. This means a user can run:

.. code-block:: console

   % spack install --reuse <spec>

to enable reuse for a single installation, or:

.. code-block:: console

   % spack install --fresh <spec>

to do a fresh install if ``reuse`` is enabled by default.
``reuse: dependencies`` is the default.
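
There is a matching one-off flag for the ``dependencies`` value as well; we
assume here a Spack version that provides it (check ``spack install --help``):

.. code-block:: console

   % spack install --reuse-deps <spec>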
.. seealso::

@@ -147,6 +147,15 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.

   def autoreconf(self, spec, prefix):
       which("bash")("autogen.sh")

If the ``package.py`` has build instructions in a separate
:ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:

.. code-block:: python

   class AutotoolsBuilder(AutotoolsBuilder):
       def autoreconf(self, pkg, spec, prefix):
           which("bash")("autogen.sh")

"""""""""""""""""""""""""""""""""""""""
patching configure or Makefile.in files
"""""""""""""""""""""""""""""""""""""""

@@ -25,7 +25,7 @@ use Spack to build packages with the tools.

The Spack Python class ``IntelOneapiPackage`` is a base class that is
used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
``IntelOneapiTbb``, and other classes to implement the oneAPI
packages. Search for ``oneAPI`` at `packages.spack.io <https://packages.spack.io>`_ for the full
list of available oneAPI packages, or use::

   spack list -d oneAPI

@@ -718,23 +718,45 @@ command-line tool, or C/C++/Fortran program with optional Python

modules? The former should be prepended with ``py-``, while the
latter should not.

""""""""""""""""""""""""""""""
``extends`` vs. ``depends_on``
""""""""""""""""""""""""""""""

As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
``extends`` and ``depends_on`` are very similar, but ``extends`` ensures
that the extension and extendee share the same prefix in views.
This allows the user to import a Python module without
having to add that module to ``PYTHONPATH``.

Additionally, ``extends("python")`` adds a dependency on the package
``python-venv``. This improves isolation from the system, whether
it's during the build or at runtime: user and system site packages
cannot accidentally be used by any package that ``extends("python")``.

As a rule of thumb: if a package does not install any Python modules
of its own, and merely puts a Python script in the ``bin`` directory,
then there is no need for ``extends``. If the package installs modules
in the ``site-packages`` directory, it requires ``extends``.

"""""""""""""""""""""""""""""""""""""
|
||||||
|
Executing ``python`` during the build
|
||||||
|
"""""""""""""""""""""""""""""""""""""
|
||||||
|
|
||||||
|
Whenever you need to execute a Python command or pass the path of the
|
||||||
|
Python interpreter to the build system, it is best to use the global
|
||||||
|
variable ``python`` directly. For example:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
@run_before("install")
|
||||||
|
def recythonize(self):
|
||||||
|
python("setup.py", "clean") # use the `python` global
|
||||||
|
|
||||||
|
As mentioned in the previous section, ``extends("python")`` adds an
|
||||||
|
automatic dependency on ``python-venv``, which is a virtual environment
|
||||||
|
that guarantees build isolation. The ``python`` global always refers to
|
||||||
|
the correct Python interpreter, whether the package uses ``extends("python")``
|
||||||
|
or ``depends_on("python")``.
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^
|
||||||
Alternatives to Spack
|
Alternatives to Spack
|
||||||
|
|||||||
@@ -11,7 +11,8 @@ Chaining Spack Installations
|
|||||||
|
|
||||||
You can point your Spack installation to another installation to use any
packages that are installed there. To register the other Spack instance,
you can add it as an entry to ``upstreams.yaml`` at any of the
:ref:`configuration-scopes`:

.. code-block:: yaml

@@ -22,7 +23,8 @@ you can add it as an entry to ``upstreams.yaml``:

       install_tree: /path/to/another/spack/opt/spack

``install_tree`` must point to the ``opt/spack`` directory inside of the
Spack base directory, or the location of the ``install_tree`` defined
in :ref:`config.yaml <config-yaml>`.
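
If you are unsure which install tree the other instance uses, you can query it
directly; a minimal sketch, assuming the other instance lives at
``/path/to/another/spack``:

.. code-block:: console

   $ /path/to/another/spack/bin/spack config get config | grep install_tree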

Once the upstream Spack instance has been added, ``spack find`` will
automatically check the upstream instance when querying installed packages,

@@ -150,7 +150,7 @@ this can expose you to attacks. Use at your own risk.

--------------------

Path to custom certificates for SSL verification. The value can be a
filesystem path, or an environment variable that expands to an absolute file path.
The default value is set to the environment variable ``SSL_CERT_FILE``
to use the same syntax used by many other applications that automatically
detect custom certificates.

@@ -160,6 +160,9 @@ in the subprocess calling ``curl``.

If ``url_fetch_method:urllib``, then files and directories are supported, i.e.
``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
will work.
In all cases the expanded path must be absolute for Spack to use the certificates.
Certificates relative to an environment can be created by prepending the path variable
with the Spack configuration variable ``$env``.
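
For example, to point Spack at a certificate bundle kept inside the active
environment's directory (the file name ``cacert.pem`` here is only an
illustration):

.. code-block:: console

   $ spack config add 'config:ssl_certs:$env/cacert.pem'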

--------------------
``checksum``
--------------------

@@ -194,15 +194,15 @@ The OS that are currently supported are summarized in the table below:

|
* - Operating System
|
||||||
- Base Image
|
- Base Image
|
||||||
- Spack Image
|
- Spack Image
|
||||||
* - Ubuntu 18.04
|
|
||||||
- ``ubuntu:18.04``
|
|
||||||
- ``spack/ubuntu-bionic``
|
|
||||||
* - Ubuntu 20.04
|
* - Ubuntu 20.04
|
||||||
- ``ubuntu:20.04``
|
- ``ubuntu:20.04``
|
||||||
- ``spack/ubuntu-focal``
|
- ``spack/ubuntu-focal``
|
||||||
* - Ubuntu 22.04
|
* - Ubuntu 22.04
|
||||||
- ``ubuntu:22.04``
|
- ``ubuntu:22.04``
|
||||||
- ``spack/ubuntu-jammy``
|
- ``spack/ubuntu-jammy``
|
||||||
|
* - Ubuntu 24.04
|
||||||
|
- ``ubuntu:24.04``
|
||||||
|
- ``spack/ubuntu-noble``
|
||||||
* - CentOS 7
|
* - CentOS 7
|
||||||
- ``centos:7``
|
- ``centos:7``
|
||||||
- ``spack/centos7``
|
- ``spack/centos7``
|
||||||
@@ -227,12 +227,6 @@ The OS that are currently supported are summarized in the table below:
|
|||||||
* - Rocky Linux 9
|
* - Rocky Linux 9
|
||||||
- ``rockylinux:9``
|
- ``rockylinux:9``
|
||||||
- ``spack/rockylinux9``
|
- ``spack/rockylinux9``
|
||||||
* - Fedora Linux 37
|
|
||||||
- ``fedora:37``
|
|
||||||
- ``spack/fedora37``
|
|
||||||
* - Fedora Linux 38
|
|
||||||
- ``fedora:38``
|
|
||||||
- ``spack/fedora38``
|
|
||||||
* - Fedora Linux 39
|
* - Fedora Linux 39
|
||||||
- ``fedora:39``
|
- ``fedora:39``
|
||||||
- ``spack/fedora39``
|
- ``spack/fedora39``
|
||||||
|
|||||||
@@ -142,12 +142,8 @@ user's prompt to begin with the environment name in brackets.
|
|||||||
   $ spack env activate -p myenv
   [myenv] $ ...

The ``activate`` command can also be used to create a new environment, if it does
not already exist, by adding the ``--create`` flag.

.. code-block:: console

@@ -176,21 +172,36 @@ environment will remove the view from the user environment.

^^^^^^^^^^^^^^^^^^^^^^
Anonymous Environments
^^^^^^^^^^^^^^^^^^^^^^

Apart from managed environments, Spack also supports anonymous environments.

Anonymous environments can be placed in any directory of choice.

.. note::

   When uninstalling packages, Spack asks the user to confirm the removal of packages
   that are still used in a managed environment. This is not the case for anonymous
   environments.

To create an anonymous environment, use one of the following commands:

.. code-block:: console

   $ spack env create --dir my_env
   $ spack env create ./my_env

As a shorthand, you can also create an anonymous environment upon activation if it does not
already exist:

.. code-block:: console

   $ spack env activate --create ./my_env

For convenience, Spack can also place an anonymous environment in a temporary directory for you:

.. code-block:: console

   $ spack env activate --temp

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Environment Sensitive Commands
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -449,6 +460,125 @@ Sourcing that file in Bash will make the environment available to the

user; and can be included in ``.bashrc`` files, etc. The ``loads``
file may also be copied out of the environment, renamed, etc.


.. _environment_include_concrete:

------------------------------
Included Concrete Environments
------------------------------

Spack can create an environment based on information from already
established environments. You can think of it as a combination of existing
environments: it gathers information from the existing environments'
``spack.lock`` files and uses that during the creation of the included
concrete environment. When an included concrete environment is created, it
will generate a ``spack.lock`` file for the newly created environment.


^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Creating included environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

To create a combined concrete environment, you must have at least one existing
concrete environment. Use the command ``spack env create`` with the
argument ``--include-concrete`` followed by the name or path of the environment
you'd like to include. Here is an example of how to create a combined environment
from the command line:

.. code-block:: console

   $ spack env create myenv
   $ spack -e myenv add python
   $ spack -e myenv concretize
   $ spack env create --include-concrete myenv included_env

You can also include an environment directly in the ``spack.yaml`` file. It
involves adding the ``include_concrete`` heading in the yaml followed by the
absolute path to the independent environments.

.. code-block:: yaml

   spack:
     specs: []
     concretizer:
       unify: true
     include_concrete:
     - /absolute/path/to/environment1
     - /absolute/path/to/environment2

Once the ``spack.yaml`` has been updated, you must concretize the environment to
get the concrete specs from the included environments.
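
For instance, if the combined environment lives in the current working directory:

.. code-block:: console

   $ spack -e . concretize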

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Updating an included environment
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If changes were made to the base environment and you want them reflected in the
included environment, you will need to reconcretize both the base environment and the
included environment for the change to be implemented. For example:

.. code-block:: console

   $ spack env create myenv
   $ spack -e myenv add python
   $ spack -e myenv concretize
   $ spack env create --include-concrete myenv included_env


   $ spack -e myenv find
   ==> In environment myenv
   ==> Root specs
   python

   ==> 0 installed packages


   $ spack -e included_env find
   ==> In environment included_env
   ==> No root specs
   ==> Included specs
   python

   ==> 0 installed packages

Here we see that ``included_env`` has access to the python package through
the ``myenv`` environment. But if we were to add another spec to ``myenv``,
``included_env`` will not be able to access the new information.

.. code-block:: console

   $ spack -e myenv add perl
   $ spack -e myenv concretize
   $ spack -e myenv find
   ==> In environment myenv
   ==> Root specs
   perl  python

   ==> 0 installed packages


   $ spack -e included_env find
   ==> In environment included_env
   ==> No root specs
   ==> Included specs
   python

   ==> 0 installed packages

It isn't until you run the ``spack concretize`` command that the combined
environment will get the updated information from the reconcretized base environment.

.. code-block:: console

   $ spack -e included_env concretize
   $ spack -e included_env find
   ==> In environment included_env
   ==> No root specs
   ==> Included specs
   perl  python

   ==> 0 installed packages

.. _environment-configuration:

------------------------

@@ -800,6 +930,7 @@ For example, the following environment has three root packages:

This allows for a much-needed reduction in redundancy between packages
and constraints.


----------------
Filesystem Views
----------------

@@ -1033,7 +1164,7 @@ other targets to depend on the environment installation.

A typical workflow is as follows:

.. code-block:: console

   spack env create -d .
   spack -e . add perl

@@ -1126,7 +1257,7 @@ its dependencies. This can be useful when certain flags should only apply to

dependencies. Below we show a use case where a spec is installed with verbose
output (``spack install --verbose``) while its dependencies are installed silently:

.. code-block:: console

   $ spack env depfile -o Makefile

@@ -1148,7 +1279,7 @@ This can be accomplished through the generated ``[<prefix>/]SPACK_PACKAGE_IDS``

variable. Assuming we have an active and concrete environment, we generate the
associated ``Makefile`` with a prefix ``example``:

.. code-block:: console

   $ spack env depfile -o env.mk --make-prefix example

@@ -478,6 +478,13 @@ prefix, you can add them to the ``extra_attributes`` field. Similarly,

all other fields from the compilers config can be added to the
``extra_attributes`` field for an external representing a compiler.

Note that the format for the ``paths`` field in the
``extra_attributes`` section is different than in the ``compilers``
config. For compilers configured as external packages, the section is
named ``compilers``, and the dictionary maps language names (``c``,
``cxx``, ``fortran``) to paths, rather than using the names ``cc``,
``fc``, and ``f77``.

.. code-block:: yaml

   packages:

@@ -493,11 +500,10 @@ all other fields from the compilers config can be added to the

   - spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
     prefix: /usr
     extra_attributes:
       compilers:
         c: /usr/bin/clang-with-suffix
         cxx: /usr/bin/clang++-with-extra-info
         fortran: /usr/bin/gfortran
       extra_rpaths:
       - /usr/lib/llvm/

@@ -1358,187 +1364,6 @@ This will write the private key to the file `dinosaur.priv`.

or for help on an issue or the Spack slack.

.. _cray-support:

-------------
Spack on Cray
-------------

Spack differs slightly when used on a Cray system. The architecture spec
can differentiate between the front-end and back-end processor and operating system.
For example, on Edison at NERSC, the back-end target processor
is "Ivy Bridge", so you can specify to use the back-end this way:

.. code-block:: console

   $ spack install zlib target=ivybridge

You can also use the operating system to build against the back-end:

.. code-block:: console

   $ spack install zlib os=CNL10

Notice that the name includes both the operating system name and the major
version number concatenated together.

Alternatively, if you want to build something for the front-end,
you can specify the front-end target processor. The processor for a login node
on Edison is "Sandy Bridge", so we specify it on the command line like so:

.. code-block:: console

   $ spack install zlib target=sandybridge

And the front-end operating system is:

.. code-block:: console

   $ spack install zlib os=SuSE11

^^^^^^^^^^^^^^^^^^^^^^^
Cray compiler detection
^^^^^^^^^^^^^^^^^^^^^^^

Spack can detect compilers using two methods. For the front-end, we treat
everything the same. The difference lies in back-end compiler detection.
Back-end compiler detection is made via the Tcl ``module avail`` command.
Once it detects the compiler, it writes the appropriate PrgEnv and compiler
module name to ``compilers.yaml`` and sets the paths to each compiler with Cray's
compiler wrapper names (i.e. cc, CC, ftn). During build time, Spack will load
the correct PrgEnv and compiler module and will call the appropriate wrapper.

The ``compilers.yaml`` config file will also differ. There is a
``modules`` section that is filled with the compiler's Programming Environment
and module name. On other systems, this field is empty []:

.. code-block:: yaml

   - compiler:
       modules:
       - PrgEnv-intel
       - intel/15.0.109

As mentioned earlier, the compiler paths will look different on a Cray system.
Since most compilers are invoked using cc, CC and ftn, the paths for each
compiler are replaced with their respective Cray compiler wrapper names,
as opposed to an explicit path to the compiler executable:

.. code-block:: yaml

   paths:
     cc: cc
     cxx: CC
     f77: ftn
     fc: ftn

This allows Spack to call the Cray compiler wrappers during build time.

For more on compiler configuration, check out :ref:`compiler-config`.

Spack sets the default Cray link type to dynamic, to better match other
platforms. Individual packages can enable static linking (which is the
default outside of Spack on Cray systems) using the ``-static`` flag.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting defaults and using Cray modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If you want to use default compilers for each PrgEnv and also be able
to load Cray external modules, you will need to set up a ``packages.yaml``.

Here's an example of an external configuration for Cray modules:

.. code-block:: yaml

   packages:
     mpich:
       externals:
       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10"
         modules:
         - cray-mpich
       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10"
         modules:
         - cray-mpich
     all:
       providers:
         mpi: [mpich]

This tells Spack that for whatever package depends on mpi, it should load the
cray-mpich module into the environment. You can then use whatever
environment variables, libraries, etc., that are brought into the environment
via module load.

.. note::

   For Cray-provided packages, it is best to use ``modules:`` instead of ``prefix:``
   in ``packages.yaml``, because the Cray Programming Environment heavily relies on
   modules (e.g., loading the ``cray-mpich`` module adds MPI libraries to the
   compiler wrapper link line).

You can set the default compiler that Spack can use for each compiler type.
If you want to use the Cray defaults, then set them under ``all:`` in packages.yaml.
In the compiler field, set the compiler specs in your order of preference.
Whenever you build with that compiler type, Spack will concretize to that version.

Here is an example of a full packages.yaml used at NERSC:

.. code-block:: yaml

   packages:
     mpich:
       externals:
       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-mpich
       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge"
         modules:
         - cray-mpich
       buildable: False
     netcdf:
       externals:
       - spec: "netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-netcdf
       - spec: "netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-netcdf
       buildable: False
     hdf5:
       externals:
       - spec: "hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-hdf5
       - spec: "hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-hdf5
       buildable: False
     all:
       compiler: [gcc@5.2.0, intel@16.0.0.109]
       providers:
         mpi: [mpich]

Here we tell Spack that whenever we want to build with gcc, it should use version
5.2.0, and if we want to build with Intel compilers, it should use version
16.0.0.109. We add a spec for each compiler type for each Cray module. This
ensures that for each compiler on our system we can use that external module.

For more on external packages, check out the section :ref:`sec-external-packages`.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using Linux containers on Cray machines
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack uses environment variables particular to the Cray programming
environment to determine which systems are Cray platforms. These
environment variables may be propagated into containers that are not
using the Cray programming environment.

To ensure that Spack does not autodetect the Cray programming
environment, unset the environment variable ``MODULEPATH``. This
will cause Spack to treat a Linux container on a Cray system as a base
Linux distro.

.. _windows_support:

----------------

@@ -1572,6 +1397,8 @@ Microsoft Visual Studio

"""""""""""""""""""""""

Microsoft Visual Studio provides the only Windows C/C++ compiler that is currently supported by Spack.
Spack additionally requires that the Windows SDK (including WGL) be installed as part of your
Visual Studio installation, as it is required to build many packages from source.

We require several specific components to be included in the Visual Studio installation.
One is the C/C++ toolset, which can be selected as "Desktop development with C++" or "C++ build tools,"

@@ -1579,6 +1406,7 @@ depending on installation type (Professional, Build Tools, etc.) The other requ

"C++ CMake tools for Windows," which can be selected from among the optional packages.
This provides CMake and Ninja for use during Spack configuration.

If you already have Visual Studio installed, you can make sure these components are installed by
rerunning the installer. Next to your installation, select "Modify" and look at the
"Installation details" pane on the right.

@@ -476,9 +476,3 @@ implemented using Python's built-in `sys.path

:py:mod:`spack.repo` module implements a custom `Python importer
<https://docs.python.org/2/library/imp.html>`_.

.. warning::

   The mechanism for extending packages is not yet extensively tested,
   and extending packages across repositories imposes inter-repo
   dependencies, which may be hard to manage. Use this feature at your
   own risk, but let us know if you have a use case for it.

@@ -4,10 +4,10 @@ sphinx_design==0.5.0

sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.1
docutils==0.20.1
pygments==2.18.0
urllib3==2.2.1
pytest==8.2.1
isort==5.13.2
black==24.4.2
flake8==7.0.0
mypy==1.10.0

lib/spack/external/__init__.py (vendored)

@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)

astunparse
----------------

lib/spack/external/archspec/__init__.py (vendored)

@@ -1,3 +1,3 @@

"""Init file to avoid namespace packages"""
|
"""Init file to avoid namespace packages"""
|
||||||
|
|
||||||
__version__ = "0.2.3"
|
__version__ = "0.2.4"
|
||||||
|
|||||||
9
lib/spack/external/archspec/cpu/__init__.py
vendored
9
lib/spack/external/archspec/cpu/__init__.py
vendored
@@ -5,9 +5,10 @@
|
|||||||
"""The "cpu" package permits to query and compare different
|
"""The "cpu" package permits to query and compare different
|
||||||
CPU microarchitectures.
|
CPU microarchitectures.
|
||||||
"""
|
"""
|
||||||
from .detect import host
|
from .detect import brand_string, host
|
||||||
from .microarchitecture import (
|
from .microarchitecture import (
|
||||||
TARGETS,
|
TARGETS,
|
||||||
|
InvalidCompilerVersion,
|
||||||
Microarchitecture,
|
Microarchitecture,
|
||||||
UnsupportedMicroarchitecture,
|
UnsupportedMicroarchitecture,
|
||||||
generic_microarchitecture,
|
generic_microarchitecture,
|
||||||
@@ -15,10 +16,12 @@
|
|||||||
)
|
)
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
|
"brand_string",
|
||||||
|
"host",
|
||||||
|
"TARGETS",
|
||||||
|
"InvalidCompilerVersion",
|
||||||
"Microarchitecture",
|
"Microarchitecture",
|
||||||
"UnsupportedMicroarchitecture",
|
"UnsupportedMicroarchitecture",
|
||||||
"TARGETS",
|
|
||||||
"generic_microarchitecture",
|
"generic_microarchitecture",
|
||||||
"host",
|
|
||||||
"version_components",
|
"version_components",
|
||||||
]
|
]
|
||||||
|
|||||||
42
lib/spack/external/archspec/cpu/detect.py
vendored
42
lib/spack/external/archspec/cpu/detect.py
vendored
@@ -155,6 +155,31 @@ def _is_bit_set(self, register: int, bit: int) -> bool:
|
|||||||
        mask = 1 << bit
        return register & mask > 0

    def brand_string(self) -> Optional[str]:
        """Returns the brand string, if available."""
        if self.highest_extension_support < 0x80000004:
            return None

        r1 = self.cpuid.registers_for(eax=0x80000002, ecx=0)
        r2 = self.cpuid.registers_for(eax=0x80000003, ecx=0)
        r3 = self.cpuid.registers_for(eax=0x80000004, ecx=0)
        result = struct.pack(
            "IIIIIIIIIIII",
            r1.eax,
            r1.ebx,
            r1.ecx,
            r1.edx,
            r2.eax,
            r2.ebx,
            r2.ecx,
            r2.edx,
            r3.eax,
            r3.ebx,
            r3.ecx,
            r3.edx,
        ).decode("utf-8")
        return result.strip("\x00")


@detection(operating_system="Windows")
def cpuid_info():

@@ -174,8 +199,8 @@ def _check_output(args, env):

WINDOWS_MAPPING = {
    "AMD64": X86_64,
    "ARM64": AARCH64,
}

@@ -409,3 +434,16 @@ def compatibility_check_for_riscv64(info, target):

    return (target == arch_root or arch_root in target.ancestors) and (
        target.name == info.name or target.vendor == "generic"
    )


def brand_string() -> Optional[str]:
    """Returns the brand string of the host, if detected, or None."""
    if platform.system() == "Darwin":
        return _check_output(
            ["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
        ).strip()

    if host().family == X86_64:
        return CpuidInfoCollector().brand_string()

    return None
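
As a hedged usage sketch for the new ``brand_string`` API (the output shown is
illustrative and depends on the host CPU):

.. code-block:: console

   $ python -c "import archspec.cpu; print(archspec.cpu.brand_string())"
   Intel(R) Xeon(R) CPU E5-2680 v3 @ 2.50GHz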
@@ -208,6 +208,8 @@ def optimization_flags(self, compiler, version):

        """Returns a string containing the optimization flags that need
        to be used to produce code optimized for this micro-architecture.

        The version is expected to be a string of dot separated digits.

        If there is no information on the compiler passed as argument the
        function returns an empty string. If it is known that the compiler
        version we want to use does not support this architecture the function

@@ -216,6 +218,11 @@ def optimization_flags(self, compiler, version):

        Args:
            compiler (str): name of the compiler to be used
            version (str): version of the compiler to be used

        Raises:
            UnsupportedMicroarchitecture: if the requested compiler does not support
                this micro-architecture.
            ValueError: if the version doesn't match the expected format
        """
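
To make the version-format contract concrete, a sketch of the calling
convention (the flag output shown is illustrative):

.. code-block:: console

   $ python -c "import archspec.cpu; print(archspec.cpu.TARGETS['icelake'].optimization_flags('gcc', '12.1.0'))"
   -march=icelake-client -mtune=icelake-client

Passing a version that is not dot separated digits, such as ``12.1-suffix``,
now raises ``InvalidCompilerVersion``.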
        # If we don't have information on compiler at all return an empty string
        if compiler not in self.family.compilers:

@@ -232,6 +239,14 @@ def optimization_flags(self, compiler, version):

            msg = msg.format(compiler, best_target, best_target.family)
            raise UnsupportedMicroarchitecture(msg)

        # Check that the version matches the expected format
        if not re.match(r"^(?:\d+\.)*\d+$", version):
            msg = (
                "invalid format for the compiler version argument. "
                "Only dot separated digits are allowed."
            )
            raise InvalidCompilerVersion(msg)

        # If we have information on this compiler we need to check the
        # version being used
        compiler_info = self.compilers[compiler]

@@ -292,7 +307,7 @@ def generic_microarchitecture(name):

    Args:
        name (str): name of the micro-architecture
    """
    return Microarchitecture(name, parents=[], vendor="generic", features=set(), compilers={})

def version_components(version):

@@ -367,7 +382,15 @@ def fill_target_from_dict(name, data, targets):

TARGETS = LazyDictionary(_known_microarchitectures)


class ArchspecError(Exception):
    """Base class for errors within archspec"""


class UnsupportedMicroarchitecture(ArchspecError, ValueError):
    """Raised if a compiler version does not support optimization for a given
    micro-architecture.
    """


class InvalidCompilerVersion(ArchspecError, ValueError):
    """Raised when an invalid format is used for compiler versions in archspec."""

@@ -2937,8 +2937,6 @@

"ilrcpc",
|
"ilrcpc",
|
||||||
"flagm",
|
"flagm",
|
||||||
"ssbs",
|
"ssbs",
|
||||||
"paca",
|
|
||||||
"pacg",
|
|
||||||
"dcpodp",
|
"dcpodp",
|
||||||
"svei8mm",
|
"svei8mm",
|
||||||
"svebf16",
|
"svebf16",
|
||||||
@@ -3066,8 +3064,6 @@
|
|||||||
"flagm",
|
"flagm",
|
||||||
"ssbs",
|
"ssbs",
|
||||||
"sb",
|
"sb",
|
||||||
"paca",
|
|
||||||
"pacg",
|
|
||||||
"dcpodp",
|
"dcpodp",
|
||||||
"sve2",
|
"sve2",
|
||||||
"sveaes",
|
"sveaes",
|
||||||
@@ -3081,8 +3077,7 @@
|
|||||||
"svebf16",
|
"svebf16",
|
||||||
"i8mm",
|
"i8mm",
|
||||||
"bf16",
|
"bf16",
|
||||||
"dgh",
|
"dgh"
|
||||||
"bti"
|
|
||||||
],
|
],
|
||||||
"compilers" : {
|
"compilers" : {
|
||||||
"gcc": [
|
"gcc": [
|
||||||
|
|||||||
@@ -98,3 +98,10 @@ def path_filter_caller(*args, **kwargs):
|
|||||||
    if _func:
        return holder_func(_func)
    return holder_func


def sanitize_win_longpath(path: str) -> str:
    """Strip the Windows extended path prefix from a string.

    Returns the sanitized string; this is a no-op if the extended
    path prefix is not present."""
    return path.lstrip("\\\\?\\")

@@ -187,12 +187,18 @@ def polite_filename(filename: str) -> str:

    return _polite_antipattern().sub("_", filename)


def getuid() -> Union[str, int]:
    """Returns os.getuid() on non-Windows.

    On Windows, returns 0 for admin users and the login string otherwise.
    This is in line with the behavior of get_owner_uid, which
    always returns the login string on Windows.
    """
    if sys.platform == "win32":
        import ctypes

        # If not admin, use the string name of the login as a unique ID
        if ctypes.windll.shell32.IsUserAnAdmin() == 0:
            return os.getlogin()
        return 0
    else:
        return os.getuid()

@@ -213,6 +219,15 @@ def _win_rename(src, dst):

    os.replace(src, dst)


@system_path_filter
def msdos_escape_parens(path):
    """MS-DOS interprets parens as grouping parameters even in a quoted string"""
    if sys.platform == "win32":
        return path.replace("(", "^(").replace(")", "^)")
    else:
        return path


@system_path_filter
def rename(src, dst):
    # On Windows, os.rename will fail if the destination file already exists

@@ -553,7 +568,13 @@ def exploding_archive_handler(tarball_container, stage):

@system_path_filter(arg_slice=slice(1))
def get_owner_uid(path, err_msg=None) -> Union[str, int]:
    """Returns the owner UID of the path destination.

    On non-Windows this is the value of st_uid.
    On Windows this is the login string associated with the
    owning user.
    """
    if not os.path.exists(path):
        mkdirp(path, mode=stat.S_IRWXU)

@@ -745,7 +766,6 @@ def copy_tree(

    src: str,
    dest: str,
    symlinks: bool = True,
    ignore: Optional[Callable[[str], bool]] = None,
    _permissions: bool = False,
):

@@ -768,8 +788,6 @@ def copy_tree(

        src (str): the directory to copy
        dest (str): the destination directory
        symlinks (bool): whether or not to preserve symlinks
        ignore (typing.Callable): function indicating which files to ignore
        _permissions (bool): for internal use only

@@ -777,8 +795,6 @@ def copy_tree(

        IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
    """
    if _permissions:
        tty.debug("Installing {0} to {1}".format(src, dest))
    else:

@@ -822,7 +838,7 @@ def copy_tree(

        if islink(s):
            link_target = resolve_link_target_relative_to_the_link(s)
            if symlinks:
                target = readlink(s)
                if os.path.isabs(target):

    def escaped_path(path):

@@ -851,16 +867,14 @@ def escaped_path(path):

            copy_mode(s, d)

    for target, d, s in links:
        symlink(target, d)
        if _permissions:
            set_install_permissions(d)
            copy_mode(s, d)


@system_path_filter
def install_tree(src, dest, symlinks=True, ignore=None):
    """Recursively install an entire directory tree rooted at *src*.

    Same as :py:func:`copy_tree` with the addition of setting proper

@@ -871,21 +885,12 @@ def install_tree(

        dest (str): the destination directory
        symlinks (bool): whether or not to preserve symlinks
        ignore (typing.Callable): function indicating which files to ignore

    Raises:
        IOError: if *src* does not match any files or directories
|
IOError: if *src* does not match any files or directories
|
||||||
ValueError: if *src* is a parent directory of *dest*
|
ValueError: if *src* is a parent directory of *dest*
|
||||||
"""
|
"""
|
||||||
copy_tree(
|
copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
|
||||||
src,
|
|
||||||
dest,
|
|
||||||
symlinks=symlinks,
|
|
||||||
allow_broken_symlinks=allow_broken_symlinks,
|
|
||||||
ignore=ignore,
|
|
||||||
_permissions=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@system_path_filter
|
@system_path_filter
|
||||||
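With `allow_broken_symlinks` removed, a call to the simplified API is just the four remaining arguments; the paths and ignore rule below are hypothetical:

```python
# Hypothetical usage of the simplified API: copy a staged tree into a prefix,
# preserving symlinks and skipping editor backup files.
from llnl.util.filesystem import install_tree  # import path as used inside Spack's tree

install_tree(
    "/tmp/stage/pkg-1.0",               # src (hypothetical)
    "/opt/spack/prefix/pkg-1.0",        # dest (hypothetical)
    symlinks=True,                      # keep symlinks as symlinks
    ignore=lambda f: f.endswith("~"),   # callback decides which files to skip
)
```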
@@ -2429,9 +2434,10 @@ def add_library_dependent(self, *dest):
         """
         for pth in dest:
             if os.path.isfile(pth):
-                self._additional_library_dependents.add(pathlib.Path(pth).parent)
+                new_pth = pathlib.Path(pth).parent
             else:
-                self._additional_library_dependents.add(pathlib.Path(pth))
+                new_pth = pathlib.Path(pth)
+            self._additional_library_dependents.add(new_pth)

     @property
     def rpaths(self):
@@ -2509,8 +2515,14 @@ def establish_link(self):

         # for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib)
         # install a symlink to each dependent library
-        for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
-            self._link(library, lib_dir)
+        # do not rpath for system libraries included in the dag
+        # we should not be modifying libraries managed by the Windows system
+        # as this will negatively impact linker behavior and can result in permission
+        # errors if those system libs are not modifiable by Spack
+        if "windows-system" not in getattr(self.pkg, "tags", []):
+            for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
+                self._link(library, lib_dir)


 @system_path_filter
@@ -8,100 +8,75 @@
 import subprocess
 import sys
 import tempfile
+from typing import Union

 from llnl.util import lang, tty

-from ..path import system_path_filter
+from ..path import sanitize_win_longpath, system_path_filter

 if sys.platform == "win32":
     from win32file import CreateHardLink

-is_windows = sys.platform == "win32"
-
-
-def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
-    """
-    Create a link.
-
-    On non-Windows and Windows with System Administrator
-    privleges this will be a normal symbolic link via
-    os.symlink.
-
-    On Windows without privledges the link will be a
-    junction for a directory and a hardlink for a file.
-    On Windows the various link types are:
-
-    Symbolic Link: A link to a file or directory on the
-    same or different volume (drive letter) or even to
-    a remote file or directory (using UNC in its path).
-    Need System Administrator privileges to make these.
-
-    Hard Link: A link to a file on the same volume (drive
-    letter) only. Every file (file's data) has at least 1
-    hard link (file's name). But when this method creates
-    a new hard link there will be 2. Deleting all hard
-    links effectively deletes the file. Don't need System
-    Administrator privileges.
-
-    Junction: A link to a directory on the same or different
-    volume (drive letter) but not to a remote directory. Don't
-    need System Administrator privileges.
-
-    Parameters:
-        source_path (str): The real file or directory that the link points to.
-            Must be absolute OR relative to the link.
-        link_path (str): The path where the link will exist.
-        allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
-            doesn't exist. This will still raise an exception on Windows.
-    """
-    source_path = os.path.normpath(source_path)
+
+def _windows_symlink(
+    src: str, dst: str, target_is_directory: bool = False, *, dir_fd: Union[int, None] = None
+):
+    """On Windows with System Administrator privileges this will be a normal symbolic link via
+    os.symlink. On Windows without privledges the link will be a junction for a directory and a
+    hardlink for a file. On Windows the various link types are:
+
+    Symbolic Link: A link to a file or directory on the same or different volume (drive letter) or
+    even to a remote file or directory (using UNC in its path). Need System Administrator
+    privileges to make these.
+
+    Hard Link: A link to a file on the same volume (drive letter) only. Every file (file's data)
+    has at least 1 hard link (file's name). But when this method creates a new hard link there will
+    be 2. Deleting all hard links effectively deletes the file. Don't need System Administrator
+    privileges.
+
+    Junction: A link to a directory on the same or different volume (drive letter) but not to a
+    remote directory. Don't need System Administrator privileges."""
+    source_path = os.path.normpath(src)
     win_source_path = source_path
-    link_path = os.path.normpath(link_path)
+    link_path = os.path.normpath(dst)

-    # Never allow broken links on Windows.
-    if sys.platform == "win32" and allow_broken_symlinks:
-        raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")
-
-    if not allow_broken_symlinks:
-        # Perform basic checks to make sure symlinking will succeed
-        if os.path.lexists(link_path):
-            raise AlreadyExistsError(
-                f"Link path ({link_path}) already exists. Cannot create link."
-            )
+    # Perform basic checks to make sure symlinking will succeed
+    if os.path.lexists(link_path):
+        raise AlreadyExistsError(f"Link path ({link_path}) already exists. Cannot create link.")

-        if not os.path.exists(source_path):
-            if os.path.isabs(source_path) and not allow_broken_symlinks:
-                # An absolute source path that does not exist will result in a broken link.
-                raise SymlinkError(
-                    f"Source path ({source_path}) is absolute but does not exist. Resulting "
-                    f"link would be broken so not making link."
-                )
-            else:
-                # os.symlink can create a link when the given source path is relative to
-                # the link path. Emulate this behavior and check to see if the source exists
-                # relative to the link path ahead of link creation to prevent broken
-                # links from being made.
-                link_parent_dir = os.path.dirname(link_path)
-                relative_path = os.path.join(link_parent_dir, source_path)
-                if os.path.exists(relative_path):
-                    # In order to work on windows, the source path needs to be modified to be
-                    # relative because hardlink/junction dont resolve relative paths the same
-                    # way as os.symlink. This is ignored on other operating systems.
-                    win_source_path = relative_path
-                elif not allow_broken_symlinks:
-                    raise SymlinkError(
-                        f"The source path ({source_path}) is not relative to the link path "
-                        f"({link_path}). Resulting link would be broken so not making link."
-                    )
+    if not os.path.exists(source_path):
+        if os.path.isabs(source_path):
+            # An absolute source path that does not exist will result in a broken link.
+            raise SymlinkError(
+                f"Source path ({source_path}) is absolute but does not exist. Resulting "
+                f"link would be broken so not making link."
+            )
+        else:
+            # os.symlink can create a link when the given source path is relative to
+            # the link path. Emulate this behavior and check to see if the source exists
+            # relative to the link path ahead of link creation to prevent broken
+            # links from being made.
+            link_parent_dir = os.path.dirname(link_path)
+            relative_path = os.path.join(link_parent_dir, source_path)
+            if os.path.exists(relative_path):
+                # In order to work on windows, the source path needs to be modified to be
+                # relative because hardlink/junction dont resolve relative paths the same
+                # way as os.symlink. This is ignored on other operating systems.
+                win_source_path = relative_path
+            else:
+                raise SymlinkError(
+                    f"The source path ({source_path}) is not relative to the link path "
+                    f"({link_path}). Resulting link would be broken so not making link."
+                )

     # Create the symlink
-    if sys.platform == "win32" and not _windows_can_symlink():
+    if not _windows_can_symlink():
         _windows_create_link(win_source_path, link_path)
     else:
         os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))


-def islink(path: str) -> bool:
+def _windows_islink(path: str) -> bool:
     """Override os.islink to give correct answer for spack logic.

     For Non-Windows: a link can be determined with the os.path.islink method.
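A standalone sketch of the relative-target check above, using made-up paths:

```python
import os

# A relative link target must exist relative to the directory containing the
# link; _windows_symlink resolves it the same way before creating anything.
link_path = os.path.normpath("/spack/view/bin/tool")
source_path = os.path.normpath("../lib/tool")

link_parent_dir = os.path.dirname(link_path)
relative_path = os.path.normpath(os.path.join(link_parent_dir, source_path))
print(relative_path)  # /spack/view/lib/tool
# _windows_symlink only proceeds if this resolved path exists, because
# junctions/hardlinks do not resolve relative targets the way os.symlink does.
```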
@@ -247,9 +222,9 @@ def _windows_create_junction(source: str, link: str):
     out, err = proc.communicate()
     tty.debug(out.decode())
     if proc.returncode != 0:
-        err = err.decode()
-        tty.error(err)
-        raise SymlinkError("Make junction command returned a non-zero return code.", err)
+        err_str = err.decode()
+        tty.error(err_str)
+        raise SymlinkError("Make junction command returned a non-zero return code.", err_str)


 def _windows_create_hard_link(path: str, link: str):
@@ -269,14 +244,14 @@ def _windows_create_hard_link(path: str, link: str):
     CreateHardLink(link, path)


-def readlink(path: str):
+def _windows_readlink(path: str, *, dir_fd=None):
     """Spack utility to override of os.readlink method to work cross platform"""
     if _windows_is_hardlink(path):
         return _windows_read_hard_link(path)
     elif _windows_is_junction(path):
         return _windows_read_junction(path)
     else:
-        return os.readlink(path)
+        return sanitize_win_longpath(os.readlink(path, dir_fd=dir_fd))


 def _windows_read_hard_link(link: str) -> str:
@@ -338,6 +313,16 @@ def resolve_link_target_relative_to_the_link(link):
     return os.path.join(link_dir, target)


+if sys.platform == "win32":
+    symlink = _windows_symlink
+    readlink = _windows_readlink
+    islink = _windows_islink
+else:
+    symlink = os.symlink
+    readlink = os.readlink
+    islink = os.path.islink
+
+
 class SymlinkError(RuntimeError):
     """Exception class for errors raised while creating symlinks,
     junctions and hard links
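The module now picks an implementation once at import time instead of branching inside each call; the same alias pattern in a minimal, self-contained form (the `_windows_readlink` stub here is a placeholder, not the real implementation):

```python
import os
import sys

def _windows_readlink(path: str, *, dir_fd=None) -> str:
    # Stand-in for the Windows implementation shown in the diff above.
    raise NotImplementedError

# Module-level dispatch: callers always use the module-level name and never
# branch on the platform themselves.
if sys.platform == "win32":
    readlink = _windows_readlink
else:
    readlink = os.readlink

# e.g.: target = readlink("/some/symlink")
```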
@@ -59,6 +59,7 @@

     To output an @, use '@@'. To output a } inside braces, use '}}'.
 """
+import os
 import re
 import sys
 from contextlib import contextmanager
@@ -101,9 +102,29 @@ def __init__(self, message):
 # Mapping from color arguments to values for tty.set_color
 color_when_values = {"always": True, "auto": None, "never": False}

-# Force color; None: Only color if stdout is a tty
-# True: Always colorize output, False: Never colorize output
-_force_color = None
+
+def _color_when_value(when):
+    """Raise a ValueError for an invalid color setting.
+
+    Valid values are 'always', 'never', and 'auto', or equivalently,
+    True, False, and None.
+    """
+    if when in color_when_values:
+        return color_when_values[when]
+    elif when not in color_when_values.values():
+        raise ValueError("Invalid color setting: %s" % when)
+    return when
+
+
+def _color_from_environ() -> Optional[bool]:
+    try:
+        return _color_when_value(os.environ.get("SPACK_COLOR", "auto"))
+    except ValueError:
+        return None
+
+
+#: When `None` colorize when stdout is tty, when `True` or `False` always or never colorize resp.
+_force_color = _color_from_environ()


 def try_enable_terminal_color_on_windows():
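Net effect: `SPACK_COLOR` is resolved once at import. A small demo of the resolution logic shown above, with made-up values:

```python
import os

color_when_values = {"always": True, "auto": None, "never": False}

def _color_when_value(when):
    # Same validation as in the diff: map the three string settings, pass
    # True/False/None through, reject anything else.
    if when in color_when_values:
        return color_when_values[when]
    elif when not in color_when_values.values():
        raise ValueError("Invalid color setting: %s" % when)
    return when

for value in ("always", "never", "auto", "garbage"):
    os.environ["SPACK_COLOR"] = value
    try:
        resolved = _color_when_value(os.environ.get("SPACK_COLOR", "auto"))
    except ValueError:
        resolved = None  # invalid settings fall back to tty detection
    print(f"SPACK_COLOR={value!r} -> _force_color={resolved}")
```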
@@ -164,19 +185,6 @@ def _err_check(result, func, args):
     debug("Unable to support color on Windows terminal")


-def _color_when_value(when):
-    """Raise a ValueError for an invalid color setting.
-
-    Valid values are 'always', 'never', and 'auto', or equivalently,
-    True, False, and None.
-    """
-    if when in color_when_values:
-        return color_when_values[when]
-    elif when not in color_when_values.values():
-        raise ValueError("Invalid color setting: %s" % when)
-    return when
-
-
 def get_color_when():
     """Return whether commands should print color or not."""
     if _force_color is not None:
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.22.0.dev0"
+__version__ = "0.23.0.dev0"
 spack_version = __version__
@@ -254,8 +254,8 @@ def _search_duplicate_specs_in_externals(error_cls):

 @config_packages
 def _deprecated_preferences(error_cls):
-    """Search package preferences deprecated in v0.21 (and slated for removal in v0.22)"""
-    # TODO (v0.22): remove this audit as the attributes will not be allowed in config
+    """Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
+    # TODO (v0.23): remove this audit as the attributes will not be allowed in config
     errors = []
     packages_yaml = spack.config.CONFIG.get_config("packages")
@@ -421,6 +421,10 @@ def _check_patch_urls(pkgs, error_cls):
         r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
         r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
     )
+    github_pull_commits_re = (
+        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
+        r".+/.+/pull/\d+/commits/[a-fA-F0-9]+\.(?:patch|diff)"
+    )
     # Only .diff URLs have stable/full hashes:
     # https://forum.gitlab.com/t/patches-with-full-index/29313
     gitlab_patch_url_re = (
@@ -436,14 +440,24 @@ def _check_patch_urls(pkgs, error_cls):
                 if not isinstance(patch, spack.patch.UrlPatch):
                     continue

-                if re.match(github_patch_url_re, patch.url):
+                if re.match(github_pull_commits_re, patch.url):
+                    url = re.sub(r"/pull/\d+/commits/", r"/commit/", patch.url)
+                    url = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", url)
+                    errors.append(
+                        error_cls(
+                            f"patch URL in package {pkg_cls.name} "
+                            + "must not be a pull request commit; "
+                            + f"instead use {url}",
+                            [patch.url],
+                        )
+                    )
+                elif re.match(github_patch_url_re, patch.url):
                     full_index_arg = "?full_index=1"
                     if not patch.url.endswith(full_index_arg):
                         errors.append(
                             error_cls(
-                                "patch URL in package {0} must end with {1}".format(
-                                    pkg_cls.name, full_index_arg
-                                ),
+                                f"patch URL in package {pkg_cls.name} "
+                                + f"must end with {full_index_arg}",
                                 [patch.url],
                             )
                         )
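The suggested-URL rewrite in the new audit can be exercised standalone; the repository and hash below are hypothetical:

```python
import re

# Hypothetical offending URL: a commit inside a pull request.
url = "https://github.com/org/repo/pull/123/commits/abcdef0123456789.patch"

# Same two substitutions as in the audit above: point at the commit directly,
# then append ?full_index=1 unless it is already present.
fixed = re.sub(r"/pull/\d+/commits/", r"/commit/", url)
fixed = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", fixed)
print(fixed)
# https://github.com/org/repo/commit/abcdef0123456789.patch?full_index=1
```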
@@ -451,9 +465,7 @@ def _check_patch_urls(pkgs, error_cls):
                 if not patch.url.endswith(".diff"):
                     errors.append(
                         error_cls(
-                            "patch URL in package {0} must end with .diff".format(
-                                pkg_cls.name
-                            ),
+                            f"patch URL in package {pkg_cls.name} must end with .diff",
                             [patch.url],
                         )
                     )
@@ -1046,7 +1058,7 @@ def _extracts_errors(triggers, summary):
     group="externals",
     tag="PKG-EXTERNALS",
     description="Sanity checks for external software detection",
-    kwargs=("pkgs",),
+    kwargs=("pkgs", "debug_log"),
 )
@@ -1069,7 +1081,7 @@ def packages_with_detection_tests():


 @external_detection
-def _test_detection_by_executable(pkgs, error_cls):
+def _test_detection_by_executable(pkgs, debug_log, error_cls):
     """Test drive external detection for packages"""
     import spack.detection
@@ -1095,6 +1107,7 @@ def _test_detection_by_executable(pkgs, error_cls):
         for idx, test_runner in enumerate(
             spack.detection.detection_tests(pkg_name, spack.repo.PATH)
         ):
+            debug_log(f"[{__file__}]: running test {idx} for package {pkg_name}")
             specs = test_runner.execute()
             expected_specs = test_runner.expected_specs
@@ -1115,11 +1128,10 @@ def _test_detection_by_executable(pkgs, error_cls):
         for candidate in expected_specs:
             try:
                 idx = specs.index(candidate)
+                matched_detection.append((candidate, specs[idx]))
             except (AttributeError, ValueError):
                 pass

-            matched_detection.append((candidate, specs[idx]))
-
         def _compare_extra_attribute(_expected, _detected, *, _spec):
             result = []
             # Check items are of the same type
@@ -29,6 +29,7 @@
 import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
+from llnl.util.symlink import readlink

 import spack.caches
 import spack.cmd
@@ -658,7 +659,7 @@ def get_buildfile_manifest(spec):
     # 2. paths are used as strings.
     for rel_path in visitor.symlinks:
         abs_path = os.path.join(root, rel_path)
-        link = os.readlink(abs_path)
+        link = readlink(abs_path)
         if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
             data["link_to_relocate"].append(rel_path)
@@ -2001,6 +2002,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
     with spack.util.path.filter_padding():
         tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
         extract_tarball(spec, download_result, force)
+        spec.package.windows_establish_runtime_linkage()
         spack.hooks.post_install(spec, False)
         spack.store.STORE.db.add(spec, spack.store.STORE.layout)
@@ -5,7 +5,13 @@
 """Function and classes needed to bootstrap Spack itself."""

 from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
-from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
+from .core import (
+    all_core_root_specs,
+    ensure_clingo_importable_or_raise,
+    ensure_core_dependencies,
+    ensure_gpg_in_path_or_raise,
+    ensure_patchelf_in_path_or_raise,
+)
 from .environment import BootstrapEnvironment, ensure_environment_dependencies
 from .status import status_message
@@ -13,6 +19,8 @@
     "is_bootstrapping",
     "ensure_bootstrap_configuration",
     "ensure_core_dependencies",
+    "ensure_gpg_in_path_or_raise",
+    "ensure_clingo_importable_or_raise",
     "ensure_patchelf_in_path_or_raise",
     "all_core_root_specs",
     "ensure_environment_dependencies",
@@ -54,10 +54,14 @@ def _try_import_from_store(
     installed_specs = spack.store.STORE.db.query(query_spec, installed=True)

     for candidate_spec in installed_specs:
-        pkg = candidate_spec["python"].package
+        # previously bootstrapped specs may not have a python-venv dependency.
+        if candidate_spec.dependencies("python-venv"):
+            python, *_ = candidate_spec.dependencies("python-venv")
+        else:
+            python, *_ = candidate_spec.dependencies("python")
         module_paths = [
-            os.path.join(candidate_spec.prefix, pkg.purelib),
-            os.path.join(candidate_spec.prefix, pkg.platlib),
+            os.path.join(candidate_spec.prefix, python.package.purelib),
+            os.path.join(candidate_spec.prefix, python.package.platlib),
         ]
         path_before = list(sys.path)
@@ -209,15 +213,18 @@ def _root_spec(spec_str: str) -> str:
     Args:
         spec_str: spec to be bootstrapped. Must be without compiler and target.
     """
-    # Add a compiler requirement to the root spec.
+    # Add a compiler and platform requirement to the root spec.
     platform = str(spack.platforms.host())

     if platform == "darwin":
         spec_str += " %apple-clang"
+    elif platform == "windows":
+        spec_str += " %msvc"
     elif platform == "linux":
         spec_str += " %gcc"
     elif platform == "freebsd":
         spec_str += " %clang"
+    spec_str += f" platform={platform}"
     target = archspec.cpu.host().family
     spec_str += f" target={target}"

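Putting the branches above together, the composed root spec for, say, a Windows x86_64 host would look like this (the input spec and target values are illustrative; the real code derives them from `spack.platforms` and `archspec`):

```python
# Illustrative composition of the bootstrap root spec on a Windows x86_64 host.
spec_str = "clingo-bootstrap@spack+python"  # hypothetical input spec
platform = "windows"
target = "x86_64"

if platform == "darwin":
    spec_str += " %apple-clang"
elif platform == "windows":
    spec_str += " %msvc"
elif platform == "linux":
    spec_str += " %gcc"
elif platform == "freebsd":
    spec_str += " %clang"
spec_str += f" platform={platform}"
spec_str += f" target={target}"

print(spec_str)
# clingo-bootstrap@spack+python %msvc platform=windows target=x86_64
```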
@@ -270,10 +270,6 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
         with spack_python_interpreter():
             # Add hint to use frontend operating system on Cray
             concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
-            # This is needed to help the old concretizer taking the `setuptools` dependency
-            # only when bootstrapping from sources on Python 3.12
-            if spec_for_current_python() == "python@3.12":
-                concrete_spec.constrain("+force_setuptools")

             if module == "clingo":
                 # TODO: remove when the old concretizer is deprecated  # pylint: disable=fixme
@@ -538,6 +534,41 @@ def ensure_patchelf_in_path_or_raise() -> spack.util.executable.Executable:
     )


+def ensure_winsdk_external_or_raise() -> None:
+    """Ensure the Windows SDK + WGL are available on system
+    If both of these package are found, the Spack user or bootstrap
+    configuration (depending on where Spack is running)
+    will be updated to include all versions and variants detected.
+    If either the WDK or WSDK are not found, this method will raise
+    a RuntimeError.
+
+    **NOTE:** This modifies the Spack config in the current scope,
+    either user or environment depending on the calling context.
+    This is different from all other current bootstrap dependency
+    checks.
+    """
+    if set(["win-sdk", "wgl"]).issubset(spack.config.get("packages").keys()):
+        return
+    externals = spack.detection.by_path(["win-sdk", "wgl"])
+    if not set(["win-sdk", "wgl"]) == externals.keys():
+        missing_packages_lst = []
+        if "wgl" not in externals:
+            missing_packages_lst.append("wgl")
+        if "win-sdk" not in externals:
+            missing_packages_lst.append("win-sdk")
+        missing_packages = " & ".join(missing_packages_lst)
+        raise RuntimeError(
+            f"Unable to find the {missing_packages}, please install these packages \
+via the Visual Studio installer \
+before proceeding with Spack or provide the path to a non standard install with \
+'spack external find --path'"
+        )
+    # wgl/sdk are not required for bootstrapping Spack, but
+    # are required for building anything non trivial
+    # add to user config so they can be used by subsequent Spack ops
+    spack.detection.update_configuration(externals, buildable=False)
+
+
 def ensure_core_dependencies() -> None:
     """Ensure the presence of all the core dependencies."""
     if sys.platform.lower() == "linux":
@@ -3,13 +3,11 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Bootstrap non-core Spack dependencies from an environment."""
-import glob
 import hashlib
 import os
 import pathlib
 import sys
-import warnings
-from typing import List
+from typing import Iterable, List

 import archspec.cpu
@@ -28,6 +26,16 @@
 class BootstrapEnvironment(spack.environment.Environment):
     """Environment to install dependencies of Spack for a given interpreter and architecture"""

+    def __init__(self) -> None:
+        if not self.spack_yaml().exists():
+            self._write_spack_yaml_file()
+        super().__init__(self.environment_root())
+
+        # Remove python package roots created before python-venv was introduced
+        for s in self.concrete_roots():
+            if "python" in s.package.extendees and not s.dependencies("python-venv"):
+                self.deconcretize(s)
+
     @classmethod
     def spack_dev_requirements(cls) -> List[str]:
         """Spack development requirements"""
@@ -59,31 +67,19 @@ def view_root(cls) -> pathlib.Path:
         return cls.environment_root().joinpath("view")

     @classmethod
-    def pythonpaths(cls) -> List[str]:
-        """Paths to be added to sys.path or PYTHONPATH"""
-        python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
-        glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**"))
-        result = glob.glob(glob_expr)
-        if not result:
-            msg = f"Cannot find any Python path in {cls.view_root()}"
-            warnings.warn(msg)
-        return result
-
-    @classmethod
-    def bin_dirs(cls) -> List[pathlib.Path]:
+    def bin_dir(cls) -> pathlib.Path:
         """Paths to be added to PATH"""
-        return [cls.view_root().joinpath("bin")]
+        return cls.view_root().joinpath("bin")
+
+    def python_dirs(self) -> Iterable[pathlib.Path]:
+        python = next(s for s in self.all_specs_generator() if s.name == "python-venv").package
+        return {self.view_root().joinpath(p) for p in (python.platlib, python.purelib)}

     @classmethod
     def spack_yaml(cls) -> pathlib.Path:
         """Environment spack.yaml file"""
         return cls.environment_root().joinpath("spack.yaml")

-    def __init__(self) -> None:
-        if not self.spack_yaml().exists():
-            self._write_spack_yaml_file()
-        super().__init__(self.environment_root())
-
     def update_installations(self) -> None:
         """Update the installations of this environment."""
         log_enabled = tty.is_debug() or tty.is_verbose()
@@ -100,21 +96,13 @@ def update_installations(self) -> None:
             self.install_all()
             self.write(regenerate=True)

-    def update_syspath_and_environ(self) -> None:
-        """Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
-        the environment view.
-        """
-        # Do minimal modifications to sys.path and environment variables. In particular, pay
-        # attention to have the smallest PYTHONPATH / sys.path possible, since that may impact
-        # the performance of the current interpreter
-        sys.path.extend(self.pythonpaths())
-        os.environ["PATH"] = os.pathsep.join(
-            [str(x) for x in self.bin_dirs()] + os.environ.get("PATH", "").split(os.pathsep)
-        )
-        os.environ["PYTHONPATH"] = os.pathsep.join(
-            os.environ.get("PYTHONPATH", "").split(os.pathsep)
-            + [str(x) for x in self.pythonpaths()]
-        )
+    def load(self) -> None:
+        """Update PATH and sys.path."""
+        # Make executables available (shouldn't need PYTHONPATH)
+        os.environ["PATH"] = f"{self.bin_dir()}{os.pathsep}{os.environ.get('PATH', '')}"
+
+        # Spack itself imports pytest
+        sys.path.extend(str(p) for p in self.python_dirs())

     def _write_spack_yaml_file(self) -> None:
         tty.msg(
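The slimmed-down `load()` only touches PATH and `sys.path`; the same shape in isolation, with hypothetical view paths:

```python
import os
import sys

# Hypothetical view layout produced by the bootstrap environment:
bin_dir = "/home/user/.spack/bootstrap/environment/view/bin"
python_dirs = ["/home/user/.spack/bootstrap/environment/view/lib/python3.11/site-packages"]

# Same shape as BootstrapEnvironment.load(): PATH for executables,
# sys.path (not PYTHONPATH) for importable modules such as pytest.
os.environ["PATH"] = f"{bin_dir}{os.pathsep}{os.environ.get('PATH', '')}"
sys.path.extend(str(p) for p in python_dirs)

print(os.environ["PATH"].split(os.pathsep)[0])  # the view bin dir comes first
```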
@@ -164,4 +152,4 @@ def ensure_environment_dependencies() -> None:
     _add_externals_if_missing()
     with BootstrapEnvironment() as env:
         env.update_installations()
-        env.update_syspath_and_environ()
+        env.load()
@@ -43,7 +43,7 @@
 from collections import defaultdict
 from enum import Flag, auto
 from itertools import chain
-from typing import List, Set, Tuple
+from typing import Dict, List, Set, Tuple

 import llnl.util.tty as tty
 from llnl.string import plural
@@ -91,7 +91,7 @@
 )
 from spack.util.executable import Executable
 from spack.util.log_parse import make_log_context, parse_log_events
-from spack.util.module_cmd import load_module, module, path_from_modules
+from spack.util.module_cmd import load_module, path_from_modules

 #
 # This can be set by the user to globally disable parallel builds.
@@ -190,14 +190,6 @@ def __call__(self, *args, **kwargs):
         return super().__call__(*args, **kwargs)


-def _on_cray():
-    host_platform = spack.platforms.host()
-    host_os = host_platform.operating_system("default_os")
-    on_cray = str(host_platform) == "cray"
-    using_cnl = re.match(r"cnl\d+", str(host_os))
-    return on_cray, using_cnl
-
-
 def clean_environment():
     # Stuff in here sanitizes the build environment to eliminate
     # anything the user has set that may interfere. We apply it immediately
@@ -241,17 +233,6 @@ def clean_environment():
         if varname.endswith("_ROOT") and varname != "SPACK_ROOT":
             env.unset(varname)

-    # On Cray "cluster" systems, unset CRAY_LD_LIBRARY_PATH to avoid
-    # interference with Spack dependencies.
-    # CNL requires these variables to be set (or at least some of them,
-    # depending on the CNL version).
-    on_cray, using_cnl = _on_cray()
-    if on_cray and not using_cnl:
-        env.unset("CRAY_LD_LIBRARY_PATH")
-        for varname in os.environ.keys():
-            if "PKGCONF" in varname:
-                env.unset(varname)
-
     # Unset the following variables because they can affect installation of
     # Autotools and CMake packages.
     build_system_vars = [
@@ -381,11 +362,7 @@ def set_compiler_environment_variables(pkg, env):
         _add_werror_handling(keep_werror, env)

     # Set the target parameters that the compiler will add
-    # Don't set on cray platform because the targeting module handles this
-    if spec.satisfies("platform=cray"):
-        isa_arg = ""
-    else:
-        isa_arg = spec.architecture.target.optimization_flags(compiler)
+    isa_arg = spec.architecture.target.optimization_flags(compiler)
     env.set("SPACK_TARGET_ARGS", isa_arg)

     # Trap spack-tracked compiler flags as appropriate.
@@ -730,12 +707,28 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwa
     return compiler(*compiler_args, output=compiler_output)


-def get_rpath_deps(pkg):
-    """Return immediate or transitive RPATHs depending on the package."""
-    if pkg.transitive_rpaths:
-        return [d for d in pkg.spec.traverse(root=False, deptype=("link"))]
-    else:
-        return pkg.spec.dependencies(deptype="link")
+def _get_rpath_deps_from_spec(
+    spec: spack.spec.Spec, transitive_rpaths: bool
+) -> List[spack.spec.Spec]:
+    if not transitive_rpaths:
+        return spec.dependencies(deptype=dt.LINK)
+
+    by_name: Dict[str, spack.spec.Spec] = {}
+
+    for dep in spec.traverse(root=False, deptype=dt.LINK):
+        lookup = by_name.get(dep.name)
+        if lookup is None:
+            by_name[dep.name] = dep
+        elif lookup.version < dep.version:
+            by_name[dep.name] = dep
+
+    return list(by_name.values())
+
+
+def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]:
+    """Return immediate or transitive dependencies (depending on the package) that need to be
+    rpath'ed. If a package occurs multiple times, the newest version is kept."""
+    return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)


 def get_rpaths(pkg):
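The newest-version-wins reduction in `_get_rpath_deps_from_spec` can be demonstrated with a stand-in spec type (the real code traverses `spack.spec.Spec` link dependencies):

```python
from typing import Dict, List, NamedTuple

class FakeSpec(NamedTuple):
    # Minimal stand-in for spack.spec.Spec: just a name and an orderable version.
    name: str
    version: tuple

def newest_by_name(deps: List[FakeSpec]) -> List[FakeSpec]:
    # Same reduction as _get_rpath_deps_from_spec: keep one spec per name,
    # preferring the highest version seen during traversal.
    by_name: Dict[str, FakeSpec] = {}
    for dep in deps:
        lookup = by_name.get(dep.name)
        if lookup is None or lookup.version < dep.version:
            by_name[dep.name] = dep
    return list(by_name.values())

deps = [FakeSpec("zlib", (1, 2)), FakeSpec("zlib", (1, 3)), FakeSpec("openssl", (3, 0))]
print(newest_by_name(deps))  # zlib 1.3 wins; openssl kept as-is
```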
@@ -817,14 +810,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
         for mod in pkg.compiler.modules:
             load_module(mod)

-        # kludge to handle cray mpich and libsci being automatically loaded by
-        # PrgEnv modules on cray platform. Module unload does no damage when
-        # unnecessary
-        on_cray, _ = _on_cray()
-        if on_cray and not dirty:
-            for mod in ["cray-mpich", "cray-libsci"]:
-                module("unload", mod)
-
     if target and target.module_name:
         load_module(target.module_name)
@@ -39,16 +39,11 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
     """Set the PYTHON_EXECUTABLE, Python_EXECUTABLE, and Python3_EXECUTABLE CMake variables
     if the package has Python as build or link dep and ``find_python_hints`` is set to True. See
     ``find_python_hints`` for context."""
-    if not getattr(pkg, "find_python_hints", False):
+    if not getattr(pkg, "find_python_hints", False) or not pkg.spec.dependencies(
+        "python", dt.BUILD | dt.LINK
+    ):
         return
-    pythons = pkg.spec.dependencies("python", dt.BUILD | dt.LINK)
-    if len(pythons) != 1:
-        return
-    try:
-        python_executable = pythons[0].package.command.path
-    except RuntimeError:
-        return
-
+    python_executable = pkg.spec["python"].command.path
     args.extend(
         [
             CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
lib/spack/spack/build_systems/compiler.py (new file, 144 lines)
@@ -0,0 +1,144 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import itertools
+import os
+import pathlib
+import re
+import sys
+from typing import Dict, List, Sequence, Tuple, Union
+
+import llnl.util.tty as tty
+from llnl.util.lang import classproperty
+
+import spack.compiler
+import spack.package_base
+
+# Local "type" for type hints
+Path = Union[str, pathlib.Path]
+
+
+class CompilerPackage(spack.package_base.PackageBase):
+    """A Package mixin for all common logic for packages that implement compilers"""
+
+    # TODO: how do these play nicely with other tags
+    tags: Sequence[str] = ["compiler"]
+
+    #: Optional suffix regexes for searching for this type of compiler.
+    #: Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
+    #: version suffix for gcc.
+    compiler_suffixes: List[str] = [r"-.*"]
+
+    #: Optional prefix regexes for searching for this compiler
+    compiler_prefixes: List[str] = []
+
+    #: Compiler argument(s) that produces version information
+    #: If multiple arguments, the earlier arguments must produce errors when invalid
+    compiler_version_argument: Union[str, Tuple[str]] = "-dumpversion"
+
+    #: Regex used to extract version from compiler's output
+    compiler_version_regex: str = "(.*)"
+
+    #: Static definition of languages supported by this class
+    compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]
+
+    def __init__(self, spec: "spack.spec.Spec"):
+        super().__init__(spec)
+        msg = f"Supported languages for {spec} are not a subset of possible supported languages"
+        msg += f" supports: {self.supported_languages}, valid values: {self.compiler_languages}"
+        assert set(self.supported_languages) <= set(self.compiler_languages), msg
+
+    @property
+    def supported_languages(self) -> Sequence[str]:
+        """Dynamic definition of languages supported by this package"""
+        return self.compiler_languages
+
+    @classproperty
+    def compiler_names(cls) -> Sequence[str]:
+        """Construct list of compiler names from per-language names"""
+        names = []
+        for language in cls.compiler_languages:
+            names.extend(getattr(cls, f"{language}_names"))
+        return names
+
+    @classproperty
+    def executables(cls) -> Sequence[str]:
+        """Construct executables for external detection from names, prefixes, and suffixes."""
+        regexp_fmt = r"^({0}){1}({2})$"
+        prefixes = [""] + cls.compiler_prefixes
+        suffixes = [""] + cls.compiler_suffixes
+        if sys.platform == "win32":
+            ext = r"\.(?:exe|bat)"
+            suffixes += [suf + ext for suf in suffixes]
+        return [
+            regexp_fmt.format(prefix, re.escape(name), suffix)
+            for prefix, name, suffix in itertools.product(prefixes, cls.compiler_names, suffixes)
+        ]
+
+    @classmethod
+    def determine_version(cls, exe: Path):
+        version_argument = cls.compiler_version_argument
+        if isinstance(version_argument, str):
+            version_argument = (version_argument,)
+
+        for va in version_argument:
+            try:
+                output = spack.compiler.get_compiler_version_output(exe, va)
+                match = re.search(cls.compiler_version_regex, output)
+                if match:
+                    return ".".join(match.groups())
+            except spack.util.executable.ProcessError:
+                pass
+            except Exception as e:
+                tty.debug(
+                    f"[{__file__}] Cannot detect a valid version for the executable "
+                    f"{str(exe)}, for package '{cls.name}': {e}"
+                )
+
+    @classmethod
+    def compiler_bindir(cls, prefix: Path) -> Path:
+        """Overridable method for the location of the compiler bindir within the preifx"""
+        return os.path.join(prefix, "bin")
+
+    @classmethod
+    def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
+        """Compute the paths to compiler executables associated with this package
+
+        This is a helper method for ``determine_variants`` to compute the ``extra_attributes``
+        to include with each spec object."""
+        # There are often at least two copies (not symlinks) of each compiler executable in the
+        # same directory: one with a canonical name, e.g. "gfortran", and another one with the
+        # target prefix, e.g. "x86_64-pc-linux-gnu-gfortran". There also might be a copy of "gcc"
+        # with the version suffix, e.g. "x86_64-pc-linux-gnu-gcc-6.3.0". To ensure the consistency
+        # of values in the "paths" dictionary (i.e. we prefer all of them to reference copies
+        # with canonical names if possible), we iterate over the executables in the reversed sorted
+        # order:
+        # First pass over languages identifies exes that are perfect matches for canonical names
+        # Second pass checks for names with prefix/suffix
+        # Second pass is sorted by language name length because longer named languages
+        # e.g. cxx can often contain the names of shorter named languages
+        # e.g. c (e.g. clang/clang++)
+        paths = {}
+        exes = sorted(exes, reverse=True)
+        languages = {
+            lang: getattr(cls, f"{lang}_names")
+            for lang in sorted(cls.compiler_languages, key=len, reverse=True)
+        }
+        for exe in exes:
+            for lang, names in languages.items():
+                if os.path.basename(exe) in names:
+                    paths[lang] = exe
+                    break
+            else:
+                for lang, names in languages.items():
+                    if any(name in os.path.basename(exe) for name in names):
+                        paths[lang] = exe
+                        break
+
+        return paths
+
+    @classmethod
+    def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
+        # path determination is separated so it can be reused in subclasses
+        return "", {"compilers": cls.determine_compiler_paths(exes=exes)}
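The regexes produced by the `executables` classproperty can be previewed with plain stdlib pieces; the per-language names below mimic a hypothetical gcc-like package:

```python
import itertools
import re

# Hypothetical per-language names, mimicking what a gcc-like CompilerPackage
# would supply via c_names/cxx_names/fortran_names:
compiler_names = ["gcc", "g++", "gfortran"]
compiler_prefixes = []        # e.g. [r".*-.*-"] would match target triples
compiler_suffixes = [r"-.*"]  # matches "gcc-12" or macports-style "gcc-mp-12"

regexp_fmt = r"^({0}){1}({2})$"
prefixes = [""] + compiler_prefixes
suffixes = [""] + compiler_suffixes
patterns = [
    regexp_fmt.format(prefix, re.escape(name), suffix)
    for prefix, name, suffix in itertools.product(prefixes, compiler_names, suffixes)
]

print(patterns[:2])                           # ['^()gcc()$', '^()gcc(-.*)$']
print(bool(re.match(patterns[1], "gcc-12")))  # True
```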
@@ -110,9 +110,8 @@ def cuda_flags(arch_list):
|
|||||||
# From the NVIDIA install guide we know of conflicts for particular
|
# From the NVIDIA install guide we know of conflicts for particular
|
||||||
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
|
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
|
||||||
# (gcc, clang). We don't restrict %gcc and %clang conflicts to
|
# (gcc, clang). We don't restrict %gcc and %clang conflicts to
|
||||||
# platform=linux, since they should also apply to platform=cray, and may
|
# platform=linux, since they may apply to platform=darwin. We currently
|
||||||
# apply to platform=darwin. We currently do not provide conflicts for
|
# do not provide conflicts for platform=darwin with %apple-clang.
|
||||||
# platform=darwin with %apple-clang.
|
|
||||||
|
|
||||||
# Linux x86_64 compiler conflicts from here:
|
# Linux x86_64 compiler conflicts from here:
|
||||||
# https://gist.github.com/ax3l/9489132
|
# https://gist.github.com/ax3l/9489132
|
||||||
@@ -137,11 +136,14 @@ def cuda_flags(arch_list):
|
|||||||
conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
|
conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
|
||||||
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
|
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
|
||||||
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
|
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
|
||||||
|
conflicts("%gcc@14:", when="+cuda ^cuda@:12.4")
|
||||||
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
|
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
|
||||||
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
|
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
|
||||||
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
|
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
|
||||||
conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
|
conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
|
||||||
conflicts("%clang@16:", when="+cuda ^cuda@:12.3")
|
conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
|
||||||
|
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
|
||||||
|
conflicts("%clang@18:", when="+cuda ^cuda@:12.4")
|
||||||
|
|
||||||
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
|
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
|
||||||
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
|
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
|
||||||
|
|||||||
@@ -846,6 +846,7 @@ def scalapack_libs(self):
             "^mpich@2:" in spec_root
             or "^cray-mpich" in spec_root
             or "^mvapich2" in spec_root
+            or "^mvapich" in spec_root
             or "^intel-mpi" in spec_root
             or "^intel-oneapi-mpi" in spec_root
             or "^intel-parallel-studio" in spec_root

@@ -936,32 +937,15 @@ def mpi_setup_dependent_build_environment(self, env, dependent_spec, compilers_of_client):
             "I_MPI_ROOT": self.normalize_path("mpi"),
         }

-        # CAUTION - SIMILAR code in:
-        # var/spack/repos/builtin/packages/mpich/package.py
-        # var/spack/repos/builtin/packages/openmpi/package.py
-        # var/spack/repos/builtin/packages/mvapich2/package.py
-        #
-        # On Cray, the regular compiler wrappers *are* the MPI wrappers.
-        if "platform=cray" in self.spec:
-            # TODO: Confirm
-            wrapper_vars.update(
-                {
-                    "MPICC": compilers_of_client["CC"],
-                    "MPICXX": compilers_of_client["CXX"],
-                    "MPIF77": compilers_of_client["F77"],
-                    "MPIF90": compilers_of_client["F90"],
-                }
-            )
-        else:
-            compiler_wrapper_commands = self.mpi_compiler_wrappers
-            wrapper_vars.update(
-                {
-                    "MPICC": compiler_wrapper_commands["MPICC"],
-                    "MPICXX": compiler_wrapper_commands["MPICXX"],
-                    "MPIF77": compiler_wrapper_commands["MPIF77"],
-                    "MPIF90": compiler_wrapper_commands["MPIF90"],
-                }
-            )
+        compiler_wrapper_commands = self.mpi_compiler_wrappers
+        wrapper_vars.update(
+            {
+                "MPICC": compiler_wrapper_commands["MPICC"],
+                "MPICXX": compiler_wrapper_commands["MPICXX"],
+                "MPIF77": compiler_wrapper_commands["MPIF77"],
+                "MPIF90": compiler_wrapper_commands["MPIF90"],
+            }
+        )

         # Ensure that the directory containing the compiler wrappers is in the
         # PATH. Spack packages add `prefix.bin` to their dependents' paths,
@@ -24,7 +24,6 @@ class MSBuildPackage(spack.package_base.PackageBase):
     build_system("msbuild")
     conflicts("platform=linux", when="build_system=msbuild")
     conflicts("platform=darwin", when="build_system=msbuild")
-    conflicts("platform=cray", when="build_system=msbuild")


 @spack.builder.builder("msbuild")

@@ -24,7 +24,6 @@ class NMakePackage(spack.package_base.PackageBase):
     build_system("nmake")
     conflicts("platform=linux", when="build_system=nmake")
     conflicts("platform=darwin", when="build_system=nmake")
-    conflicts("platform=cray", when="build_system=nmake")


 @spack.builder.builder("nmake")

@@ -145,7 +144,7 @@ def install(self, pkg, spec, prefix):
         opts += self.nmake_install_args()
         if self.makefile_name:
             opts.append("/F{}".format(self.makefile_name))
-        opts.append(self.define("PREFIX", prefix))
+        opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
         with fs.working_dir(self.build_directory):
             inspect.getmodule(self.pkg).nmake(
                 *opts, *self.install_targets, ignore_quotes=self.ignore_quotes
@@ -36,9 +36,8 @@ class IntelOneApiPackage(Package):
         "target=ppc64:",
         "target=ppc64le:",
         "target=aarch64:",
-        "platform=darwin:",
-        "platform=cray:",
-        "platform=windows:",
+        "platform=darwin",
+        "platform=windows",
     ]:
         conflicts(c, msg="This package in only available for x86_64 and Linux")

@@ -138,16 +138,21 @@ def view_file_conflicts(self, view, merge_map):
         return conflicts

     def add_files_to_view(self, view, merge_map, skip_if_exists=True):
-        # Patch up shebangs to the python linked in the view only if python is built by Spack.
-        if not self.extendee_spec or self.extendee_spec.external:
+        # Patch up shebangs if the package extends Python and we put a Python interpreter in the
+        # view.
+        if not self.extendee_spec:
+            return super().add_files_to_view(view, merge_map, skip_if_exists)
+
+        python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
+
+        if python.external:
             return super().add_files_to_view(view, merge_map, skip_if_exists)

         # We only patch shebangs in the bin directory.
         copied_files: Dict[Tuple[int, int], str] = {}  # File identifier -> source
         delayed_links: List[Tuple[str, str]] = []  # List of symlinks from merge map

         bin_dir = self.spec.prefix.bin
-        python_prefix = self.extendee_spec.prefix
         for src, dst in merge_map.items():
             if skip_if_exists and os.path.lexists(dst):
                 continue

@@ -168,7 +173,7 @@ def add_files_to_view(self, view, merge_map, skip_if_exists=True):
                 copied_files[(s.st_dev, s.st_ino)] = dst
                 shutil.copy2(src, dst)
                 fs.filter_file(
-                    python_prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
+                    python.prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
                 )
             else:
                 view.link(src, dst)

@@ -199,14 +204,13 @@ def remove_files_from_view(self, view, merge_map):
         ignore_namespace = True

         bin_dir = self.spec.prefix.bin
-        global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec)

         to_remove = []
         for src, dst in merge_map.items():
             if ignore_namespace and namespace_init(dst):
                 continue

-            if global_view or not fs.path_contains_subdirectory(src, bin_dir):
+            if not fs.path_contains_subdirectory(src, bin_dir):
                 to_remove.append(dst)
             else:
                 os.remove(dst)

@@ -362,6 +366,12 @@ def list_url(cls) -> Optional[str]:  # type: ignore[override]
             return f"https://pypi.org/simple/{name}/"
         return None

+    @property
+    def python_spec(self):
+        """Get python-venv if it exists or python otherwise."""
+        python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
+        return python
+
     @property
     def headers(self) -> HeaderList:
         """Discover header files in platlib."""
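The new `python_spec` property leans on a small Python idiom: `a or b` falls through to `b` when `a` is an empty list, and starred unpacking takes the first element of whichever list won. A self-contained sketch of the same fallback pattern, with plain strings standing in for Spec objects:

```python
# Stand-alone illustration of the "preferred dependency with fallback" idiom
# used by python_spec; the lists here hold names, not real Spec objects.
def first_dependency(preferred: list, fallback: list) -> str:
    # An empty list is falsy, so `or` selects the fallback list;
    # `first, *_ = ...` then grabs the first element of the winner.
    first, *_ = preferred or fallback
    return first

assert first_dependency(["python-venv"], ["python"]) == "python-venv"
assert first_dependency([], ["python"]) == "python"
```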
@@ -371,8 +381,9 @@ def headers(self) -> HeaderList:

         # Headers should only be in include or platlib, but no harm in checking purelib too
         include = self.prefix.join(self.spec["python"].package.include).join(name)
-        platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
-        purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
+        python = self.python_spec
+        platlib = self.prefix.join(python.package.platlib).join(name)
+        purelib = self.prefix.join(python.package.purelib).join(name)

         headers_list = map(fs.find_all_headers, [include, platlib, purelib])
         headers = functools.reduce(operator.add, headers_list)

@@ -391,8 +402,9 @@ def libs(self) -> LibraryList:
         name = self.spec.name[3:]

         # Libraries should only be in platlib, but no harm in checking purelib too
-        platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
-        purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
+        python = self.python_spec
+        platlib = self.prefix.join(python.package.platlib).join(name)
+        purelib = self.prefix.join(python.package.purelib).join(name)

         find_all_libraries = functools.partial(fs.find_all_libraries, recursive=True)
         libs_list = map(find_all_libraries, [platlib, purelib])
@@ -504,6 +516,8 @@ def global_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:

     def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
         """Install everything from build directory."""
+        pip = spec["python"].command
+        pip.add_default_arg("-m", "pip")

         args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]

@@ -519,14 +533,6 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
         else:
             args.append(".")

-        pip = spec["python"].command
-        # Hide user packages, since we don't have build isolation. This is
-        # necessary because pip / setuptools may run hooks from arbitrary
-        # packages during the build. There is no equivalent variable to hide
-        # system packages, so this is not reliable for external Python.
-        pip.add_default_env("PYTHONNOUSERSITE", "1")
-        pip.add_default_arg("-m")
-        pip.add_default_arg("pip")
         with fs.working_dir(self.build_directory):
             pip(*args)

@@ -44,6 +44,7 @@
 from spack import traverse
 from spack.error import SpackError
 from spack.reporters import CDash, CDashConfiguration
+from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
 from spack.reporters.cdash import build_stamp as cdash_build_stamp

 # See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
@@ -683,6 +684,22 @@ def generate_gitlab_ci_yaml(
         "instead.",
     )

+    def ensure_expected_target_path(path):
+        """Returns passed paths with all Windows path separators exchanged
+        for posix separators only if copy_only_pipeline is enabled
+
+        This is required as copy_only_pipelines are a unique scenario where
+        the generate job and child pipelines are run on different platforms.
+        To make this compatible w/ Windows, we cannot write Windows style path separators
+        that will be consumed on by the Posix copy job runner.
+
+        TODO (johnwparent): Refactor config + cli read/write to deal only in posix
+        style paths
+        """
+        if copy_only_pipeline and path:
+            path = path.replace("\\", "/")
+        return path
+
     pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
     deprecated_mirror_config = False
     buildcache_destination = None

@@ -806,7 +823,7 @@ def generate_gitlab_ci_yaml(
         if scope not in include_scopes and scope not in env_includes:
             include_scopes.insert(0, scope)
     env_includes.extend(include_scopes)
-    env_yaml_root["spack"]["include"] = env_includes
+    env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes]

     if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
         env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
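A quick stand-alone check of what that helper does — here the `copy_only_pipeline` guard is turned into an explicit parameter, since the real function closes over it:

```python
# Minimal re-creation of ensure_expected_target_path for illustration only.
def ensure_expected_target_path(path: str, copy_only_pipeline: bool) -> str:
    if copy_only_pipeline and path:
        path = path.replace("\\", "/")
    return path

# Windows separators are rewritten for the POSIX child pipeline...
assert ensure_expected_target_path("C:\\spack\\env.yaml", True) == "C:/spack/env.yaml"
# ...but ordinary pipelines leave paths untouched.
assert ensure_expected_target_path("C:\\spack\\env.yaml", False) == "C:\\spack\\env.yaml"
```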
@@ -1227,6 +1244,9 @@ def main_script_replacements(cmd):
         "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
         "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
     }
+    output_vars = output_object["variables"]
+    for item, val in output_vars.items():
+        output_vars[item] = ensure_expected_target_path(val)

     # TODO: Remove this block in Spack 0.23
     if deprecated_mirror_config and remote_mirror_override:

@@ -1283,7 +1303,6 @@ def main_script_replacements(cmd):
     sorted_output = {}
     for output_key, output_value in sorted(output_object.items()):
         sorted_output[output_key] = output_value
-
     if known_broken_specs_encountered:
         tty.error("This pipeline generated hashes known to be broken on develop:")
         display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered)
@@ -1478,6 +1497,12 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
     copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)


+def win_quote(quote_str: str) -> str:
+    if IS_WINDOWS:
+        quote_str = f'"{quote_str}"'
+    return quote_str
+
+
 def download_and_extract_artifacts(url, work_dir):
     """Look for gitlab artifacts.zip at the given url, and attempt to download
     and extract the contents into the given work_dir

@@ -1500,7 +1525,7 @@ def download_and_extract_artifacts(url, work_dir):
     request = Request(url, headers=headers)
     request.get_method = lambda: "GET"

-    response = opener.open(request)
+    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
     response_code = response.getcode()

     if response_code != 200:
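`win_quote` only wraps its argument in double quotes on Windows, so command lines built from it stay byte-identical on POSIX runners. A small sketch with the platform check made explicit (the real helper reads a module-level `IS_WINDOWS` constant):

```python
import sys

IS_WINDOWS = sys.platform == "win32"  # stand-in for the module constant

def win_quote(quote_str: str) -> str:
    if IS_WINDOWS:
        quote_str = f'"{quote_str}"'
    return quote_str

# On Windows this prints '"/abcdef123456"'; elsewhere the hash passes
# through unquoted, so POSIX pipelines are unaffected.
print(win_quote("/abcdef123456"))
```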
@@ -1942,9 +1967,9 @@ def compose_command_err_handling(args):
     # but we need to handle EXEs (git, etc) ourselves
     catch_exe_failure = (
         """
-if ($LASTEXITCODE -ne 0){
-    throw "Command {} has failed"
-}
+if ($LASTEXITCODE -ne 0){{
+    throw 'Command {} has failed'
+}}
 """
         if IS_WINDOWS
         else ""
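The fix here is about `str.format`: bare braces in the PowerShell template are parsed as placeholders, so they must be doubled for only `{}` to be substituted. A minimal reproduction:

```python
# Why the PowerShell braces are doubled: str.format treats { } as placeholders.
template_ok = """
if ($LASTEXITCODE -ne 0){{
    throw 'Command {} has failed'
}}
"""
print(template_ok.format("git clone"))  # braces survive, {} becomes the command

template_bad = "if ($LASTEXITCODE -ne 0){"
try:
    template_bad.format("git clone")
except ValueError as e:
    # A single '{' is an unterminated placeholder and raises at format time.
    print(f"format error: {e}")
```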
@@ -2176,13 +2201,13 @@ def __init__(self, ci_cdash):
     def args(self):
         return [
             "--cdash-upload-url",
-            self.upload_url,
+            win_quote(self.upload_url),
             "--cdash-build",
-            self.build_name,
+            win_quote(self.build_name),
             "--cdash-site",
-            self.site,
+            win_quote(self.site),
             "--cdash-buildstamp",
-            self.build_stamp,
+            win_quote(self.build_stamp),
         ]

     @property  # type: ignore

@@ -2248,7 +2273,7 @@ def create_buildgroup(self, opener, headers, url, group_name, group_type):

     request = Request(url, data=enc_data, headers=headers)

-    response = opener.open(request)
+    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
     response_code = response.getcode()

     if response_code not in [200, 201]:

@@ -2294,7 +2319,7 @@ def populate_buildgroup(self, job_names):
     request = Request(url, data=enc_data, headers=headers)
     request.get_method = lambda: "PUT"

-    response = opener.open(request)
+    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
     response_code = response.getcode()

     if response_code != 200:
@@ -84,7 +84,7 @@ def externals(parser, args):
         return

     pkgs = args.name or spack.repo.PATH.all_package_names()
-    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
+    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs, debug_log=tty.debug)
     _process_reports(reports)

@@ -13,7 +13,6 @@
 import shutil
 import sys
 import tempfile
-import urllib.request
 from typing import Dict, List, Optional, Tuple, Union

 import llnl.util.tty as tty

@@ -54,6 +53,7 @@
 from spack.oci.oci import (
     copy_missing_layers_with_retry,
     get_manifest_and_config_with_retry,
+    list_tags,
     upload_blob_with_retry,
     upload_manifest_with_retry,
 )

@@ -856,10 +856,7 @@ def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:


 def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
-    request = urllib.request.Request(url=image_ref.tags_url())
-    response = spack.oci.opener.urlopen(request)
-    spack.oci.opener.ensure_status(request, response, 200)
-    tags = json.load(response)["tags"]
+    tags = list_tags(image_ref)

     # Fetch all image config files in parallel
     spec_dicts = pool.starmap(
@@ -31,7 +31,6 @@
 level = "long"

 SPACK_COMMAND = "spack"
-MAKE_COMMAND = "make"
 INSTALL_FAIL_CODE = 1
 FAILED_CREATE_BUILDCACHE_CODE = 100


@@ -40,6 +39,12 @@ def deindent(desc):
     return desc.replace("    ", "")


+def unicode_escape(path: str) -> str:
+    """Returns transformed path with any unicode
+    characters replaced with their corresponding escapes"""
+    return path.encode("unicode-escape").decode("utf-8")
+
+
 def setup_parser(subparser):
     setup_parser.parser = subparser
     subparsers = subparser.add_subparsers(help="CI sub-commands")
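The `unicode-escape` codec turns non-ASCII characters into backslash escapes, so environment paths survive being round-tripped through shells and YAML on CI runners. For example:

```python
# What unicode_escape does to a path containing non-ASCII characters.
def unicode_escape(path: str) -> str:
    return path.encode("unicode-escape").decode("utf-8")

print(unicode_escape("/home/jürgen/env"))  # -> /home/j\xfcrgen/env
print(unicode_escape("/tmp/spack"))        # ASCII paths pass through unchanged
```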
@@ -551,75 +556,35 @@ def ci_rebuild(args):
     # No hash match anywhere means we need to rebuild spec

     # Start with spack arguments
-    spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose"]
+    spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose", "install"]

     config = cfg.get("config")
     if not config["verify_ssl"]:
         spack_cmd.append("-k")

-    install_args = []
+    install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']

     can_verify = spack_ci.can_verify_binaries()
     verify_binaries = can_verify and spack_is_pr_pipeline is False
     if not verify_binaries:
         install_args.append("--no-check-signature")

-    slash_hash = "/{}".format(job_spec.dag_hash())
-
-    # Arguments when installing dependencies from cache
-    deps_install_args = install_args
+    slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())

     # Arguments when installing the root from sources
-    root_install_args = install_args + [
-        "--keep-stage",
-        "--only=package",
-        "--use-buildcache=package:never,dependencies:only",
-    ]
+    deps_install_args = install_args + ["--only=dependencies"]
+    root_install_args = install_args + ["--keep-stage", "--only=package"]
     if cdash_handler:
         # Add additional arguments to `spack install` for CDash reporting.
         root_install_args.extend(cdash_handler.args())
-    root_install_args.append(slash_hash)
-
-    # ["x", "y"] -> "'x' 'y'"
-    args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)

     commands = [
         # apparently there's a race when spack bootstraps? do it up front once
-        [SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
-        [
-            SPACK_COMMAND,
-            "-e",
-            env.path,
-            "env",
-            "depfile",
-            "-o",
-            "Makefile",
-            "--use-buildcache=package:never,dependencies:only",
-            slash_hash,  # limit to spec we're building
-        ],
-        [
-            # --output-sync requires GNU make 4.x.
-            # Old make errors when you pass it a flag it doesn't recognize,
-            # but it doesn't error or warn when you set unrecognized flags in
-            # this variable.
-            "export",
-            "GNUMAKEFLAGS=--output-sync=recurse",
-        ],
-        [
-            MAKE_COMMAND,
-            "SPACK={}".format(args_to_string(spack_cmd)),
-            "SPACK_COLOR=always",
-            "SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
-            "-j$(nproc)",
-            "install-deps/{}".format(
-                spack.environment.depfile.MakefileSpec(job_spec).safe_format(
-                    "{name}-{version}-{hash}"
-                )
-            ),
-        ],
-        spack_cmd + ["install"] + root_install_args,
+        [SPACK_COMMAND, "-e", unicode_escape(env.path), "bootstrap", "now"],
+        spack_cmd + deps_install_args + [slash_hash],
+        spack_cmd + root_install_args + [slash_hash],
     ]

     tty.debug("Installing {0} from source".format(job_spec.name))
     install_exit_code = spack_ci.process_command("install", commands, repro_dir)
@@ -106,7 +106,8 @@ def clean(parser, args):

     # Then do the cleaning falling through the cases
     if args.specs:
-        specs = spack.cmd.parse_specs(args.specs, concretize=True)
+        specs = spack.cmd.parse_specs(args.specs, concretize=False)
+        specs = list(spack.cmd.matching_spec_from_env(x) for x in specs)
         for spec in specs:
             msg = "Cleaning build stage [{0}]"
             tty.msg(msg.format(spec.short_spec))
@@ -563,12 +563,13 @@ def add_concretizer_args(subparser):
         help="reuse installed packages/buildcaches when possible",
     )
     subgroup.add_argument(
+        "--fresh-roots",
         "--reuse-deps",
         action=ConfigSetAction,
         dest="concretizer:reuse",
         const="dependencies",
         default=None,
-        help="reuse installed dependencies only",
+        help="concretize with fresh roots and reused dependencies",
    )
     subgroup.add_argument(
         "--deprecated",
@@ -10,13 +10,13 @@
 import sys
 import tempfile
 from pathlib import Path
-from typing import Optional
+from typing import List, Optional

 import llnl.string as string
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.tty.colify import colify
-from llnl.util.tty.color import colorize
+from llnl.util.tty.color import cescape, colorize

 import spack.cmd
 import spack.cmd.common

@@ -61,14 +61,7 @@
 #
 def env_create_setup_parser(subparser):
     """create a new environment"""
-    subparser.add_argument(
-        "env_name",
-        metavar="env",
-        help=(
-            "name of managed environment or directory of the anonymous env "
-            "(when using --dir/-d) to activate"
-        ),
-    )
+    subparser.add_argument("env_name", metavar="env", help="name or directory of environment")
     subparser.add_argument(
         "-d", "--dir", action="store_true", help="create an environment in a specific directory"
     )

@@ -94,6 +87,9 @@ def env_create_setup_parser(subparser):
         default=None,
         help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
     )
+    subparser.add_argument(
+        "--include-concrete", action="append", help="name of old environment to copy specs from"
+    )


 def env_create(args):
@@ -111,19 +107,32 @@ def env_create(args):
         # the environment should not include a view.
         with_view = None

+    include_concrete = None
+    if hasattr(args, "include_concrete"):
+        include_concrete = args.include_concrete
+
     env = _env_create(
         args.env_name,
         init_file=args.envfile,
-        dir=args.dir,
+        dir=args.dir or os.path.sep in args.env_name or args.env_name in (".", ".."),
         with_view=with_view,
         keep_relative=args.keep_relative,
+        include_concrete=include_concrete,
     )

     # Generate views, only really useful for environments created from spack.lock files.
     env.regenerate_views()


-def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
+def _env_create(
+    name_or_path: str,
+    *,
+    init_file: Optional[str] = None,
+    dir: bool = False,
+    with_view: Optional[str] = None,
+    keep_relative: bool = False,
+    include_concrete: Optional[List[str]] = None,
+):
     """Create a new environment, with an optional yaml description.

     Arguments:

@@ -135,22 +144,31 @@ def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
         keep_relative (bool): if True, develop paths are copied verbatim into
             the new environment file, otherwise they may be made absolute if the
             new environment is in a different location
+        include_concrete (list): list of the included concrete environments
     """
     if not dir:
         env = ev.create(
-            name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
+            name_or_path,
+            init_file=init_file,
+            with_view=with_view,
+            keep_relative=keep_relative,
+            include_concrete=include_concrete,
         )
-        tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
-        tty.msg("You can activate this environment with:")
-        tty.msg("    spack env activate %s" % (name_or_path))
-        return env
-
-    env = ev.create_in_dir(
-        name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
-    )
-    tty.msg("Created environment in %s" % env.path)
-    tty.msg("You can activate this environment with:")
-    tty.msg("    spack env activate %s" % env.path)
+        tty.msg(
+            colorize(
+                f"Created environment @c{{{cescape(name_or_path)}}} in: @c{{{cescape(env.path)}}}"
+            )
+        )
+    else:
+        env = ev.create_in_dir(
+            name_or_path,
+            init_file=init_file,
+            with_view=with_view,
+            keep_relative=keep_relative,
+            include_concrete=include_concrete,
+        )
+        tty.msg(colorize(f"Created independent environment in: @c{{{cescape(env.path)}}}"))
+    tty.msg(f"Activate with: {colorize(f'@c{{spack env activate {cescape(name_or_path)}}}')}")
     return env

@@ -436,6 +454,12 @@ def env_remove_setup_parser(subparser):
     """remove an existing environment"""
     subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
     arguments.add_common_arguments(subparser, ["yes_to_all"])
+    subparser.add_argument(
+        "-f",
+        "--force",
+        action="store_true",
+        help="remove the environment even if it is included in another environment",
+    )


 def env_remove(args):

@@ -445,13 +469,35 @@ def env_remove(args):
     and manifests embedded in repositories should be removed manually.
     """
     read_envs = []
+    valid_envs = []
     bad_envs = []
-    for env_name in args.rm_env:
+    invalid_envs = []
+
+    for env_name in ev.all_environment_names():
         try:
             env = ev.read(env_name)
-            read_envs.append(env)
+            valid_envs.append(env_name)
+
+            if env_name in args.rm_env:
+                read_envs.append(env)
         except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
-            bad_envs.append(env_name)
+            invalid_envs.append(env_name)
+
+            if env_name in args.rm_env:
+                bad_envs.append(env_name)
+
+    # Check if env is linked to another before trying to remove
+    for name in valid_envs:
+        # don't check if environment is included to itself
+        if name == env_name:
+            continue
+        environ = ev.Environment(ev.root(name))
+        if ev.root(env_name) in environ.included_concrete_envs:
+            msg = f'Environment "{env_name}" is being used by environment "{name}"'
+            if args.force:
+                tty.warn(msg)
+            else:
+                tty.die(msg)

     if not args.yes_to_all:
         environments = string.plural(len(args.rm_env), "environment", show_n=False)
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import copy
 import sys

 import llnl.util.lang

@@ -271,6 +272,27 @@ def root_decorator(spec, string):

     print()

+    if env.included_concrete_envs:
+        tty.msg("Included specs")
+
+        # Root specs cannot be displayed with prefixes, since those are not
+        # set for abstract specs. Same for hashes
+        root_args = copy.copy(args)
+        root_args.paths = False
+
+        # Roots are displayed with variants, etc. so that we can see
+        # specifically what the user asked for.
+        cmd.display_specs(
+            env.included_user_specs,
+            root_args,
+            decorator=lambda s, f: color.colorize("@*{%s}" % f),
+            namespace=True,
+            show_flags=True,
+            show_full_compiler=True,
+            variants=True,
+        )
+        print()
+
     if args.show_concretized:
         tty.msg("Concretized roots")
         cmd.display_specs(env.specs_by_hash.values(), args, decorator=decorator)
@@ -50,7 +50,7 @@
     @B{++}, @r{--}, @r{~~}, @B{==}  propagate variants to package dependencies

     architecture variants:
-    @m{platform=platform}        linux, darwin, cray, etc.
+    @m{platform=platform}        linux, darwin, freebsd, windows
     @m{os=operating_system}      specific <operating_system>
     @m{target=target}            specific <target> processor
     @m{arch=platform-os-target}  shortcut for all three above
@@ -61,7 +61,6 @@ def install_kwargs_from_args(args):
         "dependencies_use_cache": cache_opt(args.use_cache, dep_use_bc),
         "dependencies_cache_only": cache_opt(args.cache_only, dep_use_bc),
         "include_build_deps": args.include_build_deps,
-        "explicit": True,  # Use true as a default for install command
         "stop_at": args.until,
         "unsigned": args.unsigned,
         "install_deps": ("dependencies" in args.things_to_install),

@@ -473,6 +472,7 @@ def install_without_active_env(args, install_kwargs, reporter_factory):
     require_user_confirmation_for_overwrite(concrete_specs, args)
     install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]

-    installs = [(s.package, install_kwargs) for s in concrete_specs]
-    builder = PackageInstaller(installs)
+    installs = [s.package for s in concrete_specs]
+    install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
+    builder = PackageInstaller(installs, install_kwargs)
     builder.install()
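Note what changed semantically: `"explicit"` used to be a blanket `True` default, and is now a list of DAG hashes, so only the requested roots are recorded as explicit installs. A pure-Python illustration of that bookkeeping shift, with made-up hashes:

```python
# Illustration only: per-installer flag becomes a per-package hash lookup.
concrete_hashes = ["aaa111", "bbb222"]  # hypothetical dag hashes of the roots

install_kwargs = {"explicit": set(concrete_hashes)}

def is_explicit(dag_hash: str, kwargs: dict) -> bool:
    # Old scheme: kwargs["explicit"] was True for everything installed.
    # New scheme: only hashes listed in kwargs["explicit"] are explicit.
    return dag_hash in kwargs.get("explicit", set())

assert is_explicit("aaa111", install_kwargs)       # requested root
assert not is_explicit("ccc333", install_kwargs)   # pulled-in dependency
```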
@@ -23,7 +23,7 @@


 # tutorial configuration parameters
-tutorial_branch = "releases/v0.21"
+tutorial_branch = "releases/v0.22"
 tutorial_mirror = "file:///mirror"
 tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

@@ -151,7 +151,8 @@ def is_installed(spec):
         key=lambda s: s.dag_hash(),
     )

-    return [spec for spec in specs if is_installed(spec)]
+    with spack.store.STORE.db.read_transaction():
+        return [spec for spec in specs if is_installed(spec)]


 def dependent_environments(

@@ -239,6 +240,8 @@ def get_uninstall_list(args, specs: List[spack.spec.Spec], env: Optional[ev.Environment]):
         print()
         tty.info("The following environments still reference these specs:")
         colify([e.name for e in other_dependent_envs.keys()], indent=4)
+        if env:
+            msgs.append("use `spack remove` to remove the spec from the current environment")
         msgs.append("use `spack env remove` to remove environments")
         msgs.append("use `spack uninstall --force` to override")
         print()
@@ -214,8 +214,6 @@ def unit_test(parser, args, unknown_args):

     # Ensure clingo is available before switching to the
     # mock configuration used by unit tests
-    # Note: skip on windows here because for the moment,
-    # clingo is wholly unsupported from bootstrap
     with spack.bootstrap.ensure_bootstrap_configuration():
         spack.bootstrap.ensure_core_dependencies()
         if pytest is None:
@@ -20,6 +20,7 @@

 import spack.compilers
 import spack.error
+import spack.schema.environment
 import spack.spec
 import spack.util.executable
 import spack.util.libc

@@ -683,8 +684,8 @@ def __str__(self):

     @contextlib.contextmanager
     def compiler_environment(self):
-        # yield immediately if no modules
-        if not self.modules:
+        # Avoid modifying os.environ if possible.
+        if not self.modules and not self.environment:
             yield
             return

@@ -694,20 +695,12 @@ def compiler_environment(self):
         try:
             # load modules and set env variables
             for module in self.modules:
-                # On cray, mic-knl module cannot be loaded without cce module
-                # See: https://github.com/spack/spack/issues/3153
-                if os.environ.get("CRAY_CPU_TARGET") == "mic-knl":
-                    spack.util.module_cmd.load_module("cce")
                 spack.util.module_cmd.load_module(module)

             # apply other compiler environment changes
-            env = spack.util.environment.EnvironmentModifications()
-            env.extend(spack.schema.environment.parse(self.environment))
-            env.apply_modifications()
+            spack.schema.environment.parse(self.environment).apply_modifications()

             yield
-        except BaseException:
-            raise
         finally:
             # Restore environment regardless of whether inner code succeeded
             os.environ.clear()
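The surrounding context manager follows a common pattern: snapshot `os.environ`, mutate it for the duration of the `with` block, and restore it in `finally` (the diff also drops a redundant `except BaseException: raise`, which did nothing the `finally` did not already cover). A self-contained sketch of that pattern:

```python
import contextlib
import os

@contextlib.contextmanager
def scoped_environment(**overrides):
    """Temporarily apply environment overrides, restoring os.environ on exit."""
    saved = dict(os.environ)  # snapshot before mutating
    try:
        os.environ.update(overrides)
        yield
    finally:
        # Restore regardless of whether the inner code raised.
        os.environ.clear()
        os.environ.update(saved)

with scoped_environment(MY_FLAG="1"):
    assert os.environ["MY_FLAG"] == "1"
assert "MY_FLAG" not in os.environ  # restored after the block
```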
@@ -164,43 +164,66 @@ def _compiler_config_from_package_config(config):


 def _compiler_config_from_external(config):
+    extra_attributes_key = "extra_attributes"
+    compilers_key = "compilers"
+    c_key, cxx_key, fortran_key = "c", "cxx", "fortran"
+
+    # Allow `@x.y.z` instead of `@=x.y.z`
     spec = spack.spec.parse_with_version_concrete(config["spec"])
-    # use str(spec.versions) to allow `@x.y.z` instead of `@=x.y.z`
     compiler_spec = spack.spec.CompilerSpec(
         package_name_to_compiler_name.get(spec.name, spec.name), spec.version
     )

-    extra_attributes = config.get("extra_attributes", {})
-    prefix = config.get("prefix", None)
-
-    compiler_class = class_for_compiler_name(compiler_spec.name)
-    paths = extra_attributes.get("paths", {})
-    compiler_langs = ["cc", "cxx", "fc", "f77"]
-    for lang in compiler_langs:
-        if paths.setdefault(lang, None):
-            continue
-
-        if not prefix:
-            continue
-
-        # Check for files that satisfy the naming scheme for this compiler
-        bindir = os.path.join(prefix, "bin")
-        for f, regex in itertools.product(os.listdir(bindir), compiler_class.search_regexps(lang)):
-            if regex.match(f):
-                paths[lang] = os.path.join(bindir, f)
-
-    if all(v is None for v in paths.values()):
+    err_header = f"The external spec '{spec}' cannot be used as a compiler"
+
+    # If extra_attributes is not there I might not want to use this entry as a compiler,
+    # therefore just leave a debug message, but don't be loud with a warning.
+    if extra_attributes_key not in config:
+        tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key")
         return None
+    extra_attributes = config[extra_attributes_key]
+
+    # If I have 'extra_attributes' warn if 'compilers' is missing, or we don't have a C compiler
+    if compilers_key not in extra_attributes:
+        warnings.warn(
+            f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'"
+        )
+        return None
+    attribute_compilers = extra_attributes[compilers_key]
+
+    if c_key not in attribute_compilers:
+        warnings.warn(
+            f"{err_header}: missing the C compiler path under "
+            f"'{extra_attributes_key}:{compilers_key}'"
+        )
+        return None
+    c_compiler = attribute_compilers[c_key]
+
+    # C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
+    if cxx_key not in attribute_compilers:
+        tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
+
+    if fortran_key not in attribute_compilers:
+        tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
+
+    # compilers format has cc/fc/f77, externals format has "c/fortran"
+    paths = {
+        "cc": c_compiler,
+        "cxx": attribute_compilers.get(cxx_key, None),
+        "fc": attribute_compilers.get(fortran_key, None),
+        "f77": attribute_compilers.get(fortran_key, None),
+    }

     if not spec.architecture:
         host_platform = spack.platforms.host()
         operating_system = host_platform.operating_system("default_os")
         target = host_platform.target("default_target").microarchitecture
     else:
-        target = spec.target
+        target = spec.architecture.target
         if not target:
-            host_platform = spack.platforms.host()
-            target = host_platform.target("default_target").microarchitecture
+            target = spack.platforms.host().target("default_target")
+            target = target.microarchitecture

         operating_system = spec.os
         if not operating_system:
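The `config` argument this function now expects mirrors an external packages.yaml entry. A hedged sketch of the dictionary shape the new code reads — paths and version here are made up:

```python
# Hypothetical external-compiler entry, shaped the way the new code reads it:
# config["spec"] plus config["extra_attributes"]["compilers"][<language>].
config = {
    "spec": "gcc@12.3.1",
    "prefix": "/usr",
    "extra_attributes": {
        "compilers": {
            "c": "/usr/bin/gcc",
            "cxx": "/usr/bin/g++",
            "fortran": "/usr/bin/gfortran",
        }
    },
}

# Without "extra_attributes" the entry is skipped with a debug message;
# without a "c" entry under "compilers" it is skipped with a warning.
```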
@@ -96,6 +96,8 @@ def verbose_flag(self):

     openmp_flag = "-fopenmp"

+    # C++ flags based on CMake Modules/Compiler/Clang.cmake
+
     @property
     def cxx11_flag(self):
         if self.real_version < Version("3.3"):

@@ -120,6 +122,24 @@ def cxx17_flag(self):

         return "-std=c++17"

+    @property
+    def cxx20_flag(self):
+        if self.real_version < Version("5.0"):
+            raise UnsupportedCompilerFlag(self, "the C++20 standard", "cxx20_flag", "< 5.0")
+        elif self.real_version < Version("11.0"):
+            return "-std=c++2a"
+        else:
+            return "-std=c++20"
+
+    @property
+    def cxx23_flag(self):
+        if self.real_version < Version("12.0"):
+            raise UnsupportedCompilerFlag(self, "the C++23 standard", "cxx23_flag", "< 12.0")
+        elif self.real_version < Version("17.0"):
+            return "-std=c++2b"
+        else:
+            return "-std=c++23"
+
     @property
     def c99_flag(self):
         return "-std=c99"

@@ -142,7 +162,10 @@ def c17_flag(self):
     def c23_flag(self):
         if self.real_version < Version("9.0"):
             raise UnsupportedCompilerFlag(self, "the C23 standard", "c23_flag", "< 9.0")
-        return "-std=c2x"
+        elif self.real_version < Version("18.0"):
+            return "-std=c2x"
+        else:
+            return "-std=c23"

     @property
     def cc_pic_flag(self):
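The pattern in these properties is simple version gating: pick the oldest spelling of a standard flag the running compiler accepts, or refuse outright below a minimum version. A stand-alone sketch with plain tuples standing in for Spack's `Version` type and error class:

```python
# Minimal re-creation of the cxx20_flag logic using tuple versions.
def cxx20_flag(real_version: tuple) -> str:
    if real_version < (5, 0):
        raise RuntimeError("clang < 5.0 does not implement the C++20 standard")
    elif real_version < (11, 0):
        return "-std=c++2a"  # pre-standardization spelling
    else:
        return "-std=c++20"

assert cxx20_flag((10, 0)) == "-std=c++2a"
assert cxx20_flag((16, 0)) == "-std=c++20"
```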
@@ -74,6 +74,10 @@ class Concretizer:
     #: during concretization. Used for testing and for mirror creation
     check_for_compiler_existence = None

+    #: Packages that the old concretizer cannot deal with correctly, and cannot build anyway.
+    #: Those will not be considered as providers for virtuals.
+    non_buildable_packages = {"glibc", "musl"}
+
     def __init__(self, abstract_spec=None):
         if Concretizer.check_for_compiler_existence is None:
             Concretizer.check_for_compiler_existence = not spack.config.get(

@@ -113,7 +117,11 @@ def _valid_virtuals_and_externals(self, spec):
         pref_key = lambda spec: 0  # no-op pref key

         if spec.virtual:
-            candidates = spack.repo.PATH.providers_for(spec)
+            candidates = [
+                s
+                for s in spack.repo.PATH.providers_for(spec)
+                if s.name not in self.non_buildable_packages
+            ]
             if not candidates:
                 raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)

@@ -34,28 +34,6 @@
                 "image": "docker.io/fedora:39"
             }
         },
-        "fedora:38": {
-            "bootstrap": {
-                "template": "container/fedora.dockerfile",
-                "image": "docker.io/fedora:38"
-            },
-            "os_package_manager": "dnf",
-            "build": "spack/fedora38",
-            "final": {
-                "image": "docker.io/fedora:38"
-            }
-        },
-        "fedora:37": {
-            "bootstrap": {
-                "template": "container/fedora.dockerfile",
-                "image": "docker.io/fedora:37"
-            },
-            "os_package_manager": "dnf",
-            "build": "spack/fedora37",
-            "final": {
-                "image": "docker.io/fedora:37"
-            }
-        },
         "rockylinux:9": {
             "bootstrap": {
                 "template": "container/rockylinux_9.dockerfile",

@@ -138,6 +116,13 @@
             },
             "os_package_manager": "apt"
         },
+        "ubuntu:24.04": {
+            "bootstrap": {
+                "template": "container/ubuntu_2404.dockerfile"
+            },
+            "os_package_manager": "apt",
+            "build": "spack/ubuntu-noble"
+        },
         "ubuntu:22.04": {
             "bootstrap": {
                 "template": "container/ubuntu_2204.dockerfile"

@@ -151,13 +136,6 @@
             },
             "build": "spack/ubuntu-focal",
             "os_package_manager": "apt"
-        },
-        "ubuntu:18.04": {
-            "bootstrap": {
-                "template": "container/ubuntu_1804.dockerfile"
-            },
-            "os_package_manager": "apt",
-            "build": "spack/ubuntu-bionic"
         }
     },
     "os_package_managers": {
@@ -662,6 +662,7 @@ def _execute_redistribute(
 @directive(("extendees", "dependencies"))
 def extends(spec, when=None, type=("build", "run"), patches=None):
     """Same as depends_on, but also adds this package to the extendee list.
+    In case of Python, also adds a dependency on python-venv.

     keyword arguments can be passed to extends() so that extension
     packages can pass parameters to the extendee's extension

@@ -677,6 +678,11 @@ def _execute_extends(pkg):
         _depends_on(pkg, spec, when=when, type=type, patches=patches)
         spec_obj = spack.spec.Spec(spec)

+        # When extending python, also add a dependency on python-venv. This is done so that
+        # Spack environment views are Python virtual environments.
+        if spec_obj.name == "python" and not pkg.name == "python-venv":
+            _depends_on(pkg, "python-venv", when=when, type=("build", "run"))
+
         # TODO: the values of the extendees dictionary are not used. Remove in next refactor.
         pkg.extendees[spec_obj.name] = (spec_obj, None)

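In practice this means any recipe that declares `extends("python")` now implicitly depends on `python-venv` as well, so environment views built from it behave like Python virtual environments. A hedged sketch of a hypothetical recipe affected by the change — name, URL, and checksum are made up:

```python
# Hypothetical Python extension recipe; with the hunk above applied, the
# extends("python") that PythonPackage declares also injects a python-venv
# build/run dependency into this package's DAG.
from spack.package import *


class PyToyTool(PythonPackage):
    """Toy Python extension used only to illustrate the directive."""

    homepage = "https://example.com/py-toytool"  # made-up URL
    pypi = "toytool/toytool-1.0.tar.gz"

    version("1.0", sha256="0" * 64)  # placeholder checksum
```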
@@ -15,6 +15,7 @@

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
+from llnl.util.symlink import readlink

 import spack.config
 import spack.hash_types as ht

@@ -181,7 +182,7 @@ def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
         base_dir = (
             self.path_for_spec(deprecator_spec)
             if deprecator_spec
-            else os.readlink(deprecated_spec.prefix)
+            else readlink(deprecated_spec.prefix)
         )

         yaml_path = os.path.join(
@@ -34,6 +34,9 @@
 * ``spec``: a string representation of the abstract spec that was concretized

 4. ``concrete_specs``: a dictionary containing the specs in the environment.
+5. ``include_concrete`` (dictionary): an optional dictionary that includes the roots
+   and concrete specs from the included environments, keyed by the path to that
+   environment

 Compatibility
 -------------

@@ -50,26 +53,37 @@
      - ``v2``
      - ``v3``
      - ``v4``
+     - ``v5``
    * - ``v0.12:0.14``
      - ✅
      -
      -
      -
+     -
    * - ``v0.15:0.16``
      - ✅
      - ✅
      -
      -
+     -
    * - ``v0.17``
      - ✅
      - ✅
      - ✅
      -
+     -
    * - ``v0.18:``
      - ✅
      - ✅
      - ✅
      - ✅
+     -
+   * - ``v0.22:``
+     - ✅
+     - ✅
+     - ✅
+     - ✅
+     - ✅
@@ -334,6 +348,118 @@
             }
         }
     }
 
 
+Version 5
+---------
+
+Version 5 doesn't change the top-level lockfile format, but an optional dictionary is
+added. The dictionary has the ``root`` and ``concrete_specs`` of the included
+environments, keyed by the path to that environment. Since this is optional, if the
+environment does not have any included environments, ``include_concrete`` will not be
+part of the lockfile.
+
+.. code-block:: json
+
+    {
+        "_meta": {
+            "file-type": "spack-lockfile",
+            "lockfile-version": 5,
+            "specfile-version": 3
+        },
+        "roots": [
+            {
+                "hash": "<dag_hash 1>",
+                "spec": "<abstract spec 1>"
+            },
+            {
+                "hash": "<dag_hash 2>",
+                "spec": "<abstract spec 2>"
+            }
+        ],
+        "concrete_specs": {
+            "<dag_hash 1>": {
+                "... <spec dict attributes> ...": { },
+                "dependencies": [
+                    {
+                        "name": "depname_1",
+                        "hash": "<dag_hash for depname_1>",
+                        "type": ["build", "link"]
+                    },
+                    {
+                        "name": "depname_2",
+                        "hash": "<dag_hash for depname_2>",
+                        "type": ["build", "link"]
+                    }
+                ],
+                "hash": "<dag_hash 1>"
+            },
+            "<dag_hash 2>": {
+                "... <spec dict attributes> ...": { },
+                "dependencies": [
+                    {
+                        "name": "depname_3",
+                        "hash": "<dag_hash for depname_3>",
+                        "type": ["build", "link"]
+                    },
+                    {
+                        "name": "depname_4",
+                        "hash": "<dag_hash for depname_4>",
+                        "type": ["build", "link"]
+                    }
+                ],
+                "hash": "<dag_hash 2>"
+            }
+        },
+        "include_concrete": {
+            "<path to environment>": {
+                "roots": [
+                    {
+                        "hash": "<dag_hash 1>",
+                        "spec": "<abstract spec 1>"
+                    },
+                    {
+                        "hash": "<dag_hash 2>",
+                        "spec": "<abstract spec 2>"
+                    }
+                ],
+                "concrete_specs": {
+                    "<dag_hash 1>": {
+                        "... <spec dict attributes> ...": { },
+                        "dependencies": [
+                            {
+                                "name": "depname_1",
+                                "hash": "<dag_hash for depname_1>",
+                                "type": ["build", "link"]
+                            },
+                            {
+                                "name": "depname_2",
+                                "hash": "<dag_hash for depname_2>",
+                                "type": ["build", "link"]
+                            }
+                        ],
+                        "hash": "<dag_hash 1>"
+                    },
+                    "<dag_hash 2>": {
+                        "... <spec dict attributes> ...": { },
+                        "dependencies": [
+                            {
+                                "name": "depname_3",
+                                "hash": "<dag_hash for depname_3>",
+                                "type": ["build", "link"]
+                            },
+                            {
+                                "name": "depname_4",
+                                "hash": "<dag_hash for depname_4>",
+                                "type": ["build", "link"]
+                            }
+                        ],
+                        "hash": "<dag_hash 2>"
+                    }
+                }
+            }
+        }
+    }
 """
 
 from .environment import (
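
A short sketch of consuming the format documented above, assuming only the v5 keys shown in the example and the standard ``spack.lock`` file name:

    # Inspect a version-5 lockfile for included environments.
    import json


    def included_roots(lockfile_path: str) -> dict:
        """Map each included environment path to its root spec strings."""
        with open(lockfile_path) as f:
            data = json.load(f)

        if data["_meta"]["lockfile-version"] < 5:
            return {}  # include_concrete exists only from version 5 on

        return {
            env_path: [root["spec"] for root in env_info["roots"]]
            for env_path, env_info in data.get("include_concrete", {}).items()
        }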
@@ -16,13 +16,13 @@
 import urllib.parse
 import urllib.request
 import warnings
-from typing import Dict, Iterable, List, Optional, Set, Tuple, Union
+from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union
 
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 import llnl.util.tty.color as clr
 from llnl.util.link_tree import ConflictingSpecsError
-from llnl.util.symlink import symlink
+from llnl.util.symlink import readlink, symlink
 
 import spack.compilers
 import spack.concretize

@@ -159,6 +159,8 @@ def default_manifest_yaml():
 default_view_name = "default"
 # Default behavior to link all packages into views (vs. only root packages)
 default_view_link = "all"
+# The name for any included concrete specs
+included_concrete_name = "include_concrete"
 
 
 def installed_specs():

@@ -293,6 +295,7 @@ def create(
     init_file: Optional[Union[str, pathlib.Path]] = None,
     with_view: Optional[Union[str, pathlib.Path, bool]] = None,
     keep_relative: bool = False,
+    include_concrete: Optional[List[str]] = None,
 ) -> "Environment":
     """Create a managed environment in Spack and returns it.
 

@@ -309,10 +312,15 @@ def create(
             string, it specifies the path to the view
         keep_relative: if True, develop paths are copied verbatim into the new environment file,
             otherwise they are made absolute
+        include_concrete: list of concrete environment names/paths to be included
     """
     environment_dir = environment_dir_from_name(name, exists_ok=False)
     return create_in_dir(
-        environment_dir, init_file=init_file, with_view=with_view, keep_relative=keep_relative
+        environment_dir,
+        init_file=init_file,
+        with_view=with_view,
+        keep_relative=keep_relative,
+        include_concrete=include_concrete,
     )
 
 

@@ -321,6 +329,7 @@ def create_in_dir(
     init_file: Optional[Union[str, pathlib.Path]] = None,
     with_view: Optional[Union[str, pathlib.Path, bool]] = None,
     keep_relative: bool = False,
+    include_concrete: Optional[List[str]] = None,
 ) -> "Environment":
     """Create an environment in the directory passed as input and returns it.
 

@@ -334,6 +343,7 @@ def create_in_dir(
             string, it specifies the path to the view
         keep_relative: if True, develop paths are copied verbatim into the new environment file,
             otherwise they are made absolute
+        include_concrete: concrete environment names/paths to be included
     """
     initialize_environment_dir(root, envfile=init_file)
 

@@ -346,6 +356,12 @@ def create_in_dir(
         if with_view is not None:
             manifest.set_default_view(with_view)
 
+        if include_concrete is not None:
+            set_included_envs_to_env_paths(include_concrete)
+            validate_included_envs_exists(include_concrete)
+            validate_included_envs_concrete(include_concrete)
+            manifest.set_include_concrete(include_concrete)
+
         manifest.flush()
 
     except (spack.config.ConfigFormatError, SpackEnvironmentConfigError) as e:
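
A minimal sketch of the new keyword from the Python side, assuming both paths point at existing, already concretized environments (the paths and name are hypothetical):

    import spack.environment as ev

    combined = ev.create(
        "combined", include_concrete=["/path/to/env1", "/path/to/env2"]
    )

If a listed environment is missing or was never concretized, creation fails with the errors raised by the validation helpers in the next hunk.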
@@ -419,6 +435,67 @@ def ensure_env_root_path_exists():
     fs.mkdirp(env_root_path())
 
 
+def set_included_envs_to_env_paths(include_concrete: List[str]) -> None:
+    """If the included environment(s) is the environment name
+    it is replaced by the path to the environment
+
+    Args:
+        include_concrete: list of env name or path to env"""
+
+    for i, env_name in enumerate(include_concrete):
+        if is_env_dir(env_name):
+            include_concrete[i] = env_name
+        elif exists(env_name):
+            include_concrete[i] = root(env_name)
+
+
+def validate_included_envs_exists(include_concrete: List[str]) -> None:
+    """Checks that all of the included environments exist
+
+    Args:
+        include_concrete: list of already existing concrete environments to include
+
+    Raises:
+        SpackEnvironmentError: if any of the included environments do not exist
+    """
+
+    missing_envs = set()
+
+    for i, env_name in enumerate(include_concrete):
+        if not is_env_dir(env_name):
+            missing_envs.add(env_name)
+
+    if missing_envs:
+        msg = "The following environment(s) are missing: {0}".format(", ".join(missing_envs))
+        raise SpackEnvironmentError(msg)
+
+
+def validate_included_envs_concrete(include_concrete: List[str]) -> None:
+    """Checks that all of the included environments are concrete
+
+    Args:
+        include_concrete: list of already existing concrete environments to include
+
+    Raises:
+        SpackEnvironmentError: if any of the included environments are not concrete
+    """
+
+    non_concrete_envs = set()
+
+    for env_path in include_concrete:
+        if not os.path.exists(Environment(env_path).lock_path):
+            non_concrete_envs.add(Environment(env_path).name)
+
+    if non_concrete_envs:
+        msg = "The following environment(s) are not concrete: {0}\n" "Please run:".format(
+            ", ".join(non_concrete_envs)
+        )
+        for env in non_concrete_envs:
+            msg += f"\n\t`spack -e {env} concretize`"
+
+        raise SpackEnvironmentError(msg)
+
+
 def all_environment_names():
     """List the names of environments that currently exist."""
     # just return empty if the env path does not exist. A read-only
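
Assuming the helpers are importable as defined above, a failed validation surfaces roughly like this (the environment name is hypothetical; the message text follows the code above):

    import spack.environment as ev
    from spack.environment.environment import validate_included_envs_concrete

    try:
        validate_included_envs_concrete([ev.root("demo")])
    except ev.SpackEnvironmentError as e:
        print(e)
        # The following environment(s) are not concrete: demo
        # Please run:
        #     `spack -e demo concretize`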
@@ -585,7 +662,7 @@ def _current_root(self):
         if not os.path.islink(self.root):
             return None
 
-        root = os.readlink(self.root)
+        root = readlink(self.root)
         if os.path.isabs(root):
             return root
 

@@ -821,6 +898,18 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
         self.specs_by_hash: Dict[str, Spec] = {}
         #: Repository for this environment (memoized)
         self._repo = None
+
+        #: Environment paths for concrete (lockfile) included environments
+        self.included_concrete_envs: List[str] = []
+        #: First-level included concretized spec data from/to the lockfile.
+        self.included_concrete_spec_data: Dict[str, Dict[str, List[str]]] = {}
+        #: User specs from included environments from the last concretization
+        self.included_concretized_user_specs: Dict[str, List[Spec]] = {}
+        #: Roots from included environments with the last concretization, in order
+        self.included_concretized_order: Dict[str, List[str]] = {}
+        #: Concretized specs by hash from the included environments
+        self.included_specs_by_hash: Dict[str, Dict[str, Spec]] = {}
+
         #: Previously active environment
         self._previous_active = None
         self._dev_specs = None

@@ -858,7 +947,7 @@ def _read(self):
 
         if os.path.exists(self.lock_path):
             with open(self.lock_path) as f:
-                read_lock_version = self._read_lockfile(f)
+                read_lock_version = self._read_lockfile(f)["_meta"]["lockfile-version"]
 
             if read_lock_version == 1:
                 tty.debug(f"Storing backup of {self.lock_path} at {self._lock_backup_v1_path}")

@@ -926,6 +1015,20 @@ def add_view(name, values):
         if self.views == dict():
             self.views[default_view_name] = ViewDescriptor(self.path, self.view_path_default)
 
+    def _process_concrete_includes(self):
+        """Extract and load into memory included concrete spec data."""
+        self.included_concrete_envs = self.manifest[TOP_LEVEL_KEY].get(included_concrete_name, [])
+
+        if self.included_concrete_envs:
+            if os.path.exists(self.lock_path):
+                with open(self.lock_path) as f:
+                    data = self._read_lockfile(f)
+
+                if included_concrete_name in data:
+                    self.included_concrete_spec_data = data[included_concrete_name]
+            else:
+                self.include_concrete_envs()
+
     def _construct_state_from_manifest(self):
         """Set up user specs and views from the manifest file."""
         self.spec_lists = collections.OrderedDict()
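
A small sketch of how the stored list resurfaces on an Environment object, per ``_process_concrete_includes`` above (the path is hypothetical):

    import spack.environment as ev

    env = ev.Environment("/path/to/combined")
    # Populated from the manifest's include_concrete entry by
    # _process_concrete_includes():
    print(env.included_concrete_envs)  # e.g. ["/path/to/env1", "/path/to/env2"]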
@@ -942,6 +1045,31 @@ def _construct_state_from_manifest(self):
         self.spec_lists[user_speclist_name] = user_specs
 
         self._process_view(spack.config.get("view", True))
+        self._process_concrete_includes()
+
+    def all_concretized_user_specs(self) -> List[Spec]:
+        """Returns all of the concretized user specs of the environment and
+        its included environment(s)."""
+        concretized_user_specs = self.concretized_user_specs[:]
+        for included_specs in self.included_concretized_user_specs.values():
+            for included in included_specs:
+                # Don't duplicate included spec(s)
+                if included not in concretized_user_specs:
+                    concretized_user_specs.append(included)
+
+        return concretized_user_specs
+
+    def all_concretized_orders(self) -> List[str]:
+        """Returns all of the concretized order of the environment and
+        its included environment(s)."""
+        concretized_order = self.concretized_order[:]
+        for included_concretized_order in self.included_concretized_order.values():
+            for included in included_concretized_order:
+                # Don't duplicate included spec(s)
+                if included not in concretized_order:
+                    concretized_order.append(included)
+
+        return concretized_order
 
     @property
     def user_specs(self):

@@ -966,6 +1094,26 @@ def _read_dev_specs(self):
                 dev_specs[name] = local_entry
         return dev_specs
 
+    @property
+    def included_user_specs(self) -> SpecList:
+        """Included concrete user (or root) specs from last concretization."""
+        spec_list = SpecList()
+
+        if not self.included_concrete_envs:
+            return spec_list
+
+        def add_root_specs(included_concrete_specs):
+            # add specs from the include *and* any nested includes it may have
+            for env, info in included_concrete_specs.items():
+                for root_list in info["roots"]:
+                    spec_list.add(root_list["spec"])
+
+                if "include_concrete" in info:
+                    add_root_specs(info["include_concrete"])
+
+        add_root_specs(self.included_concrete_spec_data)
+        return spec_list
+
     def clear(self, re_read=False):
         """Clear the contents of the environment
 

@@ -977,9 +1125,15 @@ def clear(self, re_read=False):
         self.spec_lists[user_speclist_name] = SpecList()
 
         self._dev_specs = {}
-        self.concretized_user_specs = []  # user specs from last concretize
         self.concretized_order = []  # roots of last concretize, in order
+        self.concretized_user_specs = []  # user specs from last concretize
         self.specs_by_hash = {}  # concretized specs by hash
+
+        self.included_concrete_spec_data = {}  # concretized specs from lockfile of included envs
+        self.included_concretized_order = {}  # root specs of the included envs, keyed by env path
+        self.included_concretized_user_specs = {}  # user specs from last concretize's included env
+        self.included_specs_by_hash = {}  # concretized specs by hash from the included envs
+
         self.invalidate_repository_cache()
         self._previous_active = None  # previously active environment
         if not re_read:
@@ -1033,6 +1187,55 @@ def scope_name(self):
         """Name of the config scope of this environment's manifest file."""
         return self.manifest.scope_name
 
+    def include_concrete_envs(self):
+        """Copy and save the included envs' specs internally"""
+
+        lockfile_meta = None
+        root_hash_seen = set()
+        concrete_hash_seen = set()
+        self.included_concrete_spec_data = {}
+
+        for env_path in self.included_concrete_envs:
+            # Check that environment exists
+            if not is_env_dir(env_path):
+                raise SpackEnvironmentError(f"Unable to find env at {env_path}")
+
+            env = Environment(env_path)
+
+            with open(env.lock_path) as f:
+                lockfile_as_dict = env._read_lockfile(f)
+
+            # Lockfile_meta must match each env and use at least format version 5
+            if lockfile_meta is None:
+                lockfile_meta = lockfile_as_dict["_meta"]
+            elif lockfile_meta != lockfile_as_dict["_meta"]:
+                raise SpackEnvironmentError("All lockfile _meta values must match")
+            elif lockfile_meta["lockfile-version"] < 5:
+                raise SpackEnvironmentError("The lockfile format must be at version 5 or higher")
+
+            # Copy unique root specs from env
+            self.included_concrete_spec_data[env_path] = {"roots": []}
+            for root_dict in lockfile_as_dict["roots"]:
+                if root_dict["hash"] not in root_hash_seen:
+                    self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
+                    root_hash_seen.add(root_dict["hash"])
+
+            # Copy unique concrete specs from env
+            for concrete_spec in lockfile_as_dict["concrete_specs"]:
+                if concrete_spec not in concrete_hash_seen:
+                    self.included_concrete_spec_data[env_path].update(
+                        {"concrete_specs": lockfile_as_dict["concrete_specs"]}
+                    )
+                    concrete_hash_seen.add(concrete_spec)
+
+            if "include_concrete" in lockfile_as_dict.keys():
+                self.included_concrete_spec_data[env_path]["include_concrete"] = lockfile_as_dict[
+                    "include_concrete"
+                ]
+
+        self._read_lockfile_dict(self._to_lockfile_dict())
+        self.write()
+
     def destroy(self):
         """Remove this environment from Spack entirely."""
         shutil.rmtree(self.path)
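
The method above merges roots across lockfiles while keeping only the first occurrence of each DAG hash; a standalone sketch of that deduplication idea:

    from typing import List


    def merge_roots(lockfiles: List[dict]) -> List[dict]:
        """Merge "roots" lists, keeping the first occurrence of each hash."""
        seen = set()
        merged = []
        for lockfile in lockfiles:
            for root in lockfile["roots"]:
                if root["hash"] not in seen:
                    merged.append(root)
                    seen.add(root["hash"])
        return merged


    a = {"roots": [{"hash": "h1", "spec": "zlib"}]}
    b = {"roots": [{"hash": "h1", "spec": "zlib"}, {"hash": "h2", "spec": "cmake"}]}
    assert [r["hash"] for r in merge_roots([a, b])] == ["h1", "h2"]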
@@ -1232,6 +1435,10 @@ def concretize(self, force=False, tests=False):
         for spec in set(self.concretized_user_specs) - set(self.user_specs):
             self.deconcretize(spec, concrete=False)
 
+        # If a combined env, check updated spec is in the linked envs
+        if self.included_concrete_envs:
+            self.include_concrete_envs()
+
         # Pick the right concretization strategy
         if self.unify == "when_possible":
             return self._concretize_together_where_possible(tests=tests)

@@ -1415,7 +1622,7 @@ def _concretize_separately(self, tests=False):
         # Ensure we don't try to bootstrap clingo in parallel
         if spack.config.get("config:concretizer", "clingo") == "clingo":
             with spack.bootstrap.ensure_bootstrap_configuration():
-                spack.bootstrap.ensure_core_dependencies()
+                spack.bootstrap.ensure_clingo_importable_or_raise()
 
         # Ensure all the indexes have been built or updated, since
         # otherwise the processes in the pool may timeout on waiting

@@ -1704,8 +1911,14 @@ def _partition_roots_by_install_status(self):
         of per spec."""
         installed, uninstalled = [], []
         with spack.store.STORE.db.read_transaction():
-            for concretized_hash in self.concretized_order:
-                spec = self.specs_by_hash[concretized_hash]
+            for concretized_hash in self.all_concretized_orders():
+                if concretized_hash in self.specs_by_hash:
+                    spec = self.specs_by_hash[concretized_hash]
+                else:
+                    for env_path in self.included_specs_by_hash.keys():
+                        if concretized_hash in self.included_specs_by_hash[env_path]:
+                            spec = self.included_specs_by_hash[env_path][concretized_hash]
+                            break
                 if not spec.installed or (
                     spec.satisfies("dev_path=*") or spec.satisfies("^dev_path=*")
                 ):

@@ -1735,13 +1948,19 @@ def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
         specs = specs if specs is not None else roots
 
         # Extend the set of specs to overwrite with modified dev specs and their parents
-        install_args["overwrite"] = (
-            install_args.get("overwrite", []) + self._dev_specs_that_need_overwrite()
+        overwrite: Set[str] = set()
+        overwrite.update(install_args.get("overwrite", []), self._dev_specs_that_need_overwrite())
+        install_args["overwrite"] = overwrite
+
+        explicit: Set[str] = set()
+        explicit.update(
+            install_args.get("explicit", []),
+            (s.dag_hash() for s in specs),
+            (s.dag_hash() for s in roots),
         )
+        install_args["explicit"] = explicit
 
-        installs = [(spec.package, {**install_args, "explicit": spec in roots}) for spec in specs]
-
-        PackageInstaller(installs).install()
+        PackageInstaller([spec.package for spec in specs], install_args).install()
 
     def all_specs_generator(self) -> Iterable[Spec]:
         """Returns a generator for all concrete specs"""
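
A sketch mirroring ``install_specs()`` above: explicitness now travels as a set of DAG hashes inside one shared ``install_args`` dict rather than as a per-package boolean. The function wrapper and its inputs are assumptions for illustration:

    from typing import List

    import spack.spec
    from spack.installer import PackageInstaller


    def install_all(specs: List["spack.spec.Spec"], roots: List["spack.spec.Spec"]) -> None:
        """Install concrete specs, marking only the roots as explicit."""
        install_args = {
            "overwrite": set(),
            "explicit": {s.dag_hash() for s in roots},  # per-spec, not a bool
        }
        PackageInstaller([s.package for s in specs], install_args).install()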
@@ -1785,8 +2004,14 @@ def added_specs(self):
 
     def concretized_specs(self):
         """Tuples of (user spec, concrete spec) for all concrete specs."""
-        for s, h in zip(self.concretized_user_specs, self.concretized_order):
-            yield (s, self.specs_by_hash[h])
+        for s, h in zip(self.all_concretized_user_specs(), self.all_concretized_orders()):
+            if h in self.specs_by_hash:
+                yield (s, self.specs_by_hash[h])
+            else:
+                for env_path in self.included_specs_by_hash.keys():
+                    if h in self.included_specs_by_hash[env_path]:
+                        yield (s, self.included_specs_by_hash[env_path][h])
+                        break
 
     def concrete_roots(self):
         """Same as concretized_specs, except it returns the list of concrete

@@ -1915,8 +2140,7 @@ def _get_environment_specs(self, recurse_dependencies=True):
         If these specs appear under different user_specs, only one copy
         is added to the list returned.
         """
-        specs = [self.specs_by_hash[h] for h in self.concretized_order]
-
+        specs = [self.specs_by_hash[h] for h in self.all_concretized_orders()]
         if recurse_dependencies:
             specs.extend(
                 traverse.traverse_nodes(

@@ -1961,31 +2185,76 @@ def _to_lockfile_dict(self):
             "concrete_specs": concrete_specs,
         }
 
+        if self.included_concrete_envs:
+            data[included_concrete_name] = self.included_concrete_spec_data
+
         return data
 
     def _read_lockfile(self, file_or_json):
         """Read a lockfile from a file or from a raw string."""
         lockfile_dict = sjson.load(file_or_json)
         self._read_lockfile_dict(lockfile_dict)
-        return lockfile_dict["_meta"]["lockfile-version"]
+        return lockfile_dict
 
+    def set_included_concretized_user_specs(
+        self,
+        env_name: str,
+        env_info: Dict[str, Dict[str, Any]],
+        included_json_specs_by_hash: Dict[str, Dict[str, Any]],
+    ) -> Dict[str, Dict[str, Any]]:
+        """Sets all of the concretized user specs from included environments
+        to include those from nested included environments.
+
+        Args:
+            env_name: the name (technically the path) of the included environment
+            env_info: included concrete environment data
+            included_json_specs_by_hash: concrete spec data keyed by hash
+
+        Returns: updated specs_by_hash
+        """
+        self.included_concretized_order[env_name] = []
+        self.included_concretized_user_specs[env_name] = []
+
+        def add_specs(name, info, specs_by_hash):
+            # Add specs from the environment as well as any of its nested
+            # environments.
+            for root_info in info["roots"]:
+                self.included_concretized_order[name].append(root_info["hash"])
+                self.included_concretized_user_specs[name].append(Spec(root_info["spec"]))
+            if "concrete_specs" in info:
+                specs_by_hash.update(info["concrete_specs"])
+
+            if included_concrete_name in info:
+                for included_name, included_info in info[included_concrete_name].items():
+                    if included_name not in self.included_concretized_order:
+                        self.included_concretized_order[included_name] = []
+                        self.included_concretized_user_specs[included_name] = []
+                    add_specs(included_name, included_info, specs_by_hash)
+
+        add_specs(env_name, env_info, included_json_specs_by_hash)
+        return included_json_specs_by_hash
+
     def _read_lockfile_dict(self, d):
         """Read a lockfile dictionary into this environment."""
         self.specs_by_hash = {}
+        self.included_specs_by_hash = {}
+        self.included_concretized_user_specs = {}
+        self.included_concretized_order = {}
+
         roots = d["roots"]
         self.concretized_user_specs = [Spec(r["spec"]) for r in roots]
         self.concretized_order = [r["hash"] for r in roots]
         json_specs_by_hash = d["concrete_specs"]
+        included_json_specs_by_hash = {}
+
+        if included_concrete_name in d:
+            for env_name, env_info in d[included_concrete_name].items():
+                included_json_specs_by_hash.update(
+                    self.set_included_concretized_user_specs(
+                        env_name, env_info, included_json_specs_by_hash
+                    )
+                )
 
-        # Track specs by their lockfile key. Currently spack uses the finest
-        # grained hash as the lockfile key, while older formats used the build
-        # hash or a previous incarnation of the DAG hash (one that did not
-        # include build deps or package hash).
-        specs_by_hash = {}
-
-        # Track specs by their DAG hash, allows handling DAG hash collisions
-        first_seen = {}
         current_lockfile_format = d["_meta"]["lockfile-version"]
         try:
             reader = READER_CLS[current_lockfile_format]
@@ -1998,6 +2267,39 @@ def _read_lockfile_dict(self, d):
             msg += " You need to use a newer Spack version."
             raise SpackEnvironmentError(msg)
 
+        first_seen, self.concretized_order = self.filter_specs(
+            reader, json_specs_by_hash, self.concretized_order
+        )
+
+        for spec_dag_hash in self.concretized_order:
+            self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
+
+        if any(self.included_concretized_order.values()):
+            first_seen = {}
+
+            for env_name, concretized_order in self.included_concretized_order.items():
+                filtered_spec, self.included_concretized_order[env_name] = self.filter_specs(
+                    reader, included_json_specs_by_hash, concretized_order
+                )
+                first_seen.update(filtered_spec)
+
+            for env_path, spec_hashes in self.included_concretized_order.items():
+                self.included_specs_by_hash[env_path] = {}
+                for spec_dag_hash in spec_hashes:
+                    self.included_specs_by_hash[env_path].update(
+                        {spec_dag_hash: first_seen[spec_dag_hash]}
+                    )
+
+    def filter_specs(self, reader, json_specs_by_hash, order_concretized):
+        # Track specs by their lockfile key. Currently spack uses the finest
+        # grained hash as the lockfile key, while older formats used the build
+        # hash or a previous incarnation of the DAG hash (one that did not
+        # include build deps or package hash).
+        specs_by_hash = {}
+
+        # Track specs by their DAG hash, allows handling DAG hash collisions
+        first_seen = {}
+
         # First pass: Put each spec in the map ignoring dependencies
         for lockfile_key, node_dict in json_specs_by_hash.items():
             spec = reader.from_node_dict(node_dict)

@@ -2020,7 +2322,8 @@ def _read_lockfile_dict(self, d):
         # keep. This is only required as long as we support older lockfile
         # formats where the mapping from DAG hash to lockfile key is possibly
         # one-to-many.
-        for lockfile_key in self.concretized_order:
+
+        for lockfile_key in order_concretized:
             for s in specs_by_hash[lockfile_key].traverse():
                 if s.dag_hash() not in first_seen:
                     first_seen[s.dag_hash()] = s

@@ -2028,12 +2331,10 @@ def _read_lockfile_dict(self, d):
         # Now make sure concretized_order and our internal specs dict
         # contains the keys used by modern spack (i.e. the dag_hash
         # that includes build deps and package hash).
-        self.concretized_order = [
-            specs_by_hash[h_key].dag_hash() for h_key in self.concretized_order
-        ]
 
-        for spec_dag_hash in self.concretized_order:
-            self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
+        order_concretized = [specs_by_hash[h_key].dag_hash() for h_key in order_concretized]
+
+        return first_seen, order_concretized
 
     def write(self, regenerate: bool = True) -> None:
         """Writes an in-memory environment to its location on disk.
@@ -2046,7 +2347,7 @@ def write(self, regenerate: bool = True) -> None:
             regenerate: regenerate views and run post-write hooks as well as writing if True.
         """
         self.manifest_uptodate_or_warn()
-        if self.specs_by_hash:
+        if self.specs_by_hash or self.included_concrete_envs:
             self.ensure_env_directory_exists(dot_env=True)
             self.update_environment_repository()
             self.manifest.flush()

@@ -2545,6 +2846,19 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
             raise SpackEnvironmentError(msg) from e
         self.changed = True
 
+    def set_include_concrete(self, include_concrete: List[str]) -> None:
+        """Sets the included concrete environments in the manifest to the value(s) passed as input.
+
+        Args:
+            include_concrete: list of already existing concrete environments to include
+        """
+        self.pristine_configuration[included_concrete_name] = []
+
+        for env_path in include_concrete:
+            self.pristine_configuration[included_concrete_name].append(env_path)
+
+        self.changed = True
+
     def add_definition(self, user_spec: str, list_name: str) -> None:
         """Appends a user spec to the first active definition matching the name passed as argument.
 
@@ -2728,54 +3042,56 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
         for i, config_path in enumerate(reversed(includes)):
             # allow paths to contain spack config/environment variables, etc.
             config_path = substitute_path_variables(config_path)
 
             include_url = urllib.parse.urlparse(config_path)
 
-            # Transform file:// URLs to direct includes.
-            if include_url.scheme == "file":
-                config_path = urllib.request.url2pathname(include_url.path)
-
-            # Any other URL should be fetched.
-            elif include_url.scheme in ("http", "https", "ftp"):
-                # Stage any remote configuration file(s)
-                staged_configs = (
-                    os.listdir(self.config_stage_dir)
-                    if os.path.exists(self.config_stage_dir)
-                    else []
-                )
-                remote_path = urllib.request.url2pathname(include_url.path)
-                basename = os.path.basename(remote_path)
-                if basename in staged_configs:
-                    # Do NOT re-stage configuration files over existing
-                    # ones with the same name since there is a risk of
-                    # losing changes (e.g., from 'spack config update').
-                    tty.warn(
-                        "Will not re-stage configuration from {0} to avoid "
-                        "losing changes to the already staged file of the "
-                        "same name.".format(remote_path)
-                    )
-
-                    # Recognize the configuration stage directory
-                    # is flattened to ensure a single copy of each
-                    # configuration file.
-                    config_path = self.config_stage_dir
-                    if basename.endswith(".yaml"):
-                        config_path = os.path.join(config_path, basename)
-                else:
-                    staged_path = spack.config.fetch_remote_configs(
-                        config_path, str(self.config_stage_dir), skip_existing=True
-                    )
-                    if not staged_path:
-                        raise SpackEnvironmentError(
-                            "Unable to fetch remote configuration {0}".format(config_path)
-                        )
-                    config_path = staged_path
-
-            elif include_url.scheme:
-                raise ValueError(
-                    f"Unsupported URL scheme ({include_url.scheme}) for "
-                    f"environment include: {config_path}"
-                )
+            # If scheme is not valid, config_path is not a url
+            # of a type Spack is generally aware
+            if spack.util.url.validate_scheme(include_url.scheme):
+                # Transform file:// URLs to direct includes.
+                if include_url.scheme == "file":
+                    config_path = urllib.request.url2pathname(include_url.path)
+
+                # Any other URL should be fetched.
+                elif include_url.scheme in ("http", "https", "ftp"):
+                    # Stage any remote configuration file(s)
+                    staged_configs = (
+                        os.listdir(self.config_stage_dir)
+                        if os.path.exists(self.config_stage_dir)
+                        else []
+                    )
+                    remote_path = urllib.request.url2pathname(include_url.path)
+                    basename = os.path.basename(remote_path)
+                    if basename in staged_configs:
+                        # Do NOT re-stage configuration files over existing
+                        # ones with the same name since there is a risk of
+                        # losing changes (e.g., from 'spack config update').
+                        tty.warn(
+                            "Will not re-stage configuration from {0} to avoid "
+                            "losing changes to the already staged file of the "
+                            "same name.".format(remote_path)
+                        )
+
+                        # Recognize the configuration stage directory
+                        # is flattened to ensure a single copy of each
+                        # configuration file.
+                        config_path = self.config_stage_dir
+                        if basename.endswith(".yaml"):
+                            config_path = os.path.join(config_path, basename)
+                    else:
+                        staged_path = spack.config.fetch_remote_configs(
+                            config_path, str(self.config_stage_dir), skip_existing=True
+                        )
+                        if not staged_path:
+                            raise SpackEnvironmentError(
+                                "Unable to fetch remote configuration {0}".format(config_path)
+                            )
+                        config_path = staged_path
+
+                elif include_url.scheme:
+                    raise ValueError(
+                        f"Unsupported URL scheme ({include_url.scheme}) for "
+                        f"environment include: {config_path}"
+                    )
 
             # treat relative paths as relative to the environment
             if not os.path.isabs(config_path):
@@ -13,7 +13,6 @@
 import spack.config
 import spack.relocate
 from spack.util.elf import ElfParsingError, parse_elf
-from spack.util.executable import Executable
 
 
 def is_shared_library_elf(filepath):

@@ -141,7 +140,7 @@ def post_install(spec, explicit=None):
         return
 
     # Only enable on platforms using ELF.
-    if not spec.satisfies("platform=linux") and not spec.satisfies("platform=cray"):
+    if not spec.satisfies("platform=linux"):
         return
 
     # Disable this hook when bootstrapping, to avoid recursion.

@@ -149,10 +148,9 @@ def post_install(spec, explicit=None):
         return
 
     # Should failing to locate patchelf be a hard error?
-    patchelf_path = spack.relocate._patchelf()
-    if not patchelf_path:
+    patchelf = spack.relocate._patchelf()
+    if not patchelf:
         return
-    patchelf = Executable(patchelf_path)
 
     fixes = find_and_patch_sonames(spec.prefix, spec.package.non_bindable_shared_objects, patchelf)
 

@@ -12,6 +12,10 @@
 def post_install(spec, explicit):
     # Push package to all buildcaches with autopush==True
 
+    # Do nothing if spec is an external package
+    if spec.external:
+        return
+
     # Do nothing if package was not installed from source
     pkg = spec.package
     if pkg.installed_from_binary_cache:

@@ -117,7 +117,7 @@ def post_install(spec, explicit=None):
         return
 
     # Only enable on platforms using ELF.
-    if not spec.satisfies("platform=linux") and not spec.satisfies("platform=cray"):
+    if not spec.satisfies("platform=linux"):
        return
 
     visit_directory_tree(spec.prefix, ElfFilesWithRPathVisitor())

lib/spack/spack/hooks/windows_runtime_linkage.py (new file, +8)
@@ -0,0 +1,8 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+def post_install(spec, explicit=None):
+    spec.package.windows_establish_runtime_linkage()
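
For context, modules in ``spack.hooks`` expose optional functions such as ``post_install(spec, explicit)`` that Spack calls after each event, which is how the new file above gets invoked. A hypothetical hook module in the same shape (illustration only, not part of this diff):

    # e.g. lib/spack/spack/hooks/log_install.py (hypothetical)
    import llnl.util.tty as tty


    def post_install(spec, explicit=None):
        # Called after every successful install.
        tty.debug(f"installed {spec.name}@{spec.version} (explicit={explicit})")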
@@ -488,6 +488,7 @@ def _process_binary_cache_tarball(
 
     with timer.measure("install"), spack.util.path.filter_padding():
         binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
+        pkg.windows_establish_runtime_linkage()
 
         if hasattr(pkg, "_post_buildcache_install_hook"):
             pkg._post_buildcache_install_hook()

@@ -599,9 +600,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
             if node is spec:
                 spack.repo.PATH.dump_provenance(node, dest_pkg_dir)
             elif source_pkg_dir:
-                fs.install_tree(
-                    source_pkg_dir, dest_pkg_dir, allow_broken_symlinks=(sys.platform != "win32")
-                )
+                fs.install_tree(source_pkg_dir, dest_pkg_dir)
 
 
 def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:

@@ -760,12 +759,8 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
         if not self.pkg.spec.concrete:
             raise ValueError(f"{self.pkg.name} must have a concrete spec")
 
-        # Cache the package phase options with the explicit package,
-        # popping the options to ensure installation of associated
-        # dependencies is NOT affected by these options.
-
-        self.pkg.stop_before_phase = install_args.pop("stop_before", None)  # type: ignore[attr-defined] # noqa: E501
-        self.pkg.last_phase = install_args.pop("stop_at", None)  # type: ignore[attr-defined]
+        self.pkg.stop_before_phase = install_args.get("stop_before")  # type: ignore[attr-defined] # noqa: E501
+        self.pkg.last_phase = install_args.get("stop_at")  # type: ignore[attr-defined]
 
         # Cache the package id for convenience
         self.pkg_id = package_id(pkg.spec)
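
One plausible reason for the ``pop`` to ``get`` switch above: with the installer change elsewhere in this diff, ``install_args`` becomes a single dict shared by all build requests, so a destructive read in one constructor would hide the option from the next. A minimal sketch of the difference:

    # Shared-dict semantics of pop() vs get().
    shared = {"stop_at": "build"}

    assert shared.pop("stop_at", None) == "build"  # first reader consumes it
    assert shared.pop("stop_at", None) is None     # later readers see nothing

    shared = {"stop_at": "build"}
    assert shared.get("stop_at") == "build"  # non-destructive
    assert shared.get("stop_at") == "build"  # every reader sees the same value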
@@ -1075,19 +1070,17 @@ def flag_installed(self, installed: List[str]) -> None:
 
     @property
     def explicit(self) -> bool:
-        """The package was explicitly requested by the user."""
-        return self.is_root and self.request.install_args.get("explicit", True)
+        return self.pkg.spec.dag_hash() in self.request.install_args.get("explicit", [])
 
     @property
-    def is_root(self) -> bool:
-        """The package was requested directly, but may or may not be explicit
-        in an environment."""
+    def is_build_request(self) -> bool:
+        """The package was requested directly"""
         return self.pkg == self.request.pkg
 
     @property
     def use_cache(self) -> bool:
         _use_cache = True
-        if self.is_root:
+        if self.is_build_request:
             return self.request.install_args.get("package_use_cache", _use_cache)
         else:
             return self.request.install_args.get("dependencies_use_cache", _use_cache)

@@ -1095,7 +1088,7 @@ def use_cache(self) -> bool:
     @property
     def cache_only(self) -> bool:
         _cache_only = False
-        if self.is_root:
+        if self.is_build_request:
             return self.request.install_args.get("package_cache_only", _cache_only)
         else:
             return self.request.install_args.get("dependencies_cache_only", _cache_only)

@@ -1121,24 +1114,17 @@ def priority(self):
 
 class PackageInstaller:
     """
-    Class for managing the install process for a Spack instance based on a
-    bottom-up DAG approach.
+    Class for managing the install process for a Spack instance based on a bottom-up DAG approach.
 
-    This installer can coordinate concurrent batch and interactive, local
-    and distributed (on a shared file system) builds for the same Spack
-    instance.
+    This installer can coordinate concurrent batch and interactive, local and distributed (on a
+    shared file system) builds for the same Spack instance.
     """
 
-    def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]] = []) -> None:
-        """Initialize the installer.
-
-        Args:
-            installs (list): list of tuples, where each
-                tuple consists of a package (PackageBase) and its associated
-                install arguments (dict)
-        """
+    def __init__(
+        self, packages: List["spack.package_base.PackageBase"], install_args: dict
+    ) -> None:
         # List of build requests
-        self.build_requests = [BuildRequest(pkg, install_args) for pkg, install_args in installs]
+        self.build_requests = [BuildRequest(pkg, install_args) for pkg in packages]
 
         # Priority queue of build tasks
         self.build_pq: List[Tuple[Tuple[int, int], BuildTask]] = []
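
A migration sketch for callers, given the constructor change above (``pkgs`` is an assumed list of package objects; the old call shape is shown in comments):

    from typing import List

    import spack.package_base
    from spack.installer import PackageInstaller


    def install_explicitly(pkgs: List["spack.package_base.PackageBase"]) -> None:
        # Before: one (package, install_args) tuple per package, e.g.
        #   PackageInstaller([(pkg, {"explicit": True}) for pkg in pkgs]).install()
        # After: one package list plus a single shared install_args dict, with
        # explicitness carried as a set of DAG hashes inside it.
        PackageInstaller(pkgs, {"explicit": {p.spec.dag_hash() for p in pkgs}}).install()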
@@ -1561,7 +1547,7 @@ def _add_tasks(self, request: BuildRequest, all_deps):
         #
         # External and upstream packages need to get flagged as installed to
         # ensure proper status tracking for environment build.
-        explicit = request.install_args.get("explicit", True)
+        explicit = request.pkg.spec.dag_hash() in request.install_args.get("explicit", [])
         not_local = _handle_external_and_upstream(request.pkg, explicit)
         if not_local:
             self._flag_installed(request.pkg)

@@ -1682,10 +1668,6 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
             if not pkg.unit_test_check():
                 return
 
-        # Injecting information to know if this installation request is the root one
-        # to determine in BuildProcessInstaller whether installation is explicit or not
-        install_args["is_root"] = task.is_root
-
         try:
             self._setup_install_dir(pkg)
 

@@ -1698,10 +1680,6 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
             spack.package_base.PackageBase._verbose = spack.build_environment.start_build_process(
                 pkg, build_process, install_args
             )
-            # Currently this is how RPATH-like behavior is achieved on Windows, after install
-            # establish runtime linkage via Windows Runtime link object
-            # Note: this is a no-op on non Windows platforms
-            pkg.windows_establish_runtime_linkage()
             # Note: PARENT of the build process adds the new package to
             # the database, so that we don't need to re-read from file.
             spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)

@@ -2001,8 +1979,8 @@ def install(self) -> None:
 
         self._init_queue()
         fail_fast_err = "Terminating after first install failure"
-        single_explicit_spec = len(self.build_requests) == 1
-        failed_explicits = []
+        single_requested_spec = len(self.build_requests) == 1
+        failed_build_requests = []
 
         install_status = InstallStatus(len(self.build_pq))
 

@@ -2200,14 +2178,11 @@ def install(self) -> None:
                 if self.fail_fast:
                     raise InstallError(f"{fail_fast_err}: {str(exc)}", pkg=pkg)
 
-                # Terminate at this point if the single explicit spec has
-                # failed to install.
-                if single_explicit_spec and task.explicit:
-                    raise
-
-                # Track explicit spec id and error to summarize when done
-                if task.explicit:
-                    failed_explicits.append((pkg, pkg_id, str(exc)))
+                # Terminate when a single build request has failed, or summarize errors later.
+                if task.is_build_request:
+                    if single_requested_spec:
+                        raise
+                    failed_build_requests.append((pkg, pkg_id, str(exc)))
 
             finally:
                 # Remove the install prefix if anything went wrong during
@@ -2230,16 +2205,16 @@ def install(self) -> None:
 if request.install_args.get("install_package") and request.pkg_id not in self.installed
 ]

-if failed_explicits or missing:
-    for _, pkg_id, err in failed_explicits:
+if failed_build_requests or missing:
+    for _, pkg_id, err in failed_build_requests:
         tty.error(f"{pkg_id}: {err}")

     for _, pkg_id in missing:
         tty.error(f"{pkg_id}: Package was not installed")

-    if len(failed_explicits) > 0:
-        pkg = failed_explicits[0][0]
-        ids = [pkg_id for _, pkg_id, _ in failed_explicits]
+    if len(failed_build_requests) > 0:
+        pkg = failed_build_requests[0][0]
+        ids = [pkg_id for _, pkg_id, _ in failed_build_requests]
         tty.debug(
             "Associating installation failure with first failed "
             f"explicit package ({ids[0]}) from {', '.join(ids)}"
@@ -2298,7 +2273,7 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
 self.verbose = bool(install_args.get("verbose", False))

 # whether installation was explicitly requested by the user
-self.explicit = install_args.get("is_root", False) and install_args.get("explicit", True)
+self.explicit = pkg.spec.dag_hash() in install_args.get("explicit", [])

 # env before starting installation
 self.unmodified_env = install_args.get("unmodified_env", {})
@@ -2383,9 +2358,7 @@ def _install_source(self) -> None:
 src_target = os.path.join(pkg.spec.prefix, "share", pkg.name, "src")
 tty.debug(f"{self.pre} Copying source to {src_target}")

-fs.install_tree(
-    pkg.stage.source_path, src_target, allow_broken_symlinks=(sys.platform != "win32")
-)
+fs.install_tree(pkg.stage.source_path, src_target)

 def _real_install(self) -> None:
     import spack.builder
@@ -427,7 +427,7 @@ def make_argument_parser(**kwargs):
 parser.add_argument(
     "--color",
     action="store",
-    default=os.environ.get("SPACK_COLOR", "auto"),
+    default=None,
     choices=("always", "never", "auto"),
     help="when to colorize output (default: auto)",
 )
@@ -622,7 +622,8 @@ def setup_main_options(args):
 # with color
 color.try_enable_terminal_color_on_windows()
 # when to use color (takes always, auto, or never)
-color.set_color_when(args.color)
+if args.color is not None:
+    color.set_color_when(args.color)


 def allows_unknown_args(command):
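The two hunks above work together: with `default=None` the parser can tell "`--color` was never passed" apart from "`--color auto` was passed", so `setup_main_options` only overrides the color mode when the user asked for one. A minimal stdlib sketch of that pattern (not Spack's code; the fallback chain shown is an assumption for illustration):

```python
# Minimal stdlib sketch (not Spack's code): with default=None the program can
# distinguish "flag not given" from "flag given", and only then override other
# sources. The SPACK_COLOR fallback here is an assumed precedence for
# illustration: CLI flag > environment variable > "auto".
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument("--color", choices=("always", "never", "auto"), default=None)
args = parser.parse_args(["--color", "never"])

when = args.color or os.environ.get("SPACK_COLOR", "auto")
print(when)  # -> "never"; drop the flag and the environment/default wins
```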
@@ -11,7 +11,7 @@
 import urllib.parse
 import urllib.request
 from http.client import HTTPResponse
-from typing import NamedTuple, Tuple
+from typing import List, NamedTuple, Tuple
 from urllib.request import Request

 import llnl.util.tty as tty
@@ -27,6 +27,7 @@
 import spack.stage
 import spack.traverse
 import spack.util.crypto
+import spack.util.url

 from .image import Digest, ImageReference

@@ -69,6 +70,42 @@ def with_query_param(url: str, param: str, value: str) -> str:
 )


+def list_tags(ref: ImageReference, _urlopen: spack.oci.opener.MaybeOpen = None) -> List[str]:
+    """Retrieves the list of tags associated with an image, handling pagination."""
+    _urlopen = _urlopen or spack.oci.opener.urlopen
+    tags = set()
+    fetch_url = ref.tags_url()
+
+    while True:
+        # Fetch tags
+        request = Request(url=fetch_url)
+        response = _urlopen(request)
+        spack.oci.opener.ensure_status(request, response, 200)
+        tags.update(json.load(response)["tags"])
+
+        # Check for pagination
+        link_header = response.headers["Link"]
+
+        if link_header is None:
+            break
+
+        tty.debug(f"OCI tag pagination: {link_header}")
+
+        rel_next_value = spack.util.url.parse_link_rel_next(link_header)
+
+        if rel_next_value is None:
+            break
+
+        rel_next = urllib.parse.urlparse(rel_next_value)
+
+        if rel_next.scheme not in ("https", ""):
+            break
+
+        fetch_url = ref.endpoint(rel_next_value)
+
+    return sorted(tags)
+
+
 def upload_blob(
     ref: ImageReference,
     file: str,
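The new `list_tags` keeps requesting pages until the registry stops sending an RFC 5988 `Link: <url>; rel="next"` header. In Spack the header parsing lives in `spack.util.url.parse_link_rel_next`; the stand-in below only sketches the idea and is not the real implementation:

```python
# Simplified stand-in (not the real spack.util.url implementation) for parsing
# an RFC 5988 Link header and extracting the rel="next" pagination URL.
import re

def parse_link_rel_next(link_header: str):
    """Return the URL of the rel="next" link, or None if there is none."""
    for part in link_header.split(","):
        match = re.match(r'\s*<([^>]+)>(.*)', part)
        if match and 'rel="next"' in match.group(2):
            return match.group(1)
    return None

header = '</v2/repo/tags/list?last=v42&n=100>; rel="next"'
assert parse_link_rel_next(header) == "/v2/repo/tags/list?last=v42&n=100"
```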
@@ -398,7 +398,7 @@ def create_opener():
 opener = urllib.request.OpenerDirector()
 for handler in [
     urllib.request.UnknownHandler(),
-    urllib.request.HTTPSHandler(),
+    urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
     spack.util.web.SpackHTTPDefaultErrorHandler(),
     urllib.request.HTTPRedirectHandler(),
     urllib.request.HTTPErrorProcessor(),
@@ -418,18 +418,27 @@ def ensure_status(request: urllib.request.Request, response: HTTPResponse, statu
 )


-def default_retry(f, retries: int = 3, sleep=None):
+def default_retry(f, retries: int = 5, sleep=None):
     sleep = sleep or time.sleep

     def wrapper(*args, **kwargs):
         for i in range(retries):
             try:
                 return f(*args, **kwargs)
-            except urllib.error.HTTPError as e:
+            except (urllib.error.URLError, TimeoutError) as e:
                 # Retry on internal server errors, and rate limit errors
                 # Potentially this could take into account the Retry-After header
                 # if registries support it
-                if i + 1 != retries and (500 <= e.code < 600 or e.code == 429):
+                if i + 1 != retries and (
+                    (
+                        isinstance(e, urllib.error.HTTPError)
+                        and (500 <= e.code < 600 or e.code == 429)
+                    )
+                    or (
+                        isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError)
+                    )
+                    or isinstance(e, TimeoutError)
+                ):
                     # Exponential backoff
                     sleep(2**i)
                     continue
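After this change `default_retry` makes up to five attempts and treats network timeouts (a bare `TimeoutError`, or a `urllib.error.URLError` whose `reason` is one) as retryable alongside HTTP 5xx and 429, sleeping 1, 2, 4, 8 seconds between attempts. A hedged usage sketch; the fetch helper and URL are hypothetical:

```python
# Hedged usage sketch: the fetch helper and URL below are hypothetical, but the
# wrapper is the `default_retry` changed above, which now retries transient
# timeouts as well as HTTP 5xx/429 with exponential backoff (1, 2, 4, 8 s).
import urllib.request

def fetch_manifest(url: str) -> bytes:
    with urllib.request.urlopen(url, timeout=10) as response:
        return response.read()

# wrapped = default_retry(fetch_manifest)  # up to 5 attempts before giving up
# manifest = wrapped("https://registry.example.com/v2/repo/manifests/latest")
```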
@@ -3,22 +3,12 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 from ._operating_system import OperatingSystem
-from .cray_backend import CrayBackend
-from .cray_frontend import CrayFrontend
 from .freebsd import FreeBSDOs
 from .linux_distro import LinuxDistro
 from .mac_os import MacOs
 from .windows_os import WindowsOs

-__all__ = [
-    "OperatingSystem",
-    "LinuxDistro",
-    "MacOs",
-    "CrayFrontend",
-    "CrayBackend",
-    "WindowsOs",
-    "FreeBSDOs",
-]
+__all__ = ["OperatingSystem", "LinuxDistro", "MacOs", "WindowsOs", "FreeBSDOs"]

 #: List of all the Operating Systems known to Spack
-operating_systems = [LinuxDistro, MacOs, CrayFrontend, CrayBackend, WindowsOs, FreeBSDOs]
+operating_systems = [LinuxDistro, MacOs, WindowsOs, FreeBSDOs]
@@ -1,172 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-import os
-import re
-
-import llnl.util.tty as tty
-
-import spack.error
-import spack.version
-from spack.util.module_cmd import module
-
-from .linux_distro import LinuxDistro
-
-#: Possible locations of the Cray CLE release file,
-#: which we look at to get the CNL OS version.
-_cle_release_file = "/etc/opt/cray/release/cle-release"
-_clerelease_file = "/etc/opt/cray/release/clerelease"
-
-
-def read_cle_release_file():
-    """Read the CLE release file and return a dict with its attributes.
-
-    This file is present on newer versions of Cray.
-
-    The release file looks something like this::
-
-        RELEASE=6.0.UP07
-        BUILD=6.0.7424
-        ...
-
-    The dictionary we produce looks like this::
-
-        {
-            "RELEASE": "6.0.UP07",
-            "BUILD": "6.0.7424",
-            ...
-        }
-
-    Returns:
-        dict: dictionary of release attributes
-    """
-    with open(_cle_release_file) as release_file:
-        result = {}
-        for line in release_file:
-            # use partition instead of split() to ensure we only split on
-            # the first '=' in the line.
-            key, _, value = line.partition("=")
-            result[key] = value.strip()
-        return result
-
-
-def read_clerelease_file():
-    """Read the CLE release file and return the Cray OS version.
-
-    This file is present on older versions of Cray.
-
-    The release file looks something like this::
-
-        5.2.UP04
-
-    Returns:
-        str: the Cray OS version
-    """
-    with open(_clerelease_file) as release_file:
-        for line in release_file:
-            return line.strip()
-
-
-class CrayBackend(LinuxDistro):
-    """Compute Node Linux (CNL) is the operating system used for the Cray XC
-    series super computers. It is a very stripped down version of GNU/Linux.
-    Any compilers found through this operating system will be used with
-    modules. If updated, user must make sure that version and name are
-    updated to indicate that OS has been upgraded (or downgraded)
-    """
-
-    def __init__(self):
-        name = "cnl"
-        version = self._detect_crayos_version()
-        if version:
-            # If we found a CrayOS version, we do not want the information
-            # from LinuxDistro. In order to skip the logic from
-            # distro.linux_distribution, while still calling __init__
-            # methods further up the MRO, we skip LinuxDistro in the MRO and
-            # call the OperatingSystem superclass __init__ method
-            super(LinuxDistro, self).__init__(name, version)
-        else:
-            super().__init__()
-        self.modulecmd = module
-
-    def __str__(self):
-        return self.name + str(self.version)
-
-    @classmethod
-    def _detect_crayos_version(cls):
-        if os.path.isfile(_cle_release_file):
-            release_attrs = read_cle_release_file()
-            if "RELEASE" not in release_attrs:
-                # This Cray system uses a base OS not CLE/CNL
-                return None
-            v = spack.version.Version(release_attrs["RELEASE"])
-            return v[0]
-        elif os.path.isfile(_clerelease_file):
-            v = read_clerelease_file()
-            return spack.version.Version(v)[0]
-        else:
-            # Not all Cray systems run CNL on the backend.
-            # Systems running in what Cray calls "cluster" mode run other
-            # linux OSs under the Cray PE.
-            # So if we don't detect any Cray OS version on the system,
-            # we return None. We can't ever be sure we will get a Cray OS
-            # version.
-            # Returning None allows the calling code to test for the value
-            # being "True-ish" rather than requiring a try/except block.
-            return None
-
-    def arguments_to_detect_version_fn(self, paths):
-        import spack.compilers
-
-        command_arguments = []
-        for compiler_name in spack.compilers.supported_compilers():
-            cmp_cls = spack.compilers.class_for_compiler_name(compiler_name)
-
-            # If the compiler doesn't have a corresponding
-            # Programming Environment, skip to the next
-            if cmp_cls.PrgEnv is None:
-                continue
-
-            if cmp_cls.PrgEnv_compiler is None:
-                tty.die("Must supply PrgEnv_compiler with PrgEnv")
-
-            compiler_id = spack.compilers.CompilerID(self, compiler_name, None)
-            detect_version_args = spack.compilers.DetectVersionArgs(
-                id=compiler_id, variation=(None, None), language="cc", path="cc"
-            )
-            command_arguments.append(detect_version_args)
-        return command_arguments
-
-    def detect_version(self, detect_version_args):
-        import spack.compilers
-
-        modulecmd = self.modulecmd
-        compiler_name = detect_version_args.id.compiler_name
-        compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
-        output = modulecmd("avail", compiler_cls.PrgEnv_compiler)
-        version_regex = r"({0})/([\d\.]+[\d]-?[\w]*)".format(compiler_cls.PrgEnv_compiler)
-        matches = re.findall(version_regex, output)
-        version = tuple(version for _, version in matches if "classic" not in version)
-        compiler_id = detect_version_args.id
-        value = detect_version_args._replace(id=compiler_id._replace(version=version))
-        return value, None
-
-    def make_compilers(self, compiler_id, paths):
-        import spack.spec
-
-        name = compiler_id.compiler_name
-        cmp_cls = spack.compilers.class_for_compiler_name(name)
-        compilers = []
-        for v in compiler_id.version:
-            comp = cmp_cls(
-                spack.spec.CompilerSpec(name + "@=" + v),
-                self,
-                "any",
-                ["cc", "CC", "ftn"],
-                [cmp_cls.PrgEnv, name + "/" + v],
-            )
-
-            compilers.append(comp)
-        return compilers
@@ -1,105 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-import contextlib
-import os
-import re
-
-import llnl.util.filesystem as fs
-import llnl.util.lang
-import llnl.util.tty as tty
-
-from spack.util.environment import get_path
-from spack.util.module_cmd import module
-
-from .linux_distro import LinuxDistro
-
-
-@contextlib.contextmanager
-def unload_programming_environment():
-    """Context manager that unloads Cray Programming Environments."""
-    env_bu = None
-
-    # We rely on the fact that the PrgEnv-* modules set the PE_ENV
-    # environment variable.
-    if "PE_ENV" in os.environ:
-        # Copy environment variables to restore them after the compiler
-        # detection. We expect that the only thing PrgEnv-* modules do is
-        # the environment variables modifications.
-        env_bu = os.environ.copy()
-
-        # Get the name of the module from the environment variable.
-        prg_env = "PrgEnv-" + os.environ["PE_ENV"].lower()
-
-        # Unload the PrgEnv-* module. By doing this we intentionally
-        # provoke errors when the Cray's compiler wrappers are executed
-        # (Error: A PrgEnv-* modulefile must be loaded.) so they will not
-        # be detected as valid compilers by the overridden method. We also
-        # expect that the modules that add the actual compilers' binaries
-        # into the PATH environment variable (i.e. the following modules:
-        # 'intel', 'cce', 'gcc', etc.) will also be unloaded since they are
-        # specified as prerequisites in the PrgEnv-* modulefiles.
-        module("unload", prg_env)
-
-    yield
-
-    # Restore the environment.
-    if env_bu is not None:
-        os.environ.clear()
-        os.environ.update(env_bu)
-
-
-class CrayFrontend(LinuxDistro):
-    """Represents OS that runs on login and service nodes of the Cray platform.
-    It acts as a regular Linux without Cray-specific modules and compiler
-    wrappers."""
-
-    @property
-    def compiler_search_paths(self):
-        """Calls the default function but unloads Cray's programming
-        environments first.
-
-        This prevents from detecting Cray compiler wrappers and avoids
-        possible false detections.
-        """
-        import spack.compilers
-
-        with unload_programming_environment():
-            search_paths = get_path("PATH")
-
-        extract_path_re = re.compile(r"prepend-path[\s]*PATH[\s]*([/\w\.:-]*)")
-
-        for compiler_cls in spack.compilers.all_compiler_types():
-            # Check if the compiler class is supported on Cray
-            prg_env = getattr(compiler_cls, "PrgEnv", None)
-            compiler_module = getattr(compiler_cls, "PrgEnv_compiler", None)
-            if not (prg_env and compiler_module):
-                continue
-
-            # It is supported, check which versions are available
-            output = module("avail", compiler_cls.PrgEnv_compiler)
-            version_regex = r"({0})/([\d\.]+[\d]-?[\w]*)".format(compiler_cls.PrgEnv_compiler)
-            matches = re.findall(version_regex, output)
-            versions = tuple(version for _, version in matches if "classic" not in version)
-
-            # Now inspect the modules and add to paths
-            msg = "[CRAY FE] Detected FE compiler [name={0}, versions={1}]"
-            tty.debug(msg.format(compiler_module, versions))
-            for v in versions:
-                try:
-                    current_module = compiler_module + "/" + v
-                    out = module("show", current_module)
-                    match = extract_path_re.search(out)
-                    search_paths += match.group(1).split(":")
-                except Exception as e:
-                    msg = (
-                        "[CRAY FE] An unexpected error occurred while "
-                        "detecting FE compiler [compiler={0}, "
-                        " version={1}, error={2}]"
-                    )
-                    tty.debug(msg.format(compiler_cls.name, v, str(e)))
-
-        search_paths = list(llnl.util.lang.dedupe(search_paths))
-        return fs.search_paths_for_executables(*search_paths)
@@ -39,6 +39,7 @@
 )
 from spack.build_systems.cargo import CargoPackage
 from spack.build_systems.cmake import CMakePackage, generator
+from spack.build_systems.compiler import CompilerPackage
 from spack.build_systems.cuda import CudaPackage
 from spack.build_systems.generic import Package
 from spack.build_systems.gnu import GNUMirrorPackage
@@ -161,7 +161,11 @@ def windows_establish_runtime_linkage(self):

     Performs symlinking to incorporate rpath dependencies to Windows runtime search paths
     """
-    if sys.platform == "win32":
+    # If spec is an external, we should not be modifying its bin directory, as we would
+    # be doing in this method
+    # Spack should in general not modify things it has not installed
+    # we can reasonably expect externals to have their link interface properly established
+    if sys.platform == "win32" and not self.spec.external:
         self.win_rpath.add_library_dependent(*self.win_add_library_dependent())
         self.win_rpath.add_rpath(*self.win_add_rpath())
         self.win_rpath.establish_link()
@@ -1240,7 +1244,7 @@ def install_test_root(self):
 """Return the install test root directory."""
 tty.warn(
     "The 'pkg.install_test_root' property is deprecated with removal "
-    "expected v0.22. Use 'install_test_root(pkg)' instead."
+    "expected v0.23. Use 'install_test_root(pkg)' instead."
 )
 return install_test_root(self)

@@ -1877,7 +1881,10 @@ def do_install(self, **kwargs):
     verbose (bool): Display verbose build output (by default,
         suppresses it)
 """
-PackageInstaller([(self, kwargs)]).install()
+explicit = kwargs.get("explicit", True)
+if isinstance(explicit, bool):
+    kwargs["explicit"] = {self.spec.dag_hash()} if explicit else set()
+PackageInstaller([self], kwargs).install()

 # TODO (post-34236): Update tests and all packages that use this as a
 # TODO (post-34236): package method to the routine made available to
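This hunk, together with the `BuildProcessInstaller.__init__` change earlier in the diff, moves `explicit` from a per-request boolean plus an `is_root` flag to a set of DAG hashes shared by the whole installation. A stand-in sketch of the convention (not Spack's classes):

```python
# Stand-in sketch (not Spack's classes) of the convention introduced above:
# `explicit` travels as a set of DAG hashes, so the build process can ask
# "was *this* spec explicitly requested?" instead of combining a per-request
# boolean with an "is_root" flag.
class FakeSpec:
    def __init__(self, digest):
        self._digest = digest

    def dag_hash(self):
        return self._digest

root, dep = FakeSpec("abc123"), FakeSpec("def456")
install_args = {"explicit": {root.dag_hash()}}  # only the root was requested

assert root.dag_hash() in install_args.get("explicit", [])
assert dep.dag_hash() not in install_args.get("explicit", [])
```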
@@ -1898,7 +1905,7 @@ def cache_extra_test_sources(self, srcs):
 """
 msg = (
     "'pkg.cache_extra_test_sources(srcs) is deprecated with removal "
-    "expected in v0.22. Use 'cache_extra_test_sources(pkg, srcs)' "
+    "expected in v0.23. Use 'cache_extra_test_sources(pkg, srcs)' "
     "instead."
 )
 warnings.warn(msg)
@@ -2446,9 +2453,18 @@ def rpath(self):

 # on Windows, libraries of runtime interest are typically
 # stored in the bin directory
+# Do not include Windows system libraries in the rpath interface
+# these libraries are handled automatically by VS/VCVARS and adding
+# Spack derived system libs into the link path or address space of a program
+# can result in conflicting versions, which makes Spack packages less useable
 if sys.platform == "win32":
     rpaths = [self.prefix.bin]
-    rpaths.extend(d.prefix.bin for d in deps if os.path.isdir(d.prefix.bin))
+    rpaths.extend(
+        d.prefix.bin
+        for d in deps
+        if os.path.isdir(d.prefix.bin)
+        and "windows-system" not in getattr(d.package, "tags", [])
+    )
 else:
     rpaths = [self.prefix.lib, self.prefix.lib64]
     rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))
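The filter above consults a package's `tags` to keep Windows system libraries out of the runtime search path. A stand-in sketch of that predicate (not Spack's classes):

```python
# Stand-in sketch (not Spack's classes) of the tag-based filter added above:
# dependencies tagged "windows-system" stay out of the runtime search path.
class Dep:
    def __init__(self, bin_dir, tags):
        self.bin_dir, self.tags = bin_dir, tags

deps = [Dep("C:/spack/zlib/bin", []), Dep("C:/spack/win-sdk/bin", ["windows-system"])]
rpaths = [d.bin_dir for d in deps if "windows-system" not in getattr(d, "tags", [])]
assert rpaths == ["C:/spack/zlib/bin"]
```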
@@ -2555,7 +2571,12 @@ class PackageStillNeededError(InstallError):
 """Raised when package is still needed by another on uninstall."""

 def __init__(self, spec, dependents):
-    super().__init__("Cannot uninstall %s" % spec)
+    spec_fmt = spack.spec.DEFAULT_FORMAT + " /{hash:7}"
+    dep_fmt = "{name}{@versions} /{hash:7}"
+    super().__init__(
+        f"Cannot uninstall {spec.format(spec_fmt)}, "
+        f"needed by {[dep.format(dep_fmt) for dep in dependents]}"
+    )
     self.spec = spec
     self.dependents = dependents

@@ -6,7 +6,6 @@

 from ._functions import _host, by_name, platforms, prevent_cray_detection, reset
 from ._platform import Platform
-from .cray import Cray
 from .darwin import Darwin
 from .freebsd import FreeBSD
 from .linux import Linux

@@ -15,7 +14,6 @@

 __all__ = [
     "Platform",
-    "Cray",
     "Darwin",
     "Linux",
     "FreeBSD",

@@ -8,7 +8,6 @@

 import spack.util.environment

-from .cray import Cray
 from .darwin import Darwin
 from .freebsd import FreeBSD
 from .linux import Linux

@@ -16,7 +15,7 @@
 from .windows import Windows

 #: List of all the platform classes known to Spack
-platforms = [Cray, Darwin, Linux, Windows, FreeBSD, Test]
+platforms = [Darwin, Linux, Windows, FreeBSD, Test]


 @llnl.util.lang.memoized
@@ -2,253 +2,10 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os
 import os.path
-import platform
-import re
-
-import archspec.cpu
-
-import llnl.util.tty as tty
-
-import spack.target
-import spack.version
-from spack.operating_systems.cray_backend import CrayBackend
-from spack.operating_systems.cray_frontend import CrayFrontend
-from spack.paths import build_env_path
-from spack.util.executable import Executable
-from spack.util.module_cmd import module
-
-from ._platform import NoPlatformError, Platform
-
-_craype_name_to_target_name = {
-    "x86-cascadelake": "cascadelake",
-    "x86-naples": "zen",
-    "x86-rome": "zen2",
-    "x86-milan": "zen3",
-    "x86-skylake": "skylake_avx512",
-    "mic-knl": "mic_knl",
-    "interlagos": "bulldozer",
-    "abudhabi": "piledriver",
-}
-
-_ex_craype_dir = "/opt/cray/pe/cpe"
-_xc_craype_dir = "/opt/cray/pe/cdt"


 def slingshot_network():
     return os.path.exists("/opt/cray/pe") and (
         os.path.exists("/lib64/libcxi.so") or os.path.exists("/usr/lib64/libcxi.so")
     )
-
-
-def _target_name_from_craype_target_name(name):
-    return _craype_name_to_target_name.get(name, name)
-
-
-class Cray(Platform):
-    priority = 10
-
-    def __init__(self):
-        """Create a Cray system platform.
-
-        Target names should use craype target names but not include the
-        'craype-' prefix. Uses first viable target from:
-        self
-        envars [SPACK_FRONT_END, SPACK_BACK_END]
-        configuration file "targets.yaml" with keys 'front_end', 'back_end'
-        scanning /etc/bash/bashrc.local for back_end only
-        """
-        super().__init__("cray")
-
-        # Make all craype targets available.
-        for target in self._avail_targets():
-            name = _target_name_from_craype_target_name(target)
-            self.add_target(name, spack.target.Target(name, "craype-%s" % target))
-
-        self.back_end = os.environ.get("SPACK_BACK_END", self._default_target_from_env())
-        self.default = self.back_end
-        if self.back_end not in self.targets:
-            # We didn't find a target module for the backend
-            raise NoPlatformError()
-
-        # Setup frontend targets
-        for name in archspec.cpu.TARGETS:
-            if name not in self.targets:
-                self.add_target(name, spack.target.Target(name))
-        self.front_end = os.environ.get("SPACK_FRONT_END", archspec.cpu.host().name)
-        if self.front_end not in self.targets:
-            self.add_target(self.front_end, spack.target.Target(self.front_end))
-
-        front_distro = CrayFrontend()
-        back_distro = CrayBackend()
-
-        self.default_os = str(back_distro)
-        self.back_os = self.default_os
-        self.front_os = str(front_distro)
-
-        self.add_operating_system(self.back_os, back_distro)
-        if self.front_os != self.back_os:
-            self.add_operating_system(self.front_os, front_distro)
-
-    def setup_platform_environment(self, pkg, env):
-        """Change the linker to default dynamic to be more
-        similar to linux/standard linker behavior
-        """
-        # Unload these modules to prevent any silent linking or unnecessary
-        # I/O profiling in the case of darshan.
-        modules_to_unload = ["cray-mpich", "darshan", "cray-libsci", "altd"]
-        for mod in modules_to_unload:
-            module("unload", mod)
-
-        env.set("CRAYPE_LINK_TYPE", "dynamic")
-        cray_wrapper_names = os.path.join(build_env_path, "cray")
-
-        if os.path.isdir(cray_wrapper_names):
-            env.prepend_path("PATH", cray_wrapper_names)
-            env.prepend_path("SPACK_ENV_PATH", cray_wrapper_names)
-
-        # Makes spack installed pkg-config work on Crays
-        env.append_path("PKG_CONFIG_PATH", "/usr/lib64/pkgconfig")
-        env.append_path("PKG_CONFIG_PATH", "/usr/local/lib64/pkgconfig")
-
-        # CRAY_LD_LIBRARY_PATH is used at build time by the cray compiler
-        # wrappers to augment LD_LIBRARY_PATH. This is to avoid long load
-        # times at runtime. This behavior is not always respected on cray
-        # "cluster" systems, so we reproduce it here.
-        if os.environ.get("CRAY_LD_LIBRARY_PATH"):
-            env.prepend_path("LD_LIBRARY_PATH", os.environ["CRAY_LD_LIBRARY_PATH"])
-
-    @classmethod
-    def craype_type_and_version(cls):
-        if os.path.isdir(_ex_craype_dir):
-            craype_dir = _ex_craype_dir
-            craype_type = "EX"
-        elif os.path.isdir(_xc_craype_dir):
-            craype_dir = _xc_craype_dir
-            craype_type = "XC"
-        else:
-            return (None, None)
-
-        # Take the default version from known symlink path
-        default_path = os.path.join(craype_dir, "default")
-        if os.path.islink(default_path):
-            version = spack.version.Version(os.readlink(default_path))
-            return (craype_type, version)
-
-        # If no default version, sort available versions and return latest
-        versions_available = [spack.version.Version(v) for v in os.listdir(craype_dir)]
-        versions_available.sort(reverse=True)
-        if not versions_available:
-            return (craype_type, None)
-        return (craype_type, versions_available[0])
-
-    @classmethod
-    def detect(cls):
-        """
-        Detect whether this system requires CrayPE module support.
-
-        Systems with newer CrayPE (21.10 for EX systems, future work for CS and
-        XC systems) have compilers and MPI wrappers that can be used directly
-        by path. These systems are considered ``linux`` platforms.
-
-        For systems running an older CrayPE, we detect the Cray platform based
-        on the availability through `module` of the Cray programming
-        environment. If this environment is available, we can use it to find
-        compilers, target modules, etc. If the Cray programming environment is
-        not available via modules, then we will treat it as a standard linux
-        system, as the Cray compiler wrappers and other components of the Cray
-        programming environment are irrelevant without module support.
-        """
-        if "opt/cray" not in os.environ.get("MODULEPATH", ""):
-            return False
-
-        craype_type, craype_version = cls.craype_type_and_version()
-        if craype_type == "XC":
-            return True
-        if craype_type == "EX" and craype_version < spack.version.Version("21.10"):
-            return True
-        return False
-
-    def _default_target_from_env(self):
-        """Set and return the default CrayPE target loaded in a clean login
-        session.
-
-        A bash subshell is launched with a wiped environment and the list of
-        loaded modules is parsed for the first acceptable CrayPE target.
-        """
-        # env -i /bin/bash -lc echo $CRAY_CPU_TARGET 2> /dev/null
-        if getattr(self, "default", None) is None:
-            bash = Executable("/bin/bash")
-            output = bash(
-                "--norc",
-                "--noprofile",
-                "-lc",
-                "echo $CRAY_CPU_TARGET",
-                env={"TERM": os.environ.get("TERM", "")},
-                output=str,
-                error=os.devnull,
-            )
-
-            default_from_module = "".join(output.split())  # rm all whitespace
-            if default_from_module:
-                tty.debug("Found default module:%s" % default_from_module)
-                return default_from_module
-            else:
-                front_end = archspec.cpu.host()
-                # Look for the frontend architecture or closest ancestor
-                # available in cray target modules
-                avail = [_target_name_from_craype_target_name(x) for x in self._avail_targets()]
-                for front_end_possibility in [front_end] + front_end.ancestors:
-                    if front_end_possibility.name in avail:
-                        tty.debug("using front-end architecture or available ancestor")
-                        return front_end_possibility.name
-                else:
-                    tty.debug("using platform.machine as default")
-                    return platform.machine()
-
-    def _avail_targets(self):
-        """Return a list of available CrayPE CPU targets."""
-
-        def modules_in_output(output):
-            """Returns a list of valid modules parsed from modulecmd output"""
-            return [i for i in re.split(r"\s\s+|\n", output)]
-
-        def target_names_from_modules(modules):
-            # Craype- module prefixes that are not valid CPU targets.
-            targets = []
-            for mod in modules:
-                if "craype-" in mod:
-                    name = mod[7:]
-                    name = name.split()[0]
-                    _n = name.replace("-", "_")  # test for mic-knl/mic_knl
-                    is_target_name = name in archspec.cpu.TARGETS or _n in archspec.cpu.TARGETS
-                    is_cray_target_name = name in _craype_name_to_target_name
-                    if is_target_name or is_cray_target_name:
-                        targets.append(name)
-
-            return targets
-
-        def modules_from_listdir():
-            craype_default_path = "/opt/cray/pe/craype/default/modulefiles"
-            if os.path.isdir(craype_default_path):
-                return os.listdir(craype_default_path)
-            return []
-
-        if getattr(self, "_craype_targets", None) is None:
-            strategies = [
-                lambda: modules_in_output(module("avail", "-t", "craype-")),
-                modules_from_listdir,
-            ]
-            for available_craype_modules in strategies:
-                craype_modules = available_craype_modules()
-                craype_targets = target_names_from_modules(craype_modules)
-                if craype_targets:
-                    self._craype_targets = craype_targets
-                    break
-            else:
-                # If nothing is found add platform.machine()
-                # to avoid Spack erroring out
-                self._craype_targets = [platform.machine()]
-
-        return self._craype_targets
@@ -16,7 +16,7 @@
 import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.lang import memoized
-from llnl.util.symlink import symlink
+from llnl.util.symlink import readlink, symlink

 import spack.paths
 import spack.platforms

@@ -25,6 +25,7 @@
 import spack.store
 import spack.util.elf as elf
 import spack.util.executable as executable
+import spack.util.path

 from .relocate_text import BinaryFilePrefixReplacer, TextFilePrefixReplacer

@@ -565,7 +566,7 @@ def make_link_relative(new_links, orig_links):
     orig_links (list): original links
 """
 for new_link, orig_link in zip(new_links, orig_links):
-    target = os.readlink(orig_link)
+    target = readlink(orig_link)
     relative_target = os.path.relpath(target, os.path.dirname(orig_link))
     os.unlink(new_link)
     symlink(relative_target, new_link)

@@ -613,7 +614,7 @@ def relocate_links(links, prefix_to_prefix):
 """Relocate links to a new install prefix."""
 regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys()))
 for link in links:
-    old_target = os.readlink(link)
+    old_target = readlink(link)
     match = regex.match(old_target)

     # No match.
@@ -241,7 +241,7 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):

 Arguments:

-    type (str): String containing one or more of 'A', 'B', 'C'
+    type (str): String containing one or more of 'A', 'R', 'C'
     rev1 (str): Revision to compare against, default is 'HEAD^'
     rev2 (str): Revision to compare to rev1, default is 'HEAD'

@@ -264,7 +264,7 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
 lines = [] if not out else re.split(r"\s+", out)
 changed = set()
 for path in lines:
-    pkg_name, _, _ = path.partition(os.sep)
+    pkg_name, _, _ = path.partition("/")
     if pkg_name not in added and pkg_name not in removed:
         changed.add(pkg_name)

@@ -27,7 +27,7 @@
 from spack.error import SpackError
 from spack.util.crypto import checksum
 from spack.util.log_parse import parse_log_events
-from spack.util.web import urllib_ssl_cert_handler
+from spack.util.web import ssl_create_default_context

 from .base import Reporter
 from .extract import extract_test_parts

@@ -58,7 +58,8 @@
 # Initialize data structures common to each phase's report.
 CDASH_PHASES = set(MAP_PHASES_TO_CDASH.values())
 CDASH_PHASES.add("update")
+# CDash request timeout in seconds
+SPACK_CDASH_TIMEOUT = 45

 CDashConfiguration = collections.namedtuple(
     "CDashConfiguration", ["upload_url", "packages", "build", "site", "buildstamp", "track"]

@@ -428,7 +429,7 @@ def upload(self, filename):
 # Compute md5 checksum for the contents of this file.
 md5sum = checksum(hashlib.md5, filename, block_size=8192)

-opener = build_opener(HTTPSHandler(context=urllib_ssl_cert_handler()))
+opener = build_opener(HTTPSHandler(context=ssl_create_default_context()))
 with open(filename, "rb") as f:
     params_dict = {
         "build": self.buildname,

@@ -447,7 +448,7 @@ def upload(self, filename):
 # By default, urllib2 only support GET and POST.
 # CDash expects this file to be uploaded via PUT.
 request.get_method = lambda: "PUT"
-response = opener.open(request)
+response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
 if self.current_package_name not in self.buildIds:
     resp_value = response.read()
     if isinstance(resp_value, bytes):
@@ -9,7 +9,7 @@
 import tempfile
 from collections import OrderedDict

-from llnl.util.symlink import symlink
+from llnl.util.symlink import readlink, symlink

 import spack.binary_distribution as bindist
 import spack.error

@@ -26,7 +26,7 @@ def _relocate_spliced_links(links, orig_prefix, new_prefix):
 in our case. This still needs to be called after the copy to destination
 because it expects the new directory structure to be in place."""
 for link in links:
-    link_target = os.readlink(os.path.join(orig_prefix, link))
+    link_target = readlink(os.path.join(orig_prefix, link))
     link_target = re.sub("^" + orig_prefix, new_prefix, link_target)
     new_link_path = os.path.join(new_prefix, link)
     os.unlink(new_link_path)
@@ -9,13 +9,40 @@
 """
 from typing import Any, Dict

+LIST_OF_SPECS = {"type": "array", "items": {"type": "string"}}
+
 properties: Dict[str, Any] = {
     "concretizer": {
         "type": "object",
         "additionalProperties": False,
         "properties": {
             "reuse": {
-                "oneOf": [{"type": "boolean"}, {"type": "string", "enum": ["dependencies"]}]
+                "oneOf": [
+                    {"type": "boolean"},
+                    {"type": "string", "enum": ["dependencies"]},
+                    {
+                        "type": "object",
+                        "properties": {
+                            "roots": {"type": "boolean"},
+                            "include": LIST_OF_SPECS,
+                            "exclude": LIST_OF_SPECS,
+                            "from": {
+                                "type": "array",
+                                "items": {
+                                    "type": "object",
+                                    "properties": {
+                                        "type": {
+                                            "type": "string",
+                                            "enum": ["local", "buildcache", "external"],
+                                        },
+                                        "include": LIST_OF_SPECS,
+                                        "exclude": LIST_OF_SPECS,
+                                    },
+                                },
+                            },
+                        },
+                    },
+                ]
             },
             "enable_node_namespace": {"type": "boolean"},
             "targets": {
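For reference, a hypothetical `concretizer:reuse` value that the extended schema above accepts, written as the Python structure the validator sees (every spec string here is made up for illustration):

```python
# A hypothetical `concretizer:reuse` value the extended schema above accepts;
# all spec strings are invented for illustration.
reuse_config = {
    "roots": True,
    "include": ["%gcc"],          # only consider reusing specs matching these
    "exclude": ["openssl@:1.1"],  # never reuse these
    "from": [
        {"type": "local", "include": ["mpich"]},
        {"type": "external", "exclude": ["cmake"]},
    ],
}
```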
@@ -35,6 +35,7 @@
 {
     "include": {"type": "array", "default": [], "items": {"type": "string"}},
     "specs": spec_list_schema,
+    "include_concrete": {"type": "array", "default": [], "items": {"type": "string"}},
 },
 ),
 }
@@ -141,7 +141,7 @@
 "deprecatedProperties": {
     "properties": ["version"],
     "message": "setting version preferences in the 'all' section of packages.yaml "
-    "is deprecated and will be removed in v0.22\n\n\tThese preferences "
+    "is deprecated and will be removed in v0.23\n\n\tThese preferences "
     "will be ignored by Spack. You can set them only in package-specific sections "
     "of the same file.\n",
     "error": False,

@@ -197,7 +197,7 @@
 "properties": ["target", "compiler", "providers"],
 "message": "setting 'compiler:', 'target:' or 'provider:' preferences in "
 "a package-specific section of packages.yaml is deprecated, and will be "
-"removed in v0.22.\n\n\tThese preferences will be ignored by Spack, and "
+"removed in v0.23.\n\n\tThese preferences will be ignored by Spack, and "
 "can be set only in the 'all' section of the same file. "
 "You can run:\n\n\t\t$ spack audit configs\n\n\tto get better diagnostics, "
 "including files:lines where the deprecated attributes are used.\n\n"
@@ -6,6 +6,7 @@
 import collections.abc
 import copy
 import enum
+import functools
 import itertools
 import os
 import pathlib

@@ -313,6 +314,10 @@ def using_libc_compatibility() -> bool:
 return spack.platforms.host().name == "linux"


+def c_compiler_runs(compiler: spack.compiler.Compiler) -> bool:
+    return compiler.compiler_verbose_output is not None
+
+
 def extend_flag_list(flag_list, new_flags):
     """Extend a list of flags, preserving order and precedence.

@@ -808,12 +813,22 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
     A tuple of the solve result, the timer for the different phases of the
     solve, and the internal statistics from clingo.
 """
+# avoid circular import
+import spack.bootstrap
+
 output = output or DEFAULT_OUTPUT_CONFIGURATION
 timer = spack.util.timer.Timer()

 # Initialize the control object for the solver
 self.control = control or default_clingo_control()

+# ensure core deps are present on Windows
+# needs to modify active config scope, so cannot be run within
+# bootstrap config scope
+if sys.platform == "win32":
+    tty.debug("Ensuring basic dependencies {win-sdk, wgl} available")
+    spack.bootstrap.core.ensure_winsdk_external_or_raise()
+
 timer.start("setup")
 asp_problem = setup.setup(specs, reuse=reuse, allow_deprecated=allow_deprecated)
 if output.out is not None:
@@ -933,14 +948,26 @@ class ConcreteSpecsByHash(collections.abc.Mapping):

 def __init__(self) -> None:
     self.data: Dict[str, spack.spec.Spec] = {}
+    self.explicit: Set[str] = set()

 def __getitem__(self, dag_hash: str) -> spack.spec.Spec:
     return self.data[dag_hash]

+def explicit_items(self) -> Iterator[Tuple[str, spack.spec.Spec]]:
+    """Iterate on items that have been added explicitly, and not just as a dependency
+    of other nodes.
+    """
+    for h, s in self.items():
+        # We need to make an exception for gcc-runtime, until we can splice it.
+        if h in self.explicit or s.name == "gcc-runtime":
+            yield h, s
+
 def add(self, spec: spack.spec.Spec) -> bool:
     """Adds a new concrete spec to the mapping. Returns True if the spec was just added,
     False if the spec was already in the mapping.

+    Calling this function marks the spec as added explicitly.
+
     Args:
         spec: spec to be added

@@ -955,6 +982,7 @@ def add(self, spec: spack.spec.Spec) -> bool:
 raise ValueError(msg)

 dag_hash = spec.dag_hash()
+self.explicit.add(dag_hash)
 if dag_hash in self.data:
     return False

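A stand-in sketch of the bookkeeping these two hunks add: everything inserted through `add` is recorded in an `explicit` set, so `explicit_items` can later separate roots from specs that entered the mapping some other way (the gcc-runtime exception aside). This is an illustration, not the real class:

```python
# Stand-in for the bookkeeping above (not the real class): every hash inserted
# through add() is recorded as explicit, so later iteration can separate roots
# from specs that entered the mapping through another path.
data, explicit = {}, set()

def add(dag_hash, spec):
    explicit.add(dag_hash)  # mirrors ConcreteSpecsByHash.add
    data.setdefault(dag_hash, spec)

add("aaa111", "root-spec")
data["bbb222"] = "dependency-spec"  # dependencies land in data another way

assert [h for h in data if h in explicit] == ["aaa111"]
```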
@@ -1221,6 +1249,9 @@ def pkg_rules(self, pkg, tests):

 def trigger_rules(self):
     """Flushes all the trigger rules collected so far, and clears the cache."""
+    if not self._trigger_cache:
+        return
+
     self.gen.h2("Trigger conditions")
     for name in self._trigger_cache:
         cache = self._trigger_cache[name]

@@ -1234,6 +1265,9 @@ def trigger_rules(self):

 def effect_rules(self):
     """Flushes all the effect rules collected so far, and clears the cache."""
+    if not self._effect_cache:
+        return
+
     self.gen.h2("Imposed requirements")
     for name in self._effect_cache:
         cache = self._effect_cache[name]
@@ -1396,7 +1430,6 @@ def condition(
 raise ValueError(f"Must provide a name for anonymous condition: '{required_spec}'")

 with spec_with_name(required_spec, name):
-
     # Check if we can emit the requirements before updating the condition ID counter.
     # In this way, if a condition can't be emitted but the exception is handled in the
     # caller, we won't emit partial facts.
@@ -1614,6 +1647,31 @@ def external_packages(self):
         packages_yaml = _external_config_with_implicit_externals(spack.config.CONFIG)

         self.gen.h1("External packages")
+        spec_filters = []
+        concretizer_yaml = spack.config.get("concretizer")
+        reuse_yaml = concretizer_yaml.get("reuse")
+        if isinstance(reuse_yaml, typing.Mapping):
+            default_include = reuse_yaml.get("include", [])
+            default_exclude = reuse_yaml.get("exclude", [])
+            libc_externals = list(all_libcs())
+            for source in reuse_yaml.get("from", []):
+                if source["type"] != "external":
+                    continue
+
+                include = source.get("include", default_include)
+                if include:
+                    # Since libcs are implicit externals, we need to implicitly include them
+                    include = include + libc_externals
+                exclude = source.get("exclude", default_exclude)
+                spec_filters.append(
+                    SpecFilter(
+                        factory=lambda: [],
+                        is_usable=lambda x: True,
+                        include=include,
+                        exclude=exclude,
+                    )
+                )
+
         for pkg_name, data in packages_yaml.items():
             if pkg_name == "all":
                 continue
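The block added above reads the `concretizer:reuse` mapping and builds one `SpecFilter` per `from:` source of type `external`, so externals declared in `packages.yaml` can be narrowed with include/exclude constraints; libcs are appended to every non-empty include list because they are implicit externals. A sketch of the configuration shape this code consumes (keys are taken from the diff; the package names and constraints are invented for illustration):

# Illustrative "concretizer:reuse" mapping, as the code above would see it
# after YAML parsing (values here are made up).
reuse_yaml = {
    "include": [],               # default include list for every source
    "exclude": ["%gcc@9"],       # default exclude list for every source
    "from": [
        {"type": "local"},       # handled by ReusableSpecsSelector below
        {"type": "buildcache"},  # handled by ReusableSpecsSelector below
        {"type": "external", "include": ["cmake", "perl"]},
    ],
}

# Only "external" sources contribute a SpecFilter in external_packages():
external_sources = [s for s in reuse_yaml["from"] if s["type"] == "external"]
assert len(external_sources) == 1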
@@ -1622,7 +1680,6 @@ def external_packages(self):
             if pkg_name not in spack.repo.PATH:
                 continue

-            self.gen.h2("External package: {0}".format(pkg_name))
             # Check if the external package is buildable. If it is
             # not then "external(<pkg>)" is a fact, unless we can
             # reuse an already installed spec.
@@ -1632,7 +1689,17 @@ def external_packages(self):

             # Read a list of all the specs for this package
             externals = data.get("externals", [])
-            external_specs = [spack.spec.parse_with_version_concrete(x["spec"]) for x in externals]
+            candidate_specs = [
+                spack.spec.parse_with_version_concrete(x["spec"]) for x in externals
+            ]
+
+            external_specs = []
+            if spec_filters:
+                for current_filter in spec_filters:
+                    current_filter.factory = lambda: candidate_specs
+                    external_specs.extend(current_filter.selected_specs())
+            else:
+                external_specs.extend(candidate_specs)
+
             # Order the external versions to prefer more recent versions
             # even if specs in packages.yaml are not ordered that way
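Note how the filters built earlier are reused here: each was constructed with a placeholder `factory=lambda: []`, and on every package iteration the factory is replaced with a fresh lambda returning that package's `candidate_specs` before `selected_specs()` is called. A lambda closes over the variable, not the value it held at definition time, as this small demonstration shows:

# A lambda captures the variable, so the filter sees whatever list
# candidate_specs refers to at call time, not at definition time.
candidate_specs = []
factory = lambda: candidate_specs

candidate_specs = ["cmake@3.27", "cmake@3.24"]  # rebound later, per package
assert factory() == ["cmake@3.27", "cmake@3.24"]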
@@ -1872,6 +1939,11 @@ def _spec_clauses(
         for virtual in virtuals:
             clauses.append(fn.attr("virtual_on_incoming_edges", spec.name, virtual))

+        # If the spec is external and concrete, we allow all the libcs on the system
+        if spec.external and spec.concrete and using_libc_compatibility():
+            for libc in self.libcs:
+                clauses.append(fn.attr("compatible_libc", spec.name, libc.name, libc.version))
+
         # add all clauses from dependencies
         if transitive:
             # TODO: Eventually distinguish 2 deps on the same pkg (build and link)
@@ -2026,7 +2098,7 @@ def _supported_targets(self, compiler_name, compiler_version, targets):
             try:
                 with warnings.catch_warnings():
                     warnings.simplefilter("ignore")
-                    target.optimization_flags(compiler_name, compiler_version)
+                    target.optimization_flags(compiler_name, str(compiler_version))
                 supported.append(target)
             except archspec.cpu.UnsupportedMicroarchitecture:
                 continue
@@ -2303,7 +2375,7 @@ def register_concrete_spec(self, spec, possible):

     def concrete_specs(self):
         """Emit facts for reusable specs"""
-        for h, spec in self.reusable_and_possible.items():
+        for h, spec in self.reusable_and_possible.explicit_items():
             # this indicates that there is a spec like this installed
             self.gen.fact(fn.installed_hash(spec.name, h))
             # this describes what constraints it imposes on the solve
@@ -2368,7 +2440,7 @@ def setup(

         if using_libc_compatibility():
             for libc in self.libcs:
-                self.gen.fact(fn.allowed_libc(libc.name, libc.version))
+                self.gen.fact(fn.host_libc(libc.name, libc.version))

         if not allow_deprecated:
             self.gen.fact(fn.deprecated_versions_not_allowed())
@@ -2912,6 +2984,13 @@ class CompilerParser:
     def __init__(self, configuration) -> None:
         self.compilers: Set[KnownCompiler] = set()
         for c in all_compilers_in_config(configuration):
+            if using_libc_compatibility() and not c_compiler_runs(c):
+                tty.debug(
+                    f"the C compiler {c.cc} does not exist, or does not run correctly."
+                    f" The compiler {c.spec} will not be used during concretization."
+                )
+                continue
+
             if using_libc_compatibility() and not c.default_libc:
                 warnings.warn(
                     f"cannot detect libc from {c.spec}. The compiler will not be used "
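The new guard drops compiler entries whose C compiler is missing or broken, which matters once libc compatibility is enabled because the libc is detected through the compiler. As a rough idea of what such a probe can look like (a hypothetical stand-in only; the real `c_compiler_runs` may well work differently, e.g. by compiling and running a test program):

# Hypothetical stand-in for a "does this C compiler run?" check.
import shutil
import subprocess


def c_compiler_runs_stub(cc_path: str) -> bool:
    """Return True if the compiler binary exists and exits cleanly."""
    if not cc_path or shutil.which(cc_path) is None:
        return False
    try:
        proc = subprocess.run(
            [cc_path, "--version"], capture_output=True, timeout=10, check=False
        )
    except (OSError, subprocess.TimeoutExpired):
        return False
    return proc.returncode == 0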
@@ -3187,13 +3266,16 @@ class SpecBuilder:
             r"^.*_propagate$",
             r"^.*_satisfies$",
             r"^.*_set$",
+            r"^compatible_libc$",
             r"^dependency_holds$",
+            r"^external_conditions_hold$",
             r"^node_compiler$",
             r"^package_hash$",
             r"^root$",
             r"^track_dependencies$",
             r"^variant_default_value_from_cli$",
             r"^virtual_node$",
+            r"^virtual_on_incoming_edges$",
             r"^virtual_root$",
         ]
     )
@@ -3564,25 +3646,159 @@ def _has_runtime_dependencies(spec: spack.spec.Spec) -> bool:
     return True


+class SpecFilter:
+    """Given a method to produce a list of specs, this class can filter them according to
+    different criteria.
+    """
+
+    def __init__(
+        self,
+        factory: Callable[[], List[spack.spec.Spec]],
+        is_usable: Callable[[spack.spec.Spec], bool],
+        include: List[str],
+        exclude: List[str],
+    ) -> None:
+        """
+        Args:
+            factory: factory to produce a list of specs
+            is_usable: predicate that takes a spec in input and returns False if the spec
+                should not be considered for this filter, True otherwise.
+            include: if present, a "good" spec must match at least one entry in the list
+            exclude: if present, a "good" spec must not match any entry in the list
+        """
+        self.factory = factory
+        self.is_usable = is_usable
+        self.include = include
+        self.exclude = exclude
+
+    def is_selected(self, s: spack.spec.Spec) -> bool:
+        if not self.is_usable(s):
+            return False
+
+        if self.include and not any(s.satisfies(c) for c in self.include):
+            return False
+
+        if self.exclude and any(s.satisfies(c) for c in self.exclude):
+            return False
+
+        return True
+
+    def selected_specs(self) -> List[spack.spec.Spec]:
+        return [s for s in self.factory() if self.is_selected(s)]
+
+    @staticmethod
+    def from_store(configuration, include, exclude) -> "SpecFilter":
+        """Constructs a filter that takes the specs from the current store."""
+        packages = _external_config_with_implicit_externals(configuration)
+        is_reusable = functools.partial(_is_reusable, packages=packages, local=True)
+        factory = functools.partial(_specs_from_store, configuration=configuration)
+        return SpecFilter(factory=factory, is_usable=is_reusable, include=include, exclude=exclude)
+
+    @staticmethod
+    def from_buildcache(configuration, include, exclude) -> "SpecFilter":
+        """Constructs a filter that takes the specs from the configured buildcaches."""
+        packages = _external_config_with_implicit_externals(configuration)
+        is_reusable = functools.partial(_is_reusable, packages=packages, local=False)
+        return SpecFilter(
+            factory=_specs_from_mirror, is_usable=is_reusable, include=include, exclude=exclude
+        )
+
+
+def _specs_from_store(configuration):
+    store = spack.store.create(configuration)
+    with store.db.read_transaction():
+        return store.db.query(installed=True)
+
+
+def _specs_from_mirror():
+    try:
+        return spack.binary_distribution.update_cache_and_get_specs()
+    except (spack.binary_distribution.FetchCacheError, IndexError):
+        # this is raised when no mirrors had indices.
+        # TODO: update mirror configuration so it can indicate that the
+        # TODO: source cache (or any mirror really) doesn't have binaries.
+        return []
+
+
+class ReuseStrategy(enum.Enum):
+    ROOTS = enum.auto()
+    DEPENDENCIES = enum.auto()
+    NONE = enum.auto()
+
+
+class ReusableSpecsSelector:
+    """Selects specs that can be reused during concretization."""
+
+    def __init__(self, configuration: spack.config.Configuration) -> None:
+        self.configuration = configuration
+        self.store = spack.store.create(configuration)
+        self.reuse_strategy = ReuseStrategy.ROOTS
+
+        reuse_yaml = self.configuration.get("concretizer:reuse", False)
+        self.reuse_sources = []
+        if not isinstance(reuse_yaml, typing.Mapping):
+            if reuse_yaml is False:
+                self.reuse_strategy = ReuseStrategy.NONE
+            if reuse_yaml == "dependencies":
+                self.reuse_strategy = ReuseStrategy.DEPENDENCIES
+            self.reuse_sources.extend(
+                [
+                    SpecFilter.from_store(
+                        configuration=self.configuration, include=[], exclude=[]
+                    ),
+                    SpecFilter.from_buildcache(
+                        configuration=self.configuration, include=[], exclude=[]
+                    ),
+                ]
+            )
+        else:
+            roots = reuse_yaml.get("roots", True)
+            if roots is True:
+                self.reuse_strategy = ReuseStrategy.ROOTS
+            else:
+                self.reuse_strategy = ReuseStrategy.DEPENDENCIES
+            default_include = reuse_yaml.get("include", [])
+            default_exclude = reuse_yaml.get("exclude", [])
+            default_sources = [{"type": "local"}, {"type": "buildcache"}]
+            for source in reuse_yaml.get("from", default_sources):
+                include = source.get("include", default_include)
+                exclude = source.get("exclude", default_exclude)
+                if source["type"] == "local":
+                    self.reuse_sources.append(
+                        SpecFilter.from_store(self.configuration, include=include, exclude=exclude)
+                    )
+                elif source["type"] == "buildcache":
+                    self.reuse_sources.append(
+                        SpecFilter.from_buildcache(
+                            self.configuration, include=include, exclude=exclude
+                        )
+                    )
+
+    def reusable_specs(self, specs: List[spack.spec.Spec]) -> List[spack.spec.Spec]:
+        if self.reuse_strategy == ReuseStrategy.NONE:
+            return []
+
+        result = []
+        for reuse_source in self.reuse_sources:
+            result.extend(reuse_source.selected_specs())
+
+        # If we only want to reuse dependencies, remove the root specs
+        if self.reuse_strategy == ReuseStrategy.DEPENDENCIES:
+            result = [spec for spec in result if not any(root in spec for root in specs)]
+
+        return result
+
+
 class Solver:
     """This is the main external interface class for solving.

     It manages solver configuration and preferences in one place. It sets up the solve
     and passes the setup method to the driver, as well.
-
-    Properties of interest:
-
-    ``reuse (bool)``
-      Whether to try to reuse existing installs/binaries
-
     """

     def __init__(self):
         self.driver = PyclingoDriver()
-        # These properties are settable via spack configuration, and overridable
-        # by setting them directly as properties.
-        self.reuse = spack.config.get("concretizer:reuse", True)
+        self.selector = ReusableSpecsSelector(configuration=spack.config.CONFIG)

     @staticmethod
     def _check_input_and_extract_concrete_specs(specs):
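To see the selection semantics of the `SpecFilter` added above in isolation, here is a small usage sketch with a stand-in spec type (the real filters operate on `spack.spec.Spec` and use `_is_reusable` as the usability predicate):

# Stand-in spec type so the example runs outside of Spack; only the
# satisfies() method used by SpecFilter.is_selected() is modeled.
class FakeSpec:
    def __init__(self, name: str) -> None:
        self.name = name

    def satisfies(self, constraint: str) -> bool:
        return self.name == constraint


specs = [FakeSpec("zlib"), FakeSpec("cmake"), FakeSpec("openssl")]
spec_filter = SpecFilter(
    factory=lambda: specs,        # where candidate specs come from
    is_usable=lambda s: True,     # accept everything for the demo
    include=["zlib", "openssl"],  # keep specs matching any include entry...
    exclude=["openssl"],          # ...then drop specs matching any exclude entry
)
print([s.name for s in spec_filter.selected_specs()])  # -> ['zlib']

An empty include list means no include filtering (and likewise for exclude), which is why `ReusableSpecsSelector` can pass `include=[], exclude=[]` for the plain `reuse: true` case.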
@@ -3596,39 +3812,6 @@ def _check_input_and_extract_concrete_specs(specs):
             spack.spec.Spec.ensure_valid_variants(s)
         return reusable

-    def _reusable_specs(self, specs):
-        reusable_specs = []
-        if self.reuse:
-            packages = _external_config_with_implicit_externals(spack.config.CONFIG)
-            # Specs from the local Database
-            with spack.store.STORE.db.read_transaction():
-                reusable_specs.extend(
-                    s
-                    for s in spack.store.STORE.db.query(installed=True)
-                    if _is_reusable(s, packages, local=True)
-                )
-
-            # Specs from buildcaches
-            try:
-                reusable_specs.extend(
-                    s
-                    for s in spack.binary_distribution.update_cache_and_get_specs()
-                    if _is_reusable(s, packages, local=False)
-                )
-            except (spack.binary_distribution.FetchCacheError, IndexError):
-                # this is raised when no mirrors had indices.
-                # TODO: update mirror configuration so it can indicate that the
-                # TODO: source cache (or any mirror really) doesn't have binaries.
-                pass
-
-        # If we only want to reuse dependencies, remove the root specs
-        if self.reuse == "dependencies":
-            reusable_specs = [
-                spec for spec in reusable_specs if not any(root in spec for root in specs)
-            ]
-
-        return reusable_specs
-
     def solve(
         self,
         specs,
@@ -3654,7 +3837,7 @@ def solve(
         # Check upfront that the variants are admissible
         specs = [s.lookup_hash() for s in specs]
         reusable_specs = self._check_input_and_extract_concrete_specs(specs)
-        reusable_specs.extend(self._reusable_specs(specs))
+        reusable_specs.extend(self.selector.reusable_specs(specs))
         setup = SpackSolverSetup(tests=tests)
         output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=setup_only)
         result, _, _ = self.driver.solve(
@@ -3683,7 +3866,7 @@ def solve_in_rounds(
         """
         specs = [s.lookup_hash() for s in specs]
         reusable_specs = self._check_input_and_extract_concrete_specs(specs)
-        reusable_specs.extend(self._reusable_specs(specs))
+        reusable_specs.extend(self.selector.reusable_specs(specs))
         setup = SpackSolverSetup(tests=tests)

         # Tell clingo that we don't have to solve all the inputs at once
Some files were not shown because too many files have changed in this diff.