Compare commits: v0.22.3...develop-20
230 commits
Commit SHA1s (the author, date, and message columns did not survive in this view):

72deb53832 7c87253fd8 1136aedd08 24e1b56268 eef6a79b35
556a36cbd7 8aa490d6b7 d9d085da10 d88d720577 1d670ae744
35ad6f52c1 b61bae7640 8b7abace8b 5cf98d9564 973a961cb5
868d0cb957 497f3a3832 9843f41bce e54fefc2b7 90c0889533
6696e82ce7 dcc55d53db 92000e81b8 125175ae25 f60e548a0d
04dc16a6b1 27b90e38db 7e5ce3ba48 f5f7cfdc8f 3e1a562312
ce4d962faa b9816a97fc f7b9c30456 884620a38a 7503a41773
9a5fc6b4a3 a31aeed167 71f542a951 322bd48788 b752fa59d4
d53e4cc426 ee4b7fa3a1 d6f02c86d9 62efde8e3c bda1d94d49
3f472039c5 912ef34206 9c88a48a73 4bf5cc9a9a 08834e2b03
8020a111df 86fb547f7c b9556c7c44 7bdb106b1b 2b191cd7f4
774f0a4e60 faf11efa72 5a99142b41 a3aca0242a 72f276fab3
21139945df 900bd2f477 29d4a5af44 dd9b7ed6a7 09ff74be62
a94ebfea11 8f5fe1d123 d4fb58efa3 ce900346cc 7cb64e465f
eb70c9f5b9 a28405700e f8f4d94d7a 32dfb522d6 c61c707aa5
60d10848c8 dcd6b530f9 419f0742a0 c99174798b 8df2a4b511
c174cf6830 5eebd65366 625f5323c0 e05a32cead c69af5d1e5
1ac2ee8043 36af1c1c73 e2fa087002 df02bfbad2 fecb63843e
b33e2d09d3 f8054aa21a 8f3a2acc54 d1a20908b8 dd781f7368
9bcc43c4c1 77c83af17d 574bd2db99 a76f37da96 9e75f3ec0a
4d42d45897 a4b4bfda73 1bcdd3a57e 297a3a1bc9 8d01e8c978
6be28aa303 5e38310515 ddfed65485 2a16d8bfa8 6a40a50a29
b2924f68c0 41ffe36636 24edc72252 83b38a26a0 914d785e3b
f99f642fa8 e0bf3667e3 a24ca50fed 51e9f37252 453900c884
4696459d2d ad1e3231e5 2ef7eb1826 fe86019f9a 9dbb18219f
451a977de0 e604929a4c 9d591f9f7c f8ad915100 cbbabe6920
81fe460194 b894f996c0 1ce09847d9 722d401394 e6f04d5ef9
b8e3ecbf00 d189387c24 9e96ddc5ae 543bd189af 43291aa723
d0589285f7 d079aaa083 6c65977e0d 1b5d786cf5 4cf00645bd
e9149cfc3c a5c8111076 c3576f712d 410e6a59b7 bd2b2fb75a
7ae318efd0 73e9d56647 f87a752b63 ae2fec30c3 1af5564cbe
a8f057a701 7f3dd38ccc 8e9adefcd5 d276f9700f 4f111659ec
eaf330f2a8 cdaeb74dc7 fbaac46604 7f6210ee90 63f6e6079a
d4fd6caae0 fd3c18b6fd 725f427f25 32b3e91ef7 b7e4602268
4a98d4db93 9d6bf373be cff35c4987 d594f84b8f f8f01c336c
12e3665df3 fa4778b9fc 66d297d420 56251c11f3 40bf9a179b
095aba0b9f 4270136598 f73d7d2dce 567566da08 30a9ab749d
8160a96b27 10414d3e6c 1d96c09094 e7112fbc6a b79761b7eb
3381899c69 c7cf5eabc1 d88fa5cf8e 2ed0e3d737 506a40cac1
447739fcef e60f6f4a6e 7df35d0da0 71b035ece1 86a134235e
24cd0da7fb 762833a663 636d479e5f f2184f26fa e1686eef7c
314893982e 9ab6c30a3d ddf94291d4 5d1038c512 2e40c88d50
2bcba57757 37330e5e2b b4411cf2db 65d1ae083c 0b8faa3918
f077c7e33b 9d7410d22e e295730d0e 868327ee14 f5430b16bc
2446695113 e0c6cca65c 84ed4cd331 f6d50f790e d3c3d23d1e
33752c2b55 26759249ca 8b4cbbe7b3 be71f9fdc4 05c1e7ecc2
.github/workflows/audit.yaml (vendored): 4 changed lines

@@ -28,7 +28,7 @@ jobs:
      run:
        shell: ${{ matrix.system.shell }}
    steps:
-     - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+     - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: ${{inputs.python_version}}
@@ -61,7 +61,7 @@ jobs:
          ./share/spack/qa/validate_last_exit.ps1
          spack -d audit externals
          ./share/spack/qa/validate_last_exit.ps1
-     - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+     - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
        if: ${{ inputs.with_coverage == 'true' }}
        with:
          flags: unittests,audits
.github/workflows/bootstrap.yml (vendored): 8 changed lines

@@ -37,7 +37,7 @@ jobs:
          make patch unzip which xz python3 python3-devel tree \
            cmake bison
      - name: Checkout
-       uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+       uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - name: Bootstrap clingo
@@ -60,7 +60,7 @@ jobs:
        run: |
          brew install cmake bison tree
      - name: Checkout
-       uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+       uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -92,7 +92,7 @@ jobs:
        run: |
          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
      - name: Checkout
-       uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+       uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - name: Bootstrap GnuPG
@@ -121,7 +121,7 @@ jobs:
        run: |
          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
      - name: Checkout
-       uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+       uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
.github/workflows/build-containers.yml (vendored): 2 changed lines

@@ -56,7 +56,7 @@ jobs:
    if: github.repository == 'spack/spack'
    steps:
      - name: Checkout
-       uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+       uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29

      - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
        id: docker_meta
.github/workflows/ci.yaml (vendored): 2 changed lines

@@ -36,7 +36,7 @@ jobs:
      core: ${{ steps.filter.outputs.core }}
      packages: ${{ steps.filter.outputs.packages }}
    steps:
-     - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+     - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
.github/workflows/nightly-win-builds.yml (vendored): 2 changed lines

@@ -14,7 +14,7 @@ jobs:
  build-paraview-deps:
    runs-on: windows-latest
    steps:
-     - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+     - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
.github/workflows/style/requirements.txt (vendored): 2 changed lines

@@ -3,5 +3,5 @@ clingo==5.7.1
 flake8==7.0.0
 isort==5.13.2
 mypy==1.8.0
- types-six==1.16.21.9
+ types-six==1.16.21.20240513
 vermin==1.6.0
.github/workflows/unit_tests.yaml (vendored): 20 changed lines

@@ -51,7 +51,7 @@ jobs:
            on_develop: false

    steps:
-     - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+     - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -91,7 +91,7 @@ jobs:
          UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
        run: |
          share/spack/qa/run-unit-tests
-     - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+     - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
        with:
          flags: unittests,linux,${{ matrix.concretizer }}
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -100,7 +100,7 @@ jobs:
  shell:
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+     - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -124,7 +124,7 @@ jobs:
          COVERAGE: true
        run: |
          share/spack/qa/run-shell-tests
-     - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+     - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
        with:
          flags: shelltests,linux
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -141,7 +141,7 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-     - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+     - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
      - name: Setup repo and non-root user
        run: |
          git --version
@@ -160,7 +160,7 @@ jobs:
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+     - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -185,7 +185,7 @@ jobs:
          SPACK_TEST_SOLVER: clingo
        run: |
          share/spack/qa/run-unit-tests
-     - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+     - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
        with:
          flags: unittests,linux,clingo
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -195,10 +195,10 @@ jobs:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
-       os: [macos-latest, macos-14]
+       os: [macos-13, macos-14]
        python-version: ["3.11"]
    steps:
-     - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+     - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -223,7 +223,7 @@ jobs:
          $(which spack) solve zlib
          common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
          $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-     - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+     - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
        with:
          flags: unittests,macos
          token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/valid-style.yml (vendored): 6 changed lines

@@ -18,7 +18,7 @@ jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+     - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
  style:
    runs-on: ubuntu-latest
    steps:
-     - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+     - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -70,7 +70,7 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-     - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+     - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
      - name: Setup repo and non-root user
        run: |
          git --version
.github/workflows/windows_python.yml (vendored): 10 changed lines

@@ -15,7 +15,7 @@ jobs:
  unit-tests:
    runs-on: windows-latest
    steps:
-     - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+     - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -33,7 +33,7 @@ jobs:
          ./share/spack/qa/validate_last_exit.ps1
          coverage combine -a
          coverage xml
-     - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+     - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
        with:
          flags: unittests,windows
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -41,7 +41,7 @@ jobs:
  unit-tests-cmd:
    runs-on: windows-latest
    steps:
-     - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+     - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -59,7 +59,7 @@ jobs:
          ./share/spack/qa/validate_last_exit.ps1
          coverage combine -a
          coverage xml
-     - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+     - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
        with:
          flags: unittests,windows
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -67,7 +67,7 @@ jobs:
  build-abseil:
    runs-on: windows-latest
    steps:
-     - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+     - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
CHANGELOG.md: 45 changed lines

@@ -1,3 +1,48 @@
+ # v0.21.2 (2024-03-01)
+
+ ## Bugfixes
+
+ - Containerize: accommodate nested or pre-existing spack-env paths (#41558)
+ - Fix setup-env script, when going back and forth between instances (#40924)
+ - Fix using fully-qualified namespaces from root specs (#41957)
+ - Fix a bug when a required provider is requested for multiple virtuals (#42088)
+ - OCI buildcaches:
+   - only push in parallel when forking (#42143)
+   - use pickleable errors (#42160)
+ - Fix using sticky variants in externals (#42253)
+ - Fix a rare issue with conditional requirements and multi-valued variants (#42566)
+
+ ## Package updates
+ - rust: add v1.75, rework a few variants (#41161, #41903)
+ - py-transformers: add v4.35.2 (#41266)
+ - mgard: fix OpenMP on AppleClang (#42933)
+
+ # v0.21.1 (2024-01-11)
+
+ ## New features
+ - Add support for reading buildcaches created by Spack v0.22 (#41773)
+
+ ## Bugfixes
+
+ - spack graph: fix coloring with environments (#41240)
+ - spack info: sort variants in --variants-by-name (#41389)
+ - Spec.format: error on old style format strings (#41934)
+ - ASP-based solver:
+   - fix infinite recursion when computing concretization errors (#41061)
+   - don't error for type mismatch on preferences (#41138)
+   - don't emit spurious debug output (#41218)
+ - Improve the error message for deprecated preferences (#41075)
+ - Fix MSVC preview version breaking clingo build on Windows (#41185)
+ - Fix multi-word aliases (#41126)
+ - Add a warning for unconfigured compiler (#41213)
+ - environment: fix an issue with deconcretization/reconcretization of specs (#41294)
+ - buildcache: don't error if a patch is missing, when installing from binaries (#41986)
+ - Multiple improvements to unit-tests (#41215, #41369, #41495, #41359, #41361, #41345, #41342, #41308, #41226)
+
+ ## Package updates
+ - root: add a webgui patch to address security issue (#41404)
+ - BerkeleyGW: update source urls (#38218)
+
 # v0.21.0 (2023-11-11)

 `v0.21.0` is a major feature release.
@@ -32,7 +32,7 @@
 Spack is a multi-platform package manager that builds and installs
 multiple versions and configurations of software. It works on Linux,
- macOS, and many supercomputers. Spack is non-destructive: installing a
+ macOS, Windows, and many supercomputers. Spack is non-destructive: installing a
 new version of a package does not break existing installations, so many
 configurations of the same package can coexist.
@@ -144,3 +144,5 @@ switch($SpackSubCommand)
     "unload" {Invoke-SpackLoad}
     default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
 }
+
+ exit $LASTEXITCODE
@@ -1,19 +0,0 @@
- # -------------------------------------------------------------------------
- # This file controls default concretization preferences for Spack.
- #
- # Settings here are versioned with Spack and are intended to provide
- # sensible defaults out of the box. Spack maintainers should edit this
- # file to keep it current.
- #
- # Users can override these settings by editing the following files.
- #
- # Per-spack-instance settings (overrides defaults):
- #   $SPACK_ROOT/etc/spack/packages.yaml
- #
- # Per-user settings (overrides default and site settings):
- #   ~/.spack/packages.yaml
- # -------------------------------------------------------------------------
- packages:
-   all:
-     providers:
-       iconv: [glibc, musl, libiconv]
@@ -1,19 +0,0 @@
- # -------------------------------------------------------------------------
- # This file controls default concretization preferences for Spack.
- #
- # Settings here are versioned with Spack and are intended to provide
- # sensible defaults out of the box. Spack maintainers should edit this
- # file to keep it current.
- #
- # Users can override these settings by editing the following files.
- #
- # Per-spack-instance settings (overrides defaults):
- #   $SPACK_ROOT/etc/spack/packages.yaml
- #
- # Per-user settings (overrides default and site settings):
- #   ~/.spack/packages.yaml
- # -------------------------------------------------------------------------
- packages:
-   all:
-     providers:
-       iconv: [glibc, musl, libiconv]
@@ -38,10 +38,9 @@ packages:
     lapack: [openblas, amdlibflame]
     libc: [glibc, musl]
     libgfortran: [ gcc-runtime ]
-     libglx: [mesa+glx, mesa18+glx]
+     libglx: [mesa+glx]
     libifcore: [ intel-oneapi-runtime ]
     libllvm: [llvm]
-     libosmesa: [mesa+osmesa, mesa18+osmesa]
     lua-lang: [lua, lua-luajit-openresty, lua-luajit]
     luajit: [lua-luajit-openresty, lua-luajit]
     mariadb-client: [mariadb-c-client, mariadb]
@@ -147,6 +147,15 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.
     def autoreconf(self, spec, prefix):
         which("bash")("autogen.sh")

+ If the ``package.py`` has build instructions in a separate
+ :ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:
+
+ .. code-block:: python
+
+    class AutotoolsBuilder(AutotoolsBuilder):
+        def autoreconf(self, pkg, spec, prefix):
+            which("bash")("autogen.sh")
+
 """""""""""""""""""""""""""""""""""""""
 patching configure or Makefile.in files
 """""""""""""""""""""""""""""""""""""""
@@ -25,7 +25,7 @@ use Spack to build packages with the tools.
 The Spack Python class ``IntelOneapiPackage`` is a base class that is
 used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
 ``IntelOneapiTbb`` and other classes to implement the oneAPI
- packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
+ packages. Search for ``oneAPI`` at `packages.spack.io <https://packages.spack.io>`_ for the full
 list of available oneAPI packages, or use::

     spack list -d oneAPI
@@ -11,7 +11,8 @@ Chaining Spack Installations
 You can point your Spack installation to another installation to use any
 packages that are installed there. To register the other Spack instance,
- you can add it as an entry to ``upstreams.yaml``:
+ you can add it as an entry to ``upstreams.yaml`` at any of the
+ :ref:`configuration-scopes`:

 .. code-block:: yaml

@@ -22,7 +23,8 @@ you can add it as an entry to ``upstreams.yaml``:
       install_tree: /path/to/another/spack/opt/spack

 ``install_tree`` must point to the ``opt/spack`` directory inside of the
- Spack base directory.
+ Spack base directory, or the location of the ``install_tree`` defined
+ in :ref:`config.yaml <config-yaml>`.

 Once the upstream Spack instance has been added, ``spack find`` will
 automatically check the upstream instance when querying installed packages,
@@ -460,6 +460,125 @@ Sourcing that file in Bash will make the environment available to the
 user; and can be included in ``.bashrc`` files, etc. The ``loads``
 file may also be copied out of the environment, renamed, etc.

+ .. _environment_include_concrete:
+
+ ------------------------------
+ Included Concrete Environments
+ ------------------------------
+
+ Spack environments can create an environment based off of information in already
+ established environments. You can think of it as a combination of existing
+ environments. It will gather information from the existing environment's
+ ``spack.lock`` and use that during the creation of this included concrete
+ environment. When an included concrete environment is created it will generate
+ a ``spack.lock`` file for the newly created environment.
+
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ Creating included environments
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+ To create a combined concrete environment, you must have at least one existing
+ concrete environment. You will use the command ``spack env create`` with the
+ argument ``--include-concrete`` followed by the name or path of the environment
+ you'd like to include. Here is an example of how to create a combined environment
+ from the command line:
+
+ .. code-block:: console
+
+    $ spack env create myenv
+    $ spack -e myenv add python
+    $ spack -e myenv concretize
+    $ spack env create --include-concrete myenv included_env
+
+ You can also include an environment directly in the ``spack.yaml`` file. It
+ involves adding the ``include_concrete`` heading in the yaml followed by the
+ absolute path to the independent environments.
+
+ .. code-block:: yaml
+
+    spack:
+      specs: []
+      concretizer:
+        unify: true
+      include_concrete:
+      - /absolute/path/to/environment1
+      - /absolute/path/to/environment2
+
+ Once the ``spack.yaml`` has been updated you must concretize the environment to
+ get the concrete specs from the included environments.
+
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ Updating an included environment
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+ If changes were made to the base environment and you want them reflected in the
+ included environment, you will need to reconcretize both the base environment and
+ the included environment for the change to take effect. For example:
+
+ .. code-block:: console
+
+    $ spack env create myenv
+    $ spack -e myenv add python
+    $ spack -e myenv concretize
+    $ spack env create --include-concrete myenv included_env
+
+    $ spack -e myenv find
+    ==> In environment myenv
+    ==> Root specs
+    python
+
+    ==> 0 installed packages
+
+    $ spack -e included_env find
+    ==> In environment included_env
+    ==> No root specs
+    ==> Included specs
+    python
+
+    ==> 0 installed packages
+
+ Here we see that ``included_env`` has access to the python package through
+ the ``myenv`` environment. But if we were to add another spec to ``myenv``,
+ ``included_env`` will not be able to access the new information.
+
+ .. code-block:: console
+
+    $ spack -e myenv add perl
+    $ spack -e myenv concretize
+    $ spack -e myenv find
+    ==> In environment myenv
+    ==> Root specs
+    perl  python
+
+    ==> 0 installed packages
+
+    $ spack -e included_env find
+    ==> In environment included_env
+    ==> No root specs
+    ==> Included specs
+    python
+
+    ==> 0 installed packages
+
+ It isn't until you run the ``spack concretize`` command that the combined
+ environment will get the updated information from the reconcretized base environment.
+
+ .. code-block:: console
+
+    $ spack -e included_env concretize
+    $ spack -e included_env find
+    ==> In environment included_env
+    ==> No root specs
+    ==> Included specs
+    perl  python
+
+    ==> 0 installed packages
+
 .. _environment-configuration:

 ------------------------
@@ -811,6 +930,7 @@ For example, the following environment has three root packages:
 This allows for a much-needed reduction in redundancy between packages
 and constraints.

+
 ----------------
 Filesystem Views
 ----------------
@@ -1044,7 +1164,7 @@ other targets to depend on the environment installation.

 A typical workflow is as follows:

- .. code:: console
+ .. code-block:: console

    spack env create -d .
    spack -e . add perl
@@ -1137,7 +1257,7 @@ its dependencies. This can be useful when certain flags should only apply to
 dependencies. Below we show a use case where a spec is installed with verbose
 output (``spack install --verbose``) while its dependencies are installed silently:

- .. code:: console
+ .. code-block:: console

    $ spack env depfile -o Makefile
@@ -1159,7 +1279,7 @@ This can be accomplished through the generated ``[<prefix>/]SPACK_PACKAGE_IDS``
 variable. Assuming we have an active and concrete environment, we generate the
 associated ``Makefile`` with a prefix ``example``:

- .. code:: console
+ .. code-block:: console

    $ spack env depfile -o env.mk --make-prefix example
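A note on the ``include_concrete`` docs added above: the feature consumes the other environment's ``spack.lock``, which is plain JSON. If you want to see what an included environment would pull in, you can inspect the file directly; the path below is the illustrative one from the docs example, and the key names printed are simply whatever your Spack version writes::

    import json

    # Load an environment's lockfile and list its top-level sections.
    # Purely illustrative; consult your own spack.lock for the schema.
    with open("/absolute/path/to/environment1/spack.lock") as f:
        lock = json.load(f)

    print(sorted(lock.keys()))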
@@ -476,9 +476,3 @@ implemented using Python's built-in `sys.path
 :py:mod:`spack.repo` module implements a custom `Python importer
 <https://docs.python.org/2/library/imp.html>`_.

- .. warning::
-
-    The mechanism for extending packages is not yet extensively tested,
-    and extending packages across repositories imposes inter-repo
-    dependencies, which may be hard to manage. Use this feature at your
-    own risk, but let us know if you have a use case for it.
@@ -4,9 +4,9 @@ sphinx_design==0.5.0
 sphinx-rtd-theme==2.0.0
 python-levenshtein==0.25.1
 docutils==0.20.1
- pygments==2.17.2
+ pygments==2.18.0
 urllib3==2.2.1
- pytest==8.2.0
+ pytest==8.2.1
 isort==5.13.2
 black==24.4.2
 flake8==7.0.0
@@ -98,3 +98,10 @@ def path_filter_caller(*args, **kwargs):
     if _func:
         return holder_func(_func)
     return holder_func
+
+
+ def sanitize_win_longpath(path: str) -> str:
+     """Strip Windows extended path prefix from strings
+     Returns sanitized string.
+     no-op if extended path prefix is not present"""
+     return path.lstrip("\\\\?\\")
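As context for the helper added above: Windows may report paths with an extended-length prefix (``\\?\``). A standalone sketch of the stripping behavior, not Spack's code; note that ``str.lstrip`` treats its argument as a set of characters rather than a prefix, which works here because a drive letter is never in that set::

    def strip_extended_prefix(path: str) -> str:
        # Remove any leading backslashes / question marks, i.e. the
        # "\\?\" extended-length prefix when it is present.
        return path.lstrip("\\?")

    print(strip_extended_prefix("\\\\?\\C:\\spack\\opt"))  # C:\spack\opt
    print(strip_extended_prefix("C:\\spack\\opt"))         # unchanged (no-op)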
@@ -187,12 +187,18 @@ def polite_filename(filename: str) -> str:
     return _polite_antipattern().sub("_", filename)


- def getuid():
+ def getuid() -> Union[str, int]:
+     """Returns os getuid on non Windows
+     On Windows returns 0 for admin users, login string otherwise
+     This is in line with behavior from get_owner_uid which
+     always returns the login string on Windows
+     """
     if sys.platform == "win32":
         import ctypes

+         # If not admin, use the string name of the login as a unique ID
         if ctypes.windll.shell32.IsUserAnAdmin() == 0:
-             return 1
+             return os.getlogin()
         return 0
     else:
         return os.getuid()
@@ -213,6 +219,15 @@ def _win_rename(src, dst):
     os.replace(src, dst)


+ @system_path_filter
+ def msdos_escape_parens(path):
+     """MS-DOS interprets parens as grouping parameters even in a quoted string"""
+     if sys.platform == "win32":
+         return path.replace("(", "^(").replace(")", "^)")
+     else:
+         return path
+
+
 @system_path_filter
 def rename(src, dst):
     # On Windows, os.rename will fail if the destination file already exists
@@ -553,7 +568,13 @@ def exploding_archive_handler(tarball_container, stage):


 @system_path_filter(arg_slice=slice(1))
- def get_owner_uid(path, err_msg=None):
+ def get_owner_uid(path, err_msg=None) -> Union[str, int]:
+     """Returns owner UID of path destination
+     On non Windows this is the value of st_uid
+     On Windows this is the login string associated with the
+     owning user.
+     """
     if not os.path.exists(path):
         mkdirp(path, mode=stat.S_IRWXU)
@@ -822,7 +843,7 @@ def copy_tree(
         if islink(s):
             link_target = resolve_link_target_relative_to_the_link(s)
             if symlinks:
-                 target = os.readlink(s)
+                 target = readlink(s)
                 if os.path.isabs(target):

 def escaped_path(path):
@@ -2429,9 +2450,10 @@ def add_library_dependent(self, *dest):
         """
         for pth in dest:
             if os.path.isfile(pth):
-                 self._additional_library_dependents.add(pathlib.Path(pth).parent)
+                 new_pth = pathlib.Path(pth).parent
             else:
-                 self._additional_library_dependents.add(pathlib.Path(pth))
+                 new_pth = pathlib.Path(pth)
+             self._additional_library_dependents.add(new_pth)

     @property
     def rpaths(self):
@@ -2509,8 +2531,14 @@ def establish_link(self):

         # for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib)
         # install a symlink to each dependent library
-         for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
-             self._link(library, lib_dir)
+
+         # do not rpath for system libraries included in the dag
+         # we should not be modifying libraries managed by the Windows system
+         # as this will negatively impact linker behavior and can result in permission
+         # errors if those system libs are not modifiable by Spack
+         if "windows-system" not in getattr(self.pkg, "tags", []):
+             for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
+                 self._link(library, lib_dir)


 @system_path_filter
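The ``getuid`` change above replaces the old constant ``1`` with the login name for non-admin Windows users, so the return value can now be a string or an int. A minimal cross-platform sketch of the idea, independent of Spack::

    import os
    import sys

    def user_id():
        # POSIX exposes numeric uids; Windows has no direct equivalent,
        # so fall back to the login string (the code above reserves 0
        # for admin users). os.getlogin may raise without a terminal.
        if sys.platform == "win32":
            return os.getlogin()
        return os.getuid()

    print(user_id())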
@@ -11,7 +11,7 @@
 from llnl.util import lang, tty

- from ..path import system_path_filter
+ from ..path import sanitize_win_longpath, system_path_filter

 if sys.platform == "win32":
     from win32file import CreateHardLink
@@ -247,9 +247,9 @@ def _windows_create_junction(source: str, link: str):
     out, err = proc.communicate()
     tty.debug(out.decode())
     if proc.returncode != 0:
-         err = err.decode()
-         tty.error(err)
-         raise SymlinkError("Make junction command returned a non-zero return code.", err)
+         err_str = err.decode()
+         tty.error(err_str)
+         raise SymlinkError("Make junction command returned a non-zero return code.", err_str)


 def _windows_create_hard_link(path: str, link: str):
@@ -269,14 +269,14 @@ def _windows_create_hard_link(path: str, link: str):
     CreateHardLink(link, path)


- def readlink(path: str):
+ def readlink(path: str, *, dir_fd=None):
     """Spack utility to override of os.readlink method to work cross platform"""
     if _windows_is_hardlink(path):
         return _windows_read_hard_link(path)
     elif _windows_is_junction(path):
         return _windows_read_junction(path)
     else:
-         return os.readlink(path)
+         return sanitize_win_longpath(os.readlink(path, dir_fd=dir_fd))


 def _windows_read_hard_link(link: str) -> str:
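Taken together, the two symlink changes make ``readlink`` return comparable values on both platforms: resolve the target, then drop the extended-length prefix Windows may prepend. A rough standalone equivalent, reusing the ``strip_extended_prefix`` idea from the earlier sketch rather than Spack's actual helper::

    import os

    def portable_readlink(path: str) -> str:
        target = os.readlink(path)
        # No-op on POSIX targets; strips a leading "\\?\" on Windows ones.
        return target.lstrip("\\?")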
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
- __version__ = "0.22.0.dev0"
+ __version__ = "0.23.0.dev0"
 spack_version = __version__
@@ -254,8 +254,8 @@ def _search_duplicate_specs_in_externals(error_cls):
 @config_packages
 def _deprecated_preferences(error_cls):
-     """Search package preferences deprecated in v0.21 (and slated for removal in v0.22)"""
-     # TODO (v0.22): remove this audit as the attributes will not be allowed in config
+     """Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
+     # TODO (v0.23): remove this audit as the attributes will not be allowed in config
     errors = []
     packages_yaml = spack.config.CONFIG.get_config("packages")
@@ -421,6 +421,10 @@ def _check_patch_urls(pkgs, error_cls):
         r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
         r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
     )
+     github_pull_commits_re = (
+         r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
+         r".+/.+/pull/\d+/commits/[a-fA-F0-9]+\.(?:patch|diff)"
+     )
     # Only .diff URLs have stable/full hashes:
     # https://forum.gitlab.com/t/patches-with-full-index/29313
     gitlab_patch_url_re = (
@@ -436,14 +440,24 @@ def _check_patch_urls(pkgs, error_cls):
             if not isinstance(patch, spack.patch.UrlPatch):
                 continue

-             if re.match(github_patch_url_re, patch.url):
+             if re.match(github_pull_commits_re, patch.url):
+                 url = re.sub(r"/pull/\d+/commits/", r"/commit/", patch.url)
+                 url = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", url)
+                 errors.append(
+                     error_cls(
+                         f"patch URL in package {pkg_cls.name} "
+                         + "must not be a pull request commit; "
+                         + f"instead use {url}",
+                         [patch.url],
+                     )
+                 )
+             elif re.match(github_patch_url_re, patch.url):
                 full_index_arg = "?full_index=1"
                 if not patch.url.endswith(full_index_arg):
                     errors.append(
                         error_cls(
-                             "patch URL in package {0} must end with {1}".format(
-                                 pkg_cls.name, full_index_arg
-                             ),
+                             f"patch URL in package {pkg_cls.name} "
+                             + f"must end with {full_index_arg}",
                             [patch.url],
                         )
                     )
@@ -451,9 +465,7 @@ def _check_patch_urls(pkgs, error_cls):
                 if not patch.url.endswith(".diff"):
                     errors.append(
                         error_cls(
-                             "patch URL in package {0} must end with .diff".format(
-                                 pkg_cls.name
-                             ),
+                             f"patch URL in package {pkg_cls.name} must end with .diff",
                             [patch.url],
                         )
                     )
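To see what the new ``github_pull_commits_re`` branch produces, here is the rewrite applied to a made-up pull-request-commit patch URL, using the same two substitutions as the audit above::

    import re

    url = "https://github.com/org/repo/pull/123/commits/abc123.patch"  # hypothetical
    url = re.sub(r"/pull/\d+/commits/", r"/commit/", url)
    url = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", url)
    print(url)  # https://github.com/org/repo/commit/abc123.patch?full_index=1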
@@ -29,6 +29,7 @@
 import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
+ from llnl.util.symlink import readlink

 import spack.caches
 import spack.cmd
@@ -658,7 +659,7 @@ def get_buildfile_manifest(spec):
     # 2. paths are used as strings.
     for rel_path in visitor.symlinks:
         abs_path = os.path.join(root, rel_path)
-         link = os.readlink(abs_path)
+         link = readlink(abs_path)
         if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
             data["link_to_relocate"].append(rel_path)
@@ -2001,6 +2002,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
     with spack.util.path.filter_padding():
         tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
         extract_tarball(spec, download_result, force)
+         spec.package.windows_establish_runtime_linkage()
         spack.hooks.post_install(spec, False)
         spack.store.STORE.db.add(spec, spack.store.STORE.layout)
@@ -43,7 +43,7 @@
 from collections import defaultdict
 from enum import Flag, auto
 from itertools import chain
- from typing import List, Set, Tuple
+ from typing import Dict, List, Set, Tuple

 import llnl.util.tty as tty
 from llnl.string import plural
@@ -730,12 +730,28 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwa
     return compiler(*compiler_args, output=compiler_output)


- def get_rpath_deps(pkg):
-     """Return immediate or transitive RPATHs depending on the package."""
-     if pkg.transitive_rpaths:
-         return [d for d in pkg.spec.traverse(root=False, deptype=("link"))]
-     else:
-         return pkg.spec.dependencies(deptype="link")
+ def _get_rpath_deps_from_spec(
+     spec: spack.spec.Spec, transitive_rpaths: bool
+ ) -> List[spack.spec.Spec]:
+     if not transitive_rpaths:
+         return spec.dependencies(deptype=dt.LINK)
+
+     by_name: Dict[str, spack.spec.Spec] = {}
+
+     for dep in spec.traverse(root=False, deptype=dt.LINK):
+         lookup = by_name.get(dep.name)
+         if lookup is None:
+             by_name[dep.name] = dep
+         elif lookup.version < dep.version:
+             by_name[dep.name] = dep
+
+     return list(by_name.values())
+
+
+ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]:
+     """Return immediate or transitive dependencies (depending on the package) that need to be
+     rpath'ed. If a package occurs multiple times, the newest version is kept."""
+     return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)


 def get_rpaths(pkg):
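The new ``_get_rpath_deps_from_spec`` keeps one spec per package name, preferring the highest version. The same dedup in miniature, over plain tuples standing in for (spec.name, spec.version) pairs::

    deps = [("zlib", (1, 2)), ("zlib", (1, 3)), ("ncurses", (6, 4))]

    by_name = {}
    for name, version in deps:
        current = by_name.get(name)
        if current is None or current < version:
            by_name[name] = version

    print(by_name)  # {'zlib': (1, 3), 'ncurses': (6, 4)}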
@@ -137,11 +137,14 @@ def cuda_flags(arch_list):
     conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
     conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
     conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
+     conflicts("%gcc@14:", when="+cuda ^cuda@:12.4")
     conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
     conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
     conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
     conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
-     conflicts("%clang@16:", when="+cuda ^cuda@:12.3")
+     conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
+     conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
+     conflicts("%clang@18:", when="+cuda ^cuda@:12.4")

     # https://gist.github.com/ax3l/9489132#gistcomment-3860114
     conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
@@ -846,6 +846,7 @@ def scalapack_libs(self):
             "^mpich@2:" in spec_root
             or "^cray-mpich" in spec_root
             or "^mvapich2" in spec_root
+             or "^mvapich" in spec_root
             or "^intel-mpi" in spec_root
             or "^intel-oneapi-mpi" in spec_root
             or "^intel-parallel-studio" in spec_root
@@ -145,7 +145,7 @@ def install(self, pkg, spec, prefix):
         opts += self.nmake_install_args()
         if self.makefile_name:
             opts.append("/F{}".format(self.makefile_name))
-         opts.append(self.define("PREFIX", prefix))
+         opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
         with fs.working_dir(self.build_directory):
             inspect.getmodule(self.pkg).nmake(
                 *opts, *self.install_targets, ignore_quotes=self.ignore_quotes
@@ -120,12 +120,6 @@ def skip_modules(self) -> Iterable[str]:
         """
         return []

-     @property
-     def python_spec(self):
-         """Get python-venv if it exists or python otherwise."""
-         python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
-         return python
-
     def view_file_conflicts(self, view, merge_map):
         """Report all file conflicts, excepting special cases for python.
         Specifically, this does not report errors for duplicate
@@ -146,8 +140,12 @@ def view_file_conflicts(self, view, merge_map):
     def add_files_to_view(self, view, merge_map, skip_if_exists=True):
         # Patch up shebangs if the package extends Python and we put a Python interpreter in the
         # view.
-         python = self.python_spec
-         if not self.extendee_spec or python.external:
+         if not self.extendee_spec:
             return super().add_files_to_view(view, merge_map, skip_if_exists)

+         python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
+
+         if python.external:
+             return super().add_files_to_view(view, merge_map, skip_if_exists)
+
         # We only patch shebangs in the bin directory.
@@ -368,6 +366,12 @@ def list_url(cls) -> Optional[str]:  # type: ignore[override]
             return f"https://pypi.org/simple/{name}/"
         return None

+     @property
+     def python_spec(self):
+         """Get python-venv if it exists or python otherwise."""
+         python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
+         return python
+
     @property
     def headers(self) -> HeaderList:
         """Discover header files in platlib."""
@@ -44,6 +44,7 @@
 from spack import traverse
 from spack.error import SpackError
 from spack.reporters import CDash, CDashConfiguration
+ from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
 from spack.reporters.cdash import build_stamp as cdash_build_stamp

 # See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
@@ -683,6 +684,22 @@ def generate_gitlab_ci_yaml(
             "instead.",
         )

+     def ensure_expected_target_path(path):
+         """Returns passed paths with all Windows path separators exchanged
+         for posix separators only if copy_only_pipeline is enabled
+
+         This is required as copy_only_pipelines are a unique scenario where
+         the generate job and child pipelines are run on different platforms.
+         To make this compatible w/ Windows, we cannot write Windows style path separators
+         that will be consumed on by the Posix copy job runner.
+
+         TODO (johnwparent): Refactor config + cli read/write to deal only in posix
+         style paths
+         """
+         if copy_only_pipeline and path:
+             path = path.replace("\\", "/")
+         return path
+
     pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
     deprecated_mirror_config = False
     buildcache_destination = None
@@ -806,7 +823,7 @@ def generate_gitlab_ci_yaml(
         if scope not in include_scopes and scope not in env_includes:
             include_scopes.insert(0, scope)
     env_includes.extend(include_scopes)
-     env_yaml_root["spack"]["include"] = env_includes
+     env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes]

     if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
         env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
@@ -1227,6 +1244,9 @@ def main_script_replacements(cmd):
             "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
             "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
         }
+         output_vars = output_object["variables"]
+         for item, val in output_vars.items():
+             output_vars[item] = ensure_expected_target_path(val)

         # TODO: Remove this block in Spack 0.23
         if deprecated_mirror_config and remote_mirror_override:
@@ -1283,7 +1303,6 @@ def main_script_replacements(cmd):
     sorted_output = {}
     for output_key, output_value in sorted(output_object.items()):
         sorted_output[output_key] = output_value
-
     if known_broken_specs_encountered:
         tty.error("This pipeline generated hashes known to be broken on develop:")
         display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered)
@@ -1478,6 +1497,12 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
     copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)


+ def win_quote(quote_str: str) -> str:
+     if IS_WINDOWS:
+         quote_str = f'"{quote_str}"'
+     return quote_str
+
+
 def download_and_extract_artifacts(url, work_dir):
     """Look for gitlab artifacts.zip at the given url, and attempt to download
     and extract the contents into the given work_dir
@@ -1500,7 +1525,7 @@ def download_and_extract_artifacts(url, work_dir):
     request = Request(url, headers=headers)
     request.get_method = lambda: "GET"

-     response = opener.open(request)
+     response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
     response_code = response.getcode()

     if response_code != 200:
@@ -1942,9 +1967,9 @@ def compose_command_err_handling(args):
     # but we need to handle EXEs (git, etc) ourselves
     catch_exe_failure = (
         """
- if ($LASTEXITCODE -ne 0){
-     throw "Command {} has failed"
- }
+ if ($LASTEXITCODE -ne 0){{
+     throw 'Command {} has failed'
+ }}
 """
         if IS_WINDOWS
         else ""
@@ -2176,13 +2201,13 @@ def __init__(self, ci_cdash):
     def args(self):
         return [
             "--cdash-upload-url",
-             self.upload_url,
+             win_quote(self.upload_url),
             "--cdash-build",
-             self.build_name,
+             win_quote(self.build_name),
             "--cdash-site",
-             self.site,
+             win_quote(self.site),
             "--cdash-buildstamp",
-             self.build_stamp,
+             win_quote(self.build_stamp),
         ]

     @property  # type: ignore
@@ -2248,7 +2273,7 @@ def create_buildgroup(self, opener, headers, url, group_name, group_type):

     request = Request(url, data=enc_data, headers=headers)

-     response = opener.open(request)
+     response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
     response_code = response.getcode()

     if response_code not in [200, 201]:
@@ -2294,7 +2319,7 @@ def populate_buildgroup(self, job_names):
     request = Request(url, data=enc_data, headers=headers)
     request.get_method = lambda: "PUT"

-     response = opener.open(request)
+     response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
     response_code = response.getcode()

     if response_code != 200:
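The separator normalization that ``ensure_expected_target_path`` performs is simple enough to show standalone; it only matters when the generate job runs on Windows but the child pipeline runs on a POSIX runner::

    def to_posix(path):
        # Mirror of the replace performed above, minus the
        # copy_only_pipeline guard.
        return path.replace("\\", "/") if path else path

    print(to_posix("concrete_environment\\spack.yaml"))  # concrete_environment/spack.yaml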
@@ -13,7 +13,6 @@
 import shutil
 import sys
 import tempfile
- import urllib.request
 from typing import Dict, List, Optional, Tuple, Union

 import llnl.util.tty as tty
@@ -54,6 +53,7 @@
 from spack.oci.oci import (
     copy_missing_layers_with_retry,
     get_manifest_and_config_with_retry,
+     list_tags,
     upload_blob_with_retry,
     upload_manifest_with_retry,
 )
@@ -856,10 +856,7 @@ def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:


 def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
-     request = urllib.request.Request(url=image_ref.tags_url())
-     response = spack.oci.opener.urlopen(request)
-     spack.oci.opener.ensure_status(request, response, 200)
-     tags = json.load(response)["tags"]
+     tags = list_tags(image_ref)

     # Fetch all image config files in parallel
     spec_dicts = pool.starmap(
@@ -31,7 +31,6 @@
 level = "long"

 SPACK_COMMAND = "spack"
- MAKE_COMMAND = "make"
 INSTALL_FAIL_CODE = 1
 FAILED_CREATE_BUILDCACHE_CODE = 100

@@ -40,6 +39,12 @@ def deindent(desc):
     return desc.replace("    ", "")


+ def unicode_escape(path: str) -> str:
+     """Returns transformed path with any unicode
+     characters replaced with their corresponding escapes"""
+     return path.encode("unicode-escape").decode("utf-8")
+
+
 def setup_parser(subparser):
     setup_parser.parser = subparser
     subparsers = subparser.add_subparsers(help="CI sub-commands")
@@ -551,75 +556,35 @@ def ci_rebuild(args):
     # No hash match anywhere means we need to rebuild spec

     # Start with spack arguments
-     spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose"]
+     spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose", "install"]

     config = cfg.get("config")
     if not config["verify_ssl"]:
         spack_cmd.append("-k")

-     install_args = []
+     install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']

     can_verify = spack_ci.can_verify_binaries()
     verify_binaries = can_verify and spack_is_pr_pipeline is False
     if not verify_binaries:
         install_args.append("--no-check-signature")

-     slash_hash = "/{}".format(job_spec.dag_hash())
-
-     # Arguments when installing dependencies from cache
-     deps_install_args = install_args
+     slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())

     # Arguments when installing the root from sources
-     root_install_args = install_args + [
-         "--keep-stage",
-         "--only=package",
-         "--use-buildcache=package:never,dependencies:only",
-     ]
+     deps_install_args = install_args + ["--only=dependencies"]
+     root_install_args = install_args + ["--keep-stage", "--only=package"]

     if cdash_handler:
         # Add additional arguments to `spack install` for CDash reporting.
         root_install_args.extend(cdash_handler.args())
-     root_install_args.append(slash_hash)
-
-     # ["x", "y"] -> "'x' 'y'"
-     args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)

     commands = [
         # apparently there's a race when spack bootstraps? do it up front once
-         [SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
-         [
-             SPACK_COMMAND,
-             "-e",
-             env.path,
-             "env",
-             "depfile",
-             "-o",
-             "Makefile",
-             "--use-buildcache=package:never,dependencies:only",
-             slash_hash,  # limit to spec we're building
-         ],
-         [
-             # --output-sync requires GNU make 4.x.
-             # Old make errors when you pass it a flag it doesn't recognize,
-             # but it doesn't error or warn when you set unrecognized flags in
-             # this variable.
-             "export",
-             "GNUMAKEFLAGS=--output-sync=recurse",
-         ],
-         [
-             MAKE_COMMAND,
-             "SPACK={}".format(args_to_string(spack_cmd)),
-             "SPACK_COLOR=always",
-             "SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
-             "-j$(nproc)",
-             "install-deps/{}".format(
-                 spack.environment.depfile.MakefileSpec(job_spec).safe_format(
-                     "{name}-{version}-{hash}"
-                 )
-             ),
-         ],
-         spack_cmd + ["install"] + root_install_args,
+         [SPACK_COMMAND, "-e", unicode_escape(env.path), "bootstrap", "now"],
+         spack_cmd + deps_install_args + [slash_hash],
+         spack_cmd + root_install_args + [slash_hash],
     ]

     tty.debug("Installing {0} from source".format(job_spec.name))
     install_exit_code = spack_ci.process_command("install", commands, repro_dir)
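For reference, what the new ``unicode_escape`` helper does to a path containing a non-ASCII character before it is embedded in a shell command (the example path is made up)::

    path = "/tmp/sp\u00e4ck-env"  # i.e. "/tmp/späck-env"
    print(path.encode("unicode-escape").decode("utf-8"))  # /tmp/sp\xe4ck-env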
@@ -106,7 +106,8 @@ def clean(parser, args):

     # Then do the cleaning falling through the cases
     if args.specs:
-         specs = spack.cmd.parse_specs(args.specs, concretize=True)
+         specs = spack.cmd.parse_specs(args.specs, concretize=False)
+         specs = list(spack.cmd.matching_spec_from_env(x) for x in specs)
         for spec in specs:
             msg = "Cleaning build stage [{0}]"
             tty.msg(msg.format(spec.short_spec))
@@ -10,7 +10,7 @@
|
||||
import sys
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from typing import List, Optional
|
||||
|
||||
import llnl.string as string
|
||||
import llnl.util.filesystem as fs
|
||||
@@ -87,6 +87,9 @@ def env_create_setup_parser(subparser):
|
||||
default=None,
|
||||
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--include-concrete", action="append", help="name of old environment to copy specs from"
|
||||
)
|
||||
|
||||
|
||||
def env_create(args):
|
||||
@@ -104,12 +107,17 @@ def env_create(args):
|
||||
# the environment should not include a view.
|
||||
with_view = None
|
||||
|
||||
include_concrete = None
|
||||
if hasattr(args, "include_concrete"):
|
||||
include_concrete = args.include_concrete
|
||||
|
||||
env = _env_create(
|
||||
args.env_name,
|
||||
init_file=args.envfile,
|
||||
dir=args.dir or os.path.sep in args.env_name or args.env_name in (".", ".."),
|
||||
with_view=with_view,
|
||||
keep_relative=args.keep_relative,
|
||||
include_concrete=include_concrete,
|
||||
)
|
||||
|
||||
# Generate views, only really useful for environments created from spack.lock files.
|
||||
@@ -123,31 +131,43 @@ def _env_create(
|
||||
dir: bool = False,
|
||||
with_view: Optional[str] = None,
|
||||
keep_relative: bool = False,
|
||||
include_concrete: Optional[List[str]] = None,
|
||||
):
|
||||
"""Create a new environment, with an optional yaml description.
|
||||
|
||||
Arguments:
|
||||
name_or_path: name of the environment to create, or path to it
|
||||
init_file: optional initialization file -- can be a JSON lockfile (*.lock, *.json) or YAML
|
||||
manifest file
|
||||
dir: if True, create an environment in a directory instead of a managed environment
|
||||
keep_relative: if True, develop paths are copied verbatim into the new environment file,
|
||||
otherwise they may be made absolute if the new environment is in a different location
|
||||
name_or_path (str): name of the environment to create, or path to it
|
||||
init_file (str or file): optional initialization file -- can be
|
||||
a JSON lockfile (*.lock, *.json) or YAML manifest file
|
||||
dir (bool): if True, create an environment in a directory instead
|
||||
of a named environment
|
||||
keep_relative (bool): if True, develop paths are copied verbatim into
|
||||
the new environment file, otherwise they may be made absolute if the
|
||||
new environment is in a different location
|
||||
include_concrete (list): list of the included concrete environments
|
||||
"""
|
||||
if not dir:
|
||||
env = ev.create(
|
||||
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
|
||||
name_or_path,
|
||||
init_file=init_file,
|
||||
with_view=with_view,
|
||||
keep_relative=keep_relative,
|
||||
include_concrete=include_concrete,
|
||||
)
|
||||
tty.msg(
|
||||
colorize(
|
||||
f"Created environment @c{{{cescape(env.name)}}} in: @c{{{cescape(env.path)}}}"
|
||||
f"Created environment @c{{{cescape(name_or_path)}}} in: @c{{{cescape(env.path)}}}"
|
||||
)
|
||||
)
|
||||
else:
|
||||
env = ev.create_in_dir(
|
||||
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
|
||||
name_or_path,
|
||||
init_file=init_file,
|
||||
with_view=with_view,
|
||||
keep_relative=keep_relative,
|
||||
include_concrete=include_concrete,
|
||||
)
|
||||
tty.msg(colorize(f"Created anonymous environment in: @c{{{cescape(env.path)}}}"))
|
||||
tty.msg(colorize(f"Created independent environment in: @c{{{cescape(env.path)}}}"))
|
||||
tty.msg(f"Activate with: {colorize(f'@c{{spack env activate {cescape(name_or_path)}}}')}")
|
||||
return env
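For context, the keyword threaded through `_env_create` above lands in `ev.create` / `ev.create_in_dir`. A minimal usage sketch, assuming two already-concretized environments (the paths are made up):

```python
import spack.environment as ev

# Create a managed environment whose lockfile pulls in the concrete specs
# of two environments that already have a spack.lock (hypothetical paths).
env = ev.create(
    "combined",
    include_concrete=["/scratch/envs/env_a", "/scratch/envs/env_b"],
)
print(env.path)
```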

@@ -434,6 +454,12 @@ def env_remove_setup_parser(subparser):
    """remove an existing environment"""
    subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
    arguments.add_common_arguments(subparser, ["yes_to_all"])
    subparser.add_argument(
        "-f",
        "--force",
        action="store_true",
        help="remove the environment even if it is included in another environment",
    )


def env_remove(args):

@@ -443,13 +469,35 @@ def env_remove(args):
    and manifests embedded in repositories should be removed manually.
    """
    read_envs = []
    valid_envs = []
    bad_envs = []
    for env_name in args.rm_env:
    invalid_envs = []

    for env_name in ev.all_environment_names():
        try:
            env = ev.read(env_name)
            read_envs.append(env)
            valid_envs.append(env_name)

            if env_name in args.rm_env:
                read_envs.append(env)
        except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
            bad_envs.append(env_name)
            invalid_envs.append(env_name)

            if env_name in args.rm_env:
                bad_envs.append(env_name)

    # Check if env is linked to another before trying to remove
    for name in valid_envs:
        # don't check if environment is included to itself
        if name == env_name:
            continue
        environ = ev.Environment(ev.root(name))
        if ev.root(env_name) in environ.included_concrete_envs:
            msg = f'Environment "{env_name}" is being used by environment "{name}"'
            if args.force:
                tty.warn(msg)
            else:
                tty.die(msg)

    if not args.yes_to_all:
        environments = string.plural(len(args.rm_env), "environment", show_n=False)

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import copy
import sys

import llnl.util.lang

@@ -271,6 +272,27 @@ def root_decorator(spec, string):

    print()

    if env.included_concrete_envs:
        tty.msg("Included specs")

        # Root specs cannot be displayed with prefixes, since those are not
        # set for abstract specs. Same for hashes
        root_args = copy.copy(args)
        root_args.paths = False

        # Roots are displayed with variants, etc. so that we can see
        # specifically what the user asked for.
        cmd.display_specs(
            env.included_user_specs,
            root_args,
            decorator=lambda s, f: color.colorize("@*{%s}" % f),
            namespace=True,
            show_flags=True,
            show_full_compiler=True,
            variants=True,
        )
        print()

    if args.show_concretized:
        tty.msg("Concretized roots")
        cmd.display_specs(env.specs_by_hash.values(), args, decorator=decorator)

@@ -23,7 +23,7 @@


# tutorial configuration parameters
tutorial_branch = "releases/v0.21"
tutorial_branch = "releases/v0.22"
tutorial_mirror = "file:///mirror"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

@@ -151,7 +151,8 @@ def is_installed(spec):
        key=lambda s: s.dag_hash(),
    )

    return [spec for spec in specs if is_installed(spec)]
    with spack.store.STORE.db.read_transaction():
        return [spec for spec in specs if is_installed(spec)]


def dependent_environments(

@@ -239,6 +240,8 @@ def get_uninstall_list(args, specs: List[spack.spec.Spec], env: Optional[ev.Envi
        print()
        tty.info("The following environments still reference these specs:")
        colify([e.name for e in other_dependent_envs.keys()], indent=4)
    if env:
        msgs.append("use `spack remove` to remove the spec from the current environment")
    msgs.append("use `spack env remove` to remove environments")
    msgs.append("use `spack uninstall --force` to override")
    print()

@@ -220,10 +220,10 @@ def _compiler_config_from_external(config):
        operating_system = host_platform.operating_system("default_os")
        target = host_platform.target("default_target").microarchitecture
    else:
        target = spec.target
        target = spec.architecture.target
        if not target:
            host_platform = spack.platforms.host()
            target = host_platform.target("default_target").microarchitecture
            target = spack.platforms.host().target("default_target")
            target = target.microarchitecture

        operating_system = spec.os
        if not operating_system:

@@ -96,6 +96,8 @@ def verbose_flag(self):

    openmp_flag = "-fopenmp"

    # C++ flags based on CMake Modules/Compiler/Clang.cmake

    @property
    def cxx11_flag(self):
        if self.real_version < Version("3.3"):

@@ -120,6 +122,24 @@ def cxx17_flag(self):

        return "-std=c++17"

    @property
    def cxx20_flag(self):
        if self.real_version < Version("5.0"):
            raise UnsupportedCompilerFlag(self, "the C++20 standard", "cxx20_flag", "< 5.0")
        elif self.real_version < Version("11.0"):
            return "-std=c++2a"
        else:
            return "-std=c++20"

    @property
    def cxx23_flag(self):
        if self.real_version < Version("12.0"):
            raise UnsupportedCompilerFlag(self, "the C++23 standard", "cxx23_flag", "< 12.0")
        elif self.real_version < Version("17.0"):
            return "-std=c++2b"
        else:
            return "-std=c++23"

    @property
    def c99_flag(self):
        return "-std=c99"

@@ -142,7 +162,10 @@ def c17_flag(self):
    def c23_flag(self):
        if self.real_version < Version("9.0"):
            raise UnsupportedCompilerFlag(self, "the C23 standard", "c23_flag", "< 9.0")
            return "-std=c2x"
        elif self.real_version < Version("18.0"):
            return "-std=c2x"
        else:
            return "-std=c23"

    @property
    def cc_pic_flag(self):
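The version cutoffs above are easy to lose in the diff. A standalone sketch of the same C23 mapping, using plain tuples instead of Spack's `Version` class (not Spack code):

```python
# Cutoffs as encoded in the diff: clang < 9 has no C23 support,
# clang 9-17 spells the flag -std=c2x, clang 18+ accepts -std=c23.
def clang_c23_flag(version: tuple) -> str:
    if version < (9, 0):
        raise ValueError("C23 requires clang 9.0 or newer")
    return "-std=c2x" if version < (18, 0) else "-std=c23"

assert clang_c23_flag((17, 0)) == "-std=c2x"
assert clang_c23_flag((18, 1)) == "-std=c23"
```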

@@ -15,6 +15,7 @@

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.symlink import readlink

import spack.config
import spack.hash_types as ht

@@ -181,7 +182,7 @@ def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
        base_dir = (
            self.path_for_spec(deprecator_spec)
            if deprecator_spec
            else os.readlink(deprecated_spec.prefix)
            else readlink(deprecated_spec.prefix)
        )

        yaml_path = os.path.join(

@@ -34,6 +34,9 @@
* ``spec``: a string representation of the abstract spec that was concretized

4. ``concrete_specs``: a dictionary containing the specs in the environment.
5. ``include_concrete`` (dictionary): an optional dictionary that includes the roots
   and concrete specs from the included environments, keyed by the path to that
   environment

Compatibility
-------------

@@ -50,26 +53,37 @@
     - ``v2``
     - ``v3``
     - ``v4``
     - ``v5``
   * - ``v0.12:0.14``
     - ✅
     -
     -
     -
     -
   * - ``v0.15:0.16``
     - ✅
     - ✅
     -
     -
     -
   * - ``v0.17``
     - ✅
     - ✅
     - ✅
     -
     -
   * - ``v0.18:``
     - ✅
     - ✅
     - ✅
     - ✅
     -
   * - ``v0.22:``
     - ✅
     - ✅
     - ✅
     - ✅
     - ✅

Version 1
---------

@@ -334,6 +348,118 @@
        }
    }
}


Version 5
---------

Version 5 doesn't change the top-level lockfile format, but it adds an optional
``include_concrete`` dictionary that records the ``roots`` and ``concrete_specs``
of each included environment, keyed by the path to that environment. Because the
key is optional, it is simply absent from the lockfile when the environment does
not include any concrete environments.

.. code-block:: json

    {
        "_meta": {
            "file-type": "spack-lockfile",
            "lockfile-version": 5,
            "specfile-version": 3
        },
        "roots": [
            {
                "hash": "<dag_hash 1>",
                "spec": "<abstract spec 1>"
            },
            {
                "hash": "<dag_hash 2>",
                "spec": "<abstract spec 2>"
            }
        ],
        "concrete_specs": {
            "<dag_hash 1>": {
                "... <spec dict attributes> ...": { },
                "dependencies": [
                    {
                        "name": "depname_1",
                        "hash": "<dag_hash for depname_1>",
                        "type": ["build", "link"]
                    },
                    {
                        "name": "depname_2",
                        "hash": "<dag_hash for depname_2>",
                        "type": ["build", "link"]
                    }
                ],
                "hash": "<dag_hash 1>"
            },
            "<dag_hash 2>": {
                "... <spec dict attributes> ...": { },
                "dependencies": [
                    {
                        "name": "depname_3",
                        "hash": "<dag_hash for depname_3>",
                        "type": ["build", "link"]
                    },
                    {
                        "name": "depname_4",
                        "hash": "<dag_hash for depname_4>",
                        "type": ["build", "link"]
                    }
                ],
                "hash": "<dag_hash 2>"
            }
        },
        "include_concrete": {
            "<path to environment>": {
                "roots": [
                    {
                        "hash": "<dag_hash 1>",
                        "spec": "<abstract spec 1>"
                    },
                    {
                        "hash": "<dag_hash 2>",
                        "spec": "<abstract spec 2>"
                    }
                ],
                "concrete_specs": {
                    "<dag_hash 1>": {
                        "... <spec dict attributes> ...": { },
                        "dependencies": [
                            {
                                "name": "depname_1",
                                "hash": "<dag_hash for depname_1>",
                                "type": ["build", "link"]
                            },
                            {
                                "name": "depname_2",
                                "hash": "<dag_hash for depname_2>",
                                "type": ["build", "link"]
                            }
                        ],
                        "hash": "<dag_hash 1>"
                    },
                    "<dag_hash 2>": {
                        "... <spec dict attributes> ...": { },
                        "dependencies": [
                            {
                                "name": "depname_3",
                                "hash": "<dag_hash for depname_3>",
                                "type": ["build", "link"]
                            },
                            {
                                "name": "depname_4",
                                "hash": "<dag_hash for depname_4>",
                                "type": ["build", "link"]
                            }
                        ],
                        "hash": "<dag_hash 2>"
                    }
                }
            }
        }
    }
"""

from .environment import (

@@ -16,13 +16,13 @@
import urllib.parse
import urllib.request
import warnings
from typing import Dict, Iterable, List, Optional, Set, Tuple, Union
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union

import llnl.util.filesystem as fs
import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.util.link_tree import ConflictingSpecsError
from llnl.util.symlink import symlink
from llnl.util.symlink import readlink, symlink

import spack.compilers
import spack.concretize

@@ -159,6 +159,8 @@ def default_manifest_yaml():
default_view_name = "default"
# Default behavior to link all packages into views (vs. only root packages)
default_view_link = "all"
# The name for any included concrete specs
included_concrete_name = "include_concrete"


def installed_specs():

@@ -293,6 +295,7 @@ def create(
    init_file: Optional[Union[str, pathlib.Path]] = None,
    with_view: Optional[Union[str, pathlib.Path, bool]] = None,
    keep_relative: bool = False,
    include_concrete: Optional[List[str]] = None,
) -> "Environment":
    """Create a managed environment in Spack and returns it.

@@ -309,10 +312,15 @@ def create(
            string, it specifies the path to the view
        keep_relative: if True, develop paths are copied verbatim into the new environment file,
            otherwise they are made absolute
        include_concrete: list of concrete environment names/paths to be included
    """
    environment_dir = environment_dir_from_name(name, exists_ok=False)
    return create_in_dir(
        environment_dir, init_file=init_file, with_view=with_view, keep_relative=keep_relative
        environment_dir,
        init_file=init_file,
        with_view=with_view,
        keep_relative=keep_relative,
        include_concrete=include_concrete,
    )


@@ -321,6 +329,7 @@ def create_in_dir(
    init_file: Optional[Union[str, pathlib.Path]] = None,
    with_view: Optional[Union[str, pathlib.Path, bool]] = None,
    keep_relative: bool = False,
    include_concrete: Optional[List[str]] = None,
) -> "Environment":
    """Create an environment in the directory passed as input and returns it.

@@ -334,6 +343,7 @@ def create_in_dir(
            string, it specifies the path to the view
        keep_relative: if True, develop paths are copied verbatim into the new environment file,
            otherwise they are made absolute
        include_concrete: concrete environment names/paths to be included
    """
    initialize_environment_dir(root, envfile=init_file)

@@ -346,6 +356,12 @@ def create_in_dir(
        if with_view is not None:
            manifest.set_default_view(with_view)

        if include_concrete is not None:
            set_included_envs_to_env_paths(include_concrete)
            validate_included_envs_exists(include_concrete)
            validate_included_envs_concrete(include_concrete)
            manifest.set_include_concrete(include_concrete)

        manifest.flush()

    except (spack.config.ConfigFormatError, SpackEnvironmentConfigError) as e:

@@ -419,6 +435,67 @@ def ensure_env_root_path_exists():
    fs.mkdirp(env_root_path())

def set_included_envs_to_env_paths(include_concrete: List[str]) -> None:
    """Replace, in place, any included environment given by name with the
    path to that environment.

    Args:
        include_concrete: list of environment names or paths
    """

    for i, env_name in enumerate(include_concrete):
        if is_env_dir(env_name):
            include_concrete[i] = env_name
        elif exists(env_name):
            include_concrete[i] = root(env_name)


def validate_included_envs_exists(include_concrete: List[str]) -> None:
    """Checks that all of the included environments exist

    Args:
        include_concrete: list of already existing concrete environments to include

    Raises:
        SpackEnvironmentError: if any of the included environments do not exist
    """

    missing_envs = set()

    for i, env_name in enumerate(include_concrete):
        if not is_env_dir(env_name):
            missing_envs.add(env_name)

    if missing_envs:
        msg = "The following environment(s) are missing: {0}".format(", ".join(missing_envs))
        raise SpackEnvironmentError(msg)


def validate_included_envs_concrete(include_concrete: List[str]) -> None:
    """Checks that all of the included environments are concrete

    Args:
        include_concrete: list of already existing concrete environments to include

    Raises:
        SpackEnvironmentError: if any of the included environments are not concrete
    """

    non_concrete_envs = set()

    for env_path in include_concrete:
        if not os.path.exists(Environment(env_path).lock_path):
            non_concrete_envs.add(Environment(env_path).name)

    if non_concrete_envs:
        msg = "The following environment(s) are not concrete: {0}\n" "Please run:".format(
            ", ".join(non_concrete_envs)
        )
        for env in non_concrete_envs:
            msg += f"\n\t`spack -e {env} concretize`"

        raise SpackEnvironmentError(msg)


def all_environment_names():
    """List the names of environments that currently exist."""
    # just return empty if the env path does not exist. A read-only

@@ -585,7 +662,7 @@ def _current_root(self):
        if not os.path.islink(self.root):
            return None

        root = os.readlink(self.root)
        root = readlink(self.root)
        if os.path.isabs(root):
            return root

@@ -821,6 +898,18 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
        self.specs_by_hash: Dict[str, Spec] = {}
        #: Repository for this environment (memoized)
        self._repo = None

        #: Environment paths for concrete (lockfile) included environments
        self.included_concrete_envs: List[str] = []
        #: First-level included concretized spec data from/to the lockfile.
        self.included_concrete_spec_data: Dict[str, Dict[str, List[str]]] = {}
        #: User specs from included environments from the last concretization
        self.included_concretized_user_specs: Dict[str, List[Spec]] = {}
        #: Roots from included environments with the last concretization, in order
        self.included_concretized_order: Dict[str, List[str]] = {}
        #: Concretized specs by hash from the included environments
        self.included_specs_by_hash: Dict[str, Dict[str, Spec]] = {}

        #: Previously active environment
        self._previous_active = None
        self._dev_specs = None

@@ -858,7 +947,7 @@ def _read(self):

        if os.path.exists(self.lock_path):
            with open(self.lock_path) as f:
                read_lock_version = self._read_lockfile(f)
                read_lock_version = self._read_lockfile(f)["_meta"]["lockfile-version"]

            if read_lock_version == 1:
                tty.debug(f"Storing backup of {self.lock_path} at {self._lock_backup_v1_path}")

@@ -926,6 +1015,20 @@ def add_view(name, values):
        if self.views == dict():
            self.views[default_view_name] = ViewDescriptor(self.path, self.view_path_default)

    def _process_concrete_includes(self):
        """Extract and load into memory included concrete spec data."""
        self.included_concrete_envs = self.manifest[TOP_LEVEL_KEY].get(included_concrete_name, [])

        if self.included_concrete_envs:
            if os.path.exists(self.lock_path):
                with open(self.lock_path) as f:
                    data = self._read_lockfile(f)

                if included_concrete_name in data:
                    self.included_concrete_spec_data = data[included_concrete_name]
            else:
                self.include_concrete_envs()

    def _construct_state_from_manifest(self):
        """Set up user specs and views from the manifest file."""
        self.spec_lists = collections.OrderedDict()

@@ -942,6 +1045,31 @@ def _construct_state_from_manifest(self):
        self.spec_lists[user_speclist_name] = user_specs

        self._process_view(spack.config.get("view", True))
        self._process_concrete_includes()

    def all_concretized_user_specs(self) -> List[Spec]:
        """Returns all of the concretized user specs of the environment and
        its included environment(s)."""
        concretized_user_specs = self.concretized_user_specs[:]
        for included_specs in self.included_concretized_user_specs.values():
            for included in included_specs:
                # Don't duplicate included spec(s)
                if included not in concretized_user_specs:
                    concretized_user_specs.append(included)

        return concretized_user_specs

    def all_concretized_orders(self) -> List[str]:
        """Returns all of the concretized order of the environment and
        its included environment(s)."""
        concretized_order = self.concretized_order[:]
        for included_concretized_order in self.included_concretized_order.values():
            for included in included_concretized_order:
                # Don't duplicate included spec(s)
                if included not in concretized_order:
                    concretized_order.append(included)

        return concretized_order

    @property
    def user_specs(self):

@@ -966,6 +1094,26 @@ def _read_dev_specs(self):
                dev_specs[name] = local_entry
        return dev_specs

    @property
    def included_user_specs(self) -> SpecList:
        """Included concrete user (or root) specs from last concretization."""
        spec_list = SpecList()

        if not self.included_concrete_envs:
            return spec_list

        def add_root_specs(included_concrete_specs):
            # add specs from the include *and* any nested includes it may have
            for env, info in included_concrete_specs.items():
                for root_list in info["roots"]:
                    spec_list.add(root_list["spec"])

                if "include_concrete" in info:
                    add_root_specs(info["include_concrete"])

        add_root_specs(self.included_concrete_spec_data)
        return spec_list

    def clear(self, re_read=False):
        """Clear the contents of the environment

@@ -977,9 +1125,15 @@ def clear(self, re_read=False):
        self.spec_lists[user_speclist_name] = SpecList()

        self._dev_specs = {}
        self.concretized_user_specs = []  # user specs from last concretize
        self.concretized_order = []  # roots of last concretize, in order
        self.concretized_user_specs = []  # user specs from last concretize
        self.specs_by_hash = {}  # concretized specs by hash

        self.included_concrete_spec_data = {}  # concretized specs from lockfile of included envs
        self.included_concretized_order = {}  # root specs of the included envs, keyed by env path
        self.included_concretized_user_specs = {}  # user specs from last concretize's included env
        self.included_specs_by_hash = {}  # concretized specs by hash from the included envs

        self.invalidate_repository_cache()
        self._previous_active = None  # previously active environment
        if not re_read:

@@ -1033,6 +1187,55 @@ def scope_name(self):
        """Name of the config scope of this environment's manifest file."""
        return self.manifest.scope_name

    def include_concrete_envs(self):
        """Copy and save the included envs' specs internally"""

        lockfile_meta = None
        root_hash_seen = set()
        concrete_hash_seen = set()
        self.included_concrete_spec_data = {}

        for env_path in self.included_concrete_envs:
            # Check that environment exists
            if not is_env_dir(env_path):
                raise SpackEnvironmentError(f"Unable to find env at {env_path}")

            env = Environment(env_path)

            with open(env.lock_path) as f:
                lockfile_as_dict = env._read_lockfile(f)

            # Lockfile_meta must match each env and use at least format version 5
            if lockfile_meta is None:
                lockfile_meta = lockfile_as_dict["_meta"]
            elif lockfile_meta != lockfile_as_dict["_meta"]:
                raise SpackEnvironmentError("All lockfile _meta values must match")
            elif lockfile_meta["lockfile-version"] < 5:
                raise SpackEnvironmentError("The lockfile format must be at version 5 or higher")

            # Copy unique root specs from env
            self.included_concrete_spec_data[env_path] = {"roots": []}
            for root_dict in lockfile_as_dict["roots"]:
                if root_dict["hash"] not in root_hash_seen:
                    self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
                    root_hash_seen.add(root_dict["hash"])

            # Copy unique concrete specs from env
            for concrete_spec in lockfile_as_dict["concrete_specs"]:
                if concrete_spec not in concrete_hash_seen:
                    self.included_concrete_spec_data[env_path].update(
                        {"concrete_specs": lockfile_as_dict["concrete_specs"]}
                    )
                    concrete_hash_seen.add(concrete_spec)

            if "include_concrete" in lockfile_as_dict.keys():
                self.included_concrete_spec_data[env_path]["include_concrete"] = lockfile_as_dict[
                    "include_concrete"
                ]

        self._read_lockfile_dict(self._to_lockfile_dict())
        self.write()

    def destroy(self):
        """Remove this environment from Spack entirely."""
        shutil.rmtree(self.path)

@@ -1232,6 +1435,10 @@ def concretize(self, force=False, tests=False):
            for spec in set(self.concretized_user_specs) - set(self.user_specs):
                self.deconcretize(spec, concrete=False)

        # If a combined env, check updated spec is in the linked envs
        if self.included_concrete_envs:
            self.include_concrete_envs()

        # Pick the right concretization strategy
        if self.unify == "when_possible":
            return self._concretize_together_where_possible(tests=tests)

@@ -1704,8 +1911,14 @@ def _partition_roots_by_install_status(self):
        of per spec."""
        installed, uninstalled = [], []
        with spack.store.STORE.db.read_transaction():
            for concretized_hash in self.concretized_order:
                spec = self.specs_by_hash[concretized_hash]
            for concretized_hash in self.all_concretized_orders():
                if concretized_hash in self.specs_by_hash:
                    spec = self.specs_by_hash[concretized_hash]
                else:
                    for env_path in self.included_specs_by_hash.keys():
                        if concretized_hash in self.included_specs_by_hash[env_path]:
                            spec = self.included_specs_by_hash[env_path][concretized_hash]
                            break
                if not spec.installed or (
                    spec.satisfies("dev_path=*") or spec.satisfies("^dev_path=*")
                ):

@@ -1785,8 +1998,14 @@ def added_specs(self):

    def concretized_specs(self):
        """Tuples of (user spec, concrete spec) for all concrete specs."""
        for s, h in zip(self.concretized_user_specs, self.concretized_order):
            yield (s, self.specs_by_hash[h])
        for s, h in zip(self.all_concretized_user_specs(), self.all_concretized_orders()):
            if h in self.specs_by_hash:
                yield (s, self.specs_by_hash[h])
            else:
                for env_path in self.included_specs_by_hash.keys():
                    if h in self.included_specs_by_hash[env_path]:
                        yield (s, self.included_specs_by_hash[env_path][h])
                        break

    def concrete_roots(self):
        """Same as concretized_specs, except it returns the list of concrete

@@ -1915,8 +2134,7 @@ def _get_environment_specs(self, recurse_dependencies=True):
        If these specs appear under different user_specs, only one copy
        is added to the list returned.
        """
        specs = [self.specs_by_hash[h] for h in self.concretized_order]

        specs = [self.specs_by_hash[h] for h in self.all_concretized_orders()]
        if recurse_dependencies:
            specs.extend(
                traverse.traverse_nodes(

@@ -1961,31 +2179,76 @@ def _to_lockfile_dict(self):
            "concrete_specs": concrete_specs,
        }

        if self.included_concrete_envs:
            data[included_concrete_name] = self.included_concrete_spec_data

        return data

    def _read_lockfile(self, file_or_json):
        """Read a lockfile from a file or from a raw string."""
        lockfile_dict = sjson.load(file_or_json)
        self._read_lockfile_dict(lockfile_dict)
        return lockfile_dict["_meta"]["lockfile-version"]
        return lockfile_dict

    def set_included_concretized_user_specs(
        self,
        env_name: str,
        env_info: Dict[str, Dict[str, Any]],
        included_json_specs_by_hash: Dict[str, Dict[str, Any]],
    ) -> Dict[str, Dict[str, Any]]:
        """Sets all of the concretized user specs from included environments
        to include those from nested included environments.

        Args:
            env_name: the name (technically the path) of the included environment
            env_info: included concrete environment data
            included_json_specs_by_hash: concrete spec data keyed by hash

        Returns: updated specs_by_hash
        """
        self.included_concretized_order[env_name] = []
        self.included_concretized_user_specs[env_name] = []

        def add_specs(name, info, specs_by_hash):
            # Add specs from the environment as well as any of its nested
            # environments.
            for root_info in info["roots"]:
                self.included_concretized_order[name].append(root_info["hash"])
                self.included_concretized_user_specs[name].append(Spec(root_info["spec"]))
            if "concrete_specs" in info:
                specs_by_hash.update(info["concrete_specs"])

            if included_concrete_name in info:
                for included_name, included_info in info[included_concrete_name].items():
                    if included_name not in self.included_concretized_order:
                        self.included_concretized_order[included_name] = []
                        self.included_concretized_user_specs[included_name] = []
                    add_specs(included_name, included_info, specs_by_hash)

        add_specs(env_name, env_info, included_json_specs_by_hash)
        return included_json_specs_by_hash

    def _read_lockfile_dict(self, d):
        """Read a lockfile dictionary into this environment."""
        self.specs_by_hash = {}
        self.included_specs_by_hash = {}
        self.included_concretized_user_specs = {}
        self.included_concretized_order = {}

        roots = d["roots"]
        self.concretized_user_specs = [Spec(r["spec"]) for r in roots]
        self.concretized_order = [r["hash"] for r in roots]
        json_specs_by_hash = d["concrete_specs"]
        included_json_specs_by_hash = {}

        # Track specs by their lockfile key. Currently spack uses the finest
        # grained hash as the lockfile key, while older formats used the build
        # hash or a previous incarnation of the DAG hash (one that did not
        # include build deps or package hash).
        specs_by_hash = {}
        if included_concrete_name in d:
            for env_name, env_info in d[included_concrete_name].items():
                included_json_specs_by_hash.update(
                    self.set_included_concretized_user_specs(
                        env_name, env_info, included_json_specs_by_hash
                    )
                )

        # Track specs by their DAG hash, allows handling DAG hash collisions
        first_seen = {}
        current_lockfile_format = d["_meta"]["lockfile-version"]
        try:
            reader = READER_CLS[current_lockfile_format]

@@ -1998,6 +2261,39 @@ def _read_lockfile_dict(self, d):
            msg += " You need to use a newer Spack version."
            raise SpackEnvironmentError(msg)

        first_seen, self.concretized_order = self.filter_specs(
            reader, json_specs_by_hash, self.concretized_order
        )

        for spec_dag_hash in self.concretized_order:
            self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]

        if any(self.included_concretized_order.values()):
            first_seen = {}

            for env_name, concretized_order in self.included_concretized_order.items():
                filtered_spec, self.included_concretized_order[env_name] = self.filter_specs(
                    reader, included_json_specs_by_hash, concretized_order
                )
                first_seen.update(filtered_spec)

            for env_path, spec_hashes in self.included_concretized_order.items():
                self.included_specs_by_hash[env_path] = {}
                for spec_dag_hash in spec_hashes:
                    self.included_specs_by_hash[env_path].update(
                        {spec_dag_hash: first_seen[spec_dag_hash]}
                    )

    def filter_specs(self, reader, json_specs_by_hash, order_concretized):
        # Track specs by their lockfile key. Currently spack uses the finest
        # grained hash as the lockfile key, while older formats used the build
        # hash or a previous incarnation of the DAG hash (one that did not
        # include build deps or package hash).
        specs_by_hash = {}

        # Track specs by their DAG hash, allows handling DAG hash collisions
        first_seen = {}

        # First pass: Put each spec in the map ignoring dependencies
        for lockfile_key, node_dict in json_specs_by_hash.items():
            spec = reader.from_node_dict(node_dict)

@@ -2020,7 +2316,8 @@ def _read_lockfile_dict(self, d):
        # keep. This is only required as long as we support older lockfile
        # formats where the mapping from DAG hash to lockfile key is possibly
        # one-to-many.
        for lockfile_key in self.concretized_order:

        for lockfile_key in order_concretized:
            for s in specs_by_hash[lockfile_key].traverse():
                if s.dag_hash() not in first_seen:
                    first_seen[s.dag_hash()] = s

@@ -2028,12 +2325,10 @@ def _read_lockfile_dict(self, d):
        # Now make sure concretized_order and our internal specs dict
        # contains the keys used by modern spack (i.e. the dag_hash
        # that includes build deps and package hash).
        self.concretized_order = [
            specs_by_hash[h_key].dag_hash() for h_key in self.concretized_order
        ]

        for spec_dag_hash in self.concretized_order:
            self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
        order_concretized = [specs_by_hash[h_key].dag_hash() for h_key in order_concretized]

        return first_seen, order_concretized

    def write(self, regenerate: bool = True) -> None:
        """Writes an in-memory environment to its location on disk.

@@ -2046,7 +2341,7 @@ def write(self, regenerate: bool = True) -> None:
            regenerate: regenerate views and run post-write hooks as well as writing if True.
        """
        self.manifest_uptodate_or_warn()
        if self.specs_by_hash:
        if self.specs_by_hash or self.included_concrete_envs:
            self.ensure_env_directory_exists(dot_env=True)
            self.update_environment_repository()
            self.manifest.flush()

@@ -2545,6 +2840,19 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
            raise SpackEnvironmentError(msg) from e
        self.changed = True

    def set_include_concrete(self, include_concrete: List[str]) -> None:
        """Sets the included concrete environments in the manifest to the value(s) passed as input.

        Args:
            include_concrete: list of already existing concrete environments to include
        """
        self.pristine_configuration[included_concrete_name] = []

        for env_path in include_concrete:
            self.pristine_configuration[included_concrete_name].append(env_path)

        self.changed = True

    def add_definition(self, user_spec: str, list_name: str) -> None:
        """Appends a user spec to the first active definition matching the name passed as argument.

@@ -2728,54 +3036,56 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
        for i, config_path in enumerate(reversed(includes)):
            # allow paths to contain spack config/environment variables, etc.
            config_path = substitute_path_variables(config_path)

            include_url = urllib.parse.urlparse(config_path)

            # Transform file:// URLs to direct includes.
            if include_url.scheme == "file":
                config_path = urllib.request.url2pathname(include_url.path)
            # If scheme is not valid, config_path is not a url
            # of a type Spack is generally aware
            if spack.util.url.validate_scheme(include_url.scheme):
                # Transform file:// URLs to direct includes.
                if include_url.scheme == "file":
                    config_path = urllib.request.url2pathname(include_url.path)

            # Any other URL should be fetched.
            elif include_url.scheme in ("http", "https", "ftp"):
                # Stage any remote configuration file(s)
                staged_configs = (
                    os.listdir(self.config_stage_dir)
                    if os.path.exists(self.config_stage_dir)
                    else []
                )
                remote_path = urllib.request.url2pathname(include_url.path)
                basename = os.path.basename(remote_path)
                if basename in staged_configs:
                    # Do NOT re-stage configuration files over existing
                    # ones with the same name since there is a risk of
                    # losing changes (e.g., from 'spack config update').
                    tty.warn(
                        "Will not re-stage configuration from {0} to avoid "
                        "losing changes to the already staged file of the "
                        "same name.".format(remote_path)
                # Any other URL should be fetched.
                elif include_url.scheme in ("http", "https", "ftp"):
                    # Stage any remote configuration file(s)
                    staged_configs = (
                        os.listdir(self.config_stage_dir)
                        if os.path.exists(self.config_stage_dir)
                        else []
                    )

                    # Recognize the configuration stage directory
                    # is flattened to ensure a single copy of each
                    # configuration file.
                    config_path = self.config_stage_dir
                    if basename.endswith(".yaml"):
                        config_path = os.path.join(config_path, basename)
                else:
                    staged_path = spack.config.fetch_remote_configs(
                        config_path, str(self.config_stage_dir), skip_existing=True
                    )
                    if not staged_path:
                        raise SpackEnvironmentError(
                            "Unable to fetch remote configuration {0}".format(config_path)
                    remote_path = urllib.request.url2pathname(include_url.path)
                    basename = os.path.basename(remote_path)
                    if basename in staged_configs:
                        # Do NOT re-stage configuration files over existing
                        # ones with the same name since there is a risk of
                        # losing changes (e.g., from 'spack config update').
                        tty.warn(
                            "Will not re-stage configuration from {0} to avoid "
                            "losing changes to the already staged file of the "
                            "same name.".format(remote_path)
                        )
                        config_path = staged_path

            elif include_url.scheme:
                raise ValueError(
                    f"Unsupported URL scheme ({include_url.scheme}) for "
                    f"environment include: {config_path}"
                )
                        # Recognize the configuration stage directory
                        # is flattened to ensure a single copy of each
                        # configuration file.
                        config_path = self.config_stage_dir
                        if basename.endswith(".yaml"):
                            config_path = os.path.join(config_path, basename)
                    else:
                        staged_path = spack.config.fetch_remote_configs(
                            config_path, str(self.config_stage_dir), skip_existing=True
                        )
                        if not staged_path:
                            raise SpackEnvironmentError(
                                "Unable to fetch remote configuration {0}".format(config_path)
                            )
                        config_path = staged_path

                elif include_url.scheme:
                    raise ValueError(
                        f"Unsupported URL scheme ({include_url.scheme}) for "
                        f"environment include: {config_path}"
                    )

            # treat relative paths as relative to the environment
            if not os.path.isabs(config_path):

@@ -13,7 +13,6 @@
import spack.config
import spack.relocate
from spack.util.elf import ElfParsingError, parse_elf
from spack.util.executable import Executable


def is_shared_library_elf(filepath):

@@ -149,10 +148,9 @@ def post_install(spec, explicit=None):
        return

    # Should failing to locate patchelf be a hard error?
    patchelf_path = spack.relocate._patchelf()
    if not patchelf_path:
    patchelf = spack.relocate._patchelf()
    if not patchelf:
        return
    patchelf = Executable(patchelf_path)

    fixes = find_and_patch_sonames(spec.prefix, spec.package.non_bindable_shared_objects, patchelf)

@@ -488,6 +488,7 @@ def _process_binary_cache_tarball(

    with timer.measure("install"), spack.util.path.filter_padding():
        binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
        pkg.windows_establish_runtime_linkage()

        if hasattr(pkg, "_post_buildcache_install_hook"):
            pkg._post_buildcache_install_hook()
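The `hasattr` guard above means the hook is optional. A hypothetical package illustrating the contract (class name and message are made up):

```python
import llnl.util.tty as tty
from spack.package import Package


class Example(Package):
    """Hypothetical package showing the optional hook used above."""

    def _post_buildcache_install_hook(self):
        # Called by the installer right after this package is extracted
        # from a binary cache.
        tty.msg(f"{self.name}: post-buildcache fixups go here")
```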

@@ -11,7 +11,7 @@
import urllib.parse
import urllib.request
from http.client import HTTPResponse
from typing import NamedTuple, Tuple
from typing import List, NamedTuple, Tuple
from urllib.request import Request

import llnl.util.tty as tty

@@ -27,6 +27,7 @@
import spack.stage
import spack.traverse
import spack.util.crypto
import spack.util.url

from .image import Digest, ImageReference

@@ -69,6 +70,42 @@ def with_query_param(url: str, param: str, value: str) -> str:
    )


def list_tags(ref: ImageReference, _urlopen: spack.oci.opener.MaybeOpen = None) -> List[str]:
    """Retrieves the list of tags associated with an image, handling pagination."""
    _urlopen = _urlopen or spack.oci.opener.urlopen
    tags = set()
    fetch_url = ref.tags_url()

    while True:
        # Fetch tags
        request = Request(url=fetch_url)
        response = _urlopen(request)
        spack.oci.opener.ensure_status(request, response, 200)
        tags.update(json.load(response)["tags"])

        # Check for pagination
        link_header = response.headers["Link"]

        if link_header is None:
            break

        tty.debug(f"OCI tag pagination: {link_header}")

        rel_next_value = spack.util.url.parse_link_rel_next(link_header)

        if rel_next_value is None:
            break

        rel_next = urllib.parse.urlparse(rel_next_value)

        if rel_next.scheme not in ("https", ""):
            break

        fetch_url = ref.endpoint(rel_next_value)

    return sorted(tags)


def upload_blob(
    ref: ImageReference,
    file: str,
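A possible invocation of the `list_tags` helper above (hypothetical registry and image name; `ImageReference.from_string` is assumed here, any way of constructing an `ImageReference` works the same):

```python
from spack.oci.image import ImageReference

# list_tags follows RFC 5988 "Link" headers across pages and returns
# the full, sorted tag list for the image.
ref = ImageReference.from_string("ghcr.io/example/spack-buildcache")
print(list_tags(ref))
```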

@@ -418,18 +418,27 @@ def ensure_status(request: urllib.request.Request, response: HTTPResponse, statu
    )


def default_retry(f, retries: int = 3, sleep=None):
def default_retry(f, retries: int = 5, sleep=None):
    sleep = sleep or time.sleep

    def wrapper(*args, **kwargs):
        for i in range(retries):
            try:
                return f(*args, **kwargs)
            except urllib.error.HTTPError as e:
            except (urllib.error.URLError, TimeoutError) as e:
                # Retry on internal server errors, and rate limit errors
                # Potentially this could take into account the Retry-After header
                # if registries support it
                if i + 1 != retries and (500 <= e.code < 600 or e.code == 429):
                if i + 1 != retries and (
                    (
                        isinstance(e, urllib.error.HTTPError)
                        and (500 <= e.code < 600 or e.code == 429)
                    )
                    or (
                        isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError)
                    )
                    or isinstance(e, TimeoutError)
                ):
                    # Exponential backoff
                    sleep(2**i)
                    continue
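Stripped of the HTTP specifics, the change above amounts to retry-with-exponential-backoff over a wider set of errors. A sketch of the pattern, not the implementation:

```python
import time
import urllib.error


def retry_with_backoff(f, retries=5, sleep=time.sleep):
    def wrapper(*args, **kwargs):
        for i in range(retries):
            try:
                return f(*args, **kwargs)
            except (urllib.error.URLError, TimeoutError):
                if i + 1 == retries:
                    raise
                sleep(2**i)  # 1s, 2s, 4s, 8s between the five attempts

    return wrapper
```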

@@ -161,7 +161,11 @@ def windows_establish_runtime_linkage(self):

        Performs symlinking to incorporate rpath dependencies to Windows runtime search paths
        """
        if sys.platform == "win32":
        # If spec is an external, we should not be modifying its bin directory, as we would
        # be doing in this method
        # Spack should in general not modify things it has not installed
        # we can reasonably expect externals to have their link interface properly established
        if sys.platform == "win32" and not self.spec.external:
            self.win_rpath.add_library_dependent(*self.win_add_library_dependent())
            self.win_rpath.add_rpath(*self.win_add_rpath())
            self.win_rpath.establish_link()

@@ -1240,7 +1244,7 @@ def install_test_root(self):
        """Return the install test root directory."""
        tty.warn(
            "The 'pkg.install_test_root' property is deprecated with removal "
            "expected v0.22. Use 'install_test_root(pkg)' instead."
            "expected v0.23. Use 'install_test_root(pkg)' instead."
        )
        return install_test_root(self)

@@ -1898,7 +1902,7 @@ def cache_extra_test_sources(self, srcs):
        """
        msg = (
            "'pkg.cache_extra_test_sources(srcs) is deprecated with removal "
            "expected in v0.22. Use 'cache_extra_test_sources(pkg, srcs)' "
            "expected in v0.23. Use 'cache_extra_test_sources(pkg, srcs)' "
            "instead."
        )
        warnings.warn(msg)

@@ -2446,9 +2450,18 @@ def rpath(self):

        # on Windows, libraries of runtime interest are typically
        # stored in the bin directory
        # Do not include Windows system libraries in the rpath interface
        # these libraries are handled automatically by VS/VCVARS and adding
        # Spack derived system libs into the link path or address space of a program
        # can result in conflicting versions, which makes Spack packages less usable
        if sys.platform == "win32":
            rpaths = [self.prefix.bin]
            rpaths.extend(d.prefix.bin for d in deps if os.path.isdir(d.prefix.bin))
            rpaths.extend(
                d.prefix.bin
                for d in deps
                if os.path.isdir(d.prefix.bin)
                and "windows-system" not in getattr(d.package, "tags", [])
            )
        else:
            rpaths = [self.prefix.lib, self.prefix.lib64]
            rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))

@@ -10,6 +10,7 @@
import archspec.cpu

import llnl.util.tty as tty
from llnl.util.symlink import readlink

import spack.target
import spack.version

@@ -133,7 +134,7 @@ def craype_type_and_version(cls):
        # Take the default version from known symlink path
        default_path = os.path.join(craype_dir, "default")
        if os.path.islink(default_path):
            version = spack.version.Version(os.readlink(default_path))
            version = spack.version.Version(readlink(default_path))
            return (craype_type, version)

        # If no default version, sort available versions and return latest

@@ -16,7 +16,7 @@
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.symlink import symlink
from llnl.util.symlink import readlink, symlink

import spack.paths
import spack.platforms

@@ -25,6 +25,7 @@
import spack.store
import spack.util.elf as elf
import spack.util.executable as executable
import spack.util.path

from .relocate_text import BinaryFilePrefixReplacer, TextFilePrefixReplacer

@@ -565,7 +566,7 @@ def make_link_relative(new_links, orig_links):
        orig_links (list): original links
    """
    for new_link, orig_link in zip(new_links, orig_links):
        target = os.readlink(orig_link)
        target = readlink(orig_link)
        relative_target = os.path.relpath(target, os.path.dirname(orig_link))
        os.unlink(new_link)
        symlink(relative_target, new_link)

@@ -613,7 +614,7 @@ def relocate_links(links, prefix_to_prefix):
    """Relocate links to a new install prefix."""
    regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys()))
    for link in links:
        old_target = os.readlink(link)
        old_target = readlink(link)
        match = regex.match(old_target)

        # No match.

@@ -241,7 +241,7 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):

    Arguments:

        type (str): String containing one or more of 'A', 'B', 'C'
        type (str): String containing one or more of 'A', 'R', 'C'
        rev1 (str): Revision to compare against, default is 'HEAD^'
        rev2 (str): Revision to compare to rev1, default is 'HEAD'

@@ -264,7 +264,7 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
    lines = [] if not out else re.split(r"\s+", out)
    changed = set()
    for path in lines:
        pkg_name, _, _ = path.partition(os.sep)
        pkg_name, _, _ = path.partition("/")
        if pkg_name not in added and pkg_name not in removed:
            changed.add(pkg_name)
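The switch from `os.sep` to a literal `"/"` matters because `git diff` prints forward-slash paths on every platform, while `os.sep` is `"\\"` on Windows. A one-line illustration (hypothetical path):

```python
# git diff output: "zlib/package.py" regardless of platform, so splitting on
# "/" recovers the package name even where os.sep is "\\".
pkg_name, _, _ = "zlib/package.py".partition("/")
assert pkg_name == "zlib"
```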

@@ -58,7 +58,8 @@
# Initialize data structures common to each phase's report.
CDASH_PHASES = set(MAP_PHASES_TO_CDASH.values())
CDASH_PHASES.add("update")

# CDash request timeout in seconds
SPACK_CDASH_TIMEOUT = 45

CDashConfiguration = collections.namedtuple(
    "CDashConfiguration", ["upload_url", "packages", "build", "site", "buildstamp", "track"]

@@ -447,7 +448,7 @@ def upload(self, filename):
        # By default, urllib2 only supports GET and POST.
|
||||
# CDash expects this file to be uploaded via PUT.
|
||||
request.get_method = lambda: "PUT"
|
||||
response = opener.open(request)
|
||||
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||
if self.current_package_name not in self.buildIds:
|
||||
resp_value = response.read()
|
||||
if isinstance(resp_value, bytes):
|
||||
|
@@ -9,7 +9,7 @@
|
||||
import tempfile
|
||||
from collections import OrderedDict
|
||||
|
||||
from llnl.util.symlink import symlink
|
||||
from llnl.util.symlink import readlink, symlink
|
||||
|
||||
import spack.binary_distribution as bindist
|
||||
import spack.error
|
||||
@@ -26,7 +26,7 @@ def _relocate_spliced_links(links, orig_prefix, new_prefix):
|
||||
in our case. This still needs to be called after the copy to destination
|
||||
because it expects the new directory structure to be in place."""
|
||||
for link in links:
|
||||
link_target = os.readlink(os.path.join(orig_prefix, link))
|
||||
link_target = readlink(os.path.join(orig_prefix, link))
|
||||
link_target = re.sub("^" + orig_prefix, new_prefix, link_target)
|
||||
new_link_path = os.path.join(new_prefix, link)
|
||||
os.unlink(new_link_path)
|
||||
|
@@ -35,6 +35,7 @@
|
||||
{
|
||||
"include": {"type": "array", "default": [], "items": {"type": "string"}},
|
||||
"specs": spec_list_schema,
|
||||
"include_concrete": {"type": "array", "default": [], "items": {"type": "string"}},
|
||||
},
|
||||
),
|
||||
}
|
||||
|
@@ -141,7 +141,7 @@
|
||||
"deprecatedProperties": {
|
||||
"properties": ["version"],
|
||||
"message": "setting version preferences in the 'all' section of packages.yaml "
|
||||
"is deprecated and will be removed in v0.22\n\n\tThese preferences "
|
||||
"is deprecated and will be removed in v0.23\n\n\tThese preferences "
|
||||
"will be ignored by Spack. You can set them only in package-specific sections "
|
||||
"of the same file.\n",
|
||||
"error": False,
|
||||
@@ -197,7 +197,7 @@
|
||||
"properties": ["target", "compiler", "providers"],
|
||||
"message": "setting 'compiler:', 'target:' or 'provider:' preferences in "
|
||||
"a package-specific section of packages.yaml is deprecated, and will be "
|
||||
"removed in v0.22.\n\n\tThese preferences will be ignored by Spack, and "
|
||||
"removed in v0.23.\n\n\tThese preferences will be ignored by Spack, and "
|
||||
"can be set only in the 'all' section of the same file. "
|
||||
"You can run:\n\n\t\t$ spack audit configs\n\n\tto get better diagnostics, "
|
||||
"including files:lines where the deprecated attributes are used.\n\n"
|
||||
|
@@ -314,6 +314,10 @@ def using_libc_compatibility() -> bool:
|
||||
return spack.platforms.host().name == "linux"
|
||||
|
||||
|
||||
def c_compiler_runs(compiler: spack.compiler.Compiler) -> bool:
|
||||
return compiler.compiler_verbose_output is not None
|
||||
|
||||
|
||||
def extend_flag_list(flag_list, new_flags):
|
||||
"""Extend a list of flags, preserving order and precedence.
|
||||
|
||||
@@ -1649,11 +1653,15 @@ def external_packages(self):
|
||||
if isinstance(reuse_yaml, typing.Mapping):
|
||||
default_include = reuse_yaml.get("include", [])
|
||||
default_exclude = reuse_yaml.get("exclude", [])
|
||||
libc_externals = list(all_libcs())
|
||||
for source in reuse_yaml.get("from", []):
|
||||
if source["type"] != "external":
|
||||
continue
|
||||
|
||||
include = source.get("include", default_include)
|
||||
if include:
|
||||
# Since libcs are implicit externals, we need to implicitly include them
|
||||
include = include + libc_externals
|
||||
exclude = source.get("exclude", default_exclude)
|
||||
spec_filters.append(
|
||||
SpecFilter(
|
||||
@@ -1931,6 +1939,11 @@ def _spec_clauses(
|
||||
for virtual in virtuals:
|
||||
clauses.append(fn.attr("virtual_on_incoming_edges", spec.name, virtual))
|
||||
|
||||
# If the spec is external and concrete, we allow all the libcs on the system
|
||||
if spec.external and spec.concrete and using_libc_compatibility():
|
||||
for libc in self.libcs:
|
||||
clauses.append(fn.attr("compatible_libc", spec.name, libc.name, libc.version))
|
||||
|
||||
# add all clauses from dependencies
|
||||
if transitive:
|
||||
# TODO: Eventually distinguish 2 deps on the same pkg (build and link)
|
||||
@@ -2971,6 +2984,13 @@ class CompilerParser:
|
||||
def __init__(self, configuration) -> None:
|
||||
self.compilers: Set[KnownCompiler] = set()
|
||||
for c in all_compilers_in_config(configuration):
|
||||
if using_libc_compatibility() and not c_compiler_runs(c):
|
||||
tty.debug(
|
||||
f"the C compiler {c.cc} does not exist, or does not run correctly."
|
||||
f" The compiler {c.spec} will not be used during concretization."
|
||||
)
|
||||
continue
|
||||
|
||||
if using_libc_compatibility() and not c.default_libc:
|
||||
warnings.warn(
|
||||
f"cannot detect libc from {c.spec}. The compiler will not be used "
|
||||
|
@@ -1345,8 +1345,10 @@ build(PackageNode) :- not attr("hash", PackageNode, _), attr("node", PackageNode
% topmost-priority criterion to reuse what is installed.
%
% The priority ranges are:
% 200+        Shifted priorities for build nodes; correspond to priorities 0 - 99.
% 100 - 199   Unshifted priorities. Currently only includes minimizing #builds.
% 1000+       Optimizations for concretization errors
% 300 - 1000  Highest priority optimizations for valid solutions
% 200 - 299   Shifted priorities for build nodes; correspond to priorities 0 - 99.
% 100 - 199   Unshifted priorities. Currently only includes minimizing #builds and minimizing dupes.
% 0 - 99      Priorities for non-built nodes.
build_priority(PackageNode, 200) :- build(PackageNode), attr("node", PackageNode).
build_priority(PackageNode, 0) :- not build(PackageNode), attr("node", PackageNode).

@@ -1394,6 +1396,16 @@ build_priority(PackageNode, 0) :- not build(PackageNode), attr("node", Package
% 2. a `#minimize{ 0@2 : #true }.` statement that ensures the criterion
%    is displayed (clingo doesn't display sums over empty sets by default)

% A condition group specifies one or more specs that must be satisfied.
% Specs declared first are preferred, so we assign increasing weights and
% minimize the weights.
opt_criterion(310, "requirement weight").
#minimize{ 0@310: #true }.
#minimize {
    Weight@310,PackageNode,Group
    : requirement_weight(PackageNode, Group, Weight)
}.

% Try hard to reuse installed packages (i.e., minimize the number built)
opt_criterion(110, "number of packages to build (vs. reuse)").
#minimize { 0@110: #true }.

@@ -1405,18 +1417,6 @@ opt_criterion(100, "number of nodes from the same package").
#minimize { ID@100,Package : attr("virtual_node", node(ID, Package)) }.
#defined optimize_for_reuse/0.

% A condition group specifies one or more specs that must be satisfied.
% Specs declared first are preferred, so we assign increasing weights and
% minimize the weights.
opt_criterion(75, "requirement weight").
#minimize{ 0@275: #true }.
#minimize{ 0@75: #true }.
#minimize {
    Weight@75+Priority,PackageNode,Group
    : requirement_weight(PackageNode, Group, Weight),
    build_priority(PackageNode, Priority)
}.

% Minimize the number of deprecated versions being used
opt_criterion(73, "deprecated versions used").
#minimize{ 0@273: #true }.

@@ -1424,6 +1424,7 @@ opt_criterion(73, "deprecated versions used").
#minimize{
    1@73+Priority,PackageNode
    : attr("deprecated", PackageNode, _),
    not external(PackageNode),
    build_priority(PackageNode, Priority)
}.

@@ -1431,11 +1432,11 @@ opt_criterion(73, "deprecated versions used").
% 1. Version weight
% 2. Number of variants with a non default value, if not set
%    for the root package.
opt_criterion(70, "version weight").
opt_criterion(70, "version badness (roots)").
#minimize{ 0@270: #true }.
#minimize{ 0@70: #true }.
#minimize {
    Weight@70+Priority
    Weight@70+Priority,PackageNode
    : attr("root", PackageNode),
    version_weight(PackageNode, Weight),
    build_priority(PackageNode, Priority)

@@ -1525,13 +1526,14 @@ opt_criterion(30, "non-preferred OS's").
}.

% Choose more recent versions for nodes
opt_criterion(25, "version badness").
opt_criterion(25, "version badness (non roots)").
#minimize{ 0@225: #true }.
#minimize{ 0@25: #true }.
#minimize{
    Weight@25+Priority,node(X, Package)
    : version_weight(node(X, Package), Weight),
    build_priority(node(X, Package), Priority),
    not attr("root", node(X, Package)),
    not runtime(Package)
}.
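A small worked example of the shifted-priority scheme above, using values taken directly from these hunks: a criterion at base priority P is minimized at P plus the node's build priority, so built nodes (priority 200) land in the 200-299 band while reused nodes (priority 0) stay in 0-99.

# Illustration only; the numbers mirror the 0@270 / 0@70 pattern in the diff.
BASE_VERSION_BADNESS_ROOTS = 70

for build_priority in (200, 0):  # built node vs. reused node
    effective = BASE_VERSION_BADNESS_ROOTS + build_priority
    print(effective)  # prints 270 for built nodes, 70 for reused nodes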
@@ -10,12 +10,13 @@
%=============================================================================

% A package cannot be reused if the libc is not compatible with it
:- provider(node(X, LibcPackage), node(0, "libc")),
   attr("version", node(X, LibcPackage), LibcVersion),
   attr("hash", node(R, ReusedPackage), Hash),
   % Libc packages can be reused without the "compatible_libc" attribute
   ReusedPackage != LibcPackage,
   not attr("compatible_libc", node(R, ReusedPackage), LibcPackage, LibcVersion).
error(100, "Cannot reuse {0} since we cannot determine libc compatibility", ReusedPackage)
 :- provider(node(X, LibcPackage), node(0, "libc")),
    attr("version", node(X, LibcPackage), LibcVersion),
    attr("hash", node(R, ReusedPackage), Hash),
    % Libc packages can be reused without the "compatible_libc" attribute
    ReusedPackage != LibcPackage,
    not attr("compatible_libc", node(R, ReusedPackage), LibcPackage, LibcVersion).

% Check whether the DAG has any built package
has_built_packages() :- build(X), not external(X).
@@ -19,6 +19,8 @@
    (["missing-dependency"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
    # The package uses a non-existing variant in a depends_on directive
    (["wrong-variant-in-depends-on"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
    # This package has a GitHub pull request commit patch URL
    (["invalid-github-pull-commits-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
    # This package has a GitHub patch URL without full_index=1
    (["invalid-github-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
    # This package has invalid GitLab patch URLs
@@ -22,6 +22,7 @@
import archspec.cpu

from llnl.util.filesystem import join_path, visit_directory_tree
from llnl.util.symlink import readlink

import spack.binary_distribution as bindist
import spack.caches

@@ -1062,10 +1063,10 @@ def test_tarball_common_prefix(dummy_prefix, tmpdir):
        assert set(os.listdir(os.path.join("prefix2", "share"))) == {"file"}

        # Relative symlink should still be correct
        assert os.readlink(os.path.join("prefix2", "bin", "relative_app_link")) == "app"
        assert readlink(os.path.join("prefix2", "bin", "relative_app_link")) == "app"

        # Absolute symlink should remain absolute -- this is for relocation to fix up.
        assert os.readlink(os.path.join("prefix2", "bin", "absolute_app_link")) == os.path.join(
        assert readlink(os.path.join("prefix2", "bin", "absolute_app_link")) == os.path.join(
            dummy_prefix, "bin", "app"
        )
@@ -14,6 +14,7 @@

import spack.build_environment
import spack.config
import spack.deptypes as dt
import spack.package_base
import spack.spec
import spack.util.spack_yaml as syaml

@@ -716,3 +717,21 @@ def test_build_system_globals_only_set_on_root_during_build(default_mock_concret
    for depth, spec in root.traverse(depth=True, root=True):
        for variable in build_variables:
            assert hasattr(spec.package.module, variable) == should_be_set(depth)


def test_rpath_with_duplicate_link_deps():
    """If we have two instances of one package in the same link sub-dag, only the newest version is
    rpath'ed. This is for runtime support without splicing."""
    runtime_1 = spack.spec.Spec("runtime@=1.0")
    runtime_2 = spack.spec.Spec("runtime@=2.0")
    child = spack.spec.Spec("child@=1.0")
    root = spack.spec.Spec("root@=1.0")

    root.add_dependency_edge(child, depflag=dt.LINK, virtuals=())
    root.add_dependency_edge(runtime_2, depflag=dt.LINK, virtuals=())
    child.add_dependency_edge(runtime_1, depflag=dt.LINK, virtuals=())

    rpath_deps = spack.build_environment._get_rpath_deps_from_spec(root, transitive_rpaths=True)
    assert child in rpath_deps
    assert runtime_2 in rpath_deps
    assert runtime_1 not in rpath_deps
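The test above pins down the deduplication behavior without showing its internals. A sketch of one way to keep only the newest instance per package name, assuming specs expose `.name` and a comparable `.version`; the helper below is illustrative and is not Spack's `_get_rpath_deps_from_spec`.

def newest_per_name(specs):
    """Among specs sharing a name, keep only the highest-versioned one (illustrative)."""
    best = {}
    for spec in specs:
        current = best.get(spec.name)
        if current is None or spec.version > current.version:
            best[spec.name] = spec
    return list(best.values())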
@@ -51,7 +51,7 @@ def __init__(self, response_code=200, content_to_read=[]):
        self._content = content_to_read
        self._read = [False for c in content_to_read]

    def open(self, request):
    def open(self, request, data=None, timeout=object()):
        return self

    def getcode(self):
@@ -760,7 +760,6 @@ def test_ci_rebuild_mock_success(
    rebuild_env = create_rebuild_env(tmpdir, pkg_name, broken_tests)

    monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "echo")
    monkeypatch.setattr(spack.cmd.ci, "MAKE_COMMAND", "echo")

    with rebuild_env.env_dir.as_cwd():
        activate_rebuild_env(tmpdir, pkg_name, rebuild_env)

@@ -843,7 +842,6 @@ def test_ci_rebuild(
    ci_cmd("rebuild", "--tests", fail_on_error=False)

    monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "notcommand")
    monkeypatch.setattr(spack.cmd.ci, "MAKE_COMMAND", "notcommand")
    monkeypatch.setattr(spack.cmd.ci, "INSTALL_FAIL_CODE", 127)

    with rebuild_env.env_dir.as_cwd():
@@ -11,6 +11,7 @@

import spack.caches
import spack.cmd.clean
import spack.environment as ev
import spack.main
import spack.package_base
import spack.stage

@@ -68,6 +69,20 @@ def test_function_calls(command_line, effects, mock_calls_for_clean):
    assert mock_calls_for_clean[name] == (1 if name in effects else 0)


def test_env_aware_clean(mock_stage, install_mockery, mutable_mock_env_path, monkeypatch):
    e = ev.create("test", with_view=False)
    e.add("mpileaks")
    e.concretize()

    def fail(*args, **kwargs):
        raise Exception("This should not have been called")

    monkeypatch.setattr(spack.spec.Spec, "concretize", fail)

    with e:
        clean("mpileaks")


def test_remove_python_cache(tmpdir, monkeypatch):
    cache_files = ["file1.pyo", "file2.pyc"]
    source_file = "file1.py"
@@ -15,6 +15,7 @@
import llnl.util.filesystem as fs
import llnl.util.link_tree
import llnl.util.tty as tty
from llnl.util.symlink import readlink

import spack.cmd.env
import spack.config

@@ -60,6 +61,27 @@
sep = os.sep


def setup_combined_multiple_env():
    env("create", "test1")
    test1 = ev.read("test1")
    with test1:
        add("zlib")
    test1.concretize()
    test1.write()

    env("create", "test2")
    test2 = ev.read("test2")
    with test2:
        add("libelf")
    test2.concretize()
    test2.write()

    env("create", "--include-concrete", "test1", "--include-concrete", "test2", "combined_env")
    combined = ev.read("combined_env")

    return test1, test2, combined


@pytest.fixture()
def environment_from_manifest(tmp_path):
    """Returns a new environment named 'test' from the content of a manifest file."""

@@ -369,6 +391,29 @@ def test_env_install_single_spec(install_mockery, mock_fetch):
    assert e.specs_by_hash[e.concretized_order[0]].name == "cmake-client"


@pytest.mark.parametrize("unify", [True, False, "when_possible"])
def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch):
    test1, test2, combined = setup_combined_multiple_env()

    combined.concretize()
    combined.write()

    combined.unify = unify

    with combined:
        install()

    test1_roots = test1.concretized_order
    test2_roots = test2.concretized_order
    combined_included_roots = combined.included_concretized_order

    for spec in combined.all_specs():
        assert spec.installed

    assert test1_roots == combined_included_roots[test1.path]
    assert test2_roots == combined_included_roots[test2.path]


def test_env_roots_marked_explicit(install_mockery, mock_fetch):
    install = SpackCommand("install")
    install("dependent-install")

@@ -557,6 +602,41 @@ def test_remove_command():
    assert "mpileaks@" not in find("--show-concretized")


def test_bad_remove_included_env():
    env("create", "test")
    test = ev.read("test")

    with test:
        add("mpileaks")

    test.concretize()
    test.write()

    env("create", "--include-concrete", "test", "combined_env")

    with pytest.raises(SpackCommandError):
        env("remove", "test")


def test_force_remove_included_env():
    env("create", "test")
    test = ev.read("test")

    with test:
        add("mpileaks")

    test.concretize()
    test.write()

    env("create", "--include-concrete", "test", "combined_env")

    rm_output = env("remove", "-f", "-y", "test")
    list_output = env("list")

    assert '"test" is being used by environment "combined_env"' in rm_output
    assert "test" not in list_output


def test_environment_status(capsys, tmpdir):
    with tmpdir.as_cwd():
        with capsys.disabled():

@@ -1636,6 +1716,275 @@ def test_env_without_view_install(tmpdir, mock_stage, mock_fetch, install_mocker
    check_mpileaks_and_deps_in_view(view_dir)


@pytest.mark.parametrize("env_name", [True, False])
def test_env_include_concrete_env_yaml(env_name):
    env("create", "test")
    test = ev.read("test")

    with test:
        add("mpileaks")
    test.concretize()
    test.write()

    environ = "test" if env_name else test.path

    env("create", "--include-concrete", environ, "combined_env")

    combined = ev.read("combined_env")
    combined_yaml = combined.manifest["spack"]

    assert "include_concrete" in combined_yaml
    assert test.path in combined_yaml["include_concrete"]


def test_env_bad_include_concrete_env():
    with pytest.raises(ev.SpackEnvironmentError):
        env("create", "--include-concrete", "nonexistant_env", "combined_env")


def test_env_not_concrete_include_concrete_env():
    env("create", "test")
    test = ev.read("test")

    with test:
        add("mpileaks")

    with pytest.raises(ev.SpackEnvironmentError):
        env("create", "--include-concrete", "test", "combined_env")


def test_env_multiple_include_concrete_envs():
    test1, test2, combined = setup_combined_multiple_env()

    combined_yaml = combined.manifest["spack"]

    assert test1.path in combined_yaml["include_concrete"][0]
    assert test2.path in combined_yaml["include_concrete"][1]

    # No local specs in the combined env
    assert not combined_yaml["specs"]


def test_env_include_concrete_envs_lockfile():
    test1, test2, combined = setup_combined_multiple_env()

    combined_yaml = combined.manifest["spack"]

    assert "include_concrete" in combined_yaml
    assert test1.path in combined_yaml["include_concrete"]

    with open(combined.lock_path) as f:
        lockfile_as_dict = combined._read_lockfile(f)

    assert set(
        entry["hash"] for entry in lockfile_as_dict["include_concrete"][test1.path]["roots"]
    ) == set(test1.specs_by_hash)
    assert set(
        entry["hash"] for entry in lockfile_as_dict["include_concrete"][test2.path]["roots"]
    ) == set(test2.specs_by_hash)


def test_env_include_concrete_add_env():
    test1, test2, combined = setup_combined_multiple_env()

    # create new env & concretize
    env("create", "new")
    new_env = ev.read("new")
    with new_env:
        add("mpileaks")

    new_env.concretize()
    new_env.write()

    # add new env to combined
    combined.included_concrete_envs.append(new_env.path)

    # assert things haven't changed yet
    with open(combined.lock_path) as f:
        lockfile_as_dict = combined._read_lockfile(f)

    assert new_env.path not in lockfile_as_dict["include_concrete"].keys()

    # concretize combined env with new env
    combined.concretize()
    combined.write()

    # assert changes
    with open(combined.lock_path) as f:
        lockfile_as_dict = combined._read_lockfile(f)

    assert new_env.path in lockfile_as_dict["include_concrete"].keys()


def test_env_include_concrete_remove_env():
    test1, test2, combined = setup_combined_multiple_env()

    # remove test2 from combined
    combined.included_concrete_envs = [test1.path]

    # assert test2 is still in combined's lockfile
    with open(combined.lock_path) as f:
        lockfile_as_dict = combined._read_lockfile(f)

    assert test2.path in lockfile_as_dict["include_concrete"].keys()

    # reconcretize combined
    combined.concretize()
    combined.write()

    # assert test2 is not in combined's lockfile
    with open(combined.lock_path) as f:
        lockfile_as_dict = combined._read_lockfile(f)

    assert test2.path not in lockfile_as_dict["include_concrete"].keys()


@pytest.mark.parametrize("unify", [True, False, "when_possible"])
def test_env_include_concrete_env_reconcretized(unify):
    """Double check to make sure that concrete_specs for the local specs is empty
    after reconcretizing.
    """
    _, _, combined = setup_combined_multiple_env()

    combined.unify = unify

    with open(combined.lock_path) as f:
        lockfile_as_dict = combined._read_lockfile(f)

    assert not lockfile_as_dict["roots"]
    assert not lockfile_as_dict["concrete_specs"]

    combined.concretize()
    combined.write()

    with open(combined.lock_path) as f:
        lockfile_as_dict = combined._read_lockfile(f)

    assert not lockfile_as_dict["roots"]
    assert not lockfile_as_dict["concrete_specs"]


def test_concretize_include_concrete_env():
    test1, _, combined = setup_combined_multiple_env()

    with test1:
        add("mpileaks")
    test1.concretize()
    test1.write()

    assert Spec("mpileaks") in test1.concretized_user_specs
    assert Spec("mpileaks") not in combined.included_concretized_user_specs[test1.path]

    combined.concretize()
    combined.write()

    assert Spec("mpileaks") in combined.included_concretized_user_specs[test1.path]


def test_concretize_nested_include_concrete_envs():
    env("create", "test1")
    test1 = ev.read("test1")
    with test1:
        add("zlib")
    test1.concretize()
    test1.write()

    env("create", "--include-concrete", "test1", "test2")
    test2 = ev.read("test2")
    with test2:
        add("libelf")
    test2.concretize()
    test2.write()

    env("create", "--include-concrete", "test2", "test3")
    test3 = ev.read("test3")

    with open(test3.lock_path) as f:
        lockfile_as_dict = test3._read_lockfile(f)

    assert test2.path in lockfile_as_dict["include_concrete"]
    assert test1.path in lockfile_as_dict["include_concrete"][test2.path]["include_concrete"]

    assert Spec("zlib") in test3.included_concretized_user_specs[test1.path]


def test_concretize_nested_included_concrete():
    """Confirm that nested included environments use specs concretized at
    environment creation time and change with reconcretization."""
    env("create", "test1")
    test1 = ev.read("test1")
    with test1:
        add("zlib")
    test1.concretize()
    test1.write()

    # test2 should include test1 with zlib
    env("create", "--include-concrete", "test1", "test2")
    test2 = ev.read("test2")
    with test2:
        add("libelf")
    test2.concretize()
    test2.write()

    assert Spec("zlib") in test2.included_concretized_user_specs[test1.path]

    # Modify/re-concretize test1 to replace zlib with mpileaks
    with test1:
        remove("zlib")
        add("mpileaks")
    test1.concretize()
    test1.write()

    # test3 should include the latest concretization of test1
    env("create", "--include-concrete", "test1", "test3")
    test3 = ev.read("test3")
    with test3:
        add("callpath")
    test3.concretize()
    test3.write()

    included_specs = test3.included_concretized_user_specs[test1.path]
    assert len(included_specs) == 1
    assert Spec("mpileaks") in included_specs

    # The last concretization of test4's included environments should have test2
    # with the original concretized test1 spec and test3 with the re-concretized
    # test1 spec.
    env("create", "--include-concrete", "test2", "--include-concrete", "test3", "test4")
    test4 = ev.read("test4")

    def included_included_spec(path1, path2):
        included_path1 = test4.included_concrete_spec_data[path1]
        included_path2 = included_path1["include_concrete"][path2]
        return included_path2["roots"][0]["spec"]

    included_test2_test1 = included_included_spec(test2.path, test1.path)
    assert "zlib" in included_test2_test1

    included_test3_test1 = included_included_spec(test3.path, test1.path)
    assert "mpileaks" in included_test3_test1

    # test4's concretized specs should reflect the original concretization.
    concrete_specs = [s for s, _ in test4.concretized_specs()]
    expected = [Spec(s) for s in ["libelf", "zlib", "mpileaks", "callpath"]]
    assert all(s in concrete_specs for s in expected)

    # Re-concretize test2 to reflect the new concretization of included test1
    # to remove zlib and write it out so it can be picked up by test4.
    # Re-concretize test4 to reflect the re-concretization of included test2
    # and ensure that its included specs are up-to-date
    test2.concretize()
    test2.write()
    test4.concretize()

    concrete_specs = [s for s, _ in test4.concretized_specs()]
    assert Spec("zlib") not in concrete_specs

    # Expecting mpileaks to appear only once
    expected = [Spec(s) for s in ["libelf", "mpileaks", "callpath"]]
    assert len(concrete_specs) == 3 and all(s in concrete_specs for s in expected)


def test_env_config_view_default(
    environment_from_manifest, mock_stage, mock_fetch, install_mockery
):

@@ -4066,8 +4415,8 @@ def test_env_view_resolves_identical_file_conflicts(tmp_path, install_mockery, m
    # view-file/bin/
    #   x    # expect this x to be linked

    assert os.readlink(tmp_path / "view" / "bin" / "x") == bottom.bin.x
    assert os.readlink(tmp_path / "view" / "bin" / "y") == top.bin.y
    assert readlink(tmp_path / "view" / "bin" / "x") == bottom.bin.x
    assert readlink(tmp_path / "view" / "bin" / "y") == top.bin.y


def test_env_view_ignores_different_file_conflicts(tmp_path, install_mockery, mock_fetch):

@@ -4078,4 +4427,4 @@ def test_env_view_ignores_different_file_conflicts(tmp_path, install_mockery, mo
    install()
    prefix_dependent = e.matching_spec("view-ignore-conflict").prefix
    # The dependent's file is linked into the view
    assert os.readlink(tmp_path / "view" / "bin" / "x") == prefix_dependent.bin.x
    assert readlink(tmp_path / "view" / "bin" / "x") == prefix_dependent.bin.x
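The lockfile layout these tests assert against is a nested mapping: each included environment's path maps to its "roots" (entries carrying "hash" and "spec") and, for nested inclusion, a further "include_concrete" mapping. A sketch of walking that structure, grounded in the assertions above; the traversal helper itself is illustrative.

def iter_included_roots(lockfile_as_dict, prefix=()):
    """Yield (env-path chain, hash, spec) for every included root, recursively (illustrative)."""
    for path, data in lockfile_as_dict.get("include_concrete", {}).items():
        for root in data.get("roots", []):
            yield prefix + (path,), root["hash"], root["spec"]
        # Nested included environments carry their own include_concrete section
        yield from iter_included_roots(data, prefix + (path,))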
@@ -349,6 +349,87 @@ def test_find_prefix_in_env(
    # Would throw error on regression


def test_find_specs_include_concrete_env(mutable_mock_env_path, config, mutable_mock_repo, tmpdir):
    path = tmpdir.join("spack.yaml")

    with tmpdir.as_cwd():
        with open(str(path), "w") as f:
            f.write(
                """\
spack:
  specs:
  - mpileaks
"""
            )
        env("create", "test1", "spack.yaml")

    test1 = ev.read("test1")
    test1.concretize()
    test1.write()

    with tmpdir.as_cwd():
        with open(str(path), "w") as f:
            f.write(
                """\
spack:
  specs:
  - libelf
"""
            )
        env("create", "test2", "spack.yaml")

    test2 = ev.read("test2")
    test2.concretize()
    test2.write()

    env("create", "--include-concrete", "test1", "--include-concrete", "test2", "combined_env")

    with ev.read("combined_env"):
        output = find()

    assert "No root specs" in output
    assert "Included specs" in output
    assert "mpileaks" in output
    assert "libelf" in output


def test_find_specs_nested_include_concrete_env(
    mutable_mock_env_path, config, mutable_mock_repo, tmpdir
):
    path = tmpdir.join("spack.yaml")

    with tmpdir.as_cwd():
        with open(str(path), "w") as f:
            f.write(
                """\
spack:
  specs:
  - mpileaks
"""
            )
        env("create", "test1", "spack.yaml")

    test1 = ev.read("test1")
    test1.concretize()
    test1.write()

    env("create", "--include-concrete", "test1", "test2")
    test2 = ev.read("test2")
    test2.add("libelf")
    test2.concretize()
    test2.write()

    env("create", "--include-concrete", "test2", "test3")

    with ev.read("test3"):
        output = find()

    assert "No root specs" in output
    assert "Included specs" in output
    assert "mpileaks" in output
    assert "libelf" in output


def test_find_loaded(database, working_env):
    output = find("--loaded", "--group")
    assert output == ""
@@ -384,9 +384,18 @@ def test_clang_flags():
    unsupported_flag_test("cxx17_flag", "clang@3.4")
    supported_flag_test("cxx17_flag", "-std=c++1z", "clang@3.5")
    supported_flag_test("cxx17_flag", "-std=c++17", "clang@5.0")
    unsupported_flag_test("cxx20_flag", "clang@4.0")
    supported_flag_test("cxx20_flag", "-std=c++2a", "clang@5.0")
    supported_flag_test("cxx20_flag", "-std=c++20", "clang@11.0")
    unsupported_flag_test("cxx23_flag", "clang@11.0")
    supported_flag_test("cxx23_flag", "-std=c++2b", "clang@12.0")
    supported_flag_test("cxx23_flag", "-std=c++23", "clang@17.0")
    supported_flag_test("c99_flag", "-std=c99", "clang@3.3")
    unsupported_flag_test("c11_flag", "clang@2.0")
    supported_flag_test("c11_flag", "-std=c11", "clang@6.1.0")
    unsupported_flag_test("c23_flag", "clang@8.0")
    supported_flag_test("c23_flag", "-std=c2x", "clang@9.0")
    supported_flag_test("c23_flag", "-std=c23", "clang@18.0")
    supported_flag_test("cc_pic_flag", "-fPIC", "clang@3.3")
    supported_flag_test("cxx_pic_flag", "-fPIC", "clang@3.3")
    supported_flag_test("f77_pic_flag", "-fPIC", "clang@3.3")
@@ -1914,11 +1914,11 @@ def test_version_weight_and_provenance(self):
        libc_offset = 1 if spack.solver.asp.using_libc_compatibility() else 0
        criteria = [
            (num_specs - 1 - libc_offset, None, "number of packages to build (vs. reuse)"),
            (2, 0, "version badness"),
            (2, 0, "version badness (non roots)"),
        ]

        for criterion in criteria:
            assert criterion in result.criteria, result_spec
            assert criterion in result.criteria, criterion
        assert result_spec.satisfies("^b@1.0")

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")

@@ -2464,6 +2464,7 @@ def test_spec_with_build_dep_from_json(self, tmp_path):
        assert s["dttop"].dag_hash() == build_dep.dag_hash()

    @pytest.mark.regression("44040")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_exclude_specs_from_reuse(self, monkeypatch):
        """Tests that we can exclude a spec from reuse when concretizing, and that the spec
        is not added back to the solve as a dependency of another reusable spec.

@@ -2503,6 +2504,72 @@ def test_exclude_specs_from_reuse(self, monkeypatch):
        for dep in result["dyninst"].traverse(root=False):
            assert dep.dag_hash() == reused[dep.name].dag_hash()

    @pytest.mark.regression("44091")
    @pytest.mark.parametrize(
        "included_externals",
        [
            ["deprecated-versions"],
            # Try the empty list, to ensure that in that case everything will be included
            # since filtering should happen only when the list is non-empty
            [],
        ],
    )
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_include_specs_from_externals_and_libcs(
        self, included_externals, mutable_config, tmp_path
    ):
        """Tests that when we include specs from externals, we always include libcs."""
        mutable_config.set(
            "packages",
            {
                "deprecated-versions": {
                    "externals": [{"spec": "deprecated-versions@1.1.0", "prefix": str(tmp_path)}]
                }
            },
        )
        request_str = "deprecated-client"

        # When using the external the version is selected even if deprecated
        with spack.config.override(
            "concretizer:reuse", {"from": [{"type": "external", "include": included_externals}]}
        ):
            result = Spec(request_str).concretized()

        assert result["deprecated-versions"].satisfies("@1.1.0")

        # When excluding it, we pick the non-deprecated version
        with spack.config.override(
            "concretizer:reuse",
            {"from": [{"type": "external", "exclude": ["deprecated-versions"]}]},
        ):
            result = Spec(request_str).concretized()

        assert result["deprecated-versions"].satisfies("@1.0.0")

    @pytest.mark.regression("44085")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_can_reuse_concrete_externals_for_dependents(self, mutable_config, tmp_path):
        """Test that external specs that are in the DB can be reused. This means they are
        preferred to concretizing another external from packages.yaml
        """
        packages_yaml = {
            "externaltool": {"externals": [{"spec": "externaltool@2.0", "prefix": "/fake/path"}]}
        }
        mutable_config.set("packages", packages_yaml)
        # Concretize with gcc@9 to get a suboptimal spec, since we have gcc@10 available
        external_spec = Spec("externaltool@2 %gcc@9").concretized()
        assert external_spec.external

        root_specs = [Spec("sombrero")]
        with spack.config.override("concretizer:reuse", True):
            solver = spack.solver.asp.Solver()
            setup = spack.solver.asp.SpackSolverSetup()
            result, _, _ = solver.driver.solve(setup, root_specs, reuse=[external_spec])

        assert len(result.specs) == 1
        sombrero = result.specs[0]
        assert sombrero["externaltool"].dag_hash() == external_spec.dag_hash()


@pytest.fixture()
def duplicates_test_repository():
@@ -1176,3 +1176,46 @@ def test_forward_multi_valued_variant_using_requires(

    for constraint in not_expected:
        assert not s.satisfies(constraint)


def test_strong_preferences_higher_priority_than_reuse(concretize_scope, mock_packages):
    """Tests that strong preferences have a higher priority than reusing specs."""
    reused_spec = Spec("adios2~bzip2").concretized()
    reuse_nodes = list(reused_spec.traverse())
    root_specs = [Spec("ascent+adios2")]

    # Check that without further configuration adios2 is reused
    with spack.config.override("concretizer:reuse", True):
        solver = spack.solver.asp.Solver()
        setup = spack.solver.asp.SpackSolverSetup()
        result, _, _ = solver.driver.solve(setup, root_specs, reuse=reuse_nodes)
    ascent = result.specs[0]
    assert ascent["adios2"].dag_hash() == reused_spec.dag_hash(), ascent

    # If we stick a preference, adios2 is not reused
    update_packages_config(
        """
packages:
  adios2:
    prefer:
    - "+bzip2"
"""
    )
    with spack.config.override("concretizer:reuse", True):
        solver = spack.solver.asp.Solver()
        setup = spack.solver.asp.SpackSolverSetup()
        result, _, _ = solver.driver.solve(setup, root_specs, reuse=reuse_nodes)
    ascent = result.specs[0]

    assert ascent["adios2"].dag_hash() != reused_spec.dag_hash()
    assert ascent["adios2"].satisfies("+bzip2")

    # A preference is still a preference, so we can override it from the input spec
    with spack.config.override("concretizer:reuse", True):
        solver = spack.solver.asp.Solver()
        setup = spack.solver.asp.SpackSolverSetup()
        result, _, _ = solver.driver.solve(
            setup, [Spec("ascent+adios2 ^adios2~bzip2")], reuse=reuse_nodes
        )
    ascent = result.specs[0]
    assert ascent["adios2"].dag_hash() == reused_spec.dag_hash(), ascent
@@ -2053,3 +2053,11 @@ def _true(x):
@pytest.fixture()
def do_not_check_runtimes_on_reuse(monkeypatch):
    monkeypatch.setattr(spack.solver.asp, "_has_runtime_dependencies", _true)


@pytest.fixture(autouse=True, scope="session")
def _c_compiler_always_exists():
    fn = spack.solver.asp.c_compiler_runs
    spack.solver.asp.c_compiler_runs = _true
    yield
    spack.solver.asp.c_compiler_runs = fn
@@ -813,3 +813,33 @@ def test_deconcretize_then_concretize_does_not_error(mutable_mock_env_path, mock
    assert len(e.concrete_roots()) == 3
    all_root_hashes = set(x.dag_hash() for x in e.concrete_roots())
    assert len(all_root_hashes) == 2


@pytest.mark.regression("44216")
@pytest.mark.only_clingo()
def test_root_version_weights_for_old_versions(mutable_mock_env_path, mock_packages):
    """Tests that, when we select two old versions of root specs that have the same version
    optimization penalty, both are considered.
    """
    mutable_mock_env_path.mkdir()
    spack_yaml = mutable_mock_env_path / ev.manifest_name
    spack_yaml.write_text(
        """spack:
  specs:
  # allow any version, but the most recent
  - bowtie@:1.3
  # allows only the third most recent, so penalty is 2
  - gcc@1
  concretizer:
    unify: true
"""
    )
    e = ev.Environment(mutable_mock_env_path)
    with e:
        e.concretize()

    bowtie = [x for x in e.concrete_roots() if x.name == "bowtie"][0]
    gcc = [x for x in e.concrete_roots() if x.name == "gcc"][0]

    assert bowtie.satisfies("@=1.3.0")
    assert gcc.satisfies("@=1.0")
@@ -14,7 +14,7 @@
import pytest

import llnl.util.filesystem as fs
from llnl.util.symlink import islink, symlink
from llnl.util.symlink import islink, readlink, symlink

import spack.paths

@@ -181,7 +181,7 @@ def test_symlinks_true(self, stage):

        assert os.path.exists("dest/a/b2")
        with fs.working_dir("dest/a"):
            assert os.path.exists(os.readlink("b2"))
            assert os.path.exists(readlink("b2"))

        assert os.path.realpath("dest/f/2") == os.path.abspath("dest/a/b/2")
        assert os.path.realpath("dest/2") == os.path.abspath("dest/1")

@@ -281,7 +281,7 @@ def test_allow_broken_symlinks(self, stage):
        symlink("nonexistant.txt", "source/broken", allow_broken_symlinks=True)
        fs.install_tree("source", "dest", symlinks=True, allow_broken_symlinks=True)
        assert os.path.islink("dest/broken")
        assert not os.path.exists(os.readlink("dest/broken"))
        assert not os.path.exists(readlink("dest/broken"))

    def test_glob_src(self, stage):
        """Test using a glob as the source."""
@@ -7,6 +7,8 @@

import pytest

from llnl.util.symlink import readlink

import spack.cmd.modules
import spack.config
import spack.error

@@ -78,7 +80,7 @@ def test_modules_default_symlink(

    link_path = os.path.join(os.path.dirname(mock_module_filename), "default")
    assert os.path.islink(link_path)
    assert os.readlink(link_path) == mock_module_filename
    assert readlink(link_path) == mock_module_filename

    generator.remove()
    assert not os.path.lexists(link_path)
@@ -151,7 +151,9 @@ class InMemoryOCIRegistry(DummyServer):
    A third option is to use the chunked upload, but this is not implemented here, because
    it's typically a major performance hit in upload speed, so we're not using it in Spack."""

    def __init__(self, domain: str, allow_single_post: bool = True) -> None:
    def __init__(
        self, domain: str, allow_single_post: bool = True, tags_per_page: int = 100
    ) -> None:
        super().__init__(domain)
        self.router.register("GET", r"/v2/", self.index)
        self.router.register("HEAD", r"/v2/(?P<name>.+)/blobs/(?P<digest>.+)", self.head_blob)

@@ -165,6 +167,9 @@ def __init__(self, domain: str, allow_single_post: bool = True) -> None:
        # If True, allow single POST upload, not all registries support this
        self.allow_single_post = allow_single_post

        # How many tags are returned in a single request
        self.tags_per_page = tags_per_page

        # Used for POST + PUT upload. This is a map from session ID to image name
        self.sessions: Dict[str, str] = {}

@@ -280,10 +285,34 @@ def handle_upload(self, req: Request, name: str, digest: Digest):
        return MockHTTPResponse(201, "Created", headers={"Location": f"/v2/{name}/blobs/{digest}"})

    def list_tags(self, req: Request, name: str):
        # Paginate using Link headers, this was added to the spec in the following commit:
        # https://github.com/opencontainers/distribution-spec/commit/2ed79d930ecec11dd755dc8190409a3b10f01ca9

        # List all tags, exclude digests.
        tags = [_tag for _name, _tag in self.manifests.keys() if _name == name and ":" not in _tag]
        tags.sort()
        return MockHTTPResponse.with_json(200, "OK", body={"tags": tags})
        all_tags = sorted(
            _tag for _name, _tag in self.manifests.keys() if _name == name and ":" not in _tag
        )

        query = urllib.parse.parse_qs(urllib.parse.urlparse(req.full_url).query)

        n = int(query["n"][0]) if "n" in query else self.tags_per_page

        if "last" in query:
            try:
                offset = all_tags.index(query["last"][0]) + 1
            except ValueError:
                return MockHTTPResponse(404, "Not found")
        else:
            offset = 0

        tags = all_tags[offset : offset + n]

        if offset + n < len(all_tags):
            headers = {"Link": f'</v2/{name}/tags/list?last={tags[-1]}&n={n}>; rel="next"'}
        else:
            headers = None

        return MockHTTPResponse.with_json(200, "OK", headers=headers, body={"tags": tags})


class DummyServerUrllibHandler(urllib.request.BaseHandler):
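The client-side counterpart to this paginated endpoint is to fetch a page, read the Link header, and follow its rel="next" URI until there is none. Spack's own list_tags does this; the sketch below is a generic illustration, assuming parse_link_rel_next lives in spack.util.url (as the tests' url_util alias suggests) and that the next link may be relative to the current URL.

import json
import urllib.parse
import urllib.request

from spack.util.url import parse_link_rel_next


def fetch_all_tags(url: str, urlopen=urllib.request.urlopen):
    """Collect tags from every page of a /v2/<name>/tags/list endpoint (illustrative)."""
    tags = []
    while url:
        response = urlopen(url)
        tags.extend(json.loads(response.read())["tags"])
        link = response.headers.get("Link")
        next_url = parse_link_rel_next(link) if link else None
        # Resolve a relative next link against the page we just fetched
        url = urllib.parse.urljoin(url, next_url) if next_url else None
    return tags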
@@ -6,6 +6,7 @@

import hashlib
import json
import random
import urllib.error
import urllib.parse
import urllib.request

@@ -19,6 +20,7 @@
    copy_missing_layers,
    get_manifest_and_config,
    image_from_mirror,
    list_tags,
    upload_blob,
    upload_manifest,
)

@@ -670,3 +672,31 @@ def test_retry(url, max_retries, expect_failure, expect_requests):

    assert len(server.requests) == expect_requests
    assert sleep_time == [2**i for i in range(expect_requests - 1)]


def test_list_tags():
    # Follows a relatively new rewording of the OCI distribution spec, which is not yet tagged.
    # https://github.com/opencontainers/distribution-spec/commit/2ed79d930ecec11dd755dc8190409a3b10f01ca9
    N = 20
    urlopen = create_opener(InMemoryOCIRegistry("example.com", tags_per_page=5)).open
    image = ImageReference.from_string("example.com/image")
    to_tag = lambda i: f"tag-{i:02}"

    # Create N tags in arbitrary order
    _tags_to_create = [to_tag(i) for i in range(N)]
    random.shuffle(_tags_to_create)
    for tag in _tags_to_create:
        upload_manifest(image.with_tag(tag), default_manifest(), tag=True, _urlopen=urlopen)

    # list_tags should return all tags from all pages in order
    tags = list_tags(image, urlopen)
    assert len(tags) == N
    assert [to_tag(i) for i in range(N)] == tags

    # Test a single request, which should give the first 5 tags
    assert json.loads(urlopen(image.tags_url()).read())["tags"] == [to_tag(i) for i in range(5)]

    # Test response at an offset, which should exclude the `last` tag.
    assert json.loads(urlopen(image.tags_url() + f"?last={to_tag(N - 3)}").read())["tags"] == [
        to_tag(i) for i in range(N - 2, N)
    ]
@@ -16,7 +16,7 @@
import pytest

from llnl.util import filesystem as fs
from llnl.util.symlink import symlink
from llnl.util.symlink import readlink, symlink

import spack.binary_distribution as bindist
import spack.cmd.buildcache as buildcache

@@ -181,12 +181,12 @@ def test_relocate_links(tmpdir):
    relocate_links(["to_self", "to_dependency", "to_system"], prefix_to_prefix)

    # These two are relocated
    assert os.readlink("to_self") == str(tmpdir.join("new_prefix_a", "file"))
    assert os.readlink("to_dependency") == str(tmpdir.join("new_prefix_b", "file"))
    assert readlink("to_self") == str(tmpdir.join("new_prefix_a", "file"))
    assert readlink("to_dependency") == str(tmpdir.join("new_prefix_b", "file"))

    # These two are not.
    assert os.readlink("to_system") == system_path
    assert os.readlink("to_self_but_relative") == "relative"
    assert readlink("to_system") == system_path
    assert readlink("to_self_but_relative") == "relative"


def test_needs_relocation():
@@ -15,6 +15,7 @@
import pytest

from llnl.util.filesystem import getuid, mkdirp, partition_path, touch, working_dir
from llnl.util.symlink import readlink

import spack.error
import spack.paths

@@ -872,7 +873,7 @@ def _create_files_from_tree(base, tree):

def _create_tree_from_dir_recursive(path):
    if os.path.islink(path):
        return os.readlink(path)
        return readlink(path)
    elif os.path.isdir(path):
        tree = {}
        for name in os.listdir(path):
@@ -87,6 +87,13 @@ def test_url_strip_name_suffixes(url, version, expected):
        59,
        "https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow",
    ),
    (
        "hpcviewer",
        30,
        "2024.02",
        51,
        "https://gitlab.com/hpctoolkit/hpcviewer/-/releases/2024.02/downloads/hpcviewer.tgz",
    ),
    # Version in stem
    ("zlib", 24, "1.2.10", 29, "http://zlib.net/fossils/zlib-1.2.10.tar.gz"),
    (
@@ -207,3 +207,29 @@ def test_default_download_name_dot_dot():
    assert url_util.default_download_filename("https://example.com/.") == "_"
    assert url_util.default_download_filename("https://example.com/..") == "_."
    assert url_util.default_download_filename("https://example.com/.abcdef") == "_abcdef"


def test_parse_link_rel_next():
    parse = url_util.parse_link_rel_next
    assert parse(r'</abc>; rel="next"') == "/abc"
    assert parse(r'</abc>; x=y; rel="next", </def>; x=y; rel="prev"') == "/abc"
    assert parse(r'</abc>; rel="prev"; x=y, </def>; x=y; rel="next"') == "/def"

    # example from RFC5988
    assert (
        parse(
            r"""</TheBook/chapter2>; title*=UTF-8'de'letztes%20Kapitel; rel="previous","""
            r"""</TheBook/chapter4>; title*=UTF-8'de'n%c3%a4chstes%20Kapitel; rel="next" """
        )
        == "/TheBook/chapter4"
    )

    assert (
        parse(r"""<https://example.com/example>; key=";a=b, </c/d>; e=f"; rel="next" """)
        == "https://example.com/example"
    )

    assert parse("https://example.com/example") is None
    assert parse("<https://example.com/example; broken=broken") is None
    assert parse("https://example.com/example; rel=prev") is None
    assert parse("https://example.com/example; a=b; c=d; g=h") is None
@@ -258,7 +258,9 @@ def parse_version_offset(path):
        # 9th Pass: Version in path
        # github.com/repo/name/releases/download/vver/name
        # e.g. https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow
        # e.g. https://gitlab.com/hpctoolkit/hpcviewer/-/releases/2024.02/downloads/hpcviewer.tgz
        (r"github\.com/[^/]+/[^/]+/releases/download/[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)/", path),
        (r"gitlab\.com/[^/]+/.+/-/releases/[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)/downloads/", path),
        # e.g. ftp://ftp.ncbi.nlm.nih.gov/blast/executables/legacy.NOTSUPPORTED/2.2.26/ncbi.tar.gz
        (r"(\d[\da-zA-Z._-]*)/[^/]+$", path),
    ]
@@ -12,6 +12,8 @@
from gzip import GzipFile
from typing import Callable, Dict, Tuple

from llnl.util.symlink import readlink


class ChecksumWriter(io.BufferedIOBase):
    """Checksum writer computes a checksum while writing to a file."""

@@ -193,12 +195,14 @@ def reproducible_tarfile_from_prefix(
        file_info = tarfile.TarInfo(path_to_name(entry.path))

        if entry.is_symlink():
            file_info.type = tarfile.SYMTYPE
            file_info.linkname = os.readlink(entry.path)
            # strip off long path reg prefix on Windows
            link_dest = readlink(entry.path)
            file_info.linkname = link_dest
            # According to POSIX: "the value of the file mode bits returned in the
            # st_mode field of the stat structure is unspecified." So we set it to
            # something sensible without lstat'ing the link.
            file_info.mode = 0o755
            file_info.type = tarfile.SYMTYPE
            tar.addfile(file_info)

        elif entry.is_file(follow_symlinks=False):
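A condensed illustration of the determinism point made in the comment above: two archives of the same tree should be byte-identical, so a symlink's tar entry gets a fixed mode rather than whatever lstat() happens to report on a given filesystem. The entry name and link target below are placeholder values.

import tarfile

info = tarfile.TarInfo("bin/app-link")  # placeholder archive member name
info.type = tarfile.SYMTYPE
info.linkname = "app"  # placeholder link target
info.mode = 0o755  # fixed mode, deliberately not taken from the filesystem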
@@ -21,16 +21,6 @@ def get_version_lines(version_hashes_dict: dict, url_dict: Optional[dict] = None
    version_lines = []

    for v, h in version_hashes_dict.items():
        expand_arg = ""

        # Extract the url for a version if url_dict is provided.
        url = ""
        if url_dict is not None and v in url_dict:
            url = url_dict[v]

        # Add expand_arg since wheels should not be expanded during staging
        if url.endswith(".whl") or ".whl#" in url:
            expand_arg = ", expand=False"
        version_lines.append(f'    version("{v}", sha256="{h}"{expand_arg})')
        version_lines.append(f'    version("{v}", sha256="{h}")')

    return "\n".join(version_lines)
@@ -8,6 +8,8 @@
import os
import re

import llnl.util.filesystem

import spack.error
import spack.paths
import spack.util.executable

@@ -385,7 +387,7 @@ def _socket_dir(gpgconf):
        os.mkdir(var_run_user)
        os.chmod(var_run_user, 0o777)

    user_dir = os.path.join(var_run_user, str(os.getuid()))
    user_dir = os.path.join(var_run_user, str(llnl.util.filesystem.getuid()))

    if not os.path.exists(user_dir):
        os.mkdir(user_dir)
@@ -22,7 +22,7 @@ def _libc_from_ldd(ldd: str) -> Optional["spack.spec.Spec"]:
    except Exception:
        return None

    if not re.search(r"\b(?:gnu|glibc|arm)\b", stdout, re.IGNORECASE):
    if not re.search(r"\bFree Software Foundation\b", stdout):
        return None

    version_str = re.match(r".+\(.+\) (.+)", stdout)

@@ -75,7 +75,7 @@ def libc_from_dynamic_linker(dynamic_linker: str) -> Optional["spack.spec.Spec"]
            return spec
        except Exception:
            return None
    elif re.search(r"\b(?:gnu|glibc|arm)\b", stdout, re.IGNORECASE):
    elif re.search(r"\bFree Software Foundation\b", stdout):
        # output is like "ld.so (...) stable release version 2.33."
        match = re.search(r"version (\d+\.\d+(?:\.\d+)?)", stdout)
        if not match:
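The detection flow in these hunks boils down to: run ldd or the dynamic linker, require the GNU "Free Software Foundation" marker in the output, then extract a version such as "2.33" from text like 'ld.so (...) stable release version 2.33.'. The regexes below mirror the diff; the wrapper function itself is an illustrative assumption.

import re
from typing import Optional


def glibc_version_from_output(stdout: str) -> Optional[str]:
    """Pull a glibc version out of ldd / ld.so --version output (illustrative)."""
    # Only GNU libc prints this marker; other libcs are rejected
    if not re.search(r"\bFree Software Foundation\b", stdout):
        return None
    match = re.search(r"version (\d+\.\d+(?:\.\d+)?)", stdout)
    return match.group(1) if match else None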
@@ -296,8 +296,8 @@ def process_scalar(self):
        if marked(self.event.value):
            self.saved = self.event.value

    def write_line_break(self):
        super().write_line_break()
    def write_line_break(self, data=None):
        super().write_line_break(data)
        if self.saved is None:
            _ANNOTATIONS.append(colorize("@K{---}"))
            return
@@ -10,9 +10,11 @@
import itertools
import os
import posixpath
import re
import sys
import urllib.parse
import urllib.request
from typing import Optional

from llnl.path import convert_to_posix_path

@@ -76,14 +78,7 @@ def is_path_instead_of_url(path_or_url):
    """Historically some config files and spack commands used paths
    where urls should be used. This utility can be used to validate
    and promote paths to urls."""
    scheme = urllib.parse.urlparse(path_or_url).scheme

    # On non-Windows, no scheme means it's likely a path
    if not sys.platform == "win32":
        return not scheme

    # On Windows, we may have drive letters.
    return "A" <= scheme <= "Z"
    return not validate_scheme(urllib.parse.urlparse(path_or_url).scheme)


def format(parsed_url):

@@ -261,3 +256,43 @@ def default_download_filename(url: str) -> str:
        valid_name = "_" + valid_name[1:]

    return valid_name


def parse_link_rel_next(link_value: str) -> Optional[str]:
    """Return the next link from a Link header value, if any."""

    # Relaxed version of RFC5988
    uri = re.compile(r"\s*<([^>]+)>\s*")
    param_key = r"[^;=\s]+"
    quoted_string = r"\"([^\"]+)\""
    unquoted_param_value = r"([^;,\s]+)"
    param = re.compile(rf";\s*({param_key})\s*=\s*(?:{quoted_string}|{unquoted_param_value})\s*")

    data = link_value

    # Parse a list of <url>; key=value; key=value, <url>; key=value; key=value, ... links.
    while True:
        uri_match = re.match(uri, data)
        if not uri_match:
            break
        uri_reference = uri_match.group(1)
        data = data[uri_match.end() :]

        # Parse parameter list
        while True:
            param_match = re.match(param, data)
            if not param_match:
                break
            key, quoted_value, unquoted_value = param_match.groups()
            value = quoted_value or unquoted_value
            data = data[param_match.end() :]

            if key == "rel" and value == "next":
                return uri_reference

        if not data.startswith(","):
            break

        data = data[1:]

    return None
@@ -9,6 +9,7 @@
from typing import Any, Dict

import llnl.util.tty as tty
from llnl.util.symlink import readlink

import spack.filesystem_view
import spack.store

@@ -38,7 +39,7 @@ def create_manifest_entry(path: str) -> Dict[str, Any]:
    data: Dict[str, Any] = {"mode": s.st_mode, "owner": s.st_uid, "group": s.st_gid}

    if stat.S_ISLNK(s.st_mode):
        data["dest"] = os.readlink(path)
        data["dest"] = readlink(path)

    elif stat.S_ISREG(s.st_mode):
        data["hash"] = compute_hash(path)

@@ -90,7 +91,7 @@ def check_entry(path, data):
    # instead of `lstat(...).st_mode`. So, ignore mode errors for symlinks.
    if not stat.S_ISLNK(s.st_mode) and s.st_mode != data["mode"]:
        res.add_error(path, "mode")
    elif stat.S_ISLNK(s.st_mode) and os.readlink(path) != data.get("dest"):
    elif stat.S_ISLNK(s.st_mode) and readlink(path) != data.get("dest"):
        res.add_error(path, "link")
    elif stat.S_ISREG(s.st_mode):
        # Check file contents against hash and listed as file
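A condensed, standalone view of the manifest logic in this hunk: symlinks record their destination (via the symlink-safe readlink), regular files record a content hash. compute_hash is Spack's helper; the sha256 stand-in below is an assumption for illustration.

import hashlib
import os
import stat

from llnl.util.symlink import readlink


def manifest_entry(path: str) -> dict:
    """Record mode/owner/group plus a link dest or content hash (illustrative)."""
    s = os.lstat(path)
    data = {"mode": s.st_mode, "owner": s.st_uid, "group": s.st_gid}
    if stat.S_ISLNK(s.st_mode):
        data["dest"] = readlink(path)
    elif stat.S_ISREG(s.st_mode):
        with open(path, "rb") as f:
            data["hash"] = hashlib.sha256(f.read()).hexdigest()
    return data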
@@ -64,6 +64,11 @@ default:
|
||||
SPACK_TARGET_PLATFORM: "linux"
|
||||
SPACK_TARGET_ARCH: "ppc64le"
|
||||
|
||||
.win64-msvc2019:
|
||||
variables:
|
||||
SPACK_TARGET_PLATFORM: "win64"
|
||||
SPACK_TARGET_ARCH: "x86_64"
|
||||
|
||||
########################################
|
||||
# Job templates
|
||||
########################################
|
||||
@@ -72,6 +77,8 @@ default:
|
||||
PIPELINE_MIRROR_TEMPLATE: "single-src-protected-mirrors.yaml.in"
|
||||
# TODO: We can remove this when we drop the "deprecated" stack
|
||||
PUSH_BUILDCACHE_DEPRECATED: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}"
|
||||
SPACK_CI_CONFIG_ROOT: "${CI_PROJECT_DIR}/share/spack/gitlab/cloud_pipelines/configs"
|
||||
SPACK_CI_SCRIPTS_ROOT: "${CI_PROJECT_DIR}/share/spack/gitlab/cloud_pipelines/scripts"
|
||||
|
||||
rules:
|
||||
- if: $SPACK_CI_DISABLE_STACKS =~ /.+/ && $SPACK_CI_STACK_NAME =~ $SPACK_CI_DISABLE_STACKS
|
||||
@@ -114,16 +121,8 @@ default:
|
||||
.generate-common:
|
||||
stage: generate
|
||||
script:
|
||||
- uname -a || true
|
||||
- grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
|
||||
- nproc || true
|
||||
- cat /proc/loadavg || true
|
||||
- cat /proc/meminfo | grep 'MemTotal\|MemFree' || true
|
||||
- . "./share/spack/setup-env.sh"
|
||||
- spack --version
|
||||
- cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
|
||||
- spack env activate --without-view .
|
||||
- export SPACK_CI_CONFIG_ROOT="${SPACK_ROOT}/share/spack/gitlab/cloud_pipelines/configs"
|
||||
- spack env activate --without-view share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
|
||||
- spack
|
||||
--config-scope "${SPACK_CI_CONFIG_ROOT}"
|
||||
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}"
|
||||
@@ -134,29 +133,25 @@ default:
|
||||
--config-scope "${SPACK_CI_CONFIG_ROOT}"
|
||||
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}"
|
||||
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}"
|
||||
${CI_STACK_CONFIG_SCOPES}
|
||||
audit configs
|
||||
- spack python -c "import os,sys; print(os.path.expandvars(sys.stdin.read()))"
|
||||
< "${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}" > "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml"
|
||||
# Command below needs to be `spack python` due to naming differences accross platforms
|
||||
- spack python ${SPACK_CI_SCRIPTS_ROOT}/common/expand_vars.py
|
||||
"${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}"
|
||||
"${SPACK_CI_CONFIG_ROOT}/mirrors.yaml"
|
||||
- spack config add -f "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml"
|
||||
- mkdir -p "${CI_PROJECT_DIR}/jobs_scratch_dir"
|
||||
- mkdir "${CI_PROJECT_DIR}/jobs_scratch_dir"
|
||||
- spack
|
||||
--config-scope "${SPACK_CI_CONFIG_ROOT}"
|
||||
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}"
|
||||
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}"
|
||||
${CI_STACK_CONFIG_SCOPES}
|
||||
config blame > "${CI_PROJECT_DIR}/jobs_scratch_dir/spack.yaml.blame"
|
||||
- spack -v --color=always
|
||||
--config-scope "${SPACK_CI_CONFIG_ROOT}"
|
||||
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}"
|
||||
--config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}"
|
||||
${CI_STACK_CONFIG_SCOPES}
|
||||
ci generate --check-index-only
|
||||
--artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
|
||||
--output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
|
||||
after_script:
|
||||
- cat /proc/loadavg || true
|
||||
- cat /proc/meminfo | grep 'MemTotal\|MemFree' || true
|
||||
artifacts:
|
||||
paths:
|
||||
- "${CI_PROJECT_DIR}/jobs_scratch_dir"
|
||||
@@ -179,6 +174,16 @@ default:
|
||||
# Generate without tags for cases using external runners
|
||||
.generate-base:
|
||||
extends: [ ".base-job", ".generate-common" ]
|
||||
before_script:
|
||||
- uname -a || true
|
||||
- grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
|
||||
- nproc || true
|
||||
- cat /proc/loadavg || true
|
||||
- cat /proc/meminfo | grep 'MemTotal\|MemFree' || true
|
||||
- . "./share/spack/setup-env.sh"
|
||||
after_script:
|
||||
- cat /proc/loadavg || true
|
||||
- cat /proc/meminfo | grep 'MemTotal\|MemFree' || true
|
||||
|
||||
.generate-x86_64:
|
||||
extends: [ ".generate-base" ]
|
||||
@@ -196,6 +201,25 @@ default:
|
||||
extends: [ ".generate-base" ]
|
||||
tags: ["spack", "public", "medium", "neoverse_v2"]
|
||||
|
||||
.generate-win64:
extends: [ ".base-job", ".generate-common" ]
before_script:
- $ErrorActionOld=$ErrorActionPreference
- $ErrorActionPreference="SilentlyContinue"
- python -c"import psutil;print(psutil.getloadavg())"
- (Get-WmiObject Win32_PhysicalMemory | measure-object Capacity -sum).sum/1kb
- $ErrorActionPreference=$ErrorActionOld
- . .\share\spack\setup-env.ps1
after_script:
- $ErrorActionOld=$ErrorActionPreference
- $ErrorActionPreference="SilentlyContinue"
- python -c"import psutil;print(psutil.getloadavg())"
- (Get-WmiObject Win32_PhysicalMemory | measure-object Capacity -sum).sum/1kb
- $ErrorActionPreference=$ErrorActionOld

tags: ["spack", "public", "medium", "x86_64-win"]
image: "ghcr.io/johnwparent/windows-server21h2:sha-c749cf3"

.generate-deprecated:
extends: [ ".base-job" ]
stage: generate
@@ -718,7 +742,7 @@ tutorial-build:

ml-linux-x86_64-cpu-generate:
extends: [ ".generate-x86_64", .ml-linux-x86_64-cpu, ".tags-x86_64_v4" ]
image: ghcr.io/spack/linux-ubuntu22.04-x86_64_v2:v2024-01-29
image: ghcr.io/spack/ubuntu-22.04:v2024-05-07

ml-linux-x86_64-cpu-build:
extends: [ ".build", ".ml-linux-x86_64-cpu" ]
@@ -741,7 +765,7 @@ ml-linux-x86_64-cpu-build:

ml-linux-x86_64-cuda-generate:
extends: [ ".generate-x86_64", .ml-linux-x86_64-cuda, ".tags-x86_64_v4" ]
image: ghcr.io/spack/linux-ubuntu22.04-x86_64_v2:v2024-01-29
image: ghcr.io/spack/ubuntu-22.04:v2024-05-07

ml-linux-x86_64-cuda-build:
extends: [ ".build", ".ml-linux-x86_64-cuda" ]
@@ -859,6 +883,15 @@ aws-pcluster-build-neoverse_v1:
- echo $PATH
- module avail
- module list
- uname -a || true
- grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
- nproc || true
- cat /proc/loadavg || true
- cat /proc/meminfo | grep 'MemTotal\|MemFree' || true
- . "./share/spack/setup-env.sh"
after_script:
- cat /proc/loadavg || true
- cat /proc/meminfo | grep 'MemTotal\|MemFree' || true

.generate-cray-rhel:
tags: [ "cray-rhel-zen4", "public" ]
@@ -912,3 +945,25 @@ e4s-cray-sles-build:
needs:
- artifacts: True
job: e4s-cray-sles-generate

#######################################
# Windows Visualization Tools
#######################################
.windows-vis:
extends: [".win64-msvc2019"]
variables:
SPACK_CI_STACK_NAME: windows-vis

windows-vis-generate:
extends: [ ".generate-win64", ".windows-vis" ]

windows-vis-build:
extends: [ ".build", ".windows-vis"]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: windows-vis-generate
strategy: depend
needs:
- artifacts: True
job: windows-vis-generate

@@ -241,7 +241,6 @@ ci:
- mvapich2
- netlib-scalapack
- omega-h
- openblas
- openjpeg
- openmpi
- openpmd-api
@@ -394,7 +393,6 @@ ci:
- ncurses
- ninja
- numactl
- openblas
- openjdk
- openssh
- openssl

18 share/spack/gitlab/cloud_pipelines/configs/win64/ci.yaml Normal file
@@ -0,0 +1,18 @@
ci:
pipeline-gen:
- build-job:
after_script::
- Write-Output "Done"

before_script::
- fsutil 8dot3name set C:\ 0
- . .\share\spack\setup-env.ps1
- If (Test-Path -path C:\\key\intermediate_ci_signing_key.gpg) { spack.ps1 gpg trust C:\\key\intermediate_ci_signing_key.gpg }
- If (Test-Path -path C:\\key\spack_public_key.gpg) { spack.ps1 gpg trust C:\\key\spack_public_key.gpg }

script::
- spack.ps1 env activate --without-view ${SPACK_CONCRETE_ENV_DIR}
- spack.ps1 config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{hash}'"
- mkdir ${SPACK_ARTIFACTS_ROOT}/user_data
- spack.ps1 --backtrace ci rebuild | Tee-Object -FilePath "${env:SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt" 2>&1 | Tee-Object -FilePath "${env:SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt"
image: "ghcr.io/johnwparent/windows-server21h2:sha-c749cf3"
10 share/spack/gitlab/cloud_pipelines/configs/win64/config.yaml Normal file
@@ -0,0 +1,10 @@
config:
build_stage::
- 'C:/spack stage'
install_tree:
root: "C:/spack install"
# Path lengths on Windows don't support much padding
padded_length: 0
# Reduce the projections to include only the hash, to avoid path length issues
projections:
all: '{hash}'
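The padded_length: 0 and hash-only projection above work around the classic 260-character Windows MAX_PATH limit. A back-of-the-envelope budget in Python, with illustrative numbers (the payload path and hash width are assumptions, not pipeline values):

# Rough path-length budget for the install tree configured above.
MAX_PATH = 260  # classic Windows limit without long-path support

root = r"C:\spack install"   # install_tree root from the config above
hash_len = 32                # a '{hash}' projection is one fixed-width hash component
deep_file = r"\include\some\nested\header-with-a-long-name.hpp"  # hypothetical payload

used = len(root) + 1 + hash_len + len(deep_file)
print(f"{used} of {MAX_PATH} characters used")
# A hash-only prefix leaves ample headroom; hundreds of characters of
# padding (as used elsewhere for relocation) would blow past the limit.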
@@ -0,0 +1,25 @@
packages:
all:
target: [x86_64]
tbb:
require: "intel-tbb"
cmake:
externals:
- spec: cmake@3.28.0-msvc1
prefix: "C:\\Program Files\\Microsoft Visual Studio\\2022\\Community\\Common7\\IDE\\CommonExtensions\\Microsoft\\CMake\\CMake"
buildable: False
ninja:
externals:
- spec: ninja@1.11.0
prefix: "C:\\Program Files\\Microsoft Visual Studio\\2022\\Community\\Common7\\IDE\\CommonExtensions\\Microsoft\\CMake\\Ninja"
buildable: False
wgl:
externals:
- spec: wgl@10.0.22621 plat=x64
prefix: "C:\\Program Files (x86)\\Windows Kits\\10"
buildable: False
win-sdk:
externals:
- spec: win-sdk@10.0.22621 plat=x64
prefix: "C:\\Program Files (x86)\\Windows Kits\\10"
buildable: False
@@ -0,0 +1,4 @@
ci:
pipeline-gen:
- build-job:
tags: [x86_64-win]
@@ -0,0 +1,3 @@
packages:
all:
target: [x86_64]
@@ -0,0 +1,10 @@
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument("input", type=argparse.FileType("r"))
parser.add_argument("out", type=argparse.FileType("w"))

args = parser.parse_args()

args.out.write(os.path.expandvars(args.input.read()))
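This new helper replaces the inline `spack python -c` one-liner in the generate jobs earlier in the diff; it runs under `spack python` because the Python executable is named differently across platforms. For reference, os.path.expandvars substitutes environment variables into the template and passes unknown references through unchanged; a minimal demonstration (variable names are illustrative, not from the pipeline):

import os

os.environ["PIPELINE_MIRROR"] = "s3://example-bucket/develop"  # illustrative value
template = "mirrors:\n  buildcache: ${PIPELINE_MIRROR}\n  other: ${NOT_SET}\n"
print(os.path.expandvars(template))
# mirrors:
#   buildcache: s3://example-bucket/develop
#   other: ${NOT_SET}        <- unset variables pass through unchanged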
@@ -10,6 +10,7 @@ set -e
# The best solution would be to have the compiler hash (or package contents) be part of the
# individual package hashes. I don't see a way to do this at the moment.
# Set to the latest tag including a recent oneapi compiler.
# NOTE: If we update this spack version in the future make sure the compiler version also updates.
spack_intel_compiler_commit="develop-2023-08-06"

set_pcluster_defaults() {
@@ -23,10 +24,9 @@ set_pcluster_defaults() {

setup_spack() {
spack compiler add --scope site
spack external find --scope site
# Remove all autotools/buildtools packages. These versions need to be managed by spack or it will
# Do not add autotools/buildtools packages. These versions need to be managed by spack or it will
# eventually end up in a version mismatch (e.g. when compiling gmp).
spack tags build-tools | xargs -I {} spack config --scope site rm packages:{}
spack external find --scope site --tag core-packages
}

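The revised setup_spack above registers external core packages but deliberately drops anything tagged build-tools from the site scope, so spack builds those tools itself and the version-mismatch failure noted in the comment (e.g. gmp) cannot creep in. The tags-then-remove shell pipeline is equivalent to roughly this Python glue (a hypothetical sketch, not pipeline code):

# Sketch of: spack tags build-tools | xargs -I {} spack config --scope site rm packages:{}
# List packages tagged "build-tools", then remove each one's packages: entry
# from the site scope so no external is pinned for it.
import subprocess

tagged = subprocess.run(
    ["spack", "tags", "build-tools"], capture_output=True, text=True, check=True
).stdout.split()

for pkg in tagged:
    subprocess.run(
        ["spack", "config", "--scope", "site", "rm", f"packages:{pkg}"], check=True
    )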
patch_compilers_yaml() {
@@ -99,7 +99,7 @@ install_compilers() {
# The compilers need to be in the same install tree as the rest of the software such that the path
# relocation works correctly. This holds the danger that this part will fail when the current spack becomes
# incompatible with the one in $spack_intel_compiler_commit. Therefore, we make intel installations optional
# in packages.yaml files.
# in packages.yaml files and add a fallback `%gcc` version for each application.
if [ "x86_64" == "$(arch)" ]; then
(
CURRENT_SPACK_ROOT=${SPACK_ROOT}

@@ -30,8 +30,6 @@ spack:
variants: +pic +xz
mesa:
variants: ~llvm
mesa18:
variants: ~llvm
mpich:
variants: ~wrapperrpath netmod=ofi device=ch4
ncurses:
@@ -81,7 +79,7 @@ spack:
- openfoam
- osu-micro-benchmarks
- parallel
- paraview
# - paraview
- picard
- quantum-espresso
- raja

@@ -30,8 +30,6 @@ spack:
variants: +pic +xz
mesa:
variants: ~llvm
mesa18:
variants: ~llvm
mpich:
variants: ~wrapperrpath netmod=ofi device=ch4
ncurses:
@@ -87,7 +85,7 @@ spack:
- openfoam
- osu-micro-benchmarks
- parallel
- paraview
# - paraview
- picard
- quantum-espresso
# Build broken for gcc@7.3.1 x86_64_v4 (error: '_mm512_loadu_epi32' was not declared in this scope)
Some files were not shown because too many files have changed in this diff.