Compare commits

Comparing develop-20...develop-20 (175 commits)
| SHA1 |
|---|
| 72deb53832 |
| 7c87253fd8 |
| 1136aedd08 |
| 24e1b56268 |
| eef6a79b35 |
| 556a36cbd7 |
| 8aa490d6b7 |
| d9d085da10 |
| d88d720577 |
| 1d670ae744 |
| 35ad6f52c1 |
| b61bae7640 |
| 8b7abace8b |
| 5cf98d9564 |
| 973a961cb5 |
| 868d0cb957 |
| 497f3a3832 |
| 9843f41bce |
| e54fefc2b7 |
| 90c0889533 |
| 6696e82ce7 |
| dcc55d53db |
| 92000e81b8 |
| 125175ae25 |
| f60e548a0d |
| 04dc16a6b1 |
| 27b90e38db |
| 7e5ce3ba48 |
| f5f7cfdc8f |
| 3e1a562312 |
| ce4d962faa |
| b9816a97fc |
| f7b9c30456 |
| 884620a38a |
| 7503a41773 |
| 9a5fc6b4a3 |
| a31aeed167 |
| 71f542a951 |
| 322bd48788 |
| b752fa59d4 |
| d53e4cc426 |
| ee4b7fa3a1 |
| d6f02c86d9 |
| 62efde8e3c |
| bda1d94d49 |
| 3f472039c5 |
| 912ef34206 |
| 9c88a48a73 |
| 4bf5cc9a9a |
| 08834e2b03 |
| 8020a111df |
| 86fb547f7c |
| b9556c7c44 |
| 7bdb106b1b |
| 2b191cd7f4 |
| 774f0a4e60 |
| faf11efa72 |
| 5a99142b41 |
| a3aca0242a |
| 72f276fab3 |
| 21139945df |
| 900bd2f477 |
| 29d4a5af44 |
| dd9b7ed6a7 |
| 09ff74be62 |
| a94ebfea11 |
| 8f5fe1d123 |
| d4fb58efa3 |
| ce900346cc |
| 7cb64e465f |
| eb70c9f5b9 |
| a28405700e |
| f8f4d94d7a |
| 32dfb522d6 |
| c61c707aa5 |
| 60d10848c8 |
| dcd6b530f9 |
| 419f0742a0 |
| c99174798b |
| 8df2a4b511 |
| c174cf6830 |
| 5eebd65366 |
| 625f5323c0 |
| e05a32cead |
| c69af5d1e5 |
| 1ac2ee8043 |
| 36af1c1c73 |
| e2fa087002 |
| df02bfbad2 |
| fecb63843e |
| b33e2d09d3 |
| f8054aa21a |
| 8f3a2acc54 |
| d1a20908b8 |
| dd781f7368 |
| 9bcc43c4c1 |
| 77c83af17d |
| 574bd2db99 |
| a76f37da96 |
| 9e75f3ec0a |
| 4d42d45897 |
| a4b4bfda73 |
| 1bcdd3a57e |
| 297a3a1bc9 |
| 8d01e8c978 |
| 6be28aa303 |
| 5e38310515 |
| ddfed65485 |
| 2a16d8bfa8 |
| 6a40a50a29 |
| b2924f68c0 |
| 41ffe36636 |
| 24edc72252 |
| 83b38a26a0 |
| 914d785e3b |
| f99f642fa8 |
| e0bf3667e3 |
| a24ca50fed |
| 51e9f37252 |
| 453900c884 |
| 4696459d2d |
| ad1e3231e5 |
| 2ef7eb1826 |
| fe86019f9a |
| 9dbb18219f |
| 451a977de0 |
| e604929a4c |
| 9d591f9f7c |
| f8ad915100 |
| cbbabe6920 |
| 81fe460194 |
| b894f996c0 |
| 1ce09847d9 |
| 722d401394 |
| e6f04d5ef9 |
| b8e3ecbf00 |
| d189387c24 |
| 9e96ddc5ae |
| 543bd189af |
| 43291aa723 |
| d0589285f7 |
| d079aaa083 |
| 6c65977e0d |
| 1b5d786cf5 |
| 4cf00645bd |
| e9149cfc3c |
| a5c8111076 |
| c3576f712d |
| 410e6a59b7 |
| bd2b2fb75a |
| 7ae318efd0 |
| 73e9d56647 |
| f87a752b63 |
| ae2fec30c3 |
| 1af5564cbe |
| a8f057a701 |
| 7f3dd38ccc |
| 8e9adefcd5 |
| d276f9700f |
| 4f111659ec |
| eaf330f2a8 |
| cdaeb74dc7 |
| fbaac46604 |
| 7f6210ee90 |
| 63f6e6079a |
| d4fd6caae0 |
| fd3c18b6fd |
| 725f427f25 |
| 32b3e91ef7 |
| b7e4602268 |
| 4a98d4db93 |
| 9d6bf373be |
| cff35c4987 |
| d594f84b8f |
| f8f01c336c |
.github/workflows/audit.yaml (4 changes, vendored)

@@ -28,7 +28,7 @@ jobs:
       run:
         shell: ${{ matrix.system.shell }}
     steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: ${{inputs.python_version}}
@@ -61,7 +61,7 @@ jobs:
          ./share/spack/qa/validate_last_exit.ps1
          spack -d audit externals
          ./share/spack/qa/validate_last_exit.ps1
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
        if: ${{ inputs.with_coverage == 'true' }}
        with:
          flags: unittests,audits
.github/workflows/bootstrap.yml (8 changes, vendored)

@@ -37,7 +37,7 @@ jobs:
          make patch unzip which xz python3 python3-devel tree \
            cmake bison
      - name: Checkout
-        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - name: Bootstrap clingo
@@ -60,7 +60,7 @@ jobs:
        run: |
          brew install cmake bison tree
      - name: Checkout
-        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -92,7 +92,7 @@ jobs:
        run: |
          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
      - name: Checkout
-        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - name: Bootstrap GnuPG
@@ -121,7 +121,7 @@ jobs:
        run: |
          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
      - name: Checkout
-        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
.github/workflows/build-containers.yml (2 changes, vendored)

@@ -56,7 +56,7 @@ jobs:
    if: github.repository == 'spack/spack'
    steps:
      - name: Checkout
-        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29

      - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
        id: docker_meta
.github/workflows/ci.yaml (2 changes, vendored)

@@ -36,7 +36,7 @@ jobs:
      core: ${{ steps.filter.outputs.core }}
      packages: ${{ steps.filter.outputs.packages }}
    steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
.github/workflows/nightly-win-builds.yml (2 changes, vendored)

@@ -14,7 +14,7 @@ jobs:
  build-paraview-deps:
    runs-on: windows-latest
    steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
.github/workflows/style/requirements.txt (2 changes, vendored)

@@ -3,5 +3,5 @@ clingo==5.7.1
 flake8==7.0.0
 isort==5.13.2
 mypy==1.8.0
-types-six==1.16.21.9
+types-six==1.16.21.20240513
 vermin==1.6.0
.github/workflows/unit_tests.yaml (18 changes, vendored)

@@ -51,7 +51,7 @@ jobs:
            on_develop: false

    steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -91,7 +91,7 @@ jobs:
          UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
        run: |
          share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
        with:
          flags: unittests,linux,${{ matrix.concretizer }}
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -100,7 +100,7 @@ jobs:
  shell:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -124,7 +124,7 @@ jobs:
          COVERAGE: true
        run: |
          share/spack/qa/run-shell-tests
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
        with:
          flags: shelltests,linux
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -141,7 +141,7 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
      - name: Setup repo and non-root user
        run: |
          git --version
@@ -160,7 +160,7 @@ jobs:
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -185,7 +185,7 @@ jobs:
          SPACK_TEST_SOLVER: clingo
        run: |
          share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
        with:
          flags: unittests,linux,clingo
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -198,7 +198,7 @@ jobs:
        os: [macos-13, macos-14]
        python-version: ["3.11"]
    steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -223,7 +223,7 @@ jobs:
          $(which spack) solve zlib
          common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
          $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
        with:
          flags: unittests,macos
          token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/valid-style.yml (6 changes, vendored)

@@ -18,7 +18,7 @@ jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
  style:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -70,7 +70,7 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
      - name: Setup repo and non-root user
        run: |
          git --version
.github/workflows/windows_python.yml (10 changes, vendored)

@@ -15,7 +15,7 @@ jobs:
  unit-tests:
    runs-on: windows-latest
    steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -33,7 +33,7 @@ jobs:
          ./share/spack/qa/validate_last_exit.ps1
          coverage combine -a
          coverage xml
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
        with:
          flags: unittests,windows
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -41,7 +41,7 @@ jobs:
  unit-tests-cmd:
    runs-on: windows-latest
    steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -59,7 +59,7 @@ jobs:
          ./share/spack/qa/validate_last_exit.ps1
          coverage combine -a
          coverage xml
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
        with:
          flags: unittests,windows
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -67,7 +67,7 @@ jobs:
  build-abseil:
    runs-on: windows-latest
    steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -32,7 +32,7 @@

 Spack is a multi-platform package manager that builds and installs
 multiple versions and configurations of software. It works on Linux,
-macOS, and many supercomputers. Spack is non-destructive: installing a
+macOS, Windows, and many supercomputers. Spack is non-destructive: installing a
 new version of a package does not break existing installations, so many
 configurations of the same package can coexist.
@@ -144,3 +144,5 @@ switch($SpackSubCommand)
     "unload" {Invoke-SpackLoad}
     default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
 }
+
+exit $LASTEXITCODE
@@ -1,19 +0,0 @@
-# -------------------------------------------------------------------------
-# This file controls default concretization preferences for Spack.
-#
-# Settings here are versioned with Spack and are intended to provide
-# sensible defaults out of the box. Spack maintainers should edit this
-# file to keep it current.
-#
-# Users can override these settings by editing the following files.
-#
-# Per-spack-instance settings (overrides defaults):
-#   $SPACK_ROOT/etc/spack/packages.yaml
-#
-# Per-user settings (overrides default and site settings):
-#   ~/.spack/packages.yaml
-# -------------------------------------------------------------------------
-packages:
-  all:
-    providers:
-      iconv: [glibc, musl, libiconv]
@@ -1,19 +0,0 @@
-# -------------------------------------------------------------------------
-# This file controls default concretization preferences for Spack.
-#
-# Settings here are versioned with Spack and are intended to provide
-# sensible defaults out of the box. Spack maintainers should edit this
-# file to keep it current.
-#
-# Users can override these settings by editing the following files.
-#
-# Per-spack-instance settings (overrides defaults):
-#   $SPACK_ROOT/etc/spack/packages.yaml
-#
-# Per-user settings (overrides default and site settings):
-#   ~/.spack/packages.yaml
-# -------------------------------------------------------------------------
-packages:
-  all:
-    providers:
-      iconv: [glibc, musl, libiconv]
@@ -38,10 +38,9 @@ packages:
     lapack: [openblas, amdlibflame]
     libc: [glibc, musl]
     libgfortran: [ gcc-runtime ]
-    libglx: [mesa+glx, mesa18+glx]
+    libglx: [mesa+glx]
     libifcore: [ intel-oneapi-runtime ]
     libllvm: [llvm]
     libosmesa: [mesa+osmesa, mesa18+osmesa]
     lua-lang: [lua, lua-luajit-openresty, lua-luajit]
     luajit: [lua-luajit-openresty, lua-luajit]
     mariadb-client: [mariadb-c-client, mariadb]
@@ -147,6 +147,15 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.
       def autoreconf(self, spec, prefix):
           which("bash")("autogen.sh")

+If the ``package.py`` has build instructions in a separate
+:ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:
+
+.. code-block:: python
+
+   class AutotoolsBuilder(AutotoolsBuilder):
+       def autoreconf(self, pkg, spec, prefix):
+           which("bash")("autogen.sh")
+
 """""""""""""""""""""""""""""""""""""""
 patching configure or Makefile.in files
 """""""""""""""""""""""""""""""""""""""
@@ -25,7 +25,7 @@ use Spack to build packages with the tools.

 The Spack Python class ``IntelOneapiPackage`` is a base class that is
 used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
 ``IntelOneapiTbb`` and other classes to implement the oneAPI
-packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
+packages. Search for ``oneAPI`` at `packages.spack.io <https://packages.spack.io>`_ for the full
 list of available oneAPI packages, or use::

   spack list -d oneAPI
@@ -11,7 +11,8 @@ Chaining Spack Installations

 You can point your Spack installation to another installation to use any
 packages that are installed there. To register the other Spack instance,
-you can add it as an entry to ``upstreams.yaml``:
+you can add it as an entry to ``upstreams.yaml`` at any of the
+:ref:`configuration-scopes`:

 .. code-block:: yaml

@@ -22,7 +23,8 @@ you can add it as an entry to ``upstreams.yaml``:
       install_tree: /path/to/another/spack/opt/spack

 ``install_tree`` must point to the ``opt/spack`` directory inside of the
-Spack base directory.
+Spack base directory, or the location of the ``install_tree`` defined
+in :ref:`config.yaml <config-yaml>`.

 Once the upstream Spack instance has been added, ``spack find`` will
 automatically check the upstream instance when querying installed packages,
@@ -476,9 +476,3 @@ implemented using Python's built-in `sys.path
 :py:mod:`spack.repo` module implements a custom `Python importer
 <https://docs.python.org/2/library/imp.html>`_.

-.. warning::
-
-   The mechanism for extending packages is not yet extensively tested,
-   and extending packages across repositories imposes inter-repo
-   dependencies, which may be hard to manage. Use this feature at your
-   own risk, but let us know if you have a use case for it.
@@ -6,7 +6,7 @@ python-levenshtein==0.25.1
 docutils==0.20.1
 pygments==2.18.0
 urllib3==2.2.1
-pytest==8.2.0
+pytest==8.2.1
 isort==5.13.2
 black==24.4.2
 flake8==7.0.0
@@ -843,7 +843,7 @@ def copy_tree(
         if islink(s):
             link_target = resolve_link_target_relative_to_the_link(s)
             if symlinks:
-                target = os.readlink(s)
+                target = readlink(s)
                 if os.path.isabs(target):
@@ -2531,8 +2531,14 @@ def establish_link(self):

         # for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib)
         # install a symlink to each dependent library
-        for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
-            self._link(library, lib_dir)
+
+        # do not rpath for system libraries included in the dag
+        # we should not be modifying libraries managed by the Windows system
+        # as this will negatively impact linker behavior and can result in permission
+        # errors if those system libs are not modifiable by Spack
+        if "windows-system" not in getattr(self.pkg, "tags", []):
+            for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
+                self._link(library, lib_dir)


 @system_path_filter
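Many hunks in this compare swap `os.readlink` for `readlink` from `llnl.util.symlink`. The likely motivation (an inference; the compare itself does not say) is Windows, where Spack emulates symlinks with junctions and hard links that `os.readlink` cannot always resolve. A minimal sketch of the idea; the real helper in `llnl/util/symlink.py` is more thorough:

```python
import os
import sys


def readlink(path: str) -> str:
    """Sketch of a platform-tolerant readlink (assumption: the real
    llnl.util.symlink.readlink handles more cases than shown here)."""
    if sys.platform == "win32" and not os.path.islink(path):
        # A junction is a reparse point that os.readlink may not accept;
        # fall back to resolving the real path instead.
        return os.path.realpath(path)
    return os.readlink(path)
```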
@@ -421,6 +421,10 @@ def _check_patch_urls(pkgs, error_cls):
         r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
         r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
     )
+    github_pull_commits_re = (
+        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
+        r".+/.+/pull/\d+/commits/[a-fA-F0-9]+\.(?:patch|diff)"
+    )
     # Only .diff URLs have stable/full hashes:
     # https://forum.gitlab.com/t/patches-with-full-index/29313
     gitlab_patch_url_re = (
@@ -436,14 +440,24 @@ def _check_patch_urls(pkgs, error_cls):
             if not isinstance(patch, spack.patch.UrlPatch):
                 continue

-            if re.match(github_patch_url_re, patch.url):
+            if re.match(github_pull_commits_re, patch.url):
+                url = re.sub(r"/pull/\d+/commits/", r"/commit/", patch.url)
+                url = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", url)
+                errors.append(
+                    error_cls(
+                        f"patch URL in package {pkg_cls.name} "
+                        + "must not be a pull request commit; "
+                        + f"instead use {url}",
+                        [patch.url],
+                    )
+                )
+            elif re.match(github_patch_url_re, patch.url):
                 full_index_arg = "?full_index=1"
                 if not patch.url.endswith(full_index_arg):
                     errors.append(
                         error_cls(
-                            "patch URL in package {0} must end with {1}".format(
-                                pkg_cls.name, full_index_arg
-                            ),
+                            f"patch URL in package {pkg_cls.name} "
+                            + f"must end with {full_index_arg}",
                             [patch.url],
                         )
                     )
@@ -451,9 +465,7 @@ def _check_patch_urls(pkgs, error_cls):
                 if not patch.url.endswith(".diff"):
                     errors.append(
                         error_cls(
-                            "patch URL in package {0} must end with .diff".format(
-                                pkg_cls.name
-                            ),
+                            f"patch URL in package {pkg_cls.name} must end with .diff",
                             [patch.url],
                         )
                     )
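For concreteness, this is what the new pull-request-commit check suggests when run on a made-up URL (the repository and hash below are invented for illustration):

```python
import re

# Hypothetical patch URL of the kind the audit now rejects.
url = "https://github.com/example/project/pull/123/commits/0123abcd.patch"

# The same two rewrites the audit uses to build its suggestion:
fixed = re.sub(r"/pull/\d+/commits/", r"/commit/", url)
fixed = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", fixed)

print(fixed)
# https://github.com/example/project/commit/0123abcd.patch?full_index=1
```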
@@ -43,7 +43,7 @@
 from collections import defaultdict
 from enum import Flag, auto
 from itertools import chain
-from typing import List, Set, Tuple
+from typing import Dict, List, Set, Tuple

 import llnl.util.tty as tty
 from llnl.string import plural
@@ -730,12 +730,28 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwa
     return compiler(*compiler_args, output=compiler_output)


-def get_rpath_deps(pkg):
-    """Return immediate or transitive RPATHs depending on the package."""
-    if pkg.transitive_rpaths:
-        return [d for d in pkg.spec.traverse(root=False, deptype=("link"))]
-    else:
-        return pkg.spec.dependencies(deptype="link")
+def _get_rpath_deps_from_spec(
+    spec: spack.spec.Spec, transitive_rpaths: bool
+) -> List[spack.spec.Spec]:
+    if not transitive_rpaths:
+        return spec.dependencies(deptype=dt.LINK)
+
+    by_name: Dict[str, spack.spec.Spec] = {}
+
+    for dep in spec.traverse(root=False, deptype=dt.LINK):
+        lookup = by_name.get(dep.name)
+        if lookup is None:
+            by_name[dep.name] = dep
+        elif lookup.version < dep.version:
+            by_name[dep.name] = dep
+
+    return list(by_name.values())
+
+
+def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]:
+    """Return immediate or transitive dependencies (depending on the package) that need to be
+    rpath'ed. If a package occurs multiple times, the newest version is kept."""
+    return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)


 def get_rpaths(pkg):
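The new `_get_rpath_deps_from_spec` keeps a single spec per package name, preferring the highest version. The reduction is easiest to see on plain (name, version) tuples; the sketch below is illustrative only:

```python
from typing import Dict, List, Tuple

Dep = Tuple[str, int]


def newest_per_name(deps: List[Dep]) -> List[Dep]:
    """Keep one entry per name, preferring the highest version."""
    by_name: Dict[str, Dep] = {}
    for dep in deps:
        name, version = dep
        current = by_name.get(name)
        if current is None or current[1] < version:
            by_name[name] = dep
    return list(by_name.values())


# Two "runtime" instances in one link sub-DAG: only version 2 is rpath'ed.
print(newest_per_name([("child", 1), ("runtime", 1), ("runtime", 2)]))
# [('child', 1), ('runtime', 2)]
```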
@@ -137,11 +137,14 @@ def cuda_flags(arch_list):
     conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
     conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
     conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
+    conflicts("%gcc@14:", when="+cuda ^cuda@:12.4")
     conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
     conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
     conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
     conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
-    conflicts("%clang@16:", when="+cuda ^cuda@:12.3")
+    conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
+    conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
+    conflicts("%clang@18:", when="+cuda ^cuda@:12.4")

     # https://gist.github.com/ax3l/9489132#gistcomment-3860114
     conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
@@ -846,6 +846,7 @@ def scalapack_libs(self):
             "^mpich@2:" in spec_root
             or "^cray-mpich" in spec_root
             or "^mvapich2" in spec_root
+            or "^mvapich" in spec_root
             or "^intel-mpi" in spec_root
             or "^intel-oneapi-mpi" in spec_root
             or "^intel-parallel-studio" in spec_root
@@ -44,6 +44,7 @@
 from spack import traverse
 from spack.error import SpackError
 from spack.reporters import CDash, CDashConfiguration
+from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
 from spack.reporters.cdash import build_stamp as cdash_build_stamp

 # See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
@@ -683,6 +684,22 @@ def generate_gitlab_ci_yaml(
             "instead.",
         )

+    def ensure_expected_target_path(path):
+        """Returns passed paths with all Windows path separators exchanged
+        for posix separators only if copy_only_pipeline is enabled
+
+        This is required as copy_only_pipelines are a unique scenario where
+        the generate job and child pipelines are run on different platforms.
+        To make this compatible w/ Windows, we cannot write Windows style path separators
+        that will be consumed on by the Posix copy job runner.
+
+        TODO (johnwparent): Refactor config + cli read/write to deal only in posix
+        style paths
+        """
+        if copy_only_pipeline and path:
+            path = path.replace("\\", "/")
+        return path
+
     pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
     deprecated_mirror_config = False
     buildcache_destination = None
@@ -806,7 +823,7 @@ def generate_gitlab_ci_yaml(
             if scope not in include_scopes and scope not in env_includes:
                 include_scopes.insert(0, scope)
         env_includes.extend(include_scopes)
-        env_yaml_root["spack"]["include"] = env_includes
+        env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes]

         if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
             env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
@@ -1227,6 +1244,9 @@ def main_script_replacements(cmd):
         "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
         "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
     }
+    output_vars = output_object["variables"]
+    for item, val in output_vars.items():
+        output_vars[item] = ensure_expected_target_path(val)

     # TODO: Remove this block in Spack 0.23
     if deprecated_mirror_config and remote_mirror_override:
@@ -1283,7 +1303,6 @@ def main_script_replacements(cmd):
     sorted_output = {}
     for output_key, output_value in sorted(output_object.items()):
         sorted_output[output_key] = output_value
-
     if known_broken_specs_encountered:
         tty.error("This pipeline generated hashes known to be broken on develop:")
         display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered)
@@ -1506,7 +1525,7 @@ def download_and_extract_artifacts(url, work_dir):
     request = Request(url, headers=headers)
     request.get_method = lambda: "GET"

-    response = opener.open(request)
+    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
     response_code = response.getcode()

     if response_code != 200:
@@ -2254,7 +2273,7 @@ def create_buildgroup(self, opener, headers, url, group_name, group_type):

     request = Request(url, data=enc_data, headers=headers)

-    response = opener.open(request)
+    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
     response_code = response.getcode()

     if response_code not in [200, 201]:
@@ -2300,7 +2319,7 @@ def populate_buildgroup(self, job_names):
     request = Request(url, data=enc_data, headers=headers)
     request.get_method = lambda: "PUT"

-    response = opener.open(request)
+    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
     response_code = response.getcode()

     if response_code != 200:
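A toy demonstration of the separator normalization added above (the real function closes over `copy_only_pipeline`, which is set during pipeline generation; the path below is invented):

```python
def ensure_expected_target_path(path, copy_only_pipeline=True):
    """Standalone rendition of the helper, for illustration only."""
    if copy_only_pipeline and path:
        path = path.replace("\\", "/")
    return path


# A Windows-style include path written by the generate job ...
print(ensure_expected_target_path("concrete_env\\include\\ci.yaml"))
# ... comes out POSIX-style for the Linux child pipeline:
# concrete_env/include/ci.yaml
```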
@@ -13,7 +13,6 @@
 import shutil
 import sys
 import tempfile
-import urllib.request
 from typing import Dict, List, Optional, Tuple, Union

 import llnl.util.tty as tty
@@ -54,6 +53,7 @@
 from spack.oci.oci import (
     copy_missing_layers_with_retry,
     get_manifest_and_config_with_retry,
+    list_tags,
     upload_blob_with_retry,
     upload_manifest_with_retry,
 )
@@ -856,10 +856,7 @@ def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:


 def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
-    request = urllib.request.Request(url=image_ref.tags_url())
-    response = spack.oci.opener.urlopen(request)
-    spack.oci.opener.ensure_status(request, response, 200)
-    tags = json.load(response)["tags"]
+    tags = list_tags(image_ref)

     # Fetch all image config files in parallel
     spec_dicts = pool.starmap(
@@ -106,7 +106,8 @@ def clean(parser, args):

     # Then do the cleaning falling through the cases
     if args.specs:
-        specs = spack.cmd.parse_specs(args.specs, concretize=True)
+        specs = spack.cmd.parse_specs(args.specs, concretize=False)
+        specs = list(spack.cmd.matching_spec_from_env(x) for x in specs)
         for spec in specs:
             msg = "Cleaning build stage [{0}]"
             tty.msg(msg.format(spec.short_spec))
@@ -151,7 +151,8 @@ def is_installed(spec):
         key=lambda s: s.dag_hash(),
     )

-    return [spec for spec in specs if is_installed(spec)]
+    with spack.store.STORE.db.read_transaction():
+        return [spec for spec in specs if is_installed(spec)]


 def dependent_environments(
@@ -239,6 +240,8 @@ def get_uninstall_list(args, specs: List[spack.spec.Spec], env: Optional[ev.Envi
         print()
         tty.info("The following environments still reference these specs:")
         colify([e.name for e in other_dependent_envs.keys()], indent=4)
+    if env:
+        msgs.append("use `spack remove` to remove the spec from the current environment")
     msgs.append("use `spack env remove` to remove environments")
     msgs.append("use `spack uninstall --force` to override")
     print()
@@ -220,10 +220,10 @@ def _compiler_config_from_external(config):
         operating_system = host_platform.operating_system("default_os")
         target = host_platform.target("default_target").microarchitecture
     else:
-        target = spec.target
+        target = spec.architecture.target
         if not target:
-            host_platform = spack.platforms.host()
-            target = host_platform.target("default_target").microarchitecture
+            target = spack.platforms.host().target("default_target")
+        target = target.microarchitecture

         operating_system = spec.os
         if not operating_system:
@@ -96,6 +96,8 @@ def verbose_flag(self):

     openmp_flag = "-fopenmp"

+    # C++ flags based on CMake Modules/Compiler/Clang.cmake
+
     @property
     def cxx11_flag(self):
         if self.real_version < Version("3.3"):
@@ -120,6 +122,24 @@ def cxx17_flag(self):

         return "-std=c++17"

+    @property
+    def cxx20_flag(self):
+        if self.real_version < Version("5.0"):
+            raise UnsupportedCompilerFlag(self, "the C++20 standard", "cxx20_flag", "< 5.0")
+        elif self.real_version < Version("11.0"):
+            return "-std=c++2a"
+        else:
+            return "-std=c++20"
+
+    @property
+    def cxx23_flag(self):
+        if self.real_version < Version("12.0"):
+            raise UnsupportedCompilerFlag(self, "the C++23 standard", "cxx23_flag", "< 12.0")
+        elif self.real_version < Version("17.0"):
+            return "-std=c++2b"
+        else:
+            return "-std=c++23"
+
     @property
     def c99_flag(self):
         return "-std=c99"
@@ -142,7 +162,10 @@ def c17_flag(self):
     def c23_flag(self):
         if self.real_version < Version("9.0"):
             raise UnsupportedCompilerFlag(self, "the C23 standard", "c23_flag", "< 9.0")
-        return "-std=c2x"
+        elif self.real_version < Version("18.0"):
+            return "-std=c2x"
+        else:
+            return "-std=c23"

     @property
     def cc_pic_flag(self):
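The version cutoffs are easiest to check in isolation; below is a self-contained mirror of the new `c23_flag` logic on plain version tuples (the tuples stand in for `self.real_version`):

```python
def c23_flag(real_version: tuple) -> str:
    """Mirror of the clang c23_flag property above, for illustration."""
    if real_version < (9, 0):
        raise ValueError("the C23 standard needs clang >= 9.0")
    elif real_version < (18, 0):
        return "-std=c2x"  # draft-era spelling
    else:
        return "-std=c23"  # finalized spelling from clang 18 on


print(c23_flag((9, 0)))   # -std=c2x
print(c23_flag((18, 0)))  # -std=c23
```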
@@ -15,6 +15,7 @@

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
+from llnl.util.symlink import readlink

 import spack.config
 import spack.hash_types as ht
@@ -181,7 +182,7 @@ def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
         base_dir = (
             self.path_for_spec(deprecator_spec)
             if deprecator_spec
-            else os.readlink(deprecated_spec.prefix)
+            else readlink(deprecated_spec.prefix)
         )

         yaml_path = os.path.join(
@@ -22,7 +22,7 @@
 import llnl.util.tty as tty
 import llnl.util.tty.color as clr
 from llnl.util.link_tree import ConflictingSpecsError
-from llnl.util.symlink import symlink
+from llnl.util.symlink import readlink, symlink

 import spack.compilers
 import spack.concretize
@@ -662,7 +662,7 @@ def _current_root(self):
         if not os.path.islink(self.root):
             return None

-        root = os.readlink(self.root)
+        root = readlink(self.root)
         if os.path.isabs(root):
             return root
@@ -13,7 +13,6 @@
 import spack.config
 import spack.relocate
 from spack.util.elf import ElfParsingError, parse_elf
-from spack.util.executable import Executable


 def is_shared_library_elf(filepath):
@@ -149,10 +148,9 @@ def post_install(spec, explicit=None):
         return

     # Should failing to locate patchelf be a hard error?
-    patchelf_path = spack.relocate._patchelf()
-    if not patchelf_path:
+    patchelf = spack.relocate._patchelf()
+    if not patchelf:
         return
-    patchelf = Executable(patchelf_path)

     fixes = find_and_patch_sonames(spec.prefix, spec.package.non_bindable_shared_objects, patchelf)
@@ -11,7 +11,7 @@
 import urllib.parse
 import urllib.request
 from http.client import HTTPResponse
-from typing import NamedTuple, Tuple
+from typing import List, NamedTuple, Tuple
 from urllib.request import Request

 import llnl.util.tty as tty
@@ -27,6 +27,7 @@
 import spack.stage
 import spack.traverse
 import spack.util.crypto
+import spack.util.url

 from .image import Digest, ImageReference

@@ -69,6 +70,42 @@ def with_query_param(url: str, param: str, value: str) -> str:
     )


+def list_tags(ref: ImageReference, _urlopen: spack.oci.opener.MaybeOpen = None) -> List[str]:
+    """Retrieves the list of tags associated with an image, handling pagination."""
+    _urlopen = _urlopen or spack.oci.opener.urlopen
+    tags = set()
+    fetch_url = ref.tags_url()
+
+    while True:
+        # Fetch tags
+        request = Request(url=fetch_url)
+        response = _urlopen(request)
+        spack.oci.opener.ensure_status(request, response, 200)
+        tags.update(json.load(response)["tags"])
+
+        # Check for pagination
+        link_header = response.headers["Link"]
+
+        if link_header is None:
+            break
+
+        tty.debug(f"OCI tag pagination: {link_header}")
+
+        rel_next_value = spack.util.url.parse_link_rel_next(link_header)
+
+        if rel_next_value is None:
+            break
+
+        rel_next = urllib.parse.urlparse(rel_next_value)
+
+        if rel_next.scheme not in ("https", ""):
+            break
+
+        fetch_url = ref.endpoint(rel_next_value)
+
+    return sorted(tags)
+
+
 def upload_blob(
     ref: ImageReference,
     file: str,
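A hedged usage sketch for the new `list_tags` (the image name below is invented, and `ImageReference.from_string` is assumed to exist in `spack.oci.image`, matching its use elsewhere in the code base):

```python
from spack.oci.image import ImageReference
from spack.oci.oci import list_tags

# Hypothetical buildcache image; registry pagination is followed
# transparently via the Link header handled inside list_tags.
ref = ImageReference.from_string("ghcr.io/example/spack-buildcache")
for tag in list_tags(ref):
    print(tag)
```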
@@ -161,7 +161,11 @@ def windows_establish_runtime_linkage(self):

         Performs symlinking to incorporate rpath dependencies to Windows runtime search paths
         """
-        if sys.platform == "win32":
+        # If spec is an external, we should not be modifying its bin directory, as we would
+        # be doing in this method
+        # Spack should in general not modify things it has not installed
+        # we can reasonably expect externals to have their link interface properly established
+        if sys.platform == "win32" and not self.spec.external:
             self.win_rpath.add_library_dependent(*self.win_add_library_dependent())
             self.win_rpath.add_rpath(*self.win_add_rpath())
             self.win_rpath.establish_link()
@@ -2446,9 +2450,18 @@ def rpath(self):

         # on Windows, libraries of runtime interest are typically
         # stored in the bin directory
+        # Do not include Windows system libraries in the rpath interface
+        # these libraries are handled automatically by VS/VCVARS and adding
+        # Spack derived system libs into the link path or address space of a program
+        # can result in conflicting versions, which makes Spack packages less useable
         if sys.platform == "win32":
             rpaths = [self.prefix.bin]
-            rpaths.extend(d.prefix.bin for d in deps if os.path.isdir(d.prefix.bin))
+            rpaths.extend(
+                d.prefix.bin
+                for d in deps
+                if os.path.isdir(d.prefix.bin)
+                and "windows-system" not in getattr(d.package, "tags", [])
+            )
         else:
             rpaths = [self.prefix.lib, self.prefix.lib64]
             rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))
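The `windows-system` tag filter above amounts to a simple comprehension guard; a toy model (package names and tags invented):

```python
# Dependencies as (name, tags) pairs; "win-sdk" stands in for a package
# tagged "windows-system" that VS/VCVARS already puts on the search path.
deps = [("zlib", []), ("win-sdk", ["windows-system"])]

rpaths = [name for name, tags in deps if "windows-system" not in tags]
print(rpaths)  # ['zlib'] -- system libraries are excluded from the rpath
```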
@@ -10,6 +10,7 @@
 import archspec.cpu

 import llnl.util.tty as tty
+from llnl.util.symlink import readlink

 import spack.target
 import spack.version
@@ -133,7 +134,7 @@ def craype_type_and_version(cls):
         # Take the default version from known symlink path
         default_path = os.path.join(craype_dir, "default")
         if os.path.islink(default_path):
-            version = spack.version.Version(os.readlink(default_path))
+            version = spack.version.Version(readlink(default_path))
             return (craype_type, version)

         # If no default version, sort available versions and return latest
@@ -566,7 +566,7 @@ def make_link_relative(new_links, orig_links):
         orig_links (list): original links
     """
     for new_link, orig_link in zip(new_links, orig_links):
-        target = os.readlink(orig_link)
+        target = readlink(orig_link)
         relative_target = os.path.relpath(target, os.path.dirname(orig_link))
         os.unlink(new_link)
         symlink(relative_target, new_link)
@@ -241,7 +241,7 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):

     Arguments:

-        type (str): String containing one or more of 'A', 'B', 'C'
+        type (str): String containing one or more of 'A', 'R', 'C'
         rev1 (str): Revision to compare against, default is 'HEAD^'
         rev2 (str): Revision to compare to rev1, default is 'HEAD'

@@ -264,7 +264,7 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
     lines = [] if not out else re.split(r"\s+", out)
     changed = set()
     for path in lines:
-        pkg_name, _, _ = path.partition(os.sep)
+        pkg_name, _, _ = path.partition("/")
         if pkg_name not in added and pkg_name not in removed:
             changed.add(pkg_name)
@@ -58,7 +58,8 @@
 # Initialize data structures common to each phase's report.
 CDASH_PHASES = set(MAP_PHASES_TO_CDASH.values())
 CDASH_PHASES.add("update")

+# CDash request timeout in seconds
+SPACK_CDASH_TIMEOUT = 45

 CDashConfiguration = collections.namedtuple(
     "CDashConfiguration", ["upload_url", "packages", "build", "site", "buildstamp", "track"]
@@ -447,7 +448,7 @@ def upload(self, filename):
         # By default, urllib2 only support GET and POST.
         # CDash expects this file to be uploaded via PUT.
         request.get_method = lambda: "PUT"
-        response = opener.open(request)
+        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
         if self.current_package_name not in self.buildIds:
             resp_value = response.read()
             if isinstance(resp_value, bytes):
@@ -9,7 +9,7 @@
 import tempfile
 from collections import OrderedDict

-from llnl.util.symlink import symlink
+from llnl.util.symlink import readlink, symlink

 import spack.binary_distribution as bindist
 import spack.error
@@ -26,7 +26,7 @@ def _relocate_spliced_links(links, orig_prefix, new_prefix):
     in our case. This still needs to be called after the copy to destination
     because it expects the new directory structure to be in place."""
     for link in links:
-        link_target = os.readlink(os.path.join(orig_prefix, link))
+        link_target = readlink(os.path.join(orig_prefix, link))
        link_target = re.sub("^" + orig_prefix, new_prefix, link_target)
        new_link_path = os.path.join(new_prefix, link)
        os.unlink(new_link_path)
@@ -314,6 +314,10 @@ def using_libc_compatibility() -> bool:
     return spack.platforms.host().name == "linux"


+def c_compiler_runs(compiler: spack.compiler.Compiler) -> bool:
+    return compiler.compiler_verbose_output is not None
+
+
 def extend_flag_list(flag_list, new_flags):
     """Extend a list of flags, preserving order and precedence.

@@ -1935,6 +1939,11 @@ def _spec_clauses(
         for virtual in virtuals:
             clauses.append(fn.attr("virtual_on_incoming_edges", spec.name, virtual))

+        # If the spec is external and concrete, we allow all the libcs on the system
+        if spec.external and spec.concrete and using_libc_compatibility():
+            for libc in self.libcs:
+                clauses.append(fn.attr("compatible_libc", spec.name, libc.name, libc.version))
+
         # add all clauses from dependencies
         if transitive:
             # TODO: Eventually distinguish 2 deps on the same pkg (build and link)
@@ -2975,6 +2984,13 @@ class CompilerParser:
     def __init__(self, configuration) -> None:
         self.compilers: Set[KnownCompiler] = set()
         for c in all_compilers_in_config(configuration):
+            if using_libc_compatibility() and not c_compiler_runs(c):
+                tty.debug(
+                    f"the C compiler {c.cc} does not exist, or does not run correctly."
+                    f" The compiler {c.spec} will not be used during concretization."
+                )
+                continue
+
             if using_libc_compatibility() and not c.default_libc:
                 warnings.warn(
                     f"cannot detect libc from {c.spec}. The compiler will not be used "
@@ -1345,8 +1345,10 @@ build(PackageNode) :- not attr("hash", PackageNode, _), attr("node", PackageNode
 % topmost-priority criterion to reuse what is installed.
 %
 % The priority ranges are:
-% 200+ Shifted priorities for build nodes; correspond to priorities 0 - 99.
-% 100 - 199 Unshifted priorities. Currently only includes minimizing #builds.
+% 1000+ Optimizations for concretization errors
+% 300 - 1000 Highest priority optimizations for valid solutions
+% 200 - 299 Shifted priorities for build nodes; correspond to priorities 0 - 99.
+% 100 - 199 Unshifted priorities. Currently only includes minimizing #builds and minimizing dupes.
 % 0 - 99 Priorities for non-built nodes.
 build_priority(PackageNode, 200) :- build(PackageNode), attr("node", PackageNode).
 build_priority(PackageNode, 0) :- not build(PackageNode), attr("node", PackageNode).
@@ -1394,6 +1396,16 @@ build_priority(PackageNode, 0) :- not build(PackageNode), attr("node", Package
 % 2. a `#minimize{ 0@2 : #true }.` statement that ensures the criterion
 % is displayed (clingo doesn't display sums over empty sets by default)

+% A condition group specifies one or more specs that must be satisfied.
+% Specs declared first are preferred, so we assign increasing weights and
+% minimize the weights.
+opt_criterion(310, "requirement weight").
+#minimize{ 0@310: #true }.
+#minimize {
+  Weight@310,PackageNode,Group
+  : requirement_weight(PackageNode, Group, Weight)
+}.
+
 % Try hard to reuse installed packages (i.e., minimize the number built)
 opt_criterion(110, "number of packages to build (vs. reuse)").
 #minimize { 0@110: #true }.
@@ -1405,18 +1417,6 @@ opt_criterion(100, "number of nodes from the same package").
 #minimize { ID@100,Package : attr("virtual_node", node(ID, Package)) }.
 #defined optimize_for_reuse/0.

-% A condition group specifies one or more specs that must be satisfied.
-% Specs declared first are preferred, so we assign increasing weights and
-% minimize the weights.
-opt_criterion(75, "requirement weight").
-#minimize{ 0@275: #true }.
-#minimize{ 0@75: #true }.
-#minimize {
-  Weight@75+Priority,PackageNode,Group
-  : requirement_weight(PackageNode, Group, Weight),
-  build_priority(PackageNode, Priority)
-}.
-
 % Minimize the number of deprecated versions being used
 opt_criterion(73, "deprecated versions used").
 #minimize{ 0@273: #true }.
@@ -1432,11 +1432,11 @@ opt_criterion(73, "deprecated versions used").
 % 1. Version weight
 % 2. Number of variants with a non default value, if not set
 % for the root package.
-opt_criterion(70, "version weight").
+opt_criterion(70, "version badness (roots)").
 #minimize{ 0@270: #true }.
 #minimize{ 0@70: #true }.
 #minimize {
-  Weight@70+Priority
+  Weight@70+Priority,PackageNode
   : attr("root", PackageNode),
   version_weight(PackageNode, Weight),
   build_priority(PackageNode, Priority)
@@ -1526,13 +1526,14 @@ opt_criterion(30, "non-preferred OS's").
 }.

 % Choose more recent versions for nodes
-opt_criterion(25, "version badness").
+opt_criterion(25, "version badness (non roots)").
 #minimize{ 0@225: #true }.
 #minimize{ 0@25: #true }.
 #minimize{
   Weight@25+Priority,node(X, Package)
   : version_weight(node(X, Package), Weight),
   build_priority(node(X, Package), Priority),
+  not attr("root", node(X, Package)),
   not runtime(Package)
 }.
@@ -10,12 +10,13 @@
 %=============================================================================

 % A package cannot be reused if the libc is not compatible with it
-:- provider(node(X, LibcPackage), node(0, "libc")),
-   attr("version", node(X, LibcPackage), LibcVersion),
-   attr("hash", node(R, ReusedPackage), Hash),
-   % Libc packages can be reused without the "compatible_libc" attribute
-   ReusedPackage != LibcPackage,
-   not attr("compatible_libc", node(R, ReusedPackage), LibcPackage, LibcVersion).
+error(100, "Cannot reuse {0} since we cannot determine libc compatibility", ReusedPackage)
+:- provider(node(X, LibcPackage), node(0, "libc")),
+   attr("version", node(X, LibcPackage), LibcVersion),
+   attr("hash", node(R, ReusedPackage), Hash),
+   % Libc packages can be reused without the "compatible_libc" attribute
+   ReusedPackage != LibcPackage,
+   not attr("compatible_libc", node(R, ReusedPackage), LibcPackage, LibcVersion).

 % Check whether the DAG has any built package
 has_built_packages() :- build(X), not external(X).
@@ -19,6 +19,8 @@
     (["missing-dependency"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
     # The package use a non existing variant in a depends_on directive
     (["wrong-variant-in-depends-on"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
+    # This package has a GitHub pull request commit patch URL
+    (["invalid-github-pull-commits-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
     # This package has a GitHub patch URL without full_index=1
     (["invalid-github-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
     # This package has invalid GitLab patch URLs
@@ -22,6 +22,7 @@
 import archspec.cpu

 from llnl.util.filesystem import join_path, visit_directory_tree
+from llnl.util.symlink import readlink

 import spack.binary_distribution as bindist
 import spack.caches
@@ -1062,10 +1063,10 @@ def test_tarball_common_prefix(dummy_prefix, tmpdir):
         assert set(os.listdir(os.path.join("prefix2", "share"))) == {"file"}

         # Relative symlink should still be correct
-        assert os.readlink(os.path.join("prefix2", "bin", "relative_app_link")) == "app"
+        assert readlink(os.path.join("prefix2", "bin", "relative_app_link")) == "app"

         # Absolute symlink should remain absolute -- this is for relocation to fix up.
-        assert os.readlink(os.path.join("prefix2", "bin", "absolute_app_link")) == os.path.join(
+        assert readlink(os.path.join("prefix2", "bin", "absolute_app_link")) == os.path.join(
             dummy_prefix, "bin", "app"
         )
@@ -14,6 +14,7 @@

 import spack.build_environment
 import spack.config
+import spack.deptypes as dt
 import spack.package_base
 import spack.spec
 import spack.util.spack_yaml as syaml
@@ -716,3 +717,21 @@ def test_build_system_globals_only_set_on_root_during_build(default_mock_concret
     for depth, spec in root.traverse(depth=True, root=True):
         for variable in build_variables:
             assert hasattr(spec.package.module, variable) == should_be_set(depth)
+
+
+def test_rpath_with_duplicate_link_deps():
+    """If we have two instances of one package in the same link sub-dag, only the newest version is
+    rpath'ed. This is for runtime support without splicing."""
+    runtime_1 = spack.spec.Spec("runtime@=1.0")
+    runtime_2 = spack.spec.Spec("runtime@=2.0")
+    child = spack.spec.Spec("child@=1.0")
+    root = spack.spec.Spec("root@=1.0")
+
+    root.add_dependency_edge(child, depflag=dt.LINK, virtuals=())
+    root.add_dependency_edge(runtime_2, depflag=dt.LINK, virtuals=())
+    child.add_dependency_edge(runtime_1, depflag=dt.LINK, virtuals=())
+
+    rpath_deps = spack.build_environment._get_rpath_deps_from_spec(root, transitive_rpaths=True)
+    assert child in rpath_deps
+    assert runtime_2 in rpath_deps
+    assert runtime_1 not in rpath_deps
@@ -51,7 +51,7 @@ def __init__(self, response_code=200, content_to_read=[]):
         self._content = content_to_read
         self._read = [False for c in content_to_read]

-    def open(self, request):
+    def open(self, request, data=None, timeout=object()):
         return self

     def getcode(self):
@@ -11,6 +11,7 @@

 import spack.caches
 import spack.cmd.clean
+import spack.environment as ev
 import spack.main
 import spack.package_base
 import spack.stage
@@ -68,6 +69,20 @@ def test_function_calls(command_line, effects, mock_calls_for_clean):
     assert mock_calls_for_clean[name] == (1 if name in effects else 0)


+def test_env_aware_clean(mock_stage, install_mockery, mutable_mock_env_path, monkeypatch):
+    e = ev.create("test", with_view=False)
+    e.add("mpileaks")
+    e.concretize()
+
+    def fail(*args, **kwargs):
+        raise Exception("This should not have been called")
+
+    monkeypatch.setattr(spack.spec.Spec, "concretize", fail)
+
+    with e:
+        clean("mpileaks")
+
+
 def test_remove_python_cache(tmpdir, monkeypatch):
     cache_files = ["file1.pyo", "file2.pyc"]
     source_file = "file1.py"
@@ -15,6 +15,7 @@
 import llnl.util.filesystem as fs
 import llnl.util.link_tree
 import llnl.util.tty as tty
+from llnl.util.symlink import readlink

 import spack.cmd.env
 import spack.config
@@ -4414,8 +4415,8 @@ def test_env_view_resolves_identical_file_conflicts(tmp_path, install_mockery, m
     # view-file/bin/
     #   x  # expect this x to be linked

-    assert os.readlink(tmp_path / "view" / "bin" / "x") == bottom.bin.x
-    assert os.readlink(tmp_path / "view" / "bin" / "y") == top.bin.y
+    assert readlink(tmp_path / "view" / "bin" / "x") == bottom.bin.x
+    assert readlink(tmp_path / "view" / "bin" / "y") == top.bin.y


 def test_env_view_ignores_different_file_conflicts(tmp_path, install_mockery, mock_fetch):
@@ -4426,4 +4427,4 @@ def test_env_view_ignores_different_file_conflicts(tmp_path, install_mockery, mo
     install()
     prefix_dependent = e.matching_spec("view-ignore-conflict").prefix
     # The dependent's file is linked into the view
-    assert os.readlink(tmp_path / "view" / "bin" / "x") == prefix_dependent.bin.x
+    assert readlink(tmp_path / "view" / "bin" / "x") == prefix_dependent.bin.x
@@ -384,9 +384,18 @@ def test_clang_flags():
     unsupported_flag_test("cxx17_flag", "clang@3.4")
     supported_flag_test("cxx17_flag", "-std=c++1z", "clang@3.5")
     supported_flag_test("cxx17_flag", "-std=c++17", "clang@5.0")
+    unsupported_flag_test("cxx20_flag", "clang@4.0")
+    supported_flag_test("cxx20_flag", "-std=c++2a", "clang@5.0")
+    supported_flag_test("cxx20_flag", "-std=c++20", "clang@11.0")
+    unsupported_flag_test("cxx23_flag", "clang@11.0")
+    supported_flag_test("cxx23_flag", "-std=c++2b", "clang@12.0")
+    supported_flag_test("cxx23_flag", "-std=c++23", "clang@17.0")
     supported_flag_test("c99_flag", "-std=c99", "clang@3.3")
     unsupported_flag_test("c11_flag", "clang@2.0")
     supported_flag_test("c11_flag", "-std=c11", "clang@6.1.0")
+    unsupported_flag_test("c23_flag", "clang@8.0")
+    supported_flag_test("c23_flag", "-std=c2x", "clang@9.0")
+    supported_flag_test("c23_flag", "-std=c23", "clang@18.0")
     supported_flag_test("cc_pic_flag", "-fPIC", "clang@3.3")
     supported_flag_test("cxx_pic_flag", "-fPIC", "clang@3.3")
     supported_flag_test("f77_pic_flag", "-fPIC", "clang@3.3")

@@ -1914,11 +1914,11 @@ def test_version_weight_and_provenance(self):
        libc_offset = 1 if spack.solver.asp.using_libc_compatibility() else 0
        criteria = [
            (num_specs - 1 - libc_offset, None, "number of packages to build (vs. reuse)"),
-            (2, 0, "version badness"),
+            (2, 0, "version badness (non roots)"),
        ]

        for criterion in criteria:
-            assert criterion in result.criteria, result_spec
+            assert criterion in result.criteria, criterion
        assert result_spec.satisfies("^b@1.0")

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")

@@ -2546,6 +2546,30 @@ def test_include_specs_from_externals_and_libcs(

        assert result["deprecated-versions"].satisfies("@1.0.0")

+    @pytest.mark.regression("44085")
+    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
+    def test_can_reuse_concrete_externals_for_dependents(self, mutable_config, tmp_path):
+        """Test that external specs that are in the DB can be reused. This means they are
+        preferred to concretizing another external from packages.yaml
+        """
+        packages_yaml = {
+            "externaltool": {"externals": [{"spec": "externaltool@2.0", "prefix": "/fake/path"}]}
+        }
+        mutable_config.set("packages", packages_yaml)
+        # Concretize with gcc@9 to get a suboptimal spec, since we have gcc@10 available
+        external_spec = Spec("externaltool@2 %gcc@9").concretized()
+        assert external_spec.external
+
+        root_specs = [Spec("sombrero")]
+        with spack.config.override("concretizer:reuse", True):
+            solver = spack.solver.asp.Solver()
+            setup = spack.solver.asp.SpackSolverSetup()
+            result, _, _ = solver.driver.solve(setup, root_specs, reuse=[external_spec])
+
+        assert len(result.specs) == 1
+        sombrero = result.specs[0]
+        assert sombrero["externaltool"].dag_hash() == external_spec.dag_hash()
+

@pytest.fixture()
def duplicates_test_repository():

@@ -1176,3 +1176,46 @@ def test_forward_multi_valued_variant_using_requires(

    for constraint in not_expected:
        assert not s.satisfies(constraint)


+def test_strong_preferences_higher_priority_than_reuse(concretize_scope, mock_packages):
+    """Tests that strong preferences have a higher priority than reusing specs."""
+    reused_spec = Spec("adios2~bzip2").concretized()
+    reuse_nodes = list(reused_spec.traverse())
+    root_specs = [Spec("ascent+adios2")]
+
+    # Check that without further configuration adios2 is reused
+    with spack.config.override("concretizer:reuse", True):
+        solver = spack.solver.asp.Solver()
+        setup = spack.solver.asp.SpackSolverSetup()
+        result, _, _ = solver.driver.solve(setup, root_specs, reuse=reuse_nodes)
+    ascent = result.specs[0]
+    assert ascent["adios2"].dag_hash() == reused_spec.dag_hash(), ascent
+
+    # If we add a preference, adios2 is not reused
+    update_packages_config(
+        """
+        packages:
+          adios2:
+            prefer:
+            - "+bzip2"
+        """
+    )
+    with spack.config.override("concretizer:reuse", True):
+        solver = spack.solver.asp.Solver()
+        setup = spack.solver.asp.SpackSolverSetup()
+        result, _, _ = solver.driver.solve(setup, root_specs, reuse=reuse_nodes)
+    ascent = result.specs[0]
+
+    assert ascent["adios2"].dag_hash() != reused_spec.dag_hash()
+    assert ascent["adios2"].satisfies("+bzip2")
+
+    # A preference is still a preference, so we can override it from the input spec
+    with spack.config.override("concretizer:reuse", True):
+        solver = spack.solver.asp.Solver()
+        setup = spack.solver.asp.SpackSolverSetup()
+        result, _, _ = solver.driver.solve(
+            setup, [Spec("ascent+adios2 ^adios2~bzip2")], reuse=reuse_nodes
+        )
+    ascent = result.specs[0]
+    assert ascent["adios2"].dag_hash() == reused_spec.dag_hash(), ascent

@@ -2053,3 +2053,11 @@ def _true(x):

@pytest.fixture()
def do_not_check_runtimes_on_reuse(monkeypatch):
    monkeypatch.setattr(spack.solver.asp, "_has_runtime_dependencies", _true)
+
+
+@pytest.fixture(autouse=True, scope="session")
+def _c_compiler_always_exists():
+    fn = spack.solver.asp.c_compiler_runs
+    spack.solver.asp.c_compiler_runs = _true
+    yield
+    spack.solver.asp.c_compiler_runs = fn

@@ -813,3 +813,33 @@ def test_deconcretize_then_concretize_does_not_error(mutable_mock_env_path, mock
    assert len(e.concrete_roots()) == 3
    all_root_hashes = set(x.dag_hash() for x in e.concrete_roots())
    assert len(all_root_hashes) == 2


+@pytest.mark.regression("44216")
+@pytest.mark.only_clingo()
+def test_root_version_weights_for_old_versions(mutable_mock_env_path, mock_packages):
+    """Tests that, when we select two old versions of root specs that have the same version
+    optimization penalty, both are considered.
+    """
+    mutable_mock_env_path.mkdir()
+    spack_yaml = mutable_mock_env_path / ev.manifest_name
+    spack_yaml.write_text(
+        """spack:
+  specs:
+  # allow any version but the most recent
+  - bowtie@:1.3
+  # allows only the third most recent, so the penalty is 2
+  - gcc@1
+  concretizer:
+    unify: true
+"""
+    )
+    e = ev.Environment(mutable_mock_env_path)
+    with e:
+        e.concretize()
+
+    bowtie = [x for x in e.concrete_roots() if x.name == "bowtie"][0]
+    gcc = [x for x in e.concrete_roots() if x.name == "gcc"][0]
+
+    assert bowtie.satisfies("@=1.3.0")
+    assert gcc.satisfies("@=1.0")

@@ -14,7 +14,7 @@
import pytest

import llnl.util.filesystem as fs
-from llnl.util.symlink import islink, symlink
+from llnl.util.symlink import islink, readlink, symlink

import spack.paths

@@ -181,7 +181,7 @@ def test_symlinks_true(self, stage):

        assert os.path.exists("dest/a/b2")
        with fs.working_dir("dest/a"):
-            assert os.path.exists(os.readlink("b2"))
+            assert os.path.exists(readlink("b2"))

        assert os.path.realpath("dest/f/2") == os.path.abspath("dest/a/b/2")
        assert os.path.realpath("dest/2") == os.path.abspath("dest/1")

@@ -281,7 +281,7 @@ def test_allow_broken_symlinks(self, stage):
        symlink("nonexistant.txt", "source/broken", allow_broken_symlinks=True)
        fs.install_tree("source", "dest", symlinks=True, allow_broken_symlinks=True)
        assert os.path.islink("dest/broken")
-        assert not os.path.exists(os.readlink("dest/broken"))
+        assert not os.path.exists(readlink("dest/broken"))

    def test_glob_src(self, stage):
        """Test using a glob as the source."""

@@ -7,6 +7,8 @@

import pytest

+from llnl.util.symlink import readlink
+
import spack.cmd.modules
import spack.config
import spack.error

@@ -78,7 +80,7 @@ def test_modules_default_symlink(

    link_path = os.path.join(os.path.dirname(mock_module_filename), "default")
    assert os.path.islink(link_path)
-    assert os.readlink(link_path) == mock_module_filename
+    assert readlink(link_path) == mock_module_filename

    generator.remove()
    assert not os.path.lexists(link_path)

@@ -151,7 +151,9 @@ class InMemoryOCIRegistry(DummyServer):
    A third option is to use the chunked upload, but this is not implemented here, because
    it's typically a major performance hit in upload speed, so we're not using it in Spack."""

-    def __init__(self, domain: str, allow_single_post: bool = True) -> None:
+    def __init__(
+        self, domain: str, allow_single_post: bool = True, tags_per_page: int = 100
+    ) -> None:
        super().__init__(domain)
        self.router.register("GET", r"/v2/", self.index)
        self.router.register("HEAD", r"/v2/(?P<name>.+)/blobs/(?P<digest>.+)", self.head_blob)

@@ -165,6 +167,9 @@ def __init__(self, domain: str, allow_single_post: bool = True) -> None:
        # If True, allow single POST upload, not all registries support this
        self.allow_single_post = allow_single_post

+        # How many tags are returned in a single request
+        self.tags_per_page = tags_per_page
+
        # Used for POST + PUT upload. This is a map from session ID to image name
        self.sessions: Dict[str, str] = {}

@@ -280,10 +285,34 @@ def handle_upload(self, req: Request, name: str, digest: Digest):
        return MockHTTPResponse(201, "Created", headers={"Location": f"/v2/{name}/blobs/{digest}"})

    def list_tags(self, req: Request, name: str):
+        # Paginate using Link headers, this was added to the spec in the following commit:
+        # https://github.com/opencontainers/distribution-spec/commit/2ed79d930ecec11dd755dc8190409a3b10f01ca9
+
        # List all tags, exclude digests.
-        tags = [_tag for _name, _tag in self.manifests.keys() if _name == name and ":" not in _tag]
-        tags.sort()
-        return MockHTTPResponse.with_json(200, "OK", body={"tags": tags})
+        all_tags = sorted(
+            _tag for _name, _tag in self.manifests.keys() if _name == name and ":" not in _tag
+        )
+
+        query = urllib.parse.parse_qs(urllib.parse.urlparse(req.full_url).query)
+
+        n = int(query["n"][0]) if "n" in query else self.tags_per_page
+
+        if "last" in query:
+            try:
+                offset = all_tags.index(query["last"][0]) + 1
+            except ValueError:
+                return MockHTTPResponse(404, "Not found")
+        else:
+            offset = 0
+
+        tags = all_tags[offset : offset + n]
+
+        if offset + n < len(all_tags):
+            headers = {"Link": f'</v2/{name}/tags/list?last={tags[-1]}&n={n}>; rel="next"'}
+        else:
+            headers = None
+
+        return MockHTTPResponse.with_json(200, "OK", headers=headers, body={"tags": tags})
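
The mock registry above now returns at most n tags per response and points at the next page via an RFC 5988 Link header. A client drains all pages roughly as in the sketch below (illustrative only, under the assumption of an `opener` callable returning file-like HTTP responses; the `parse_link_rel_next` helper it relies on is added later in this diff):

    # Hypothetical sketch of following paginated /v2/<name>/tags/list responses.
    import json

    def all_tags(opener, registry, name):
        tags, url = [], f"https://{registry}/v2/{name}/tags/list"
        while url:
            response = opener(url)
            tags.extend(json.loads(response.read())["tags"])
            link = response.headers.get("Link")
            rel_next = parse_link_rel_next(link) if link else None
            # The Link target may be relative to the registry root.
            if rel_next and rel_next.startswith("/"):
                rel_next = f"https://{registry}{rel_next}"
            url = rel_next
        return tags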


class DummyServerUrllibHandler(urllib.request.BaseHandler):

@@ -6,6 +6,7 @@

import hashlib
import json
+import random
import urllib.error
import urllib.parse
import urllib.request

@@ -19,6 +20,7 @@
    copy_missing_layers,
    get_manifest_and_config,
    image_from_mirror,
+    list_tags,
    upload_blob,
    upload_manifest,
)

@@ -670,3 +672,31 @@ def test_retry(url, max_retries, expect_failure, expect_requests):

    assert len(server.requests) == expect_requests
    assert sleep_time == [2**i for i in range(expect_requests - 1)]


+def test_list_tags():
+    # Follows a relatively new rewording of the OCI distribution spec, which is not yet tagged.
+    # https://github.com/opencontainers/distribution-spec/commit/2ed79d930ecec11dd755dc8190409a3b10f01ca9
+    N = 20
+    urlopen = create_opener(InMemoryOCIRegistry("example.com", tags_per_page=5)).open
+    image = ImageReference.from_string("example.com/image")
+    to_tag = lambda i: f"tag-{i:02}"
+
+    # Create N tags in arbitrary order
+    _tags_to_create = [to_tag(i) for i in range(N)]
+    random.shuffle(_tags_to_create)
+    for tag in _tags_to_create:
+        upload_manifest(image.with_tag(tag), default_manifest(), tag=True, _urlopen=urlopen)
+
+    # list_tags should return all tags from all pages in order
+    tags = list_tags(image, urlopen)
+    assert len(tags) == N
+    assert [to_tag(i) for i in range(N)] == tags
+
+    # Test a single request, which should give the first 5 tags
+    assert json.loads(urlopen(image.tags_url()).read())["tags"] == [to_tag(i) for i in range(5)]
+
+    # Test response at an offset, which should exclude the `last` tag.
+    assert json.loads(urlopen(image.tags_url() + f"?last={to_tag(N - 3)}").read())["tags"] == [
+        to_tag(i) for i in range(N - 2, N)
+    ]

@@ -16,7 +16,7 @@
import pytest

from llnl.util import filesystem as fs
-from llnl.util.symlink import symlink
+from llnl.util.symlink import readlink, symlink

import spack.binary_distribution as bindist
import spack.cmd.buildcache as buildcache

@@ -181,12 +181,12 @@ def test_relocate_links(tmpdir):
    relocate_links(["to_self", "to_dependency", "to_system"], prefix_to_prefix)

    # These two are relocated
-    assert os.readlink("to_self") == str(tmpdir.join("new_prefix_a", "file"))
-    assert os.readlink("to_dependency") == str(tmpdir.join("new_prefix_b", "file"))
+    assert readlink("to_self") == str(tmpdir.join("new_prefix_a", "file"))
+    assert readlink("to_dependency") == str(tmpdir.join("new_prefix_b", "file"))

    # These two are not.
-    assert os.readlink("to_system") == system_path
-    assert os.readlink("to_self_but_relative") == "relative"
+    assert readlink("to_system") == system_path
+    assert readlink("to_self_but_relative") == "relative"


def test_needs_relocation():

@@ -15,6 +15,7 @@
import pytest

from llnl.util.filesystem import getuid, mkdirp, partition_path, touch, working_dir
+from llnl.util.symlink import readlink

import spack.error
import spack.paths

@@ -872,7 +873,7 @@ def _create_files_from_tree(base, tree):

def _create_tree_from_dir_recursive(path):
    if os.path.islink(path):
-        return os.readlink(path)
+        return readlink(path)
    elif os.path.isdir(path):
        tree = {}
        for name in os.listdir(path):

@@ -87,6 +87,13 @@ def test_url_strip_name_suffixes(url, version, expected):
            59,
            "https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow",
        ),
+        (
+            "hpcviewer",
+            30,
+            "2024.02",
+            51,
+            "https://gitlab.com/hpctoolkit/hpcviewer/-/releases/2024.02/downloads/hpcviewer.tgz",
+        ),
        # Version in stem
        ("zlib", 24, "1.2.10", 29, "http://zlib.net/fossils/zlib-1.2.10.tar.gz"),
        (

@@ -207,3 +207,29 @@ def test_default_download_name_dot_dot():
    assert url_util.default_download_filename("https://example.com/.") == "_"
    assert url_util.default_download_filename("https://example.com/..") == "_."
    assert url_util.default_download_filename("https://example.com/.abcdef") == "_abcdef"


+def test_parse_link_rel_next():
+    parse = url_util.parse_link_rel_next
+    assert parse(r'</abc>; rel="next"') == "/abc"
+    assert parse(r'</abc>; x=y; rel="next", </def>; x=y; rel="prev"') == "/abc"
+    assert parse(r'</abc>; rel="prev"; x=y, </def>; x=y; rel="next"') == "/def"
+
+    # example from RFC5988
+    assert (
+        parse(
+            r"""</TheBook/chapter2>; title*=UTF-8'de'letztes%20Kapitel; rel="previous","""
+            r"""</TheBook/chapter4>; title*=UTF-8'de'n%c3%a4chstes%20Kapitel; rel="next" """
+        )
+        == "/TheBook/chapter4"
+    )
+
+    assert (
+        parse(r"""<https://example.com/example>; key=";a=b, </c/d>; e=f"; rel="next" """)
+        == "https://example.com/example"
+    )
+
+    assert parse("https://example.com/example") is None
+    assert parse("<https://example.com/example; broken=broken") is None
+    assert parse("https://example.com/example; rel=prev") is None
+    assert parse("https://example.com/example; a=b; c=d; g=h") is None

@@ -258,7 +258,9 @@ def parse_version_offset(path):
        # 9th Pass: Version in path
        # github.com/repo/name/releases/download/vver/name
        # e.g. https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow
+        # e.g. https://gitlab.com/hpctoolkit/hpcviewer/-/releases/2024.02/downloads/hpcviewer.tgz
        (r"github\.com/[^/]+/[^/]+/releases/download/[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)/", path),
+        (r"gitlab\.com/[^/]+/.+/-/releases/[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)/downloads/", path),
        # e.g. ftp://ftp.ncbi.nlm.nih.gov/blast/executables/legacy.NOTSUPPORTED/2.2.26/ncbi.tar.gz
        (r"(\d[\da-zA-Z._-]*)/[^/]+$", path),
    ]
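
A quick sanity check of the new GitLab pattern against the hpcviewer URL added to the test data above (illustrative snippet, not part of the diff):

    import re

    # The new 9th-pass pattern for GitLab release download URLs, from the hunk above.
    pattern = r"gitlab\.com/[^/]+/.+/-/releases/[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)/downloads/"
    url = "https://gitlab.com/hpctoolkit/hpcviewer/-/releases/2024.02/downloads/hpcviewer.tgz"

    match = re.search(pattern, url)
    assert match and match.group(1) == "2024.02"  # version captured from the path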

@@ -21,16 +21,6 @@ def get_version_lines(version_hashes_dict: dict, url_dict: Optional[dict] = None
    version_lines = []

    for v, h in version_hashes_dict.items():
-        expand_arg = ""
-
-        # Extract the url for a version if url_dict is provided.
-        url = ""
-        if url_dict is not None and v in url_dict:
-            url = url_dict[v]
-
-        # Add expand_arg since wheels should not be expanded during stanging
-        if url.endswith(".whl") or ".whl#" in url:
-            expand_arg = ", expand=False"
-        version_lines.append(f'    version("{v}", sha256="{h}"{expand_arg})')
+        version_lines.append(f'    version("{v}", sha256="{h}")')

    return "\n".join(version_lines)

@@ -22,7 +22,7 @@ def _libc_from_ldd(ldd: str) -> Optional["spack.spec.Spec"]:
    except Exception:
        return None

-    if not re.search(r"\b(?:gnu|glibc|arm)\b", stdout, re.IGNORECASE):
+    if not re.search(r"\bFree Software Foundation\b", stdout):
        return None

    version_str = re.match(r".+\(.+\) (.+)", stdout)

@@ -75,7 +75,7 @@ def libc_from_dynamic_linker(dynamic_linker: str) -> Optional["spack.spec.Spec"]
            return spec
        except Exception:
            return None
-    elif re.search(r"\b(?:gnu|glibc|arm)\b", stdout, re.IGNORECASE):
+    elif re.search(r"\bFree Software Foundation\b", stdout):
        # output is like "ld.so (...) stable release version 2.33."
        match = re.search(r"version (\d+\.\d+(?:\.\d+)?)", stdout)
        if not match:
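
Both hunks tighten glibc detection: instead of matching vendor words like "gnu" or "arm" anywhere in the tool's banner, they look for the FSF copyright line that glibc's own tools print. On a typical glibc system the banner looks roughly like the sample below (representative output, not captured from this commit; exact wording varies by distribution):

    import re

    # Sample `ldd --version` banner for illustration only.
    stdout = (
        "ldd (GNU libc) 2.33\n"
        "Copyright (C) 2021 Free Software Foundation, Inc.\n"
    )

    assert re.search(r"\bFree Software Foundation\b", stdout)
    # The version is still extracted from the first line, as in the code above:
    assert re.match(r".+\(.+\) (.+)", stdout).group(1) == "2.33"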

@@ -296,8 +296,8 @@ def process_scalar(self):
        if marked(self.event.value):
            self.saved = self.event.value

-    def write_line_break(self):
-        super().write_line_break()
+    def write_line_break(self, data=None):
+        super().write_line_break(data)
        if self.saved is None:
            _ANNOTATIONS.append(colorize("@K{---}"))
            return
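
The override previously dropped the optional `data` argument that the underlying YAML emitter's `write_line_break(data=None)` accepts, so a call passing an explicit break character would fail; forwarding it keeps the subclass signature-compatible. The general pattern, as a minimal sketch (hypothetical classes, not the actual emitter):

    class Base:
        def write_line_break(self, data=None):
            ...  # the real emitter writes `data`, or its default line break

    class Annotating(Base):
        def write_line_break(self, data=None):  # was: def write_line_break(self)
            super().write_line_break(data)
            # ... extra annotation bookkeeping goes here ...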

@@ -10,9 +10,11 @@
import itertools
import os
import posixpath
+import re
import sys
import urllib.parse
import urllib.request
+from typing import Optional

from llnl.path import convert_to_posix_path

@@ -254,3 +256,43 @@ def default_download_filename(url: str) -> str:
        valid_name = "_" + valid_name[1:]

    return valid_name


+def parse_link_rel_next(link_value: str) -> Optional[str]:
+    """Return the next link from a Link header value, if any."""
+
+    # Relaxed version of RFC5988
+    uri = re.compile(r"\s*<([^>]+)>\s*")
+    param_key = r"[^;=\s]+"
+    quoted_string = r"\"([^\"]+)\""
+    unquoted_param_value = r"([^;,\s]+)"
+    param = re.compile(rf";\s*({param_key})\s*=\s*(?:{quoted_string}|{unquoted_param_value})\s*")
+
+    data = link_value
+
+    # Parse a list of <url>; key=value; key=value, <url>; key=value; key=value, ... links.
+    while True:
+        uri_match = re.match(uri, data)
+        if not uri_match:
+            break
+        uri_reference = uri_match.group(1)
+        data = data[uri_match.end() :]
+
+        # Parse parameter list
+        while True:
+            param_match = re.match(param, data)
+            if not param_match:
+                break
+            key, quoted_value, unquoted_value = param_match.groups()
+            value = quoted_value or unquoted_value
+            data = data[param_match.end() :]
+
+            if key == "rel" and value == "next":
+                return uri_reference
+
+        if not data.startswith(","):
+            break
+
+        data = data[1:]
+
+    return None

@@ -9,6 +9,7 @@
from typing import Any, Dict

import llnl.util.tty as tty
+from llnl.util.symlink import readlink

import spack.filesystem_view
import spack.store

@@ -38,7 +39,7 @@ def create_manifest_entry(path: str) -> Dict[str, Any]:
    data: Dict[str, Any] = {"mode": s.st_mode, "owner": s.st_uid, "group": s.st_gid}

    if stat.S_ISLNK(s.st_mode):
-        data["dest"] = os.readlink(path)
+        data["dest"] = readlink(path)

    elif stat.S_ISREG(s.st_mode):
        data["hash"] = compute_hash(path)

@@ -90,7 +91,7 @@ def check_entry(path, data):
    # instead of `lstat(...).st_mode`. So, ignore mode errors for symlinks.
    if not stat.S_ISLNK(s.st_mode) and s.st_mode != data["mode"]:
        res.add_error(path, "mode")
-    elif stat.S_ISLNK(s.st_mode) and os.readlink(path) != data.get("dest"):
+    elif stat.S_ISLNK(s.st_mode) and readlink(path) != data.get("dest"):
        res.add_error(path, "link")
    elif stat.S_ISREG(s.st_mode):
        # Check file contents against hash and listed as file

@@ -64,6 +64,11 @@ default:
    SPACK_TARGET_PLATFORM: "linux"
    SPACK_TARGET_ARCH: "ppc64le"

+.win64-msvc2019:
+  variables:
+    SPACK_TARGET_PLATFORM: "win64"
+    SPACK_TARGET_ARCH: "x86_64"
+
########################################
# Job templates
########################################

@@ -72,6 +77,8 @@ default:
    PIPELINE_MIRROR_TEMPLATE: "single-src-protected-mirrors.yaml.in"
    # TODO: We can remove this when we drop the "deprecated" stack
    PUSH_BUILDCACHE_DEPRECATED: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}"
+    SPACK_CI_CONFIG_ROOT: "${CI_PROJECT_DIR}/share/spack/gitlab/cloud_pipelines/configs"
+    SPACK_CI_SCRIPTS_ROOT: "${CI_PROJECT_DIR}/share/spack/gitlab/cloud_pipelines/scripts"

  rules:
    - if: $SPACK_CI_DISABLE_STACKS =~ /.+/ && $SPACK_CI_STACK_NAME =~ $SPACK_CI_DISABLE_STACKS

@@ -114,16 +121,8 @@ default:
.generate-common:
  stage: generate
  script:
-    - uname -a || true
-    - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
-    - nproc || true
-    - cat /proc/loadavg || true
-    - cat /proc/meminfo | grep 'MemTotal\|MemFree' || true
-    - . "./share/spack/setup-env.sh"
-    - spack --version
-    - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
-    - spack env activate --without-view .
-    - export SPACK_CI_CONFIG_ROOT="${SPACK_ROOT}/share/spack/gitlab/cloud_pipelines/configs"
+    - spack env activate --without-view share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
    - spack
      --config-scope "${SPACK_CI_CONFIG_ROOT}"
      --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}"

@@ -134,29 +133,25 @@ default:
      --config-scope "${SPACK_CI_CONFIG_ROOT}"
      --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}"
      --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}"
      ${CI_STACK_CONFIG_SCOPES}
      audit configs
-    - spack python -c "import os,sys; print(os.path.expandvars(sys.stdin.read()))"
-      < "${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}" > "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml"
+    # Command below needs to be `spack python` due to naming differences across platforms
+    - spack python ${SPACK_CI_SCRIPTS_ROOT}/common/expand_vars.py
+      "${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}"
+      "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml"
    - spack config add -f "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml"
-    - mkdir -p "${CI_PROJECT_DIR}/jobs_scratch_dir"
+    - mkdir "${CI_PROJECT_DIR}/jobs_scratch_dir"
    - spack
      --config-scope "${SPACK_CI_CONFIG_ROOT}"
      --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}"
      --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}"
      ${CI_STACK_CONFIG_SCOPES}
      config blame > "${CI_PROJECT_DIR}/jobs_scratch_dir/spack.yaml.blame"
    - spack -v --color=always
      --config-scope "${SPACK_CI_CONFIG_ROOT}"
      --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}"
      --config-scope "${SPACK_CI_CONFIG_ROOT}/${SPACK_TARGET_PLATFORM}/${SPACK_TARGET_ARCH}"
      ${CI_STACK_CONFIG_SCOPES}
      ci generate --check-index-only
      --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
      --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
-  after_script:
-    - cat /proc/loadavg || true
-    - cat /proc/meminfo | grep 'MemTotal\|MemFree' || true
  artifacts:
    paths:
      - "${CI_PROJECT_DIR}/jobs_scratch_dir"

@@ -179,6 +174,16 @@ default:
# Generate without tags for cases using external runners
.generate-base:
  extends: [ ".base-job", ".generate-common" ]
+  before_script:
+    - uname -a || true
+    - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
+    - nproc || true
+    - cat /proc/loadavg || true
+    - cat /proc/meminfo | grep 'MemTotal\|MemFree' || true
+    - . "./share/spack/setup-env.sh"
+  after_script:
+    - cat /proc/loadavg || true
+    - cat /proc/meminfo | grep 'MemTotal\|MemFree' || true

.generate-x86_64:
  extends: [ ".generate-base" ]

@@ -196,6 +201,25 @@ default:
  extends: [ ".generate-base" ]
  tags: ["spack", "public", "medium", "neoverse_v2"]

+.generate-win64:
+  extends: [ ".base-job", ".generate-common" ]
+  before_script:
+    - $ErrorActionOld=$ErrorActionPreference
+    - $ErrorActionPreference="SilentlyContinue"
+    - python -c"import psutil;print(psutil.getloadavg())"
+    - (Get-WmiObject Win32_PhysicalMemory | measure-object Capacity -sum).sum/1kb
+    - $ErrorActionPreference=$ErrorActionOld
+    - . .\share\spack\setup-env.ps1
+  after_script:
+    - $ErrorActionOld=$ErrorActionPreference
+    - $ErrorActionPreference="SilentlyContinue"
+    - python -c"import psutil;print(psutil.getloadavg())"
+    - (Get-WmiObject Win32_PhysicalMemory | measure-object Capacity -sum).sum/1kb
+    - $ErrorActionPreference=$ErrorActionOld
+  tags: ["spack", "public", "medium", "x86_64-win"]
+  image: "ghcr.io/johnwparent/windows-server21h2:sha-c749cf3"
+
.generate-deprecated:
  extends: [ ".base-job" ]
  stage: generate

@@ -718,7 +742,7 @@ tutorial-build:

ml-linux-x86_64-cpu-generate:
  extends: [ ".generate-x86_64", .ml-linux-x86_64-cpu, ".tags-x86_64_v4" ]
-  image: ghcr.io/spack/linux-ubuntu22.04-x86_64_v2:v2024-01-29
+  image: ghcr.io/spack/ubuntu-22.04:v2024-05-07

ml-linux-x86_64-cpu-build:
  extends: [ ".build", ".ml-linux-x86_64-cpu" ]

@@ -741,7 +765,7 @@ ml-linux-x86_64-cpu-build:

ml-linux-x86_64-cuda-generate:
  extends: [ ".generate-x86_64", .ml-linux-x86_64-cuda, ".tags-x86_64_v4" ]
-  image: ghcr.io/spack/linux-ubuntu22.04-x86_64_v2:v2024-01-29
+  image: ghcr.io/spack/ubuntu-22.04:v2024-05-07

ml-linux-x86_64-cuda-build:
  extends: [ ".build", ".ml-linux-x86_64-cuda" ]

@@ -859,6 +883,15 @@ aws-pcluster-build-neoverse_v1:
    - echo $PATH
    - module avail
    - module list
+    - uname -a || true
+    - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
+    - nproc || true
+    - cat /proc/loadavg || true
+    - cat /proc/meminfo | grep 'MemTotal\|MemFree' || true
+    - . "./share/spack/setup-env.sh"
+  after_script:
+    - cat /proc/loadavg || true
+    - cat /proc/meminfo | grep 'MemTotal\|MemFree' || true

.generate-cray-rhel:
  tags: [ "cray-rhel-zen4", "public" ]

@@ -912,3 +945,25 @@ e4s-cray-sles-build:
  needs:
    - artifacts: True
      job: e4s-cray-sles-generate

+#######################################
+# Windows Visualization Tools
+#######################################
+.windows-vis:
+  extends: [".win64-msvc2019"]
+  variables:
+    SPACK_CI_STACK_NAME: windows-vis
+
+windows-vis-generate:
+  extends: [ ".generate-win64", ".windows-vis" ]
+
+windows-vis-build:
+  extends: [ ".build", ".windows-vis"]
+  trigger:
+    include:
+      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
+        job: windows-vis-generate
+    strategy: depend
+  needs:
+    - artifacts: True
+      job: windows-vis-generate

share/spack/gitlab/cloud_pipelines/configs/win64/ci.yaml (new file)
@@ -0,0 +1,18 @@
+ci:
+  pipeline-gen:
+  - build-job:
+      after_script::
+      - Write-Output "Done"
+
+      before_script::
+      - fsutil 8dot3name set C:\ 0
+      - . .\share\spack\setup-env.ps1
+      - If (Test-Path -path C:\\key\intermediate_ci_signing_key.gpg) { spack.ps1 gpg trust C:\\key\intermediate_ci_signing_key.gpg }
+      - If (Test-Path -path C:\\key\spack_public_key.gpg) { spack.ps1 gpg trust C:\\key\spack_public_key.gpg }
+
+      script::
+      - spack.ps1 env activate --without-view ${SPACK_CONCRETE_ENV_DIR}
+      - spack.ps1 config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{hash}'"
+      - mkdir ${SPACK_ARTIFACTS_ROOT}/user_data
+      - spack.ps1 --backtrace ci rebuild | Tee-Object -FilePath "${env:SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt" 2>&1 | Tee-Object -FilePath "${env:SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt"
+      image: "ghcr.io/johnwparent/windows-server21h2:sha-c749cf3"

share/spack/gitlab/cloud_pipelines/configs/win64/config.yaml (new file)
@@ -0,0 +1,10 @@
+config:
+  build_stage::
+  - 'C:/spack stage'
+  install_tree:
+    root: "C:/spack install"
+    # Path lengths on Windows don't support much padding
+    padded_length: 0
+    # Reduce the projections to only the hash to avoid path length issues
+    projections:
+      all: '{hash}'

@@ -0,0 +1,25 @@
+packages:
+  all:
+    target: [x86_64]
+  tbb:
+    require: "intel-tbb"
+  cmake:
+    externals:
+    - spec: cmake@3.28.0-msvc1
+      prefix: "C:\\Program Files\\Microsoft Visual Studio\\2022\\Community\\Common7\\IDE\\CommonExtensions\\Microsoft\\CMake\\CMake"
+    buildable: False
+  ninja:
+    externals:
+    - spec: ninja@1.11.0
+      prefix: "C:\\Program Files\\Microsoft Visual Studio\\2022\\Community\\Common7\\IDE\\CommonExtensions\\Microsoft\\CMake\\Ninja"
+    buildable: False
+  wgl:
+    externals:
+    - spec: wgl@10.0.22621 plat=x64
+      prefix: "C:\\Program Files (x86)\\Windows Kits\\10"
+    buildable: False
+  win-sdk:
+    externals:
+    - spec: win-sdk@10.0.22621 plat=x64
+      prefix: "C:\\Program Files (x86)\\Windows Kits\\10"
+    buildable: False

@@ -0,0 +1,4 @@
+ci:
+  pipeline-gen:
+  - build-job:
+      tags: [x86_64-win]

@@ -0,0 +1,3 @@
+packages:
+  all:
+    target: [x86_64]

@@ -0,0 +1,10 @@
+import argparse
+import os
+
+parser = argparse.ArgumentParser()
+parser.add_argument("input", type=argparse.FileType("r"))
+parser.add_argument("out", type=argparse.FileType("w"))
+
+args = parser.parse_args()
+
+args.out.write(os.path.expandvars(args.input.read()))
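
This small helper replaces the inline `spack python -c` one-liner removed from `.generate-common` above; shell redirection behaves differently across the Linux and Windows runners, so the input and output files are passed as explicit arguments instead. As the CI script above shows, it is invoked as:

    spack python ${SPACK_CI_SCRIPTS_ROOT}/common/expand_vars.py
      "${SPACK_CI_CONFIG_ROOT}/${PIPELINE_MIRROR_TEMPLATE}"
      "${SPACK_CI_CONFIG_ROOT}/mirrors.yaml"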

@@ -10,6 +10,7 @@ set -e
# The best solution would be to have the compilers hash (or packages contents) be part of the
# individual packages hashes. I don't see this at the moment.
# Set to the latest tag including a recent oneapi compiler.
+# NOTE: If we update this spack version in the future make sure the compiler version also updates.
spack_intel_compiler_commit="develop-2023-08-06"

set_pcluster_defaults() {

@@ -23,10 +24,9 @@ set_pcluster_defaults() {

setup_spack() {
    spack compiler add --scope site
    spack external find --scope site
-    # Remove all autotools/buildtools packages. These versions need to be managed by spack or it will
+    # Do not add autotools/buildtools packages. These versions need to be managed by spack or it will
    # eventually end up in a version mismatch (e.g. when compiling gmp).
-    spack tags build-tools | xargs -I {} spack config --scope site rm packages:{}
    spack external find --scope site --tag core-packages
}

patch_compilers_yaml() {

@@ -99,7 +99,7 @@ install_compilers() {
    # The compilers need to be in the same install tree as the rest of the software such that the path
    # relocation works correctly. This holds the danger that this part will fail when the current spack gets
    # incompatible with the one in $spack_intel_compiler_commit. Therefore, we make intel installations optional
-    # in package.yaml files.
+    # in package.yaml files and add a fallback `%gcc` version for each application.
    if [ "x86_64" == "$(arch)" ]; then
        (
            CURRENT_SPACK_ROOT=${SPACK_ROOT}

@@ -30,8 +30,6 @@ spack:
      variants: +pic +xz
    mesa:
      variants: ~llvm
-    mesa18:
-      variants: ~llvm
    mpich:
      variants: ~wrapperrpath netmod=ofi device=ch4
    ncurses:

@@ -81,7 +79,7 @@ spack:
  - openfoam
  - osu-micro-benchmarks
  - parallel
-  - paraview
+  # - paraview
  - picard
  - quantum-espresso
  - raja

@@ -30,8 +30,6 @@ spack:
      variants: +pic +xz
    mesa:
      variants: ~llvm
-    mesa18:
-      variants: ~llvm
    mpich:
      variants: ~wrapperrpath netmod=ofi device=ch4
    ncurses:

@@ -87,7 +85,7 @@ spack:
  - openfoam
  - osu-micro-benchmarks
  - parallel
-  - paraview
+  # - paraview
  - picard
  - quantum-espresso
  # Build broken for gcc@7.3.1 x86_64_v4 (error: '_mm512_loadu_epi32' was not declared in this scope)

@@ -19,7 +19,10 @@ packages:
  llvm:
    variants: ~lldb
  mpas-model:
-    require: "precision=single make_target=llvm %arm ^parallelio+pnetcdf"
+    require:
+    - one_of:
+      - "precision=single make_target=llvm %arm ^parallelio+pnetcdf"
+      - "precision=single %gcc ^parallelio+pnetcdf"
  mpich:
    require: "mpich pmi=pmi2 device=ch4 netmod=ofi +slurm"
  nvhpc:
|
||||
view: false
|
||||
|
||||
definitions:
|
||||
- optimized_configs:
|
||||
- gromacs target=neoverse_v1
|
||||
- gromacs target=neoverse_n1
|
||||
- apps:
|
||||
- gromacs
|
||||
- mpas-model
|
||||
- mpich
|
||||
- openfoam
|
||||
# - quantum-espresso : %gcc@12.3.0 on neoverse_v1 fails.
|
||||
# Root cause: internal compiler error: in compute_live_loop_exits, at tree-ssa-loop-manip.cc:247
|
||||
- wrf
|
||||
|
||||
- targets:
|
||||
- 'target=neoverse_v1'
|
||||
- 'target=neoverse_n1'
|
||||
|
||||
specs:
|
||||
- $optimized_configs
|
||||
|
||||
- matrix:
|
||||
- [$apps]
|
||||
- [$targets]
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- build-job:
|
||||
|
||||

@@ -5,13 +5,20 @@ packages:
    - one_of:
      - "cflags=-std=c18 target=x86_64_v4"
      - "cflags=-std=c18 target=x86_64_v3"
-      - "%gcc"
+      when: "%intel"
+  gettext:
+    # Newer gettext cannot build with gcc@12 and old AL2 glibc headers
+    # Older gettext versions do not build correctly with oneapi.
+    require:
+    - one_of:
+      - '@:0.20'
+      - '%oneapi'
  gromacs:
    require:
    - one_of:
-      - "+intel_provided_gcc %intel ^intel-oneapi-mkl target=x86_64_v4"
-      - "+intel_provided_gcc %intel ^intel-oneapi-mkl target=x86_64_v3"
-      - "%gcc"
+      - "+intel_provided_gcc ^intel-oneapi-mkl target=x86_64_v4"
+      - "+intel_provided_gcc ^intel-oneapi-mkl target=x86_64_v3"
+      when: "%intel"
  intel-mpi:
    variants: +external-libfabric
  intel-oneapi-compilers:

@@ -21,15 +28,15 @@ packages:
  lammps:
    require:
    - one_of:
-      - "lammps_sizes=bigbig +molecule +kspace +rigid +asphere +opt +openmp +openmp-package +intel %intel ^intel-oneapi-mkl target=x86_64_v4"
-      - "lammps_sizes=bigbig +molecule +kspace +rigid +asphere +opt +openmp +openmp-package %intel ^intel-oneapi-mkl target=x86_64_v3"
-      - "%gcc"
+      - "lammps_sizes=bigbig +molecule +kspace +rigid +asphere +opt +openmp +openmp-package +intel ^intel-oneapi-mkl target=x86_64_v4"
+      - "lammps_sizes=bigbig +molecule +kspace +rigid +asphere +opt +openmp +openmp-package ^intel-oneapi-mkl target=x86_64_v3"
+      when: "%intel"
  libidn2:
    require:
    - one_of:
      - "cflags=-std=c18 target=x86_64_v4"
      - "cflags=-std=c18 target=x86_64_v3"
-      - '%gcc'
+      when: "%intel"
  libfabric:
    buildable: true
    externals:

@@ -41,13 +48,13 @@ packages:
    - one_of:
      - "cflags=-std=c18 target=x86_64_v4"
      - "cflags=-std=c18 target=x86_64_v3"
-      - "%gcc"
+      when: "%intel"
  mpas-model:
    require:
    - one_of:
-      - "precision=single %intel ^parallelio+pnetcdf target=x86_64_v4"
-      - "precision=single %intel ^parallelio+pnetcdf target=x86_64_v3"
-      - "%gcc"
+      - "precision=single ^parallelio+pnetcdf target=x86_64_v4"
+      - "precision=single ^parallelio+pnetcdf target=x86_64_v3"
+      when: "%intel"
  mpich:
    require:
    - one_of:

@@ -67,9 +74,12 @@ packages:
  palace:
    require:
    - one_of:
-      - "palace %oneapi ^fmt@9.1.0 target=x86_64_v4"
-      - "palace %oneapi ^fmt@9.1.0 target=x86_64_v3"
-      - "%gcc ^fmt@9.1.0"
+      - "palace ^fmt@9.1.0 target=x86_64_v4"
+      - "palace ^fmt@9.1.0 target=x86_64_v3"
+      when: "%oneapi"
+    - one_of:
+      - "palace ^fmt@9.1.0"
+      when: "%gcc"
  pmix:
    require:
    - one_of:

@@ -78,9 +88,9 @@ packages:
  quantum-espresso:
    require:
    - one_of:
-      - "quantum-espresso@6.6 %intel ^intel-oneapi-mkl+cluster target=x86_64_v4"
-      - "quantum-espresso@6.6 %intel ^intel-oneapi-mkl+cluster target=x86_64_v3"
-      - "%gcc"
+      - "quantum-espresso@6.6 ^intel-oneapi-mkl+cluster target=x86_64_v4"
+      - "quantum-espresso@6.6 ^intel-oneapi-mkl+cluster target=x86_64_v3"
+      when: "%intel"
  slurm:
    buildable: false
    externals:

@@ -89,12 +99,13 @@ packages:
  wrf:
    require:
    - one_of:
-      - "wrf@4 build_type=dm+sm %intel target=x86_64_v4"
-      - "wrf@4 build_type=dm+sm %intel target=x86_64_v3"
-      - "wrf@4.2.2 +netcdf_classic fflags=\"-fp-model fast=2 -no-heap-arrays -no-prec-div -no-prec-sqrt -fno-common\" build_type=dm+sm %intel target=x86_64_v3"
-      - "%gcc"
+      - "wrf@4 build_type=dm+sm target=x86_64_v4"
+      - "wrf@4 build_type=dm+sm target=x86_64_v3"
+      - "wrf@4.2.2 +netcdf_classic fflags=\"-fp-model fast=2 -no-heap-arrays -no-prec-div -no-prec-sqrt -fno-common\" build_type=dm+sm target=x86_64_v3"
+      when: "%intel"

  all:
-    compiler: [intel, gcc]
+    compiler: [intel, oneapi, gcc]
    permissions:
      read: world
      write: user

@@ -2,14 +2,24 @@ spack:
  view: false

  definitions:
+  - apps:
+    - gromacs %intel
+    - lammps %intel
+    - mpas-model %intel
+    - openfoam %gcc
+    - palace %oneapi
+    - quantum-espresso %intel
+    # - wrf : While building hdf5 cmake errors out with Detecting Fortran/C Interface: Failed to compile
+    #   Root cause: ifort cannot deal with arbitrarily long file names.
+
-  - optimized_configs:
-    - palace target=x86_64_v4
-    - palace target=x86_64_v3
+  - targets:
+    - 'target=x86_64_v4'
+    - 'target=x86_64_v3'

  specs:
-  - $optimized_configs
+  - matrix:
+    - [$apps]
+    - [$targets]

  ci:
    pipeline-gen:
    - build-job:

@@ -28,5 +38,6 @@ spack:
        # Do not distribute Intel & ARM binaries
        - - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep intel-oneapi | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done
+          - for i in $(aws s3 ls --recursive ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/ | grep armpl | awk '{print $4}' | sed -e 's?^.*build_cache/??g'); do aws s3 rm ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/$i; done

  cdash:
    build-group: AWS Packages

@@ -12,8 +12,6 @@ spack:
      - one_of: ['@1.14', '@1.12']
    mesa:
      require: "+glx +osmesa +opengl ~opengles +llvm"
-    libosmesa:
-      require: "mesa +osmesa"
-    libglx:
-      require: "mesa +glx"
    ospray:

@@ -27,16 +25,16 @@ spack:
  - paraview_specs:
    - matrix:
      - - paraview +raytracing
-      - - +qt~osmesa # GUI Support w/ GLX Rendering
-        - ~qt~osmesa # GLX Rendering
-        - ~qt+osmesa # OSMesa Rendering
+      - - +qt ^[virtuals=gl] glx  # GUI Support w/ GLX Rendering
+        - ~qt ^[virtuals=gl] glx  # GLX Rendering
+        - ^[virtuals=gl] osmesa   # OSMesa Rendering
  - visit_specs:
    - matrix:
-      - - visit
-      - - ~gui~osmesa # GLX Rendering
-        - ~gui+osmesa # OSMesa Rendering
+      - - visit~gui
+      - - ^[virtuals=gl] glx    # GLX Rendering
+        - ^[virtuals=gl] osmesa # OSMesa Rendering
        # VisIt GUI does not work with Qt 5.14.2
-        # - +gui~osmesa # GUI Support w/ GLX Rendering
+        # - +gui ^[virtuals=gl] glx # GUI Support w/ GLX Rendering
  - sdk_base_spec:
    - matrix:
      - - ecp-data-vis-sdk +ascent +adios2 +cinema +darshan +faodel +hdf5 +pnetcdf

@@ -56,6 +56,16 @@ spack:
  - openssl certs=system  # must be this, system external does not work
  - libtree
  - patchelf
+  - sed
+  - which
+  - elfutils
+  - fontconfig
+  - font-util
+  - gdb
+  - flex
+  - graphviz
+  - doxygen
+  - meson

  - arch:
    - '%gcc target=x86_64_v3'

@@ -50,7 +50,7 @@ spack:
      variants: +termlib
    paraview:
      # Don't build GUI support or GLX rendering for HPC/container deployments
-      require: "@5.11 ~qt+osmesa"
+      require: "@5.11 ~qt ^[virtuals=gl] osmesa"
    python:
      version: [3.8.13]
    trilinos:

@@ -43,7 +43,7 @@ spack:
      variants: +termlib
    paraview:
      # Don't build GUI support or GLX rendering for HPC/container deployments
-      require: "@5.11 ~qt+osmesa"
+      require: "@5.11 ~qt ^[virtuals=gl] osmesa"
    python:
      version: [3.8.13]
    trilinos:

@@ -168,7 +168,6 @@ spack:
  - hdf5
  - libcatalyst
  - parallel-netcdf
-  - paraview
  - py-cinemasci
  - sz
  - unifyfs

@@ -21,7 +21,7 @@ spack:
      variants: threads=openmp
    paraview:
      # Don't build GUI support or GLX rendering for HPC/container deployments
-      require: "@5.11 ~qt+osmesa"
+      require: "@5.11 ~qt ^[virtuals=gl] osmesa"

    # ROCm 5.4.3
    comgr:

@@ -52,7 +52,7 @@ spack:
      version: [11.8.0]
    paraview:
      # Don't build GUI support or GLX rendering for HPC/container deployments
-      require: "@5.11 ~qt+osmesa"
+      require: "@5.11 ~qt ^[virtuals=gl] osmesa"

  specs:
  # CPU

@@ -31,7 +31,7 @@ spack:
      variants: threads=openmp
    paraview:
      # Don't build GUI support or GLX rendering for HPC/container deployments
-      require: "@5.11 ~qt+osmesa"
+      require: "@5.11 ~qt ^[virtuals=gl] osmesa"
    trilinos:
      require: +amesos +amesos2 +anasazi +aztec +boost +epetra +epetraext
        +ifpack +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu

@@ -79,7 +79,7 @@ spack:
    pipeline-gen:
    - build-job:
        image:
-          name: ghcr.io/spack/linux-ubuntu22.04-x86_64_v2:v2024-01-29
+          name: ghcr.io/spack/ubuntu-22.04:v2024-05-07
          entrypoint: ['']

  cdash:

@@ -83,7 +83,7 @@ spack:
    pipeline-gen:
    - build-job:
        image:
-          name: ghcr.io/spack/linux-ubuntu22.04-x86_64_v2:v2024-01-29
+          name: ghcr.io/spack/ubuntu-22.04:v2024-05-07
          entrypoint: ['']

  cdash:

@@ -22,7 +22,6 @@ spack:
  - conduit # ^hdf5+shared
  - flux-core
  #- flux-sched
-  #- glvis # ^mesa-glu@9.0.0 ^mesa18~llvm # same issue w/chai
  - hypre
  - lbann
  - lvarray ~tests # per Spack issue #23192 # ~examples

@@ -0,0 +1,12 @@
+# Windows Visualization Stack
+# maintainers:
+#   - John Parent (@johnwparent)
+#   - Ryan Krattiger (@kwryankrattiger)
+
+spack:
+  view: false
+  specs:
+  - vtk
+
+  cdash:
+    build-group: Windows Visualization (Kitware)

@@ -0,0 +1,20 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class InvalidGithubPullCommitsPatchUrl(Package):
+    """Package that has a GitHub pull request commit patch URL that fails auditing."""
+
+    homepage = "http://www.example.com"
+    url = "http://www.example.com/patch-1.0.tar.gz"
+
+    version("1.0", md5="0123456789abcdef0123456789abcdef")
+
+    patch(
+        "https://github.com/spack/spack/pull/1/commits/b4da28f71e2cef84c6e289afe89aa4bdf7936048.patch?full_index=1",
+        sha256="eae9035b832792549fac00680db5f180a88ff79feb7d7a535b4fd71f9d885e73",
+    )

var/spack/repos/builtin.mock/packages/sombrero/package.py (new file)
@@ -0,0 +1,16 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class Sombrero(Package):
+    """Simple package with a dependency on an external spec."""
+
+    homepage = "http://www.example.com"
+    url = "http://www.example.com/b-1.0.tar.gz"
+
+    version("1.0", md5="0123456789abcdef0123456789abcdef")
+    depends_on("externaltool")

@@ -36,6 +36,40 @@
}

_versions = {
+    "24.04": {
+        "RHEL-7": (
+            "064c3ecfd71cba3d8bf639448e899388f58eb7faef4b38f3c1aace625ace8b1e",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/24-04/arm-compiler-for-linux_24.04_RHEL-7_aarch64.tar",
+        ),
+        "RHEL-8": (
+            "38f46a3549667d0fbccd947653d3a1a56b630d3bbb1251888c674c463f00dac3",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/24-04/arm-compiler-for-linux_24.04_RHEL-8_aarch64.tar",
+        ),
+        "RHEL-9": (
+            "d335db82c8310e1d79c96dc09a19e4d509c5ab17eb6027214bb79cfc75d8229e",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/24-04/arm-compiler-for-linux_24.04_RHEL-9_aarch64.tar",
+        ),
+        "SLES-15": (
+            "6f2e090efcd8da2cbeaf63272fac5917f637713f1e86d73cde2ad7268e3a05a2",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/24-04/arm-compiler-for-linux_24.04_SLES-15_aarch64.tar",
+        ),
+        "Ubuntu-20.04": (
+            "0d782e6a69a11f90bf3b392313c885a2376c5761f227bf2f68e34e9848ec8e97",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/24-04/arm-compiler-for-linux_24.04_Ubuntu-20.04_aarch64.tar",
+        ),
+        "Ubuntu-22.04": (
+            "0bab2e89f0a2359746f89a01251dca763305c5b0dee95cf47b0968dd1cb5f6f6",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/24-04/arm-compiler-for-linux_24.04_Ubuntu-22.04_aarch64.tar",
+        ),
+        "AmazonLinux-2": (
+            "cf0bebe2d7123749c919a5f4e36100ad21f08ffbad3b53e477205c08ae973a2d",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/24-04/arm-compiler-for-linux_24.04_AmazonLinux-2_aarch64.tar",
+        ),
+        "AmazonLinux-2023": (
+            "035dae8c41a1ac86c8885837978cb712306aa75dc5d26d17aca843b84eaee9f4",
+            "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/24-04/arm-compiler-for-linux_24.04_AmazonLinux-2023_aarch64.tar",
+        ),
+    },
    "23.10": {
        "RHEL-7": (
            "c3bd4df3e5f6c97369237b0067e0a421dceb9c167d73f22f3da87f5025258314",

@@ -192,34 +226,22 @@ def get_armpl_version_to_3(spec):


def get_armpl_prefix(spec):
+    ver = get_armpl_version_to_3(spec)
+    os = get_os(spec.version.string)
    if spec.version.string.startswith("22."):
-        return join_path(
-            spec.prefix,
-            "armpl-{}_AArch64_{}_arm-linux-compiler_aarch64-linux".format(
-                get_armpl_version_to_3(spec), get_os(spec.version.string)
-            ),
-        )
+        return join_path(spec.prefix, f"armpl-{ver}_AArch64_{os}_arm-linux-compiler_aarch64-linux")
    else:
-        return join_path(
-            spec.prefix,
-            "armpl-{}_{}_arm-linux-compiler".format(
-                get_armpl_version_to_3(spec), get_os(spec.version.string)
-            ),
-        )
+        return join_path(spec.prefix, f"armpl-{ver}_{os}_arm-linux-compiler")


def get_acfl_prefix(spec):
+    os = get_os(spec.version.string)
    if spec.version.string.startswith("22."):
        return join_path(
-            spec.prefix,
-            "arm-linux-compiler-{0}_Generic-AArch64_{1}_aarch64-linux".format(
-                spec.version, get_os(spec.version.string)
-            ),
+            spec.prefix, f"arm-linux-compiler-{spec.version}_Generic-AArch64_{os}_aarch64-linux"
        )
    else:
-        return join_path(
-            spec.prefix, f"arm-linux-compiler-{spec.version}_{get_os(spec.version.string)}"
-        )
+        return join_path(spec.prefix, f"arm-linux-compiler-{spec.version}_{os}")


def get_gcc_prefix(spec):

@@ -227,6 +249,16 @@ def get_gcc_prefix(spec):
    return join_path(spec.prefix, next(dir for dir in dirlist if dir.startswith("gcc")))


+def get_armpl_suffix(spec):
+    suffix = ""
+    if spec.satisfies("@24:"):
+        suffix += "_ilp64" if spec.satisfies("+ilp64") else "_lp64"
+    else:
+        suffix += "_ilp64" if spec.satisfies("+ilp64") else ""
+    suffix += "_mp" if spec.satisfies("threads=openmp") else ""
+    return suffix


class Acfl(Package, CompilerPackage):
    """Arm Compiler combines the optimized tools and libraries from Arm
    with a modern LLVM-based compiler framework.

@@ -235,7 +267,7 @@ class Acfl(Package, CompilerPackage):
    homepage = "https://developer.arm.com/Tools%20and%20Software/Arm%20Compiler%20for%20Linux"
    url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/23-10/arm-compiler-for-linux_23.10_Ubuntu-22.04_aarch64.tar"

-    maintainers("annop-w")
+    maintainers("paolotricerri")

    # Build Versions
    for ver, packages in _versions.items():

@@ -310,10 +342,7 @@ def fortran(self):

    @property
    def lib_suffix(self):
-        suffix = ""
-        suffix += "_ilp64" if self.spec.satisfies("+ilp64") else ""
-        suffix += "_mp" if self.spec.satisfies("threads=openmp") else ""
-        return suffix
+        return get_armpl_suffix(self.spec)

    @property
    def blas_libs(self):

@@ -389,8 +418,9 @@ def setup_run_environment(self, env):
    def check_install(self):
        arm_dir = get_acfl_prefix(self.spec)
        armpl_dir = get_armpl_prefix(self.spec)
+        suffix = get_armpl_suffix(self.spec)
        gcc_dir = get_gcc_prefix(self.spec)
-        armpl_example_dir = join_path(armpl_dir, "examples")
+        armpl_example_dir = join_path(armpl_dir, f"examples{suffix}")
        # run example makefile
        make(
            "-C",

@@ -399,6 +429,7 @@ def check_install(self):
            "F90=" + self.fortran,
            "CPATH=" + join_path(arm_dir, "include"),
            "COMPILER_PATH=" + gcc_dir,
+            "ARMPL_DIR=" + armpl_dir,
        )
        # clean up
        make("-C", armpl_example_dir, "clean")
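
The new `get_armpl_suffix` helper above centralizes the ArmPL library-suffix logic that `lib_suffix` and `check_install` now share; from 24.x on, LP64 builds get an explicit `_lp64` tag instead of an empty suffix. For reference, the suffixes it produces, derived directly from the logic above:

    # spec                           -> suffix
    # @24: ~ilp64                    -> "_lp64"
    # @24: +ilp64 threads=openmp     -> "_ilp64_mp"
    # @23.10 ~ilp64                  -> ""
    # @23.10 +ilp64 threads=openmp   -> "_ilp64_mp"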

@@ -3,28 +3,31 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import os
+
from spack.package import *


class Amduprof(Package):
-    """AMD uProf ("MICRO-prof") is a software profiling analysis tool for
-    x86 applications running on Windows, Linux and FreeBSD operating systems
-    and provides event information unique to the AMD "Zen"-based processors
-    and AMD Instinct(tm) MI Series accelerators.
-    """
+    """AMD uProf ("MICRO-prof") is a software profiling analysis tool for x86
+    applications running on Windows, Linux and FreeBSD operating systems and
+    provides event information unique to the AMD "Zen"-based processors and AMD
+    Instinct(tm) MI Series accelerators. AMD uProf enables the developer to better
+    understand the limiters of application performance and evaluate
+    improvements."""

    homepage = "https://developer.amd.com/amd-uprof/"
-    url = "https://download.amd.com/developer/eula/uprof/AMDuProf_Linux_x64_4.2.850.tar.bz2"
+    url = f"file://{os.getcwd()}/AMDuProf_Linux_x64_4.2.850.tar.bz2"
+    manual_download = True

-    maintainers("zzzoom")
+    maintainers("amd-toolchain-support")

    version("4.2.850", sha256="f2d7c4eb9ec9c32845ff8f19874c1e6bcb0fa8ab2c12e73addcbf23a6d1bd623")

    # TODO: build Power Profiling driver on Linux
    # TODO: ROCm for GPU tracing and profiling
    # TODO: BCC and eBPF for OS tracing
    depends_on("binutils@2.27:", type="run")

    conflicts("platform=darwin")
    requires("target=x86_64:", msg="AMD uProf available only on x86_64")

    def install(self, spec, prefix):
        install_tree(".", prefix)

@@ -21,6 +21,12 @@ class AmrWind(CMakePackage, CudaPackage, ROCmPackage):
    license("BSD-3-Clause")

    version("main", branch="main", submodules=True)
+    version(
+        "2.1.0", tag="v2.1.0", commit="bc787f21deca9239928182e27400133934c62658", submodules=True
+    )
    version(
        "2.0.0", tag="v2.0.0", commit="ea448365033fc6bc9ee0febeb369b377f4fd8240", submodules=True
    )
    version(
        "1.4.0", tag="v1.4.0", commit="bdddf133e41a9b7b4c8ce28f1ea1bebec47678f5", submodules=True
    )

@@ -91,7 +97,7 @@ class AmrWind(CMakePackage, CudaPackage, ROCmPackage):
    depends_on("openfast@3.5:", when="@2: +openfast")
    depends_on("helics@:3.3.2", when="+helics")
    depends_on("helics@:3.3.2+mpi", when="+helics+mpi")
-    depends_on("fftw", when="@2: +waves2amr")
+    depends_on("fftw", when="@2.1: +waves2amr")

    for arch in CudaPackage.cuda_arch_values:
        depends_on("hypre+cuda cuda_arch=%s" % arch, when="+cuda+hypre cuda_arch=%s" % arch)

@@ -104,7 +110,7 @@ class AmrWind(CMakePackage, CudaPackage, ROCmPackage):

    conflicts("+openmp", when="+cuda")
    conflicts("+shared", when="+cuda")
-    conflicts("@:1.4.0", when="+waves2amr")
+    conflicts("@:2.0", when="+waves2amr")

    def setup_build_environment(self, env):
        # Avoid compile errors with Intel interprocedural optimization

@@ -2,34 +2,14 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


from spack.package import *


-class AppleGl(Package):
-    """Shim package for the core OpenGL library from Apple"""
-
+class AppleGlBase(BundlePackage):
    homepage = "https://developer.apple.com/library/archive/documentation/GraphicsImaging/Conceptual/OpenGL-MacProgGuide/opengl_intro/opengl_intro.html"

    maintainers("aphecetche")

-    has_code = False
-
-    version("4.1.0")
-
-    provides("gl@4.1")
-
-    # Only supported on 'platform=darwin' and compiler=apple-clang
-    conflicts("platform=linux")
-    conflicts("platform=cray")
-    conflicts("platform=windows")
-    conflicts("%gcc")
-    conflicts("%clang")
-    conflicts("%msvc")
-
-    phases = []
-
    def setup_dependent_build_environment(self, env, dependent_spec):
        # we try to setup a build environment with enough hints
        # for the build system to pick up on the Apple framework version

@@ -50,3 +30,16 @@ def headers(self):
    @property
    def libs(self):
        return LibraryList(join_path(self.prefix, "System/Library/Frameworks/OpenGL.framework"))
+
+
+class AppleGl(AppleGlBase):
+    """Shim package for the core OpenGL library from Apple"""
+
+    version("4.1.0")
+
+    provides("gl@4.1")
+
+    requires(
+        "%apple-clang platform=darwin",
+        msg="Apple-GL is only available on Darwin, when using Apple Clang",
+    )

Some files were not shown because too many files have changed in this diff.