Compare commits: prerelease...v0.22.2
85 commits

SHA1 (author and date columns omitted):
594a376c52 1538c48616 683e50b8d9 9b32fb0beb 2c6df0d491
ce7218acae 246eeb2b69 cc47ee3984 7b644719c1 d8a6aa551e
ac7b18483a 39f37de4ce 703e153404 aa013611bc 6a7ccd4e46
1c6c4b4690 68558b3dd0 5440fe09cd 03c22f403f f339225d22
22c815f3d4 4354288e44 ea2d43b4a6 85e67d60a0 bf6a9ff5ed
1bdc30979d ef1eabe5b3 43d673f915 8a9c501030 9f035ca030
d66dce2d66 ef2aa2f5f5 41f5f6eaab cba347e0b7 a3cef0f02e
45fca040c3 eb2b5739b2 d299e17d43 d883883be0 249dcb49e2
8628add66b aeccba8bc0 d94e8ab36f e66c26871f 2db4ff7061
c248932a94 f15d302fc7 74ef630241 a70ea11e69 a79b1bd9af
ac5d5485b9 04258f9cce 1b14170bd1 a3bc9dbfe8 e7c86259bd
2605aeb072 94536d2b66 5e580fc82e 195bad8675 bd9f3f100a
b5962613a0 cbcfc7e10a 579fadacd0 b86d08b022 02d62cf40f
97369776f0 47af0159dc db6ead6fc1 b4aa2c3cab 4108de1ce4
5fe93fee1e 8207f11333 5bb5d2696f 55f37dffe5 252a5bd71b
f55224f161 189ae4b06e 5e9c702fa7 965bb4d3c0 354f98c94a
5dce480154 f634d48b7c 4daee565ae 8e4dbdc2d7 4f6adc03cd
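To reproduce this comparison from a local clone of spack/spack, something along these lines should work (a sketch, assuming the `prerelease` branch and the `v0.22.2` tag are both fetched; the ref names come from the header above):

    $ git log --oneline prerelease..v0.22.2
    $ git diff prerelease...v0.22.2

The two-dot `git log` lists the commits that are in `v0.22.2` but not in `prerelease`, and the three-dot `git diff` matches GitHub's compare view, which diffs the head against the merge base.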
@@ -5,7 +5,7 @@ coverage:
status:
project:
default:
threshold: 2.0%
threshold: 0.2%

ignore:
- lib/spack/spack/test/.*

@@ -1,5 +1,4 @@
{
"name": "Ubuntu 20.04",
"image": "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01",
"postCreateCommand": "./.devcontainer/postCreateCommand.sh"
}

@@ -1,5 +0,0 @@
{
"name": "Ubuntu 22.04",
"image": "ghcr.io/spack/ubuntu-22.04:v2024-05-07",
"postCreateCommand": "./.devcontainer/postCreateCommand.sh"
}

.github/dependabot.yml (vendored, 11 lines changed)

@@ -5,10 +5,13 @@ updates:
directory: "/"
schedule:
interval: "daily"
# Requirements to run style checks and build documentation
# Requirements to build documentation
- package-ecosystem: "pip"
directories:
- "/.github/workflows/requirements/style/*"
- "/lib/spack/docs"
directory: "/lib/spack/docs"
schedule:
interval: "daily"
# Requirements to run style checks
- package-ecosystem: "pip"
directory: "/.github/workflows/style"
schedule:
interval: "daily"

.github/workflows/audit.yaml (vendored, 25 lines changed)

@@ -28,8 +28,8 @@ jobs:
run:
shell: ${{ matrix.system.shell }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: ${{inputs.python_version}}
- name: Install Python packages

@@ -40,35 +40,30 @@ jobs:
run: |
python -m pip install --upgrade pywin32
- name: Package audits (with coverage)
env:
COVERAGE_FILE: coverage/.coverage-audits-${{ matrix.system.os }}
if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
run: |
. share/spack/setup-env.sh
coverage run $(which spack) audit packages
coverage run $(which spack) audit configs
coverage run $(which spack) -d audit externals
coverage combine
coverage xml
- name: Package audits (without coverage)
if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
run: |
. share/spack/setup-env.sh
spack -d audit packages
spack -d audit configs
spack -d audit externals
- name: Package audits (without coverage)
if: ${{ runner.os == 'Windows' }}
run: |
. share/spack/setup-env.sh
spack -d audit packages
./share/spack/qa/validate_last_exit.ps1
spack -d audit configs
./share/spack/qa/validate_last_exit.ps1
spack -d audit externals
./share/spack/qa/validate_last_exit.ps1
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
- uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
if: ${{ inputs.with_coverage == 'true' }}
with:
name: coverage-audits-${{ matrix.system.os }}
path: coverage
include-hidden-files: true
flags: unittests,audits
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true

@@ -1,7 +1,7 @@
#!/bin/bash
set -e
source share/spack/setup-env.sh
$PYTHON bin/spack bootstrap disable github-actions-v0.5
$PYTHON bin/spack bootstrap disable github-actions-v0.4
$PYTHON bin/spack bootstrap disable spack-install
$PYTHON bin/spack $SPACK_FLAGS solve zlib
tree $BOOTSTRAP/store

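Judging from the bootstrap.yml hunks later in this compare, this script is driven through `bin/spack-tmpconfig` rather than run directly, with `$PYTHON`, `$SPACK_FLAGS`, and `$BOOTSTRAP` expected from the calling environment. A comparable manual run from a Spack checkout might look like the sketch below; the variable values are illustrative assumptions, not taken from this diff:

    $ PYTHON=python3 SPACK_FLAGS=-d ./bin/spack-tmpconfig -b ./.github/workflows/bin/bootstrap-test.sh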
.github/workflows/bootstrap.yml (vendored, 93 lines changed)

@@ -37,14 +37,14 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
with:
fetch-depth: 0
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.6
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/

@@ -60,20 +60,20 @@ jobs:
run: |
brew install cmake bison tree
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
with:
fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: "3.12"
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.6
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack external find --not-buildable cmake bison
spack -d solve zlib
tree $HOME/.spack/bootstrap/store/
tree ~/.spack/bootstrap/store/

gnupg-sources:
runs-on: ${{ matrix.runner }}

@@ -83,22 +83,24 @@ jobs:
steps:
- name: Setup macOS
if: ${{ matrix.runner != 'ubuntu-latest' }}
run: brew install tree gawk
- name: Remove system executables
run: |
while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
sudo rm $(command -v gpg gpg2 patchelf)
done
brew install tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Setup Ubuntu
if: ${{ matrix.runner == 'ubuntu-latest' }}
run: |
sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
with:
fetch-depth: 0
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
spack solve zlib
spack bootstrap disable github-actions-v0.6
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack -d gpg list
tree ~/.spack/bootstrap/store/

@@ -110,17 +112,19 @@ jobs:
steps:
- name: Setup macOS
if: ${{ matrix.runner != 'ubuntu-latest' }}
run: brew install tree
- name: Remove system executables
run: |
while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
sudo rm $(command -v gpg gpg2 patchelf)
done
brew install tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Setup Ubuntu
if: ${{ matrix.runner == 'ubuntu-latest' }}
run: |
sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
with:
fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: |
3.8

@@ -128,16 +132,15 @@ jobs:
3.10
3.11
3.12
3.13
- name: Set bootstrap sources
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack bootstrap disable spack-install
- name: Bootstrap clingo
run: |
set -e
for ver in '3.8' '3.9' '3.10' '3.11' '3.12' '3.13'; do
for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
not_found=1
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
if [[ -d "$ver_dir" ]] ; then

@@ -147,7 +150,7 @@ jobs:
not_found=0
old_path="$PATH"
export PATH="$ver_dir:$PATH"
./bin/spack-tmpconfig -b ./.github/workflows/bin/bootstrap-test.sh
./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
export PATH="$old_path"
fi
fi

@@ -160,45 +163,5 @@ jobs:
run: |
source share/spack/setup-env.sh
spack -d gpg list
tree $HOME/.spack/bootstrap/store/
- name: Bootstrap File
run: |
source share/spack/setup-env.sh
spack -d python share/spack/qa/bootstrap-file.py
tree $HOME/.spack/bootstrap/store/
tree ~/.spack/bootstrap/store/

windows:
runs-on: "windows-latest"
steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
with:
python-version: "3.12"
- name: Setup Windows
run: |
Remove-Item -Path (Get-Command gpg).Path
Remove-Item -Path (Get-Command file).Path
- name: Bootstrap clingo
run: |
./share/spack/setup-env.ps1
spack bootstrap disable github-actions-v0.6
spack bootstrap disable github-actions-v0.5
spack external find --not-buildable cmake bison
spack -d solve zlib
./share/spack/qa/validate_last_exit.ps1
tree $env:userprofile/.spack/bootstrap/store/
- name: Bootstrap GnuPG
run: |
./share/spack/setup-env.ps1
spack -d gpg list
./share/spack/qa/validate_last_exit.ps1
tree $env:userprofile/.spack/bootstrap/store/
- name: Bootstrap File
run: |
./share/spack/setup-env.ps1
spack -d python share/spack/qa/bootstrap-file.py
./share/spack/qa/validate_last_exit.ps1
tree $env:userprofile/.spack/bootstrap/store/

.github/workflows/build-containers.yml (vendored, 21 lines changed)

@@ -40,7 +40,8 @@ jobs:
# 1: Platforms to build for
# 2: Base image (e.g. ubuntu:22.04)
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
[centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
[centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
[centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],

@@ -55,7 +56,7 @@ jobs:
if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b

- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
id: docker_meta

@@ -76,7 +77,7 @@ jobs:
env:
SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
run: |
.github/workflows/bin/generate_spack_yaml_containerize.sh
.github/workflows/generate_spack_yaml_containerize.sh
. share/spack/setup-env.sh
mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile

@@ -87,19 +88,19 @@ jobs:
fi

- name: Upload Dockerfile
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
with:
name: dockerfiles_${{ matrix.dockerfile[0] }}
path: dockerfiles

- name: Set up QEMU
uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349
uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb

- name: Log in to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
with:
registry: ghcr.io
username: ${{ github.actor }}

@@ -107,13 +108,13 @@ jobs:

- name: Log in to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75
uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}

@@ -126,7 +127,7 @@ jobs:
needs: deploy-images
steps:
- name: Merge Artifacts
uses: actions/upload-artifact/merge@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
with:
name: dockerfiles
pattern: dockerfiles_*

.github/workflows/ci.yaml (vendored, 66 lines changed)

@@ -15,6 +15,18 @@ concurrency:
cancel-in-progress: true

jobs:
prechecks:
needs: [ changes ]
uses: ./.github/workflows/valid-style.yml
secrets: inherit
with:
with_coverage: ${{ needs.changes.outputs.core }}
all-prechecks:
needs: [ prechecks ]
runs-on: ubuntu-latest
steps:
- name: Success
run: "true"
# Check which files have been updated by the PR
changes:
runs-on: ubuntu-latest

@@ -24,7 +36,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0

@@ -41,13 +53,6 @@ jobs:
- 'var/spack/repos/builtin/packages/clingo/**'
- 'var/spack/repos/builtin/packages/python/**'
- 'var/spack/repos/builtin/packages/re2c/**'
- 'var/spack/repos/builtin/packages/gnupg/**'
- 'var/spack/repos/builtin/packages/libassuan/**'
- 'var/spack/repos/builtin/packages/libgcrypt/**'
- 'var/spack/repos/builtin/packages/libgpg-error/**'
- 'var/spack/repos/builtin/packages/libksba/**'
- 'var/spack/repos/builtin/packages/npth/**'
- 'var/spack/repos/builtin/packages/pinentry/**'
- 'lib/spack/**'
- 'share/spack/**'
- '.github/workflows/bootstrap.yml'

@@ -67,53 +72,14 @@ jobs:
needs: [ prechecks, changes ]
uses: ./.github/workflows/bootstrap.yml
secrets: inherit

unit-tests:
if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
needs: [ prechecks, changes ]
uses: ./.github/workflows/unit_tests.yaml
secrets: inherit

prechecks:
needs: [ changes ]
uses: ./.github/workflows/valid-style.yml
secrets: inherit
with:
with_coverage: ${{ needs.changes.outputs.core }}

all-prechecks:
needs: [ prechecks ]
if: ${{ always() }}
all:
needs: [ unit-tests, bootstrap ]
runs-on: ubuntu-latest
steps:
- name: Success
run: |
if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
echo "Unit tests failed."
exit 1
else
exit 0
fi

coverage:
needs: [ unit-tests, prechecks ]
uses: ./.github/workflows/coverage.yml
secrets: inherit

all:
needs: [ unit-tests, coverage, bootstrap ]
if: ${{ always() }}
runs-on: ubuntu-latest
# See https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/accessing-contextual-information-about-workflow-runs#needs-context
steps:
- name: Status summary
run: |
if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
echo "Unit tests failed."
exit 1
elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
echo "Bootstrap tests failed."
exit 1
else
exit 0
fi
run: "true"

.github/workflows/coverage.yml (vendored, 34 lines changed)

@@ -1,34 +0,0 @@
name: coverage

on:
workflow_call:

jobs:
# Upload coverage reports to codecov once as a single bundle
upload:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
with:
python-version: '3.11'
cache: 'pip'

- name: Install python dependencies
run: pip install -r .github/workflows/requirements/coverage/requirements.txt

- name: Download coverage artifact files
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16
with:
pattern: coverage-*
path: coverage
merge-multiple: true

- run: ls -la coverage
- run: coverage combine -a coverage/.coverage*
- run: coverage xml

- name: "Upload coverage report to CodeCov"
uses: codecov/codecov-action@5c47607acb93fed5485fdbf7232e8a31425f672a
with:
verbose: true

.github/workflows/install_spack.sh (vendored, new executable file, 8 lines)

@@ -0,0 +1,8 @@
#!/usr/bin/env sh
. share/spack/setup-env.sh
echo -e "config:\n build_jobs: 2" > etc/spack/config.yaml
spack config add "packages:all:target:[x86_64]"
spack compiler find
spack compiler info apple-clang
spack debug report
spack solve zlib

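Since the new helper is a plain `sh` script, trying it by hand from the root of a Spack checkout should amount to the single line below (a sketch: the script overwrites etc/spack/config.yaml inside the checkout, `echo -e` is not portable across sh implementations, and the `apple-clang` query suggests it targets the macOS CI runners):

    $ sh .github/workflows/install_spack.sh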
.github/workflows/nightly-win-builds.yml (vendored, 4 lines changed)

@@ -14,10 +14,10 @@ jobs:
build-paraview-deps:
runs-on: windows-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
with:
fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: 3.9
- name: Install Python packages

@@ -1 +0,0 @@
coverage==7.6.1

@@ -1,7 +0,0 @@
black==24.10.0
clingo==5.7.1
flake8==7.1.1
isort==5.13.2
mypy==1.8.0
types-six==1.16.21.20241105
vermin==1.6.0

.github/workflows/style/requirements.txt (vendored, new file, 7 lines)

@@ -0,0 +1,7 @@
black==24.4.2
clingo==5.7.1
flake8==7.0.0
isort==5.13.2
mypy==1.8.0
types-six==1.16.21.9
vermin==1.6.0

.github/workflows/unit_tests.yaml (vendored, 119 lines changed)

@@ -16,34 +16,45 @@ jobs:
matrix:
os: [ubuntu-latest]
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
concretizer: ['clingo']
on_develop:
- ${{ github.ref == 'refs/heads/develop' }}
include:
- python-version: '3.11'
os: ubuntu-latest
concretizer: original
on_develop: ${{ github.ref == 'refs/heads/develop' }}
- python-version: '3.6'
os: ubuntu-20.04
concretizer: clingo
on_develop: ${{ github.ref == 'refs/heads/develop' }}
exclude:
- python-version: '3.7'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.8'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.9'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.10'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.11'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false

steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
with:
fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages

@@ -55,13 +66,13 @@ jobs:
cmake bison libbison-dev kcov
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov clingo
pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
- name: Bootstrap clingo
if: ${{ matrix.concretizer == 'clingo' }}
env:

@@ -74,25 +85,25 @@ jobs:
- name: Run unit tests
env:
SPACK_PYTHON: python
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
SPACK_TEST_PARALLEL: 2
COVERAGE: true
COVERAGE_FILE: coverage/.coverage-${{ matrix.os }}-python${{ matrix.python-version }}
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
- uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
with:
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
path: coverage
include-hidden-files: true
flags: unittests,linux,${{ matrix.concretizer }}
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
# Test shell integration
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
with:
fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: '3.11'
- name: Install System packages

@@ -107,17 +118,17 @@ jobs:
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
- name: Run shell tests
env:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
- uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
with:
name: coverage-shell
path: coverage
include-hidden-files: true
flags: shelltests,linux
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true

# Test RHEL8 UBI with platform Python. This job is run
# only on PRs modifying core Spack

@@ -130,13 +141,13 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
- name: Setup repo and non-root user
run: |
git --version
git config --global --add safe.directory /__w/spack/spack
git fetch --unshallow
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
useradd spack-test
chown -R spack-test .
- name: Run unit tests

@@ -149,36 +160,36 @@ jobs:
clingo-cffi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
with:
fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: '3.13'
python-version: '3.11'
- name: Install System packages
run: |
sudo apt-get -y update
sudo apt-get -y install coreutils gfortran graphviz gnupg2
sudo apt-get -y install coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build kcov
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo
pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/setup_git.sh
- name: Run unit tests (full suite with coverage)
env:
COVERAGE: true
COVERAGE_FILE: coverage/.coverage-clingo-cffi
SPACK_TEST_SOLVER: clingo
run: |
. share/spack/setup-env.sh
spack bootstrap disable spack-install
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.6
spack bootstrap status
spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
with:
name: coverage-clingo-cffi
path: coverage
include-hidden-files: true
flags: unittests,linux,clingo
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
# Run unit tests on MacOS
macos:
runs-on: ${{ matrix.os }}

@@ -187,10 +198,10 @@ jobs:
os: [macos-13, macos-14]
python-version: ["3.11"]
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
with:
fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: ${{ matrix.python-version }}
- name: Install Python packages

@@ -199,24 +210,24 @@ jobs:
pip install --upgrade pytest coverage[toml] pytest-xdist pytest-cov
- name: Setup Homebrew packages
run: |
brew install dash fish gcc gnupg kcov
brew install dash fish gcc gnupg2 kcov
- name: Run unit tests
env:
SPACK_TEST_SOLVER: clingo
SPACK_TEST_PARALLEL: 4
COVERAGE_FILE: coverage/.coverage-${{ matrix.os }}-python${{ matrix.python-version }}
run: |
git --version
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
. share/spack/setup-env.sh
$(which spack) bootstrap disable spack-install
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python')
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
- uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
with:
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
path: coverage
include-hidden-files: true
flags: unittests,macos
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
# Run unit tests on Windows
windows:
defaults:

@@ -225,10 +236,10 @@ jobs:
powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
runs-on: windows-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: 3.9
- name: Install Python packages

@@ -236,15 +247,15 @@ jobs:
python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
- name: Create local develop
run: |
./.github/workflows/bin/setup_git.ps1
./.github/workflows/setup_git.ps1
- name: Unit Test
env:
COVERAGE_FILE: coverage/.coverage-windows
run: |
spack unit-test --verbose --cov --cov-config=pyproject.toml
spack unit-test -x --verbose --cov --cov-config=pyproject.toml
./share/spack/qa/validate_last_exit.ps1
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
coverage combine -a
coverage xml
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
with:
name: coverage-windows
path: coverage
include-hidden-files: true
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true

.github/workflows/valid-style.yml (vendored, 79 lines changed)

@@ -18,15 +18,15 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: '3.11'
cache: 'pip'
- name: Install Python Packages
run: |
pip install --upgrade pip setuptools
pip install -r .github/workflows/requirements/style/requirements.txt
pip install -r .github/workflows/style/requirements.txt
- name: vermin (Spack's Core)
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
- name: vermin (Repositories)

@@ -35,22 +35,22 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
with:
fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: '3.11'
cache: 'pip'
- name: Install Python packages
run: |
pip install --upgrade pip setuptools
pip install -r .github/workflows/requirements/style/requirements.txt
pip install -r .github/workflows/style/requirements.txt
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
- name: Run style tests
run: |
share/spack/qa/run-style-tests

@@ -70,13 +70,13 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
- name: Setup repo and non-root user
run: |
git --version
git config --global --add safe.directory /__w/spack/spack
git fetch --unshallow
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
useradd spack-test
chown -R spack-test .
- name: Bootstrap Spack development environment

@@ -85,64 +85,5 @@ jobs:
source share/spack/setup-env.sh
spack debug report
spack -d bootstrap now --dev
spack -d style -t black
spack style -t black
spack unit-test -V
import-check:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@v2
with:
version: '1.10'
- uses: julia-actions/cache@v2

# PR: use the base of the PR as the old commit
- name: Checkout PR base commit
if: github.event_name == 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
ref: ${{ github.event.pull_request.base.sha }}
path: old
# not a PR: use the previous commit as the old commit
- name: Checkout previous commit
if: github.event_name != 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 2
path: old
- name: Checkout previous commit
if: github.event_name != 'pull_request'
run: git -C old reset --hard HEAD^

- name: Checkout new commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
path: new
- name: Install circular import checker
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: 9f60f51bc7134e0be73f27623f1b0357d1718427
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Import cycles before
working-directory: circular-import-fighter
run: make SPACK_ROOT=../old && cp solution solution.old
- name: Import cycles after
working-directory: circular-import-fighter
run: make clean-graph && make SPACK_ROOT=../new && cp solution solution.new
- name: Compare import cycles
working-directory: circular-import-fighter
run: |
edges_before="$(grep -oP 'edges to delete: \K\d+' solution.old)"
edges_after="$(grep -oP 'edges to delete: \K\d+' solution.new)"
if [ "$edges_after" -gt "$edges_before" ]; then
printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
printf 'previously this was %s\033[0m\n' "$edges_before"
printf 'Compare \033[1;97m"Import cycles before"\033[0m and '
printf '\033[1;97m"Import cycles after"\033[0m to see problematic imports.\n'
exit 1
else
printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
fi

@@ -14,26 +14,3 @@ sphinx:
python:
install:
- requirements: lib/spack/docs/requirements.txt

search:
ranking:
spack.html: -10
spack.*.html: -10
llnl.html: -10
llnl.*.html: -10
_modules/*: -10
command_index.html: -9
basic_usage.html: 5
configuration.html: 5
config_yaml.html: 5
packages_yaml.html: 5
build_settings.html: 5
environments.html: 5
containers.html: 5
mirrors.html: 5
module_file_support.html: 5
repositories.html: 5
binary_caches.html: 5
chain.html: 5
pipelines.html: 5
packaging_guide.html: 5

CHANGELOG.md (12 lines changed)

@@ -60,6 +60,7 @@
- suite-sparse: improve setting of the `libs` property (#44214)
- netlib-lapack: provide blas and lapack together (#44981)

# v0.22.0 (2024-05-12)

`v0.22.0` is a major feature release.

@@ -380,15 +381,6 @@
* 344 committers to packages
* 45 committers to core

# v0.21.3 (2024-10-02)

## Bugfixes
- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
- Bump `archspec` to 0.2.5-dev for better aarch64 and Windows support (#42854, #44005, #45721, #46445)
- Support macOS Sequoia (#45018, #45127, #43862)
- CI and test maintenance (#42909, #42728, #46711, #41943, #43363)

# v0.21.2 (2024-03-01)

@@ -419,7 +411,7 @@
- spack graph: fix coloring with environments (#41240)
- spack info: sort variants in --variants-by-name (#41389)
- Spec.format: error on old style format strings (#41934)
- ASP-based solver:
- ASP-based solver:
- fix infinite recursion when computing concretization errors (#41061)
- don't error for type mismatch on preferences (#41138)
- don't emit spurious debug output (#41218)

README.md (11 lines changed)

@@ -32,7 +32,7 @@

Spack is a multi-platform package manager that builds and installs
multiple versions and configurations of software. It works on Linux,
macOS, Windows, and many supercomputers. Spack is non-destructive: installing a
macOS, and many supercomputers. Spack is non-destructive: installing a
new version of a package does not break existing installations, so many
configurations of the same package can coexist.

@@ -46,18 +46,13 @@ See the
[Feature Overview](https://spack.readthedocs.io/en/latest/features.html)
for examples and highlights.

To install spack and your first package, make sure you have Python & Git.
To install spack and your first package, make sure you have Python.
Then:

$ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
$ git clone -c feature.manyFiles=true https://github.com/spack/spack.git
$ cd spack/bin
$ ./spack install zlib

> [!TIP]
> `-c feature.manyFiles=true` improves git's performance on repositories with 1,000+ files.
>
> `--depth=2` prunes the git history to reduce the size of the Spack installation.

Documentation
----------------

@@ -22,4 +22,4 @@
#
# This is compatible across platforms.
#
exec spack python "$@"
exec /usr/bin/env spack python "$@"

@@ -188,27 +188,25 @@ if NOT "%_sp_args%"=="%_sp_args:--help=%" (
goto :end_switch

:case_load
if NOT defined _sp_args (
exit /B 0
)

:: If args contain --bat, or -h/--help: just execute.
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:--list=%" (
goto :default_case
:: If args contain --sh, --csh, or -h/--help: just execute.
if defined _sp_args (
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
goto :default_case
)
)

for /f "tokens=* USEBACKQ" %%I in (
`python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`
) do %%I
`python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I

goto :end_switch

:case_unload
goto :case_load

:default_case
python "%spack%" %_sp_flags% %_sp_subcommand% %_sp_args%
goto :end_switch

@@ -1,11 +1,71 @@
@ECHO OFF
setlocal EnableDelayedExpansion
:: (c) 2021 Lawrence Livermore National Laboratory
:: To use this file independently of Spack's installer, execute this script in its directory, or add the
:: associated bin directory to your PATH. Invoke to launch Spack Shell.
::
:: source_dir/spack/bin/spack_cmd.bat
::
pushd %~dp0..
set SPACK_ROOT=%CD%
pushd %CD%\..
set spackinstdir=%CD%
popd

call "%~dp0..\share\spack\setup-env.bat"
pushd %SPACK_ROOT%
%comspec% /K

:: Check if Python is on the PATH
if not defined python_pf_ver (
(for /f "delims=" %%F in ('where python.exe') do (
set "python_pf_ver=%%F"
goto :found_python
) ) 2> NUL
)
:found_python
if not defined python_pf_ver (
:: If not, look for Python from the Spack installer
:get_builtin
(for /f "tokens=*" %%g in ('dir /b /a:d "!spackinstdir!\Python*"') do (
set "python_ver=%%g")) 2> NUL

if not defined python_ver (
echo Python was not found on your system.
echo Please install Python or add Python to your PATH.
) else (
set "py_path=!spackinstdir!\!python_ver!"
set "py_exe=!py_path!\python.exe"
)
goto :exitpoint
) else (
:: Python is already on the path
set "py_exe=!python_pf_ver!"
(for /F "tokens=* USEBACKQ" %%F in (
`"!py_exe!" --version`) do (set "output=%%F")) 2>NUL
if not "!output:Microsoft Store=!"=="!output!" goto :get_builtin
goto :exitpoint
)
:exitpoint

set "PATH=%SPACK_ROOT%\bin\;%PATH%"
if defined py_path (
set "PATH=%py_path%;%PATH%"
)

if defined py_exe (
"%py_exe%" "%SPACK_ROOT%\bin\haspywin.py"
)

set "EDITOR=notepad"

DOSKEY spacktivate=spack env activate $*

@echo **********************************************************************
@echo ** Spack Package Manager
@echo **********************************************************************

IF "%1"=="" GOTO CONTINUE
set
GOTO:EOF

:continue
set PROMPT=[spack] %PROMPT%
%comspec% /k

@@ -9,15 +9,15 @@ bootstrap:
# may not be able to bootstrap all the software that Spack needs,
# depending on its type.
sources:
- name: github-actions-v0.6
metadata: $spack/share/spack/bootstrap/github-actions-v0.6
- name: github-actions-v0.5
- name: 'github-actions-v0.5'
metadata: $spack/share/spack/bootstrap/github-actions-v0.5
- name: spack-install
- name: 'github-actions-v0.4'
metadata: $spack/share/spack/bootstrap/github-actions-v0.4
- name: 'spack-install'
metadata: $spack/share/spack/bootstrap/spack-install
trusted:
# By default we trust bootstrapping from sources and from binaries
# produced on Github via the workflow
github-actions-v0.6: true
github-actions-v0.5: true
github-actions-v0.4: true
spack-install: true

@@ -39,19 +39,11 @@ concretizer:
# Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
duplicates:
# "none": allows a single node for any package in the DAG.
# "minimal": allows the duplication of 'build-tools' nodes only
# (e.g. py-setuptools, cmake etc.)
# "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
# "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
strategy: minimal
# Option to specify compatibility between operating systems for reuse of compilers and packages
# Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
# it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
# Option to specify compatiblity between operating systems for reuse of compilers and packages
# Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
# it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
# requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
os_compatible: {}

# Option to specify whether to support splicing. Splicing allows for
# the relinking of concrete package dependencies in order to better
# reuse already built packages with ABI compatible dependencies
splice:
explicit: []
automatic: false

@@ -19,7 +19,7 @@ config:
install_tree:
root: $spack/opt/spack
projections:
all: "{architecture.platform}/{architecture.target}/{name}-{version}-{hash}"
all: "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
# install_tree can include an optional padded length (int or boolean)
# default is False (do not pad)
# if padded_length is True, Spack will pad as close to the system max path

@@ -115,6 +115,12 @@ config:
suppress_gpg_warnings: false

# If set to true, Spack will attempt to build any compiler on the spec
# that is not already available. If set to False, Spack will only use
# compilers already configured in compilers.yaml
install_missing_compilers: false

# If set to true, Spack will always check checksums after downloading
# archives. If false, Spack skips the checksum step.
checksum: true

@@ -164,6 +170,23 @@ config:
# If set to true, Spack will use ccache to cache C compiles.
ccache: false

# The concretization algorithm to use in Spack. Options are:
#
# 'clingo': Uses a logic solver under the hood to solve DAGs with full
# backtracking and optimization for user preferences. Spack will
# try to bootstrap the logic solver, if not already available.
#
# 'original': Spack's original greedy, fixed-point concretizer. This
# algorithm can make decisions too early and will not backtrack
# sufficiently for many specs. This will soon be deprecated in
# favor of clingo.
#
# See `concretizer.yaml` for more settings you can fine-tune when
# using clingo.
concretizer: clingo

# How long to wait to lock the Spack installation database. This lock is used
# when Spack needs to manage its own package metadata and all operations are
# expected to complete within the default time limit. The timeout should

etc/spack/defaults/cray/modules.yaml (new file, 16 lines)

@@ -0,0 +1,16 @@
# -------------------------------------------------------------------------
# This is the default configuration for Spack's module file generation.
#
# Settings here are versioned with Spack and are intended to provide
# sensible defaults out of the box. Spack maintainers should edit this
# file to keep it current.
#
# Users can override these settings by editing the following files.
#
# Per-spack-instance settings (overrides defaults):
#   $SPACK_ROOT/etc/spack/modules.yaml
#
# Per-user settings (overrides default and site settings):
#   ~/.spack/modules.yaml
# -------------------------------------------------------------------------
modules: {}

@@ -15,11 +15,12 @@
# -------------------------------------------------------------------------
packages:
all:
compiler:
- apple-clang
- clang
- gcc
providers:
c: [apple-clang, llvm, gcc]
cxx: [apple-clang, llvm, gcc]
elf: [libelf]
fortran: [gcc]
fuse: [macfuse]
gl: [apple-gl]
glu: [apple-glu]

etc/spack/defaults/linux/packages.yaml (new file, 3 lines)

@@ -0,0 +1,3 @@
packages:
iconv:
require: [libiconv]

@@ -15,18 +15,16 @@
# -------------------------------------------------------------------------
packages:
all:
compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
providers:
awk: [gawk]
armci: [armcimpi]
blas: [openblas, amdblis]
c: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
cxx: [gcc, llvm, intel-oneapi-compilers, xl, aocc]
D: [ldc]
daal: [intel-oneapi-daal]
elf: [elfutils]
fftw-api: [fftw, amdfftw]
flame: [libflame, amdlibflame]
fortran: [gcc, llvm]
fortran-rt: [gcc-runtime, intel-oneapi-runtime]
fuse: [libfuse]
gl: [glx, osmesa]

@@ -39,9 +37,9 @@ packages:
jpeg: [libjpeg-turbo, libjpeg]
lapack: [openblas, amdlibflame]
libc: [glibc, musl]
libgfortran: [gcc-runtime]
libgfortran: [ gcc-runtime ]
libglx: [mesa+glx]
libifcore: [intel-oneapi-runtime]
libifcore: [ intel-oneapi-runtime ]
libllvm: [llvm]
lua-lang: [lua, lua-luajit-openresty, lua-luajit]
luajit: [lua-luajit-openresty, lua-luajit]

@@ -63,7 +61,6 @@ packages:
tbb: [intel-tbb]
unwind: [libunwind]
uuid: [util-linux-uuid, libuuid]
wasi-sdk: [wasi-sdk-prebuilt]
xxd: [xxd-standalone, vim]
yacc: [bison, byacc]
ziglang: [zig]

@@ -71,13 +68,3 @@ packages:
permissions:
read: world
write: user
cray-mpich:
buildable: false
cray-mvapich2:
buildable: false
fujitsu-mpi:
buildable: false
hpcx-mpi:
buildable: false
spectrum-mpi:
buildable: false

@@ -1,5 +1,6 @@
config:
locks: false
concretizer: clingo
build_stage::
- '$spack/.staging'
stage_name: '{name}-{version}-{hash:7}'

@@ -15,8 +15,8 @@
# -------------------------------------------------------------------------
packages:
all:
compiler:
- msvc
providers:
c: [msvc]
cxx: [msvc]
mpi: [msmpi]
gl: [wgl]

@@ -1175,17 +1175,6 @@ unspecified version, but packages can depend on other packages with
|
||||
could depend on ``mpich@1.2:`` if it can only build with version
|
||||
``1.2`` or higher of ``mpich``.
|
||||
|
||||
.. note:: Windows Spec Syntax Caveats
|
||||
Windows has a few idiosyncrasies when it comes to the Spack spec syntax and the use of certain shells
|
||||
Spack's spec dependency syntax uses the carat (``^``) character, however this is an escape string in CMD
|
||||
so it must be escaped with an additional carat (i.e. ``^^``).
|
||||
CMD also will attempt to interpret strings with ``=`` characters in them. Any spec including this symbol
|
||||
must double quote the string.
|
||||
|
||||
Note: All of these issues are unique to CMD, they can be avoided by using Powershell.
|
||||
|
||||
For more context on these caveats see the related issues: `carat <https://github.com/spack/spack/issues/42833>`_ and `equals <https://github.com/spack/spack/issues/43348>`_
|
||||
|
||||
Below are more details about the specifiers that you can add to specs.
|
||||
|
||||
.. _version-specifier:
|
||||
@@ -1359,10 +1348,6 @@ For example, for the ``stackstart`` variant:
|
||||
mpileaks stackstart==4 # variant will be propagated to dependencies
|
||||
mpileaks stackstart=4 # only mpileaks will have this variant value
|
||||
|
||||
Spack also allows variants to be propagated from a package that does
|
||||
not have that variant.
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^
|
||||
Compiler Flags
|
||||
^^^^^^^^^^^^^^
|
||||
@@ -1448,12 +1433,22 @@ the reserved keywords ``platform``, ``os`` and ``target``:
|
||||
$ spack install libelf os=ubuntu18.04
|
||||
$ spack install libelf target=broadwell
|
||||
|
||||
or together by using the reserved keyword ``arch``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install libelf arch=cray-CNL10-haswell
|
||||
|
||||
Normally users don't have to bother specifying the architecture if they
|
||||
are installing software for their current host, as in that case the
|
||||
values will be detected automatically. If you need fine-grained control
|
||||
over which packages use which targets (or over *all* packages' default
|
||||
target), see :ref:`package-preferences`.
|
||||
|
||||
.. admonition:: Cray machines
|
||||
|
||||
The situation is a little bit different for Cray machines and a detailed
|
||||
explanation on how the architecture can be set on them can be found at :ref:`cray-support`
|
||||
|
||||
.. _support-for-microarchitectures:
|
||||
|
||||
|
@@ -166,106 +166,3 @@ while `py-numpy` still needs an older version:
|
||||
|
||||
Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the
|
||||
default behavior is ``duplicates:strategy:minimal``.
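For reference, this strategy can also be set explicitly in the concretizer configuration; a minimal sketch, choosing the current default:

.. code-block:: yaml

   concretizer:
     duplicates:
       strategy: minimal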
|
||||
|
||||
--------
|
||||
Splicing
|
||||
--------
|
||||
|
||||
The ``splice`` key covers config attributes for splicing specs in the solver.
|
||||
|
||||
"Splicing" is a method for replacing a dependency with another spec
|
||||
that provides the same package or virtual. There are two types of
|
||||
splices, referring to different behaviors for shared dependencies
|
||||
between the root spec and the new spec replacing a dependency:
|
||||
"transitive" and "intransitive". A "transitive" splice is one that
|
||||
resolves all conflicts by taking the dependency from the new node. An
|
||||
"intransitive" splice is one that resolves all conflicts by taking the
|
||||
dependency from the original root. From a theory perspective, hybrid
|
||||
splices are possible but are not modeled by Spack.
|
||||
|
||||
All spliced specs retain a ``build_spec`` attribute that points to the
|
||||
original Spec before any splice occurred. The ``build_spec`` for a
|
||||
non-spliced spec is itself.
|
||||
|
||||
The figure below shows examples of transitive and intransitive splices:
|
||||
|
||||
.. figure:: images/splices.png
|
||||
:align: center
|
||||
|
||||
The concretizer can be configured to explicitly splice particular
|
||||
replacements for a target spec. Splicing will allow the user to make
|
||||
use of generically built public binary caches, while swapping in
|
||||
highly optimized local builds for performance critical components
|
||||
and/or components that interact closely with the specific hardware
|
||||
details of the system. The most prominent candidate for splicing is
|
||||
MPI providers. MPI packages have relatively well-understood ABI
|
||||
characteristics, and most High Performance Computing facilities deploy
|
||||
highly optimized MPI packages tailored to their particular
|
||||
hardware. The following config block configures Spack to replace
|
||||
whatever MPI provider each spec was concretized to use with the
|
||||
particular package of ``mpich`` with the hash that begins ``abcdef``.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
concretizer:
|
||||
splice:
|
||||
explicit:
|
||||
- target: mpi
|
||||
replacement: mpich/abcdef
|
||||
transitive: false
|
||||
|
||||
.. warning::
|
||||
|
||||
When configuring an explicit splice, you as the user take on the
|
||||
responsibility for ensuring ABI compatibility between the specs
|
||||
matched by the target and the replacement you provide. If they are
|
||||
not compatible, Spack will not warn you and your application will
|
||||
fail to run.
|
||||
|
||||
The ``target`` field of an explicit splice can be any abstract
|
||||
spec. The ``replacement`` field must be a spec that includes the hash
|
||||
of a concrete spec, and the replacement must either be the same
|
||||
package as the target, provide the virtual that is the target, or
|
||||
provide a virtual that the target provides. The ``transitive`` field
|
||||
is optional -- by default, splices will be transitive.
|
||||
|
||||
.. note::
|
||||
|
||||
With explicit splices configured, it is possible for Spack to
|
||||
concretize to a spec that does not satisfy the input. For example,
|
||||
with the config above, ``hdf5 ^mvapich2`` will concretize to use
|
||||
``mpich/abcdef`` instead of ``mvapich2`` as the MPI provider. Spack
|
||||
will warn the user in this case, but will not fail the
|
||||
concretization.
|
||||
|
||||
.. _automatic_splicing:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
Automatic Splicing
|
||||
^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The Spack solver can be configured to do automatic splicing for
|
||||
ABI-compatible packages. Automatic splices are enabled in the concretizer
|
||||
config section
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
concretizer:
|
||||
splice:
|
||||
automatic: True
|
||||
|
||||
Packages can include ABI-compatibility information using the
|
||||
``can_splice`` directive. See :ref:`the packaging
|
||||
guide<abi_compatibility>` for instructions on specifying ABI
|
||||
compatibility using the ``can_splice`` directive.
|
||||
|
||||
.. note::
|
||||
|
||||
The ``can_splice`` directive is experimental and may be changed in
|
||||
future versions.
|
||||
|
||||
When automatic splicing is enabled, the concretizer will combine any
|
||||
number of ABI-compatible specs if possible to reuse installed packages
|
||||
and packages available from binary caches. The end result of these
|
||||
specs is equivalent to a series of transitive/intransitive splices,
|
||||
but the series may be non-obvious.
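As a rough illustration, a ``can_splice`` declaration in a ``package.py`` might look like the sketch below (the package and versions are hypothetical; the exact directive arguments are described in the packaging guide):

.. code-block:: python

   class Foo(Package):
       version("1.1")
       version("1.0")

       # hypothetical: declare that foo@1.1 is ABI-compatible with, and can
       # be spliced in for, foo@1.0
       can_splice("foo@1.0", when="@1.1")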
|
||||
|
@@ -147,15 +147,6 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.
|
||||
def autoreconf(self, spec, prefix):
|
||||
which("bash")("autogen.sh")
|
||||
|
||||
If the ``package.py`` has build instructions in a separate
|
||||
:ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class AutotoolsBuilder(AutotoolsBuilder):
|
||||
def autoreconf(self, pkg, spec, prefix):
|
||||
which("bash")("autogen.sh")
|
||||
|
||||
"""""""""""""""""""""""""""""""""""""""
|
||||
patching configure or Makefile.in files
|
||||
"""""""""""""""""""""""""""""""""""""""
|
||||
|
@@ -130,19 +130,14 @@ before or after a particular phase. For example, in ``perl``, we see:
|
||||
|
||||
@run_after("install")
|
||||
def install_cpanm(self):
|
||||
spec = self.spec
|
||||
maker = make
|
||||
cpan_dir = join_path("cpanm", "cpanm")
|
||||
if sys.platform == "win32":
|
||||
maker = nmake
|
||||
cpan_dir = join_path(self.stage.source_path, cpan_dir)
|
||||
cpan_dir = windows_sfn(cpan_dir)
|
||||
if "+cpanm" in spec:
|
||||
with working_dir(cpan_dir):
|
||||
perl = spec["perl"].command
|
||||
perl("Makefile.PL")
|
||||
maker()
|
||||
maker("install")
|
||||
spec = self.spec
|
||||
|
||||
if spec.satisfies("+cpanm"):
|
||||
with working_dir(join_path("cpanm", "cpanm")):
|
||||
perl = spec["perl"].command
|
||||
perl("Makefile.PL")
|
||||
make()
|
||||
make("install")
|
||||
|
||||
This extra step automatically installs ``cpanm`` in addition to the
|
||||
base Perl installation.
|
||||
@@ -181,14 +176,8 @@ In the ``perl`` package, we can see:
|
||||
|
||||
@run_after("build")
|
||||
@on_package_attributes(run_tests=True)
|
||||
def build_test(self):
|
||||
if sys.platform == "win32":
|
||||
win32_dir = os.path.join(self.stage.source_path, "win32")
|
||||
win32_dir = windows_sfn(win32_dir)
|
||||
with working_dir(win32_dir):
|
||||
nmake("test", ignore_quotes=True)
|
||||
else:
|
||||
make("test")
|
||||
def test(self):
|
||||
make("test")
|
||||
|
||||
As you can guess, this runs ``make test`` *after* building the package,
|
||||
if and only if testing is requested. Again, this is not specific to
|
||||
|
@@ -25,7 +25,7 @@ use Spack to build packages with the tools.
|
||||
The Spack Python class ``IntelOneapiPackage`` is a base class that is
|
||||
used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
|
||||
``IntelOneapiTbb`` and other classes to implement the oneAPI
|
||||
packages. Search for ``oneAPI`` at `packages.spack.io <https://packages.spack.io>`_ for the full
|
||||
packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
|
||||
list of available oneAPI packages, or use::
|
||||
|
||||
spack list -d oneAPI
|
||||
|
@@ -49,14 +49,14 @@ following phases:
|
||||
#. ``install`` - install the package
|
||||
|
||||
Package developers often add unit tests that can be invoked with
|
||||
``scons test`` or ``scons check``. Spack provides a ``build_test`` method
|
||||
``scons test`` or ``scons check``. Spack provides a ``test`` method
|
||||
to handle this. Since we don't know which one the package developer
|
||||
chose, the ``build_test`` method does nothing by default, but can be easily
|
||||
chose, the ``test`` method does nothing by default, but can be easily
|
||||
overridden like so:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def build_test(self):
|
||||
def test(self):
|
||||
scons("check")
|
||||
|
||||
|
||||
|
@@ -11,8 +11,7 @@ Chaining Spack Installations (upstreams.yaml)
|
||||
|
||||
You can point your Spack installation to another installation to use any
|
||||
packages that are installed there. To register the other Spack instance,
|
||||
you can add it as an entry to ``upstreams.yaml`` at any of the
|
||||
:ref:`configuration-scopes`:
|
||||
you can add it as an entry to ``upstreams.yaml``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
@@ -23,8 +22,7 @@ you can add it as an entry to ``upstreams.yaml`` at any of the
|
||||
install_tree: /path/to/another/spack/opt/spack
|
||||
|
||||
``install_tree`` must point to the ``opt/spack`` directory inside of the
|
||||
Spack base directory, or the location of the ``install_tree`` defined
|
||||
in :ref:`config.yaml <config-yaml>`.
|
||||
Spack base directory.
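A minimal ``upstreams.yaml`` entry might look like the following sketch (the instance name and path are placeholders):

.. code-block:: yaml

   upstreams:
     spack-instance-1:
       install_tree: /path/to/another/spack/opt/spack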
|
||||
|
||||
Once the upstream Spack instance has been added, ``spack find`` will
|
||||
automatically check the upstream instance when querying installed packages,
|
||||
|
@@ -206,24 +206,17 @@ def setup(sphinx):
|
||||
("py:class", "six.moves.urllib.parse.ParseResult"),
|
||||
("py:class", "TextIO"),
|
||||
("py:class", "hashlib._Hash"),
|
||||
("py:class", "concurrent.futures._base.Executor"),
|
||||
# Spack classes that are private and we don't want to expose
|
||||
("py:class", "spack.provider_index._IndexBase"),
|
||||
("py:class", "spack.repo._PrependFileLoader"),
|
||||
("py:class", "spack.build_systems._checks.BuilderWithDefaults"),
|
||||
("py:class", "spack.build_systems._checks.BaseBuilder"),
|
||||
# Spack classes that intersphinx is unable to resolve
|
||||
("py:class", "spack.version.StandardVersion"),
|
||||
("py:class", "spack.spec.DependencySpec"),
|
||||
("py:class", "spack.spec.ArchSpec"),
|
||||
("py:class", "spack.spec.InstallStatus"),
|
||||
("py:class", "spack.spec.SpecfileReaderBase"),
|
||||
("py:class", "spack.install_test.Pb"),
|
||||
("py:class", "spack.filesystem_view.SimpleFilesystemView"),
|
||||
("py:class", "spack.traverse.EdgeAndDepth"),
|
||||
("py:class", "archspec.cpu.microarchitecture.Microarchitecture"),
|
||||
("py:class", "spack.compiler.CompilerCache"),
|
||||
# TypeVar that is not handled correctly
|
||||
("py:class", "llnl.util.lang.T"),
|
||||
]
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||
|
@@ -281,7 +281,7 @@ When spack queries for configuration parameters, it searches in
|
||||
higher-precedence scopes first. So, settings in a higher-precedence file
|
||||
can override those with the same key in a lower-precedence one. For
|
||||
list-valued settings, Spack *prepends* higher-precedence settings to
|
||||
lower-precedence settings. Completely ignoring lower-precedence configuration
|
||||
lower-precedence settings. Completely ignoring higher-level configuration
|
||||
options is supported with the ``::`` notation for keys (see
|
||||
:ref:`config-overrides` below).
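For instance, the following sketch uses the ``::`` notation so that the ``build_stage`` list *replaces*, rather than prepends to, any lower-precedence entries:

.. code-block:: yaml

   config:
     build_stage::
     - '$spack/.staging'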
|
||||
|
||||
@@ -511,7 +511,6 @@ Spack understands over a dozen special variables. These are:
|
||||
* ``$target_family``. The target family for the current host, as
|
||||
detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``.
|
||||
* ``$date``: the current date in the format YYYY-MM-DD
|
||||
* ``$spack_short_version``: the Spack version truncated to the first components.
|
||||
|
||||
|
||||
Note that, as with shell variables, you can write these as ``$varname``
|
||||
|
@@ -203,9 +203,12 @@ The OS that are currently supported are summarized in the table below:
|
||||
* - Ubuntu 24.04
|
||||
- ``ubuntu:24.04``
|
||||
- ``spack/ubuntu-noble``
|
||||
* - CentOS Stream9
|
||||
- ``quay.io/centos/centos:stream9``
|
||||
- ``spack/centos-stream9``
|
||||
* - CentOS 7
|
||||
- ``centos:7``
|
||||
- ``spack/centos7``
|
||||
* - CentOS Stream
|
||||
- ``quay.io/centos/centos:stream``
|
||||
- ``spack/centos-stream``
|
||||
* - openSUSE Leap
|
||||
- ``opensuse/leap``
|
||||
- ``spack/leap15``
|
||||
|
@@ -184,7 +184,7 @@ Style Tests
|
||||
|
||||
Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for
|
||||
`PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance and
|
||||
`mypy <https://mypy.readthedocs.io/en/stable/>`_ for type checking. PEP 8 is
|
||||
`mypy <https://mypy.readthedocs.io/en/stable/>` for type checking. PEP 8 is
|
||||
a series of style guides for Python that provide suggestions for everything
|
||||
from variable naming to indentation. In order to limit the number of PRs that
|
||||
were mostly style changes, we decided to enforce PEP 8 conformance. Your PR
|
||||
@@ -316,215 +316,6 @@ documentation tests to make sure there are no errors. Documentation changes can
|
||||
in some obfuscated warning messages. If you don't understand what they mean, feel free
|
||||
to ask when you submit your PR.
|
||||
|
||||
.. _spack-builders-and-pipelines:
|
||||
|
||||
^^^^^^^^^
|
||||
GitLab CI
|
||||
^^^^^^^^^
|
||||
|
||||
""""""""""""""""""
|
||||
Build Cache Stacks
|
||||
""""""""""""""""""
|
||||
|
||||
Spack welcomes the contribution of software stacks of interest to the community. These
|
||||
stacks are used to test package recipes and generate publicly available build caches.
|
||||
Spack uses GitLab CI for managing the orchestration of build jobs.
|
||||
|
||||
GitLab Entry Point
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Add the stack entry point to ``share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml``. There
|
||||
are two stages required for each new stack: the generation stage and the build stage.
|
||||
|
||||
The generate stage is defined using the job template ``.generate`` configured with
|
||||
environment variables defining the name of the stack in ``SPACK_CI_STACK_NAME`` and the
|
||||
platform (``SPACK_TARGET_PLATFORM``) and architecture (``SPACK_TARGET_ARCH``) configuration,
|
||||
and the tags associated with the class of runners to build on.
|
||||
|
||||
.. note::
|
||||
|
||||
The ``SPACK_CI_STACK_NAME`` must match the name of the directory containing the
|
||||
stack's ``spack.yaml``.
|
||||
|
||||
|
||||
.. note::
|
||||
|
||||
The platform and architecture variables are specified in order to select the
|
||||
correct configurations from the generic configurations used in Spack CI. The
|
||||
configurations currently available are:
|
||||
|
||||
* ``.cray_rhel_zen4``
|
||||
* ``.cray_sles_zen4``
|
||||
* ``.darwin_aarch64``
|
||||
* ``.darwin_x86_64``
|
||||
* ``.linux_aarch64``
|
||||
* ``.linux_icelake``
|
||||
* ``.linux_neoverse_n1``
|
||||
* ``.linux_neoverse_v1``
|
||||
* ``.linux_neoverse_v2``
|
||||
* ``.linux_power``
|
||||
* ``.linux_skylake``
|
||||
* ``.linux_x86_64``
|
||||
* ``.linux_x86_64_v4``
|
||||
|
||||
New configurations can be added to accommodate new platforms and architectures.
|
||||
|
||||
|
||||
The build stage is defined as a trigger job that consumes the GitLab CI pipeline generated in
|
||||
the generate stage for this stack. Build stage jobs use the ``.build`` job template which
|
||||
handles the basic configuration.
|
||||
|
||||
An example entry point for a new stack called ``my-super-cool-stack``:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
.my-super-cool-stack:
|
||||
extends: [ ".linux_x86_64_v3" ]
|
||||
variables:
|
||||
SPACK_CI_STACK_NAME: my-super-cool-stack
|
||||
tags: [ "all", "tags", "your", "job", "needs"]
|
||||
|
||||
my-super-cool-stack-generate:
|
||||
extends: [ ".generate", ".my-super-cool-stack" ]
|
||||
image: my-super-cool-stack-image:0.0.1
|
||||
|
||||
my-super-cool-stack-build:
|
||||
extends: [ ".build", ".my-super-cool-stack" ]
|
||||
trigger:
|
||||
include:
|
||||
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
|
||||
job: my-super-cool-stack-generate
|
||||
strategy: depend
|
||||
needs:
|
||||
- artifacts: True
|
||||
job: my-super-cool-stack-generate
|
||||
|
||||
|
||||
Stack Configuration
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The stack configuration is a spack environment file with two additional sections added.
|
||||
Stack configurations should be located in ``share/spack/gitlab/cloud_pipelines/stacks/<stack_name>/spack.yaml``.
|
||||
|
||||
The ``ci`` section is generally used to define stack-specific mappings such as image or tags.
|
||||
For more information on what can go into the ``ci`` section refer to the docs on pipelines.
|
||||
|
||||
The ``cdash`` section is used for defining where to upload the results of builds. Spack configures
|
||||
most of the details for posting pipeline results to
|
||||
`cdash.spack.io <https://cdash.spack.io/index.php?project=Spack+Testing>`_. The only
|
||||
requirement in the stack configuration is to define a ``build-group`` that is unique;
|
||||
this is usually the long name of the stack.
|
||||
|
||||
An example stack that builds ``zlib``.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
view: false
|
||||
packages:
|
||||
all:
|
||||
require: ["%gcc", "target=x86_64_v3"]
|
||||
specs:
|
||||
- zlib
|
||||
|
||||
ci:
|
||||
pipeline-gen:
|
||||
- build-job:
|
||||
image: my-super-cool-stack-image:0.0.1
|
||||
|
||||
cdash:
|
||||
build-group: My Super Cool Stack
|
||||
|
||||
.. note::
|
||||
|
||||
The ``image`` used in the ``*-generate`` job must match exactly the ``image`` used in the ``build-job``.
|
||||
When the images do not match, the build job may fail.
|
||||
|
||||
|
||||
"""""""""""""""""""
|
||||
Registering Runners
|
||||
"""""""""""""""""""
|
||||
|
||||
Contributing computational resources to Spack's CI build farm is one way to help expand the
|
||||
capabilities and offerings of the public Spack build caches. Currently, Spack utilizes Linux runners
|
||||
from AWS, Google, and the University of Oregon (UO).
|
||||
|
||||
Runners require four key pieces:
|
||||
* Runner Registration Token
|
||||
* Accurate tags
|
||||
* OIDC Authentication script
|
||||
* GPG keys
|
||||
|
||||
|
||||
Minimum GitLab Runner Version: ``16.1.0``
|
||||
`Installation instructions <https://docs.gitlab.com/runner/install/>`_
|
||||
|
||||
Registration Token
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The first step to contribute new runners is to open an issue in the `spack infrastructure <https://github.com/spack/spack-infrastructure/issues/new?assignees=&labels=runner-registration&projects=&template=runner_registration.yml>`_
|
||||
project. This will be reported to the Spack infrastructure team, who will guide users through the process
|
||||
of registering new runners for Spack CI.
|
||||
|
||||
The information needed to register a runner is the motivation for the new resources, a semi-detailed description of
|
||||
the runner, and finally the point of contact for maintaining the software on the runner.
|
||||
|
||||
The point of contact will then work with the infrastructure team to obtain runner registration token(s) for interacting
|
||||
with Spack's GitLab instance. Once the runner is active, this point of contact will also be responsible for updating the
|
||||
GitLab runner software to keep pace with Spack's GitLab.
|
||||
|
||||
Tagging
|
||||
~~~~~~~
|
||||
|
||||
In the initial stages of runner registration it is important to **exclude** the special tag ``spack``. This will prevent
|
||||
the new runner(s) from being picked up for production CI jobs while it is configured and evaluated. Once it is determined
|
||||
that the runner is ready for production use the ``spack`` tag will be added.
|
||||
|
||||
Because GitLab has no concept of tag exclusion, runners that provide specialized resources also require specialized tags.
|
||||
For example, a basic CPU-only x86_64 runner may have a tag ``x86_64`` associated with it. However, a runner containing a
|
||||
CUDA-capable GPU may have the tag ``x86_64-cuda`` to denote that it should only be used for packages that will benefit from
|
||||
a CUDA-capable resource.
|
||||
|
||||
OIDC
|
||||
~~~~
|
||||
|
||||
Spack runners use OIDC authentication for connecting to the appropriate AWS bucket
|
||||
which is used for coordinating the communication of binaries between build jobs. In
|
||||
order to configure OIDC authentication, Spack CI runners use a python script with minimal
|
||||
dependencies. This script can be configured for runners as seen here using the ``pre_build_script``.
|
||||
|
||||
.. code-block:: toml
|
||||
|
||||
[[runners]]
|
||||
pre_build_script = """
|
||||
echo 'Executing Spack pre-build setup script'
|
||||
|
||||
for cmd in "${PY3:-}" python3 python; do
|
||||
if command -v > /dev/null "$cmd"; then
|
||||
export PY3="$(command -v "$cmd")"
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -z "${PY3:-}" ]; then
|
||||
echo "Unable to find python3 executable"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
$PY3 -c "import urllib.request;urllib.request.urlretrieve('https://raw.githubusercontent.com/spack/spack-infrastructure/main/scripts/gitlab_runner_pre_build/pre_build.py', 'pre_build.py')"
|
||||
$PY3 pre_build.py > envvars
|
||||
|
||||
. ./envvars
|
||||
rm -f envvars
|
||||
unset GITLAB_OIDC_TOKEN
|
||||
"""
|
||||
|
||||
GPG Keys
|
||||
~~~~~~~~
|
||||
|
||||
Runners that may be utilized for ``protected`` CI require the registration of an intermediate signing key that
|
||||
can be used to sign packages. For more information on package signing read :ref:`key_architecture`.
|
||||
|
||||
--------
|
||||
Coverage
|
||||
--------
|
||||
|
@@ -181,6 +181,10 @@ Spec-related modules
|
||||
:mod:`spack.parser`
|
||||
Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.
|
||||
|
||||
:mod:`spack.concretize`
|
||||
Contains :class:`~spack.concretize.Concretizer` implementation,
|
||||
which allows site administrators to change Spack's :ref:`concretization-policies`.
|
||||
|
||||
:mod:`spack.version`
|
||||
Implements a simple :class:`~spack.version.Version` class with simple
|
||||
comparison semantics. Also implements :class:`~spack.version.VersionRange`
|
||||
@@ -333,9 +337,13 @@ inserting them at different places in the spack code base. Whenever a hook
|
||||
type triggers by way of a function call, we find all the hooks of that type,
|
||||
and run them.
|
||||
|
||||
Spack defines hooks by way of a module in the ``lib/spack/spack/hooks`` directory.
|
||||
This module has to be registered in ``__init__.py`` so that Spack is aware of it.
|
||||
This section will cover the basic kind of hooks, and how to write them.
|
||||
Spack defines hooks by way of a module at ``lib/spack/spack/hooks`` where we can define
|
||||
types of hooks in the ``__init__.py``, and then python files in that folder
|
||||
can use hook functions. The files are automatically parsed, so if you write
|
||||
a new file for some integration (e.g., ``lib/spack/spack/hooks/myintegration.py``
|
||||
you can then write hook functions in that file that will be automatically detected,
|
||||
and run whenever your hook is called. This section will cover the basic kind
|
||||
of hooks, and how to write them.
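A very small sketch of such a module (the file name and body are hypothetical, assuming the standard ``post_install`` hook):

.. code-block:: python

   # lib/spack/spack/hooks/myintegration.py -- hypothetical example
   import llnl.util.tty as tty


   def post_install(spec, explicit=None):
       """Runs after a package is installed; here it just reports the prefix."""
       tty.msg(f"myintegration: installed {spec.name} at {spec.prefix}")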
|
||||
|
||||
^^^^^^^^^^^^^^
|
||||
Types of Hooks
|
||||
@@ -708,27 +716,27 @@ Release branches
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
There are currently two types of Spack releases: :ref:`major releases
|
||||
<major-releases>` (``0.21.0``, ``0.22.0``, etc.) and :ref:`patch releases
|
||||
<patch-releases>` (``0.22.1``, ``0.22.2``, ``0.22.3``, etc.). Here is a
|
||||
<major-releases>` (``0.17.0``, ``0.18.0``, etc.) and :ref:`point releases
|
||||
<point-releases>` (``0.17.1``, ``0.17.2``, ``0.17.3``, etc.). Here is a
|
||||
diagram of how Spack release branches work::
|
||||
|
||||
o branch: develop (latest version, v0.23.0.dev0)
|
||||
o branch: develop (latest version, v0.19.0.dev0)
|
||||
|
|
||||
o
|
||||
| o branch: releases/v0.22, tag: v0.22.1
|
||||
| o branch: releases/v0.18, tag: v0.18.1
|
||||
o |
|
||||
| o tag: v0.22.0
|
||||
| o tag: v0.18.0
|
||||
o |
|
||||
| o
|
||||
|/
|
||||
o
|
||||
|
|
||||
o
|
||||
| o branch: releases/v0.21, tag: v0.21.2
|
||||
| o branch: releases/v0.17, tag: v0.17.2
|
||||
o |
|
||||
| o tag: v0.21.1
|
||||
| o tag: v0.17.1
|
||||
o |
|
||||
| o tag: v0.21.0
|
||||
| o tag: v0.17.0
|
||||
o |
|
||||
| o
|
||||
|/
|
||||
@@ -739,8 +747,8 @@ requests target ``develop``. The ``develop`` branch will report that its
|
||||
version is that of the next **major** release with a ``.dev0`` suffix.
|
||||
|
||||
Each Spack release series also has a corresponding branch, e.g.
|
||||
``releases/v0.22`` has ``v0.22.x`` versions of Spack, and
|
||||
``releases/v0.21`` has ``v0.21.x`` versions. A major release is the first
|
||||
``releases/v0.18`` has ``0.18.x`` versions of Spack, and
|
||||
``releases/v0.17`` has ``0.17.x`` versions. A major release is the first
|
||||
tagged version on a release branch. Minor releases are back-ported from
|
||||
develop onto release branches. This is typically done by cherry-picking
|
||||
bugfix commits off of ``develop``.
|
||||
@@ -770,40 +778,27 @@ for more details.
|
||||
Scheduling work for releases
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
We schedule work for **major releases** through `milestones
|
||||
<https://github.com/spack/spack/milestones>`_ and `GitHub Projects
|
||||
<https://github.com/spack/spack/projects>`_, while **patch releases** use `labels
|
||||
<https://github.com/spack/spack/labels>`_.
|
||||
We schedule work for releases by creating `GitHub projects
|
||||
<https://github.com/spack/spack/projects>`_. At any time, there may be
|
||||
several open release projects. For example, below are two releases (from
|
||||
some past version of the page linked above):
|
||||
|
||||
There is only one milestone open at a time. Its name corresponds to the next major version, for
|
||||
example ``v0.23``. Important issues and pull requests should be assigned to this milestone by
|
||||
core developers, so that they are not forgotten at the time of release. The milestone is closed
|
||||
when the release is made, and a new milestone is created for the next major release.
|
||||
.. image:: images/projects.png
|
||||
|
||||
Bug reports in GitHub issues are automatically labelled ``bug`` and ``triage``. Spack developers
|
||||
assign one of the labels ``impact-low``, ``impact-medium`` or ``impact-high``. This will make the
|
||||
issue appear in the `Triaged bugs <https://github.com/orgs/spack/projects/6>`_ project board.
|
||||
Important issues should be assigned to the next milestone as well, so they appear at the top of
|
||||
the project board.
|
||||
This image shows one release in progress for ``0.15.1`` and another for
|
||||
``0.16.0``. Each of these releases has a project board containing issues
|
||||
and pull requests. GitHub shows a status bar with completed work in
|
||||
green, work in progress in purple, and work not started yet in gray, so
|
||||
it's fairly easy to see progress.
|
||||
|
||||
Spack's milestones are not firm commitments so we move work between releases frequently. If we
|
||||
need to make a release and some tasks are not yet done, we will simply move them to the next major
|
||||
release milestone, rather than delaying the release to complete them.
|
||||
Spack's project boards are not firm commitments so we move work between
|
||||
releases frequently. If we need to make a release and some tasks are not
|
||||
yet done, we will simply move them to the next minor or major release, rather
|
||||
than delaying the release to complete them.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
Backporting bug fixes
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
For more on using GitHub project boards, see `GitHub's documentation
|
||||
<https://docs.github.com/en/github/managing-your-work-on-github/about-project-boards>`_.
|
||||
|
||||
When a bug is fixed in the ``develop`` branch, it is often necessary to backport the fix to one
|
||||
(or more) of the ``release/vX.Y`` branches. Only the release manager is responsible for doing
|
||||
backports, but Spack maintainers are responsible for labelling pull requests (and issues if no bug
|
||||
fix is available yet) with ``vX.Y.Z`` labels. The label should correspond to the next patch version
|
||||
that the bug fix should be backported to.
|
||||
|
||||
Backports are done publicly by the release manager using a pull request named ``Backports vX.Y.Z``.
|
||||
This pull request is opened from the ``backports/vX.Y.Z`` branch, targets the ``releases/vX.Y``
|
||||
branch and contains a (growing) list of cherry-picked commits from the ``develop`` branch.
|
||||
Typically there are one or two backport pull requests open at any given time.
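A sketch of the cherry-picking workflow (branch name, remote, and commit hash are placeholders):

.. code-block:: console

   $ git checkout backports/vX.Y.Z
   $ git cherry-pick <sha-of-squashed-commit-on-develop>
   $ git push origin backports/vX.Y.Z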
|
||||
|
||||
.. _major-releases:
|
||||
|
||||
@@ -811,21 +806,25 @@ Typically there are one or two backport pull requests open at any given time.
|
||||
Making major releases
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Assuming all required work from the milestone is completed, the steps to make the major release
|
||||
are:
|
||||
Assuming a project board has already been created and all required work
|
||||
completed, the steps to make the major release are:
|
||||
|
||||
#. `Create a new milestone <https://github.com/spack/spack/milestones>`_ for the next major
|
||||
release.
|
||||
#. Create two new project boards:
|
||||
|
||||
#. `Create a new label <https://github.com/spack/spack/labels>`_ for the next patch release.
|
||||
* One for the next major release
|
||||
* One for the next point release
|
||||
|
||||
#. Move any optional tasks that are not done to the next milestone.
|
||||
#. Move any optional tasks that are not done to one of the new project boards.
|
||||
|
||||
In general, small bugfixes should go to the next point release. Major
|
||||
features, refactors, and changes that could affect concretization should
|
||||
go in the next major release.
|
||||
|
||||
#. Create a branch for the release, based on ``develop``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git checkout -b releases/v0.23 develop
|
||||
$ git checkout -b releases/v0.15 develop
|
||||
|
||||
For a version ``vX.Y.Z``, the branch's name should be
|
||||
``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
|
||||
@@ -861,8 +860,8 @@ are:
|
||||
|
||||
Create a pull request targeting the ``develop`` branch, bumping the major
|
||||
version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
|
||||
For instance when you have just released ``v0.23.0``, set the version
|
||||
to ``(0, 24, 0, 'dev0')`` on ``develop``.
|
||||
For instance when you have just released ``v0.15.0``, set the version
|
||||
to ``(0, 16, 0, 'dev0')`` on ``develop``.
|
||||
|
||||
#. Follow the steps in :ref:`publishing-releases`.
|
||||
|
||||
@@ -871,52 +870,82 @@ are:
|
||||
#. Follow the steps in :ref:`announcing-releases`.
|
||||
|
||||
|
||||
.. _patch-releases:
|
||||
.. _point-releases:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
Making patch releases
|
||||
Making point releases
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To make the patch release process both efficient and transparent, we use a *backports pull request*
|
||||
which contains cherry-picked commits from the ``develop`` branch. The majority of the work is to
|
||||
cherry-pick the bug fixes, which ideally should be done as soon as they land on ``develop``:
|
||||
this ensures cherry-picking happens in order, and makes conflicts easier to resolve since the
|
||||
changes are fresh in the mind of the developer.
|
||||
Assuming a project board has already been created and all required work
|
||||
completed, the steps to make the point release are:
|
||||
|
||||
The backports pull request is always titled ``Backports vX.Y.Z`` and is labelled ``backports``. It
|
||||
is opened from a branch named ``backports/vX.Y.Z`` and targets the ``releases/vX.Y`` branch.
|
||||
#. Create a new project board for the next point release.
|
||||
|
||||
Whenever a pull request labelled ``vX.Y.Z`` is merged, cherry-pick the associated squashed commit
|
||||
on ``develop`` to the ``backports/vX.Y.Z`` branch. For pull requests that were rebased (or not
|
||||
squashed), cherry-pick each associated commit individually. Never force push to the
|
||||
``backports/vX.Y.Z`` branch.
|
||||
#. Move any optional tasks that are not done to the next project board.
|
||||
|
||||
.. warning::
|
||||
#. Check out the release branch (it should already exist).
|
||||
|
||||
Sometimes you may **still** get merge conflicts even if you have
|
||||
cherry-picked all the commits in order. This generally means there
|
||||
is some other intervening pull request that the one you're trying
|
||||
to pick depends on. In these cases, you'll need to make a judgment
|
||||
call regarding those pull requests. Consider the number of affected
|
||||
files and/or the resulting differences.
|
||||
For the ``X.Y.Z`` release, the release branch is called ``releases/vX.Y``.
|
||||
For ``v0.15.1``, you would check out ``releases/v0.15``:
|
||||
|
||||
1. If the changes are small, you might just cherry-pick it.
|
||||
.. code-block:: console
|
||||
|
||||
2. If the changes are large, then you may decide that this fix is not
|
||||
worth including in a patch release, in which case you should remove
|
||||
the label from the pull request. Remember that large, manual backports
|
||||
are seldom the right choice for a patch release.
|
||||
$ git checkout releases/v0.15
|
||||
|
||||
When all commits are cherry-picked in the ``backports/vX.Y.Z`` branch, make the patch
|
||||
release as follows:
|
||||
#. If a pull request to the release branch named ``Backports vX.Y.Z`` is not already
|
||||
in the project, create it. This pull request ought to be created as early as
|
||||
possible when working on a release project, so that we can build the release
|
||||
commits incrementally, and identify potential conflicts at an early stage.
|
||||
|
||||
#. `Create a new label <https://github.com/spack/spack/labels>`_ ``vX.Y.{Z+1}`` for the next patch
|
||||
release.
|
||||
#. Cherry-pick each pull request in the ``Done`` column of the release
|
||||
project board onto the ``Backports vX.Y.Z`` pull request.
|
||||
|
||||
#. Replace the label ``vX.Y.Z`` with ``vX.Y.{Z+1}`` for all PRs and issues that are not done.
|
||||
This is **usually** fairly simple since we squash the commits from the
|
||||
vast majority of pull requests. That means there is only one commit
|
||||
per pull request to cherry-pick. For example, `this pull request
|
||||
<https://github.com/spack/spack/pull/15777>`_ has three commits, but
|
||||
they were squashed into a single commit on merge. You can see the
|
||||
commit that was created here:
|
||||
|
||||
#. Manually push a single commit with commit message ``Set version to vX.Y.Z`` to the
|
||||
``backports/vX.Y.Z`` branch, which both bumps the Spack version number and updates the changelog:
|
||||
.. image:: images/pr-commit.png
|
||||
|
||||
You can easily cherry pick it like this (assuming you already have the
|
||||
release branch checked out):
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git cherry-pick 7e46da7
|
||||
|
||||
For pull requests that were rebased (or not squashed), you'll need to
|
||||
cherry-pick each associated commit individually.
|
||||
|
||||
.. warning::
|
||||
|
||||
It is important to cherry-pick commits in the order they happened,
|
||||
otherwise you can get conflicts while cherry-picking. When
|
||||
cherry-picking look at the merge date,
|
||||
**not** the number of the pull request or the date it was opened.
|
||||
|
||||
Sometimes you may **still** get merge conflicts even if you have
|
||||
cherry-picked all the commits in order. This generally means there
|
||||
is some other intervening pull request that the one you're trying
|
||||
to pick depends on. In these cases, you'll need to make a judgment
|
||||
call regarding those pull requests. Consider the number of affected
|
||||
files and/or the resulting differences.
|
||||
|
||||
1. If the dependency changes are small, you might just cherry-pick it,
|
||||
too. If you do this, add the task to the release board.
|
||||
|
||||
2. If the changes are large, then you may decide that this fix is not
|
||||
worth including in a point release, in which case you should remove
|
||||
the task from the release project.
|
||||
|
||||
3. You can always decide to manually back-port the fix to the release
|
||||
branch if neither of the above options makes sense, but this can
|
||||
require a lot of work. It's seldom the right choice.
|
||||
|
||||
#. When all the commits from the project board are cherry-picked into
|
||||
the ``Backports vX.Y.Z`` pull request, you can push a commit to:
|
||||
|
||||
1. Bump the version in ``lib/spack/spack/__init__.py``.
|
||||
2. Update ``CHANGELOG.md`` with a list of the changes.
|
||||
@@ -925,22 +954,20 @@ release as follows:
|
||||
release branch. See `the changelog from 0.14.1
|
||||
<https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.
|
||||
|
||||
#. Make sure CI passes on the **backports pull request**, including:
|
||||
#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
|
||||
is needed to keep track in the release branch of all the commits that were
|
||||
cherry-picked.
|
||||
|
||||
#. Make sure CI passes on the release branch, including:
|
||||
|
||||
* Regular unit tests
|
||||
* Build tests
|
||||
* The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_
|
||||
|
||||
#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
|
||||
is needed to keep track in the release branch of all the commits that were
|
||||
cherry-picked.
|
||||
|
||||
#. Make sure CI passes on the last commit of the **release branch**.
|
||||
|
||||
#. In the rare case you need to include additional commits in the patch release after the backports
|
||||
PR is merged, it is best to delete the last commit ``Set version to vX.Y.Z`` from the release
|
||||
branch with a single force push, open a new backports PR named ``Backports vX.Y.Z (2)``, and
|
||||
repeat the process. Avoid repeated force pushes to the release branch.
|
||||
If CI does not pass, you'll need to figure out why, and make changes
|
||||
to the release branch until it does. You can make more commits, modify
|
||||
or remove cherry-picked commits, or cherry-pick **more** from
|
||||
``develop`` to make this happen.
|
||||
|
||||
#. Follow the steps in :ref:`publishing-releases`.
|
||||
|
||||
@@ -1015,31 +1042,25 @@ Updating `releases/latest`
|
||||
|
||||
If the new release is the **highest** Spack release yet, you should
|
||||
also tag it as ``releases/latest``. For example, suppose the highest
|
||||
release is currently ``0.22.3``:
|
||||
release is currently ``0.15.3``:
|
||||
|
||||
* If you are releasing ``0.22.4`` or ``0.23.0``, then you should tag
|
||||
it with ``releases/latest``, as these are higher than ``0.22.3``.
|
||||
* If you are releasing ``0.15.4`` or ``0.16.0``, then you should tag
|
||||
it with ``releases/latest``, as these are higher than ``0.15.3``.
|
||||
|
||||
* If you are making a new release of an **older** major version of
|
||||
Spack, e.g. ``0.21.4``, then you should not tag it as
|
||||
Spack, e.g. ``0.14.4``, then you should not tag it as
|
||||
``releases/latest`` (as there are newer major versions).
|
||||
|
||||
To do so, first fetch the latest tag created on GitHub, since you may not have it locally:
|
||||
To tag ``releases/latest``, do this:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git fetch --force git@github.com:spack/spack vX.Y.Z
|
||||
$ git checkout releases/vX.Y # vX.Y is the new release's branch
|
||||
$ git tag --force releases/latest
|
||||
$ git push --force --tags
|
||||
|
||||
Then tag ``vX.Y.Z`` as ``releases/latest`` and push the individual tag to GitHub.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ git tag --force releases/latest vX.Y.Z
|
||||
$ git push --force git@github.com:spack/spack releases/latest
|
||||
|
||||
The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing ``releases/latest``
|
||||
tag with the new one. Do **not** use the ``--tags`` flag when pushing, since this will push *all*
|
||||
local tags.
|
||||
The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing
|
||||
``releases/latest`` tag with the new one.
|
||||
|
||||
|
||||
.. _announcing-releases:
|
||||
|
@@ -5,56 +5,49 @@
|
||||
|
||||
.. _environments:
|
||||
|
||||
=====================================
|
||||
Environments (spack.yaml, spack.lock)
|
||||
=====================================
|
||||
=========================
|
||||
Environments (spack.yaml)
|
||||
=========================
|
||||
|
||||
An environment is used to group a set of specs intended for some purpose
|
||||
to be built, rebuilt, and deployed in a coherent fashion. Environments
|
||||
define aspects of the installation of the software, such as:
|
||||
An environment is used to group together a set of specs for the
|
||||
purpose of building, rebuilding and deploying in a coherent fashion.
|
||||
Environments provide a number of advantages over the *à la carte*
|
||||
approach of building and loading individual Spack modules:
|
||||
|
||||
#. *which* specs to install;
|
||||
#. *how* those specs are configured; and
|
||||
#. *where* the concretized software will be installed.
|
||||
|
||||
Aggregating this information into an environment for processing has advantages
|
||||
over the *à la carte* approach of building and loading individual Spack modules.
|
||||
|
||||
With environments, you concretize, install, or load (activate) all of the
|
||||
specs with a single command. Concretization fully configures the specs
|
||||
and dependencies of the environment in preparation for installing the
|
||||
software. This is a more robust solution than ad-hoc installation scripts.
|
||||
And you can share an environment or even re-use it on a different computer.
|
||||
|
||||
Environment definitions, especially *how* specs are configured, allow the
|
||||
software to remain stable and repeatable even when Spack packages are upgraded. Changes are only picked up when the environment is explicitly re-concretized.
|
||||
|
||||
Defining *where* specs are installed supports a filesystem view of the
|
||||
environment. Yet Spack maintains a single installation of the software that
|
||||
can be re-used across multiple environments.
|
||||
|
||||
Activating an environment determines *when* all of the associated (and
|
||||
installed) specs are loaded, limiting the loaded software to those specs
|
||||
actually needed by the environment. Spack can even generate a script to
|
||||
load all modules related to an environment.
|
||||
#. Environments separate the steps of (a) choosing what to
|
||||
install, (b) concretizing, and (c) installing. This allows
|
||||
Environments to remain stable and repeatable, even if Spack packages
|
||||
are upgraded: specs are only re-concretized when the user
|
||||
explicitly asks for it. It is even possible to reliably
|
||||
transport environments between different computers running
|
||||
different versions of Spack!
|
||||
#. Environments allow several specs to be built at once; a more robust
|
||||
solution than ad-hoc scripts making multiple calls to ``spack
|
||||
install``.
|
||||
#. An Environment that is built as a whole can be loaded as a whole
|
||||
into the user environment. An Environment can be built to maintain
|
||||
a filesystem view of its packages, and the environment can load
|
||||
that view into the user environment at activation time. Spack can
|
||||
also generate a script to load all modules related to an
|
||||
environment.
|
||||
|
||||
Other packaging systems also provide environments that are similar in
|
||||
some ways to Spack environments; for example, `Conda environments
|
||||
<https://conda.io/docs/user-guide/tasks/manage-environments.html>`_ or
|
||||
`Python Virtual Environments
|
||||
<https://docs.python.org/3/tutorial/venv.html>`_. Spack environments
|
||||
provide some distinctive features though:
|
||||
provide some distinctive features:
|
||||
|
||||
#. A spec installed "in" an environment is no different from the same
|
||||
spec installed anywhere else in Spack.
|
||||
#. Spack environments may contain more than one spec of the same
|
||||
spec installed anywhere else in Spack. Environments are assembled
|
||||
simply by collecting together a set of specs.
|
||||
#. Spack Environments may contain more than one spec of the same
|
||||
package.
|
||||
|
||||
Spack uses a "manifest and lock" model similar to `Bundler gemfiles
|
||||
<https://bundler.io/man/gemfile.5.html>`_ and other package managers.
|
||||
The environment's user input file (or manifest), is named ``spack.yaml``.
|
||||
The lock file, which contains the fully configured and concretized specs,
|
||||
is named ``spack.lock``.
|
||||
<https://bundler.io/man/gemfile.5.html>`_ and other package
|
||||
managers. The user input file is named ``spack.yaml`` and the lock
|
||||
file is named ``spack.lock``
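A bare-bones manifest, just to fix ideas (the specs listed are arbitrary):

.. code-block:: yaml

   spack:
     specs:
     - zlib
     - hdf5 +mpi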
|
||||
|
||||
.. _environments-using:
|
||||
|
||||
@@ -75,60 +68,55 @@ An environment is created by:
|
||||
|
||||
$ spack env create myenv
|
||||
|
||||
The directory ``$SPACK_ROOT/var/spack/environments/myenv`` is created
|
||||
to manage the environment.
|
||||
Spack then creates the directory ``var/spack/environments/myenv``.
|
||||
|
||||
.. note::
|
||||
|
||||
All managed environments by default are stored in the
|
||||
``$SPACK_ROOT/var/spack/environments`` folder. This location can be changed
|
||||
by setting the ``environments_root`` variable in ``config.yaml``.
|
||||
All managed environments by default are stored in the ``var/spack/environments`` folder.
|
||||
This location can be changed by setting the ``environments_root`` variable in ``config.yaml``.
|
||||
|
||||
Spack creates the file ``spack.yaml``, hidden directory ``.spack-env``, and
|
||||
``spack.lock`` file under ``$SPACK_ROOT/var/spack/environments/myenv``. User
|
||||
interaction occurs through the ``spack.yaml`` file and the Spack commands
|
||||
that affect it. Metadata and, by default, the view are stored in the
|
||||
``.spack-env`` directory. When the environment is concretized, Spack creates
|
||||
the ``spack.lock`` file with the fully configured specs and dependencies for
|
||||
In the ``var/spack/environments/myenv`` directory, Spack creates the
|
||||
file ``spack.yaml`` and the hidden directory ``.spack-env``.
|
||||
|
||||
Spack stores metadata in the ``.spack-env`` directory. User
|
||||
interaction will occur through the ``spack.yaml`` file and the Spack
|
||||
commands that affect it. When the environment is concretized, Spack
|
||||
will create a file ``spack.lock`` with the concrete information for
|
||||
the environment.
|
||||
|
||||
The ``.spack-env`` subdirectory also contains:
|
||||
In addition to being the default location for the view associated with
|
||||
an Environment, the ``.spack-env`` directory also contains:
|
||||
|
||||
* ``repo/``: A subdirectory acting as the repo consisting of the Spack
|
||||
packages used in the environment. It allows the environment to build
|
||||
the same, in theory, even on different versions of Spack with different
|
||||
* ``repo/``: A repo consisting of the Spack packages used in this
|
||||
environment. This allows the environment to build the same, in
|
||||
theory, even on different versions of Spack with different
|
||||
packages!
|
||||
* ``logs/``: A subdirectory containing the build logs for the packages
|
||||
in this environment.
|
||||
* ``logs/``: A directory containing the build logs for the packages
|
||||
in this Environment.
|
||||
|
||||
Spack Environments can also be created from either the user input, or
|
||||
manifest, file or the lockfile. Create an environment from a manifest using:
|
||||
Spack Environments can also be created from either a manifest file
|
||||
(usually but not necessarily named, ``spack.yaml``) or a lockfile.
|
||||
To create an Environment from a manifest:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env create myenv spack.yaml
|
||||
|
||||
The resulting environment is guaranteed to have the same root specs as
|
||||
the original but may concretize differently in the presence of different
|
||||
explicit or default configuration settings (e.g., a different version of
|
||||
Spack or for a different user account).
|
||||
|
||||
Create an environment from a ``spack.lock`` file using:
|
||||
To create an Environment from a ``spack.lock`` lockfile:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env create myenv spack.lock
|
||||
|
||||
The resulting environment, when on the same or a compatible machine, is
|
||||
guaranteed to initially have the same concrete specs as the original.
|
||||
Either of these commands can also take a full path to the
|
||||
initialization file.
|
||||
|
||||
.. note::
|
||||
|
||||
Environment creation also accepts a full path to the file.
|
||||
|
||||
If the path is not under the ``$SPACK_ROOT/var/spack/environments``
|
||||
directory then the source is referred to as an
|
||||
:ref:`independent environment <independent_environments>`.
|
||||
A Spack Environment created from a ``spack.yaml`` manifest is
|
||||
guaranteed to have the same root specs as the original Environment,
|
||||
but may concretize differently. A Spack Environment created from a
|
||||
``spack.lock`` lockfile is guaranteed to have the same concrete specs
|
||||
as the original Environment. Either may obviously then differ as the
|
||||
user modifies it.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Activating an Environment
|
||||
@@ -141,7 +129,7 @@ To activate an environment, use the following command:
|
||||
$ spack env activate myenv
|
||||
|
||||
By default, the ``spack env activate`` will load the view associated
|
||||
with the environment into the user environment. The ``-v,
|
||||
with the Environment into the user environment. The ``-v,
|
||||
--with-view`` argument ensures this behavior, and the ``-V,
|
||||
--without-view`` argument activates the environment without changing
|
||||
the user environment variables.
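For example, a sketch of activating without a view:

.. code-block:: console

   $ spack env activate --without-view myenv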
|
||||
@@ -154,11 +142,8 @@ user's prompt to begin with the environment name in brackets.
|
||||
$ spack env activate -p myenv
|
||||
[myenv] $ ...
|
||||
|
||||
The ``activate`` command can also be used to create a new environment, if it is
|
||||
not already defined, by adding the ``--create`` flag. Managed and independent
|
||||
environments can both be created using the same flags that ``spack env create``
|
||||
accepts. If an environment already exists then spack will simply activate it
|
||||
and ignore the create-specific flags.
|
||||
The ``activate`` command can also be used to create a new environment if it does not already
|
||||
exist.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
@@ -183,50 +168,49 @@ or the shortcut alias
|
||||
If the environment was activated with its view, deactivating the
|
||||
environment will remove the view from the user environment.
|
||||
|
||||
.. _independent_environments:
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
Anonymous Environments
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Independent Environments
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Apart from managed environments, Spack also supports anonymous environments.
|
||||
|
||||
Independent environments can be located in any directory outside of Spack.
|
||||
Anonymous environments can be placed in any directory of choice.
|
||||
|
||||
.. note::
|
||||
|
||||
When uninstalling packages, Spack asks the user to confirm the removal of packages
|
||||
that are still used in a managed environment. This is not the case for independent
|
||||
that are still used in a managed environment. This is not the case for anonymous
|
||||
environments.
|
||||
|
||||
To create an independent environment, use one of the following commands:
|
||||
To create an anonymous environment, use one of the following commands:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env create --dir my_env
|
||||
$ spack env create ./my_env
|
||||
|
||||
As a shorthand, you can also create an independent environment upon activation if it does not
|
||||
As a shorthand, you can also create an anonymous environment upon activation if it does not
|
||||
already exist:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env activate --create ./my_env
|
||||
|
||||
For convenience, Spack can also place an independent environment in a temporary directory for you:
|
||||
For convenience, Spack can also place an anonymous environment in a temporary directory for you:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env activate --temp
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Environment-Aware Commands
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Environment Sensitive Commands
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Spack commands are environment-aware. For example, the ``find``
|
||||
command shows only the specs in the active environment if an
|
||||
environment has been activated. Otherwise it shows all specs in
|
||||
the Spack instance. The same rule applies to the ``install`` and
|
||||
``uninstall`` commands.
|
||||
Spack commands are environment sensitive. For example, the ``find``
|
||||
command shows only the specs in the active Environment if an
|
||||
Environment has been activated. Similarly, the ``install`` and
|
||||
``uninstall`` commands act on the active environment.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
@@ -271,33 +255,32 @@ the Spack instance. The same rule applies to the ``install`` and
|
||||
|
||||
|
||||
Note that when we installed the abstract spec ``zlib@1.2.8``, it was
|
||||
presented as a root of the environment. All explicitly installed
|
||||
packages will be listed as roots of the environment.
|
||||
presented as a root of the Environment. All explicitly installed
|
||||
packages will be listed as roots of the Environment.
|
||||
|
||||
All of the Spack commands that act on the list of installed specs are
|
||||
environment-aware in this way, including ``install``,
|
||||
``uninstall``, ``find``, ``extensions``, etcetera. In the
|
||||
Environment-sensitive in this way, including ``install``,
|
||||
``uninstall``, ``find``, ``extensions``, and more. In the
|
||||
:ref:`environment-configuration` section we will discuss
|
||||
environment-aware commands further.
|
||||
Environment-sensitive commands further.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
Adding Abstract Specs
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
An abstract spec is the user-specified spec before Spack applies
|
||||
defaults or dependency information.
|
||||
An abstract spec is the user-specified spec before Spack has applied
|
||||
any defaults or dependency information.
|
||||
|
||||
Users can add abstract specs to an environment using the ``spack add``
|
||||
command. The most important component of an environment is a list of
|
||||
Users can add abstract specs to an Environment using the ``spack add``
|
||||
command. The most important component of an Environment is a list of
|
||||
abstract specs.
|
||||
|
||||
Adding a spec adds it as a root spec of the environment in the user
|
||||
input file (``spack.yaml``). It does not affect the concrete specs
|
||||
in the lock file (``spack.lock``) and it does not install the spec.
|
||||
Adding a spec adds to the manifest (the ``spack.yaml`` file), which is
|
||||
used to define the roots of the Environment, but does not affect the
|
||||
concrete specs in the lockfile, nor does it install the spec.
|
||||
|
||||
The ``spack add`` command is environment-aware. It adds the spec to the
|
||||
currently active environment. An error is generated if there isn't an
|
||||
active environment. All environment-aware commands can also
|
||||
The ``spack add`` command is environment aware. It adds to the
|
||||
currently active environment. All environment aware commands can also
|
||||
be called using the ``spack -e`` flag to specify the environment.
|
||||
|
||||
.. code-block:: console
|
||||
@@ -317,11 +300,11 @@ or
|
||||
Concretizing
^^^^^^^^^^^^

Once user specs have been added to an environment, they can be concretized.
There are three different modes of operation to concretize an environment,
explained in detail in :ref:`environments_concretization_config`.
Regardless of which mode of operation is chosen, the following
command will ensure all of the root specs are concretized according to the
Once some user specs have been added to an environment, they can be concretized.
There are at the moment three different modes of operation to concretize an environment,
which are explained in details in :ref:`environments_concretization_config`.
Regardless of which mode of operation has been chosen, the following
command will ensure all the root specs are concretized according to the
constraints that are prescribed in the configuration:

.. code-block:: console

@@ -330,15 +313,16 @@ constraints that are prescribed in the configuration:

In the case of specs that are not concretized together, the command
above will concretize only the specs that were added and not yet
concretized. Forcing a re-concretization of all of the specs can be done
by adding the ``-f`` option:
concretized. Forcing a re-concretization of all the specs can be done
instead with this command:

.. code-block:: console

   [myenv]$ spack concretize -f

Without the option, Spack guarantees that already concretized specs are
unchanged in the environment.
When the ``-f`` flag is not used to reconcretize all specs, Spack
guarantees that already concretized specs are unchanged in the
environment.

The ``concretize`` command does not install any packages. For packages
that have already been installed outside of the environment, the

@@ -371,16 +355,16 @@ installed specs using the ``-c`` (``--concretized``) flag.
Installing an Environment
^^^^^^^^^^^^^^^^^^^^^^^^^

In addition to adding individual specs to an environment, one
can install the entire environment at once using the command
In addition to installing individual specs into an Environment, one
can install the entire Environment at once using the command

.. code-block:: console

   [myenv]$ spack install

If the environment has been concretized, Spack will install the
concretized specs. Otherwise, ``spack install`` will concretize
the environment before installing the concretized specs.
If the Environment has been concretized, Spack will install the
concretized specs. Otherwise, ``spack install`` will first concretize
the Environment and then install the concretized specs.

.. note::

@@ -401,17 +385,17 @@ the environment before installing the concretized specs.

As it installs, ``spack install`` creates symbolic links in the
``logs/`` directory in the environment, allowing for easy inspection
``logs/`` directory in the Environment, allowing for easy inspection
of build logs related to that environment. The ``spack install``
command also stores a Spack repo containing the ``package.py`` file
used at install time for each package in the ``repos/`` directory in
the environment.
the Environment.

The ``--no-add`` option can be used in a concrete environment to tell
spack to install specs already present in the environment but not to
add any new root specs to the environment. For root specs provided
to ``spack install`` on the command line, ``--no-add`` is the default,
while for dependency specs, it is optional. In other
while for dependency specs on the other hand, it is optional. In other
words, if there is an unambiguous match in the active concrete environment
for a root spec provided to ``spack install`` on the command line, spack
does not require you to specify the ``--no-add`` option to prevent the spec
@@ -425,22 +409,12 @@ Developing Packages in a Spack Environment

The ``spack develop`` command allows one to develop Spack packages in
an environment. It requires a spec containing a concrete version, and
will configure Spack to install the package from local source.
If a version is not provided from the command line interface then spack
will automatically pick the highest version the package has defined.
This means any infinity versions (``develop``, ``main``, ``stable``) will be
preferred in this selection process.
By default, ``spack develop`` will also clone the package to a subdirectory in the
environment for the local source. This package will have a special variant ``dev_path``
will configure Spack to install the package from local source. By
default, it will also clone the package to a subdirectory in the
environment. This package will have a special variant ``dev_path``
set, and Spack will ensure the package and its dependents are rebuilt
any time the environment is installed if the package's local source
code has been modified. Spack's native implementation to check for modifications
is to check if ``mtime`` is newer than the installation.
A custom check can be created by overriding the ``detect_dev_src_change`` method
in your package class. This is particularly useful for projects that use custom
Spack repos to drive development and want to optimize performance.

Spack ensures that all instances of a
code has been modified. Spack ensures that all instances of a
developed package in the environment are concretized to match the
version (and other constraints) passed as the spec argument to the
``spack develop`` command.
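As an illustration, a custom check might look like the following sketch.
The exact signature and return convention of ``detect_dev_src_change``
may differ from the real API, and the package name, URL, and marker-file
convention here are hypothetical:

.. code-block:: python

   import os

   from spack.package import *


   class Myproject(CMakePackage):
       """Hypothetical package developed from a custom Spack repo."""

       git = "https://example.com/myproject.git"  # hypothetical URL

       version("main", branch="main")

       def detect_dev_src_change(self):
           # Instead of Spack's default mtime comparison, consult a marker
           # file that the project's own tooling touches whenever sources
           # change (a hypothetical convention).
           marker = os.path.join(self.stage.source_path, ".sources-changed")
           return os.path.exists(marker)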
@@ -450,7 +424,7 @@ also be used as valid concrete versions (see :ref:`version-specifier`).

This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
the ``main`` branch of the package, and ``spack install`` will install from
that git clone if ``foo`` is in the environment.
Further development on ``foo`` can be tested by re-installing the environment,
Further development on ``foo`` can be tested by reinstalling the environment,
and eventually committed and pushed to the upstream git repo.

If the package being developed supports out-of-source builds then users can use the

@@ -635,7 +609,7 @@ manipulate configuration inline in the ``spack.yaml`` file.

Inline configurations
^^^^^^^^^^^^^^^^^^^^^

Inline environment-scope configuration is done using the same yaml
Inline Environment-scope configuration is done using the same yaml
format as standard Spack configuration scopes, covered in the
:ref:`configuration` section. Each section is contained under a
top-level yaml object with its name. For example, a ``spack.yaml``

@@ -660,7 +634,7 @@ Included configurations
Spack environments allow an ``include`` heading in their yaml
schema. This heading pulls in external configuration files and applies
them to the environment.
them to the Environment.

.. code-block:: yaml

@@ -673,9 +647,6 @@ them to the environment.

Environments can include files or URLs. File paths can be relative or
absolute. URLs include the path to the text for individual files or
can be the path to a directory containing configuration files.
Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
schemes). Spack-specific, environment and user path variables may be
used in these paths. See :ref:`config-file-variables` for more information.
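For instance, an ``include`` list mixing these options might look like the
following sketch (all paths and URLs here are hypothetical):

.. code-block:: yaml

   spack:
     include:
     - relative/to/environment/config.yaml
     - /absolute/path/to/packages.yaml
     - https://example.com/spack/configs/
     - $spack/../custom-compilers.yaml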
^^^^^^^^^^^^^^^^^^^^^^^^
Configuration precedence

@@ -690,7 +661,7 @@ have higher precedence, as the included configs are applied in reverse order.

Manually Editing the Specs List
-------------------------------

The list of abstract/root specs in the environment is maintained in
The list of abstract/root specs in the Environment is maintained in
the ``spack.yaml`` manifest under the heading ``specs``.

.. code-block:: yaml

@@ -798,7 +769,7 @@ evaluates to the cross-product of those specs. Spec matrices also
contain an ``excludes`` directive, which eliminates certain
combinations from the evaluated result.

The following two environment manifests are identical:
The following two Environment manifests are identical:

.. code-block:: yaml

@@ -873,7 +844,7 @@ files are identical.
In short files like the example, it may be easier to simply list the
included specs. However for more complicated examples involving many
packages across many toolchains, separately factored lists make
environments substantially more manageable.
Environments substantially more manageable.

Additionally, the ``-l`` option to the ``spack add`` command allows
one to add to named lists in the definitions section of the manifest

@@ -892,7 +863,7 @@ named list ``compilers`` is ``['%gcc', '%clang', '%intel']`` on

   spack:
     definitions:
       - compilers: ['%gcc', '%clang']
       - when: arch.satisfies('target=x86_64:')
       - when: arch.satisfies('x86_64:')
        compilers: ['%intel']

.. note::

@@ -960,89 +931,37 @@ This allows for a much-needed reduction in redundancy between packages
and constraints.
-----------------
Environment Views
-----------------
----------------
Filesystem Views
----------------

Spack Environments can have an associated filesystem view, which is a directory
with a more traditional structure ``<view>/bin``, ``<view>/lib``, ``<view>/include``
in which all files of the installed packages are linked.

By default a view is created for each environment, thanks to the ``view: true``
option in the ``spack.yaml`` manifest file:

.. code-block:: yaml

   spack:
     specs: [perl, python]
     view: true

The view is created in a hidden directory ``.spack-env/view`` relative to the environment.
If you've used ``spack env activate``, you may have already interacted with this view. Spack
prepends its ``<view>/bin`` dir to ``PATH`` when the environment is activated, so that
you can directly run executables from all installed packages in the environment.
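For example, after activation the view's executables resolve directly (the
environment name and paths below are illustrative):

.. code-block:: console

   $ spack env activate myenv
   $ which perl
   /path/to/env/.spack-env/view/bin/perl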
Views are highly customizable: you can control where they are put, modify their structure,
include and exclude specs, change how files are linked, and you can even generate multiple
views for a single environment.
Spack Environments can define filesystem views, which provide a direct access point
for software similar to the directory hierarchy that might exist under ``/usr/local``.
Filesystem views are updated every time the environment is written out to the lock
file ``spack.lock``, so the concrete environment and the view are always compatible.
The files of the view's installed packages are brought into the view by symbolic or
hard links, referencing the original Spack installation, or by copy.

.. _configuring_environment_views:

^^^^^^^^^^^^^^^^^^^^^^^^^^
Minimal view configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Configuration in ``spack.yaml``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The minimal configuration

.. code-block:: yaml

   spack:
     # ...
     view: true

lets Spack generate a single view with default settings under the
``.spack-env/view`` directory of the environment.

Another short way to configure a view is to specify just where to put it:

.. code-block:: yaml

   spack:
     # ...
     view: /path/to/view

Views can also be disabled by setting ``view: false``.
^^^^^^^^^^^^^^^^^^^^^^^^^^^
Advanced view configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^^

One or more **view descriptors** can be defined under ``view``, keyed by a name.
The example from the previous section with ``view: /path/to/view`` is equivalent
to defining a view descriptor named ``default`` with a ``root`` attribute:

.. code-block:: yaml

   spack:
     # ...
     view:
       default: # name of the view
         root: /path/to/view # view descriptor attribute

The ``default`` view descriptor name is special: when you ``spack env activate`` your
environment, this view will be used to update (among other things) your ``PATH``
variable.

View descriptors must contain the root of the view, and optionally projections,
``select`` and ``exclude`` lists and link information via ``link`` and
The Spack Environment manifest file has a top-level keyword
``view``. Each entry under that heading is a **view descriptor**, headed
by a name. Any number of views may be defined under the ``view`` heading.
The view descriptor contains the root of the view, and
optionally the projections for the view, ``select`` and
``exclude`` lists for the view and link information via ``link`` and
``link_type``.

As a more advanced example, in the following manifest
For example, in the following manifest
file snippet we define a view named ``mpis``, rooted at
``/path/to/view`` in which all projections use the package name,
version, and compiler name to determine the path for a given
package. This view selects all packages that depend on MPI, and
excludes those built with the GCC compiler at version 18.5.
excludes those built with the PGI compiler at version 18.5.
The root specs with their (transitive) link and run type dependencies
will be put in the view due to the ``link: all`` option,
and the files in the view will be symlinks to the spack install
@@ -1056,7 +975,7 @@ directories.

     mpis:
       root: /path/to/view
       select: [^mpi]
       exclude: ['%gcc@18.5']
       exclude: ['%pgi@18.5']
       projections:
         all: '{name}/{version}-{compiler.name}'
       link: all

@@ -1082,14 +1001,63 @@ of ``hardlink`` or ``copy``.

when the environment is not activated, and linked libraries will be located
*outside* of the view thanks to rpaths.

There are two shorthands for environments with a single view. If the
environment at ``/path/to/env`` has a single view, with a root at
``/path/to/env/.spack-env/view``, with default selection and exclusion
and the default projection, we can put ``view: True`` in the
environment manifest. Similarly, if the environment has a view with a
different root, but default selection, exclusion, and projections, the
manifest can say ``view: /path/to/view``. These views are
automatically named ``default``, so that

.. code-block:: yaml

   spack:
     # ...
     view: True

is equivalent to

.. code-block:: yaml

   spack:
     # ...
     view:
       default:
         root: .spack-env/view

and

.. code-block:: yaml

   spack:
     # ...
     view: /path/to/view

is equivalent to

.. code-block:: yaml

   spack:
     # ...
     view:
       default:
         root: /path/to/view

By default, Spack environments are configured with ``view: True`` in
the manifest. Environments can be configured without views using
``view: False``. For backwards compatibility reasons, environments
with no ``view`` key are treated the same as ``view: True``.
From the command line, the ``spack env create`` command takes an
argument ``--with-view [PATH]`` that sets the path for a single, default
view. If no path is specified, the default path is used (``view:
true``). The argument ``--without-view`` can be used to create an
True``). The argument ``--without-view`` can be used to create an
environment without any view configured.
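As a sketch (the environment names and path are hypothetical):

.. code-block:: console

   $ spack env create --with-view /path/to/view myenv
   $ spack env create --without-view myenv2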
The ``spack env view`` command can be used to manage the views
of an environment. The subcommand ``spack env view enable`` will add a
of an Environment. The subcommand ``spack env view enable`` will add a
view named ``default`` to an environment. It takes an optional
argument to specify the path for the new default view. The subcommand
``spack env view disable`` will remove the view named ``default`` from

@@ -1151,18 +1119,11 @@ the projection under ``all`` before reaching those entries.

Activating environment views
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The ``spack env activate <env>`` command has two effects:

1. It activates the environment so that further Spack commands such
   as ``spack install`` will run in the context of the environment.
2. It activates the view so that environment variables such as
   ``PATH`` are updated to include the view.

Without further arguments, the ``default`` view of the environment is
activated. If a view with a different name has to be activated,
``spack env activate --with-view <name> <env>`` can be
used instead. You can also activate the environment without modifying
further environment variables using ``--without-view``.
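For example (the environment and view names below are hypothetical):

.. code-block:: console

   $ spack env activate --with-view mpis myenv
   $ spack env activate --without-view myenv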
The ``spack env activate`` command will put the default view for the
environment into the user's path, in addition to activating the
environment for Spack commands. The arguments ``-v,--with-view`` and
``-V,--without-view`` can be used to tune this behavior. The default
behavior is to activate with the environment view if there is one.

The environment variables affected by the ``spack env activate``
command and the paths that are used to update them are determined by

@@ -1185,8 +1146,8 @@ relevant variable if the path exists. For this reason, it is not
recommended to use non-default projections with the default view of an
environment.

The ``spack env deactivate`` command will remove the active view of
the Spack environment from the user's environment variables.
The ``spack env deactivate`` command will remove the default view of
the environment from the user's path.

.. _env-generate-depfile:

@@ -1257,7 +1218,7 @@ gets installed and is available for use in the ``env`` target.

       $(SPACK) -e . env depfile -o $@ --make-prefix spack

   env: spack/env
       $(info environment installed!)
       $(info Environment installed!)

   clean:
       rm -rf spack.lock env.mk spack/

@@ -1345,7 +1306,7 @@ index once every package is pushed. Note how this target uses the generated

   example/push/%: example/install/%
       @mkdir -p $(dir $@)
       $(info About to push $(SPEC) to a buildcache)
       $(SPACK) -e . buildcache push --only=package $(BUILDCACHE_DIR) /$(HASH)
       $(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
       @touch $@

   push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
@@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown belo

.. code-block:: console

   apt update
   apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd
   apt install build-essential ca-certificates coreutils curl environment-modules gfortran git gpg lsb-release python3 python3-distutils python3-venv unzip zip

.. tab-item:: RHEL

@@ -43,14 +43,14 @@ A build matrix showing which packages are working on which systems is shown belo

   dnf install epel-release
   dnf group install "Development Tools"
   dnf install gcc-gfortran redhat-lsb-core python3 unzip
   dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3

.. tab-item:: macOS Brew

   .. code-block:: console

      brew update
      brew install gcc git zip
      brew install curl gcc git gnupg zip

------------
Installation

@@ -61,15 +61,10 @@ Getting Spack is easy. You can clone it from the `github repository

.. code-block:: console

   $ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
   $ git clone -c feature.manyFiles=true https://github.com/spack/spack.git

This will create a directory called ``spack``.

.. note::
   ``-c feature.manyFiles=true`` improves git's performance on repositories with 1,000+ files.

   ``--depth=2`` prunes the git history to reduce the size of the Spack installation.

.. _shell-support:

^^^^^^^^^^^^^
@@ -283,6 +278,10 @@ compilers`` or ``spack compiler list``:

      intel@14.0.1  intel@13.0.1  intel@12.1.2  intel@10.1
   -- clang -------------------------------------------------------
      clang@3.4  clang@3.3  clang@3.2  clang@3.1
   -- pgi ---------------------------------------------------------
      pgi@14.3-0   pgi@13.2-0  pgi@12.1-0   pgi@10.9-0  pgi@8.0-1
      pgi@13.10-0  pgi@13.1-1  pgi@11.10-0  pgi@10.2-0  pgi@7.1-3
      pgi@13.6-0   pgi@12.8-0  pgi@11.1-0   pgi@9.0-4   pgi@7.0-6

Any of these compilers can be used to build Spack packages. More on
how this is done is in :ref:`sec-specs`.

@@ -802,6 +801,65 @@ flags to the ``icc`` command:

      spec: intel@15.0.24.4.9.3

^^^
PGI
^^^

PGI comes with two sets of compilers for C++ and Fortran,
distinguishable by their names. "Old" compilers:

.. code-block:: yaml

   cc:  /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
   cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgCC
   f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgf77
   fc:  /soft/pgi/15.10/linux86-64/15.10/bin/pgf90

"New" compilers:

.. code-block:: yaml

   cc:  /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
   cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgc++
   f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
   fc:  /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran

Older installations of PGI contain just the old compilers, whereas
newer installations contain both the old and the new. The new compiler is
considered preferable, as some packages
(``hdf``) will not build with the old compiler.

When auto-detecting a PGI compiler, there are cases where Spack will
find the old compilers when you really want it to find the new
compilers. It is best to check ``compilers.yaml``, and if the old
compilers are being used, change ``pgf77`` and ``pgf90`` to
``pgfortran``.

Other issues:

* There are reports that some packages will not build with PGI,
  including ``libpciaccess`` and ``openssl``. A workaround is to
  build these packages with another compiler and then use them as
  dependencies for PGI-built packages. For example:

  .. code-block:: console

     $ spack install openmpi%pgi ^libpciaccess%gcc

* PGI requires a license to use; see :ref:`licensed-compilers` for more
  information on installation.

.. note::

   It is believed the problem with HDF 4 is that everything is
   compiled with the ``F77`` compiler, but at some point some Fortran
   90 code slipped in there. So compilers that can handle both FORTRAN
   77 and Fortran 90 (``gfortran``, ``pgfortran``, etc.) are fine. But
   compilers specific to one or the other (``pgf77``, ``pgf90``) won't
   work.

^^^
NAG
^^^
@@ -1306,6 +1364,187 @@ This will write the private key to the file `dinosaur.priv`.
or for help on an issue or the Spack slack.

.. _cray-support:

-------------
Spack on Cray
-------------

Spack differs slightly when used on a Cray system. The architecture spec
can differentiate between the front-end and back-end processor and operating system.
For example, on Edison at NERSC, the back-end target processor
is "Ivy Bridge", so you can specify to use the back-end this way:

.. code-block:: console

   $ spack install zlib target=ivybridge

You can also use the operating system to build against the back-end:

.. code-block:: console

   $ spack install zlib os=CNL10

Notice that the name includes both the operating system name and the major
version number concatenated together.

Alternatively, if you want to build something for the front-end,
you can specify the front-end target processor. The processor for a login node
on Edison is "Sandy Bridge", so we specify it on the command line like so:

.. code-block:: console

   $ spack install zlib target=sandybridge

And the front-end operating system is:

.. code-block:: console

   $ spack install zlib os=SuSE11

^^^^^^^^^^^^^^^^^^^^^^^
Cray compiler detection
^^^^^^^^^^^^^^^^^^^^^^^

Spack can detect compilers using two methods. For the front-end, we treat
everything the same. The difference lies in back-end compiler detection.
Back-end compiler detection is made via the Tcl ``module avail`` command.
Once it detects the compiler it writes the appropriate PrgEnv and compiler
module name to compilers.yaml and sets the paths to each compiler with Cray's
compiler wrapper names (i.e. cc, CC, ftn). During build time, Spack will load
the correct PrgEnv and compiler module and will call the appropriate wrapper.

The compilers.yaml config file will also differ. There is a
modules section that is filled with the compiler's Programming Environment
and module name. On other systems, this field is empty []:

.. code-block:: yaml

   - compiler:
       modules:
       - PrgEnv-intel
       - intel/15.0.109

As mentioned earlier, the compiler paths will look different on a Cray system.
Since most compilers are invoked using cc, CC and ftn, the paths for each
compiler are replaced with their respective Cray compiler wrapper names:

.. code-block:: yaml

   paths:
     cc: cc
     cxx: CC
     f77: ftn
     fc: ftn

as opposed to an explicit path to the compiler executable. This allows Spack
to call the Cray compiler wrappers during build time.

For more on compiler configuration, check out :ref:`compiler-config`.

Spack sets the default Cray link type to dynamic, to better match
other platforms. Individual packages can enable static linking (which is the
default outside of Spack on Cray systems) using the ``-static`` flag.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting defaults and using Cray modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If you want to use default compilers for each PrgEnv and also be able
to load Cray external modules, you will need to set up a ``packages.yaml``.

Here's an example of an external configuration for Cray modules:

.. code-block:: yaml

   packages:
     mpich:
       externals:
       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10"
         modules:
         - cray-mpich
       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10"
         modules:
         - cray-mpich
     all:
       providers:
         mpi: [mpich]

This tells Spack that for whatever package depends on mpi, it should load the
cray-mpich module into the environment. You can then use whatever
environment variables, libraries, etc., that are brought into the environment
via module load.
.. note::

   For Cray-provided packages, it is best to use ``modules:`` instead of ``prefix:``
   in ``packages.yaml``, because the Cray Programming Environment heavily relies on
   modules (e.g., loading the ``cray-mpich`` module adds MPI libraries to the
   compiler wrapper link line).

You can set the default compiler that Spack can use for each compiler type.
If you want to use the Cray defaults, then set them under ``all:`` in packages.yaml.
In the compiler field, set the compiler specs in your order of preference.
Whenever you build with that compiler type, Spack will concretize to that version.

Here is an example of a full packages.yaml used at NERSC:

.. code-block:: yaml

   packages:
     mpich:
       externals:
       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-mpich
       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge"
         modules:
         - cray-mpich
       buildable: False
     netcdf:
       externals:
       - spec: "netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-netcdf
       - spec: "netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-netcdf
       buildable: False
     hdf5:
       externals:
       - spec: "hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-hdf5
       - spec: "hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-hdf5
       buildable: False
     all:
       compiler: [gcc@5.2.0, intel@16.0.0.109]
       providers:
         mpi: [mpich]

Here we tell Spack to use version 5.2.0 whenever we build with gcc, and
version 16.0.0.109 whenever we build with the Intel compilers. We add a spec
for each compiler type for each of the Cray modules. This ensures that for each
compiler on our system we can use that external module.

For more on external packages check out the section :ref:`sec-external-packages`.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using Linux containers on Cray machines
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack uses environment variables particular to the Cray programming
environment to determine which systems are Cray platforms. These
environment variables may be propagated into containers that are not
using the Cray programming environment.

To ensure that Spack does not autodetect the Cray programming
environment, unset the environment variable ``MODULEPATH``. This
will cause Spack to treat a Linux container on a Cray system as a base
Linux distro.
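For example, before running Spack inside such a container:

.. code-block:: console

   $ unset MODULEPATH
   $ spack arch   # Spack now treats the container as a plain Linux distro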

.. _windows_support:

----------------

@@ -1417,14 +1656,16 @@ in a Windows CMD prompt.

Step 3: Run and configure Spack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

On Windows, Spack supports both primary native shells, Powershell and the traditional command prompt.
To use Spack, pick your favorite shell, and run ``bin\spack_cmd.bat`` or ``share/spack/setup-env.ps1``
(you may need to Run as Administrator) from the top-level spack
directory. This will provide a Spack enabled shell. If you receive a warning message that Python is not in your ``PATH``
To use Spack, run ``bin\spack_cmd.bat`` (you may need to Run as Administrator) from the top-level spack
directory. This will provide a Windows command prompt with an environment properly set up with Spack
and its prerequisites. If you receive a warning message that Python is not in your ``PATH``
(which may happen if you installed Python from the website and not the Windows Store) add the location
of the Python executable to your ``PATH`` now. You can permanently add Python to your ``PATH`` variable
by using the ``Edit the system environment variables`` utility in Windows Control Panel.

.. note::
   Alternatively, Powershell can be used in place of CMD

To configure Spack, first run the following command inside the Spack console:

.. code-block:: console
@@ -1489,7 +1730,7 @@ and not tabs, so ensure that this is the case when editing one directly.

.. note:: Cygwin
   The use of Cygwin is not officially supported by Spack and is not tested.
   However Spack will not prevent this, so if choosing to use Spack
   However Spack will not throw an error, so if choosing to use Spack
   with Cygwin, know that no functionality is guaranteed.

^^^^^^^^^^^^^^^^^

@@ -1503,12 +1744,21 @@ Spack console via:

   spack install cpuinfo

If in the previous step you did not have CMake or Ninja installed, running the command above should install both packages
If in the previous step you did not have CMake or Ninja installed, running the command above should bootstrap both packages

.. note:: Spec Syntax Caveats
   Windows has a few idiosyncrasies when it comes to the Spack spec syntax and the use of certain shells.
   See the Spack spec syntax doc for more information.

"""""""""""""""""""""""""""
Windows Compatible Packages
"""""""""""""""""""""""""""

Not all Spack packages currently have Windows support. Some are inherently incompatible with the
platform, and others simply have yet to be ported. To view the current set of packages with Windows
support, the list command should be used via ``spack list -t windows``. If there's a package you'd like
to install on Windows but is not in that list, feel free to reach out to request the port or contribute
the port yourself.
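For example, to see the currently tagged packages:

.. code-block:: console

   $ spack list -t windows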
.. note::
   This is by no means a comprehensive list; some packages may have ports that were not tagged,
   while others may just work out of the box on Windows and have not been tagged as such.

^^^^^^^^^^^^^^
For developers

@@ -1518,3 +1768,6 @@ The intent is to provide a Windows installer that will automatically set up
Python, Git, and Spack, instead of requiring the user to do so manually.
Instructions for creating the installer are at
https://github.com/spack/spack/blob/develop/lib/spack/spack/cmd/installer/README.md

Alternatively, a pre-built copy of the Windows installer is available as an artifact of Spack's Windows CI,
available at each run of the CI on develop or any PR.
BIN lib/spack/docs/images/pr-commit.png (new file; binary file not shown; 44 KiB)
BIN lib/spack/docs/images/projects.png (new file; binary file not shown; 68 KiB)
BIN (binary file removed; not shown; was 358 KiB)
@@ -12,6 +12,10 @@

Spack
===================

.. epigraph::

   `These are docs for the Spack package manager. For sphere packing, see` `pyspack <https://pyspack.readthedocs.io>`_.

Spack is a package management tool designed to support multiple
versions and configurations of software on a wide variety of platforms
and environments. It was designed for large supercomputing centers,

@@ -35,15 +39,10 @@ package:

.. code-block:: console

   $ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
   $ git clone -c feature.manyFiles=true https://github.com/spack/spack.git
   $ cd spack/bin
   $ ./spack install libelf

.. note::
   ``-c feature.manyFiles=true`` improves git's performance on repositories with 1,000+ files.

   ``--depth=2`` prunes the git history to reduce the size of the Spack installation.

If you're new to spack and want to start using it, see :doc:`getting_started`,
or refer to the full manual below.

@@ -457,11 +457,11 @@ For instance, the following config options,

   tcl:
     all:
       suffixes:
         ^python@3: 'python{^python.version}'
         ^python@3.12: 'python-3.12'
         ^openblas: 'openblas'

will add a ``python-3.12.1`` version string to any packages compiled with
Python matching the spec, ``python@3``. This is useful to know which
will add a ``python-3.12`` version string to any packages compiled with
Python matching the spec, ``python@3.12``. This is useful to know which
version of Python a set of Python extensions is associated with. Likewise, the
``openblas`` string is attached to any program that has openblas in the spec,
most likely via the ``+blas`` variant specification.

(File diff suppressed because it is too large.)
@@ -59,7 +59,7 @@ Functional Example
------------------

The simplest fully functional standalone example of a working pipeline can be
examined live at this example `project <https://gitlab.com/spack/pipeline-quickstart>`_
examined live at this example `project <https://gitlab.com/scott.wittenburg/spack-pipeline-demo>`_
on gitlab.com.

Here's the ``.gitlab-ci.yml`` file from that example that builds and runs the

@@ -67,46 +67,39 @@ pipeline:

.. code-block:: yaml

   stages: [ "generate", "build" ]
   stages: [generate, build]

   variables:
     SPACK_REPOSITORY: "https://github.com/spack/spack.git"
     SPACK_REF: "develop-2024-10-06"
     SPACK_USER_CONFIG_PATH: ${CI_PROJECT_DIR}
     SPACK_BACKTRACE: 1
     SPACK_REPO: https://github.com/scottwittenburg/spack.git
     SPACK_REF: pipelines-reproducible-builds

   generate-pipeline:
     tags:
       - saas-linux-small-amd64
     stage: generate
     tags:
       - docker
     image:
       name: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
     script:
       - git clone ${SPACK_REPOSITORY}
       - cd spack && git checkout ${SPACK_REF} && cd ../
       name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01
       entrypoint: [""]
     before_script:
       - git clone ${SPACK_REPO}
       - pushd spack && git checkout ${SPACK_REF} && popd
       - . "./spack/share/spack/setup-env.sh"
       - spack --version
     script:
       - spack env activate --without-view .
       - spack -d -v --color=always
         ci generate
         --check-index-only
       - spack -d ci generate
         --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
         --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
         --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml"
     artifacts:
       paths:
         - "${CI_PROJECT_DIR}/jobs_scratch_dir"

   build-pipeline:
   build-jobs:
     stage: build
     trigger:
       include:
         - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
         - artifact: "jobs_scratch_dir/pipeline.yml"
           job: generate-pipeline
       strategy: depend
     needs:
       - artifacts: True
         job: generate-pipeline

The key thing to note above is that there are two jobs: The first job to run,
``generate-pipeline``, runs the ``spack ci generate`` command to generate a

@@ -121,93 +114,82 @@ And here's the spack environment built by the pipeline represented as a

   spack:
     view: false
     concretizer:
       unify: true
       reuse: false
       unify: false

     definitions:
     - pkgs:
       - zlib
       - bzip2 ~debug
     - compiler:
       - '%gcc'
       - bzip2
     - arch:
       - '%gcc@7.5.0 arch=linux-ubuntu18.04-x86_64'

     specs:
     - matrix:
       - - $pkgs
       - - $compiler
       - - $arch

     mirrors: { "mirror": "s3://spack-public/mirror" }

     ci:
       target: gitlab

       enable-artifacts-buildcache: True
       rebuild-index: False
       pipeline-gen:
       - any-job:
           tags:
           - saas-linux-small-amd64
           image:
             name: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
           before_script:
           - git clone ${SPACK_REPOSITORY}
           - cd spack && git checkout ${SPACK_REF} && cd ../
           - . "./spack/share/spack/setup-env.sh"
           - spack --version
           - export SPACK_USER_CONFIG_PATH=${CI_PROJECT_DIR}
           - spack config blame mirrors
           - git clone ${SPACK_REPO}
           - pushd spack && git checkout ${SPACK_CHECKOUT_VERSION} && popd
           - . "./spack/share/spack/setup-env.sh"
       - build-job:
           tags: [docker]
           image:
             name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01
             entrypoint: [""]

The elements of this file important to spack ci pipelines are described in more
detail below, but there are a couple of things to note about the above working
example:

.. note::
   The use of ``reuse: false`` in spack environments used for pipelines is
   almost always what you want, as without it your pipelines will not rebuild
   packages even if package hashes have changed. This is due to the concretizer
   strongly preferring known hashes when ``reuse: true``.
There is no ``script`` attribute specified here. The reason for this is
Spack CI will automatically generate reasonable default scripts. More
detail on what is in these scripts can be found below.

The ``ci`` section in the above environment file contains the bare minimum
configuration required for ``spack ci generate`` to create a working pipeline.
The ``target: gitlab`` tells spack that the desired pipeline output is for
gitlab. However, this isn't strictly required, as currently gitlab is the
only possible output format for pipelines. The ``pipeline-gen`` section
contains the key information needed to specify attributes for the generated
jobs. Notice that it contains a list which has only a single element in
this case. In real pipelines it will almost certainly have more elements,
and in those cases, order is important: spack starts at the bottom of the
list and works upwards when applying attributes.
Also notice the ``before_script`` section. It is required when using any of the
default scripts to source the ``setup-env.sh`` script in order to inform
the default scripts where to find the ``spack`` executable.

But in this simple case, we use only the special key ``any-job`` to
indicate that spack should apply the specified attributes (``tags``, ``image``,
and ``before_script``) to any job it generates. This includes jobs for
building/pushing all packages, a ``rebuild-index`` job at the end of the
pipeline, as well as any ``noop`` jobs that might be needed by gitlab when
no rebuilds are required.
Normally ``enable-artifacts-buildcache`` is not recommended in production as it
results in large binary artifacts getting transferred back and forth between
gitlab and the runners. But in this example on gitlab.com where there is no
shared, persistent file system, and where no secrets are stored for giving
permission to write to an S3 bucket, ``enable-artifacts-buildcache`` is the only
way to propagate binaries from jobs to their dependents.

Something to note is that in this simple case, we rely on spack to
generate a reasonable script for the package build jobs (it just creates
a script that invokes ``spack ci rebuild``).
Also, it is usually a good idea to let the pipeline generate a final "rebuild the
buildcache index" job, so that subsequent pipeline generation can quickly determine
which specs are up to date and which need to be rebuilt (it's a good idea for other
reasons as well, but those are out of scope for this discussion). In this case we
have disabled it (using ``rebuild-index: False``) because the index would only be
generated in the artifacts mirror anyway, and consequently would not be available
during subsequent pipeline runs.

Another thing to note is the use of the ``SPACK_USER_CONFIG_DIR`` environment
variable in any generated jobs. The purpose of this is to make spack
aware of one final file in the example, the one that contains the mirror
configuration. This file, ``mirrors.yaml``, looks like this:
.. note::
   With the addition of reproducible builds (#22887) a previously working
   pipeline will require some changes:

.. code-block:: yaml
   * In the build-jobs, the environment location changed.
     This will typically show as a ``KeyError`` in the failing job. Be sure to
     point to ``${SPACK_CONCRETE_ENV_DIR}``.

   mirrors:
     buildcache-destination:
       url: oci://registry.gitlab.com/spack/pipeline-quickstart
       binary: true
       access_pair:
         id_variable: CI_REGISTRY_USER
         secret_variable: CI_REGISTRY_PASSWORD
   * When using ``include`` in your environment, be sure to make the included
     files available in the build jobs. This means adding those files to the
     artifact directory. Those files will also be missing in the reproducibility
     artifact.

Note the name of the mirror is ``buildcache-destination``, which is required
as of Spack 0.23 (see below for more information). The mirror url simply
points to the container registry associated with the project, while
``id_variable`` and ``secret_variable`` refer to the environment variables
containing the access credentials for the mirror.

When spack builds packages for this example project, they will be pushed to
the project container registry, where they will be available for subsequent
jobs to install as dependencies, or for other pipelines to use to build runnable
container images.
* Because the location of the environment changed, including files with
  relative path may have to be adapted to work both in the project context
  (generation job) and in the concrete env dir context (build job).

-----------------------------------
Spack commands supporting pipelines
@@ -271,6 +253,17 @@ can easily happen if it is not updated frequently, this behavior ensures that
spack has a way to know for certain about the status of any concrete spec on
the remote mirror, but can slow down pipeline generation significantly.

The ``--optimize`` argument is experimental and runs the generated pipeline
document through a series of optimization passes designed to reduce the size
of the generated file.

The ``--dependencies`` argument is also experimental and disables what in Gitlab is
referred to as DAG scheduling, internally using the ``dependencies`` keyword
rather than ``needs`` to list dependency jobs. The drawback of using this option
is that before any job can begin, all jobs in previous stages must first
complete. The benefit is that Gitlab allows more dependencies to be listed
when using ``dependencies`` instead of ``needs``.

The optional ``--output-file`` argument should be an absolute path (including
file name) to the generated pipeline, and if not given, the default is
``./.gitlab-ci.yml``.
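For instance, a generation job combining these flags might invoke the
following (the paths shown are illustrative):

.. code-block:: console

   $ spack -e . ci generate --optimize --dependencies \
       --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir" \
       --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml"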
@@ -435,6 +428,15 @@ configuration with a ``script`` attribute. Specifying a signing job without a sc
does not create a signing job and the job configuration attributes will be ignored.
Signing jobs are always assigned the runner tags ``aws``, ``protected``, and ``notary``.

^^^^^^^^^^^^^^^^^
Cleanup (cleanup)
^^^^^^^^^^^^^^^^^

When using ``temporary-storage-url-prefix`` the cleanup job will destroy the mirror
created for the associated Gitlab pipeline. Cleanup jobs do not allow modifying the
script, but do expect that the spack command is in the path and require a
``before_script`` to be specified that sources the ``setup-env.sh`` script.

.. _noop_jobs:

^^^^^^^^^^^^
@@ -601,77 +603,6 @@ the attributes will be merged starting from the bottom match going up to the top

In the case that no match is found in a submapping section, no additional attributes will be applied.

^^^^^^^^^^^^^^^^^^^^^^^^
Dynamic Mapping Sections
^^^^^^^^^^^^^^^^^^^^^^^^

For large scale CI where cost optimization is required, dynamic mapping allows for the use of real-time
mapping schemes served by a web service. This type of mapping does not support the ``-remove`` type
behavior, but it does follow the rest of the merge rules for configurations.

The dynamic mapping service needs to implement a single REST API interface for getting
requests ``GET <URL>[:PORT][/PATH]?spec=<pkg_name@pkg_version +variant1+variant2%compiler@compiler_version>``.

An example request:

.. code-block::

   https://my-dyn-mapping.spack.io/allocation?spec=zlib-ng@2.1.6 +compat+opt+shared+pic+new_strategies arch=linux-ubuntu20.04-x86_64_v3%gcc@12.0.0

Here is an example response that updates the Kubernetes request variables, overrides the max retries for
gitlab, and prepends a note about the modifications made by the my-dyn-mapping.spack.io service:

.. code-block::

   200 OK

   {
      "variables":
      {
         "KUBERNETES_CPU_REQUEST": "500m",
         "KUBERNETES_MEMORY_REQUEST": "2G",
      },
      "retry": { "max:": "1"}
      "script+:":
      [
         "echo \"Job modified by my-dyn-mapping.spack.io\""
      ]
   }

The ci.yaml configuration section takes the URL endpoint as well as a number of options to configure how responses are handled.

It is possible to specify a list of allowed and ignored configuration attributes under ``allow`` and ``ignore``
respectively. It is also possible to configure required attributes under the ``required`` section.

The client timeout and SSL verification can be configured using the ``timeout`` and ``verify_ssl`` options.
By default, ``timeout`` is set to the option in ``config:timeout`` and ``verify_ssl`` is set to the option in ``config:verify_ssl``.

Passing header parameters to the request can be achieved through the ``header`` section. The values of the variables passed to the
header may be environment variables that are expanded at runtime, such as a private token configured on the runner.

Here is an example configuration pointing to ``my-dyn-mapping.spack.io/allocation``:

.. code-block:: yaml

   ci:
   - dynamic-mapping:
       endpoint: my-dyn-mapping.spack.io/allocation
       timeout: 10
       verify_ssl: True
       header:
         PRIVATE_TOKEN: ${MY_PRIVATE_TOKEN}
         MY_CONFIG: "fuzz_allocation:false"
       allow:
       - variables
       ignore:
       - script
       require: []

^^^^^^^^^^^^^
Bootstrapping
^^^^^^^^^^^^^

@@ -743,13 +674,26 @@ build the package.

When including a bootstrapping phase as in the example above, the result is that
the bootstrapped compiler packages will be pushed to the binary mirror (and the
local artifacts mirror) before the actual release specs are built.
local artifacts mirror) before the actual release specs are built. In this case,
the jobs corresponding to subsequent release specs are configured to
``install_missing_compilers``, so that if spack is asked to install a package
with a compiler it doesn't know about, it can be quickly installed from the
binary mirror first.

Since bootstrapping compilers is optional, those items can be left out of the
environment/stack file, and in that case no bootstrapping will be done (only the
specs will be staged for building) and the runners will be expected to already
have all needed compilers installed and configured for spack to use.

^^^^^^^^^^^^^^^^^^^
Pipeline Buildcache
^^^^^^^^^^^^^^^^^^^

The ``enable-artifacts-buildcache`` key
takes a boolean and determines whether the pipeline uses artifacts to store and
pass along the buildcaches from one stage to the next (the default if you don't
provide this option is ``False``).

^^^^^^^^^^^^^^^^
Broken Specs URL
^^^^^^^^^^^^^^^^

@@ -476,3 +476,9 @@ implemented using Python's built-in `sys.path
:py:mod:`spack.repo` module implements a custom `Python importer
<https://docs.python.org/2/library/imp.html>`_.

.. warning::

   The mechanism for extending packages is not yet extensively tested,
   and extending packages across repositories imposes inter-repo
   dependencies, which may be hard to manage. Use this feature at your
   own risk, but let us know if you have a use case for it.

@@ -1,13 +1,13 @@
sphinx==8.1.3
sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.6.1
sphinx-rtd-theme==3.0.2
python-levenshtein==0.26.1
docutils==0.21.2
pygments==2.18.0
urllib3==2.2.3
pytest==8.3.3
sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.1
docutils==0.20.1
pygments==2.17.2
urllib3==2.2.1
pytest==8.2.0
isort==5.13.2
black==24.10.0
flake8==7.1.1
mypy==1.11.1
black==24.4.2
flake8==7.0.0
mypy==1.10.0

349 lib/spack/env/cc (vendored)
@@ -41,6 +41,10 @@ SPACK_ENV_PATH
SPACK_DEBUG_LOG_DIR
SPACK_DEBUG_LOG_ID
SPACK_COMPILER_SPEC
SPACK_CC_RPATH_ARG
SPACK_CXX_RPATH_ARG
SPACK_F77_RPATH_ARG
SPACK_FC_RPATH_ARG
SPACK_LINKER_ARG
SPACK_SHORT_SPEC
SPACK_SYSTEM_DIRS
@@ -97,9 +101,10 @@ setsep() {
    esac
}

# prepend LISTNAME ELEMENT
# prepend LISTNAME ELEMENT [SEP]
#
# Prepend ELEMENT to the list stored in the variable LISTNAME.
# Prepend ELEMENT to the list stored in the variable LISTNAME,
# assuming the list is separated by SEP.
# Handles empty lists and single-element lists.
prepend() {
    varname="$1"
@@ -169,46 +174,6 @@ preextend() {
    unset IFS
}

execute() {
    # dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
    if [ -n "${SPACK_TEST_COMMAND=}" ]; then
        case "$SPACK_TEST_COMMAND" in
            dump-args)
                IFS="$lsep"
                for arg in $full_command_list; do
                    echo "$arg"
                done
                unset IFS
                exit
                ;;
            dump-env-*)
                var=${SPACK_TEST_COMMAND#dump-env-}
                eval "printf '%s\n' \"\$0: \$var: \$$var\""
                ;;
            *)
                die "Unknown test command: '$SPACK_TEST_COMMAND'"
                ;;
        esac
    fi

    #
    # Write the input and output commands to debug logs if it's asked for.
    #
    if [ "$SPACK_DEBUG" = TRUE ]; then
        input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
        output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
        echo "[$mode] $command $input_command" >> "$input_log"
        IFS="$lsep"
        echo "[$mode] "$full_command_list >> "$output_log"
        unset IFS
    fi

    # Execute the full command, preserving spaces with IFS set
    # to the alarm bell separator.
    IFS="$lsep"; exec $full_command_list
    exit
}

# Fail with a clear message if the input contains any bell characters.
if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
    die "Compiler command line contains our separator ('${lsep}'). Cannot parse."
@@ -219,7 +184,6 @@ for param in $params; do
    if eval "test -z \"\${${param}:-}\""; then
        die "Spack compiler must be run from Spack! Input '$param' is missing."
    fi
    # FIXME (compiler as nodes) add checks on whether `SPACK_XX_RPATH` is set if `SPACK_XX` is set
done

# eval this because SPACK_MANAGED_DIRS and SPACK_SYSTEM_DIRS are inputs we don't wanna loop over.
@@ -234,36 +198,6 @@ esac
}
"

# path_list functions. Path_lists have 3 parts: spack_store_<list>, <list> and system_<list>,
# which are used to prioritize paths when assembling the final command line.

# init_path_lists LISTNAME
# Set <LISTNAME>, spack_store_<LISTNAME>, and system_<LISTNAME> to "".
init_path_lists() {
    eval "spack_store_$1=\"\""
    eval "$1=\"\""
    eval "system_$1=\"\""
}

# assign_path_lists LISTNAME1 LISTNAME2
# Copy contents of LISTNAME2 into LISTNAME1, for each path_list prefix.
assign_path_lists() {
    eval "spack_store_$1=\"\${spack_store_$2}\""
    eval "$1=\"\${$2}\""
    eval "system_$1=\"\${system_$2}\""
}

# append_path_lists LISTNAME ELT
# Append the provided ELT to the appropriate list, based on the result of path_order().
append_path_lists() {
    path_order "$2"
    case $? in
        0) eval "append spack_store_$1 \"\$2\"" ;;
        1) eval "append $1 \"\$2\"" ;;
        2) eval "append system_$1 \"\$2\"" ;;
    esac
}

# Check if optional parameters are defined
# If we aren't asking for debug flags, don't add them
if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
@@ -297,17 +231,12 @@ fi
# ld    link
# ccld  compile & link

# Note. SPACK_ALWAYS_XFLAGS are applied for all compiler invocations,
# including version checks (SPACK_XFLAGS variants are not applied
# for version checks).
command="${0##*/}"
comp="CC"
vcheck_flags=""
case "$command" in
    cpp)
        mode=cpp
        debug_flags="-g"
        vcheck_flags="${SPACK_ALWAYS_CPPFLAGS}"
        ;;
    cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe|craycc)
        command="$SPACK_CC"
@@ -315,7 +244,6 @@ case "$command" in
        comp="CC"
        lang_flags=C
        debug_flags="-g"
        vcheck_flags="${SPACK_ALWAYS_CFLAGS}"
        ;;
    c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
        command="$SPACK_CXX"
@@ -323,7 +251,6 @@ case "$command" in
        comp="CXX"
        lang_flags=CXX
        debug_flags="-g"
        vcheck_flags="${SPACK_ALWAYS_CXXFLAGS}"
        ;;
    ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang|crayftn)
        command="$SPACK_FC"
@@ -331,7 +258,6 @@ case "$command" in
        comp="FC"
        lang_flags=F
        debug_flags="-g"
        vcheck_flags="${SPACK_ALWAYS_FFLAGS}"
        ;;
    f77|xlf|xlf_r|pgf77)
        command="$SPACK_F77"
@@ -339,7 +265,6 @@ case "$command" in
        comp="F77"
        lang_flags=F
        debug_flags="-g"
        vcheck_flags="${SPACK_ALWAYS_FFLAGS}"
        ;;
    ld|ld.gold|ld.lld)
        mode=ld
@@ -400,7 +325,7 @@ dtags_to_strip="${SPACK_DTAGS_TO_STRIP}"
linker_arg="${SPACK_LINKER_ARG}"

# Set up rpath variable according to language.
rpath="ERROR: RPATH ARG WAS NOT SET, MAYBE THE PACKAGE DOES NOT DEPEND ON ${comp}?"
rpath="ERROR: RPATH ARG WAS NOT SET"
eval "rpath=\${SPACK_${comp}_RPATH_ARG:?${rpath}}"

# Dump the mode and exit if the command is dump-mode.
@@ -440,11 +365,7 @@ unset IFS
export PATH="$new_dirs"

if [ "$mode" = vcheck ]; then
    full_command_list="$command"
    args="$@"
    extend full_command_list vcheck_flags
    extend full_command_list args
    execute
    exec "${command}" "$@"
fi

# Darwin's linker has a -r argument that merges object files together.
@@ -496,7 +417,12 @@ input_command="$*"
parse_Wl() {
    while [ $# -ne 0 ]; do
        if [ "$wl_expect_rpath" = yes ]; then
            append_path_lists return_rpath_dirs_list "$1"
            path_order "$1"
            case $? in
                0) append return_spack_store_rpath_dirs_list "$1" ;;
                1) append return_rpath_dirs_list "$1" ;;
                2) append return_system_rpath_dirs_list "$1" ;;
            esac
            wl_expect_rpath=no
        else
            case "$1" in
@@ -505,14 +431,24 @@ parse_Wl() {
                    if [ -z "$arg" ]; then
                        shift; continue
                    fi
                    append_path_lists return_rpath_dirs_list "$arg"
                    path_order "$arg"
                    case $? in
                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
                        1) append return_rpath_dirs_list "$arg" ;;
                        2) append return_system_rpath_dirs_list "$arg" ;;
                    esac
                    ;;
                --rpath=*)
                    arg="${1#--rpath=}"
                    if [ -z "$arg" ]; then
                        shift; continue
                    fi
                    append_path_lists return_rpath_dirs_list "$arg"
                    path_order "$arg"
                    case $? in
                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
                        1) append return_rpath_dirs_list "$arg" ;;
                        2) append return_system_rpath_dirs_list "$arg" ;;
                    esac
                    ;;
                -rpath|--rpath)
                    wl_expect_rpath=yes
@@ -520,7 +456,8 @@ parse_Wl() {
                "$dtags_to_strip")
                    ;;
                -Wl)
                    # Nested -Wl,-Wl means we're in NAG compiler territory. We don't support it.
                    # Nested -Wl,-Wl means we're in NAG compiler territory, we don't support
                    # it.
                    return 1
                    ;;
                *)
@@ -539,10 +476,21 @@ categorize_arguments() {
    return_other_args_list=""
    return_isystem_was_used=""

    init_path_lists return_isystem_include_dirs_list
    init_path_lists return_include_dirs_list
    init_path_lists return_lib_dirs_list
    init_path_lists return_rpath_dirs_list
    return_isystem_spack_store_include_dirs_list=""
    return_isystem_system_include_dirs_list=""
    return_isystem_include_dirs_list=""

    return_spack_store_include_dirs_list=""
    return_system_include_dirs_list=""
    return_include_dirs_list=""

    return_spack_store_lib_dirs_list=""
    return_system_lib_dirs_list=""
    return_lib_dirs_list=""

    return_spack_store_rpath_dirs_list=""
    return_system_rpath_dirs_list=""
    return_rpath_dirs_list=""

    # Global state for keeping track of -Wl,-rpath -Wl,/path
    wl_expect_rpath=no
@@ -608,17 +556,32 @@ categorize_arguments() {
            arg="${1#-isystem}"
            return_isystem_was_used=true
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            append_path_lists return_isystem_include_dirs_list "$arg"
            path_order "$arg"
            case $? in
                0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
                1) append return_isystem_include_dirs_list "$arg" ;;
                2) append return_isystem_system_include_dirs_list "$arg" ;;
            esac
            ;;
        -I*)
            arg="${1#-I}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            append_path_lists return_include_dirs_list "$arg"
            path_order "$arg"
            case $? in
                0) append return_spack_store_include_dirs_list "$arg" ;;
                1) append return_include_dirs_list "$arg" ;;
                2) append return_system_include_dirs_list "$arg" ;;
            esac
            ;;
        -L*)
            arg="${1#-L}"
            if [ -z "$arg" ]; then shift; arg="$1"; fi
            append_path_lists return_lib_dirs_list "$arg"
            path_order "$arg"
            case $? in
                0) append return_spack_store_lib_dirs_list "$arg" ;;
                1) append return_lib_dirs_list "$arg" ;;
                2) append return_system_lib_dirs_list "$arg" ;;
            esac
            ;;
        -l*)
            # -loopopt=0 is generated erroneously in autoconf <= 2.69,
@@ -651,17 +614,32 @@ categorize_arguments() {
                break
            elif [ "$xlinker_expect_rpath" = yes ]; then
                # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
                append_path_lists return_rpath_dirs_list "$1"
                path_order "$1"
                case $? in
                    0) append return_spack_store_rpath_dirs_list "$1" ;;
                    1) append return_rpath_dirs_list "$1" ;;
                    2) append return_system_rpath_dirs_list "$1" ;;
                esac
                xlinker_expect_rpath=no
            else
                case "$1" in
                    -rpath=*)
                        arg="${1#-rpath=}"
                        append_path_lists return_rpath_dirs_list "$arg"
                        path_order "$arg"
                        case $? in
                            0) append return_spack_store_rpath_dirs_list "$arg" ;;
                            1) append return_rpath_dirs_list "$arg" ;;
                            2) append return_system_rpath_dirs_list "$arg" ;;
                        esac
                        ;;
                    --rpath=*)
                        arg="${1#--rpath=}"
                        append_path_lists return_rpath_dirs_list "$arg"
                        path_order "$arg"
                        case $? in
                            0) append return_spack_store_rpath_dirs_list "$arg" ;;
                            1) append return_rpath_dirs_list "$arg" ;;
                            2) append return_system_rpath_dirs_list "$arg" ;;
                        esac
                        ;;
                    -rpath|--rpath)
                        xlinker_expect_rpath=yes
@@ -678,36 +656,7 @@ categorize_arguments() {
        "$dtags_to_strip")
            ;;
        *)
            # if mode is not ld, we can just add to other args
            if [ "$mode" != "ld" ]; then
                append return_other_args_list "$1"
                shift
                continue
            fi

            # if we're in linker mode, we need to parse raw RPATH args
            case "$1" in
                -rpath=*)
                    arg="${1#-rpath=}"
                    append_path_lists return_rpath_dirs_list "$arg"
                    ;;
                --rpath=*)
                    arg="${1#--rpath=}"
                    append_path_lists return_rpath_dirs_list "$arg"
                    ;;
                -rpath|--rpath)
                    if [ $# -eq 1 ]; then
                        # -rpath without value: let the linker raise an error.
                        append return_other_args_list "$1"
                        break
                    fi
                    shift
                    append_path_lists return_rpath_dirs_list "$1"
                    ;;
                *)
                    append return_other_args_list "$1"
                    ;;
            esac
            append return_other_args_list "$1"
            ;;
    esac
    shift
@@ -729,10 +678,21 @@ categorize_arguments() {

categorize_arguments "$@"

assign_path_lists isystem_include_dirs_list return_isystem_include_dirs_list
assign_path_lists include_dirs_list return_include_dirs_list
assign_path_lists lib_dirs_list return_lib_dirs_list
assign_path_lists rpath_dirs_list return_rpath_dirs_list
spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
system_include_dirs_list="$return_system_include_dirs_list"
include_dirs_list="$return_include_dirs_list"

spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
system_lib_dirs_list="$return_system_lib_dirs_list"
lib_dirs_list="$return_lib_dirs_list"

spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
system_rpath_dirs_list="$return_system_rpath_dirs_list"
rpath_dirs_list="$return_rpath_dirs_list"

isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
isystem_include_dirs_list="$return_isystem_include_dirs_list"

isystem_was_used="$return_isystem_was_used"
other_args_list="$return_other_args_list"
@@ -762,7 +722,6 @@ case "$mode" in
    cc|ccld)
        case $lang_flags in
            F)
                extend spack_flags_list SPACK_ALWAYS_FFLAGS
                extend spack_flags_list SPACK_FFLAGS
                ;;
        esac
@@ -772,7 +731,6 @@ esac
# C preprocessor flags come before any C/CXX flags
case "$mode" in
    cpp|as|cc|ccld)
        extend spack_flags_list SPACK_ALWAYS_CPPFLAGS
        extend spack_flags_list SPACK_CPPFLAGS
        ;;
esac
@@ -783,19 +741,15 @@ case "$mode" in
    cc|ccld)
        case $lang_flags in
            C)
                extend spack_flags_list SPACK_ALWAYS_CFLAGS
                extend spack_flags_list SPACK_CFLAGS
                preextend flags_list SPACK_TARGET_ARGS_CC
                ;;
            CXX)
                extend spack_flags_list SPACK_ALWAYS_CXXFLAGS
                extend spack_flags_list SPACK_CXXFLAGS
                preextend flags_list SPACK_TARGET_ARGS_CXX
                ;;
            F)
                preextend flags_list SPACK_TARGET_ARGS_FORTRAN
                ;;
        esac

        # prepend target args
        preextend flags_list SPACK_TARGET_ARGS
        ;;
esac

@@ -810,10 +764,21 @@ IFS="$lsep"
categorize_arguments $spack_flags_list
unset IFS

assign_path_lists spack_flags_isystem_include_dirs_list return_isystem_include_dirs_list
assign_path_lists spack_flags_include_dirs_list return_include_dirs_list
assign_path_lists spack_flags_lib_dirs_list return_lib_dirs_list
assign_path_lists spack_flags_rpath_dirs_list return_rpath_dirs_list
spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"

spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
spack_flags_include_dirs_list="$return_include_dirs_list"

spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
spack_flags_lib_dirs_list="$return_lib_dirs_list"

spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"

spack_flags_isystem_was_used="$return_isystem_was_used"
spack_flags_other_args_list="$return_other_args_list"
@@ -872,7 +837,7 @@ esac
case "$mode" in
    cpp|cc|as|ccld)
        if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
            extend spack_store_isystem_include_dirs_list SPACK_STORE_INCLUDE_DIRS
            extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
            extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
        else
            extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
@@ -888,63 +853,64 @@ args_list="$flags_list"

# Include search paths partitioned by (in store, non-system, system)
# NOTE: adding ${lsep} to the prefix here turns every added element into two
extend args_list spack_store_spack_flags_include_dirs_list -I
extend args_list spack_flags_spack_store_include_dirs_list -I
extend args_list spack_store_include_dirs_list -I

extend args_list spack_flags_include_dirs_list -I
extend args_list include_dirs_list -I

extend args_list spack_store_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list spack_store_isystem_include_dirs_list "-isystem${lsep}"
extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"

extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list isystem_include_dirs_list "-isystem${lsep}"

extend args_list system_spack_flags_include_dirs_list -I
extend args_list spack_flags_system_include_dirs_list -I
extend args_list system_include_dirs_list -I

extend args_list system_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list system_isystem_include_dirs_list "-isystem${lsep}"
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"

# Library search paths partitioned by (in store, non-system, system)
extend args_list spack_store_spack_flags_lib_dirs_list "-L"
extend args_list spack_flags_spack_store_lib_dirs_list "-L"
extend args_list spack_store_lib_dirs_list "-L"

extend args_list spack_flags_lib_dirs_list "-L"
extend args_list lib_dirs_list "-L"

extend args_list system_spack_flags_lib_dirs_list "-L"
extend args_list spack_flags_system_lib_dirs_list "-L"
extend args_list system_lib_dirs_list "-L"

# RPATHs arguments
rpath_prefix=""
case "$mode" in
    ccld)
        if [ -n "$dtags_to_add" ] ; then
            append args_list "$linker_arg$dtags_to_add"
        fi
        rpath_prefix="$rpath"
        extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
        extend args_list spack_store_rpath_dirs_list "$rpath"

        extend args_list spack_flags_rpath_dirs_list "$rpath"
        extend args_list rpath_dirs_list "$rpath"

        extend args_list spack_flags_system_rpath_dirs_list "$rpath"
        extend args_list system_rpath_dirs_list "$rpath"
        ;;
    ld)
        if [ -n "$dtags_to_add" ] ; then
            append args_list "$dtags_to_add"
        fi
        rpath_prefix="-rpath${lsep}"
        extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
        extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"

        extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
        extend args_list rpath_dirs_list "-rpath${lsep}"

        extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
        extend args_list system_rpath_dirs_list "-rpath${lsep}"
        ;;
esac

# if mode is ccld or ld, extend RPATH lists with the prefix determined above
if [ -n "$rpath_prefix" ]; then
    extend args_list spack_store_spack_flags_rpath_dirs_list "$rpath_prefix"
    extend args_list spack_store_rpath_dirs_list "$rpath_prefix"

    extend args_list spack_flags_rpath_dirs_list "$rpath_prefix"
    extend args_list rpath_dirs_list "$rpath_prefix"

    extend args_list system_spack_flags_rpath_dirs_list "$rpath_prefix"
    extend args_list system_rpath_dirs_list "$rpath_prefix"
fi

# Other arguments from the input command
extend args_list other_args_list
extend args_list spack_flags_other_args_list
@@ -967,4 +933,39 @@ if [ -n "$SPACK_CCACHE_BINARY" ]; then
    esac
fi

execute
# dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
if [ -n "${SPACK_TEST_COMMAND=}" ]; then
    case "$SPACK_TEST_COMMAND" in
        dump-args)
            IFS="$lsep"
            for arg in $full_command_list; do
                echo "$arg"
            done
            unset IFS
            exit
            ;;
        dump-env-*)
            var=${SPACK_TEST_COMMAND#dump-env-}
            eval "printf '%s\n' \"\$0: \$var: \$$var\""
            ;;
        *)
            die "Unknown test command: '$SPACK_TEST_COMMAND'"
            ;;
    esac
fi

#
# Write the input and output commands to debug logs if it's asked for.
#
if [ "$SPACK_DEBUG" = TRUE ]; then
    input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
    output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
    echo "[$mode] $command $input_command" >> "$input_log"
    IFS="$lsep"
    echo "[$mode] "$full_command_list >> "$output_log"
    unset IFS
fi

# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list
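For context, here is a minimal Python sketch (not Spack code; the store and system paths are made-up stand-ins for ``SPACK_STORE_DIRS`` and ``SPACK_SYSTEM_DIRS``) of the three-way partitioning that ``init_path_lists``/``append_path_lists`` centralize above: every include, library, and rpath directory is routed into a spack-store, ordinary, or system bucket, and the buckets are emitted in that priority order when the final command line is assembled.

# Illustrative sketch only; constants are assumptions, not Spack's values.
from typing import Dict, List

SPACK_STORE = "/opt/spack/store"                      # stand-in for the store prefix
SYSTEM_DIRS = {"/lib", "/usr/lib", "/usr/local/lib"}  # stand-in for system dirs

def path_order(path: str) -> int:
    """0 = spack store, 1 = ordinary, 2 = system (mirrors the shell helper)."""
    if path.startswith(SPACK_STORE):
        return 0
    if path in SYSTEM_DIRS:
        return 2
    return 1

def partition(paths: List[str]) -> Dict[int, List[str]]:
    buckets: Dict[int, List[str]] = {0: [], 1: [], 2: []}
    for p in paths:
        buckets[path_order(p)].append(p)
    return buckets

if __name__ == "__main__":
    dirs = ["/opt/spack/store/zlib/lib", "/home/me/lib", "/usr/lib"]
    ordered = partition(dirs)
    # Emit -L flags with store paths first and system paths last.
    print([f"-L{p}" for k in (0, 1, 2) for p in ordered[k]])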
2
lib/spack/external/__init__.py
vendored
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)
* Version: 0.2.5-dev (commit cbb1fd5eb397a70d466e5160b393b87b0dbcc78f)

astunparse
----------------
24
lib/spack/external/_vendoring/distro/distro.py
vendored
@@ -1265,29 +1265,27 @@ def _distro_release_info(self) -> Dict[str, str]:
            match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
        else:
            try:
                with os.scandir(self.etc_dir) as it:
                    etc_files = [
                        p.path for p in it
                        if p.is_file() and p.name not in _DISTRO_RELEASE_IGNORE_BASENAMES
                    ]
                basenames = [
                    basename
                    for basename in os.listdir(self.etc_dir)
                    if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
                    and os.path.isfile(os.path.join(self.etc_dir, basename))
                ]
                # We sort for repeatability in cases where there are multiple
                # distro specific files; e.g. CentOS, Oracle, Enterprise all
                # containing `redhat-release` on top of their own.
                etc_files.sort()
                basenames.sort()
            except OSError:
                # This may occur when /etc is not readable but we can't be
                # sure about the *-release files. Check common entries of
                # /etc for information. If they turn out to not be there the
                # error is handled in `_parse_distro_release_file()`.
                etc_files = [
                    os.path.join(self.etc_dir, basename)
                    for basename in _DISTRO_RELEASE_BASENAMES
                ]

            for filepath in etc_files:
                match = _DISTRO_RELEASE_BASENAME_PATTERN.match(os.path.basename(filepath))
                basenames = _DISTRO_RELEASE_BASENAMES
                for basename in basenames:
                    match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
                if match is None:
                    continue
                filepath = os.path.join(self.etc_dir, basename)
                distro_info = self._parse_distro_release_file(filepath)
                # The name is always present if the pattern matches.
                if "name" not in distro_info:
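The change above replaces an ``os.listdir`` plus ``os.path.isfile`` scan with ``os.scandir``. An illustrative standalone comparison (the ``IGNORE`` set is a made-up stand-in for ``_DISTRO_RELEASE_IGNORE_BASENAMES``): ``DirEntry.is_file()`` can usually answer from information cached during the directory read, avoiding one extra ``stat()`` call per entry.

# Sketch only; the ignore set below is illustrative, not distro's actual list.
import os

IGNORE = {"debian_version", "lsb-release", "oem-release"}

def release_files(etc_dir: str) -> list:
    with os.scandir(etc_dir) as it:
        files = [p.path for p in it if p.is_file() and p.name not in IGNORE]
    files.sort()  # sort for repeatable results across runs
    return files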
173
lib/spack/external/_vendoring/jsonschema/_format.py
vendored
@@ -231,6 +231,96 @@ def is_host_name(instance):
    return True


try:
    # The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
    import idna
except ImportError:
    pass
else:
    @_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
    def is_idn_host_name(instance):
        if not isinstance(instance, str_types):
            return True
        idna.encode(instance)
        return True


try:
    import rfc3987
except ImportError:
    try:
        from rfc3986_validator import validate_rfc3986
    except ImportError:
        pass
    else:
        @_checks_drafts(name="uri")
        def is_uri(instance):
            if not isinstance(instance, str_types):
                return True
            return validate_rfc3986(instance, rule="URI")

        @_checks_drafts(
            draft6="uri-reference",
            draft7="uri-reference",
            raises=ValueError,
        )
        def is_uri_reference(instance):
            if not isinstance(instance, str_types):
                return True
            return validate_rfc3986(instance, rule="URI_reference")

else:
    @_checks_drafts(draft7="iri", raises=ValueError)
    def is_iri(instance):
        if not isinstance(instance, str_types):
            return True
        return rfc3987.parse(instance, rule="IRI")

    @_checks_drafts(draft7="iri-reference", raises=ValueError)
    def is_iri_reference(instance):
        if not isinstance(instance, str_types):
            return True
        return rfc3987.parse(instance, rule="IRI_reference")

    @_checks_drafts(name="uri", raises=ValueError)
    def is_uri(instance):
        if not isinstance(instance, str_types):
            return True
        return rfc3987.parse(instance, rule="URI")

    @_checks_drafts(
        draft6="uri-reference",
        draft7="uri-reference",
        raises=ValueError,
    )
    def is_uri_reference(instance):
        if not isinstance(instance, str_types):
            return True
        return rfc3987.parse(instance, rule="URI_reference")


try:
    from strict_rfc3339 import validate_rfc3339
except ImportError:
    try:
        from rfc3339_validator import validate_rfc3339
    except ImportError:
        validate_rfc3339 = None

if validate_rfc3339:
    @_checks_drafts(name="date-time")
    def is_datetime(instance):
        if not isinstance(instance, str_types):
            return True
        return validate_rfc3339(instance)

    @_checks_drafts(draft7="time")
    def is_time(instance):
        if not isinstance(instance, str_types):
            return True
        return is_datetime("1970-01-01T" + instance)


@_checks_drafts(name="regex", raises=re.error)
def is_regex(instance):
    if not isinstance(instance, str_types):
@@ -250,3 +340,86 @@ def is_draft3_time(instance):
    if not isinstance(instance, str_types):
        return True
    return datetime.datetime.strptime(instance, "%H:%M:%S")


try:
    import webcolors
except ImportError:
    pass
else:
    def is_css_color_code(instance):
        return webcolors.normalize_hex(instance)

    @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
    def is_css21_color(instance):
        if (
            not isinstance(instance, str_types) or
            instance.lower() in webcolors.css21_names_to_hex
        ):
            return True
        return is_css_color_code(instance)

    def is_css3_color(instance):
        if instance.lower() in webcolors.css3_names_to_hex:
            return True
        return is_css_color_code(instance)


try:
    import jsonpointer
except ImportError:
    pass
else:
    @_checks_drafts(
        draft6="json-pointer",
        draft7="json-pointer",
        raises=jsonpointer.JsonPointerException,
    )
    def is_json_pointer(instance):
        if not isinstance(instance, str_types):
            return True
        return jsonpointer.JsonPointer(instance)

    # TODO: I don't want to maintain this, so it
    # needs to go either into jsonpointer (pending
    # https://github.com/stefankoegl/python-json-pointer/issues/34) or
    # into a new external library.
    @_checks_drafts(
        draft7="relative-json-pointer",
        raises=jsonpointer.JsonPointerException,
    )
    def is_relative_json_pointer(instance):
        # Definition taken from:
        # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
        if not isinstance(instance, str_types):
            return True
        non_negative_integer, rest = [], ""
        for i, character in enumerate(instance):
            if character.isdigit():
                non_negative_integer.append(character)
                continue

            if not non_negative_integer:
                return False

            rest = instance[i:]
            break
        return (rest == "#") or jsonpointer.JsonPointer(rest)


try:
    import uritemplate.exceptions
except ImportError:
    pass
else:
    @_checks_drafts(
        draft6="uri-template",
        draft7="uri-template",
        raises=uritemplate.exceptions.InvalidTemplate,
    )
    def is_uri_template(
        instance,
        template_validator=uritemplate.Validator().force_balanced_braces(),
    ):
        template = uritemplate.URITemplate(instance)
        return template_validator.validate(template)
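The ``@_checks_drafts`` functions restored above are jsonschema's internal registration mechanism. With the library's public API, the same idea looks roughly like the following (the ``even-length`` format is a made-up example, not one of the restored checkers):

# Sketch of format-checker registration with jsonschema's public API.
import jsonschema

checker = jsonschema.FormatChecker()

@checker.checks("even-length", raises=ValueError)
def is_even_length(value):
    # Non-strings pass through, mirroring the str_types guards above.
    return not isinstance(value, str) or len(value) % 2 == 0

schema = {"type": "string", "format": "even-length"}
jsonschema.validate("abcd", schema, format_checker=checker)  # passes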
@@ -81,13 +81,8 @@ def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu
        self.generation = generation
        # Only relevant for AArch64
        self.cpu_part = cpu_part

        # Cache the "ancestor" computation
        # Cache the ancestor computation
        self._ancestors = None
        # Cache the "generic" computation
        self._generic = None
        # Cache the "family" computation
        self._family = None

    @property
    def ancestors(self):
@@ -120,9 +115,6 @@ def __eq__(self, other):
            and self.cpu_part == other.cpu_part
        )

    def __hash__(self):
        return hash(self.name)

    @coerce_target_names
    def __ne__(self, other):
        return not self == other
@@ -179,22 +171,18 @@ def __contains__(self, feature):
    @property
    def family(self):
        """Returns the architecture family a given target belongs to"""
        if self._family is None:
            roots = [x for x in [self] + self.ancestors if not x.ancestors]
            msg = "a target is expected to belong to just one architecture family"
            msg += f"[found {', '.join(str(x) for x in roots)}]"
            assert len(roots) == 1, msg
            self._family = roots.pop()
        roots = [x for x in [self] + self.ancestors if not x.ancestors]
        msg = "a target is expected to belong to just one architecture family"
        msg += f"[found {', '.join(str(x) for x in roots)}]"
        assert len(roots) == 1, msg

        return self._family
        return roots.pop()

    @property
    def generic(self):
        """Returns the best generic architecture that is compatible with self"""
        if self._generic is None:
            generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
            self._generic = max(generics, key=lambda x: len(x.ancestors))
        return self._generic
        generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
        return max(generics, key=lambda x: len(x.ancestors))

    def to_dict(self):
        """Returns a dictionary representation of this object."""
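The removed ``_family``/``_generic`` attributes were hand-rolled memoization; after this change the properties recompute on every access. A hedged sketch of the standard-library pattern the old code amounted to (illustrative ``Node`` class, not archspec's actual ``Microarchitecture``):

# Sketch only: functools.cached_property is the idiomatic equivalent of
# the removed self._family caching.
import functools

class Node:
    def __init__(self, name, ancestors):
        self.name = name
        self.ancestors = ancestors  # simplified stand-in for the real DAG

    @functools.cached_property
    def family(self):
        # Roots of the ancestor DAG; exactly one is expected.
        roots = [x for x in [self] + self.ancestors if not x.ancestors]
        assert len(roots) == 1, "a target should belong to one family"
        return roots[0]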
@@ -1482,6 +1482,7 @@
        "cldemote",
        "movdir64b",
        "movdiri",
        "pdcm",
        "serialize",
        "waitpkg"
      ],
@@ -2236,84 +2237,6 @@
      ]
    }
  },
  "zen5": {
    "from": ["zen4"],
    "vendor": "AuthenticAMD",
    "features": [
      "abm",
      "aes",
      "avx",
      "avx2",
      "avx512_bf16",
      "avx512_bitalg",
      "avx512bw",
      "avx512cd",
      "avx512dq",
      "avx512f",
      "avx512ifma",
      "avx512vbmi",
      "avx512_vbmi2",
      "avx512vl",
      "avx512_vnni",
      "avx512_vp2intersect",
      "avx512_vpopcntdq",
      "avx_vnni",
      "bmi1",
      "bmi2",
      "clflushopt",
      "clwb",
      "clzero",
      "cppc",
      "cx16",
      "f16c",
      "flush_l1d",
      "fma",
      "fsgsbase",
      "gfni",
      "ibrs_enhanced",
      "mmx",
      "movbe",
      "movdir64b",
      "movdiri",
      "pclmulqdq",
      "popcnt",
      "rdseed",
      "sse",
      "sse2",
      "sse4_1",
      "sse4_2",
      "sse4a",
      "ssse3",
      "tsc_adjust",
      "vaes",
      "vpclmulqdq",
      "xsavec",
      "xsaveopt"
    ],
    "compilers": {
      "gcc": [
        {
          "versions": "14.1:",
          "name": "znver5",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "aocc": [
        {
          "versions": "5.0:",
          "name": "znver5",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "clang": [
        {
          "versions": "19.1:",
          "name": "znver5",
          "flags": "-march={name} -mtune={name}"
        }
      ]
    }
  },
  "ppc64": {
    "from": [],
    "vendor": "generic",
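Each compiler entry in the ``zen5`` block pairs a version range with a flag template. Roughly how such an entry resolves to real flags (the version-range check is omitted for brevity; this is a sketch, not archspec's code):

# Sketch: format the entry's "flags" template with its "name".
entry = {"versions": "14.1:", "name": "znver5", "flags": "-march={name} -mtune={name}"}

def optimization_flags(entry: dict) -> str:
    return entry["flags"].format(name=entry["name"])

print(optimization_flags(entry))  # -march=znver5 -mtune=znver5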
45
lib/spack/external/patches/distro.patch
vendored
@@ -1,45 +0,0 @@
diff --git a/lib/spack/external/_vendoring/distro/distro.py b/lib/spack/external/_vendoring/distro/distro.py
index 89e1868047..50c3b18d4d 100644
--- a/lib/spack/external/_vendoring/distro/distro.py
+++ b/lib/spack/external/_vendoring/distro/distro.py
@@ -1265,27 +1265,29 @@ def _distro_release_info(self) -> Dict[str, str]:
             match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
         else:
             try:
-                basenames = [
-                    basename
-                    for basename in os.listdir(self.etc_dir)
-                    if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
-                    and os.path.isfile(os.path.join(self.etc_dir, basename))
-                ]
+                with os.scandir(self.etc_dir) as it:
+                    etc_files = [
+                        p.path for p in it
+                        if p.is_file() and p.name not in _DISTRO_RELEASE_IGNORE_BASENAMES
+                    ]
                 # We sort for repeatability in cases where there are multiple
                 # distro specific files; e.g. CentOS, Oracle, Enterprise all
                 # containing `redhat-release` on top of their own.
-                basenames.sort()
+                etc_files.sort()
             except OSError:
                 # This may occur when /etc is not readable but we can't be
                 # sure about the *-release files. Check common entries of
                 # /etc for information. If they turn out to not be there the
                 # error is handled in `_parse_distro_release_file()`.
-                basenames = _DISTRO_RELEASE_BASENAMES
-                for basename in basenames:
-                    match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+                etc_files = [
+                    os.path.join(self.etc_dir, basename)
+                    for basename in _DISTRO_RELEASE_BASENAMES
+                ]
+
+            for filepath in etc_files:
+                match = _DISTRO_RELEASE_BASENAME_PATTERN.match(os.path.basename(filepath))
                 if match is None:
                     continue
-                filepath = os.path.join(self.etc_dir, basename)
                 distro_info = self._parse_distro_release_file(filepath)
                 # The name is always present if the pattern matches.
                 if "name" not in distro_info:
188
lib/spack/external/patches/jsonschema.patch
vendored
@@ -13,191 +13,3 @@ index 6b630cdfbb..1791fe7fbf 100644
-__version__ = metadata.version("jsonschema")
+
+__version__ = "3.2.0"
diff --git a/lib/spack/external/_vendoring/jsonschema/_format.py b/lib/spack/external/_vendoring/jsonschema/_format.py
index 281a7cfcff..29061e3661 100644
--- a/lib/spack/external/_vendoring/jsonschema/_format.py
+++ b/lib/spack/external/_vendoring/jsonschema/_format.py
@@ -231,96 +231,6 @@ def is_host_name(instance):
     return True
 
 
-try:
-    # The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
-    import idna
-except ImportError:
-    pass
-else:
-    @_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
-    def is_idn_host_name(instance):
-        if not isinstance(instance, str_types):
-            return True
-        idna.encode(instance)
-        return True
-
-
-try:
-    import rfc3987
-except ImportError:
-    try:
-        from rfc3986_validator import validate_rfc3986
-    except ImportError:
-        pass
-    else:
-        @_checks_drafts(name="uri")
-        def is_uri(instance):
-            if not isinstance(instance, str_types):
-                return True
-            return validate_rfc3986(instance, rule="URI")
-
-        @_checks_drafts(
-            draft6="uri-reference",
-            draft7="uri-reference",
-            raises=ValueError,
-        )
-        def is_uri_reference(instance):
-            if not isinstance(instance, str_types):
-                return True
-            return validate_rfc3986(instance, rule="URI_reference")
-
-else:
-    @_checks_drafts(draft7="iri", raises=ValueError)
-    def is_iri(instance):
-        if not isinstance(instance, str_types):
-            return True
-        return rfc3987.parse(instance, rule="IRI")
-
-    @_checks_drafts(draft7="iri-reference", raises=ValueError)
-    def is_iri_reference(instance):
-        if not isinstance(instance, str_types):
-            return True
-        return rfc3987.parse(instance, rule="IRI_reference")
-
-    @_checks_drafts(name="uri", raises=ValueError)
-    def is_uri(instance):
-        if not isinstance(instance, str_types):
-            return True
-        return rfc3987.parse(instance, rule="URI")
-
-    @_checks_drafts(
-        draft6="uri-reference",
-        draft7="uri-reference",
-        raises=ValueError,
-    )
-    def is_uri_reference(instance):
-        if not isinstance(instance, str_types):
-            return True
-        return rfc3987.parse(instance, rule="URI_reference")
-
-
-try:
-    from strict_rfc3339 import validate_rfc3339
-except ImportError:
-    try:
-        from rfc3339_validator import validate_rfc3339
-    except ImportError:
-        validate_rfc3339 = None
-
-if validate_rfc3339:
-    @_checks_drafts(name="date-time")
-    def is_datetime(instance):
-        if not isinstance(instance, str_types):
-            return True
-        return validate_rfc3339(instance)
-
-    @_checks_drafts(draft7="time")
-    def is_time(instance):
-        if not isinstance(instance, str_types):
-            return True
-        return is_datetime("1970-01-01T" + instance)
-
-
 @_checks_drafts(name="regex", raises=re.error)
 def is_regex(instance):
     if not isinstance(instance, str_types):
@@ -340,86 +250,3 @@ def is_draft3_time(instance):
     if not isinstance(instance, str_types):
         return True
     return datetime.datetime.strptime(instance, "%H:%M:%S")
-
-
-try:
-    import webcolors
-except ImportError:
-    pass
-else:
-    def is_css_color_code(instance):
-        return webcolors.normalize_hex(instance)
-
-    @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
-    def is_css21_color(instance):
-        if (
-            not isinstance(instance, str_types) or
-            instance.lower() in webcolors.css21_names_to_hex
-        ):
-            return True
-        return is_css_color_code(instance)
-
-    def is_css3_color(instance):
-        if instance.lower() in webcolors.css3_names_to_hex:
-            return True
-        return is_css_color_code(instance)
-
-
-try:
-    import jsonpointer
-except ImportError:
-    pass
-else:
-    @_checks_drafts(
-        draft6="json-pointer",
-        draft7="json-pointer",
-        raises=jsonpointer.JsonPointerException,
-    )
-    def is_json_pointer(instance):
-        if not isinstance(instance, str_types):
-            return True
-        return jsonpointer.JsonPointer(instance)
-
-    # TODO: I don't want to maintain this, so it
-    # needs to go either into jsonpointer (pending
-    # https://github.com/stefankoegl/python-json-pointer/issues/34) or
-    # into a new external library.
-    @_checks_drafts(
-        draft7="relative-json-pointer",
-        raises=jsonpointer.JsonPointerException,
-    )
-    def is_relative_json_pointer(instance):
-        # Definition taken from:
-        # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
-        if not isinstance(instance, str_types):
-            return True
-        non_negative_integer, rest = [], ""
-        for i, character in enumerate(instance):
-            if character.isdigit():
-                non_negative_integer.append(character)
-                continue
-
-            if not non_negative_integer:
-                return False
-
-            rest = instance[i:]
-            break
-        return (rest == "#") or jsonpointer.JsonPointer(rest)
-
-
-try:
-    import uritemplate.exceptions
-except ImportError:
-    pass
-else:
-    @_checks_drafts(
-        draft6="uri-template",
-        draft7="uri-template",
-        raises=uritemplate.exceptions.InvalidTemplate,
-    )
-    def is_uri_template(
-        instance,
-        template_validator=uritemplate.Validator().force_balanced_braces(),
-    ):
-        template = uritemplate.URITemplate(instance)
-        return template_validator.validate(template)
@@ -41,20 +41,6 @@ def comma_and(sequence: List[str]) -> str:
    return comma_list(sequence, "and")


def ordinal(number: int) -> str:
    """Return the ordinal representation (1st, 2nd, 3rd, etc.) for the provided number.

    Args:
        number: int to convert to ordinal number

    Returns: number's corresponding ordinal
    """
    idx = (number % 10) << 1
    tens = number % 100 // 10
    suffix = "th" if tens == 1 or idx > 6 else "thstndrd"[idx : idx + 2]
    return f"{number}{suffix}"


def quote(sequence: List[str], q: str = "'") -> List[str]:
    """Quotes each item in the input list with the quote character passed as second argument."""
    return [f"{q}{e}{q}" for e in sequence]
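The ``"thstndrd"`` slice in the removed ``ordinal()`` is compact but non-obvious: ``idx`` doubles the last digit, so a two-character slice of the packed string picks "th", "st", "nd", or "rd", while teens (``tens == 1``) and last digits of 4 or more fall back to "th". The function body below is copied from the diff above; the asserts are illustrative:

# Copied from the removed function; asserts added for illustration.
def ordinal(number: int) -> str:
    idx = (number % 10) << 1
    tens = number % 100 // 10
    suffix = "th" if tens == 1 or idx > 6 else "thstndrd"[idx : idx + 2]
    return f"{number}{suffix}"

assert [ordinal(n) for n in (1, 2, 3, 4, 11, 12, 21, 102)] == [
    "1st", "2nd", "3rd", "4th", "11th", "12th", "21st", "102nd"
]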
@@ -20,25 +20,15 @@
import tempfile
from contextlib import contextmanager
from itertools import accumulate
from typing import (
    Callable,
    Deque,
    Dict,
    Iterable,
    List,
    Match,
    Optional,
    Sequence,
    Set,
    Tuple,
    Union,
)
from typing import Callable, Iterable, List, Match, Optional, Tuple, Union

import llnl.util.symlink
from llnl.util import tty
from llnl.util.lang import dedupe, fnmatch_translate_multiple, memoized
from llnl.util.lang import dedupe, memoized
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink

from spack.util.executable import Executable, which

from ..path import path_to_os_path, system_path_filter

if sys.platform != "win32":
@@ -59,11 +49,11 @@
    "copy_mode",
    "filter_file",
    "find",
    "find_first",
    "find_headers",
    "find_all_headers",
    "find_libraries",
    "find_system_libraries",
    "fix_darwin_install_name",
    "force_remove",
    "force_symlink",
    "getuid",
@@ -97,8 +87,6 @@
    "visit_directory_tree",
]

Path = Union[str, pathlib.Path]

if sys.version_info < (3, 7, 4):
    # monkeypatch shutil.copystat to fix PermissionError when copying read-only
    # files on Lustre when using Python < 3.7.4
@@ -260,6 +248,42 @@ def path_contains_subdirectory(path, root):
    return norm_path.startswith(norm_root)


@memoized
def file_command(*args):
    """Creates entry point to `file` system command with provided arguments"""
    file_cmd = which("file", required=True)
    for arg in args:
        file_cmd.add_default_arg(arg)
    return file_cmd


@memoized
def _get_mime_type():
    """Generate method to call `file` system command to acquire mime type
    for a specified path
    """
    if sys.platform == "win32":
        # -h option (no-dereference) does not exist in Windows
        return file_command("-b", "--mime-type")
    else:
        return file_command("-b", "-h", "--mime-type")


def mime_type(filename):
    """Returns the mime type and subtype of a file.

    Args:
        filename: file to be analyzed

    Returns:
        Tuple containing the MIME type and subtype
    """
    output = _get_mime_type()(filename, output=str, error=str).strip()
    tty.debug("==> " + output)
    type, _, subtype = output.partition("/")
    return type, subtype


#: This generates the library filenames that may appear on any OS.
library_extensions = ["a", "la", "so", "tbd", "dylib"]
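Illustrative only: how ``mime_type()`` above splits ``file -b --mime-type`` output into type and subtype with ``str.partition``:

# Example output of the `file` command; the value is illustrative.
output = "text/x-shellscript"
mtype, _, subtype = output.partition("/")
assert (mtype, subtype) == ("text", "x-shellscript")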
@@ -742,6 +766,7 @@ def copy_tree(
    src: str,
    dest: str,
    symlinks: bool = True,
    allow_broken_symlinks: bool = sys.platform != "win32",
    ignore: Optional[Callable[[str], bool]] = None,
    _permissions: bool = False,
):
@@ -764,6 +789,8 @@ def copy_tree(
        src (str): the directory to copy
        dest (str): the destination directory
        symlinks (bool): whether or not to preserve symlinks
        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
            On Windows, setting this to True will raise an exception. Defaults to true on unix.
        ignore (typing.Callable): function indicating which files to ignore
        _permissions (bool): for internal use only

@@ -771,6 +798,8 @@ def copy_tree(
        IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
    """
    if allow_broken_symlinks and sys.platform == "win32":
        raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
    if _permissions:
        tty.debug("Installing {0} to {1}".format(src, dest))
    else:
@@ -843,14 +872,16 @@ def escaped_path(path):
            copy_mode(s, d)

    for target, d, s in links:
        symlink(target, d)
        symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
        if _permissions:
            set_install_permissions(d)
            copy_mode(s, d)


@system_path_filter
def install_tree(src, dest, symlinks=True, ignore=None):
def install_tree(
    src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
):
    """Recursively install an entire directory tree rooted at *src*.

    Same as :py:func:`copy_tree` with the addition of setting proper
@@ -861,12 +892,21 @@ def install_tree(src, dest, symlinks=True, ignore=None):
        dest (str): the destination directory
        symlinks (bool): whether or not to preserve symlinks
        ignore (typing.Callable): function indicating which files to ignore
        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
            On Windows, setting this to True will raise an exception.

    Raises:
        IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
    """
    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
    copy_tree(
        src,
        dest,
        symlinks=symlinks,
        allow_broken_symlinks=allow_broken_symlinks,
        ignore=ignore,
        _permissions=True,
    )


@system_path_filter
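Hypothetical call sites for the new keyword (the paths are illustrative): broken symlinks are tolerated on POSIX by default and must stay disallowed on Windows, where ``copy_tree`` raises if they are requested.

# Illustrative usage; paths are made up.
from llnl.util.filesystem import install_tree

install_tree("/tmp/stage/pkg", "/opt/spack/pkg")  # default: allowed on POSIX
install_tree("/tmp/stage/pkg", "/opt/spack/pkg", allow_broken_symlinks=False)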
@@ -1600,12 +1640,6 @@ def remove_linked_tree(path):
        shutil.rmtree(os.path.realpath(path), **kwargs)
        os.unlink(path)
    else:
        if sys.platform == "win32":
            # Adding this prefix allows shutil to remove long paths on windows
            # https://learn.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=registry
            long_path_pfx = "\\\\?\\"
            if not path.startswith(long_path_pfx):
                path = long_path_pfx + path
        shutil.rmtree(path, **kwargs)
@@ -1655,6 +1689,41 @@ def safe_remove(*files_or_dirs):
        raise


@system_path_filter
def fix_darwin_install_name(path):
    """Fix install name of dynamic libraries on Darwin to have full path.

    There are two parts of this task:

    1. Use ``install_name('-id', ...)`` to change install name of a single lib
    2. Use ``install_name('-change', ...)`` to change the cross linking between
       libs. The function assumes that all libraries are in one folder and
       currently won't follow subfolders.

    Parameters:
        path (str): directory in which .dylib files are located
    """
    libs = glob.glob(join_path(path, "*.dylib"))
    for lib in libs:
        # fix install name first:
        install_name_tool = Executable("install_name_tool")
        install_name_tool("-id", lib, lib)
        otool = Executable("otool")
        long_deps = otool("-L", lib, output=str).split("\n")
        deps = [dep.partition(" ")[0][1::] for dep in long_deps[2:-1]]
        # fix all dependencies:
        for dep in deps:
            for loc in libs:
                # We really want to check for either
                #     dep == os.path.basename(loc)   or
                #     dep == join_path(builddir, os.path.basename(loc)),
                # but we don't know builddir (nor how symbolic links look
                # in builddir). We thus only compare the basenames.
                if os.path.basename(dep) == os.path.basename(loc):
                    install_name_tool("-change", dep, loc, lib)
                    break
def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2) -> Optional[str]:
    """Find the first file matching a pattern.

@@ -1687,203 +1756,105 @@ def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2)
    return FindFirstFile(root, *files, bfs_depth=bfs_depth).find()


def find(
    root: Union[Path, Sequence[Path]],
    files: Union[str, Sequence[str]],
    recursive: bool = True,
    max_depth: Optional[int] = None,
) -> List[str]:
    """Finds all files matching the patterns from ``files`` starting from ``root``. This function
    returns a deterministic result for the same input and directory structure when run multiple
    times. Symlinked directories are followed, and unique directories are searched only once. Each
    matching file is returned only once at lowest depth in case multiple paths exist due to
    symlinked directories.
def find(root, files, recursive=True):
    """Search for ``files`` starting from the ``root`` directory.

    Like GNU/BSD find but written entirely in Python.

    Examples:

    .. code-block:: console

       $ find /usr -name python

    is equivalent to:

    >>> find('/usr', 'python')

    .. code-block:: console

       $ find /usr/local/bin -maxdepth 1 -name python

    is equivalent to:

    >>> find('/usr/local/bin', 'python', recursive=False)

    Accepts any glob characters accepted by fnmatch:

    ==========  ====================================
    Pattern     Meaning
    ==========  ====================================
    ``*``       matches one or more characters
    ``*``       matches everything
    ``?``       matches any single character
    ``[seq]``   matches any character in ``seq``
    ``[!seq]``  matches any character not in ``seq``
    ==========  ====================================

    Examples:

    >>> find("/usr", "*.txt", recursive=True, max_depth=2)

    finds all files with the extension ``.txt`` in the directory ``/usr`` and subdirectories up to
    depth 2.

    >>> find(["/usr", "/var"], ["*.txt", "*.log"], recursive=True)

    finds all files with the extension ``.txt`` or ``.log`` in the directories ``/usr`` and
    ``/var`` at any depth.

    >>> find("/usr", "GL/*.h", recursive=True)

    finds all header files in a directory GL at any depth in the directory ``/usr``.

    Parameters:
        root: One or more root directories to start searching from
        files: One or more filename patterns to search for
        recursive: if False search only root, if True descends from roots. Defaults to True.
        max_depth: if set, don't search below this depth. Cannot be set if recursive is False
        root (str): The root directory to start searching from
        files (str or collections.abc.Sequence): Library name(s) to search for
        recursive (bool): if False search only root folder,
            if True descends top-down from the root. Defaults to True.

    Returns a list of absolute, matching file paths.
    Returns:
        list: The files that have been found
    """
    if isinstance(root, (str, pathlib.Path)):
        root = [root]
    elif not isinstance(root, collections.abc.Sequence):
        raise TypeError(f"'root' arg must be a path or a sequence of paths, not '{type(root)}']")

    if isinstance(files, str):
        files = [files]
    elif not isinstance(files, collections.abc.Sequence):
        raise TypeError(f"'files' arg must be str or a sequence of str, not '{type(files)}']")

    # If recursive is false, max_depth can only be None or 0
    if max_depth and not recursive:
        raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")
    if recursive:
        tty.debug(f"Find (recursive): {root} {str(files)}")
        result = _find_recursive(root, files)
    else:
        tty.debug(f"Find (not recursive): {root} {str(files)}")
        result = _find_non_recursive(root, files)

    tty.debug(f"Find (max depth = {max_depth}): {root} {files}")
    if not recursive:
        max_depth = 0
    elif max_depth is None:
        max_depth = sys.maxsize
    result = _find_max_depth(root, files, max_depth)
    tty.debug(f"Find complete: {root} {files}")
    tty.debug(f"Find complete: {root} {str(files)}")
    return result


def _log_file_access_issue(e: OSError, path: str) -> None:
    errno_name = errno.errorcode.get(e.errno, "UNKNOWN")
    tty.debug(f"find must skip {path}: {errno_name} {e}")
@system_path_filter
def _find_recursive(root, search_files):
    # The variable here is **on purpose** a defaultdict. The idea is that
    # we want to poke the filesystem as little as possible, but still maintain
    # stability in the order of the answer. Thus we are recording each library
    # found in a key, and reconstructing the stable order later.
    found_files = collections.defaultdict(list)

    # Make the path absolute to have os.walk also return an absolute path
    root = os.path.abspath(root)
    for path, _, list_files in os.walk(root):
        for search_file in search_files:
            matches = glob.glob(os.path.join(path, search_file))
            matches = [os.path.join(path, x) for x in matches]
            found_files[search_file].extend(matches)

    answer = []
    for search_file in search_files:
        answer.extend(found_files[search_file])

    return answer


def _file_id(s: os.stat_result) -> Tuple[int, int]:
    # Note: on windows, st_ino is the file index and st_dev is the volume serial number. See
    # https://github.com/python/cpython/blob/3.9/Python/fileutils.c
    return (s.st_ino, s.st_dev)
@system_path_filter
def _find_non_recursive(root, search_files):
    # The variable here is **on purpose** a defaultdict as os.list_dir
    # can return files in any order (does not preserve stability)
    found_files = collections.defaultdict(list)

    # Make the path absolute to have absolute path returned
    root = os.path.abspath(root)


def _dedupe_files(paths: List[str]) -> List[str]:
    """Deduplicate files by inode and device, dropping files that cannot be accessed."""
    unique_files: List[str] = []
    # tuple of (inode, device) for each file without following symlinks
    visited: Set[Tuple[int, int]] = set()
    for path in paths:
        try:
            stat_info = os.lstat(path)
        except OSError as e:
            _log_file_access_issue(e, path)
            continue
        file_id = _file_id(stat_info)
        if file_id not in visited:
            unique_files.append(path)
            visited.add(file_id)
    return unique_files
    for search_file in search_files:
        matches = glob.glob(os.path.join(root, search_file))
        matches = [os.path.join(root, x) for x in matches]
        found_files[search_file].extend(matches)

    answer = []
    for search_file in search_files:
        answer.extend(found_files[search_file])


def _find_max_depth(
    roots: Sequence[Path], globs: Sequence[str], max_depth: int = sys.maxsize
) -> List[str]:
    """See ``find`` for the public API."""
    # We optimize for the common case of simple filename only patterns: a single, combined regex
    # is used. For complex patterns that include path components, we use a slower glob call from
    # every directory we visit within max_depth.
    filename_only_patterns = {
        f"pattern_{i}": os.path.normcase(x) for i, x in enumerate(globs) if "/" not in x
    }
    complex_patterns = {f"pattern_{i}": x for i, x in enumerate(globs) if "/" in x}
    regex = re.compile(fnmatch_translate_multiple(filename_only_patterns))
    # Ordered dictionary that keeps track of what pattern found which files
    matched_paths: Dict[str, List[str]] = {f"pattern_{i}": [] for i, _ in enumerate(globs)}
    # Ensure returned paths are always absolute
    roots = [os.path.abspath(r) for r in roots]
    # Breadth-first search queue. Each element is a tuple of (depth, dir)
    dir_queue: Deque[Tuple[int, str]] = collections.deque()
    # Set of visited directories. Each element is a tuple of (inode, device)
    visited_dirs: Set[Tuple[int, int]] = set()

    for root in roots:
        try:
            stat_root = os.stat(root)
        except OSError as e:
            _log_file_access_issue(e, root)
            continue
        dir_id = _file_id(stat_root)
        if dir_id not in visited_dirs:
            dir_queue.appendleft((0, root))
            visited_dirs.add(dir_id)

    while dir_queue:
        depth, curr_dir = dir_queue.pop()
        try:
            dir_iter = os.scandir(curr_dir)
        except OSError as e:
            _log_file_access_issue(e, curr_dir)
            continue

        # Use glob.glob for complex patterns.
        for pattern_name, pattern in complex_patterns.items():
            matched_paths[pattern_name].extend(
                path for path in glob.glob(os.path.join(curr_dir, pattern))
            )

        # List of subdirectories by path and (inode, device) tuple
        subdirs: List[Tuple[str, Tuple[int, int]]] = []

        with dir_iter:
            for dir_entry in dir_iter:

                # Match filename only patterns
                if filename_only_patterns:
                    m = regex.match(os.path.normcase(dir_entry.name))
                    if m:
                        for pattern_name in filename_only_patterns:
                            if m.group(pattern_name):
                                matched_paths[pattern_name].append(dir_entry.path)
                                break

                # Collect subdirectories
                if depth >= max_depth:
                    continue

                try:
                    if not dir_entry.is_dir(follow_symlinks=True):
                        continue
                    if sys.platform == "win32":
                        # Note: st_ino/st_dev on DirEntry.stat are not set on Windows, so we have
                        # to call os.stat
                        stat_info = os.stat(dir_entry.path, follow_symlinks=True)
                    else:
                        stat_info = dir_entry.stat(follow_symlinks=True)
                except OSError as e:
                    # Possible permission issue, or a symlink that cannot be resolved (ELOOP).
                    _log_file_access_issue(e, dir_entry.path)
                    continue

                subdirs.append((dir_entry.path, _file_id(stat_info)))

        # Enqueue subdirectories in a deterministic order
        if subdirs:
            subdirs.sort(key=lambda s: os.path.basename(s[0]))
            for subdir, subdir_id in subdirs:
                if subdir_id not in visited_dirs:
                    dir_queue.appendleft((depth + 1, subdir))
                    visited_dirs.add(subdir_id)

    # Sort the matched paths for deterministic output
    for paths in matched_paths.values():
        paths.sort()
    all_matching_paths = [path for paths in matched_paths.values() for path in paths]

    # We only dedupe files if we have any complex patterns, since only they can match the same file
    # multiple times
    return _dedupe_files(all_matching_paths) if complex_patterns else all_matching_paths
    return answer
# Utilities for libraries and headers
|
||||
@@ -2322,9 +2293,7 @@ def find_system_libraries(libraries, shared=True):
|
||||
return libraries_found
|
||||
|
||||
|
||||
def find_libraries(
|
||||
libraries, root, shared=True, recursive=False, runtime=True, max_depth: Optional[int] = None
|
||||
):
|
||||
def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
|
||||
"""Returns an iterable of full paths to libraries found in a root dir.
|
||||
|
||||
Accepts any glob characters accepted by fnmatch:
|
||||
@@ -2345,8 +2314,6 @@ def find_libraries(
|
||||
otherwise for static. Defaults to True.
|
||||
recursive (bool): if False search only root folder,
|
||||
if True descends top-down from the root. Defaults to False.
|
||||
max_depth (int): if set, don't search below this depth. Cannot be set
|
||||
if recursive is False
|
||||
runtime (bool): Windows only option, no-op elsewhere. If true,
|
||||
search for runtime shared libs (.DLL), otherwise, search
|
||||
for .Lib files. If shared is false, this has no meaning.
|
||||
@@ -2355,7 +2322,6 @@ def find_libraries(
|
||||
Returns:
|
||||
LibraryList: The libraries that have been found
|
||||
"""
|
||||
|
||||
if isinstance(libraries, str):
|
||||
libraries = [libraries]
|
||||
elif not isinstance(libraries, collections.abc.Sequence):
|
||||
@@ -2388,10 +2354,8 @@ def find_libraries(
|
||||
libraries = ["{0}.{1}".format(lib, suffix) for lib in libraries for suffix in suffixes]
|
||||
|
||||
if not recursive:
|
||||
if max_depth:
|
||||
raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")
|
||||
# If not recursive, look for the libraries directly in root
|
||||
return LibraryList(find(root, libraries, recursive=False))
|
||||
return LibraryList(find(root, libraries, False))
|
||||
|
||||
# To speedup the search for external packages configured e.g. in /usr,
|
||||
# perform first non-recursive search in root/lib then in root/lib64 and
|
||||
@@ -2409,7 +2373,7 @@ def find_libraries(
|
||||
if found_libs:
|
||||
break
|
||||
else:
|
||||
found_libs = find(root, libraries, recursive=True, max_depth=max_depth)
|
||||
found_libs = find(root, libraries, True)
|
||||
|
||||
return LibraryList(found_libs)
|
||||
|
||||
|
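For context, a minimal sketch of how the max_depth parameter introduced above might be used; the paths here are hypothetical, and the keyword form follows the find() and find_libraries() calls shown in the hunks:

    from llnl.util.filesystem import find, find_libraries

    # Only descend two directory levels below the root when matching headers.
    headers = find("/usr/include", ["*.h"], max_depth=2)

    # find_libraries() forwards max_depth to find() in its recursive branch.
    libs = find_libraries(["libhdf5*"], root="/opt/hdf5", recursive=True, max_depth=3)
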
@@ -5,20 +5,18 @@

import collections.abc
import contextlib
import fnmatch
import functools
import inspect
import itertools
import os
import re
import sys
import traceback
import typing
import warnings
from datetime import datetime, timedelta
from typing import Callable, Dict, Iterable, List, Tuple, TypeVar
from typing import Any, Callable, Iterable, List, Tuple

# Ignore emacs backups when listing modules
ignore_modules = r"^\.#|~$"
ignore_modules = [r"^\.#", "~$"]


def index_by(objects, *funcs):
@@ -73,7 +71,7 @@ def index_by(objects, *funcs):
    if isinstance(f, str):
        f = lambda x: getattr(x, funcs[0])
    elif isinstance(f, tuple):
        f = lambda x: tuple(getattr(x, p, None) for p in funcs[0])
        f = lambda x: tuple(getattr(x, p) for p in funcs[0])

    result = {}
    for o in objects:
@@ -86,6 +84,20 @@ def index_by(objects, *funcs):
    return result


def caller_locals():
    """This will return the locals of the *parent* of the caller.
    This allows a function to insert variables into its caller's
    scope.  Yes, this is some black magic, and yes it's useful
    for implementing things like depends_on and provides.
    """
    # Passing zero here skips line context for speed.
    stack = inspect.stack(0)
    try:
        return stack[2][0].f_locals
    finally:
        del stack


def attr_setdefault(obj, name, value):
    """Like dict.setdefault, but for objects."""
    if not hasattr(obj, name):
@@ -93,6 +105,15 @@ def attr_setdefault(obj, name, value):
    return getattr(obj, name)


def has_method(cls, name):
    for base in inspect.getmro(cls):
        if base is object:
            continue
        if name in base.__dict__:
            return True
    return False


def union_dicts(*dicts):
    """Use update() to combine all dicts into one.

@@ -157,22 +178,19 @@ def list_modules(directory, **kwargs):
    order."""
    list_directories = kwargs.setdefault("directories", True)

    ignore = re.compile(ignore_modules)
    for name in os.listdir(directory):
        if name == "__init__.py":
            continue

    with os.scandir(directory) as it:
        for entry in it:
            if entry.name == "__init__.py" or entry.name == "__pycache__":
                continue
        path = os.path.join(directory, name)
        if list_directories and os.path.isdir(path):
            init_py = os.path.join(path, "__init__.py")
            if os.path.isfile(init_py):
                yield name

            if (
                list_directories
                and entry.is_dir()
                and os.path.isfile(os.path.join(entry.path, "__init__.py"))
            ):
                yield entry.name

            elif entry.name.endswith(".py") and entry.is_file() and not ignore.search(entry.name):
                yield entry.name[:-3]  # strip .py
        elif name.endswith(".py"):
            if not any(re.search(pattern, name) for pattern in ignore_modules):
                yield re.sub(".py$", "", name)


def decorator_with_or_without_args(decorator):
@@ -219,8 +237,8 @@ def setter(name, value):
        value.__name__ = name
        setattr(cls, name, value)

    if not hasattr(cls, "_cmp_key"):
        raise TypeError(f"'{cls.__name__}' doesn't define _cmp_key().")
    if not has_method(cls, "_cmp_key"):
        raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)

    setter("__eq__", lambda s, o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
    setter("__lt__", lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
@@ -370,8 +388,8 @@ def cd_fun():
        TypeError: If the class does not have a ``_cmp_iter`` method

    """
    if not hasattr(cls, "_cmp_iter"):
        raise TypeError(f"'{cls.__name__}' doesn't define _cmp_iter().")
    if not has_method(cls, "_cmp_iter"):
        raise TypeError("'%s' doesn't define _cmp_iter()." % cls.__name__)

    # comparison operators are implemented in terms of lazy_eq and lazy_lt
    def eq(self, other):
@@ -846,32 +864,20 @@ def uniq(sequence):
    return uniq_list


def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
def elide_list(line_list, max_num=10):
    """Takes a long list and limits it to a smaller number of elements,
    replacing intervening elements with '...'.  For example::

        elide_list(["1", "2", "3", "4", "5", "6"], 4)
        elide_list([1,2,3,4,5,6], 4)

    gives::

        ["1", "2", "3", "...", "6"]
        [1, 2, 3, '...', 6]
    """
    if len(line_list) > max_num:
        return [*line_list[: max_num - 1], "...", line_list[-1]]
    return line_list


if sys.version_info >= (3, 9):
    PatternStr = re.Pattern[str]
else:
    PatternStr = typing.Pattern[str]


def fnmatch_translate_multiple(named_patterns: Dict[str, str]) -> str:
    """Similar to ``fnmatch.translate``, but takes an ordered dictionary where keys are pattern
    names, and values are filename patterns. The output is a regex that matches any of the
    patterns in order, and named capture groups are used to identify which pattern matched."""
    return "|".join(f"(?P<{n}>{fnmatch.translate(p)})" for n, p in named_patterns.items())
        return line_list[: max_num - 1] + ["..."] + line_list[-1:]
    else:
        return line_list


@contextlib.contextmanager
@@ -886,12 +892,18 @@ class UnhashableArguments(TypeError):
    """Raise when an @memoized function receives unhashable arg or kwarg values."""


T = TypeVar("T")
def enum(**kwargs):
    """Return an enum-like class.

    Args:
        **kwargs: explicit dictionary of enums
    """
    return type("Enum", (object,), kwargs)


def stable_partition(
    input_iterable: Iterable[T], predicate_fn: Callable[[T], bool]
) -> Tuple[List[T], List[T]]:
    input_iterable: Iterable, predicate_fn: Callable[[Any], bool]
) -> Tuple[List[Any], List[Any]]:
    """Partition the input iterable according to a custom predicate.

    Args:
@@ -903,13 +915,12 @@ def stable_partition(
        Tuple of the list of elements evaluating to True, and
        list of elements evaluating to False.
    """
    true_items: List[T] = []
    false_items: List[T] = []
    true_items, false_items = [], []
    for item in input_iterable:
        if predicate_fn(item):
            true_items.append(item)
        else:
            false_items.append(item)
            continue
        false_items.append(item)
    return true_items, false_items


@@ -921,21 +932,6 @@ def ensure_last(lst, *elements):
    lst.append(lst.pop(lst.index(elt)))


class Const:
    """Class level constant, raises when trying to set the attribute"""

    __slots__ = ["value"]

    def __init__(self, value):
        self.value = value

    def __get__(self, instance, owner):
        return self.value

    def __set__(self, instance, value):
        raise TypeError(f"Const value does not support assignment [value={self.value}]")


class TypedMutableSequence(collections.abc.MutableSequence):
    """Base class that behaves like a list, just with a different type.

@@ -995,8 +991,11 @@ def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
    def grouped_message(self, with_tracebacks: bool = True) -> str:
        """Print out an error message coalescing all the forwarded errors."""
        each_exception_message = [
            "\n\t{0} raised {1}: {2}\n{3}".format(
                context, exc.__class__.__name__, exc, f"\n{''.join(tb)}" if with_tracebacks else ""
            "{0} raised {1}: {2}{3}".format(
                context,
                exc.__class__.__name__,
                exc,
                "\n{0}".format("".join(tb)) if with_tracebacks else "",
            )
            for context, exc, tb in self.exceptions
        ]
@@ -1037,42 +1036,3 @@ def __init__(self, callback):

    def __get__(self, instance, owner):
        return self.callback(owner)


class DeprecatedProperty:
    """Data descriptor to error or warn when a deprecated property is accessed.

    Derived classes must define a factory method to return an adaptor for the deprecated
    property, if the descriptor is not set to error.
    """

    __slots__ = ["name"]

    #: 0 - Nothing
    #: 1 - Warning
    #: 2 - Error
    error_lvl = 0

    def __init__(self, name: str) -> None:
        self.name = name

    def __get__(self, instance, owner):
        if instance is None:
            return self

        if self.error_lvl == 1:
            warnings.warn(
                f"accessing the '{self.name}' property of '{instance}', which is deprecated"
            )
        elif self.error_lvl == 2:
            raise AttributeError(f"cannot access the '{self.name}' attribute of '{instance}'")

        return self.factory(instance, owner)

    def __set__(self, instance, value):
        raise TypeError(
            f"the deprecated property '{self.name}' of '{instance}' does not support assignment"
        )

    def factory(self, instance, owner):
        raise NotImplementedError("must be implemented by derived classes")

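A quick sketch of what the fnmatch_translate_multiple() helper added above produces; the pattern names and inputs here are made up:

    import fnmatch
    import re

    named = {"pattern_0": "*.so", "pattern_1": "lib*.a"}
    combined = "|".join(f"(?P<{n}>{fnmatch.translate(p)})" for n, p in named.items())
    m = re.compile(combined).match("libz.a")
    # The named capture group identifies which pattern matched.
    assert m is not None and m.group("pattern_1") == "libz.a"
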
@@ -8,7 +8,6 @@
import subprocess
import sys
import tempfile
from typing import Union

from llnl.util import lang, tty

@@ -17,66 +16,92 @@
if sys.platform == "win32":
    from win32file import CreateHardLink

is_windows = sys.platform == "win32"

def _windows_symlink(
    src: str, dst: str, target_is_directory: bool = False, *, dir_fd: Union[int, None] = None
):
    """On Windows with System Administrator privileges this will be a normal symbolic link via
    os.symlink. On Windows without privledges the link will be a junction for a directory and a
    hardlink for a file. On Windows the various link types are:

    Symbolic Link: A link to a file or directory on the same or different volume (drive letter) or
    even to a remote file or directory (using UNC in its path). Need System Administrator
    privileges to make these.
def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
    """
    Create a link.

    Hard Link: A link to a file on the same volume (drive letter) only. Every file (file's data)
    has at least 1 hard link (file's name). But when this method creates a new hard link there will
    be 2. Deleting all hard links effectively deletes the file. Don't need System Administrator
    privileges.
    On non-Windows and Windows with System Administrator
    privleges this will be a normal symbolic link via
    os.symlink.

    Junction: A link to a directory on the same or different volume (drive letter) but not to a
    remote directory. Don't need System Administrator privileges."""
    source_path = os.path.normpath(src)
    On Windows without privledges the link will be a
    junction for a directory and a hardlink for a file.
    On Windows the various link types are:

    Symbolic Link: A link to a file or directory on the
    same or different volume (drive letter) or even to
    a remote file or directory (using UNC in its path).
    Need System Administrator privileges to make these.

    Hard Link: A link to a file on the same volume (drive
    letter) only. Every file (file's data) has at least 1
    hard link (file's name). But when this method creates
    a new hard link there will be 2. Deleting all hard
    links effectively deletes the file. Don't need System
    Administrator privileges.

    Junction: A link to a directory on the same or different
    volume (drive letter) but not to a remote directory. Don't
    need System Administrator privileges.

    Parameters:
        source_path (str): The real file or directory that the link points to.
            Must be absolute OR relative to the link.
        link_path (str): The path where the link will exist.
        allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
            doesn't exist. This will still raise an exception on Windows.
    """
    source_path = os.path.normpath(source_path)
    win_source_path = source_path
    link_path = os.path.normpath(dst)
    link_path = os.path.normpath(link_path)

    # Perform basic checks to make sure symlinking will succeed
    if os.path.lexists(link_path):
        raise AlreadyExistsError(f"Link path ({link_path}) already exists. Cannot create link.")
    # Never allow broken links on Windows.
    if sys.platform == "win32" and allow_broken_symlinks:
        raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")

    if not os.path.exists(source_path):
        if os.path.isabs(source_path):
            # An absolute source path that does not exist will result in a broken link.
            raise SymlinkError(
                f"Source path ({source_path}) is absolute but does not exist. Resulting "
                f"link would be broken so not making link."
    if not allow_broken_symlinks:
        # Perform basic checks to make sure symlinking will succeed
        if os.path.lexists(link_path):
            raise AlreadyExistsError(
                f"Link path ({link_path}) already exists. Cannot create link."
            )
        else:
            # os.symlink can create a link when the given source path is relative to
            # the link path. Emulate this behavior and check to see if the source exists
            # relative to the link path ahead of link creation to prevent broken
            # links from being made.
            link_parent_dir = os.path.dirname(link_path)
            relative_path = os.path.join(link_parent_dir, source_path)
            if os.path.exists(relative_path):
                # In order to work on windows, the source path needs to be modified to be
                # relative because hardlink/junction dont resolve relative paths the same
                # way as os.symlink. This is ignored on other operating systems.
                win_source_path = relative_path
            else:

        if not os.path.exists(source_path):
            if os.path.isabs(source_path) and not allow_broken_symlinks:
                # An absolute source path that does not exist will result in a broken link.
                raise SymlinkError(
                    f"The source path ({source_path}) is not relative to the link path "
                    f"({link_path}). Resulting link would be broken so not making link."
                    f"Source path ({source_path}) is absolute but does not exist. Resulting "
                    f"link would be broken so not making link."
                )
            else:
                # os.symlink can create a link when the given source path is relative to
                # the link path. Emulate this behavior and check to see if the source exists
                # relative to the link path ahead of link creation to prevent broken
                # links from being made.
                link_parent_dir = os.path.dirname(link_path)
                relative_path = os.path.join(link_parent_dir, source_path)
                if os.path.exists(relative_path):
                    # In order to work on windows, the source path needs to be modified to be
                    # relative because hardlink/junction dont resolve relative paths the same
                    # way as os.symlink. This is ignored on other operating systems.
                    win_source_path = relative_path
                elif not allow_broken_symlinks:
                    raise SymlinkError(
                        f"The source path ({source_path}) is not relative to the link path "
                        f"({link_path}). Resulting link would be broken so not making link."
                    )

    # Create the symlink
    if not _windows_can_symlink():
    if sys.platform == "win32" and not _windows_can_symlink():
        _windows_create_link(win_source_path, link_path)
    else:
        os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))


def _windows_islink(path: str) -> bool:
def islink(path: str) -> bool:
    """Override os.islink to give correct answer for spack logic.

    For Non-Windows: a link can be determined with the os.path.islink method.
@@ -244,7 +269,7 @@ def _windows_create_hard_link(path: str, link: str):
    CreateHardLink(link, path)


def _windows_readlink(path: str, *, dir_fd=None):
def readlink(path: str, *, dir_fd=None):
    """Spack utility to override of os.readlink method to work cross platform"""
    if _windows_is_hardlink(path):
        return _windows_read_hard_link(path)
@@ -313,16 +338,6 @@ def resolve_link_target_relative_to_the_link(link):
    return os.path.join(link_dir, target)


if sys.platform == "win32":
    symlink = _windows_symlink
    readlink = _windows_readlink
    islink = _windows_islink
else:
    symlink = os.symlink
    readlink = os.readlink
    islink = os.path.islink


class SymlinkError(RuntimeError):
    """Exception class for errors raised while creating symlinks,
    junctions and hard links

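A minimal usage sketch for the per-platform dispatch shown above; the paths are hypothetical, and on POSIX the three names resolve to the plain os functions:

    from llnl.util.symlink import islink, readlink, symlink

    symlink("/opt/app-1.0", "/opt/app")            # os.symlink on POSIX
    assert islink("/opt/app")                      # os.path.islink on POSIX
    assert readlink("/opt/app") == "/opt/app-1.0"  # os.readlink on POSIX
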
@@ -263,9 +263,7 @@ def match_to_ansi(match):
                f"Incomplete color format: '{match.group(0)}' in '{match.string}'"
            )

        color_number = colors.get(color_code, "")
        semi = ";" if color_number else ""
        ansi_code = _escape(f"{styles[style]}{semi}{color_number}", color, enclose, zsh)
        ansi_code = _escape(f"{styles[style]};{colors.get(color_code, '')}", color, enclose, zsh)
        if text:
            return f"{ansi_code}{text}{_escape(0, color, enclose, zsh)}"
        else:

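The two-line change above avoids a dangling ";" when a color code has no entry in the colors table; a sketch of the difference, using toy styles/colors mappings:

    styles = {"*": "1"}   # toy table: bold
    colors = {"g": "32"}  # toy table: green

    color_number = colors.get("q", "")  # unknown color code -> ""
    semi = ";" if color_number else ""
    assert f"{styles['*']}{semi}{color_number}" == "1"     # new form: no trailing ";"
    assert f"{styles['*']};{colors.get('q', '')}" == "1;"  # old form kept the ";"
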
@@ -18,10 +18,9 @@
import threading
import traceback
from contextlib import contextmanager
from multiprocessing.connection import Connection
from threading import Thread
from types import ModuleType
from typing import Callable, Optional
from typing import Optional

import llnl.util.tty as tty

@@ -34,23 +33,8 @@
    pass


esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
# Ansi Control Sequence Introducers (CSI) are a well-defined format
# Standard ECMA-48: Control Functions for Character-Imaging I/O Devices, section 5.4
# https://www.ecma-international.org/wp-content/uploads/ECMA-48_5th_edition_june_1991.pdf
csi_pre = f"{esc}{lbracket}"
csi_param, csi_inter, csi_post = r"[0-?]", r"[ -/]", r"[@-~]"
ansi_csi = f"{csi_pre}{csi_param}*{csi_inter}*{csi_post}"
# General ansi escape sequences have well-defined prefixes,
# but content and suffixes are less reliable.
# Conservatively assume they end with either "<ESC>\" or "<BELL>",
# with no intervening "<ESC>"/"<BELL>" keys or newlines
esc_pre = f"{esc}[@-_]"
esc_content = f"[^{esc}{bell}{newline}]"
esc_post = f"(?:{esc}{bslash}|{bell})"
ansi_esc = f"{esc_pre}{esc_content}*{esc_post}"
# Use this to strip escape sequences
_escape = re.compile(f"{ansi_csi}|{ansi_esc}")
_escape = re.compile(r"\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07")

# control characters for enabling/disabling echo
#
@@ -345,6 +329,49 @@ def close(self):
        self.file.close()


class MultiProcessFd:
    """Return an object which stores a file descriptor and can be passed as an
    argument to a function run with ``multiprocessing.Process``, such that
    the file descriptor is available in the subprocess."""

    def __init__(self, fd):
        self._connection = None
        self._fd = None
        if sys.version_info >= (3, 8):
            self._connection = multiprocessing.connection.Connection(fd)
        else:
            self._fd = fd

    @property
    def fd(self):
        if self._connection:
            return self._connection._handle
        else:
            return self._fd

    def close(self):
        if self._connection:
            self._connection.close()
        else:
            os.close(self._fd)


def close_connection_and_file(multiprocess_fd, file):
    # MultiprocessFd is intended to transmit a FD
    # to a child process, this FD is then opened to a Python File object
    # (using fdopen). In >= 3.8, MultiprocessFd encapsulates a
    # multiprocessing.connection.Connection; Connection closes the FD
    # when it is deleted, and prints a warning about duplicate closure if
    # it is not explicitly closed. In < 3.8, MultiprocessFd encapsulates a
    # simple FD; closing the FD here appears to conflict with
    # closure of the File object (in < 3.8 that is). Therefore this needs
    # to choose whether to close the File or the Connection.
    if sys.version_info >= (3, 8):
        multiprocess_fd.close()
    else:
        file.close()


@contextmanager
def replace_environment(env):
    """Replace the current environment (`os.environ`) with `env`.
@@ -502,20 +529,22 @@ def __enter__(self):
        # forcing debug output.
        self._saved_debug = tty._debug

        # Pipe for redirecting output to logger
        read_fd, self.write_fd = multiprocessing.Pipe(duplex=False)
        # OS-level pipe for redirecting output to logger
        read_fd, write_fd = os.pipe()

        # Pipe for communication back from the daemon
        read_multiprocess_fd = MultiProcessFd(read_fd)

        # Multiprocessing pipe for communication back from the daemon
        # Currently only used to save echo value between uses
        self.parent_pipe, child_pipe = multiprocessing.Pipe(duplex=False)
        self.parent_pipe, child_pipe = multiprocessing.Pipe()

        # Sets a daemon that writes to file what it reads from a pipe
        try:
            # need to pass this b/c multiprocessing closes stdin in child.
            input_fd = None
            input_multiprocess_fd = None
            try:
                if sys.stdin.isatty():
                    input_fd = Connection(os.dup(sys.stdin.fileno()))
                    input_multiprocess_fd = MultiProcessFd(os.dup(sys.stdin.fileno()))
            except BaseException:
                # just don't forward input if this fails
                pass
@@ -524,9 +553,9 @@ def __enter__(self):
            self.process = multiprocessing.Process(
                target=_writer_daemon,
                args=(
                    input_fd,
                    read_fd,
                    self.write_fd,
                    input_multiprocess_fd,
                    read_multiprocess_fd,
                    write_fd,
                    self.echo,
                    self.log_file,
                    child_pipe,
@@ -537,9 +566,9 @@ def __enter__(self):
            self.process.start()

        finally:
            if input_fd:
                input_fd.close()
            read_fd.close()
            if input_multiprocess_fd:
                input_multiprocess_fd.close()
            read_multiprocess_fd.close()

        # Flush immediately before redirecting so that anything buffered
        # goes to the original stream
@@ -557,9 +586,9 @@ def __enter__(self):
            self._saved_stderr = os.dup(sys.stderr.fileno())

            # redirect to the pipe we created above
            os.dup2(self.write_fd.fileno(), sys.stdout.fileno())
            os.dup2(self.write_fd.fileno(), sys.stderr.fileno())
            self.write_fd.close()
            os.dup2(write_fd, sys.stdout.fileno())
            os.dup2(write_fd, sys.stderr.fileno())
            os.close(write_fd)

        else:
            # Handle I/O the Python way. This won't redirect lower-level
@@ -572,7 +601,7 @@ def __enter__(self):
            self._saved_stderr = sys.stderr

            # create a file object for the pipe; redirect to it.
            pipe_fd_out = os.fdopen(self.write_fd.fileno(), "w", closefd=False)
            pipe_fd_out = os.fdopen(write_fd, "w")
            sys.stdout = pipe_fd_out
            sys.stderr = pipe_fd_out

@@ -608,7 +637,6 @@ def __exit__(self, exc_type, exc_val, exc_tb):
        else:
            sys.stdout = self._saved_stdout
            sys.stderr = self._saved_stderr
            self.write_fd.close()

        # print log contents in parent if needed.
        if self.log_file.write_in_parent:
@@ -822,14 +850,14 @@ def force_echo(self):


def _writer_daemon(
    stdin_fd: Optional[Connection],
    read_fd: Connection,
    write_fd: Connection,
    echo: bool,
    log_file_wrapper: FileWrapper,
    control_fd: Connection,
    filter_fn: Optional[Callable[[str], str]],
) -> None:
    stdin_multiprocess_fd,
    read_multiprocess_fd,
    write_fd,
    echo,
    log_file_wrapper,
    control_pipe,
    filter_fn,
):
    """Daemon used by ``log_output`` to write to a log file and to ``stdout``.

    The daemon receives output from the parent process and writes it both
@@ -866,37 +894,43 @@ def _writer_daemon(
    ``StringIO`` in the parent. This is mainly for testing.

    Arguments:
        stdin_fd: optional input from the terminal
        read_fd: pipe for reading from parent's redirected stdout
        echo: initial echo setting -- controlled by user and preserved across multiple writer
            daemons
        log_file_wrapper: file to log all output
        control_pipe: multiprocessing pipe on which to send control information to the parent
        filter_fn: optional function to filter each line of output
        stdin_multiprocess_fd (int): input from the terminal
        read_multiprocess_fd (int): pipe for reading from parent's redirected
            stdout
        echo (bool): initial echo setting -- controlled by user and
            preserved across multiple writer daemons
        log_file_wrapper (FileWrapper): file to log all output
        control_pipe (Pipe): multiprocessing pipe on which to send control
            information to the parent
        filter_fn (callable, optional): function to filter each line of output

    """
    # This process depends on closing all instances of write_pipe to terminate the reading loop
    write_fd.close()
    # If this process was forked, then it will inherit file descriptors from
    # the parent process. This process depends on closing all instances of
    # write_fd to terminate the reading loop, so we close the file descriptor
    # here. Forking is the process spawning method everywhere except Mac OS
    # for Python >= 3.8 and on Windows
    if sys.version_info < (3, 8) or sys.platform != "darwin":
        os.close(write_fd)

    # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
    #    that prevents unbuffered text I/O.
    # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
    # 3. closefd=False because Connection has "ownership"
    read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)
    in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")

    if stdin_fd:
        stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
    if stdin_multiprocess_fd:
        stdin = os.fdopen(stdin_multiprocess_fd.fd)
    else:
        stdin_file = None
        stdin = None

    # list of streams to select from
    istreams = [read_file, stdin_file] if stdin_file else [read_file]
    istreams = [in_pipe, stdin] if stdin else [in_pipe]
    force_echo = False  # parent can force echo for certain output

    log_file = log_file_wrapper.unwrap()

    try:
        with keyboard_input(stdin_file) as kb:
        with keyboard_input(stdin) as kb:
            while True:
                # fix the terminal settings if we recently came to
                # the foreground
@@ -909,12 +943,12 @@ def _writer_daemon(
                # Allow user to toggle echo with 'v' key.
                # Currently ignores other chars.
                # only read stdin if we're in the foreground
                if stdin_file and stdin_file in rlist and not _is_background_tty(stdin_file):
                if stdin in rlist and not _is_background_tty(stdin):
                    # it's possible to be backgrounded between the above
                    # check and the read, so we ignore SIGTTIN here.
                    with ignore_signal(signal.SIGTTIN):
                        try:
                            if stdin_file.read(1) == "v":
                            if stdin.read(1) == "v":
                                echo = not echo
                        except IOError as e:
                            # If SIGTTIN is ignored, the system gives EIO
@@ -923,13 +957,13 @@ def _writer_daemon(
                            if e.errno != errno.EIO:
                                raise

                if read_file in rlist:
                if in_pipe in rlist:
                    line_count = 0
                    try:
                        while line_count < 100:
                            # Handle output from the calling process.
                            try:
                                line = _retry(read_file.readline)()
                                line = _retry(in_pipe.readline)()
                            except UnicodeDecodeError:
                                # installs like --test=root gpgme produce non-UTF8 logs
                                line = "<line lost: output was not encoded as UTF-8>\n"
@@ -958,7 +992,7 @@ def _writer_daemon(
                            if xoff in controls:
                                force_echo = False

                            if not _input_available(read_file):
                            if not _input_available(in_pipe):
                                break
                    finally:
                        if line_count > 0:
@@ -973,14 +1007,14 @@ def _writer_daemon(
    finally:
        # send written data back to parent if we used a StringIO
        if isinstance(log_file, io.StringIO):
            control_fd.send(log_file.getvalue())
            control_pipe.send(log_file.getvalue())
        log_file_wrapper.close()
        read_fd.close()
        if stdin_fd:
            stdin_fd.close()
        close_connection_and_file(read_multiprocess_fd, in_pipe)
        if stdin_multiprocess_fd:
            close_connection_and_file(stdin_multiprocess_fd, stdin)

        # send echo value back to the parent so it can be preserved.
        control_fd.send(echo)
        control_pipe.send(echo)


def _retry(function):

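The hunks above replace the MultiProcessFd wrapper with multiprocessing Connection objects; a minimal sketch of the underlying pattern, independent of Spack:

    import multiprocessing
    import os

    # duplex=False returns a (receiver, sender) pair of Connection objects.
    read_conn, write_conn = multiprocessing.Pipe(duplex=False)

    # The raw descriptor stays reachable for os.dup2()-style redirection, and
    # closefd=False leaves ownership of the descriptor with the Connection.
    reader = os.fdopen(read_conn.fileno(), "r", 1, encoding="utf-8", closefd=False)
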
@@ -3,15 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import re
from typing import Optional

import spack.paths
import spack.util.git

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "1.0.0-alpha.1"
__version__ = "0.22.2"
spack_version = __version__


@@ -26,58 +19,4 @@ def __try_int(v):
spack_version_info = tuple([__try_int(v) for v in __version__.split(".")])


def get_spack_commit() -> Optional[str]:
    """Get the Spack git commit sha.

    Returns:
        (str or None) the commit sha if available, otherwise None
    """
    git_path = os.path.join(spack.paths.prefix, ".git")
    if not os.path.exists(git_path):
        return None

    git = spack.util.git.git()
    if not git:
        return None

    rev = git(
        "-C",
        spack.paths.prefix,
        "rev-parse",
        "HEAD",
        output=str,
        error=os.devnull,
        fail_on_error=False,
    )
    if git.returncode != 0:
        return None

    match = re.match(r"[a-f\d]{7,}$", rev)
    return match.group(0) if match else None


def get_version() -> str:
    """Get a descriptive version of this instance of Spack.

    Outputs '<PEP440 version> (<git commit sha>)'.

    The commit sha is only added when available.
    """
    commit = get_spack_commit()
    if commit:
        return f"{spack_version} ({commit})"
    return spack_version


def get_short_version() -> str:
    """Short Spack version."""
    return f"{spack_version_info[0]}.{spack_version_info[1]}"


__all__ = [
    "spack_version_info",
    "spack_version",
    "get_version",
    "get_spack_commit",
    "get_short_version",
]
__all__ = ["spack_version_info", "spack_version"]

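A quick check of the tuple form, assuming the v0.22.2 values shown in this hunk:

    import spack

    assert spack.spack_version == "0.22.2"
    assert spack.spack_version_info[:3] == (0, 22, 2)
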
lib/spack/spack/abi.py (new file, 131 lines)
@@ -0,0 +1,131 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

from llnl.util.lang import memoized

import spack.spec
import spack.version
from spack.compilers.clang import Clang
from spack.util.executable import Executable, ProcessError


class ABI:
    """This class provides methods to test ABI compatibility between specs.
    The current implementation is rather rough and could be improved."""

    def architecture_compatible(
        self, target: spack.spec.Spec, constraint: spack.spec.Spec
    ) -> bool:
        """Return true if architecture of target spec is ABI compatible
        to the architecture of constraint spec. If either the target
        or constraint specs have no architecture, target is also defined
        as architecture ABI compatible to constraint."""
        return (
            not target.architecture
            or not constraint.architecture
            or target.architecture.intersects(constraint.architecture)
        )

    @memoized
    def _gcc_get_libstdcxx_version(self, version):
        """Returns gcc ABI compatibility info by getting the library version of
        a compiler's libstdc++ or libgcc_s"""
        from spack.build_environment import dso_suffix

        spec = spack.spec.CompilerSpec("gcc", version)
        compilers = spack.compilers.compilers_for_spec(spec)
        if not compilers:
            return None
        compiler = compilers[0]
        rungcc = None
        libname = None
        output = None
        if compiler.cxx:
            rungcc = Executable(compiler.cxx)
            libname = "libstdc++." + dso_suffix
        elif compiler.cc:
            rungcc = Executable(compiler.cc)
            libname = "libgcc_s." + dso_suffix
        else:
            return None
        try:
            # Some gcc's are actually clang and don't respond properly to
            # --print-file-name (they just print the filename, not the
            # full path). Ignore these and expect them to be handled as clang.
            if Clang.default_version(rungcc.exe[0]) != "unknown":
                return None

            output = rungcc("--print-file-name=%s" % libname, output=str)
        except ProcessError:
            return None
        if not output:
            return None
        libpath = os.path.realpath(output.strip())
        if not libpath:
            return None
        return os.path.basename(libpath)

    @memoized
    def _gcc_compiler_compare(self, pversion, cversion):
        """Returns true iff the gcc version pversion and cversion
        are ABI compatible."""
        plib = self._gcc_get_libstdcxx_version(pversion)
        clib = self._gcc_get_libstdcxx_version(cversion)
        if not plib or not clib:
            return False
        return plib == clib

    def _intel_compiler_compare(
        self, pversion: spack.version.ClosedOpenRange, cversion: spack.version.ClosedOpenRange
    ) -> bool:
        """Returns true iff the intel version pversion and cversion
        are ABI compatible"""

        # Test major and minor versions. Ignore build version.
        pv = pversion.lo
        cv = cversion.lo
        return pv.up_to(2) == cv.up_to(2)

    def compiler_compatible(
        self, parent: spack.spec.Spec, child: spack.spec.Spec, loose: bool = False
    ) -> bool:
        """Return true if compilers for parent and child are ABI compatible."""
        if not parent.compiler or not child.compiler:
            return True

        if parent.compiler.name != child.compiler.name:
            # Different compiler families are assumed ABI incompatible
            return False

        if loose:
            return True

        # TODO: Can we move the specialized ABI matching stuff
        # TODO: into compiler classes?
        for pversion in parent.compiler.versions:
            for cversion in child.compiler.versions:
                # For a few compilers use specialized comparisons.
                # Otherwise match on version match.
                if pversion.intersects(cversion):
                    return True
                elif parent.compiler.name == "gcc" and self._gcc_compiler_compare(
                    pversion, cversion
                ):
                    return True
                elif parent.compiler.name == "intel" and self._intel_compiler_compare(
                    pversion, cversion
                ):
                    return True
        return False

    def compatible(
        self, target: spack.spec.Spec, constraint: spack.spec.Spec, loose: bool = False
    ) -> bool:
        """Returns true if target spec is ABI compatible to constraint spec"""
        return self.architecture_compatible(target, constraint) and self.compiler_compatible(
            target, constraint, loose=loose
        )

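A sketch of how the new ABI class might be driven; the specs here are abstract and hypothetical, and whether abstract specs carry enough information depends on Spack internals:

    import spack.spec
    from spack.abi import ABI

    target = spack.spec.Spec("zlib %gcc@12.3.0")
    constraint = spack.spec.Spec("cmake %gcc@12.2.0")

    # Same compiler family with loose matching: compiler_compatible() short-circuits to True.
    ABI().compatible(target, constraint, loose=True)
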
@@ -42,21 +42,15 @@ def _search_duplicate_compilers(error_cls):
import inspect
import io
import itertools
import os
import pathlib
import pickle
import re
import warnings
from typing import Iterable, List, Set, Tuple
from urllib.request import urlopen

import llnl.util.lang
from llnl.string import plural

import spack.builder
import spack.config
import spack.deptypes
import spack.fetch_strategy
import spack.patch
import spack.repo
import spack.spec
@@ -79,9 +73,7 @@ def __init__(self, summary, details):
        self.details = tuple(details)

    def __str__(self):
        if self.details:
            return f"{self.summary}\n" + "\n".join(f"    {detail}" for detail in self.details)
        return self.summary
        return self.summary + "\n" + "\n".join(["    " + detail for detail in self.details])

    def __eq__(self, other):
        if self.summary != other.summary or self.details != other.details:
@@ -218,11 +210,6 @@ def _search_duplicate_compilers(error_cls):
    group="configs", tag="CFG-PACKAGES", description="Sanity checks on packages.yaml", kwargs=()
)

#: Sanity checks on packages.yaml
config_repos = AuditClass(
    group="configs", tag="CFG-REPOS", description="Sanity checks on repositories", kwargs=()
)


@config_packages
def _search_duplicate_specs_in_externals(error_cls):
@@ -265,6 +252,40 @@ def _search_duplicate_specs_in_externals(error_cls):
    return errors


@config_packages
def _deprecated_preferences(error_cls):
    """Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
    # TODO (v0.23): remove this audit as the attributes will not be allowed in config
    errors = []
    packages_yaml = spack.config.CONFIG.get_config("packages")

    def make_error(attribute_name, config_data, summary):
        s = io.StringIO()
        s.write("Occurring in the following file:\n")
        dict_view = syaml.syaml_dict((k, v) for k, v in config_data.items() if k == attribute_name)
        syaml.dump_config(dict_view, stream=s, blame=True)
        return error_cls(summary=summary, details=[s.getvalue()])

    if "all" in packages_yaml and "version" in packages_yaml["all"]:
        summary = "Using the deprecated 'version' attribute under 'packages:all'"
        errors.append(make_error("version", packages_yaml["all"], summary))

    for package_name in packages_yaml:
        if package_name == "all":
            continue

        package_conf = packages_yaml[package_name]
        for attribute in ("compiler", "providers", "target"):
            if attribute not in package_conf:
                continue
            summary = (
                f"Using the deprecated '{attribute}' attribute " f"under 'packages:{package_name}'"
            )
            errors.append(make_error(attribute, package_conf, summary))

    return errors


@config_packages
def _avoid_mismatched_variants(error_cls):
    """Warns if variant preferences have mismatched types or names."""
@@ -285,7 +306,7 @@ def _avoid_mismatched_variants(error_cls):
            pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
            for variant in current_spec.variants.values():
                # Variant does not exist at all
                if variant.name not in pkg_cls.variant_names():
                if variant.name not in pkg_cls.variants:
                    summary = (
                        f"Setting a preference for the '{pkg_name}' package to the "
                        f"non-existing variant '{variant.name}'"
@@ -294,8 +315,9 @@ def _avoid_mismatched_variants(error_cls):
                    continue

                # Variant cannot accept this value
                s = spack.spec.Spec(pkg_name)
                try:
                    spack.variant.prevalidate_variant_value(pkg_cls, variant, strict=True)
                    s.update_variant_validate(variant.name, variant.value)
                except Exception:
                    summary = (
                        f"Setting the variant '{variant.name}' of the '{pkg_name}' package "
@@ -329,43 +351,6 @@ def _wrongly_named_spec(error_cls):
    return errors


@config_packages
def _ensure_all_virtual_packages_have_default_providers(error_cls):
    """All virtual packages must have a default provider explicitly set."""
    configuration = spack.config.create()
    defaults = configuration.get("packages", scope="defaults")
    default_providers = defaults["all"]["providers"]
    virtuals = spack.repo.PATH.provider_index.providers
    default_providers_filename = configuration.scopes["defaults"].get_section_filename("packages")

    return [
        error_cls(f"'{virtual}' must have a default provider in {default_providers_filename}", [])
        for virtual in virtuals
        if virtual not in default_providers
    ]


@config_repos
def _ensure_no_folders_without_package_py(error_cls):
    """Check that we don't leave any folder without a package.py in repos"""
    errors = []
    for repository in spack.repo.PATH.repos:
        missing = []
        for entry in os.scandir(repository.packages_path):
            if not entry.is_dir():
                continue
            package_py = pathlib.Path(entry.path) / spack.repo.package_file_name
            if not package_py.exists():
                missing.append(entry.path)
        if missing:
            summary = (
                f"The '{repository.namespace}' repository misses a package.py file"
                f" in the following folders"
            )
            errors.append(error_cls(summary=summary, details=[f"{x}" for x in missing]))
    return errors


def _make_config_error(config_data, summary, error_cls):
    s = io.StringIO()
    s.write("Occurring in the following file:\n")
@@ -389,14 +374,6 @@ def _make_config_error(config_data, summary, error_cls):
)


package_deprecated_attributes = AuditClass(
    group="packages",
    tag="PKG-DEPRECATED-ATTRIBUTES",
    description="Sanity checks to preclude use of deprecated package attributes",
    kwargs=("pkgs",),
)


package_properties = AuditClass(
    group="packages",
    tag="PKG-PROPERTIES",
@@ -415,23 +392,22 @@ def _make_config_error(config_data, summary, error_cls):
)


@package_properties
@package_directives
def _check_build_test_callbacks(pkgs, error_cls):
    """Ensure stand-alone test methods are not included in build-time callbacks.

    Test methods are for checking the installed software as stand-alone tests.
    They could also be called during the post-install phase of a build.
    """
    """Ensure stand-alone test method is not included in build-time callbacks"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)

        has_test_method = test_callbacks and any([m.startswith("test_") for m in test_callbacks])
        # TODO (post-34236): "test*"->"test_*" once remove deprecated methods
        # TODO (post-34236): "test"->"test_" once remove deprecated methods
        has_test_method = test_callbacks and any([m.startswith("test") for m in test_callbacks])
        if has_test_method:
            msg = f"Package {pkg_name} includes stand-alone test methods in build-time checks."
            callbacks = ", ".join(test_callbacks)
            instr = f"Remove the following from 'build_time_test_callbacks': {callbacks}"
            msg = '{0} package contains "test*" method(s) in ' "build_time_test_callbacks"
            instr = 'Remove all methods whose names start with "test" from: [{0}]'.format(
                ", ".join(test_callbacks)
            )
            errors.append(error_cls(msg.format(pkg_name), [instr]))

    return errors
@@ -445,10 +421,6 @@ def _check_patch_urls(pkgs, error_cls):
        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
        r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
    )
    github_pull_commits_re = (
        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
        r".+/.+/pull/\d+/commits/[a-fA-F0-9]+\.(?:patch|diff)"
    )
    # Only .diff URLs have stable/full hashes:
    # https://forum.gitlab.com/t/patches-with-full-index/29313
    gitlab_patch_url_re = (
@@ -464,24 +436,14 @@ def _check_patch_urls(pkgs, error_cls):
            if not isinstance(patch, spack.patch.UrlPatch):
                continue

            if re.match(github_pull_commits_re, patch.url):
                url = re.sub(r"/pull/\d+/commits/", r"/commit/", patch.url)
                url = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", url)
                errors.append(
                    error_cls(
                        f"patch URL in package {pkg_cls.name} "
                        + "must not be a pull request commit; "
                        + f"instead use {url}",
                        [patch.url],
                    )
                )
            elif re.match(github_patch_url_re, patch.url):
            if re.match(github_patch_url_re, patch.url):
                full_index_arg = "?full_index=1"
                if not patch.url.endswith(full_index_arg):
                    errors.append(
                        error_cls(
                            f"patch URL in package {pkg_cls.name} "
                            + f"must end with {full_index_arg}",
                            "patch URL in package {0} must end with {1}".format(
                                pkg_cls.name, full_index_arg
                            ),
                            [patch.url],
                        )
                    )
@@ -489,7 +451,9 @@ def _check_patch_urls(pkgs, error_cls):
                if not patch.url.endswith(".diff"):
                    errors.append(
                        error_cls(
                            f"patch URL in package {pkg_cls.name} must end with .diff",
                            "patch URL in package {0} must end with .diff".format(
                                pkg_cls.name
                            ),
                            [patch.url],
                        )
                    )
@@ -506,7 +470,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
        name_definitions = collections.defaultdict(list)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

        for cls_item in pkg_cls.__mro__:
        for cls_item in inspect.getmro(pkg_cls):
            for name in RESERVED_NAMES:
                current_value = cls_item.__dict__.get(name)
                if current_value is None:
@@ -529,58 +493,13 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
    return errors


@package_deprecated_attributes
def _search_for_deprecated_package_methods(pkgs, error_cls):
    """Ensure the package doesn't define or use deprecated methods"""
    DEPRECATED_METHOD = (("test", "a name starting with 'test_'"),)
    DEPRECATED_USE = (
        ("self.cache_extra_test_sources(", "cache_extra_test_sources(self, ..)"),
        ("self.install_test_root(", "install_test_root(self, ..)"),
        ("self.run_test(", "test_part(self, ..)"),
    )
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
        method_errors = collections.defaultdict(list)
        for name, function in methods:
            for deprecated_name, alternate in DEPRECATED_METHOD:
                if name == deprecated_name:
                    msg = f"Rename '{deprecated_name}' method to {alternate} instead."
                    method_errors[name].append(msg)

            source = inspect.getsource(function)
            for deprecated_name, alternate in DEPRECATED_USE:
                if deprecated_name in source:
                    msg = f"Change '{deprecated_name}' to '{alternate}' in '{name}' method."
                    method_errors[name].append(msg)

        num_methods = len(method_errors)
        if num_methods > 0:
            methods = plural(num_methods, "method", show_n=False)
            error_msg = (
                f"Package '{pkg_name}' implements or uses unsupported deprecated {methods}."
            )
            instr = [f"Make changes to '{pkg_cls.__module__}':"]
            for name in sorted(method_errors):
                instr.extend([f"    {msg}" for msg in method_errors[name]])
            errors.append(error_cls(error_msg, instr))

    return errors


@package_properties
def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
    """Ensure package names are lowercase and consistent"""
    reserved_names = ("all",)
    badname_regex, errors = re.compile(r"[_A-Z]"), []
    for pkg_name in pkgs:
        if pkg_name in reserved_names:
            error_msg = f"The name '{pkg_name}' is reserved, and cannot be used for packages"
            errors.append(error_cls(error_msg, []))

        if badname_regex.search(pkg_name):
            error_msg = f"Package name '{pkg_name}' should be lowercase and must not contain '_'"
            error_msg = "Package name '{}' is either lowercase or conatine '_'".format(pkg_name)
            errors.append(error_cls(error_msg, []))
    return errors

@@ -719,17 +638,12 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

        # values are either ConditionalValue objects or the values themselves
        build_system_names = set(
            v.value if isinstance(v, spack.variant.ConditionalValue) else v
            for _, variant in pkg_cls.variant_definitions("build_system")
            for v in variant.values
        )
        builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in build_system_names]

        buildsystem_variant, _ = pkg_cls.variants["build_system"]
        buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
        builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]
        module = pkg_cls.module
        has_builders_in_package_py = any(
            spack.builder.get_builder_class(pkg_cls, name) for name in builder_cls_names
            getattr(module, name, False) for name in builder_cls_names
        )
        if not has_builders_in_package_py:
            continue
@@ -745,171 +659,6 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
    return errors


class DeprecatedMagicGlobals(ast.NodeVisitor):
    def __init__(self, magic_globals: Iterable[str]):
        super().__init__()

        self.magic_globals: Set[str] = set(magic_globals)

        # State to track whether we're in a class function
        self.depth: int = 0
        self.in_function: bool = False
        self.path = (ast.Module, ast.ClassDef, ast.FunctionDef)

        # Defined locals in the current function (heuristically at least)
        self.locals: Set[str] = set()

        # List of (name, lineno) tuples for references to magic globals
        self.references_to_globals: List[Tuple[str, int]] = []

    def descend_in_function_def(self, node: ast.AST) -> None:
        if not isinstance(node, self.path[self.depth]):
            return
        self.depth += 1
        if self.depth == len(self.path):
            self.in_function = True
        super().generic_visit(node)
        if self.depth == len(self.path):
            self.in_function = False
            self.locals.clear()
        self.depth -= 1

    def generic_visit(self, node: ast.AST) -> None:
        # Recurse into function definitions
        if self.depth < len(self.path):
            return self.descend_in_function_def(node)
        elif not self.in_function:
            return
        elif isinstance(node, ast.Global):
            for name in node.names:
                if name in self.magic_globals:
                    self.references_to_globals.append((name, node.lineno))
        elif isinstance(node, ast.Assign):
            # visit the rhs before lhs
            super().visit(node.value)
            for target in node.targets:
                super().visit(target)
        elif isinstance(node, ast.Name) and node.id in self.magic_globals:
            if isinstance(node.ctx, ast.Load) and node.id not in self.locals:
                self.references_to_globals.append((node.id, node.lineno))
            elif isinstance(node.ctx, ast.Store):
                self.locals.add(node.id)
        else:
            super().generic_visit(node)


@package_properties
def _uses_deprecated_globals(pkgs, error_cls):
    """Ensure that packages do not use deprecated globals"""
    errors = []

    for pkg_name in pkgs:
        # some packages scheduled to be removed in v0.23 are not worth fixing.
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        if all(v.get("deprecated", False) for v in pkg_cls.versions.values()):
            continue

        file = spack.repo.PATH.filename_for_package_name(pkg_name)
        tree = ast.parse(open(file).read())
        visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args", "std_pip_args"))
        visitor.visit(tree)
        if visitor.references_to_globals:
            errors.append(
                error_cls(
                    f"Package '{pkg_name}' uses deprecated globals",
                    [
                        f"{file}:{line} references '{name}'"
                        for name, line in visitor.references_to_globals
                    ],
                )
            )

    return errors


@package_properties
def _ensure_test_docstring(pkgs, error_cls):
    """Ensure stand-alone test methods have a docstring.

    The docstring of a test method is implicitly used as the description of
    the corresponding test part during test results reporting.
    """
    doc_regex = r'\s+("""[^"]+""")'
|
||||
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
|
||||
method_names = []
|
||||
for name, test_fn in methods:
|
||||
if not name.startswith("test_"):
|
||||
continue
|
||||
|
||||
# Ensure the test method has a docstring
|
||||
source = inspect.getsource(test_fn)
|
||||
match = re.search(doc_regex, source)
|
||||
if match is None or len(match.group(0).replace('"', "").strip()) == 0:
|
||||
method_names.append(name)
|
||||
|
||||
num_methods = len(method_names)
|
||||
if num_methods > 0:
|
||||
methods = plural(num_methods, "method", show_n=False)
|
||||
docstrings = plural(num_methods, "docstring", show_n=False)
|
||||
msg = f"Package {pkg_name} has test {methods} with empty or missing {docstrings}."
|
||||
names = ", ".join(method_names)
|
||||
instr = [
|
||||
"Docstrings are used as descriptions in test outputs.",
|
||||
f"Add a concise summary to the following {methods} in '{pkg_cls.__module__}':",
|
||||
f"{names}",
|
||||
]
|
||||
errors.append(error_cls(msg, instr))
|
||||
|
||||
return errors
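Outside this audit framework, the same docstring-presence check can be sketched with inspect.getdoc from the standard library; the Demo class below is purely illustrative:

import inspect

class Demo:
    def test_documented(self):
        """Check that the demo runs."""

    def test_undocumented(self):
        pass

# collect stand-alone test methods whose docstring is empty or missing
missing = [
    name
    for name, fn in inspect.getmembers(Demo, predicate=inspect.isfunction)
    if name.startswith("test_") and not (inspect.getdoc(fn) or "").strip()
]
print(missing)   # ['test_undocumented']
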
|
||||
|
||||
|
||||
@package_properties
|
||||
def _ensure_test_implemented(pkgs, error_cls):
|
||||
"""Ensure stand-alone test methods are implemented.
|
||||
|
||||
The test method is also required to be non-empty.
|
||||
"""
|
||||
|
||||
def skip(line):
|
||||
ln = line.strip()
|
||||
return ln.startswith("#") or "pass" in ln
|
||||
|
||||
doc_regex = r'\s+("""[^"]+""")'
|
||||
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
|
||||
method_names = []
|
||||
for name, test_fn in methods:
|
||||
if not name.startswith("test_"):
|
||||
continue
|
||||
|
||||
source = inspect.getsource(test_fn)
|
||||
|
||||
# Attempt to ensure the test method is implemented.
|
||||
impl = re.sub(doc_regex, r"", source).splitlines()[1:]
|
||||
lines = [ln.strip() for ln in impl if not skip(ln)]
|
||||
if not lines:
|
||||
method_names.append(name)
|
||||
|
||||
num_methods = len(method_names)
|
||||
if num_methods > 0:
|
||||
methods = plural(num_methods, "method", show_n=False)
|
||||
msg = f"Package {pkg_name} has empty or missing test {methods}."
|
||||
names = ", ".join(method_names)
|
||||
instr = [
|
||||
f"Implement or remove the following {methods} from '{pkg_cls.__module__}': {names}"
|
||||
]
|
||||
errors.append(error_cls(msg, instr))
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
@package_https_directives
|
||||
def _linting_package_file(pkgs, error_cls):
|
||||
"""Check for correctness of links"""
|
||||
@@ -1015,14 +764,7 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
|
||||
# depends_on('foo+bar ^fee+baz')
|
||||
#
|
||||
# but we'd like to have two dependencies listed instead.
|
||||
nested_dependencies = dep.spec.edges_to_dependencies()
|
||||
# Filter out pure build dependencies, like:
|
||||
#
|
||||
# depends_on('foo+bar%gcc')
|
||||
#
|
||||
nested_dependencies = [
|
||||
x for x in nested_dependencies if x.depflag != spack.deptypes.BUILD
|
||||
]
|
||||
nested_dependencies = dep.spec.dependencies()
|
||||
if nested_dependencies:
|
||||
summary = f"{pkg_name}: nested dependency declaration '{dep.spec}'"
|
||||
ndir = len(nested_dependencies) + 1
|
||||
@@ -1083,22 +825,20 @@ def check_virtual_with_variants(spec, msg):
|
||||
|
||||
# check variants
|
||||
dependency_variants = dep.spec.variants
|
||||
for name, variant in dependency_variants.items():
|
||||
for name, value in dependency_variants.items():
|
||||
try:
|
||||
spack.variant.prevalidate_variant_value(
|
||||
dependency_pkg_cls, variant, dep.spec, strict=True
|
||||
)
|
||||
v, _ = dependency_pkg_cls.variants[name]
|
||||
v.validate_or_raise(value, pkg_cls=dependency_pkg_cls)
|
||||
except Exception as e:
|
||||
summary = (
|
||||
f"{pkg_name}: wrong variant used for dependency in 'depends_on()'"
|
||||
)
|
||||
|
||||
error_msg = str(e)
|
||||
if isinstance(e, KeyError):
|
||||
error_msg = (
|
||||
f"variant {str(e).strip()} does not exist in package {dep_name}"
|
||||
f" in package '{dep_name}'"
|
||||
)
|
||||
error_msg += f" in package '{dep_name}'"
|
||||
|
||||
errors.append(
|
||||
error_cls(summary=summary, details=[error_msg, f"in {filename}"])
|
||||
@@ -1110,38 +850,39 @@ def check_virtual_with_variants(spec, msg):
|
||||
@package_directives
|
||||
def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
|
||||
"""Ensures that variant defaults are present and parsable from cli"""
|
||||
|
||||
def check_variant(pkg_cls, variant, vname):
|
||||
# bool is a subclass of int in python. Permitting a default that is an instance
|
||||
# of 'int' means both foo=false and foo=0 are accepted. Other falsish values are
|
||||
# not allowed, since they can't be parsed from CLI ('foo=')
|
||||
default_is_parsable = isinstance(variant.default, int) or variant.default
|
||||
|
||||
if not default_is_parsable:
|
||||
msg = f"Variant '{vname}' of package '{pkg_cls.name}' has an unparsable default value"
|
||||
return [error_cls(msg, [])]
|
||||
|
||||
try:
|
||||
vspec = variant.make_default()
|
||||
except spack.variant.MultipleValuesInExclusiveVariantError:
|
||||
msg = f"Can't create default value for variant '{vname}' in package '{pkg_cls.name}'"
|
||||
return [error_cls(msg, [])]
|
||||
|
||||
try:
|
||||
variant.validate_or_raise(vspec, pkg_cls.name)
|
||||
except spack.variant.InvalidVariantValueError:
|
||||
msg = "Default value of variant '{vname}' in package '{pkg.name}' is invalid"
|
||||
question = "Is it among the allowed values?"
|
||||
return [error_cls(msg, [question])]
|
||||
|
||||
return []
|
||||
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
for vname in pkg_cls.variant_names():
|
||||
for _, variant_def in pkg_cls.variant_definitions(vname):
|
||||
errors.extend(check_variant(pkg_cls, variant_def, vname))
|
||||
for variant_name, entry in pkg_cls.variants.items():
|
||||
variant, _ = entry
|
||||
default_is_parsable = (
|
||||
# Permitting a default that is an instance on 'int' permits
|
||||
# to have foo=false or foo=0. Other falsish values are
|
||||
# not allowed, since they can't be parsed from cli ('foo=')
|
||||
isinstance(variant.default, int)
|
||||
or variant.default
|
||||
)
|
||||
if not default_is_parsable:
|
||||
error_msg = "Variant '{}' of package '{}' has a bad default value"
|
||||
errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
|
||||
continue
|
||||
|
||||
try:
|
||||
vspec = variant.make_default()
|
||||
except spack.variant.MultipleValuesInExclusiveVariantError:
|
||||
error_msg = "Cannot create a default value for the variant '{}' in package '{}'"
|
||||
errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
|
||||
continue
|
||||
|
||||
try:
|
||||
variant.validate_or_raise(vspec, pkg_cls=pkg_cls)
|
||||
except spack.variant.InvalidVariantValueError:
|
||||
error_msg = (
|
||||
"The default value of the variant '{}' in package '{}' failed validation"
|
||||
)
|
||||
question = "Is it among the allowed values?"
|
||||
errors.append(error_cls(error_msg.format(variant_name, pkg_name), [question]))
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
@@ -1151,11 +892,11 @@ def _ensure_variants_have_descriptions(pkgs, error_cls):
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
for name in pkg_cls.variant_names():
|
||||
for when, variant in pkg_cls.variant_definitions(name):
|
||||
if not variant.description:
|
||||
msg = f"Variant '{name}' in package '{pkg_name}' is missing a description"
|
||||
errors.append(error_cls(msg, []))
|
||||
for variant_name, entry in pkg_cls.variants.items():
|
||||
variant, _ = entry
|
||||
if not variant.description:
|
||||
error_msg = "Variant '{}' in package '{}' is missing a description"
|
||||
errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
|
||||
|
||||
return errors
|
||||
|
||||
@@ -1212,26 +953,29 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
|
||||
|
||||
|
||||
def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
|
||||
variant_exceptions = (
|
||||
spack.variant.InconsistentValidationError,
|
||||
spack.variant.MultipleValuesInExclusiveVariantError,
|
||||
spack.variant.InvalidVariantValueError,
|
||||
KeyError,
|
||||
)
|
||||
errors = []
|
||||
variant_names = pkg.variant_names()
|
||||
summary = f"{pkg.name}: wrong variant in '{directive}' directive"
|
||||
filename = spack.repo.PATH.filename_for_package_name(pkg.name)
|
||||
|
||||
for name, v in constraint.variants.items():
|
||||
if name not in variant_names:
|
||||
msg = f"variant {name} does not exist in {pkg.name}"
|
||||
errors.append(error_cls(summary=summary, details=[msg, f"in {filename}"]))
|
||||
continue
|
||||
|
||||
try:
|
||||
spack.variant.prevalidate_variant_value(pkg, v, constraint, strict=True)
|
||||
except (
|
||||
spack.variant.InconsistentValidationError,
|
||||
spack.variant.MultipleValuesInExclusiveVariantError,
|
||||
spack.variant.InvalidVariantValueError,
|
||||
) as e:
|
||||
msg = str(e).strip()
|
||||
errors.append(error_cls(summary=summary, details=[msg, f"in {filename}"]))
|
||||
variant, _ = pkg.variants[name]
|
||||
variant.validate_or_raise(v, pkg_cls=pkg)
|
||||
except variant_exceptions as e:
|
||||
summary = pkg.name + ': wrong variant in "{0}" directive'
|
||||
summary = summary.format(directive)
|
||||
filename = spack.repo.PATH.filename_for_package_name(pkg.name)
|
||||
|
||||
error_msg = str(e).strip()
|
||||
if isinstance(e, KeyError):
|
||||
error_msg = "the variant {0} does not exist".format(error_msg)
|
||||
|
||||
err = error_cls(summary=summary, details=[error_msg, "in " + filename])
|
||||
|
||||
errors.append(err)
|
||||
|
||||
return errors
|
||||
|
||||
@@ -1269,10 +1013,9 @@ def _extracts_errors(triggers, summary):
|
||||
for dname in dnames
|
||||
)
|
||||
|
||||
for when, variants_by_name in pkg_cls.variants.items():
|
||||
for vname, variant in variants_by_name.items():
|
||||
summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant"
|
||||
errors.extend(_extracts_errors([when], summary))
|
||||
for vname, (variant, triggers) in pkg_cls.variants.items():
|
||||
summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant"
|
||||
errors.extend(_extracts_errors(triggers, summary))
|
||||
|
||||
for when, providers, details in _error_items(pkg_cls.provided):
|
||||
errors.extend(
|
||||
File diff suppressed because it is too large
@@ -9,7 +9,6 @@
|
||||
all_core_root_specs,
|
||||
ensure_clingo_importable_or_raise,
|
||||
ensure_core_dependencies,
|
||||
ensure_file_in_path_or_raise,
|
||||
ensure_gpg_in_path_or_raise,
|
||||
ensure_patchelf_in_path_or_raise,
|
||||
)
|
||||
@@ -20,7 +19,6 @@
|
||||
"is_bootstrapping",
|
||||
"ensure_bootstrap_configuration",
|
||||
"ensure_core_dependencies",
|
||||
"ensure_file_in_path_or_raise",
|
||||
"ensure_gpg_in_path_or_raise",
|
||||
"ensure_clingo_importable_or_raise",
|
||||
"ensure_patchelf_in_path_or_raise",
|
||||
|
@@ -4,8 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common basic functions used through the spack.bootstrap package"""
import fnmatch
import glob
import importlib
import os.path
import re
import sys
@@ -30,7 +28,7 @@

def _python_import(module: str) -> bool:
    try:
        importlib.import_module(module)
        __import__(module)
    except ImportError:
        return False
    return True

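A minimal standalone equivalent of the import probe above, using importlib from the standard library; the module names in the usage line are just examples:

import importlib

def can_import(module: str) -> bool:
    """Return True if 'module' is importable in the current interpreter."""
    try:
        importlib.import_module(module)
    except ImportError:
        return False
    return True

print(can_import("json"), can_import("definitely_not_installed"))   # True False
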
@@ -61,19 +59,10 @@ def _try_import_from_store(
|
||||
python, *_ = candidate_spec.dependencies("python-venv")
|
||||
else:
|
||||
python, *_ = candidate_spec.dependencies("python")
|
||||
|
||||
# if python is installed, ask it for the layout
|
||||
if python.installed:
|
||||
module_paths = [
|
||||
os.path.join(candidate_spec.prefix, python.package.purelib),
|
||||
os.path.join(candidate_spec.prefix, python.package.platlib),
|
||||
]
|
||||
# otherwise search for the site-packages directory
|
||||
# (clingo from binaries with truncated python-venv runtime)
|
||||
else:
|
||||
module_paths = glob.glob(
|
||||
os.path.join(candidate_spec.prefix, "lib", "python*", "site-packages")
|
||||
)
|
||||
module_paths = [
|
||||
os.path.join(candidate_spec.prefix, python.package.purelib),
|
||||
os.path.join(candidate_spec.prefix, python.package.platlib),
|
||||
]
|
||||
path_before = list(sys.path)
|
||||
|
||||
# NOTE: try module_paths first and last, last allows an existing version in path
|
||||
@@ -224,19 +213,15 @@ def _root_spec(spec_str: str) -> str:

    Args:
        spec_str: spec to be bootstrapped. Must be without compiler and target.
    """
    # Add a compiler and platform requirement to the root spec.
    # Add a compiler requirement to the root spec.
    platform = str(spack.platforms.host())

    # FIXME (compiler as nodes): recover the compiler for source bootstrapping
    # if platform == "darwin":
    #     spec_str += " %apple-clang"
    if platform == "windows":
        spec_str += " %msvc"
    # elif platform == "linux":
    #     spec_str += " %gcc"
    if platform == "darwin":
        spec_str += " %apple-clang"
    elif platform == "linux":
        spec_str += " %gcc"
    elif platform == "freebsd":
        spec_str += " %clang"
    spec_str += f" platform={platform}"

    target = archspec.cpu.host().family
    spec_str += f" target={target}"
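A rough, self-contained sketch of the qualifier-appending idea using only the standard library; add_host_qualifiers and its compiler table are assumptions made for illustration, not Spack's API:

import platform

def add_host_qualifiers(spec_str: str) -> str:
    """Append host compiler, platform and target qualifiers to an abstract spec string."""
    system = platform.system().lower()          # e.g. "linux", "darwin", "windows"
    compilers = {"linux": "%gcc", "darwin": "%apple-clang", "windows": "%msvc"}
    if system in compilers:
        spec_str += f" {compilers[system]}"
    spec_str += f" platform={system} target={platform.machine()}"
    return spec_str

print(add_host_qualifiers("clingo-bootstrap@spack+python"))
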
@@ -1,167 +0,0 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Bootstrap concrete specs for clingo
|
||||
|
||||
Spack uses clingo to concretize specs. When clingo itself needs to be bootstrapped from sources,
|
||||
we need to rely on another mechanism to get a concrete spec that fits the current host.
|
||||
|
||||
This module contains the logic to get a concrete spec for clingo, starting from a prototype
|
||||
JSON file for a similar platform.
|
||||
"""
|
||||
import pathlib
|
||||
import sys
|
||||
from typing import Dict, Optional, Tuple
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
import spack.compilers.config
|
||||
import spack.compilers.libraries
|
||||
import spack.config
|
||||
import spack.platforms
|
||||
import spack.spec
|
||||
import spack.traverse
|
||||
|
||||
from .config import spec_for_current_python
|
||||
|
||||
|
||||
class ClingoBootstrapConcretizer:
|
||||
def __init__(self, configuration):
|
||||
self.host_platform = spack.platforms.host()
|
||||
self.host_os = self.host_platform.operating_system("frontend")
|
||||
self.host_target = archspec.cpu.host().family
|
||||
self.host_architecture = spack.spec.ArchSpec.frontend_arch()
|
||||
self.host_architecture.target = str(self.host_target)
|
||||
self.host_compiler = self._valid_compiler_or_raise()
|
||||
self.host_python = self.python_external_spec()
|
||||
if str(self.host_platform) == "linux":
|
||||
self.host_libc = self.libc_external_spec()
|
||||
|
||||
self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)
|
||||
|
||||
def _valid_compiler_or_raise(self):
|
||||
if str(self.host_platform) == "linux":
|
||||
compiler_name = "gcc"
|
||||
elif str(self.host_platform) == "darwin":
|
||||
compiler_name = "apple-clang"
|
||||
elif str(self.host_platform) == "windows":
|
||||
compiler_name = "msvc"
|
||||
elif str(self.host_platform) == "freebsd":
|
||||
compiler_name = "llvm"
|
||||
else:
|
||||
raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
|
||||
|
||||
candidates = [
|
||||
x
|
||||
for x in spack.compilers.config.CompilerFactory.from_packages_yaml(spack.config.CONFIG)
|
||||
if x.name == compiler_name
|
||||
]
|
||||
if not candidates:
|
||||
raise RuntimeError(
|
||||
f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
|
||||
)
|
||||
candidates.sort(key=lambda x: x.version, reverse=True)
|
||||
best = candidates[0]
|
||||
# Get compilers for bootstrapping from the 'builtin' repository
|
||||
best.namespace = "builtin"
|
||||
# If the compiler does not support C++ 14, fail with a legible error message
|
||||
try:
|
||||
_ = best.package.standard_flag(language="cxx", standard="14")
|
||||
except RuntimeError as e:
|
||||
raise RuntimeError(
|
||||
"cannot find a compiler supporting C++ 14 [needed to bootstrap clingo]"
|
||||
) from e
|
||||
return candidates[0]
|
||||
|
||||
def _externals_from_yaml(
|
||||
self, configuration: "spack.config.Configuration"
|
||||
) -> Tuple[Optional["spack.spec.Spec"], Optional["spack.spec.Spec"]]:
|
||||
packages_yaml = configuration.get("packages")
|
||||
requirements = {"cmake": "@3.20:", "bison": "@2.5:"}
|
||||
selected: Dict[str, Optional["spack.spec.Spec"]] = {"cmake": None, "bison": None}
|
||||
for pkg_name in ["cmake", "bison"]:
|
||||
if pkg_name not in packages_yaml:
|
||||
continue
|
||||
|
||||
candidates = packages_yaml[pkg_name].get("externals", [])
|
||||
for candidate in candidates:
|
||||
s = spack.spec.Spec(candidate["spec"], external_path=candidate["prefix"])
|
||||
if not s.satisfies(requirements[pkg_name]):
|
||||
continue
|
||||
|
||||
if not s.intersects(f"arch={self.host_architecture}"):
|
||||
continue
|
||||
|
||||
selected[pkg_name] = self._external_spec(s)
|
||||
break
|
||||
return selected["cmake"], selected["bison"]
|
||||
|
||||
def prototype_path(self) -> pathlib.Path:
|
||||
"""Path to a prototype concrete specfile for clingo"""
|
||||
parent_dir = pathlib.Path(__file__).parent
|
||||
result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-{self.host_target}.json"
|
||||
if str(self.host_platform) == "linux":
|
||||
# Using aarch64 as a fallback, since it has gnuconfig (x86_64 doesn't have it)
|
||||
if not result.exists():
|
||||
result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-aarch64.json"
|
||||
|
||||
elif str(self.host_platform) == "freebsd":
|
||||
result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-amd64.json"
|
||||
|
||||
elif not result.exists():
|
||||
raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
|
||||
|
||||
return result
|
||||
|
||||
def concretize(self) -> "spack.spec.Spec":
|
||||
# Read the prototype and mark it NOT concrete
|
||||
s = spack.spec.Spec.from_specfile(str(self.prototype_path()))
|
||||
s._mark_concrete(False)
|
||||
|
||||
# Tweak it to conform to the host architecture
|
||||
for node in s.traverse():
|
||||
node.architecture.os = str(self.host_os)
|
||||
node.architecture = self.host_architecture
|
||||
|
||||
if node.name == "gcc-runtime":
|
||||
node.versions = self.host_compiler.versions
|
||||
|
||||
for edge in spack.traverse.traverse_edges([s], cover="edges"):
|
||||
if edge.spec.name == "python":
|
||||
edge.spec = self.host_python
|
||||
|
||||
if edge.spec.name == "bison" and self.external_bison:
|
||||
edge.spec = self.external_bison
|
||||
|
||||
if edge.spec.name == "cmake" and self.external_cmake:
|
||||
edge.spec = self.external_cmake
|
||||
|
||||
if edge.spec.name == self.host_compiler.name:
|
||||
edge.spec = self.host_compiler
|
||||
|
||||
if "libc" in edge.virtuals:
|
||||
edge.spec = self.host_libc
|
||||
|
||||
s._finalize_concretization()
|
||||
|
||||
# Work around the fact that the installer calls Spec.dependents() and
|
||||
# we modified edges inconsistently
|
||||
return s.copy()
|
||||
|
||||
def python_external_spec(self) -> "spack.spec.Spec":
|
||||
"""Python external spec corresponding to the current running interpreter"""
|
||||
result = spack.spec.Spec(spec_for_current_python(), external_path=sys.exec_prefix)
|
||||
return self._external_spec(result)
|
||||
|
||||
def libc_external_spec(self) -> "spack.spec.Spec":
|
||||
detector = spack.compilers.libraries.CompilerPropertyDetector(self.host_compiler)
|
||||
result = detector.default_libc()
|
||||
return self._external_spec(result)
|
||||
|
||||
def _external_spec(self, initial_spec) -> "spack.spec.Spec":
|
||||
initial_spec.namespace = "builtin"
|
||||
initial_spec.architecture = self.host_architecture
|
||||
for flag_type in spack.spec.FlagMap.valid_compiler_flags():
|
||||
initial_spec.compiler_flags[flag_type] = []
|
||||
return spack.spec.parse_with_version_concrete(initial_spec)
|
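The concretizer above selects a prototype JSON specfile named after the host platform and target, with per-platform fallbacks. A sketch of that selection logic with pathlib; the directory layout and file names here are assumptions for illustration:

import pathlib

def prototype_path(base: pathlib.Path, platform: str, target: str) -> pathlib.Path:
    """Pick clingo-<platform>-<target>.json, falling back to a sibling prototype."""
    candidate = base / f"clingo-{platform}-{target}.json"
    if candidate.exists():
        return candidate
    if platform == "linux":
        # assumed fallback: reuse the aarch64 prototype when no exact match exists
        return base / f"clingo-{platform}-aarch64.json"
    raise RuntimeError(f"no prototype for {platform}/{target}")

# usage (assumes a 'prototypes' directory next to this script):
# print(prototype_path(pathlib.Path("prototypes"), "linux", "x86_64"))
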
@@ -11,10 +11,9 @@
|
||||
|
||||
from llnl.util import tty
|
||||
|
||||
import spack.compilers.config
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.environment
|
||||
import spack.modules
|
||||
import spack.paths
|
||||
import spack.platforms
|
||||
import spack.repo
|
||||
@@ -130,10 +129,10 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
|
||||
configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
|
||||
for name, path in configuration_paths:
|
||||
platform = spack.platforms.host().name
|
||||
platform_scope = spack.config.DirectoryConfigScope(
|
||||
f"{name}/{platform}", os.path.join(path, platform)
|
||||
platform_scope = spack.config.ConfigScope(
|
||||
"/".join([name, platform]), os.path.join(path, platform)
|
||||
)
|
||||
generic_scope = spack.config.DirectoryConfigScope(name, path)
|
||||
generic_scope = spack.config.ConfigScope(name, path)
|
||||
config_scopes.extend([generic_scope, platform_scope])
|
||||
msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}"
|
||||
tty.debug(msg.format(generic_scope.name, generic_scope.path))
|
||||
@@ -143,8 +142,12 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
|
||||
|
||||
def _add_compilers_if_missing() -> None:
|
||||
arch = spack.spec.ArchSpec.frontend_arch()
|
||||
if not spack.compilers.config.compilers_for_arch(arch):
|
||||
spack.compilers.config.find_compilers()
|
||||
if not spack.compilers.compilers_for_arch(arch):
|
||||
new_compilers = spack.compilers.find_new_compilers(
|
||||
mixed_toolchain=sys.platform == "darwin"
|
||||
)
|
||||
if new_compilers:
|
||||
spack.compilers.add_compilers_to_config(new_compilers)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
@@ -153,7 +156,7 @@ def _ensure_bootstrap_configuration() -> Generator:
|
||||
bootstrap_store_path = store_path()
|
||||
user_configuration = _read_and_sanitize_configuration()
|
||||
with spack.environment.no_active_environment():
|
||||
with spack.platforms.use_platform(
|
||||
with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform(
|
||||
spack.platforms.real_host()
|
||||
), spack.repo.use_repositories(spack.paths.packages_path):
|
||||
# Default configuration scopes excluding command line
|
||||
|
@@ -37,19 +37,23 @@
|
||||
import spack.binary_distribution
|
||||
import spack.config
|
||||
import spack.detection
|
||||
import spack.mirror
|
||||
import spack.environment
|
||||
import spack.modules
|
||||
import spack.paths
|
||||
import spack.platforms
|
||||
import spack.platforms.linux
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.user_environment
|
||||
import spack.util.environment
|
||||
import spack.util.executable
|
||||
import spack.util.path
|
||||
import spack.util.spack_yaml
|
||||
import spack.util.url
|
||||
import spack.version
|
||||
from spack.installer import PackageInstaller
|
||||
|
||||
from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
|
||||
from .clingo import ClingoBootstrapConcretizer
|
||||
from .config import spack_python_interpreter, spec_for_current_python
|
||||
|
||||
#: Name of the file containing metadata about the bootstrapping source
|
||||
@@ -91,7 +95,12 @@ def __init__(self, conf: ConfigDictionary) -> None:
|
||||
self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
|
||||
|
||||
# Promote (relative) paths to file urls
|
||||
self.url = spack.mirror.Mirror(conf["info"]["url"]).fetch_url
|
||||
url = conf["info"]["url"]
|
||||
if spack.util.url.is_path_instead_of_url(url):
|
||||
if not os.path.isabs(url):
|
||||
url = os.path.join(self.metadata_dir, url)
|
||||
url = spack.util.url.path_to_file_url(url)
|
||||
self.url = url
|
||||
|
||||
@property
|
||||
def mirror_scope(self) -> spack.config.InternalConfigScope:
|
||||
@@ -170,15 +179,7 @@ def _install_by_hash(
|
||||
query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
|
||||
for match in spack.store.find([f"/{pkg_hash}"], multiple=False, query_fn=query):
|
||||
spack.binary_distribution.install_root_node(
|
||||
# allow_missing is true since when bootstrapping clingo we truncate runtime
|
||||
# deps such as gcc-runtime, since we link libstdc++ statically, and the other
|
||||
# further runtime deps are loaded by the Python interpreter. This just silences
|
||||
# warnings about missing dependencies.
|
||||
match,
|
||||
unsigned=True,
|
||||
force=True,
|
||||
sha256=pkg_sha256,
|
||||
allow_missing=True,
|
||||
match, unsigned=True, force=True, sha256=pkg_sha256
|
||||
)
|
||||
|
||||
def _install_and_test(
|
||||
@@ -267,13 +268,15 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
|
||||
|
||||
# Try to build and install from sources
|
||||
with spack_python_interpreter():
|
||||
# Add hint to use frontend operating system on Cray
|
||||
concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
|
||||
|
||||
if module == "clingo":
|
||||
bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
|
||||
concrete_spec = bootstrapper.concretize()
|
||||
else:
|
||||
concrete_spec = spack.spec.Spec(
|
||||
abstract_spec_str + " ^" + spec_for_current_python()
|
||||
# TODO: remove when the old concretizer is deprecated # pylint: disable=fixme
|
||||
concrete_spec._old_concretize( # pylint: disable=protected-access
|
||||
deprecation_warning=False
|
||||
)
|
||||
else:
|
||||
concrete_spec.concretize()
|
||||
|
||||
msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
|
||||
@@ -281,12 +284,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
|
||||
|
||||
# Install the spec that should make the module importable
|
||||
with spack.config.override(self.mirror_scope):
|
||||
PackageInstaller(
|
||||
[concrete_spec.package],
|
||||
fail_fast=True,
|
||||
package_use_cache=False,
|
||||
dependencies_use_cache=False,
|
||||
).install()
|
||||
concrete_spec.package.do_install(fail_fast=True)
|
||||
|
||||
if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
|
||||
self.last_search = info
|
||||
@@ -305,11 +303,18 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bo
|
||||
# might reduce compilation time by a fair amount
|
||||
_add_externals_if_missing()
|
||||
|
||||
concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
|
||||
concrete_spec = spack.spec.Spec(abstract_spec_str)
|
||||
if concrete_spec.name == "patchelf":
|
||||
concrete_spec._old_concretize( # pylint: disable=protected-access
|
||||
deprecation_warning=False
|
||||
)
|
||||
else:
|
||||
concrete_spec.concretize()
|
||||
|
||||
msg = "[BOOTSTRAP] Try installing '{0}' from sources"
|
||||
tty.debug(msg.format(abstract_spec_str))
|
||||
with spack.config.override(self.mirror_scope):
|
||||
PackageInstaller([concrete_spec.package], fail_fast=True).install()
|
||||
concrete_spec.package.do_install()
|
||||
if _executables_in_store(executables, concrete_spec, query_info=info):
|
||||
self.last_search = info
|
||||
return True
|
||||
@@ -322,10 +327,11 @@ def create_bootstrapper(conf: ConfigDictionary):
    return _bootstrap_methods[btype](conf)


def source_is_enabled(conf: ConfigDictionary):
def source_is_enabled_or_raise(conf: ConfigDictionary):
    """Raise ValueError if the source is not enabled for bootstrapping"""
    trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
    return trusted.get(name, False)
    if not trusted.get(name, False):
        raise ValueError("source is not trusted")


def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str] = None):
|
||||
@@ -355,10 +361,8 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
|
||||
exception_handler = GroupedExceptionHandler()
|
||||
|
||||
for current_config in bootstrapping_sources():
|
||||
if not source_is_enabled(current_config):
|
||||
continue
|
||||
|
||||
with exception_handler.forward(current_config["name"], Exception):
|
||||
source_is_enabled_or_raise(current_config)
|
||||
current_bootstrapper = create_bootstrapper(current_config)
|
||||
if current_bootstrapper.try_import(module, abstract_spec):
|
||||
return
|
||||
@@ -370,7 +374,11 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
|
||||
msg = f'cannot bootstrap the "{module}" Python module '
|
||||
if abstract_spec:
|
||||
msg += f'from spec "{abstract_spec}" '
|
||||
msg += exception_handler.grouped_message(with_tracebacks=tty.is_debug())
|
||||
if tty.is_debug():
|
||||
msg += exception_handler.grouped_message(with_tracebacks=True)
|
||||
else:
|
||||
msg += exception_handler.grouped_message(with_tracebacks=False)
|
||||
msg += "\nRun `spack --debug ...` for more detailed errors"
|
||||
raise ImportError(msg)
|
||||
|
||||
|
||||
@@ -408,9 +416,8 @@ def ensure_executables_in_path_or_raise(
|
||||
exception_handler = GroupedExceptionHandler()
|
||||
|
||||
for current_config in bootstrapping_sources():
|
||||
if not source_is_enabled(current_config):
|
||||
continue
|
||||
with exception_handler.forward(current_config["name"], Exception):
|
||||
source_is_enabled_or_raise(current_config)
|
||||
current_bootstrapper = create_bootstrapper(current_config)
|
||||
if current_bootstrapper.try_search_path(executables, abstract_spec):
|
||||
# Additional environment variables needed
|
||||
@@ -473,8 +480,7 @@ def ensure_clingo_importable_or_raise() -> None:


def gnupg_root_spec() -> str:
    """Return the root spec used to bootstrap GnuPG"""
    root_spec_name = "win-gpg" if IS_WINDOWS else "gnupg"
    return _root_spec(f"{root_spec_name}@2.3:")
    return _root_spec("gnupg@2.3:")


def ensure_gpg_in_path_or_raise() -> None:
@@ -484,19 +490,6 @@ def ensure_gpg_in_path_or_raise() -> None:
    )


def file_root_spec() -> str:
    """Return the root spec used to bootstrap file"""
    root_spec_name = "win-file" if IS_WINDOWS else "file"
    return _root_spec(root_spec_name)


def ensure_file_in_path_or_raise() -> None:
    """Ensure file is in the PATH or raise"""
    return ensure_executables_in_path_or_raise(
        executables=["file"], abstract_spec=file_root_spec()
    )


def patchelf_root_spec() -> str:
    """Return the root spec used to bootstrap patchelf"""
    # 0.13.1 is the last version not to require C++17.
@@ -580,15 +573,14 @@ def ensure_core_dependencies() -> None:
    """Ensure the presence of all the core dependencies."""
    if sys.platform.lower() == "linux":
        ensure_patchelf_in_path_or_raise()
    elif sys.platform == "win32":
        ensure_file_in_path_or_raise()
    ensure_gpg_in_path_or_raise()
    if not IS_WINDOWS:
        ensure_gpg_in_path_or_raise()
    ensure_clingo_importable_or_raise()


def all_core_root_specs() -> List[str]:
    """Return a list of all the core root specs that may be used to bootstrap Spack"""
    return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec(), file_root_spec()]
    return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]

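A sketch of the platform-conditional dispatch used by ensure_core_dependencies above; the ensure_* functions below are placeholders standing in for the real checks:

import sys

def ensure_patchelf() -> None:   # placeholder for the real check
    print("checking patchelf")

def ensure_file_tool() -> None:  # placeholder for the real check
    print("checking file")

def ensure_gpg() -> None:        # placeholder for the real check
    print("checking gpg")

def ensure_core_tools() -> None:
    # patchelf is only needed on Linux, file only on Windows, gpg everywhere
    if sys.platform.startswith("linux"):
        ensure_patchelf()
    elif sys.platform == "win32":
        ensure_file_tool()
    ensure_gpg()

ensure_core_tools()
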
def bootstrapping_sources(scope: Optional[str] = None):
|
||||
@@ -605,10 +597,7 @@ def bootstrapping_sources(scope: Optional[str] = None):
|
||||
current = copy.copy(entry)
|
||||
metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
|
||||
metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
|
||||
try:
|
||||
with open(metadata_yaml, encoding="utf-8") as stream:
|
||||
current.update(spack.util.spack_yaml.load(stream))
|
||||
list_of_sources.append(current)
|
||||
except OSError:
|
||||
pass
|
||||
with open(metadata_yaml, encoding="utf-8") as stream:
|
||||
current.update(spack.util.spack_yaml.load(stream))
|
||||
list_of_sources.append(current)
|
||||
return list_of_sources
|
||||
|
@@ -14,9 +14,9 @@
|
||||
from llnl.util import tty
|
||||
|
||||
import spack.environment
|
||||
import spack.spec
|
||||
import spack.tengine
|
||||
import spack.util.path
|
||||
import spack.util.cpus
|
||||
import spack.util.executable
|
||||
|
||||
from ._common import _root_spec
|
||||
from .config import root_path, spec_for_current_python, store_path
|
||||
|
File diff suppressed because one or more lines are too long (applies to seven files)
@@ -88,7 +88,7 @@ def _core_requirements() -> List[RequiredResponseType]:
|
||||
|
||||
def _buildcache_requirements() -> List[RequiredResponseType]:
|
||||
_buildcache_exes = {
|
||||
"file": _missing("file", "required to analyze files for buildcaches", system_only=False),
|
||||
"file": _missing("file", "required to analyze files for buildcaches"),
|
||||
("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
|
||||
}
|
||||
if platform.system().lower() == "darwin":
|
||||
@@ -124,7 +124,7 @@ def _development_requirements() -> List[RequiredResponseType]:
|
||||
# Ensure we trigger environment modifications if we have an environment
|
||||
if BootstrapEnvironment.spack_yaml().exists():
|
||||
with BootstrapEnvironment() as env:
|
||||
env.load()
|
||||
env.update_syspath_and_environ()
|
||||
|
||||
return [
|
||||
_required_executable(
|
||||
|
@@ -43,10 +43,7 @@
|
||||
from collections import defaultdict
|
||||
from enum import Flag, auto
|
||||
from itertools import chain
|
||||
from multiprocessing.connection import Connection
|
||||
from typing import Callable, Dict, List, Optional, Set, Tuple
|
||||
|
||||
import archspec.cpu
|
||||
from typing import Dict, List, Set, Tuple
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.string import plural
|
||||
@@ -54,29 +51,36 @@
|
||||
from llnl.util.lang import dedupe, stable_partition
|
||||
from llnl.util.symlink import symlink
|
||||
from llnl.util.tty.color import cescape, colorize
|
||||
from llnl.util.tty.log import MultiProcessFd
|
||||
|
||||
import spack.build_systems.cmake
|
||||
import spack.build_systems.meson
|
||||
import spack.build_systems.python
|
||||
import spack.builder
|
||||
import spack.compilers.libraries
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.multimethod
|
||||
import spack.main
|
||||
import spack.package_base
|
||||
import spack.paths
|
||||
import spack.platforms
|
||||
import spack.repo
|
||||
import spack.schema.environment
|
||||
import spack.spec
|
||||
import spack.stage
|
||||
import spack.store
|
||||
import spack.subprocess_context
|
||||
import spack.user_environment
|
||||
import spack.util.executable
|
||||
import spack.util.path
|
||||
import spack.util.pattern
|
||||
from spack import traverse
|
||||
from spack.context import Context
|
||||
from spack.error import InstallError, NoHeadersError, NoLibrariesError
|
||||
from spack.error import NoHeadersError, NoLibrariesError
|
||||
from spack.install_test import spack_install_test_log
|
||||
from spack.installer import InstallError
|
||||
from spack.util.cpus import determine_number_of_jobs
|
||||
from spack.util.environment import (
|
||||
SYSTEM_DIR_CASE_ENTRY,
|
||||
EnvironmentModifications,
|
||||
@@ -88,7 +92,7 @@
|
||||
)
|
||||
from spack.util.executable import Executable
|
||||
from spack.util.log_parse import make_log_context, parse_log_events
|
||||
from spack.util.module_cmd import load_module
|
||||
from spack.util.module_cmd import load_module, module, path_from_modules
|
||||
|
||||
#
|
||||
# This can be set by the user to globally disable parallel builds.
|
||||
@@ -187,6 +191,14 @@ def __call__(self, *args, **kwargs):
        return super().__call__(*args, **kwargs)


def _on_cray():
    host_platform = spack.platforms.host()
    host_os = host_platform.operating_system("default_os")
    on_cray = str(host_platform) == "cray"
    using_cnl = re.match(r"cnl\d+", str(host_os))
    return on_cray, using_cnl


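The helper above boils down to a string comparison plus a regular-expression match; a standalone illustration in which the platform and OS strings are invented samples:

import re

def on_cray_like(platform_name: str, os_name: str):
    """Return (is_cray, is_compute_node) for the given platform/OS strings."""
    is_cray = platform_name == "cray"
    is_cnl = re.match(r"cnl\d+", os_name) is not None
    return is_cray, is_cnl

print(on_cray_like("cray", "cnl7"))     # (True, True)
print(on_cray_like("linux", "rhel8"))   # (False, False)
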
def clean_environment():
|
||||
# Stuff in here sanitizes the build environment to eliminate
|
||||
# anything the user has set that may interfere. We apply it immediately
|
||||
@@ -230,6 +242,17 @@ def clean_environment():
|
||||
if varname.endswith("_ROOT") and varname != "SPACK_ROOT":
|
||||
env.unset(varname)
|
||||
|
||||
# On Cray "cluster" systems, unset CRAY_LD_LIBRARY_PATH to avoid
|
||||
# interference with Spack dependencies.
|
||||
# CNL requires these variables to be set (or at least some of them,
|
||||
# depending on the CNL version).
|
||||
on_cray, using_cnl = _on_cray()
|
||||
if on_cray and not using_cnl:
|
||||
env.unset("CRAY_LD_LIBRARY_PATH")
|
||||
for varname in os.environ.keys():
|
||||
if "PKGCONF" in varname:
|
||||
env.unset(varname)
|
||||
|
||||
# Unset the following variables because they can affect installation of
|
||||
# Autotools and CMake packages.
|
||||
build_system_vars = [
|
||||
@@ -295,10 +318,62 @@ def _add_werror_handling(keep_werror, env):
|
||||
env.set("SPACK_COMPILER_FLAGS_REPLACE", " ".join(["|".join(item) for item in replace_flags]))
|
||||
|
||||
|
||||
def set_wrapper_environment_variables_for_flags(pkg, env):
|
||||
def set_compiler_environment_variables(pkg, env):
|
||||
assert pkg.spec.concrete
|
||||
compiler = pkg.compiler
|
||||
spec = pkg.spec
|
||||
|
||||
# Make sure the executables for this compiler exist
|
||||
compiler.verify_executables()
|
||||
|
||||
# Set compiler variables used by CMake and autotools
|
||||
assert all(key in compiler.link_paths for key in ("cc", "cxx", "f77", "fc"))
|
||||
|
||||
# Populate an object with the list of environment modifications
|
||||
# and return it
|
||||
# TODO : add additional kwargs for better diagnostics, like requestor,
|
||||
# ttyout, ttyerr, etc.
|
||||
link_dir = spack.paths.build_env_path
|
||||
|
||||
# Set SPACK compiler variables so that our wrapper knows what to
|
||||
# call. If there is no compiler configured then use a default
|
||||
# wrapper which will emit an error if it is used.
|
||||
if compiler.cc:
|
||||
env.set("SPACK_CC", compiler.cc)
|
||||
env.set("CC", os.path.join(link_dir, compiler.link_paths["cc"]))
|
||||
else:
|
||||
env.set("CC", os.path.join(link_dir, "cc"))
|
||||
if compiler.cxx:
|
||||
env.set("SPACK_CXX", compiler.cxx)
|
||||
env.set("CXX", os.path.join(link_dir, compiler.link_paths["cxx"]))
|
||||
else:
|
||||
env.set("CC", os.path.join(link_dir, "c++"))
|
||||
if compiler.f77:
|
||||
env.set("SPACK_F77", compiler.f77)
|
||||
env.set("F77", os.path.join(link_dir, compiler.link_paths["f77"]))
|
||||
else:
|
||||
env.set("F77", os.path.join(link_dir, "f77"))
|
||||
if compiler.fc:
|
||||
env.set("SPACK_FC", compiler.fc)
|
||||
env.set("FC", os.path.join(link_dir, compiler.link_paths["fc"]))
|
||||
else:
|
||||
env.set("FC", os.path.join(link_dir, "fc"))
|
||||
|
||||
# Set SPACK compiler rpath flags so that our wrapper knows what to use
|
||||
env.set("SPACK_CC_RPATH_ARG", compiler.cc_rpath_arg)
|
||||
env.set("SPACK_CXX_RPATH_ARG", compiler.cxx_rpath_arg)
|
||||
env.set("SPACK_F77_RPATH_ARG", compiler.f77_rpath_arg)
|
||||
env.set("SPACK_FC_RPATH_ARG", compiler.fc_rpath_arg)
|
||||
env.set("SPACK_LINKER_ARG", compiler.linker_arg)
|
||||
|
||||
# Check whether we want to force RPATH or RUNPATH
|
||||
if spack.config.get("config:shared_linking:type") == "rpath":
|
||||
env.set("SPACK_DTAGS_TO_STRIP", compiler.enable_new_dtags)
|
||||
env.set("SPACK_DTAGS_TO_ADD", compiler.disable_new_dtags)
|
||||
else:
|
||||
env.set("SPACK_DTAGS_TO_STRIP", compiler.disable_new_dtags)
|
||||
env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags)
|
||||
|
||||
if pkg.keep_werror is not None:
|
||||
keep_werror = pkg.keep_werror
|
||||
else:
|
||||
@@ -306,6 +381,14 @@ def set_wrapper_environment_variables_for_flags(pkg, env):
|
||||
|
||||
_add_werror_handling(keep_werror, env)
|
||||
|
||||
# Set the target parameters that the compiler will add
|
||||
# Don't set on cray platform because the targeting module handles this
|
||||
if spec.satisfies("platform=cray"):
|
||||
isa_arg = ""
|
||||
else:
|
||||
isa_arg = spec.architecture.target.optimization_flags(compiler)
|
||||
env.set("SPACK_TARGET_ARGS", isa_arg)
|
||||
|
||||
# Trap spack-tracked compiler flags as appropriate.
|
||||
# env_flags are easy to accidentally override.
|
||||
inject_flags = {}
|
||||
@@ -339,27 +422,15 @@ def set_wrapper_environment_variables_for_flags(pkg, env):
|
||||
env.set(flag.upper(), " ".join(f for f in env_flags[flag]))
|
||||
pkg.flags_to_build_system_args(build_system_flags)
|
||||
|
||||
env.set("SPACK_COMPILER_SPEC", str(spec.compiler))
|
||||
|
||||
env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)
|
||||
|
||||
# FIXME (compiler as nodes): recover this one in the correct packages
|
||||
# compiler.setup_custom_environment(pkg, env)
|
||||
compiler.setup_custom_environment(pkg, env)
|
||||
|
||||
return env
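A sketch of the variable-setting pattern above, collecting modifications in a plain dict rather than Spack's EnvironmentModifications object; the wrapper directory and compiler paths are made-up examples:

import os

def compiler_env(cc, cxx, wrapper_dir="/opt/spack/lib/spack/env"):
    """Map real compilers to SPACK_* variables and point CC/CXX at wrapper scripts."""
    env = {}
    if cc:
        env["SPACK_CC"] = cc
        env["CC"] = os.path.join(wrapper_dir, "cc")
    if cxx:
        env["SPACK_CXX"] = cxx
        env["CXX"] = os.path.join(wrapper_dir, "c++")
    return env

print(compiler_env("/usr/bin/gcc", "/usr/bin/g++"))
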
|
||||
|
||||
|
||||
def optimization_flags(compiler, target):
|
||||
# Try to check if the current compiler comes with a version number or
|
||||
# has an unexpected suffix. If so, treat it as a compiler with a
|
||||
# custom spec.
|
||||
version_number, _ = archspec.cpu.version_components(compiler.version.dotted_numeric_string)
|
||||
try:
|
||||
result = target.optimization_flags(compiler.name, version_number)
|
||||
except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
|
||||
result = ""
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def set_wrapper_variables(pkg, env):
|
||||
"""Set environment variables used by the Spack compiler wrapper (which have the prefix
|
||||
`SPACK_`) and also add the compiler wrappers to PATH.
|
||||
@@ -368,15 +439,46 @@ def set_wrapper_variables(pkg, env):
|
||||
this function computes these options in a manner that is intended to match the DAG traversal
|
||||
order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext
|
||||
is using topo order."""
|
||||
# Set compiler flags injected from the spec
|
||||
set_wrapper_environment_variables_for_flags(pkg, env)
|
||||
# Set environment variables if specified for
|
||||
# the given compiler
|
||||
compiler = pkg.compiler
|
||||
env.extend(spack.schema.environment.parse(compiler.environment))
|
||||
|
||||
if compiler.extra_rpaths:
|
||||
extra_rpaths = ":".join(compiler.extra_rpaths)
|
||||
env.set("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)
|
||||
|
||||
# Add spack build environment path with compiler wrappers first in
|
||||
# the path. We add the compiler wrapper path, which includes default
|
||||
# wrappers (cc, c++, f77, f90), AND a subdirectory containing
|
||||
# compiler-specific symlinks. The latter ensures that builds that
|
||||
# are sensitive to the *name* of the compiler see the right name when
|
||||
# we're building with the wrappers.
|
||||
#
|
||||
# Conflicts on case-insensitive systems (like "CC" and "cc") are
|
||||
# handled by putting one in the <build_env_path>/case-insensitive
|
||||
# directory. Add that to the path too.
|
||||
env_paths = []
|
||||
compiler_specific = os.path.join(
|
||||
spack.paths.build_env_path, os.path.dirname(pkg.compiler.link_paths["cc"])
|
||||
)
|
||||
for item in [spack.paths.build_env_path, compiler_specific]:
|
||||
env_paths.append(item)
|
||||
ci = os.path.join(item, "case-insensitive")
|
||||
if os.path.isdir(ci):
|
||||
env_paths.append(ci)
|
||||
|
||||
tty.debug("Adding compiler bin/ paths: " + " ".join(env_paths))
|
||||
for item in env_paths:
|
||||
env.prepend_path("PATH", item)
|
||||
env.set_path(SPACK_ENV_PATH, env_paths)
|
||||
|
||||
# Working directory for the spack command itself, for debug logs.
|
||||
if spack.config.get("config:debug"):
|
||||
env.set(SPACK_DEBUG, "TRUE")
|
||||
env.set(SPACK_SHORT_SPEC, pkg.spec.short_spec)
|
||||
env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
|
||||
env.set(SPACK_DEBUG_LOG_DIR, spack.paths.spack_working_dir)
|
||||
env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)
|
||||
|
||||
if spack.config.get("config:ccache"):
|
||||
# Enable ccache in the compiler wrapper
|
||||
@@ -386,65 +488,70 @@ def set_wrapper_variables(pkg, env):
|
||||
env.set("CCACHE_DISABLE", "1")
|
||||
|
||||
# Gather information about various types of dependencies
|
||||
rpath_hashes = set(s.dag_hash() for s in get_rpath_deps(pkg))
|
||||
link_deps = pkg.spec.traverse(root=False, order="topo", deptype=dt.LINK)
|
||||
external_link_deps, nonexternal_link_deps = stable_partition(link_deps, lambda d: d.external)
|
||||
link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
|
||||
rpath_deps = get_rpath_deps(pkg)
|
||||
|
||||
link_dirs = []
|
||||
include_dirs = []
|
||||
rpath_dirs = []
|
||||
|
||||
for dep in chain(external_link_deps, nonexternal_link_deps):
|
||||
# TODO: is_system_path is wrong, but even if we knew default -L, -I flags from the compiler
|
||||
# and default search dirs from the dynamic linker, it's not obvious how to avoid a possibly
|
||||
# expensive search in `query.libs.directories` and `query.headers.directories`, which is
|
||||
# what this branch is trying to avoid.
|
||||
if is_system_path(dep.prefix):
|
||||
continue
|
||||
# TODO: as of Spack 0.22, multiple instances of the same package may occur among the link
|
||||
# deps, so keying by name is wrong. In practice it is not problematic: we obtain the same
|
||||
# gcc-runtime / glibc here, and repeatedly add the same dirs that are later deduped.
|
||||
query = pkg.spec[dep.name]
|
||||
dep_link_dirs = []
|
||||
try:
|
||||
# Locating libraries can be time consuming, so log start and finish.
|
||||
tty.debug(f"Collecting libraries for {dep.name}")
|
||||
dep_link_dirs.extend(query.libs.directories)
|
||||
tty.debug(f"Libraries for {dep.name} have been collected.")
|
||||
except NoLibrariesError:
|
||||
tty.debug(f"No libraries found for {dep.name}")
|
||||
def _prepend_all(list_to_modify, items_to_add):
|
||||
# Update the original list (creating a new list would be faster but
|
||||
# may not be convenient)
|
||||
for item in reversed(list(items_to_add)):
|
||||
list_to_modify.insert(0, item)
|
||||
|
||||
for default_lib_dir in ("lib", "lib64"):
|
||||
default_lib_prefix = os.path.join(dep.prefix, default_lib_dir)
|
||||
if os.path.isdir(default_lib_prefix):
|
||||
dep_link_dirs.append(default_lib_prefix)
|
||||
def update_compiler_args_for_dep(dep):
|
||||
if dep in link_deps and (not is_system_path(dep.prefix)):
|
||||
query = pkg.spec[dep.name]
|
||||
dep_link_dirs = list()
|
||||
try:
|
||||
# In some circumstances (particularly for externals) finding
|
||||
# libraries packages can be time consuming, so indicate that
|
||||
# we are performing this operation (and also report when it
|
||||
# finishes).
|
||||
tty.debug("Collecting libraries for {0}".format(dep.name))
|
||||
dep_link_dirs.extend(query.libs.directories)
|
||||
tty.debug("Libraries for {0} have been collected.".format(dep.name))
|
||||
except NoLibrariesError:
|
||||
tty.debug("No libraries found for {0}".format(dep.name))
|
||||
|
||||
link_dirs[:0] = dep_link_dirs
|
||||
if dep.dag_hash() in rpath_hashes:
|
||||
rpath_dirs[:0] = dep_link_dirs
|
||||
for default_lib_dir in ["lib", "lib64"]:
|
||||
default_lib_prefix = os.path.join(dep.prefix, default_lib_dir)
|
||||
if os.path.isdir(default_lib_prefix):
|
||||
dep_link_dirs.append(default_lib_prefix)
|
||||
|
||||
try:
|
||||
tty.debug(f"Collecting headers for {dep.name}")
|
||||
include_dirs[:0] = query.headers.directories
|
||||
tty.debug(f"Headers for {dep.name} have been collected.")
|
||||
except NoHeadersError:
|
||||
tty.debug(f"No headers found for {dep.name}")
|
||||
_prepend_all(link_dirs, dep_link_dirs)
|
||||
if dep in rpath_deps:
|
||||
_prepend_all(rpath_dirs, dep_link_dirs)
|
||||
|
||||
# The top-level package is heuristically rpath'ed.
|
||||
for libdir in ("lib64", "lib"):
|
||||
try:
|
||||
_prepend_all(include_dirs, query.headers.directories)
|
||||
except NoHeadersError:
|
||||
tty.debug("No headers found for {0}".format(dep.name))
|
||||
|
||||
for dspec in pkg.spec.traverse(root=False, order="post"):
|
||||
if dspec.external:
|
||||
update_compiler_args_for_dep(dspec)
|
||||
|
||||
# Just above, we prepended entries for -L/-rpath for externals. We
|
||||
# now do this for non-external packages so that Spack-built packages
|
||||
# are searched first for libraries etc.
|
||||
for dspec in pkg.spec.traverse(root=False, order="post"):
|
||||
if not dspec.external:
|
||||
update_compiler_args_for_dep(dspec)
|
||||
|
||||
# The top-level package is always RPATHed. It hasn't been installed yet
|
||||
# so the RPATHs are added unconditionally (e.g. even though lib64/ may
|
||||
# not be created for the install).
|
||||
for libdir in ["lib64", "lib"]:
|
||||
lib_path = os.path.join(pkg.prefix, libdir)
|
||||
rpath_dirs.insert(0, lib_path)
|
||||
|
||||
# TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path
|
||||
# branch above). link_dirs should be filtered with entries from _parse_link_paths.
|
||||
link_dirs = list(dedupe(filter_system_paths(link_dirs)))
|
||||
include_dirs = list(dedupe(filter_system_paths(include_dirs)))
|
||||
rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
|
||||
|
||||
default_dynamic_linker_filter = spack.compilers.libraries.dynamic_linker_filter_for(pkg.spec)
|
||||
if default_dynamic_linker_filter:
|
||||
rpath_dirs = default_dynamic_linker_filter(rpath_dirs)
|
||||
|
||||
# Spack managed directories include the stage, store and upstream stores. We extend this with
|
||||
# their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
|
||||
spack_managed_dirs: Set[str] = {
|
||||
@@ -473,12 +580,14 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
|
||||
"""
|
||||
module = ModuleChangePropagator(pkg)
|
||||
|
||||
jobs = spack.config.determine_number_of_jobs(parallel=pkg.parallel)
|
||||
module.make_jobs = jobs
|
||||
if context == Context.BUILD:
|
||||
module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
|
||||
module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
|
||||
module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
|
||||
|
||||
jobs = determine_number_of_jobs(parallel=pkg.parallel)
|
||||
module.make_jobs = jobs
|
||||
|
||||
# TODO: make these build deps that can be installed if not found.
|
||||
module.make = MakeExecutable("make", jobs)
|
||||
module.gmake = MakeExecutable("gmake", jobs)
|
||||
@@ -499,19 +608,22 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
|
||||
# Put spack compiler paths in module scope. (Some packages use it
|
||||
# in setup_run_environment etc, so don't put it context == build)
|
||||
    link_dir = spack.paths.build_env_path
    pkg_compiler = None
    try:
        pkg_compiler = pkg.compiler
    except spack.compilers.NoCompilerForSpecError as e:
        tty.debug(f"cannot set 'spack_cc': {str(e)}")

    # FIXME (compiler as nodes): make this more general, and not tied to three languages
    # Maybe add a callback?
    global_names = {
        "c": ("spack_cc",),
        "cxx": ("spack_cxx",),
        "fortran": ("spack_fc", "spack_f77"),
    }
    for language in ("c", "cxx", "fortran"):
        spec = pkg.spec.dependencies(virtuals=[language])
        value = None if not spec else os.path.join(link_dir, spec[0].package.link_paths[language])
        for name in global_names[language]:
            setattr(module, name, value)
    if pkg_compiler is not None:
        module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
        module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
        module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
        module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
    else:
        module.spack_cc = None
        module.spack_cxx = None
        module.spack_f77 = None
        module.spack_fc = None

    # Useful directories within the prefix are encapsulated in
    # a Prefix object.
@@ -643,6 +755,19 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]
    return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)


def get_rpaths(pkg):
    """Get a list of all the rpaths for a package."""
    rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
    deps = get_rpath_deps(pkg)
    rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))
    rpaths.extend(d.prefix.lib64 for d in deps if os.path.isdir(d.prefix.lib64))
    # Second module is our compiler mod name. We use that to get rpaths from
    # module show output.
    if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
        rpaths.append(path_from_modules([pkg.compiler.modules[1]]))
    return list(dedupe(filter_system_paths(rpaths)))
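
The `dedupe(filter_system_paths(...))` idiom used here keeps the first occurrence of each directory while dropping default system paths. A minimal standalone sketch of the idea, with simplified stand-ins for llnl.util.lang.dedupe and spack.util.environment.filter_system_paths (the real implementations cover more cases):

# Hypothetical stand-ins illustrating the dedupe + filter_system_paths idiom.
SYSTEM_DIRS = {"/usr/lib", "/usr/lib64", "/lib", "/lib64"}

def filter_system_paths(paths):
    # Drop default system directories; the linker searches them anyway.
    return [p for p in paths if p not in SYSTEM_DIRS]

def dedupe(seq):
    # Keep the first occurrence of each entry, preserving order.
    seen = set()
    return [x for x in seq if not (x in seen or seen.add(x))]

rpaths = ["/opt/spack/foo/lib", "/usr/lib", "/opt/spack/foo/lib"]
assert dedupe(filter_system_paths(rpaths)) == ["/opt/spack/foo/lib"]
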

def load_external_modules(pkg):
    """Traverse a package's spec DAG and load any external modules.

@@ -678,11 +803,13 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
        context == Context.TEST and pkg.test_requires_compiler
    )
    if need_compiler:
        set_compiler_environment_variables(pkg, env_mods)
        set_wrapper_variables(pkg, env_mods)

    # Platform specific setup goes before package specific setup. This is for setting
    # defaults like MACOSX_DEPLOYMENT_TARGET on macOS.
    platform = spack.platforms.by_name(pkg.spec.architecture.platform)
    target = platform.target(pkg.spec.architecture.target)
    platform.setup_platform_environment(pkg, env_mods)

    tty.debug("setup_package: grabbing modifications from dependencies")
@@ -702,8 +829,28 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):

    # Load modules on an already clean environment, just before applying Spack's
    # own environment modifications. This ensures Spack controls CC/CXX/... variables.
    if need_compiler:
        tty.debug("setup_package: loading compiler modules")
        for mod in pkg.compiler.modules:
            load_module(mod)

    # kludge to handle cray mpich and libsci being automatically loaded by
    # PrgEnv modules on cray platform. Module unload does no damage when
    # unnecessary
    on_cray, _ = _on_cray()
    if on_cray and not dirty:
        for mod in ["cray-mpich", "cray-libsci"]:
            module("unload", mod)

    if target and target.module_name:
        load_module(target.module_name)

    load_external_modules(pkg)

    implicit_rpaths = pkg.compiler.implicit_rpaths()
    if implicit_rpaths:
        env_mods.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))

    # Make sure nothing's strange about the Spack environment.
    validate(env_mods, tty.warn)
    env_mods.apply_modifications()
@@ -893,12 +1040,6 @@ def set_all_package_py_globals(self):
            # This includes runtime dependencies, also runtime deps of direct build deps.
            set_package_py_globals(pkg, context=Context.RUN)

        # Looping over the set of packages a second time
        # ensures all globals are loaded into the module space prior to
        # any package setup. This guarantees package setup methods have
        # access to expected module level definitions such as "spack_cc"
        for dspec, flag in chain(self.external, self.nonexternal):
            pkg = dspec.package
            for spec in dspec.dependents():
                # Note: some specs have dependents that are unreachable from the root, so avoid
                # setting globals for those.
@@ -908,15 +1049,6 @@ def set_all_package_py_globals(self):
                pkg.setup_dependent_package(dependent_module, spec)
                dependent_module.propagate_changes_to_mro()

        if self.context == Context.BUILD:
            pkg = self.specs[0].package
            module = ModuleChangePropagator(pkg)
            # std_cmake_args is not sufficiently static to be defined
            # in set_package_py_globals and is deprecated, so it's handled
            # here as a special case
            module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
            module.propagate_changes_to_mro()

    def get_env_modifications(self) -> EnvironmentModifications:
        """Returns the environment variable modifications for the given input specs and context.
        Environment modifications include:
@@ -986,61 +1118,45 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
            env.prepend_path("PATH", bin_dir)


def get_cmake_prefix_path(pkg):
    # Note that unlike modifications_from_dependencies, this does not include
    # any edits to CMAKE_PREFIX_PATH defined in custom
    # setup_dependent_build_environment implementations of dependency packages
    build_deps = set(pkg.spec.dependencies(deptype=("build", "test")))
    link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
    build_link_deps = build_deps | link_deps
    spack_built = []
    externals = []
    # modifications_from_dependencies updates CMAKE_PREFIX_PATH by first
    # prepending all externals and then all non-externals
    for dspec in pkg.spec.traverse(root=False, order="post"):
        if dspec in build_link_deps:
            if dspec.external:
                externals.insert(0, dspec)
            else:
                spack_built.insert(0, dspec)

    ordered_build_link_deps = spack_built + externals
    cmake_prefix_path_entries = []
    for spec in ordered_build_link_deps:
        cmake_prefix_path_entries.extend(spec.package.cmake_prefix_paths)

    return filter_system_paths(cmake_prefix_path_entries)
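
The traversal above visits dependencies in post-order and pushes each one onto the front of its list, so the final prefix path lists Spack-built packages before externals. A tiny sketch of that ordering trick on plain tuples (names and data are illustrative only):

# Illustrative: post-order visit + insert(0, ...) yields dependency-first
# order, with Spack-built entries ahead of externals.
deps = [("openssl", False), ("zlib", True), ("cmake", True)]  # (name, external)

spack_built, externals = [], []
for name, external in deps:  # imagine this is a post-order traversal
    (externals if external else spack_built).insert(0, name)

print(spack_built + externals)  # ['openssl', 'cmake', 'zlib']
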

def _setup_pkg_and_run(
    serialized_pkg: "spack.subprocess_context.PackageInstallContext",
    function: Callable,
    kwargs: Dict,
    write_pipe: Connection,
    input_pipe: Optional[Connection],
    jsfd1: Optional[Connection],
    jsfd2: Optional[Connection],
    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
):
    """Main entry point in the child process for Spack builds.

    ``_setup_pkg_and_run`` is called by the child process created in
    ``start_build_process()``, and its main job is to run ``function()`` on behalf of
    some Spack installation (see :ref:`spack.installer.PackageInstaller._install_task`).

    The child process is passed a ``write_pipe``, on which it's expected to send one of
    the following:

    * ``StopPhase``: error raised by a build process indicating it's stopping at a
      particular build phase.

    * ``BaseException``: any exception raised by a child build process, which will be
      wrapped in ``ChildError`` (which adds a bunch of debug info and log context) and
      raised in the parent.

    * The return value of ``function()``, which can be anything (except an exception).
      This is returned to the caller.

    Note: ``jsfd1`` and ``jsfd2`` are passed solely to ensure that the child process
    does not close these file descriptors. Some ``multiprocessing`` backends will close
    them automatically in the child if they are not passed at process creation time.

    Arguments:
        serialized_pkg: Spack package install context object (serialized form of the
            package that we'll build in the child process).
        function: function to call in the child process; serialized_pkg is passed to
            this as the first argument.
        kwargs: additional keyword arguments to pass to ``function()``.
        write_pipe: multiprocessing ``Connection`` to the parent process, to which the
            child *must* send a result (or an error) back.
        input_multiprocess_fd: stdin from the parent (not passed currently on Windows)
        jsfd1: gmake Jobserver file descriptor 1.
        jsfd2: gmake Jobserver file descriptor 2.

    """

    context: str = kwargs.get("context", "build")

    try:
        # We are in the child process. Python sets sys.stdin to open(os.devnull) to prevent our
        # process and its parent from simultaneously reading from the original stdin. But, we
        # assume that the parent process is not going to read from it till we are done with the
        # child, so we undo Python's precaution. closefd=False since Connection has ownership.
        if input_pipe is not None:
            sys.stdin = os.fdopen(input_pipe.fileno(), closefd=False)
        # We are in the child process. Python sets sys.stdin to
        # open(os.devnull) to prevent our process and its parent from
        # simultaneously reading from the original stdin. But, we assume
        # that the parent process is not going to read from it till we
        # are done with the child, so we undo Python's precaution.
        if input_multiprocess_fd is not None:
            sys.stdin = os.fdopen(input_multiprocess_fd.fd)

        pkg = serialized_pkg.restore()

@@ -1052,18 +1168,17 @@ def _setup_pkg_and_run(
        return_value = function(pkg, kwargs)
        write_pipe.send(return_value)

    except spack.error.StopPhase as e:
    except StopPhase as e:
        # Do not create a full ChildError from this, it's not an error
        # it's a control statement.
        write_pipe.send(e)
    except BaseException as e:
    except BaseException:
        # catch ANYTHING that goes wrong in the child process
        exc_type, exc, tb = sys.exc_info()

        # Need to unwind the traceback in the child because traceback
        # objects can't be sent to the parent.
        exc_type = type(e)
        tb = e.__traceback__
        tb_string = "".join(traceback.format_exception(exc_type, e, tb))
        tb_string = traceback.format_exc()

        # build up some context from the offending package so we can
        # show that, too.
@@ -1080,8 +1195,8 @@ def _setup_pkg_and_run(
        elif context == "test":
            logfile = os.path.join(pkg.test_suite.stage, pkg.test_suite.test_log_name(pkg.spec))

        error_msg = str(e)
        if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
        error_msg = str(exc)
        if isinstance(exc, (spack.multimethod.NoSuchMethodError, AttributeError)):
            process = "test the installation" if context == "test" else "build from sources"
            error_msg = (
                "The '{}' package cannot find an attribute while trying to {}. "
@@ -1091,7 +1206,7 @@ def _setup_pkg_and_run(
                "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
            ).format(pkg.name, process, context)
            error_msg = colorize("@*R{{{}}}".format(error_msg))
            error_msg = "{}\n\n{}".format(str(e), error_msg)
            error_msg = "{}\n\n{}".format(str(exc), error_msg)

        # make a pickleable exception to send to parent.
        msg = "%s: %s" % (exc_type.__name__, error_msg)
@@ -1109,8 +1224,8 @@ def _setup_pkg_and_run(

    finally:
        write_pipe.close()
        if input_pipe is not None:
            input_pipe.close()
        if input_multiprocess_fd is not None:
            input_multiprocess_fd.close()


def start_build_process(pkg, function, kwargs):
@@ -1137,9 +1252,23 @@ def child_fun():
    If something goes wrong, the child process catches the error and
    passes it to the parent wrapped in a ChildError. The parent is
    expected to handle (or re-raise) the ChildError.

    This uses `multiprocessing.Process` to create the child process. The
    mechanism used to create the process differs on different operating
    systems and for different versions of Python. In some cases "fork"
    is used (i.e. the "fork" system call) and some cases it starts an
    entirely new Python interpreter process (in the docs this is referred
    to as the "spawn" start method). Breaking it down by OS:

    - Linux always uses fork.
    - Mac OS uses fork before Python 3.8 and "spawn" for 3.8 and after.
    - Windows always uses the "spawn" start method.

    For more information on `multiprocessing` child process creation
    mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
    """
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    input_fd = None
    input_multiprocess_fd = None
    jobserver_fd1 = None
    jobserver_fd2 = None

@@ -1148,13 +1277,14 @@ def child_fun():
    try:
        # Forward sys.stdin when appropriate, to allow toggling verbosity
        if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
            input_fd = Connection(os.dup(sys.stdin.fileno()))
            input_fd = os.dup(sys.stdin.fileno())
            input_multiprocess_fd = MultiProcessFd(input_fd)
        mflags = os.environ.get("MAKEFLAGS", False)
        if mflags:
            m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
            if m:
                jobserver_fd1 = Connection(int(m.group(1)))
                jobserver_fd2 = Connection(int(m.group(2)))
                jobserver_fd1 = MultiProcessFd(int(m.group(1)))
                jobserver_fd2 = MultiProcessFd(int(m.group(2)))

        p = multiprocessing.Process(
            target=_setup_pkg_and_run,
@@ -1163,7 +1293,7 @@ def child_fun():
                function,
                kwargs,
                write_pipe,
                input_fd,
                input_multiprocess_fd,
                jobserver_fd1,
                jobserver_fd2,
            ),
@@ -1183,8 +1313,8 @@ def child_fun():

    finally:
        # Close the input stream in the parent process
        if input_fd is not None:
            input_fd.close()
        if input_multiprocess_fd is not None:
            input_multiprocess_fd.close()

    def exitcode_msg(p):
        typ = "exit" if p.exitcode >= 0 else "signal"
@@ -1199,7 +1329,7 @@ def exitcode_msg(p):
    p.join()

    # If returns a StopPhase, raise it
    if isinstance(child_result, spack.error.StopPhase):
    if isinstance(child_result, StopPhase):
        # do not print
        raise child_result

@@ -1222,7 +1352,7 @@ def exitcode_msg(p):
    return child_result
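
The MAKEFLAGS parsing above recovers GNU make's jobserver pipe so a child build can share job slots. A standalone sketch of the same regex on a fabricated example value (note that make 4.4+ may instead export a fifo-style --jobserver-auth, which this pattern would not match):

import re

# Example value as older GNU make might export it; fabricated for illustration.
mflags = "-j8 --jobserver-auth=3,4"

m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
if m:
    jobserver_read_fd, jobserver_write_fd = int(m.group(1)), int(m.group(2))
    print(jobserver_read_fd, jobserver_write_fd)  # 3 4
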


CONTEXT_BASES = (spack.package_base.PackageBase, spack.builder.Builder)
CONTEXT_BASES = (spack.package_base.PackageBase, spack.build_systems._checks.BaseBuilder)


def get_package_context(traceback, context=3):
@@ -1375,7 +1505,7 @@ def long_message(self):
            out.write("  {0}\n".format(self.log_name))

        # Also output the test log path IF it exists
        if self.context != "test" and have_log:
        if self.context != "test":
            test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
            if os.path.isfile(test_log):
                out.write("\nSee test log for details:\n")
@@ -1408,6 +1538,17 @@ def _make_child_error(msg, module, name, traceback, log, log_type, context):
    return ChildError(msg, module, name, traceback, log, log_type, context)


class StopPhase(spack.error.SpackError):
    """Pickle-able exception to control stopped builds."""

    def __reduce__(self):
        return _make_stop_phase, (self.message, self.long_message)


def _make_stop_phase(msg, long_msg):
    return StopPhase(msg, long_msg)
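
Defining __reduce__ is what lets this exception survive the trip through the pipe: pickle records a factory callable plus its arguments instead of the live object. A minimal standalone illustration of the same pattern (class and names are hypothetical):

import pickle

class StoppedError(Exception):
    # Illustrative pickleable exception, mirroring the __reduce__ pattern above.
    def __init__(self, message):
        super().__init__(message)
        self.message = message

    def __reduce__(self):
        # (callable, args) tuple: pickle calls _rebuild(self.message) on load.
        return _rebuild, (self.message,)

def _rebuild(message):
    return StoppedError(message)

err = pickle.loads(pickle.dumps(StoppedError("stopping after configure")))
assert isinstance(err, StoppedError) and err.message == "stopping after configure"
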


def write_log_summary(out, log_type, log, last=None):
    errors, warnings = parse_log_events(log)
    nerr = len(errors)
@@ -1441,21 +1582,21 @@ class ModuleChangePropagator:

    _PROTECTED_NAMES = ("package", "current_module", "modules_in_mro", "_set_attributes")

    def __init__(self, package: spack.package_base.PackageBase) -> None:
    def __init__(self, package):
        self._set_self_attributes("package", package)
        self._set_self_attributes("current_module", package.module)

        #: Modules for the classes in the MRO up to PackageBase
        modules_in_mro = []
        for cls in package.__class__.__mro__:
            module = getattr(cls, "module", None)
        for cls in inspect.getmro(type(package)):
            module = cls.module

            if module is None or module is spack.package_base:
                break

            if module is self.current_module:
            if module == self.current_module:
                continue

            if module == spack.package_base:
                break

            modules_in_mro.append(module)
        self._set_self_attributes("modules_in_mro", modules_in_mro)
        self._set_self_attributes("_set_attributes", {})
@@ -8,8 +8,7 @@
import llnl.util.lang

import spack.builder
import spack.error
import spack.phase_callbacks
import spack.installer
import spack.relocate
import spack.spec
import spack.store
@@ -35,7 +34,7 @@ def check_paths(path_list, filetype, predicate):
            if not predicate(abs_path):
                msg = "Install failed for {0}. No such {1} in prefix: {2}"
                msg = msg.format(pkg.name, filetype, path)
                raise spack.error.InstallError(msg)
                raise spack.installer.InstallError(msg)

    check_paths(pkg.sanity_check_is_file, "file", os.path.isfile)
    check_paths(pkg.sanity_check_is_dir, "directory", os.path.isdir)
@@ -43,7 +42,7 @@ def check_paths(path_list, filetype, predicate):
    ignore_file = llnl.util.lang.match_predicate(spack.store.STORE.layout.hidden_file_regexes)
    if all(map(ignore_file, os.listdir(pkg.prefix))):
        msg = "Install failed for {0}. Nothing was installed!"
        raise spack.error.InstallError(msg.format(pkg.name))
        raise spack.installer.InstallError(msg.format(pkg.name))


def apply_macos_rpath_fixups(builder: spack.builder.Builder):
@@ -64,7 +63,7 @@ def apply_macos_rpath_fixups(builder: spack.builder.Builder):


def ensure_build_dependencies_or_raise(
    spec: spack.spec.Spec, dependencies: List[str], error_msg: str
    spec: spack.spec.Spec, dependencies: List[spack.spec.Spec], error_msg: str
):
    """Ensure that some build dependencies are present in the concrete spec.

@@ -72,7 +71,7 @@ def ensure_build_dependencies_or_raise(

    Args:
        spec: concrete spec to be checked.
        dependencies: list of package names of required build dependencies
        dependencies: list of abstract specs to be satisfied
        error_msg: brief error message to be prepended to a longer description

    Raises:
@@ -128,8 +127,8 @@ def execute_install_time_tests(builder: spack.builder.Builder):
    builder.pkg.tester.phase_tests(builder, "install", builder.install_time_test_callbacks)


class BuilderWithDefaults(spack.builder.Builder):
    """Base class for all specific builders with common callbacks registered."""
class BaseBuilder(spack.builder.Builder):
    """Base class for builders to register common checks"""

    # Check that self.prefix is there after installation
    spack.phase_callbacks.run_after("install")(sanity_check_prefix)
    spack.builder.run_after("install")(sanity_check_prefix)
@@ -2,11 +2,10 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os

import llnl.util.filesystem as fs

import spack.directives
import spack.package_base
import spack.util.executable

from .autotools import AutotoolsBuilder, AutotoolsPackage
@@ -47,12 +46,18 @@ class AspellDictPackage(AutotoolsPackage):
    #: Override the default autotools builder
    AutotoolsBuilder = AspellBuilder

    def patch(self):
    def view_destination(self, view):
        aspell_spec = self.spec["aspell"]
        if view.get_projection_for_spec(aspell_spec) != aspell_spec.prefix:
            raise spack.package_base.ExtensionError(
                "aspell does not support non-global extensions"
            )
        aspell = aspell_spec.command
        dictdir = aspell("dump", "config", "dict-dir", output=str).strip()
        datadir = aspell("dump", "config", "data-dir", output=str).strip()
        dictdir = os.path.relpath(dictdir, aspell_spec.prefix)
        datadir = os.path.relpath(datadir, aspell_spec.prefix)
        fs.filter_file(r"^dictdir=.*$", f"dictdir=/{dictdir}", "configure")
        fs.filter_file(r"^datadir=.*$", f"datadir=/{datadir}", "configure")
        return aspell("dump", "config", "dict-dir", output=str).strip()

    def view_source(self):
        return self.prefix.lib

    def patch(self):
        fs.filter_file(r"^dictdir=.*$", "dictdir=/lib", "configure")
        fs.filter_file(r"^datadir=.*$", "datadir=/lib", "configure")
@@ -2,23 +2,19 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import inspect
import os
import os.path
import stat
import subprocess
from typing import Callable, List, Optional, Set, Tuple, Union
from typing import List

import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.build_environment
import spack.builder
import spack.compilers.libraries
import spack.error
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when
from spack.operating_systems.mac_os import macos_version
@@ -26,7 +22,7 @@
from spack.version import Version

from ._checks import (
    BuilderWithDefaults,
    BaseBuilder,
    apply_macos_rpath_fixups,
    ensure_build_dependencies_or_raise,
    execute_build_time_tests,
@@ -73,14 +69,14 @@ def flags_to_build_system_args(self, flags):
    # Legacy methods (used by too many packages to change them,
    # need to forward to the builder)
    def enable_or_disable(self, *args, **kwargs):
        return spack.builder.create(self).enable_or_disable(*args, **kwargs)
        return self.builder.enable_or_disable(*args, **kwargs)

    def with_or_without(self, *args, **kwargs):
        return spack.builder.create(self).with_or_without(*args, **kwargs)
        return self.builder.with_or_without(*args, **kwargs)


@spack.builder.builder("autotools")
class AutotoolsBuilder(BuilderWithDefaults):
class AutotoolsBuilder(BaseBuilder):
    """The autotools builder encodes the default way of installing software built
    with autotools. It has four phases that can be overridden, if need be:

@@ -161,7 +157,7 @@ class AutotoolsBuilder(BuilderWithDefaults):
    install_libtool_archives = False

    @property
    def patch_config_files(self) -> bool:
    def patch_config_files(self):
        """Whether to update old ``config.guess`` and ``config.sub`` files
        distributed with the tarball.

@@ -181,7 +177,7 @@ def patch_config_files(self) -> bool:
        )

    @property
    def _removed_la_files_log(self) -> str:
    def _removed_la_files_log(self):
        """File containing the list of removed libtool archives"""
        build_dir = self.build_directory
        if not os.path.isabs(self.build_directory):
@@ -189,15 +185,15 @@ def _removed_la_files_log(self) -> str:
        return os.path.join(build_dir, "removed_la_files.txt")

    @property
    def archive_files(self) -> List[str]:
    def archive_files(self):
        """Files to archive for packages based on autotools"""
        files = [os.path.join(self.build_directory, "config.log")]
        if not self.install_libtool_archives:
            files.append(self._removed_la_files_log)
        return files

    @spack.phase_callbacks.run_after("autoreconf")
    def _do_patch_config_files(self) -> None:
    @spack.builder.run_after("autoreconf")
    def _do_patch_config_files(self):
        """Some packages ship with older config.guess/config.sub files and need to
        have these updated when installed on a newer architecture.

@@ -253,7 +249,7 @@ def runs_ok(script_abs_path):

        # An external gnuconfig may not have a prefix.
        if gnuconfig_dir is None:
            raise spack.error.InstallError(
            raise spack.build_environment.InstallError(
                "Spack could not find substitutes for GNU config files because no "
                "prefix is available for the `gnuconfig` package. Make sure you set a "
                "prefix path instead of modules for external `gnuconfig`."
@@ -273,7 +269,7 @@ def runs_ok(script_abs_path):
            msg += (
                " or the `gnuconfig` package prefix is misconfigured as" " an external package"
            )
            raise spack.error.InstallError(msg)
            raise spack.build_environment.InstallError(msg)

        # Filter working substitutes
        candidates = [f for f in candidates if runs_ok(f)]
@@ -298,7 +294,9 @@ def runs_ok(script_abs_path):
            and set the prefix to the directory containing the `config.guess` and
            `config.sub` files.
            """
            raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.pkg.name))
            raise spack.build_environment.InstallError(
                msg.format(", ".join(to_be_found), self.name)
            )

        # Copy the good files over the bad ones
        for abs_path in to_be_patched:
@@ -308,8 +306,8 @@ def runs_ok(script_abs_path):
            fs.copy(substitutes[name], abs_path)
            os.chmod(abs_path, mode)

    @spack.phase_callbacks.run_before("configure")
    def _patch_usr_bin_file(self) -> None:
    @spack.builder.run_before("configure")
    def _patch_usr_bin_file(self):
        """On NixOS file is not available in /usr/bin/file. Patch configure
        scripts to use file from path."""

@@ -320,8 +318,8 @@ def _patch_usr_bin_file(self) -> None:
            with fs.keep_modification_time(*x.filenames):
                x.filter(regex="/usr/bin/file", repl="file", string=True)

    @spack.phase_callbacks.run_before("configure")
    def _set_autotools_environment_variables(self) -> None:
    @spack.builder.run_before("configure")
    def _set_autotools_environment_variables(self):
        """Many autotools builds use a version of mknod.m4 that fails when
        running as root unless FORCE_UNSAFE_CONFIGURE is set to 1.

@@ -334,8 +332,8 @@ def _set_autotools_environment_variables(self) -> None:
        """
        os.environ["FORCE_UNSAFE_CONFIGURE"] = "1"

    @spack.phase_callbacks.run_before("configure")
    def _do_patch_libtool_configure(self) -> None:
    @spack.builder.run_before("configure")
    def _do_patch_libtool_configure(self):
        """Patch bugs that propagate from libtool macros into "configure" and
        further into "libtool". Note that patches that can be fixed by patching
        "libtool" directly should be implemented in the _do_patch_libtool method
@@ -362,8 +360,8 @@ def _do_patch_libtool_configure(self) -> None:
        # Support Libtool 2.4.2 and older:
        x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')

    @spack.phase_callbacks.run_after("configure")
    def _do_patch_libtool(self) -> None:
    @spack.builder.run_after("configure")
    def _do_patch_libtool(self):
        """If configure generates a "libtool" script that does not correctly
        detect the compiler (and patch_libtool is set), patch in the correct
        values for libtool variables.
@@ -397,44 +395,33 @@ def _do_patch_libtool(self) -> None:
            markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper())

        # Replace empty linker flag prefixes:
        if self.spec.satisfies("%nag"):
        if self.pkg.compiler.name == "nag":
            # Nag is mixed with gcc and g++, which are recognized correctly.
            # Therefore, we change only Fortran values:
            nag_pkg = self.spec["fortran"].package
            for tag in ["fc", "f77"]:
                marker = markers[tag]
                x.filter(
                    regex='^wl=""$',
                    repl=f'wl="{nag_pkg.linker_arg}"',
                    start_at=f"# ### BEGIN {marker}",
                    stop_at=f"# ### END {marker}",
                    repl='wl="{0}"'.format(self.pkg.compiler.linker_arg),
                    start_at="# ### BEGIN {0}".format(marker),
                    stop_at="# ### END {0}".format(marker),
                )
        else:
            compiler_spec = spack.compilers.libraries.compiler_spec(self.spec)
            if compiler_spec:
                x.filter(regex='^wl=""$', repl='wl="{0}"'.format(compiler_spec.package.linker_arg))
            x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.pkg.compiler.linker_arg))

        # Replace empty PIC flag values:
        for compiler, marker in markers.items():
            if compiler == "cc":
                language = "c"
            elif compiler == "cxx":
                language = "cxx"
            else:
                language = "fortran"

            if language not in self.spec:
                continue

        for cc, marker in markers.items():
            x.filter(
                regex='^pic_flag=""$',
                repl=f'pic_flag="{self.spec[language].package.pic_flag}"',
                start_at=f"# ### BEGIN {marker}",
                stop_at=f"# ### END {marker}",
                repl='pic_flag="{0}"'.format(
                    getattr(self.pkg.compiler, "{0}_pic_flag".format(cc))
                ),
                start_at="# ### BEGIN {0}".format(marker),
                stop_at="# ### END {0}".format(marker),
            )

        # Other compiler-specific patches:
        if self.spec.satisfies("%fj"):
        if self.pkg.compiler.name == "fj":
            x.filter(regex="-nostdlib", repl="", string=True)
            rehead = r"/\S*/"
            for o in [
@@ -447,7 +434,7 @@ def _do_patch_libtool(self) -> None:
                r"crtendS\.o",
            ]:
                x.filter(regex=(rehead + o), repl="")
        elif self.spec.satisfies("%nag"):
        elif self.pkg.compiler.name == "nag":
            for tag in ["fc", "f77"]:
                marker = markers[tag]
                start_at = "# ### BEGIN {0}".format(marker)
@@ -522,64 +509,27 @@ def _do_patch_libtool(self) -> None:
        )

    @property
    def configure_directory(self) -> str:
    def configure_directory(self):
        """Return the directory where 'configure' resides."""
        return self.pkg.stage.source_path

    @property
    def configure_abs_path(self) -> str:
    def configure_abs_path(self):
        # Absolute path to configure
        configure_abs_path = os.path.join(os.path.abspath(self.configure_directory), "configure")
        return configure_abs_path

    @property
    def build_directory(self) -> str:
    def build_directory(self):
        """Override to provide another place to build the package"""
        return self.configure_directory

    @spack.phase_callbacks.run_before("autoreconf")
    def delete_configure_to_force_update(self) -> None:
    @spack.builder.run_before("autoreconf")
    def delete_configure_to_force_update(self):
        if self.force_autoreconf:
            fs.force_remove(self.configure_abs_path)

    @property
    def autoreconf_search_path_args(self) -> List[str]:
"""Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
|
||||
of build deps, skips the default path of automake, move external include
|
||||
flags to the back, since they might pull in unrelated m4 files shadowing
|
||||
spack dependencies."""
|
||||
return _autoreconf_search_path_args(self.spec)
|
||||
|
    @spack.phase_callbacks.run_after("autoreconf")
    def set_configure_or_die(self) -> None:
        """Ensure the presence of a "configure" script, or raise. If the "configure"
        is found, a module level attribute is set.

        Raises:
            RuntimeError: if the "configure" script is not found
        """
        # Check if the "configure" script is there. If not raise a RuntimeError.
        if not os.path.exists(self.configure_abs_path):
            msg = "configure script not found in {0}"
            raise RuntimeError(msg.format(self.configure_directory))

        # Monkey-patch the configure script in the corresponding module
        globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
        globals_for_pkg.configure = Executable(self.configure_abs_path)
        globals_for_pkg.propagate_changes_to_mro()

    def configure_args(self) -> List[str]:
        """Return the list of all the arguments that must be passed to configure,
        except ``--prefix`` which will be pre-pended to the list.
        """
        return []

    def autoreconf(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def autoreconf(self, pkg, spec, prefix):
        """Not needed usually, configure should be already there"""

        # If configure exists nothing needs to be done
@@ -599,19 +549,45 @@ def autoreconf(
        tty.warn("* a custom AUTORECONF phase in the package *")
        tty.warn("*********************************************************")
        with fs.working_dir(self.configure_directory):
            m = inspect.getmodule(self.pkg)
            # This line is what is needed most of the time
            # --install, --verbose, --force
            autoreconf_args = ["-ivf"]
            autoreconf_args += self.autoreconf_search_path_args
            autoreconf_args += self.autoreconf_extra_args
            self.pkg.module.autoreconf(*autoreconf_args)
            m.autoreconf(*autoreconf_args)

    def configure(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    @property
    def autoreconf_search_path_args(self):
"""Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
|
||||
of build deps, skips the default path of automake, move external include
|
||||
flags to the back, since they might pull in unrelated m4 files shadowing
|
||||
spack dependencies."""
|
||||
return _autoreconf_search_path_args(self.spec)
|
||||
|
    @spack.builder.run_after("autoreconf")
    def set_configure_or_die(self):
        """Ensure the presence of a "configure" script, or raise. If the "configure"
        is found, a module level attribute is set.

        Raises:
            RuntimeError: if the "configure" script is not found
        """
        # Check if the "configure" script is there. If not raise a RuntimeError.
        if not os.path.exists(self.configure_abs_path):
            msg = "configure script not found in {0}"
            raise RuntimeError(msg.format(self.configure_directory))

        # Monkey-patch the configure script in the corresponding module
        inspect.getmodule(self.pkg).configure = Executable(self.configure_abs_path)

    def configure_args(self):
        """Return the list of all the arguments that must be passed to configure,
        except ``--prefix`` which will be pre-pended to the list.
        """
        return []

    def configure(self, pkg, spec, prefix):
        """Run "configure", with the arguments specified by the builder and an
        appropriately set prefix.
        """
@@ -620,64 +596,51 @@ def configure(
        options += self.configure_args()

        with fs.working_dir(self.build_directory, create=True):
            pkg.module.configure(*options)
            inspect.getmodule(self.pkg).configure(*options)

    def build(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def build(self, pkg, spec, prefix):
        """Run "make" on the build targets specified by the builder."""
        # See https://autotools.io/automake/silent.html
        params = ["V=1"]
        params += self.build_targets
        with fs.working_dir(self.build_directory):
            pkg.module.make(*params)
            inspect.getmodule(self.pkg).make(*params)

    def install(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def install(self, pkg, spec, prefix):
        """Run "make" on the install targets specified by the builder."""
        with fs.working_dir(self.build_directory):
            pkg.module.make(*self.install_targets)
            inspect.getmodule(self.pkg).make(*self.install_targets)

    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
    spack.builder.run_after("build")(execute_build_time_tests)

    def check(self) -> None:
    def check(self):
        """Run "make" on the ``test`` and ``check`` targets, if found."""
        with fs.working_dir(self.build_directory):
            self.pkg._if_make_target_execute("test")
            self.pkg._if_make_target_execute("check")

    def _activate_or_not(
        self,
        name: str,
        activation_word: str,
        deactivation_word: str,
        activation_value: Optional[Union[Callable, str]] = None,
        variant=None,
    ) -> List[str]:
        self, name, activation_word, deactivation_word, activation_value=None, variant=None
    ):
"""This function contain the current implementation details of
|
||||
:meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without` and
|
||||
:meth:`~spack.build_systems.autotools.AutotoolsBuilder.enable_or_disable`.
|
||||
|
||||
        Args:
            name: name of the option that is being activated or not
            activation_word: the default activation word ('with' in the case of
                ``with_or_without``)
            deactivation_word: the default deactivation word ('without' in the case of
                ``with_or_without``)
            activation_value: callable that accepts a single value. This value is either one of the
                allowed values for a multi-valued variant or the name of a bool-valued variant.
            name (str): name of the option that is being activated or not
            activation_word (str): the default activation word ('with' in the
                case of ``with_or_without``)
            deactivation_word (str): the default deactivation word ('without'
                in the case of ``with_or_without``)
            activation_value (typing.Callable): callable that accepts a single
                value. This value is either one of the allowed values for a
                multi-valued variant or the name of a bool-valued variant.
                Returns the parameter to be used when the value is activated.

                The special value "prefix" can also be assigned and will return
                The special value 'prefix' can also be assigned and will return
                ``spec[name].prefix`` as activation parameter.
            variant: name of the variant that is being processed (if different from option name)
            variant (str): name of the variant that is being processed
                (if different from option name)

        Examples:

@@ -685,19 +648,19 @@ def _activate_or_not(

            .. code-block:: python

variant("foo", values=("x", "y"), description=")
|
||||
variant("bar", default=True, description=")
|
||||
variant("ba_z", default=True, description=")
|
||||
variant('foo', values=('x', 'y'), description='')
|
||||
variant('bar', default=True, description='')
|
||||
variant('ba_z', default=True, description='')
|
||||
|
||||
            calling this function like:

            .. code-block:: python

               _activate_or_not(
                   "foo", "with", "without", activation_value="prefix"
                   'foo', 'with', 'without', activation_value='prefix'
               )
               _activate_or_not("bar", "with", "without")
               _activate_or_not("ba-z", "with", "without", variant="ba_z")
               _activate_or_not('bar', 'with', 'without')
               _activate_or_not('ba-z', 'with', 'without', variant='ba_z')

            will generate the following configuration options:

@@ -717,16 +680,17 @@ def _activate_or_not(
        Raises:
            KeyError: if name is not among known variants
        """
        spec: spack.spec.Spec = self.pkg.spec
        args: List[str] = []
        spec = self.pkg.spec
        args = []

        if activation_value == "prefix":
            activation_value = lambda x: spec[x].prefix

        variant = variant or name

        # Defensively look that the name passed as argument is among variants
        if not self.pkg.has_variant(variant):
        # Defensively look that the name passed as argument is among
        # variants
        if variant not in self.pkg.variants:
            msg = '"{0}" is not a variant of "{1}"'
            raise KeyError(msg.format(variant, self.pkg.name))

@@ -735,24 +699,34 @@ def _activate_or_not(

        # Create a list of pairs. Each pair includes a configuration
        # option and whether or not that option is activated
        vdef = self.pkg.get_variant(variant)
        if set(vdef.values) == set((True, False)):  # type: ignore
        variant_desc, _ = self.pkg.variants[variant]
        if set(variant_desc.values) == set((True, False)):
            # A BoolValuedVariant carries information about a single option.
            # Nonetheless, for uniformity of treatment we'll package them
            # in an iterable of one element.
            options = [(name, f"+{variant}" in spec)]
            condition = "+{name}".format(name=variant)
            options = [(name, condition in spec)]
        else:
            condition = "{variant}={value}"
            # "feature_values" is used to track values which correspond to
            # features which can be enabled or disabled as understood by the
            # package's build system. It excludes values which have special
            # meanings and do not correspond to features (e.g. "none")
            feature_values = getattr(vdef.values, "feature_values", None) or vdef.values
            options = [(v, f"{variant}={v}" in spec) for v in feature_values]  # type: ignore
            feature_values = (
                getattr(variant_desc.values, "feature_values", None) or variant_desc.values
            )

            options = [
                (value, condition.format(variant=variant, value=value) in spec)
                for value in feature_values
            ]

        # For each allowed value in the list of values
        for option_value, activated in options:
            # Search for an override in the package for this value
            override_name = f"{activation_word}_or_{deactivation_word}_{option_value}"
            override_name = "{0}_or_{1}_{2}".format(
                activation_word, deactivation_word, option_value
            )
            line_generator = getattr(self, override_name, None) or getattr(
                self.pkg, override_name, None
            )
@@ -761,24 +735,19 @@ def _activate_or_not(

            def _default_generator(is_activated):
                if is_activated:
                    line = f"--{activation_word}-{option_value}"
                    line = "--{0}-{1}".format(activation_word, option_value)
                    if activation_value is not None and activation_value(
                        option_value
                    ):  # NOQA=ignore=E501
                        line = f"{line}={activation_value(option_value)}"
                        line += "={0}".format(activation_value(option_value))
                    return line
                return f"--{deactivation_word}-{option_value}"
                return "--{0}-{1}".format(deactivation_word, option_value)

            line_generator = _default_generator
            args.append(line_generator(activated))
        return args

    def with_or_without(
        self,
        name: str,
        activation_value: Optional[Union[Callable, str]] = None,
        variant: Optional[str] = None,
    ) -> List[str]:
    def with_or_without(self, name, activation_value=None, variant=None):
        """Inspects a variant and returns the arguments that activate
        or deactivate the selected feature(s) for the configure options.

@@ -793,11 +762,12 @@ def with_or_without(
        ``variant=value`` is in the spec.

        Args:
            name: name of a valid multi-valued variant
            activation_value: callable that accepts a single value and returns the parameter to be
                used leading to an entry of the type ``--with-{name}={parameter}``.
            name (str): name of a valid multi-valued variant
            activation_value (typing.Callable): callable that accepts a single
                value and returns the parameter to be used leading to an entry
                of the type ``--with-{name}={parameter}``.

                The special value "prefix" can also be assigned and will return
                The special value 'prefix' can also be assigned and will return
                ``spec[name].prefix`` as activation parameter.

        Returns:
@@ -805,22 +775,18 @@ def with_or_without(
        """
        return self._activate_or_not(name, "with", "without", activation_value, variant)

    def enable_or_disable(
        self,
        name: str,
        activation_value: Optional[Union[Callable, str]] = None,
        variant: Optional[str] = None,
    ) -> List[str]:
    def enable_or_disable(self, name, activation_value=None, variant=None):
        """Same as
        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`
        but substitute ``with`` with ``enable`` and ``without`` with ``disable``.

        Args:
            name: name of a valid multi-valued variant
            activation_value: if present accepts a single value and returns the parameter to be
                used leading to an entry of the type ``--enable-{name}={parameter}``
            name (str): name of a valid multi-valued variant
            activation_value (typing.Callable): if present accepts a single value
                and returns the parameter to be used leading to an entry of the
                type ``--enable-{name}={parameter}``

                The special value "prefix" can also be assigned and will return
                The special value 'prefix' can also be assigned and will return
                ``spec[name].prefix`` as activation parameter.

        Returns:
@@ -828,15 +794,15 @@ def enable_or_disable(
        """
        return self._activate_or_not(name, "enable", "disable", activation_value, variant)

    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
    spack.builder.run_after("install")(execute_install_time_tests)

    def installcheck(self) -> None:
    def installcheck(self):
        """Run "make" on the ``installcheck`` target, if found."""
        with fs.working_dir(self.build_directory):
            self.pkg._if_make_target_execute("installcheck")

    @spack.phase_callbacks.run_after("install")
    def remove_libtool_archives(self) -> None:
    @spack.builder.run_after("install")
    def remove_libtool_archives(self):
        """Remove all .la files in prefix sub-folders if the package sets
        ``install_libtool_archives`` to be False.
        """
@@ -858,13 +824,12 @@ def setup_build_environment(self, env):
            env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")

    # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)


def _autoreconf_search_path_args(spec: spack.spec.Spec) -> List[str]:
    dirs_seen: Set[Tuple[int, int]] = set()
    flags_spack: List[str] = []
    flags_external: List[str] = []
def _autoreconf_search_path_args(spec):
    dirs_seen = set()
    flags_spack, flags_external = [], []

    # We don't want to add an include flag for automake's default search path.
    for automake in spec.dependencies(name="automake", deptype="build"):
@@ -10,8 +10,8 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.phase_callbacks
from spack.directives import depends_on
import spack.build_environment
import spack.builder

from .cmake import CMakeBuilder, CMakePackage

@@ -69,7 +69,12 @@ class CachedCMakeBuilder(CMakeBuilder):

    @property
    def cache_name(self):
        return f"{self.pkg.name}-{self.spec.architecture.platform}-{self.spec.dag_hash()}.cmake"
        return "{0}-{1}-{2}@{3}.cmake".format(
            self.pkg.name,
            self.pkg.spec.architecture,
            self.pkg.spec.compiler.name,
            self.pkg.spec.compiler.version,
        )

    @property
    def cache_path(self):
@@ -84,7 +89,7 @@ def define_cmake_cache_from_variant(self, cmake_var, variant=None, comment=""):
        if variant is None:
            variant = cmake_var.lower()

        if not self.pkg.has_variant(variant):
        if variant not in self.pkg.variants:
            raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))

        if variant not in self.pkg.spec.variants:
@@ -112,9 +117,7 @@ def initconfig_compiler_entries(self):
        # Fortran compiler is optional
        if "FC" in os.environ:
            spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"])
            system_fc_entry = cmake_cache_path(
                "CMAKE_Fortran_COMPILER", self.spec["fortran"].package.fortran
            )
            system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.pkg.compiler.fc)
        else:
            spack_fc_entry = "# No Fortran compiler defined in spec"
            system_fc_entry = "# No Fortran compiler defined in spec"
@@ -130,8 +133,8 @@ def initconfig_compiler_entries(self):
            "    " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]),
            "    " + spack_fc_entry,
            "else()\n",
            "    " + cmake_cache_path("CMAKE_C_COMPILER", self.spec["c"].package.cc),
            "    " + cmake_cache_path("CMAKE_CXX_COMPILER", self.spec["cxx"].package.cxx),
            "    " + cmake_cache_path("CMAKE_C_COMPILER", self.pkg.compiler.cc),
            "    " + cmake_cache_path("CMAKE_CXX_COMPILER", self.pkg.compiler.cxx),
            "    " + system_fc_entry,
            "endif()\n",
        ]
@@ -159,9 +162,7 @@ def initconfig_compiler_entries(self):
            ld_flags = " ".join(flags["ldflags"])
            ld_format_string = "CMAKE_{0}_LINKER_FLAGS"
            # CMake has separate linker arguments for types of builds.
            # 'ldflags' should not be used with CMAKE_STATIC_LINKER_FLAGS which
            # is used by the archiver, so don't include "STATIC" in this loop:
            for ld_type in ["EXE", "MODULE", "SHARED"]:
            for ld_type in ["EXE", "MODULE", "SHARED", "STATIC"]:
                ld_string = ld_format_string.format(ld_type)
                entries.append(cmake_cache_string(ld_string, ld_flags))

@@ -190,10 +191,7 @@ def initconfig_mpi_entries(self):

        entries.append(cmake_cache_path("MPI_C_COMPILER", spec["mpi"].mpicc))
        entries.append(cmake_cache_path("MPI_CXX_COMPILER", spec["mpi"].mpicxx))

        # not all MPIs have Fortran wrappers
        if hasattr(spec["mpi"], "mpifc"):
            entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))
        entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))

        # Check for slurm
        using_slurm = False
@@ -297,6 +295,18 @@ def initconfig_hardware_entries(self):
    def std_initconfig_entries(self):
        cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
        cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
        cmake_rpaths_env = spack.build_environment.get_rpaths(self.pkg)
        cmake_rpaths_path = ";".join(cmake_rpaths_env)
        complete_rpath_list = cmake_rpaths_path
        if "SPACK_COMPILER_EXTRA_RPATHS" in os.environ:
            spack_extra_rpaths_env = os.environ["SPACK_COMPILER_EXTRA_RPATHS"]
            spack_extra_rpaths_path = spack_extra_rpaths_env.replace(os.pathsep, ";")
            complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_extra_rpaths_path)

        if "SPACK_COMPILER_IMPLICIT_RPATHS" in os.environ:
            spack_implicit_rpaths_env = os.environ["SPACK_COMPILER_IMPLICIT_RPATHS"]
            spack_implicit_rpaths_path = spack_implicit_rpaths_env.replace(os.pathsep, ";")
            complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_implicit_rpaths_path)

        return [
            "#------------------{0}".format("-" * 60),
@@ -306,6 +316,8 @@ def std_initconfig_entries(self):
            "#------------------{0}\n".format("-" * 60),
            cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
            cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
            cmake_cache_string("CMAKE_BUILD_RPATH", complete_rpath_list),
            cmake_cache_string("CMAKE_INSTALL_RPATH", complete_rpath_list),
            self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
        ]

@@ -333,7 +345,7 @@ def std_cmake_args(self):
            args.extend(["-C", self.cache_path])
        return args

    @spack.phase_callbacks.run_after("install")
    @spack.builder.run_after("install")
    def install_cmake_cache(self):
        fs.mkdirp(self.pkg.spec.prefix.share.cmake)
        fs.install(self.cache_path, self.pkg.spec.prefix.share.cmake)
@@ -350,10 +362,6 @@ class CachedCMakePackage(CMakePackage):

    CMakeBuilder = CachedCMakeBuilder

    # These dependencies are assumed in the builder
    depends_on("c", type="build")
    depends_on("cxx", type="build")

    def flag_handler(self, name, flags):
        if name in ("cflags", "cxxflags", "cppflags", "fflags"):
            return None, None, None  # handled in the cmake cache
@@ -3,15 +3,16 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import inspect

import llnl.util.filesystem as fs

import spack.builder
import spack.package_base
import spack.phase_callbacks
from spack.directives import build_system, depends_on
from spack.multimethod import when

from ._checks import BuilderWithDefaults, execute_install_time_tests
from ._checks import BaseBuilder, execute_install_time_tests


class CargoPackage(spack.package_base.PackageBase):
@@ -28,7 +29,7 @@ class CargoPackage(spack.package_base.PackageBase):


@spack.builder.builder("cargo")
class CargoBuilder(BuilderWithDefaults):
class CargoBuilder(BaseBuilder):
    """The Cargo builder encodes the most common way of building software with
    a rust Cargo.toml file. It has two phases that can be overridden, if need be:

@@ -71,16 +72,18 @@ def check_args(self):
    def build(self, pkg, spec, prefix):
        """Runs ``cargo install`` in the source directory"""
        with fs.working_dir(self.build_directory):
            pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)
            inspect.getmodule(pkg).cargo(
                "install", "--root", "out", "--path", ".", *self.build_args
            )

    def install(self, pkg, spec, prefix):
        """Copy build files into package prefix."""
        with fs.working_dir(self.build_directory):
            fs.install_tree("out", prefix)

    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
    spack.builder.run_after("install")(execute_install_time_tests)

    def check(self):
        """Run "cargo test"."""
        with fs.working_dir(self.build_directory):
            self.pkg.module.cargo("test", *self.check_args)
            inspect.getmodule(self.pkg).cargo("test", *self.check_args)
@@ -3,29 +3,24 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc
import inspect
import os
import pathlib
import platform
import re
import sys
from itertools import chain
from typing import Any, List, Optional, Set, Tuple
from typing import List, Optional, Tuple

import llnl.util.filesystem as fs
from llnl.util.lang import stable_partition

import spack.build_environment
import spack.builder
import spack.deptypes as dt
import spack.error
import spack.package_base
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when
from spack.util.environment import filter_system_paths

from ._checks import BuilderWithDefaults, execute_build_time_tests
from ._checks import BaseBuilder, execute_build_time_tests

# Regex to extract the primary generator from the CMake generator
# string.
@@ -51,9 +46,9 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]

    python_executable = pkg.spec["python"].command.path
    args.extend(
        [
            define("PYTHON_EXECUTABLE", python_executable),
            define("Python_EXECUTABLE", python_executable),
            define("Python3_EXECUTABLE", python_executable),
            CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
            CMakeBuilder.define("Python_EXECUTABLE", python_executable),
            CMakeBuilder.define("Python3_EXECUTABLE", python_executable),
        ]
    )

@@ -88,7 +83,7 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[

        ipo = False

    if cmake.satisfies("@3.9:"):
        args.append(define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
        args.append(CMakeBuilder.define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))

    # Disable Package Registry: export(PACKAGE) may put files in the user's home directory, and
    # find_package may search there. This is not what we want.

@@ -96,36 +91,25 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[

    # Do not populate CMake User Package Registry
    if cmake.satisfies("@3.15:"):
        # see https://cmake.org/cmake/help/latest/policy/CMP0090.html
        args.append(define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
        args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
    elif cmake.satisfies("@3.1:"):
        # see https://cmake.org/cmake/help/latest/variable/CMAKE_EXPORT_NO_PACKAGE_REGISTRY.html
        args.append(define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))
        args.append(CMakeBuilder.define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))

    # Do not use CMake User/System Package Registry
    # https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#disabling-the-package-registry
    if cmake.satisfies("@3.16:"):
        args.append(define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
        args.append(CMakeBuilder.define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
    elif cmake.satisfies("@3.1:3.15"):
        args.append(define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
        args.append(define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))
        args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
        args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))

    # Export a compilation database if supported.
    if _supports_compilation_databases(pkg):
        args.append(define("CMAKE_EXPORT_COMPILE_COMMANDS", True))

    # Enable MACOSX_RPATH by default when cmake_minimum_required < 3
    # https://cmake.org/cmake/help/latest/policy/CMP0042.html
    if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
        args.append(define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))

    # Disable find package's config mode for versions of Boost that
    # didn't provide it. See https://github.com/spack/spack/issues/20169
    # and https://cmake.org/cmake/help/latest/module/FindBoost.html
    if pkg.spec.satisfies("^boost@:1.69.0"):
        args.append(define("Boost_NO_BOOST_CMAKE", True))
        args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
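As a quick sanity check of what these defaults contribute, the registry-related entries above expand to ordinary -D strings via the define helper whose implementation appears later in this diff. A small self-contained illustration (assumes CMakeBuilder is imported from spack.build_systems.cmake):

    # Both lines follow directly from define()'s BOOL/STRING handling shown below
    assert CMakeBuilder.define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False) == (
        "-DCMAKE_FIND_USE_PACKAGE_REGISTRY:BOOL=OFF"
    )
    assert CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW") == (
        "-DCMAKE_POLICY_DEFAULT_CMP0090:STRING=NEW"
    )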
def generator(*names: str, default: Optional[str] = None) -> None:
def generator(*names: str, default: Optional[str] = None):
    """The build system generator to use.

    See ``cmake --help`` for a list of valid generators.

@@ -157,30 +141,11 @@ def _values(x):

        default=default,
        values=_values,
        description="the build system generator to use",
        when="build_system=cmake",
    )
    for x in not_used:
        conflicts(f"generator={x}")


def get_cmake_prefix_path(pkg: spack.package_base.PackageBase) -> List[str]:
    """Obtain the CMAKE_PREFIX_PATH entries for a package, based on the cmake_prefix_path package
    attribute of direct build/test and transitive link dependencies."""
    # Add direct build/test deps
    selected: Set[str] = {s.dag_hash() for s in pkg.spec.dependencies(deptype=dt.BUILD | dt.TEST)}
    # Add transitive link deps
    selected.update(s.dag_hash() for s in pkg.spec.traverse(root=False, deptype=dt.LINK))
    # Separate out externals so they do not shadow Spack prefixes
    externals, spack_built = stable_partition(
        (s for s in pkg.spec.traverse(root=False, order="topo") if s.dag_hash() in selected),
        lambda x: x.external,
    )

    return filter_system_paths(
        path for spec in chain(spack_built, externals) for path in spec.package.cmake_prefix_paths
    )
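The generator directive above registers a generator variant on CMake packages and turns every unlisted generator into a conflict. A recipe that only supports Ninja could therefore be sketched as follows (hypothetical package, directive semantics as defined above):

    class NinjaOnlyExample(CMakePackage):  # hypothetical recipe
        # Allows only generator=ninja; generator=make becomes a conflict
        generator("ninja", default="ninja")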
class CMakePackage(spack.package_base.PackageBase):
    """Specialized class for packages built using CMake

@@ -272,15 +237,15 @@ def flags_to_build_system_args(self, flags):

    # Legacy methods (used by too many packages to change them,
    # need to forward to the builder)
    def define(self, cmake_var: str, value: Any) -> str:
        return define(cmake_var, value)
    def define(self, *args, **kwargs):
        return self.builder.define(*args, **kwargs)

    def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
        return define_from_variant(self, cmake_var, variant)
    def define_from_variant(self, *args, **kwargs):
        return self.builder.define_from_variant(*args, **kwargs)
@spack.builder.builder("cmake")
class CMakeBuilder(BuilderWithDefaults):
class CMakeBuilder(BaseBuilder):
    """The cmake builder encodes the default way of building software with CMake. It
    has three phases that can be overridden:

@@ -330,15 +295,15 @@ class CMakeBuilder(BuilderWithDefaults):

    build_time_test_callbacks = ["check"]

    @property
    def archive_files(self) -> List[str]:
    def archive_files(self):
        """Files to archive for packages based on CMake"""
        files = [os.path.join(self.build_directory, "CMakeCache.txt")]
        if _supports_compilation_databases(self.pkg):
        if _supports_compilation_databases(self):
            files.append(os.path.join(self.build_directory, "compile_commands.json"))
        return files

    @property
    def root_cmakelists_dir(self) -> str:
    def root_cmakelists_dir(self):
        """The relative path to the directory containing CMakeLists.txt

        This path is relative to the root of the extracted tarball,

@@ -347,17 +312,16 @@ def root_cmakelists_dir(self) -> str:

        return self.pkg.stage.source_path

    @property
    def generator(self) -> str:
    def generator(self):
        if self.spec.satisfies("generator=make"):
            return "Unix Makefiles"
        if self.spec.satisfies("generator=ninja"):
            return "Ninja"
        raise ValueError(
            f'{self.spec.format()} has an unsupported value for the "generator" variant'
        )
        msg = f'{self.spec.format()} has an unsupported value for the "generator" variant'
        raise ValueError(msg)

    @property
    def std_cmake_args(self) -> List[str]:
    def std_cmake_args(self):
        """Standard cmake arguments provided as a property for
        convenience of package writers
        """

@@ -366,9 +330,7 @@ def std_cmake_args(self) -> List[str]:

        return args

    @staticmethod
    def std_args(
        pkg: spack.package_base.PackageBase, generator: Optional[str] = None
    ) -> List[str]:
    def std_args(pkg, generator=None):
        """Computes the standard cmake arguments for a generic package"""
        default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
        generator = generator or default_generator
@@ -378,27 +340,18 @@ def std_args(

        msg = "Invalid CMake generator: '{0}'\n".format(generator)
        msg += "CMakePackage currently supports the following "
        msg += "primary generators: '{0}'".format("', '".join(valid_primary_generators))
        raise spack.error.InstallError(msg)
        raise spack.package_base.InstallError(msg)

        try:
            build_type = pkg.spec.variants["build_type"].value
        except KeyError:
            build_type = "RelWithDebInfo"

        define = CMakeBuilder.define
        args = [
            "-G",
            generator,
            define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
            define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
            # only include the install prefix lib dirs; rpaths for deps are added by USE_LINK_PATH
            define(
                "CMAKE_INSTALL_RPATH",
                [
                    pathlib.Path(pkg.prefix, "lib").as_posix(),
                    pathlib.Path(pkg.prefix, "lib64").as_posix(),
                ],
            ),
            define("CMAKE_PREFIX_PATH", get_cmake_prefix_path(pkg)),
            define("CMAKE_BUILD_TYPE", build_type),
        ]

@@ -413,34 +366,164 @@ def std_args(

        _conditional_cmake_defaults(pkg, args)
        _maybe_set_python_hints(pkg, args)

        # Set up CMake rpath
        args.extend(
            [
                define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
                define("CMAKE_INSTALL_RPATH", spack.build_environment.get_rpaths(pkg)),
                define("CMAKE_PREFIX_PATH", spack.build_environment.get_cmake_prefix_path(pkg)),
            ]
        )

        return args
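The cmake phase below composes the final command as std_cmake_args + cmake_args() + the CMakeLists.txt directory. The exact list returned by std_args depends on the concretized spec, so the following is only an illustrative sketch of its shape:

    args = CMakeBuilder.std_args(pkg)  # pkg: a concretized package instance
    # Roughly:
    # ["-G", "Unix Makefiles",
    #  "-DCMAKE_INSTALL_PREFIX:STRING=<prefix>",
    #  "-DCMAKE_BUILD_TYPE:STRING=RelWithDebInfo",
    #  ...registry, compile-commands, rpath and Python-hint defines...]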
    @staticmethod
    def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
        return define_cuda_architectures(pkg)
    def define_cuda_architectures(pkg):
        """Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.

        ``cuda_arch`` is a variant composed of a list of target CUDA architectures and
        it is declared in the cuda package.

        This method is no-op for cmake<3.18 and when ``cuda_arch`` variant is not set.

        """
        cmake_flag = str()
        if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
            cmake_flag = CMakeBuilder.define(
                "CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value
            )

        return cmake_flag

    @staticmethod
    def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
        return define_hip_architectures(pkg)
    def define_hip_architectures(pkg):
        """Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.

        ``amdgpu_target`` is a variant composed of a list of the target HIP
        architectures and it is declared in the rocm package.

        This method is no-op for cmake<3.18 and when ``amdgpu_target`` variant is
        not set.

        """
        cmake_flag = str()
        if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
            cmake_flag = CMakeBuilder.define(
                "CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value
            )

        return cmake_flag
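Both helpers reduce to a single define over the variant's value list, so the expected flag can be checked directly against the define semantics shown below; for example, for a cuda_arch value of ("70", "80"):

    flag = CMakeBuilder.define("CMAKE_CUDA_ARCHITECTURES", ("70", "80"))
    assert flag == "-DCMAKE_CUDA_ARCHITECTURES:STRING=70;80"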
    @staticmethod
    def define(cmake_var: str, value: Any) -> str:
        return define(cmake_var, value)

    def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
        return define_from_variant(self.pkg, cmake_var, variant)

    def define(cmake_var, value):
        """Return a CMake command line argument that defines a variable.

        The resulting argument will convert boolean values to OFF/ON
        and lists/tuples to CMake semicolon-separated string lists. All other
        values will be interpreted as strings.

        Examples:

            .. code-block:: python

                [define('BUILD_SHARED_LIBS', True),
                 define('CMAKE_CXX_STANDARD', 14),
                 define('swr', ['avx', 'avx2'])]

            will generate the following configuration options:

            .. code-block:: console

                ["-DBUILD_SHARED_LIBS:BOOL=ON",
                 "-DCMAKE_CXX_STANDARD:STRING=14",
                 "-DSWR:STRING=avx;avx2"]

        """
        # Create a list of pairs. Each pair includes a configuration
        # option and whether or not that option is activated
        if isinstance(value, bool):
            kind = "BOOL"
            value = "ON" if value else "OFF"
        else:
            kind = "STRING"
            if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
                value = ";".join(str(v) for v in value)
            else:
                value = str(value)

        return "".join(["-D", cmake_var, ":", kind, "=", value])
    def define_from_variant(self, cmake_var, variant=None):
        """Return a CMake command line argument from the given variant's value.

        The optional ``variant`` argument defaults to the lower-case transform
        of ``cmake_var``.

        This utility function is similar to
        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`.

        Examples:

            Given a package with:

            .. code-block:: python

                variant('cxxstd', default='11', values=('11', '14'),
                        multi=False, description='')
                variant('shared', default=True, description='')
                variant('swr', values=any_combination_of('avx', 'avx2'),
                        description='')

            calling this function like:

            .. code-block:: python

                [self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
                 self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
                 self.define_from_variant('SWR')]

            will generate the following configuration options:

            .. code-block:: console

                ["-DBUILD_SHARED_LIBS:BOOL=ON",
                 "-DCMAKE_CXX_STANDARD:STRING=14",
                 "-DSWR:STRING=avx;avx2"]

            for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``

        Note: if the provided variant is conditional, and the condition is not met,
        this function returns an empty string. CMake discards empty strings
        provided on the command line.
        """

        if variant is None:
            variant = cmake_var.lower()

        if variant not in self.pkg.variants:
            raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))

        if variant not in self.pkg.spec.variants:
            return ""

        value = self.pkg.spec.variants[variant].value
        if isinstance(value, (tuple, list)):
            # Sort multi-valued variants for reproducibility
            value = sorted(value)

        return self.define(cmake_var, value)
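In package recipes these two helpers are typically combined inside cmake_args; a minimal hypothetical example (variant and CMake option names are illustrative):

    class SolverExample(CMakePackage):  # hypothetical recipe
        variant("shared", default=True, description="Build shared libraries")
        variant("precision", default="double", values=("single", "double"),
                multi=False, description="Floating-point precision")

        def cmake_args(self):
            return [
                self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
                self.define_from_variant("SOLVER_PRECISION", "precision"),
                self.define("SOLVER_ENABLE_TESTS", self.run_tests),
            ]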
    @property
    def build_dirname(self) -> str:
    def build_dirname(self):
        """Directory name to use when building the package."""
        return f"spack-build-{self.pkg.spec.dag_hash(7)}"
        return "spack-build-%s" % self.pkg.spec.dag_hash(7)

    @property
    def build_directory(self) -> str:
    def build_directory(self):
        """Full-path to the directory to use when building the package."""
        return os.path.join(self.pkg.stage.path, self.build_dirname)

    def cmake_args(self) -> List[str]:
    def cmake_args(self):
        """List of all the arguments that must be passed to cmake, except:

        * CMAKE_INSTALL_PREFIX

@@ -450,56 +533,34 @@ def cmake_args(self) -> List[str]:

        """
        return []
    def cmake(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def cmake(self, pkg, spec, prefix):
        """Runs ``cmake`` in the build directory"""

        # skip cmake phase if it is an incremental develop build
        if spec.is_develop and os.path.isfile(
            os.path.join(self.build_directory, "CMakeCache.txt")
        ):
            return

        options = self.std_cmake_args
        options += self.cmake_args()
        options.append(os.path.abspath(self.root_cmakelists_dir))
        with fs.working_dir(self.build_directory, create=True):
            pkg.module.cmake(*options)
            inspect.getmodule(self.pkg).cmake(*options)

    def build(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def build(self, pkg, spec, prefix):
        """Make the build targets"""
        with fs.working_dir(self.build_directory):
            if self.generator == "Unix Makefiles":
                pkg.module.make(*self.build_targets)
                inspect.getmodule(self.pkg).make(*self.build_targets)
            elif self.generator == "Ninja":
                self.build_targets.append("-v")
                pkg.module.ninja(*self.build_targets)
                inspect.getmodule(self.pkg).ninja(*self.build_targets)

    def install(
        self,
        pkg: spack.package_base.PackageBase,
        spec: spack.spec.Spec,
        prefix: spack.util.prefix.Prefix,
    ) -> None:
    def install(self, pkg, spec, prefix):
        """Make the install targets"""
        with fs.working_dir(self.build_directory):
            if self.generator == "Unix Makefiles":
                pkg.module.make(*self.install_targets)
                inspect.getmodule(self.pkg).make(*self.install_targets)
            elif self.generator == "Ninja":
                pkg.module.ninja(*self.install_targets)
                inspect.getmodule(self.pkg).ninja(*self.install_targets)

    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
    spack.builder.run_after("build")(execute_build_time_tests)

    def check(self) -> None:
    def check(self):
        """Search the CMake-generated files for the targets ``test`` and ``check``,
        and runs them if found.
        """

@@ -510,133 +571,3 @@ def check(self) -> None:

        elif self.generator == "Ninja":
            self.pkg._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
            self.pkg._if_ninja_target_execute("check")
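The run_after("build") hook above only fires when build-time tests were requested (for example spack install --test=root). A recipe can also replace the default target search with its own check; the sketch below assumes the legacy package-to-builder forwarding applies and that self.builder.build_directory is reachable from the recipe, as the forwarding methods earlier in this diff suggest:

    from spack.package import *  # standard recipe preamble

    class CheckedExample(CMakePackage):  # hypothetical recipe
        def check(self):
            # Runs only when build-time tests are requested
            with working_dir(self.builder.build_directory):
                Executable("ctest")("--output-on-failure")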
def define(cmake_var: str, value: Any) -> str:
    """Return a CMake command line argument that defines a variable.

    The resulting argument will convert boolean values to OFF/ON and lists/tuples to CMake
    semicolon-separated string lists. All other values will be interpreted as strings.

    Examples:

        .. code-block:: python

            [define("BUILD_SHARED_LIBS", True),
             define("CMAKE_CXX_STANDARD", 14),
             define("swr", ["avx", "avx2"])]

        will generate the following configuration options:

        .. code-block:: console

            ["-DBUILD_SHARED_LIBS:BOOL=ON",
             "-DCMAKE_CXX_STANDARD:STRING=14",
             "-DSWR:STRING=avx;avx2"]

    """
    # Create a list of pairs. Each pair includes a configuration
    # option and whether or not that option is activated
    if isinstance(value, bool):
        kind = "BOOL"
        value = "ON" if value else "OFF"
    else:
        kind = "STRING"
        if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
            value = ";".join(str(v) for v in value)
        else:
            value = str(value)

    return "".join(["-D", cmake_var, ":", kind, "=", value])


def define_from_variant(
    pkg: spack.package_base.PackageBase, cmake_var: str, variant: Optional[str] = None
) -> str:
    """Return a CMake command line argument from the given variant's value.

    The optional ``variant`` argument defaults to the lower-case transform
    of ``cmake_var``.

    Examples:

        Given a package with:

        .. code-block:: python

            variant("cxxstd", default="11", values=("11", "14"),
                    multi=False, description="")
            variant("shared", default=True, description="")
            variant("swr", values=any_combination_of("avx", "avx2"),
                    description="")

        calling this function like:

        .. code-block:: python

            [
                self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
                self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
                self.define_from_variant("SWR"),
            ]

        will generate the following configuration options:

        .. code-block:: console

            [
                "-DBUILD_SHARED_LIBS:BOOL=ON",
                "-DCMAKE_CXX_STANDARD:STRING=14",
                "-DSWR:STRING=avx;avx2",
            ]

        for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``

    Note: if the provided variant is conditional, and the condition is not met, this function
    returns an empty string. CMake discards empty strings provided on the command line.
    """
    if variant is None:
        variant = cmake_var.lower()

    if not pkg.has_variant(variant):
        raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, pkg.name))

    if variant not in pkg.spec.variants:
        return ""

    value = pkg.spec.variants[variant].value
    if isinstance(value, (tuple, list)):
        # Sort multi-valued variants for reproducibility
        value = sorted(value)

    return define(cmake_var, value)


def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
    """Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.

    ``amdgpu_target`` is a variant composed of a list of the target HIP
    architectures and it is declared in the rocm package.

    This method is no-op for cmake<3.18 and when ``amdgpu_target`` variant is
    not set.

    """
    if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
        return define("CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value)

    return ""


def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
    """Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.

    ``cuda_arch`` is a variant composed of a list of target CUDA architectures and
    it is declared in the cuda package.

    This method is no-op for cmake<3.18 and when ``cuda_arch`` variant is not set.

    """
    if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
        return define("CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value)
    return ""
@@ -5,21 +5,15 @@

import itertools
import os
import pathlib
import platform
import re
import sys
from typing import Dict, List, Optional, Sequence, Tuple, Union

import archspec.cpu
from typing import Dict, List, Sequence, Tuple, Union

import llnl.util.tty as tty
from llnl.util.lang import classproperty, memoized
from llnl.util.lang import classproperty

import spack.compilers.libraries
import spack.config
import spack.compiler
import spack.package_base
import spack.paths
import spack.util.executable

# Local "type" for type hints
Path = Union[str, pathlib.Path]
@@ -49,9 +43,6 @@ class CompilerPackage(spack.package_base.PackageBase):

    #: Static definition of languages supported by this class
    compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]

    #: Relative path to compiler wrappers
    link_paths: Dict[str, str] = {}

    def __init__(self, spec: "spack.spec.Spec"):
        super().__init__(spec)
        msg = f"Supported languages for {spec} are not a subset of possible supported languages"

@@ -86,14 +77,14 @@ def executables(cls) -> Sequence[str]:

        ]

    @classmethod
    def determine_version(cls, exe: Path) -> str:
    def determine_version(cls, exe: Path):
        version_argument = cls.compiler_version_argument
        if isinstance(version_argument, str):
            version_argument = (version_argument,)

        for va in version_argument:
            try:
                output = compiler_output(exe, version_argument=va)
                output = spack.compiler.get_compiler_version_output(exe, va)
                match = re.search(cls.compiler_version_regex, output)
                if match:
                    return ".".join(match.groups())

@@ -104,7 +95,6 @@ def determine_version(cls, exe: Path) -> str:

                    f"[{__file__}] Cannot detect a valid version for the executable "
                    f"{str(exe)}, for package '{cls.name}': {e}"
                )
        return ""
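A compiler package built on this class mostly supplies detection data for the hooks above; determine_version joins the regex groups with dots, so a three-group regex yields versions like 1.2.3. A hypothetical sketch (the *_names attributes follow this class's conventions but lie outside the shown hunks):

    class Fooc(CompilerPackage):  # hypothetical compiler package
        """Hypothetical compiler whose `fooc --version` prints 'fooc 1.2.3'."""

        compiler_languages = ["c"]
        c_names = ["fooc"]
        compiler_version_argument = "--version"
        compiler_version_regex = r"fooc (\d+)\.(\d+)\.(\d+)"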
    @classmethod
    def compiler_bindir(cls, prefix: Path) -> Path:

@@ -152,183 +142,3 @@ def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:

    def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
        # path determination is separated so it can be reused in subclasses
        return "", {"compilers": cls.determine_compiler_paths(exes=exes)}

    #: Returns the argument needed to set the RPATH, or None if it does not exist
    rpath_arg: Optional[str] = "-Wl,-rpath,"
    #: Flag that needs to be used to pass an argument to the linker
    linker_arg: str = "-Wl,"
    #: Flag used to produce Position Independent Code
    pic_flag: str = "-fPIC"
    #: Flag used to get verbose output
    verbose_flags: str = "-v"
    #: Flag to activate OpenMP support
    openmp_flag: str = "-fopenmp"
    def standard_flag(self, *, language: str, standard: str) -> str:
        """Returns the flag used to enforce a given standard for a language"""
        if language not in self.supported_languages:
            # FIXME (compiler as nodes): Use UnsupportedCompilerFlag ?
            raise RuntimeError(f"{self.spec} does not provide the '{language}' language")
        try:
            return self._standard_flag(language=language, standard=standard)
        except (KeyError, RuntimeError) as e:
            raise RuntimeError(
                f"{self.spec} does not provide the '{language}' standard {standard}"
            ) from e

    def _standard_flag(self, *, language: str, standard: str) -> str:
        raise NotImplementedError("Must be implemented by derived classes")
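Derived packages implement _standard_flag; a KeyError escaping from it is converted by standard_flag above into the "does not provide the standard" RuntimeError. A sketch assuming GCC-style flags:

    def _standard_flag(self, *, language: str, standard: str) -> str:
        # Hypothetical mapping; a real compiler package would also gate on versions
        flags = {
            ("c", "99"): "-std=c99",
            ("c", "11"): "-std=c11",
            ("cxx", "14"): "-std=c++14",
            ("cxx", "17"): "-std=c++17",
        }
        return flags[(language, standard)]  # missing keys surface via standard_flag()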
    @property
    def disable_new_dtags(self) -> str:
        if platform.system() == "Darwin":
            return ""
        return "--disable-new-dtags"

    @property
    def enable_new_dtags(self) -> str:
        if platform.system() == "Darwin":
            return ""
        return "--enable-new-dtags"
    def setup_dependent_build_environment(self, env, dependent_spec):
        # FIXME (compiler as nodes): check if this is good enough or should be made more general

        # The package is not used as a compiler, so skip this setup
        if not any(
            lang in dependent_spec and dependent_spec[lang].name == self.spec.name
            for lang in ("c", "cxx", "fortran")
        ):
            return

        # Populate an object with the list of environment modifications and return it
        link_dir = pathlib.Path(spack.paths.build_env_path)

        for language, attr_name, wrapper_var_name, spack_var_name in [
            ("c", "cc", "CC", "SPACK_CC"),
            ("cxx", "cxx", "CXX", "SPACK_CXX"),
            ("fortran", "fortran", "F77", "SPACK_F77"),
            ("fortran", "fortran", "FC", "SPACK_FC"),
        ]:
            if language not in dependent_spec or dependent_spec[language].name != self.spec.name:
                continue

            if not hasattr(self, attr_name):
                continue

            compiler = getattr(self, attr_name)
            env.set(spack_var_name, compiler)

            if language not in self.link_paths:
                continue

            wrapper_path = link_dir / self.link_paths.get(language)
            env.set(wrapper_var_name, str(wrapper_path))
            env.set(f"SPACK_{wrapper_var_name}_RPATH_ARG", self.rpath_arg)

            uarch = dependent_spec.architecture.target
            version_number, _ = archspec.cpu.version_components(
                self.spec.version.dotted_numeric_string
            )
            try:
                isa_arg = uarch.optimization_flags(self.spec.name, version_number)
            except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
                isa_arg = ""

            if isa_arg:
                env.set(f"SPACK_TARGET_ARGS_{attr_name.upper()}", isa_arg)

        # FIXME (compiler as nodes): make these paths language specific
        env.set("SPACK_LINKER_ARG", self.linker_arg)

        paths = _implicit_rpaths(pkg=self)
        if paths:
            env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(paths))

        # Check whether we want to force RPATH or RUNPATH
        if spack.config.CONFIG.get("config:shared_linking:type") == "rpath":
            env.set("SPACK_DTAGS_TO_STRIP", self.enable_new_dtags)
            env.set("SPACK_DTAGS_TO_ADD", self.disable_new_dtags)
        else:
            env.set("SPACK_DTAGS_TO_STRIP", self.disable_new_dtags)
            env.set("SPACK_DTAGS_TO_ADD", self.enable_new_dtags)

        spec = self.spec
        env.set("SPACK_COMPILER_SPEC", spec.format("{name}{@version}{variants}{/hash:7}"))

        if spec.extra_attributes:
            extra_rpaths = spec.extra_attributes.get("extra_rpaths")
            if extra_rpaths:
                extra_rpaths = ":".join(extra_rpaths)
                env.append_path("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)

        # Add spack build environment path with compiler wrappers first in
        # the path. We add the compiler wrapper path, which includes default
        # wrappers (cc, c++, f77, f90), AND a subdirectory containing
        # compiler-specific symlinks. The latter ensures that builds that
        # are sensitive to the *name* of the compiler see the right name when
        # we're building with the wrappers.
        #
        # Conflicts on case-insensitive systems (like "CC" and "cc") are
        # handled by putting one in the <build_env_path>/case-insensitive
        # directory. Add that to the path too.
        env_paths = []
        compiler_specific = os.path.join(
            spack.paths.build_env_path, os.path.dirname(self.link_paths["c"])
        )
        for item in [spack.paths.build_env_path, compiler_specific]:
            env_paths.append(item)
            ci = os.path.join(item, "case-insensitive")
            if os.path.isdir(ci):
                env_paths.append(ci)

        tty.debug("Adding compiler bin/ paths: " + " ".join(env_paths))
        for item in env_paths:
            env.prepend_path("PATH", item)
        env.set_path("SPACK_ENV_PATH", env_paths)
def _implicit_rpaths(pkg: spack.package_base.PackageBase) -> List[str]:
    detector = spack.compilers.libraries.CompilerPropertyDetector(pkg.spec)
    paths = detector.implicit_rpaths()
    return paths
@memoized
def _compiler_output(
    compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
) -> str:
    """Returns the output from the compiler invoked with the given version argument.

    Args:
        compiler_path: path of the compiler to be invoked
        version_argument: the argument used to extract version information
    """
    compiler = spack.util.executable.Executable(compiler_path)
    compiler_invocation_args = {
        "output": str,
        "error": str,
        "ignore_errors": ignore_errors,
        "timeout": 120,
        "fail_on_error": True,
    }
    if version_argument:
        output = compiler(version_argument, **compiler_invocation_args)
    else:
        output = compiler(**compiler_invocation_args)
    return output


def compiler_output(
    compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
) -> str:
    """Wrapper for ``_compiler_output()``."""
    # This ensures that we memoize compiler output by *absolute path*,
    # not just executable name. If we don't do this, and the path changes
    # (e.g., during testing), we can get incorrect results.
    if not os.path.isabs(compiler_path):
        compiler_path = spack.util.executable.which_string(compiler_path, required=True)

    return _compiler_output(
        compiler_path, version_argument=version_argument, ignore_errors=ignore_errors
    )
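A usage sketch for the wrapper above; a bare executable name is resolved through PATH before the memoized call, so repeated queries for the same absolute path hit the cache (assumes a gcc is installed):

    output = compiler_output("gcc", version_argument="--version")
    print(output.splitlines()[0])  # e.g. "gcc (GCC) 12.3.0", system dependent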