Compare commits

2 commits: deprecatio… ... hs/ci/regr…

| Author | SHA1 | Date |
|---|---|---|
|  | 6c5711e7b6 |  |
|  | f772d97865 |  |
@@ -5,7 +5,7 @@ coverage:
   status:
     project:
       default:
-        threshold: 2.0%
+        threshold: 0.2%

 ignore:
   - lib/spack/spack/test/.*
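For context: in a codecov configuration, the project `threshold` is how far overall coverage may drop relative to the base commit before the status check fails, so this hunk tightens the tolerance from 2.0 to 0.2 percentage points. A minimal sketch of such a config (the filename and exact surrounding layout are assumed, not captured in this compare):

    # codecov config sketch (assumed filename: codecov.yml)
    coverage:
      status:
        project:
          default:
            threshold: 0.2%   # fail the check if coverage drops by more than this
    ignore:
      - lib/spack/spack/test/.*   # the test suite itself is excluded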
.github/workflows/audit.yaml (vendored): 74 changed lines
@@ -1,74 +0,0 @@
-name: audit
-
-on:
-  workflow_call:
-    inputs:
-      with_coverage:
-        required: true
-        type: string
-      python_version:
-        required: true
-        type: string
-
-concurrency:
-  group: audit-${{inputs.python_version}}-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
-  cancel-in-progress: true
-
-jobs:
-  # Run audits on all the packages in the built-in repository
-  package-audits:
-    runs-on: ${{ matrix.system.os }}
-    strategy:
-      matrix:
-        system:
-        - { os: windows-latest, shell: 'powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}' }
-        - { os: ubuntu-latest, shell: bash }
-        - { os: macos-latest, shell: bash }
-    defaults:
-      run:
-        shell: ${{ matrix.system.shell }}
-    steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
-      with:
-        python-version: ${{inputs.python_version}}
-    - name: Install Python packages
-      run: |
-        pip install --upgrade pip setuptools pytest coverage[toml]
-    - name: Setup for Windows run
-      if: runner.os == 'Windows'
-      run: |
-        python -m pip install --upgrade pywin32
-    - name: Package audits (with coverage)
-      env:
-        COVERAGE_FILE: coverage/.coverage-audits-${{ matrix.system.os }}
-      if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
-      run: |
-        . share/spack/setup-env.sh
-        coverage run $(which spack) audit packages
-        coverage run $(which spack) audit configs
-        coverage run $(which spack) -d audit externals
-        coverage combine
-    - name: Package audits (without coverage)
-      if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
-      run: |
-        . share/spack/setup-env.sh
-        spack -d audit packages
-        spack -d audit configs
-        spack -d audit externals
-    - name: Package audits (without coverage)
-      if: ${{ runner.os == 'Windows' }}
-      run: |
-        . share/spack/setup-env.sh
-        spack -d audit packages
-        ./share/spack/qa/validate_last_exit.ps1
-        spack -d audit configs
-        ./share/spack/qa/validate_last_exit.ps1
-        spack -d audit externals
-        ./share/spack/qa/validate_last_exit.ps1
-    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
-      if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
-      with:
-        name: coverage-audits-${{ matrix.system.os }}
-        path: coverage
-        include-hidden-files: true
.github/workflows/bin/bootstrap-test.sh (vendored): 2 changed lines
@@ -1,7 +1,7 @@
 #!/bin/bash
 set -e
 source share/spack/setup-env.sh
-$PYTHON bin/spack bootstrap disable github-actions-v0.5
+$PYTHON bin/spack bootstrap disable github-actions-v0.4
 $PYTHON bin/spack bootstrap disable spack-install
 $PYTHON bin/spack $SPACK_FLAGS solve zlib
 tree $BOOTSTRAP/store
.github/workflows/bootstrap.yml (vendored): 204 changed lines
@@ -1,204 +0,0 @@
-name: Bootstrapping
-
-on:
-  # This Workflow can be triggered manually
-  workflow_dispatch:
-  workflow_call:
-  schedule:
-    # nightly at 2:16 AM
-    - cron: '16 2 * * *'
-
-concurrency:
-  group: bootstrap-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
-  cancel-in-progress: true
-
-jobs:
-  distros-clingo-sources:
-    runs-on: ubuntu-latest
-    container: ${{ matrix.image }}
-    strategy:
-      matrix:
-        image: ["fedora:latest", "opensuse/leap:latest"]
-    steps:
-      - name: Setup Fedora
-        if: ${{ matrix.image == 'fedora:latest' }}
-        run: |
-          dnf install -y \
-              bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
-              make patch unzip which xz python3 python3-devel tree \
-              cmake bison bison-devel libstdc++-static
-      - name: Setup OpenSUSE
-        if: ${{ matrix.image == 'opensuse/leap:latest' }}
-        run: |
-          # Harden CI by applying the workaround described here: https://www.suse.com/support/kb/doc/?id=000019505
-          zypper update -y || zypper update -y
-          zypper install -y \
-              bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
-              make patch unzip which xz python3 python3-devel tree \
-              cmake bison
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-        with:
-          fetch-depth: 0
-      - name: Bootstrap clingo
-        run: |
-          source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.6
-          spack bootstrap disable github-actions-v0.5
-          spack external find cmake bison
-          spack -d solve zlib
-          tree ~/.spack/bootstrap/store/
-
-  clingo-sources:
-    runs-on: ${{ matrix.runner }}
-    strategy:
-      matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
-    steps:
-      - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' }}
-        run: |
-          brew install cmake bison tree
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
-        with:
-          python-version: "3.12"
-      - name: Bootstrap clingo
-        run: |
-          source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.6
-          spack bootstrap disable github-actions-v0.5
-          spack external find --not-buildable cmake bison
-          spack -d solve zlib
-          tree $HOME/.spack/bootstrap/store/
-
-  gnupg-sources:
-    runs-on: ${{ matrix.runner }}
-    strategy:
-      matrix:
-        runner: [ 'macos-13', 'macos-14', "ubuntu-latest" ]
-    steps:
-      - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' }}
-        run: brew install tree gawk
-      - name: Remove system executables
-        run: |
-          while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
-            sudo rm $(command -v gpg gpg2 patchelf)
-          done
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-        with:
-          fetch-depth: 0
-      - name: Bootstrap GnuPG
-        run: |
-          source share/spack/setup-env.sh
-          spack solve zlib
-          spack bootstrap disable github-actions-v0.6
-          spack bootstrap disable github-actions-v0.5
-          spack -d gpg list
-          tree ~/.spack/bootstrap/store/
-
-  from-binaries:
-    runs-on: ${{ matrix.runner }}
-    strategy:
-      matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
-    steps:
-      - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' }}
-        run: brew install tree
-      - name: Remove system executables
-        run: |
-          while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
-            sudo rm $(command -v gpg gpg2 patchelf)
-          done
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
-        with:
-          python-version: |
-            3.8
-            3.9
-            3.10
-            3.11
-            3.12
-            3.13
-      - name: Set bootstrap sources
-        run: |
-          source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable spack-install
-      - name: Bootstrap clingo
-        run: |
-          set -e
-          for ver in '3.8' '3.9' '3.10' '3.11' '3.12' '3.13'; do
-            not_found=1
-            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
-            if [[ -d "$ver_dir" ]] ; then
-              echo "Testing $ver_dir"
-              if $ver_dir/python --version ; then
-                export PYTHON="$ver_dir/python"
-                not_found=0
-                old_path="$PATH"
-                export PATH="$ver_dir:$PATH"
-                ./bin/spack-tmpconfig -b ./.github/workflows/bin/bootstrap-test.sh
-                export PATH="$old_path"
-              fi
-            fi
-            if (($not_found)) ; then
-              echo Required python version $ver not found in runner!
-              exit 1
-            fi
-          done
-      - name: Bootstrap GnuPG
-        run: |
-          source share/spack/setup-env.sh
-          spack -d gpg list
-          tree $HOME/.spack/bootstrap/store/
-      - name: Bootstrap File
-        run: |
-          source share/spack/setup-env.sh
-          spack -d python share/spack/qa/bootstrap-file.py
-          tree $HOME/.spack/bootstrap/store/
-
-  windows:
-    runs-on: "windows-latest"
-    steps:
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
-        with:
-          python-version: "3.12"
-      - name: Setup Windows
-        run: |
-          Remove-Item -Path (Get-Command gpg).Path
-          Remove-Item -Path (Get-Command file).Path
-      - name: Bootstrap clingo
-        run: |
-          ./share/spack/setup-env.ps1
-          spack bootstrap disable github-actions-v0.6
-          spack bootstrap disable github-actions-v0.5
-          spack external find --not-buildable cmake bison
-          spack -d solve zlib
-          ./share/spack/qa/validate_last_exit.ps1
-          tree $env:userprofile/.spack/bootstrap/store/
-      - name: Bootstrap GnuPG
-        run: |
-          ./share/spack/setup-env.ps1
-          spack -d gpg list
-          ./share/spack/qa/validate_last_exit.ps1
-          tree $env:userprofile/.spack/bootstrap/store/
-      - name: Bootstrap File
-        run: |
-          ./share/spack/setup-env.ps1
-          spack -d python share/spack/qa/bootstrap-file.py
-          ./share/spack/qa/validate_last_exit.ps1
-          tree $env:userprofile/.spack/bootstrap/store/
.github/workflows/build-containers.yml (vendored): 140 changed lines
@@ -1,140 +0,0 @@
-name: Containers
-
-on:
-  # This Workflow can be triggered manually
-  workflow_dispatch:
-  # Build new Spack develop containers nightly.
-  schedule:
-    - cron: '34 0 * * *'
-  # Run on pull requests that modify this file
-  pull_request:
-    branches:
-      - develop
-    paths:
-      - '.github/workflows/build-containers.yml'
-      - 'share/spack/docker/*'
-      - 'share/spack/templates/container/*'
-      - 'lib/spack/spack/container/*'
-  # Let's also build & tag Spack containers on releases.
-  release:
-    types: [published]
-
-concurrency:
-  group: build_containers-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
-  cancel-in-progress: true
-
-jobs:
-  deploy-images:
-    runs-on: ubuntu-latest
-    permissions:
-      packages: write
-    strategy:
-      # Even if one container fails to build we still want the others
-      # to continue their builds.
-      fail-fast: false
-      # A matrix of Dockerfile paths, associated tags, and which architectures
-      # they support.
-      matrix:
-        # Meaning of the various items in the matrix list
-        # 0: Container name (e.g. ubuntu-bionic)
-        # 1: Platforms to build for
-        # 2: Base image (e.g. ubuntu:22.04)
-        dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
-                     [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
-                     [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
-                     [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
-                     [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
-                     [ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
-                     [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
-                     [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
-                     [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
-                     [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
-                     [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
-                     [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
-    name: Build ${{ matrix.dockerfile[0] }}
-    if: github.repository == 'spack/spack'
-    steps:
-      - name: Checkout
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-
-      - name: Determine latest release tag
-        id: latest
-        run: |
-          git fetch --quiet --tags
-          echo "tag=$(git tag --list --sort=-v:refname | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | head -n 1)" | tee -a $GITHUB_OUTPUT
-
-      - uses: docker/metadata-action@369eb591f429131d6889c46b94e711f089e6ca96
-        id: docker_meta
-        with:
-          images: |
-            ghcr.io/${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
-            ${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
-          tags: |
-            type=schedule,pattern=nightly
-            type=schedule,pattern=develop
-            type=semver,pattern={{version}}
-            type=semver,pattern={{major}}.{{minor}}
-            type=semver,pattern={{major}}
-            type=ref,event=branch
-            type=ref,event=pr
-            type=raw,value=latest,enable=${{ github.ref == format('refs/tags/{0}', steps.latest.outputs.tag) }}
-
-      - name: Generate the Dockerfile
-        env:
-          SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
-        run: |
-          .github/workflows/bin/generate_spack_yaml_containerize.sh
-          . share/spack/setup-env.sh
-          mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
-          spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile
-          printf "Preparing to build ${{ env.container }} from dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile"
-          if [ ! -f "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile" ]; then
-            printf "dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile does not exist"
-            exit 1;
-          fi
-
-      - name: Upload Dockerfile
-        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
-        with:
-          name: dockerfiles_${{ matrix.dockerfile[0] }}
-          path: dockerfiles
-
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349
-
-      - name: Log in to GitHub Container Registry
-        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
-        with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Log in to DockerHub
-        if: github.event_name != 'pull_request'
-        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-
-      - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@48aba3b46d1b1fec4febb7c5d0c644b249a11355
-        with:
-          context: dockerfiles/${{ matrix.dockerfile[0] }}
-          platforms: ${{ matrix.dockerfile[1] }}
-          push: ${{ github.event_name != 'pull_request' }}
-          tags: ${{ steps.docker_meta.outputs.tags }}
-          labels: ${{ steps.docker_meta.outputs.labels }}
-
-  merge-dockerfiles:
-    runs-on: ubuntu-latest
-    needs: deploy-images
-    steps:
-      - name: Merge Artifacts
-        uses: actions/upload-artifact/merge@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
-        with:
-          name: dockerfiles
-          pattern: dockerfiles_*
-          delete-merged: true
.github/workflows/ci.yaml (vendored): 69 changed lines
@@ -15,6 +15,18 @@ concurrency:
   cancel-in-progress: true

 jobs:
+  prechecks:
+    needs: [ changes ]
+    uses: ./.github/workflows/valid-style.yml
+    secrets: inherit
+    with:
+      with_coverage: ${{ needs.changes.outputs.core }}
+  all-prechecks:
+    needs: [ prechecks ]
+    runs-on: ubuntu-latest
+    steps:
+    - name: Success
+      run: "true"
   # Check which files have been updated by the PR
   changes:
     runs-on: ubuntu-latest
@@ -24,7 +36,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
      if: ${{ github.event_name == 'push' }}
      with:
        fetch-depth: 0
@@ -62,58 +74,3 @@ jobs:
   # job outputs: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idoutputs
   # setting environment variables from earlier steps: https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-environment-variable
   #
-  bootstrap:
-    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.bootstrap == 'true' }}
-    needs: [ prechecks, changes ]
-    uses: ./.github/workflows/bootstrap.yml
-    secrets: inherit
-
-  unit-tests:
-    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
-    needs: [ prechecks, changes ]
-    uses: ./.github/workflows/unit_tests.yaml
-    secrets: inherit
-
-  prechecks:
-    needs: [ changes ]
-    uses: ./.github/workflows/valid-style.yml
-    secrets: inherit
-    with:
-      with_coverage: ${{ needs.changes.outputs.core }}
-
-  all-prechecks:
-    needs: [ prechecks ]
-    if: ${{ always() }}
-    runs-on: ubuntu-latest
-    steps:
-    - name: Success
-      run: |
-        if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
-          echo "Unit tests failed."
-          exit 1
-        else
-          exit 0
-        fi
-
-  coverage:
-    needs: [ unit-tests, prechecks ]
-    uses: ./.github/workflows/coverage.yml
-    secrets: inherit
-
-  all:
-    needs: [ unit-tests, coverage, bootstrap ]
-    if: ${{ always() }}
-    runs-on: ubuntu-latest
-    # See https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/accessing-contextual-information-about-workflow-runs#needs-context
-    steps:
-    - name: Status summary
-      run: |
-        if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
-          echo "Unit tests failed."
-          exit 1
-        elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
-          echo "Bootstrap tests failed."
-          exit 1
-        else
-          exit 0
-        fi
.github/workflows/coverage.yml (vendored): 35 changed lines
@@ -1,35 +0,0 @@
-name: coverage
-
-on:
-  workflow_call:
-
-jobs:
-  # Upload coverage reports to codecov once as a single bundle
-  upload:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
-        with:
-          python-version: '3.11'
-          cache: 'pip'
-
-      - name: Install python dependencies
-        run: pip install -r .github/workflows/requirements/coverage/requirements.txt
-
-      - name: Download coverage artifact files
-        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16
-        with:
-          pattern: coverage-*
-          path: coverage
-          merge-multiple: true
-
-      - run: ls -la coverage
-      - run: coverage combine -a coverage/.coverage*
-      - run: coverage xml
-
-      - name: "Upload coverage report to CodeCov"
-        uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
-        with:
-          verbose: true
-          fail_ci_if_error: true
.github/workflows/nightly-win-builds.yml (vendored): 31 changed lines
@@ -1,31 +0,0 @@
-name: Windows Paraview Nightly
-
-on:
-  schedule:
-    - cron: '0 2 * * *'  # Run at 2 am
-
-defaults:
-  run:
-    shell:
-      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
-
-
-jobs:
-  build-paraview-deps:
-    runs-on: windows-latest
-    steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip six pywin32 setuptools coverage
-    - name: Build Test
-      run: |
-        spack compiler find
-        spack external find cmake ninja win-sdk win-wdk wgl msmpi
-        spack -d install -y --cdash-upload-url https://cdash.spack.io/submit.php?project=Spack+on+Windows --cdash-track Nightly --only dependencies paraview
-        exit 0
@@ -1 +0,0 @@
-coverage==7.6.1
@@ -1,7 +1,7 @@
-black==24.10.0
+black==24.8.0
 clingo==5.7.1
 flake8==7.1.1
 isort==5.13.2
 mypy==1.8.0
-types-six==1.17.0.20241205
+types-six==1.16.21.20240513
 vermin==1.6.0
.github/workflows/unit_tests.yaml (vendored): 251 changed lines
@@ -1,251 +0,0 @@
-name: unit tests
-
-on:
-  workflow_dispatch:
-  workflow_call:
-
-concurrency:
-  group: unit_tests-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
-  cancel-in-progress: true
-
-jobs:
-  # Run unit tests with different configurations on linux
-  ubuntu:
-    runs-on: ${{ matrix.os }}
-    strategy:
-      matrix:
-        os: [ubuntu-latest]
-        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
-        on_develop:
-        - ${{ github.ref == 'refs/heads/develop' }}
-        include:
-        - python-version: '3.6'
-          os: ubuntu-20.04
-          on_develop: ${{ github.ref == 'refs/heads/develop' }}
-        exclude:
-        - python-version: '3.7'
-          os: ubuntu-latest
-          on_develop: false
-        - python-version: '3.8'
-          os: ubuntu-latest
-          on_develop: false
-        - python-version: '3.9'
-          os: ubuntu-latest
-          on_develop: false
-        - python-version: '3.10'
-          os: ubuntu-latest
-          on_develop: false
-        - python-version: '3.11'
-          os: ubuntu-latest
-          on_develop: false
-
-    steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
-      with:
-        python-version: ${{ matrix.python-version }}
-    - name: Install System packages
-      run: |
-        sudo apt-get -y update
-        # Needed for unit tests
-        sudo apt-get -y install \
-          coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
-          cmake bison libbison-dev kcov
-    - name: Install Python packages
-      run: |
-        pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
-        pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
-    - name: Setup git configuration
-      run: |
-        # Need this for the git tests to succeed.
-        git --version
-        . .github/workflows/bin/setup_git.sh
-    - name: Bootstrap clingo
-      if: ${{ matrix.concretizer == 'clingo' }}
-      env:
-        SPACK_PYTHON: python
-      run: |
-        . share/spack/setup-env.sh
-        spack bootstrap disable spack-install
-        spack bootstrap now
-        spack -v solve zlib
-    - name: Run unit tests
-      env:
-        SPACK_PYTHON: python
-        SPACK_TEST_PARALLEL: 2
-        COVERAGE: true
-        COVERAGE_FILE: coverage/.coverage-${{ matrix.os }}-python${{ matrix.python-version }}
-        UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
-      run: |
-        share/spack/qa/run-unit-tests
-    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
-      with:
-        name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
-        path: coverage
-        include-hidden-files: true
-  # Test shell integration
-  shell:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
-      with:
-        python-version: '3.11'
-    - name: Install System packages
-      run: |
-        sudo apt-get -y update
-        # Needed for shell tests
-        sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
-    - name: Install Python packages
-      run: |
-        pip install --upgrade pip setuptools pytest coverage[toml] pytest-xdist
-    - name: Setup git configuration
-      run: |
-        # Need this for the git tests to succeed.
-        git --version
-        . .github/workflows/bin/setup_git.sh
-    - name: Run shell tests
-      env:
-        COVERAGE: true
-      run: |
-        share/spack/qa/run-shell-tests
-    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
-      with:
-        name: coverage-shell
-        path: coverage
-        include-hidden-files: true
-
-  # Test RHEL8 UBI with platform Python. This job is run
-  # only on PRs modifying core Spack
-  rhel8-platform-python:
-    runs-on: ubuntu-latest
-    container: registry.access.redhat.com/ubi8/ubi
-    steps:
-    - name: Install dependencies
-      run: |
-        dnf install -y \
-            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
-            make patch tcl unzip which xz
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-    - name: Setup repo and non-root user
-      run: |
-        git --version
-        git config --global --add safe.directory /__w/spack/spack
-        git fetch --unshallow
-        . .github/workflows/bin/setup_git.sh
-        useradd spack-test
-        chown -R spack-test .
-    - name: Run unit tests
-      shell: runuser -u spack-test -- bash {0}
-      run: |
-        source share/spack/setup-env.sh
-        spack -d bootstrap now --dev
-        spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
-  # Test for the clingo based solver (using clingo-cffi)
-  clingo-cffi:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
-      with:
-        python-version: '3.13'
-    - name: Install System packages
-      run: |
-        sudo apt-get -y update
-        sudo apt-get -y install coreutils gfortran graphviz gnupg2
-    - name: Install Python packages
-      run: |
-        pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo
-        pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
-    - name: Run unit tests (full suite with coverage)
-      env:
-        COVERAGE: true
-        COVERAGE_FILE: coverage/.coverage-clingo-cffi
-      run: |
-        . share/spack/setup-env.sh
-        spack bootstrap disable spack-install
-        spack bootstrap disable github-actions-v0.5
-        spack bootstrap disable github-actions-v0.6
-        spack bootstrap status
-        spack solve zlib
-        spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
-    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
-      with:
-        name: coverage-clingo-cffi
-        path: coverage
-        include-hidden-files: true
-  # Run unit tests on MacOS
-  macos:
-    runs-on: ${{ matrix.os }}
-    strategy:
-      matrix:
-        os: [macos-13, macos-14]
-        python-version: ["3.11"]
-    steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
-      with:
-        python-version: ${{ matrix.python-version }}
-    - name: Install Python packages
-      run: |
-        pip install --upgrade pip setuptools
-        pip install --upgrade pytest coverage[toml] pytest-xdist pytest-cov
-    - name: Setup Homebrew packages
-      run: |
-        brew install dash fish gcc gnupg kcov
-    - name: Run unit tests
-      env:
-        SPACK_TEST_PARALLEL: 4
-        COVERAGE_FILE: coverage/.coverage-${{ matrix.os }}-python${{ matrix.python-version }}
-      run: |
-        git --version
-        . .github/workflows/bin/setup_git.sh
-        . share/spack/setup-env.sh
-        $(which spack) bootstrap disable spack-install
-        $(which spack) solve zlib
-        common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
-        $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
-      with:
-        name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
-        path: coverage
-        include-hidden-files: true
-  # Run unit tests on Windows
-  windows:
-    defaults:
-      run:
-        shell:
-          powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
-    runs-on: windows-latest
-    steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
-    - name: Create local develop
-      run: |
-        ./.github/workflows/bin/setup_git.ps1
-    - name: Unit Test
-      env:
-        COVERAGE_FILE: coverage/.coverage-windows
-      run: |
-        spack unit-test -x --verbose --cov --cov-config=pyproject.toml
-        ./share/spack/qa/validate_last_exit.ps1
-    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
-      with:
-        name: coverage-windows
-        path: coverage
-        include-hidden-files: true
.github/workflows/valid-style.yml (vendored): 99 changed lines
@@ -18,8 +18,8 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
       with:
         python-version: '3.11'
         cache: 'pip'
@@ -35,10 +35,10 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
+    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
       with:
         python-version: '3.11'
         cache: 'pip'
@@ -54,95 +54,4 @@ jobs:
     - name: Run style tests
       run: |
        share/spack/qa/run-style-tests
-  audit:
-    uses: ./.github/workflows/audit.yaml
-    secrets: inherit
-    with:
-      with_coverage: ${{ inputs.with_coverage }}
-      python_version: '3.11'
-  # Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
-  bootstrap-dev-rhel8:
-    runs-on: ubuntu-latest
-    container: registry.access.redhat.com/ubi8/ubi
-    steps:
-    - name: Install dependencies
-      run: |
-        dnf install -y \
-            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
-            make patch tcl unzip which xz
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-    - name: Setup repo and non-root user
-      run: |
-        git --version
-        git config --global --add safe.directory /__w/spack/spack
-        git fetch --unshallow
-        . .github/workflows/bin/setup_git.sh
-        useradd spack-test
-        chown -R spack-test .
-    - name: Bootstrap Spack development environment
-      shell: runuser -u spack-test -- bash {0}
-      run: |
-        source share/spack/setup-env.sh
-        spack debug report
-        spack -d bootstrap now --dev
-        spack -d style -t black
-        spack unit-test -V
-  import-check:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: julia-actions/setup-julia@v2
-      with:
-        version: '1.10'
-    - uses: julia-actions/cache@v2
-
-    # PR: use the base of the PR as the old commit
-    - name: Checkout PR base commit
-      if: github.event_name == 'pull_request'
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        ref: ${{ github.event.pull_request.base.sha }}
-        path: old
-    # not a PR: use the previous commit as the old commit
-    - name: Checkout previous commit
-      if: github.event_name != 'pull_request'
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        fetch-depth: 2
-        path: old
-    - name: Checkout previous commit
-      if: github.event_name != 'pull_request'
-      run: git -C old reset --hard HEAD^
-
-    - name: Checkout new commit
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        path: new
-    - name: Install circular import checker
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        repository: haampie/circular-import-fighter
-        ref: 9f60f51bc7134e0be73f27623f1b0357d1718427
-        path: circular-import-fighter
-    - name: Install dependencies
-      working-directory: circular-import-fighter
-      run: make -j dependencies
-    - name: Import cycles before
-      working-directory: circular-import-fighter
-      run: make SPACK_ROOT=../old && cp solution solution.old
-    - name: Import cycles after
-      working-directory: circular-import-fighter
-      run: make clean-graph && make SPACK_ROOT=../new && cp solution solution.new
-    - name: Compare import cycles
-      working-directory: circular-import-fighter
-      run: |
-        edges_before="$(grep -oP 'edges to delete: \K\d+' solution.old)"
-        edges_after="$(grep -oP 'edges to delete: \K\d+' solution.new)"
-        if [ "$edges_after" -gt "$edges_before" ]; then
-          printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
-          printf 'previously this was %s\033[0m\n' "$edges_before"
-          printf 'Compare \033[1;97m"Import cycles before"\033[0m and '
-          printf '\033[1;97m"Import cycles after"\033[0m to see problematic imports.\n'
-          exit 1
-        else
-          printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
-        fi
@@ -14,26 +14,3 @@ sphinx:
 python:
   install:
     - requirements: lib/spack/docs/requirements.txt
-
-search:
-  ranking:
-    spack.html: -10
-    spack.*.html: -10
-    llnl.html: -10
-    llnl.*.html: -10
-    _modules/*: -10
-    command_index.html: -9
-    basic_usage.html: 5
-    configuration.html: 5
-    config_yaml.html: 5
-    packages_yaml.html: 5
-    build_settings.html: 5
-    environments.html: 5
-    containers.html: 5
-    mirrors.html: 5
-    module_file_support.html: 5
-    repositories.html: 5
-    binary_caches.html: 5
-    chain.html: 5
-    pipelines.html: 5
-    packaging_guide.html: 5
CHANGELOG.md: 71 changed lines
@@ -1,64 +1,3 @@
-# v0.22.2 (2024-09-21)
-
-## Bugfixes
-- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
-- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
-- Bump vendored `archspec` for better aarch64 support (#45721, #46445)
-- Support macOS Sequoia (#45018, #45127)
-- Fix regression in `{variants.X}` and `{variants.X.value}` format strings (#46206)
-- Ensure shell escaping of environment variable values in load and activate commands (#42780)
-- Fix an issue where `spec[pkg]` considers specs outside the current DAG (#45090)
-- Do not halt concretization on unknown variants in externals (#45326)
-- Improve validation of `develop` config section (#46485)
-- Explicitly disable `ccache` if turned off in config, to avoid cache pollution (#45275)
-- Improve backwards compatibility in `include_concrete` (#45766)
-- Fix issue where package tags were sometimes repeated (#45160)
-- Make `setup-env.sh` "sourced only" by dropping execution bits (#45641)
-- Make certain source/binary fetch errors recoverable instead of a hard error (#45683)
-- Remove debug statements in package hash computation (#45235)
-- Remove redundant clingo warnings (#45269)
-- Remove hard-coded layout version (#45645)
-- Do not initialize previous store state in `use_store` (#45268)
-- Docs improvements (#46475)
-
-## Package updates
-- `chapel` major update (#42197, #44931, #45304)
-
-# v0.22.1 (2024-07-04)
-
-## Bugfixes
-- Fix reuse of externals on Linux (#44316)
-- Ensure parent gcc-runtime version >= child (#44834, #44870)
-- Ensure the latest gcc-runtime is rpath'ed when multiple exist among link deps (#44219)
-- Improve version detection of glibc (#44154)
-- Improve heuristics for solver (#44893, #44976, #45023)
-- Make strong preferences override reuse (#44373)
-- Reduce verbosity when C compiler is missing (#44182)
-- Make missing ccache executable an error when required (#44740)
-- Make every environment view containing `python` a `venv` (#44382)
-- Fix external detection for compilers with os but no target (#44156)
-- Fix version optimization for roots (#44272)
-- Handle common implementations of pagination of tags in OCI build caches (#43136)
-- Apply fetched patches to develop specs (#44950)
-- Avoid Windows wrappers for filesystem utilities on non-Windows (#44126)
-- Fix issue with long filenames in build caches on Windows (#43851)
-- Fix formatting issue in `spack audit` (#45045)
-- CI fixes (#44582, #43965, #43967, #44279, #44213)
-
-## Package updates
-- protobuf: fix 3.4:3.21 patch checksum (#44443)
-- protobuf: update hash for patch needed when="@3.4:3.21" (#44210)
-- git: bump v2.39 to 2.45; deprecate unsafe versions (#44248)
-- gcc: use -rpath {rpath_dir} not -rpath={rpath dir} (#44315)
-- Remove mesa18 and libosmesa (#44264)
-- Enforce consistency of `gl` providers (#44307)
-- Require libiconv for iconv (#44335, #45026).
-  Notice that glibc/musl also provide iconv, but are not guaranteed to be
-  complete. Set `packages:iconv:require:[glibc]` to restore the old behavior.
-- py-matplotlib: qualify when to do a post install (#44191)
-- rust: fix v1.78.0 instructions (#44127)
-- suite-sparse: improve setting of the `libs` property (#44214)
-- netlib-lapack: provide blas and lapack together (#44981)
-
 # v0.22.0 (2024-05-12)

@@ -380,16 +319,6 @@
 * 344 committers to packages
 * 45 committers to core

-# v0.21.3 (2024-10-02)
-
-## Bugfixes
-- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
-- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
-- Bump `archspec` to 0.2.5-dev for better aarch64 and Windows support (#42854, #44005,
-  #45721, #46445)
-- Support macOS Sequoia (#45018, #45127, #43862)
-- CI and test maintenance (#42909, #42728, #46711, #41943, #43363)
-
 # v0.21.2 (2024-03-01)

 ## Bugfixes
README.md: 11 changed lines
@@ -46,18 +46,13 @@ See the
 [Feature Overview](https://spack.readthedocs.io/en/latest/features.html)
 for examples and highlights.

-To install spack and your first package, make sure you have Python & Git.
+To install spack and your first package, make sure you have Python.
 Then:

-    $ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
+    $ git clone -c feature.manyFiles=true https://github.com/spack/spack.git
     $ cd spack/bin
     $ ./spack install zlib

-> [!TIP]
-> `-c feature.manyFiles=true` improves git's performance on repositories with 1,000+ files.
->
-> `--depth=2` prunes the git history to reduce the size of the Spack installation.
-
 Documentation
 ----------------

@@ -70,7 +65,7 @@ Tutorial
 ----------------

 We maintain a
-[**hands-on tutorial**](https://spack-tutorial.readthedocs.io/).
+[**hands-on tutorial**](https://spack.readthedocs.io/en/latest/tutorial.html).
 It covers basic to advanced usage, packaging, developer features, and large HPC
 deployments. You can do all of the exercises on your own laptop using a
 Docker container.
@@ -1,11 +1,71 @@
 @ECHO OFF
+setlocal EnableDelayedExpansion
 :: (c) 2021 Lawrence Livermore National Laboratory
 :: To use this file independently of Spack's installer, execute this script in its directory, or add the
 :: associated bin directory to your PATH. Invoke to launch Spack Shell.
 ::
 :: source_dir/spack/bin/spack_cmd.bat
 ::
-call "%~dp0..\share\spack\setup-env.bat"
-pushd %SPACK_ROOT%
-%comspec% /K
+pushd %~dp0..
+set SPACK_ROOT=%CD%
+pushd %CD%\..
+set spackinstdir=%CD%
+popd
+
+:: Check if Python is on the PATH
+if not defined python_pf_ver (
+(for /f "delims=" %%F in ('where python.exe') do (
+set "python_pf_ver=%%F"
+goto :found_python
+) ) 2> NUL
+)
+:found_python
+if not defined python_pf_ver (
+  :: If not, look for Python from the Spack installer
+  :get_builtin
+  (for /f "tokens=*" %%g in ('dir /b /a:d "!spackinstdir!\Python*"') do (
+    set "python_ver=%%g")) 2> NUL
+
+  if not defined python_ver (
+    echo Python was not found on your system.
+    echo Please install Python or add Python to your PATH.
+  ) else (
+    set "py_path=!spackinstdir!\!python_ver!"
+    set "py_exe=!py_path!\python.exe"
+  )
+  goto :exitpoint
+) else (
+  :: Python is already on the path
+  set "py_exe=!python_pf_ver!"
+  (for /F "tokens=* USEBACKQ" %%F in (
+    `"!py_exe!" --version`) do (set "output=%%F")) 2>NUL
+  if not "!output:Microsoft Store=!"=="!output!" goto :get_builtin
+  goto :exitpoint
+)
+:exitpoint
+
+set "PATH=%SPACK_ROOT%\bin\;%PATH%"
+if defined py_path (
+  set "PATH=%py_path%;%PATH%"
+)
+
+if defined py_exe (
+  "%py_exe%" "%SPACK_ROOT%\bin\haspywin.py"
+)
+
+set "EDITOR=notepad"
+
+DOSKEY spacktivate=spack env activate $*
+
+@echo **********************************************************************
+@echo ** Spack Package Manager
+@echo **********************************************************************
+
+IF "%1"=="" GOTO CONTINUE
+set
+GOTO:EOF
+
+:continue
+set PROMPT=[spack] %PROMPT%
+%comspec% /k
@@ -9,15 +9,15 @@ bootstrap:
   # may not be able to bootstrap all the software that Spack needs,
   # depending on its type.
   sources:
-  - name: github-actions-v0.6
-    metadata: $spack/share/spack/bootstrap/github-actions-v0.6
-  - name: github-actions-v0.5
+  - name: 'github-actions-v0.5'
     metadata: $spack/share/spack/bootstrap/github-actions-v0.5
-  - name: spack-install
+  - name: 'github-actions-v0.4'
+    metadata: $spack/share/spack/bootstrap/github-actions-v0.4
+  - name: 'spack-install'
     metadata: $spack/share/spack/bootstrap/spack-install
   trusted:
     # By default we trust bootstrapping from sources and from binaries
    # produced on Github via the workflow
-    github-actions-v0.6: true
     github-actions-v0.5: true
+    github-actions-v0.4: true
     spack-install: true
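The head side of this compare trusts the older github-actions-v0.4 binary source alongside v0.5, while the base side uses v0.5/v0.6. For reference, the same `trusted` schema can be overridden in a higher-precedence config scope; a hedged sketch (the user-scope file placement is an assumption, only the schema comes from the diff above):

    # ~/.spack/bootstrap.yaml (hypothetical override): bootstrap from sources only
    bootstrap:
      trusted:
        github-actions-v0.4: false   # distrust prebuilt binaries
        github-actions-v0.5: false
        spack-install: true          # keep source-based bootstrapping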
@@ -39,27 +39,11 @@ concretizer:
   # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
   duplicates:
     # "none": allows a single node for any package in the DAG.
-    # "minimal": allows the duplication of 'build-tools' nodes only
-    # (e.g. py-setuptools, cmake etc.)
+    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
     # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
     strategy: minimal
-  # Option to specify compatibility between operating systems for reuse of compilers and packages
+  # Option to specify compatiblity between operating systems for reuse of compilers and packages
   # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
   # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
   # requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
   os_compatible: {}

-  # Option to specify whether to support splicing. Splicing allows for
-  # the relinking of concrete package dependencies in order to better
-  # reuse already built packages with ABI compatible dependencies
-  splice:
-    explicit: []
-    automatic: false
-  # Maximum time, in seconds, allowed for the 'solve' phase. If set to 0, there is no time limit.
-  timeout: 0
-  # If set to true, exceeding the timeout will always result in a concretization error. If false,
-  # the best (suboptimal) model computed before the timeout is used.
-  #
-  # Setting this to false yields unreproducible results, so we advise to use that value only
-  # for debugging purposes (e.g. check which constraints can help Spack concretize faster).
-  error_on_timeout: true
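The removed `splice` and `timeout` keys exist only on the base side of this compare. To illustrate the timeout semantics that the removed comments describe, a minimal sketch (the values are illustrative, not from this diff):

    concretizer:
      # Give the solve phase at most 60 seconds instead of no limit...
      timeout: 60
      # ...and accept the best suboptimal model rather than erroring out.
      # The removed comment warns this makes results unreproducible, so it
      # is suggested only for debugging.
      error_on_timeout: false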
@@ -40,9 +40,9 @@ packages:
     jpeg: [libjpeg-turbo, libjpeg]
     lapack: [openblas, amdlibflame]
     libc: [glibc, musl]
-    libgfortran: [gcc-runtime]
+    libgfortran: [ gcc-runtime ]
     libglx: [mesa+glx]
-    libifcore: [intel-oneapi-runtime]
+    libifcore: [ intel-oneapi-runtime ]
     libllvm: [llvm]
     lua-lang: [lua, lua-luajit-openresty, lua-luajit]
     luajit: [lua-luajit-openresty, lua-luajit]
@@ -76,8 +76,6 @@ packages:
     buildable: false
   cray-mvapich2:
     buildable: false
-  egl:
-    buildable: false
   fujitsu-mpi:
     buildable: false
   hpcx-mpi:
@@ -1175,17 +1175,6 @@ unspecified version, but packages can depend on other packages with
|
|||||||
 could depend on ``mpich@1.2:`` if it can only build with version
 ``1.2`` or higher of ``mpich``.

-.. note:: Windows Spec Syntax Caveats
-   Windows has a few idiosyncrasies when it comes to the Spack spec syntax and the use of certain shells.
-   Spack's spec dependency syntax uses the caret (``^``) character, which is an escape character in CMD,
-   so it must be escaped with an additional caret (i.e. ``^^``).
-   CMD will also attempt to interpret strings with ``=`` characters in them, so any spec including this
-   symbol must be double quoted.
-
-   Note: all of these issues are unique to CMD; they can be avoided by using PowerShell.
-
-   For more context on these caveats see the related issues: `caret <https://github.com/spack/spack/issues/42833>`_ and `equals <https://github.com/spack/spack/issues/43348>`_.
-
 Below are more details about the specifiers that you can add to specs.

 .. _version-specifier:
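To make the caveats in the removed note concrete, a short sketch of the CMD escaping it describes (``hdf5``, ``mpich``, and ``api=v110`` are just illustrative spec fragments):

.. code-block:: console

   C:\> spack install hdf5 ^^mpich       # caret doubled so CMD passes it through
   C:\> spack install "hdf5 api=v110"    # quoted because the spec contains '='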

@@ -1359,10 +1348,6 @@ For example, for the ``stackstart`` variant:
    mpileaks stackstart==4 # variant will be propagated to dependencies
    mpileaks stackstart=4 # only mpileaks will have this variant value

-Spack also allows variants to be propagated from a package that does
-not have that variant.
-
-
 ^^^^^^^^^^^^^^
 Compiler Flags
 ^^^^^^^^^^^^^^

@@ -166,106 +166,3 @@ while `py-numpy` still needs an older version:

 Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the
 default behavior is ``duplicates:strategy:minimal``.

---------
-Splicing
---------
-
-The ``splice`` key covers config attributes for splicing specs in the solver.
-
-"Splicing" is a method for replacing a dependency with another spec
-that provides the same package or virtual. There are two types of
-splices, referring to different behaviors for shared dependencies
-between the root spec and the new spec replacing a dependency:
-"transitive" and "intransitive". A "transitive" splice is one that
-resolves all conflicts by taking the dependency from the new node. An
-"intransitive" splice is one that resolves all conflicts by taking the
-dependency from the original root. From a theory perspective, hybrid
-splices are possible but are not modeled by Spack.
-
-All spliced specs retain a ``build_spec`` attribute that points to the
-original Spec before any splice occurred. The ``build_spec`` for a
-non-spliced spec is itself.
-
-The figure below shows examples of transitive and intransitive splices:
-
-.. figure:: images/splices.png
-   :align: center
-
-The concretizer can be configured to explicitly splice particular
-replacements for a target spec. Splicing will allow the user to make
-use of generically built public binary caches, while swapping in
-highly optimized local builds for performance critical components
-and/or components that interact closely with the specific hardware
-details of the system. The most prominent candidate for splicing is
-MPI providers. MPI packages have relatively well-understood ABI
-characteristics, and most High Performance Computing facilities deploy
-highly optimized MPI packages tailored to their particular
-hardware. The following config block configures Spack to replace
-whatever MPI provider each spec was concretized to use with the
-particular package of ``mpich`` with the hash that begins ``abcdef``.
-
-.. code-block:: yaml
-
-   concretizer:
-     splice:
-       explicit:
-       - target: mpi
-         replacement: mpich/abcdef
-         transitive: false
-
-.. warning::
-
-   When configuring an explicit splice, you as the user take on the
-   responsibility for ensuring ABI compatibility between the specs
-   matched by the target and the replacement you provide. If they are
-   not compatible, Spack will not warn you and your application will
-   fail to run.
-
-The ``target`` field of an explicit splice can be any abstract
-spec. The ``replacement`` field must be a spec that includes the hash
-of a concrete spec, and the replacement must either be the same
-package as the target, provide the virtual that is the target, or
-provide a virtual that the target provides. The ``transitive`` field
-is optional -- by default, splices will be transitive.
-
-.. note::
-
-   With explicit splices configured, it is possible for Spack to
-   concretize to a spec that does not satisfy the input. For example,
-   with the config above ``hdf5 ^mvapich2`` will concretize to use
-   ``mpich/abcdef`` instead of ``mvapich2`` as the MPI provider. Spack
-   will warn the user in this case, but will not fail the
-   concretization.
-
-.. _automatic_splicing:
-
-^^^^^^^^^^^^^^^^^^
-Automatic Splicing
-^^^^^^^^^^^^^^^^^^
-
-The Spack solver can be configured to do automatic splicing for
-ABI-compatible packages. Automatic splices are enabled in the concretizer
-config section:
-
-.. code-block:: yaml
-
-   concretizer:
-     splice:
-       automatic: True
-
-Packages can include ABI-compatibility information using the
-``can_splice`` directive. See :ref:`the packaging
-guide <abi_compatibility>` for instructions on specifying ABI
-compatibility using the ``can_splice`` directive.
-
-.. note::
-
-   The ``can_splice`` directive is experimental and may be changed in
-   future versions.
-
-When automatic splicing is enabled, the concretizer will combine any
-number of ABI-compatible specs if possible to reuse installed packages
-and packages available from binary caches. The end result of these
-specs is equivalent to a series of transitive/intransitive splices,
-but the series may be non-obvious.
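For completeness, a sketch of what a ``can_splice`` declaration could look like inside a package recipe. The package, versions, and constraint here are hypothetical, and since the directive is experimental its exact signature may differ:

.. code-block:: python

   class MyMpich(Package):
       """Hypothetical MPI provider that advertises ABI compatibility."""

       version("4.2.0")

       # assumption: builds of this package at 4.2.0 may replace concrete
       # specs matching mpich@4.1 in an automatic splice
       can_splice("mpich@4.1", when="@4.2.0")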

@@ -130,19 +130,14 @@ before or after a particular phase. For example, in ``perl``, we see:

@run_after("install")
|
@run_after("install")
|
||||||
def install_cpanm(self):
|
def install_cpanm(self):
|
||||||
spec = self.spec
|
spec = self.spec
|
||||||
maker = make
|
|
||||||
cpan_dir = join_path("cpanm", "cpanm")
|
if spec.satisfies("+cpanm"):
|
||||||
if sys.platform == "win32":
|
with working_dir(join_path("cpanm", "cpanm")):
|
||||||
maker = nmake
|
perl = spec["perl"].command
|
||||||
cpan_dir = join_path(self.stage.source_path, cpan_dir)
|
perl("Makefile.PL")
|
||||||
cpan_dir = windows_sfn(cpan_dir)
|
make()
|
||||||
if "+cpanm" in spec:
|
make("install")
|
||||||
with working_dir(cpan_dir):
|
|
||||||
perl = spec["perl"].command
|
|
||||||
perl("Makefile.PL")
|
|
||||||
maker()
|
|
||||||
maker("install")
|
|
||||||
|
|
||||||
This extra step automatically installs ``cpanm`` in addition to the
|
This extra step automatically installs ``cpanm`` in addition to the
|
||||||
base Perl installation.
|
base Perl installation.
|
||||||

@@ -181,14 +176,8 @@ In the ``perl`` package, we can see:

@run_after("build")
|
@run_after("build")
|
||||||
@on_package_attributes(run_tests=True)
|
@on_package_attributes(run_tests=True)
|
||||||
def build_test(self):
|
def test(self):
|
||||||
if sys.platform == "win32":
|
make("test")
|
||||||
win32_dir = os.path.join(self.stage.source_path, "win32")
|
|
||||||
win32_dir = windows_sfn(win32_dir)
|
|
||||||
with working_dir(win32_dir):
|
|
||||||
nmake("test", ignore_quotes=True)
|
|
||||||
else:
|
|
||||||
make("test")
|
|
||||||
|
|
||||||
As you can guess, this runs ``make test`` *after* building the package,
|
As you can guess, this runs ``make test`` *after* building the package,
|
||||||
if and only if testing is requested. Again, this is not specific to
|
if and only if testing is requested. Again, this is not specific to
|
||||||

@@ -49,14 +49,14 @@ following phases:
 #. ``install`` - install the package

 Package developers often add unit tests that can be invoked with
-``scons test`` or ``scons check``. Spack provides a ``build_test`` method
+``scons test`` or ``scons check``. Spack provides a ``test`` method
 to handle this. Since we don't know which one the package developer
-chose, the ``build_test`` method does nothing by default, but can be easily
+chose, the ``test`` method does nothing by default, but can be easily
 overridden like so:

 .. code-block:: python

-   def build_test(self):
+   def test(self):
        scons("check")


@@ -5,9 +5,9 @@

 .. chain:

-=============================================
-Chaining Spack Installations (upstreams.yaml)
-=============================================
+============================
+Chaining Spack Installations
+============================

 You can point your Spack installation to another installation to use any
 packages that are installed there. To register the other Spack instance,
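For orientation, upstream registration itself happens in ``upstreams.yaml``; a minimal sketch, where the scope name ``team-installs`` and the path are placeholders:

.. code-block:: yaml

   upstreams:
     team-installs:
       install_tree: /path/to/other/spack/opt/spack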

@@ -210,20 +210,15 @@ def setup(sphinx):
     # Spack classes that are private and we don't want to expose
     ("py:class", "spack.provider_index._IndexBase"),
     ("py:class", "spack.repo._PrependFileLoader"),
-    ("py:class", "spack.build_systems._checks.BuilderWithDefaults"),
+    ("py:class", "spack.build_systems._checks.BaseBuilder"),
     # Spack classes that intersphinx is unable to resolve
     ("py:class", "spack.version.StandardVersion"),
     ("py:class", "spack.spec.DependencySpec"),
-    ("py:class", "spack.spec.ArchSpec"),
     ("py:class", "spack.spec.InstallStatus"),
     ("py:class", "spack.spec.SpecfileReaderBase"),
     ("py:class", "spack.install_test.Pb"),
     ("py:class", "spack.filesystem_view.SimpleFilesystemView"),
     ("py:class", "spack.traverse.EdgeAndDepth"),
-    ("py:class", "archspec.cpu.microarchitecture.Microarchitecture"),
-    ("py:class", "spack.compiler.CompilerCache"),
-    # TypeVar that is not handled correctly
-    ("py:class", "llnl.util.lang.T"),
 ]

 # The reST default role (used for this markup: `text`) to use for all documents.

@@ -281,7 +281,7 @@ When spack queries for configuration parameters, it searches in
 higher-precedence scopes first. So, settings in a higher-precedence file
 can override those with the same key in a lower-precedence one. For
 list-valued settings, Spack *prepends* higher-precedence settings to
-lower-precedence settings. Completely ignoring lower-precedence configuration
+lower-precedence settings. Completely ignoring higher-level configuration
 options is supported with the ``::`` notation for keys (see
 :ref:`config-overrides` below).

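As a quick illustration of the ``::`` notation, a sketch of a higher-precedence scope that discards, rather than merges with, any lower-precedence ``config`` settings (``build_jobs`` is just an example key):

.. code-block:: yaml

   config::          # note the double colon
     build_jobs: 4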

@@ -511,7 +511,6 @@ Spack understands over a dozen special variables. These are:
 * ``$target_family``. The target family for the current host, as
   detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``.
 * ``$date``: the current date in the format YYYY-MM-DD
-* ``$spack_short_version``: the Spack version truncated to the first components.

 Note that, as with shell variables, you can write these as ``$varname``

@@ -184,7 +184,7 @@ Style Tests

 Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for
 `PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance and
-`mypy <https://mypy.readthedocs.io/en/stable/>`_ for type checking. PEP 8 is
+`mypy <https://mypy.readthedocs.io/en/stable/>` for type checking. PEP 8 is
 a series of style guides for Python that provide suggestions for everything
 from variable naming to indentation. In order to limit the number of PRs that
 were mostly style changes, we decided to enforce PEP 8 conformance. Your PR

@@ -316,215 +316,6 @@ documentation tests to make sure there are no errors. Documentation changes can
 in some obfuscated warning messages. If you don't understand what they mean, feel free
 to ask when you submit your PR.

-.. _spack-builders-and-pipelines:
-
-^^^^^^^^^
-GitLab CI
-^^^^^^^^^
-
-""""""""""""""""""
-Build Cache Stacks
-""""""""""""""""""
-
-Spack welcomes the contribution of software stacks of interest to the community. These
-stacks are used to test package recipes and generate publicly available build caches.
-Spack uses GitLab CI for managing the orchestration of build jobs.
-
-GitLab Entry Point
-~~~~~~~~~~~~~~~~~~
-
-Add a stack entry point to ``share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml``. There
-are two stages required for each new stack: the generation stage and the build stage.
-
-The generate stage is defined using the job template ``.generate``, configured with
-environment variables defining the name of the stack in ``SPACK_CI_STACK_NAME``, the
-platform (``SPACK_TARGET_PLATFORM``) and architecture (``SPACK_TARGET_ARCH``) configuration,
-and the tags associated with the class of runners to build on.
-
-.. note::
-
-   The ``SPACK_CI_STACK_NAME`` must match the name of the directory containing the
-   stack's ``spack.yaml``.
-
-.. note::
-
-   The platform and architecture variables are specified in order to select the
-   correct configurations from the generic configurations used in Spack CI. The
-   configurations currently available are:
-
-   * ``.cray_rhel_zen4``
-   * ``.cray_sles_zen4``
-   * ``.darwin_aarch64``
-   * ``.darwin_x86_64``
-   * ``.linux_aarch64``
-   * ``.linux_icelake``
-   * ``.linux_neoverse_n1``
-   * ``.linux_neoverse_v1``
-   * ``.linux_neoverse_v2``
-   * ``.linux_power``
-   * ``.linux_skylake``
-   * ``.linux_x86_64``
-   * ``.linux_x86_64_v4``
-
-   New configurations can be added to accommodate new platforms and architectures.
-
-The build stage is defined as a trigger job that consumes the GitLab CI pipeline generated in
-the generate stage for this stack. Build stage jobs use the ``.build`` job template, which
-handles the basic configuration.
-
-An example entry point for a new stack called ``my-super-cool-stack``:
-
-.. code-block:: yaml
-
-   .my-super-cool-stack:
-     extends: [ ".linux_x86_64_v3" ]
-     variables:
-       SPACK_CI_STACK_NAME: my-super-cool-stack
-     tags: [ "all", "tags", "your", "job", "needs"]
-
-   my-super-cool-stack-generate:
-     extends: [ ".generate", ".my-super-cool-stack" ]
-     image: my-super-cool-stack-image:0.0.1
-
-   my-super-cool-stack-build:
-     extends: [ ".build", ".my-super-cool-stack" ]
-     trigger:
-       include:
-         - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
-           job: my-super-cool-stack-generate
-       strategy: depend
-     needs:
-       - artifacts: True
-         job: my-super-cool-stack-generate
-
-Stack Configuration
-~~~~~~~~~~~~~~~~~~~
-
-The stack configuration is a Spack environment file with two additional sections added.
-Stack configurations should be located in ``share/spack/gitlab/cloud_pipelines/stacks/<stack_name>/spack.yaml``.
-
-The ``ci`` section is generally used to define stack-specific mappings such as image or tags.
-For more information on what can go into the ``ci`` section, refer to the docs on pipelines.
-
-The ``cdash`` section is used for defining where to upload the results of builds. Spack configures
-most of the details for posting pipeline results to
-`cdash.spack.io <https://cdash.spack.io/index.php?project=Spack+Testing>`_. The only
-requirement in the stack configuration is to define a ``build-group`` that is unique;
-this is usually the long name of the stack.
-
-An example stack that builds ``zlib``:
-
-.. code-block:: yaml
-
-   spack:
-     view: false
-     packages:
-       all:
-         require: ["%gcc", "target=x86_64_v3"]
-     specs:
-     - zlib
-
-     ci:
-       pipeline-gen:
-       - build-job:
-           image: my-super-cool-stack-image:0.0.1
-
-     cdash:
-       build-group: My Super Cool Stack
-
-.. note::
-
-   The ``image`` used in the ``*-generate`` job must match exactly the ``image`` used in the ``build-job``.
-   When the images do not match, the build job may fail.
-
-"""""""""""""""""""
-Registering Runners
-"""""""""""""""""""
-
-Contributing computational resources to Spack's CI build farm is one way to help expand the
-capabilities and offerings of the public Spack build caches. Currently, Spack utilizes Linux runners
-from AWS, Google, and the University of Oregon (UO).
-
-Runners require four key pieces:
-
-* Runner Registration Token
-* Accurate tags
-* OIDC Authentication script
-* GPG keys
-
-Minimum GitLab Runner version: ``16.1.0``
-(`installation instructions <https://docs.gitlab.com/runner/install/>`_).
-
-Registration Token
-~~~~~~~~~~~~~~~~~~
-
-The first step to contribute new runners is to open an issue in the `spack infrastructure <https://github.com/spack/spack-infrastructure/issues/new?assignees=&labels=runner-registration&projects=&template=runner_registration.yml>`_
-project. This will be reported to the Spack infrastructure team, who will guide users through the process
-of registering new runners for Spack CI.
-
-The information needed to register a runner is the motivation for the new resources, a semi-detailed description of
-the runner, and finally the point of contact for maintaining the software on the runner.
-
-The point of contact will then work with the infrastructure team to obtain runner registration token(s) for interacting
-with Spack's GitLab instance. Once the runner is active, this point of contact will also be responsible for updating the
-GitLab runner software to keep pace with Spack's GitLab.
-
-Tagging
-~~~~~~~
-
-In the initial stages of runner registration it is important to **exclude** the special tag ``spack``. This will prevent
-the new runner(s) from being picked up for production CI jobs while they are configured and evaluated. Once it is determined
-that a runner is ready for production use, the ``spack`` tag will be added.
-
-Because GitLab has no concept of tag exclusion, runners that provide specialized resources also require specialized tags.
-For example, a basic CPU-only x86_64 runner may have a tag ``x86_64`` associated with it. However, a runner containing a
-CUDA-capable GPU may have the tag ``x86_64-cuda`` to denote that it should only be used for packages that will benefit from
-a CUDA-capable resource.
-
-OIDC
-~~~~
-
-Spack runners use OIDC authentication for connecting to the appropriate AWS bucket,
-which is used for coordinating the communication of binaries between build jobs. In
-order to configure OIDC authentication, Spack CI runners use a Python script with minimal
-dependencies. This script can be configured for runners as seen here using the ``pre_build_script``:
-
-.. code-block:: toml
-
-   [[runners]]
-     pre_build_script = """
-     echo 'Executing Spack pre-build setup script'
-
-     for cmd in "${PY3:-}" python3 python; do
-       if command -v > /dev/null "$cmd"; then
-         export PY3="$(command -v "$cmd")"
-         break
-       fi
-     done
-
-     if [ -z "${PY3:-}" ]; then
-       echo "Unable to find python3 executable"
-       exit 1
-     fi
-
-     $PY3 -c "import urllib.request;urllib.request.urlretrieve('https://raw.githubusercontent.com/spack/spack-infrastructure/main/scripts/gitlab_runner_pre_build/pre_build.py', 'pre_build.py')"
-     $PY3 pre_build.py > envvars
-
-     . ./envvars
-     rm -f envvars
-     unset GITLAB_OIDC_TOKEN
-     """
-
-GPG Keys
-~~~~~~~~
-
-Runners that may be utilized for ``protected`` CI require the registration of an intermediate signing key that
-can be used to sign packages. For more information on package signing, read :ref:`key_architecture`.

 --------
 Coverage
 --------

@@ -333,9 +333,13 @@ inserting them at different places in the spack code base. Whenever a hook
 type triggers by way of a function call, we find all the hooks of that type,
 and run them.

-Spack defines hooks by way of a module in the ``lib/spack/spack/hooks`` directory.
-This module has to be registered in ``__init__.py`` so that Spack is aware of it.
-This section will cover the basic kind of hooks, and how to write them.
+Spack defines hooks by way of a module at ``lib/spack/spack/hooks`` where we can define
+types of hooks in the ``__init__.py``, and then python files in that folder
+can use hook functions. The files are automatically parsed, so if you write
+a new file for some integration (e.g., ``lib/spack/spack/hooks/myintegration.py``)
+you can then write hook functions in that file that will be automatically detected,
+and run whenever your hook is called. This section will cover the basic kind
+of hooks, and how to write them.

 ^^^^^^^^^^^^^^
 Types of Hooks
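Concretely, a hook module is a Python file whose functions follow the hook signatures declared in ``__init__.py``. A minimal sketch, where the file name is hypothetical and the ``post_install`` signature is abbreviated from the real one:

.. code-block:: python

   # lib/spack/spack/hooks/myintegration.py  (hypothetical)

   def post_install(spec, explicit):
       """Runs after a package is installed."""
       print(f"installed {spec.name}@{spec.version} (explicit={explicit})")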

@@ -708,27 +712,27 @@ Release branches
 ^^^^^^^^^^^^^^^^

 There are currently two types of Spack releases: :ref:`major releases
-<major-releases>` (``0.21.0``, ``0.22.0``, etc.) and :ref:`patch releases
-<patch-releases>` (``0.22.1``, ``0.22.2``, ``0.22.3``, etc.). Here is a
+<major-releases>` (``0.17.0``, ``0.18.0``, etc.) and :ref:`point releases
+<point-releases>` (``0.17.1``, ``0.17.2``, ``0.17.3``, etc.). Here is a
 diagram of how Spack release branches work::

-   o    branch: develop  (latest version, v0.23.0.dev0)
+   o    branch: develop  (latest version, v0.19.0.dev0)
    |
    o
-   | o  branch: releases/v0.22, tag: v0.22.1
+   | o  branch: releases/v0.18, tag: v0.18.1
    o |
-   | o  tag: v0.22.0
+   | o  tag: v0.18.0
    o |
    | o
    |/
    o
    |
    o
-   | o  branch: releases/v0.21, tag: v0.21.2
+   | o  branch: releases/v0.17, tag: v0.17.2
    o |
-   | o  tag: v0.21.1
+   | o  tag: v0.17.1
    o |
-   | o  tag: v0.21.0
+   | o  tag: v0.17.0
    o |
    | o
    |/

@@ -739,8 +743,8 @@ requests target ``develop``. The ``develop`` branch will report that its
 version is that of the next **major** release with a ``.dev0`` suffix.

 Each Spack release series also has a corresponding branch, e.g.
-``releases/v0.22`` has ``v0.22.x`` versions of Spack, and
-``releases/v0.21`` has ``v0.21.x`` versions. A major release is the first
+``releases/v0.18`` has ``0.18.x`` versions of Spack, and
+``releases/v0.17`` has ``0.17.x`` versions. A major release is the first
 tagged version on a release branch. Minor releases are back-ported from
 develop onto release branches. This is typically done by cherry-picking
 bugfix commits off of ``develop``.
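In practice, a back-port is a plain ``git cherry-pick`` onto the release branch; a sketch with placeholder names:

.. code-block:: console

   $ git checkout releases/vX.Y
   $ git cherry-pick <sha-of-squashed-bugfix-commit-on-develop>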

@@ -770,40 +774,27 @@ for more details.
 Scheduling work for releases
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-We schedule work for **major releases** through `milestones
-<https://github.com/spack/spack/milestones>`_ and `GitHub Projects
-<https://github.com/spack/spack/projects>`_, while **patch releases** use `labels
-<https://github.com/spack/spack/labels>`_.
-
-There is only one milestone open at a time. Its name corresponds to the next major version, for
-example ``v0.23``. Important issues and pull requests should be assigned to this milestone by
-core developers, so that they are not forgotten at the time of release. The milestone is closed
-when the release is made, and a new milestone is created for the next major release.
-
-Bug reports in GitHub issues are automatically labelled ``bug`` and ``triage``. Spack developers
-assign one of the labels ``impact-low``, ``impact-medium`` or ``impact-high``. This will make the
-issue appear in the `Triaged bugs <https://github.com/orgs/spack/projects/6>`_ project board.
-Important issues should be assigned to the next milestone as well, so they appear at the top of
-the project board.
-
-Spack's milestones are not firm commitments so we move work between releases frequently. If we
-need to make a release and some tasks are not yet done, we will simply move them to the next major
-release milestone, rather than delaying the release to complete them.
-
-^^^^^^^^^^^^^^^^^^^^^
-Backporting bug fixes
-^^^^^^^^^^^^^^^^^^^^^
-
-When a bug is fixed in the ``develop`` branch, it is often necessary to backport the fix to one
-(or more) of the ``release/vX.Y`` branches. Only the release manager is responsible for doing
-backports, but Spack maintainers are responsible for labelling pull requests (and issues if no bug
-fix is available yet) with ``vX.Y.Z`` labels. The label should correspond to the next patch version
-that the bug fix should be backported to.
-
-Backports are done publicly by the release manager using a pull request named ``Backports vX.Y.Z``.
-This pull request is opened from the ``backports/vX.Y.Z`` branch, targets the ``releases/vX.Y``
-branch and contains a (growing) list of cherry-picked commits from the ``develop`` branch.
-Typically there are one or two backport pull requests open at any given time.
+We schedule work for releases by creating `GitHub projects
+<https://github.com/spack/spack/projects>`_. At any time, there may be
+several open release projects. For example, below are two releases (from
+some past version of the page linked above):
+
+.. image:: images/projects.png
+
+This image shows one release in progress for ``0.15.1`` and another for
+``0.16.0``. Each of these releases has a project board containing issues
+and pull requests. GitHub shows a status bar with completed work in
+green, work in progress in purple, and work not started yet in gray, so
+it's fairly easy to see progress.
+
+Spack's project boards are not firm commitments so we move work between
+releases frequently. If we need to make a release and some tasks are not
+yet done, we will simply move them to the next minor or major release, rather
+than delaying the release to complete them.
+
+For more on using GitHub project boards, see `GitHub's documentation
+<https://docs.github.com/en/github/managing-your-work-on-github/about-project-boards>`_.

 .. _major-releases:


@@ -811,21 +802,25 @@ Typically there are one or two backport pull requests open at any given time.
 Making major releases
 ^^^^^^^^^^^^^^^^^^^^^

-Assuming all required work from the milestone is completed, the steps to make the major release
-are:
-
-#. `Create a new milestone <https://github.com/spack/spack/milestones>`_ for the next major
-   release.
-
-#. `Create a new label <https://github.com/spack/spack/labels>`_ for the next patch release.
-
-#. Move any optional tasks that are not done to the next milestone.
+Assuming a project board has already been created and all required work
+completed, the steps to make the major release are:
+
+#. Create two new project boards:
+
+   * One for the next major release
+   * One for the next point release
+
+#. Move any optional tasks that are not done to one of the new project boards.
+
+   In general, small bugfixes should go to the next point release. Major
+   features, refactors, and changes that could affect concretization should
+   go in the next major release.

 #. Create a branch for the release, based on ``develop``:

    .. code-block:: console

-      $ git checkout -b releases/v0.23 develop
+      $ git checkout -b releases/v0.15 develop

    For a version ``vX.Y.Z``, the branch's name should be
    ``releases/vX.Y``. That is, you should create a ``releases/vX.Y``

@@ -861,8 +856,8 @@ are:

    Create a pull request targeting the ``develop`` branch, bumping the major
    version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
-   For instance when you have just released ``v0.23.0``, set the version
-   to ``(0, 24, 0, 'dev0')`` on ``develop``.
+   For instance when you have just released ``v0.15.0``, set the version
+   to ``(0, 16, 0, 'dev0')`` on ``develop``.

 #. Follow the steps in :ref:`publishing-releases`.

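For reference, the version is a tuple near the top of ``lib/spack/spack/__init__.py``; roughly like the following sketch (the surrounding code may differ between versions):

.. code-block:: python

   spack_version_info = (0, 16, 0, "dev0")
   spack_version = ".".join(str(s) for s in spack_version_info)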

@@ -871,52 +866,82 @@ are:
 #. Follow the steps in :ref:`announcing-releases`.


-.. _patch-releases:
+.. _point-releases:

 ^^^^^^^^^^^^^^^^^^^^^
-Making patch releases
+Making point releases
 ^^^^^^^^^^^^^^^^^^^^^

-To make the patch release process both efficient and transparent, we use a *backports pull request*
-which contains cherry-picked commits from the ``develop`` branch. The majority of the work is to
-cherry-pick the bug fixes, which ideally should be done as soon as they land on ``develop``:
-this ensures cherry-picking happens in order, and makes conflicts easier to resolve since the
-changes are fresh in the mind of the developer.
-
-The backports pull request is always titled ``Backports vX.Y.Z`` and is labelled ``backports``. It
-is opened from a branch named ``backports/vX.Y.Z`` and targets the ``releases/vX.Y`` branch.
-
-Whenever a pull request labelled ``vX.Y.Z`` is merged, cherry-pick the associated squashed commit
-on ``develop`` to the ``backports/vX.Y.Z`` branch. For pull requests that were rebased (or not
-squashed), cherry-pick each associated commit individually. Never force push to the
-``backports/vX.Y.Z`` branch.
-
-.. warning::
-
-   Sometimes you may **still** get merge conflicts even if you have
-   cherry-picked all the commits in order. This generally means there
-   is some other intervening pull request that the one you're trying
-   to pick depends on. In these cases, you'll need to make a judgment
-   call regarding those pull requests. Consider the number of affected
-   files and/or the resulting differences.
-
-   1. If the changes are small, you might just cherry-pick it.
-
-   2. If the changes are large, then you may decide that this fix is not
-      worth including in a patch release, in which case you should remove
-      the label from the pull request. Remember that large, manual backports
-      are seldom the right choice for a patch release.
-
-When all commits are cherry-picked in the ``backports/vX.Y.Z`` branch, make the patch
-release as follows:
-
-#. `Create a new label <https://github.com/spack/spack/labels>`_ ``vX.Y.{Z+1}`` for the next patch
-   release.
-
-#. Replace the label ``vX.Y.Z`` with ``vX.Y.{Z+1}`` for all PRs and issues that are not done.
-
-#. Manually push a single commit with commit message ``Set version to vX.Y.Z`` to the
-   ``backports/vX.Y.Z`` branch, that both bumps the Spack version number and updates the changelog:
+Assuming a project board has already been created and all required work
+completed, the steps to make the point release are:
+
+#. Create a new project board for the next point release.
+
+#. Move any optional tasks that are not done to the next project board.
+
+#. Check out the release branch (it should already exist).
+
+   For the ``X.Y.Z`` release, the release branch is called ``releases/vX.Y``.
+   For ``v0.15.1``, you would check out ``releases/v0.15``:
+
+   .. code-block:: console
+
+      $ git checkout releases/v0.15
+
+#. If a pull request to the release branch named ``Backports vX.Y.Z`` is not already
+   in the project, create it. This pull request ought to be created as early as
+   possible when working on a release project, so that we can build the release
+   commits incrementally, and identify potential conflicts at an early stage.
+
+#. Cherry-pick each pull request in the ``Done`` column of the release
+   project board onto the ``Backports vX.Y.Z`` pull request.
+
+   This is **usually** fairly simple since we squash the commits from the
+   vast majority of pull requests. That means there is only one commit
+   per pull request to cherry-pick. For example, `this pull request
+   <https://github.com/spack/spack/pull/15777>`_ has three commits, but
+   they were squashed into a single commit on merge. You can see the
+   commit that was created here:
+
+   .. image:: images/pr-commit.png
+
+   You can easily cherry pick it like this (assuming you already have the
+   release branch checked out):
+
+   .. code-block:: console
+
+      $ git cherry-pick 7e46da7
+
+   For pull requests that were rebased (or not squashed), you'll need to
+   cherry-pick each associated commit individually.
+
+   .. warning::
+
+      It is important to cherry-pick commits in the order they happened,
+      otherwise you can get conflicts while cherry-picking. When
+      cherry-picking, look at the merge date,
+      **not** the number of the pull request or the date it was opened.
+
+      Sometimes you may **still** get merge conflicts even if you have
+      cherry-picked all the commits in order. This generally means there
+      is some other intervening pull request that the one you're trying
+      to pick depends on. In these cases, you'll need to make a judgment
+      call regarding those pull requests. Consider the number of affected
+      files and/or the resulting differences.
+
+      1. If the dependency changes are small, you might just cherry-pick it,
+         too. If you do this, add the task to the release board.
+
+      2. If the changes are large, then you may decide that this fix is not
+         worth including in a point release, in which case you should remove
+         the task from the release project.
+
+      3. You can always decide to manually back-port the fix to the release
+         branch if neither of the above options makes sense, but this can
+         require a lot of work. It's seldom the right choice.
+
+#. When all the commits from the project board are cherry-picked into
+   the ``Backports vX.Y.Z`` pull request, you can push a commit to:

    1. Bump the version in ``lib/spack/spack/__init__.py``.
    2. Update ``CHANGELOG.md`` with a list of the changes.

@@ -925,22 +950,20 @@ release as follows:
    release branch. See `the changelog from 0.14.1
    <https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.

-#. Make sure CI passes on the **backports pull request**, including:
+#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
+   is needed to keep track in the release branch of all the commits that were
+   cherry-picked.
+
+#. Make sure CI passes on the release branch, including:

    * Regular unit tests
    * Build tests
    * The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_

-#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
-   is needed to keep track in the release branch of all the commits that were
-   cherry-picked.
-
-#. Make sure CI passes on the last commit of the **release branch**.
-
-#. In the rare case you need to include additional commits in the patch release after the backports
-   PR is merged, it is best to delete the last commit ``Set version to vX.Y.Z`` from the release
-   branch with a single force push, open a new backports PR named ``Backports vX.Y.Z (2)``, and
-   repeat the process. Avoid repeated force pushes to the release branch.
+   If CI does not pass, you'll need to figure out why, and make changes
+   to the release branch until it does. You can make more commits, modify
+   or remove cherry-picked commits, or cherry-pick **more** from
+   ``develop`` to make this happen.

 #. Follow the steps in :ref:`publishing-releases`.


@@ -1015,31 +1038,25 @@ Updating `releases/latest`

 If the new release is the **highest** Spack release yet, you should
 also tag it as ``releases/latest``. For example, suppose the highest
-release is currently ``0.22.3``:
+release is currently ``0.15.3``:

-* If you are releasing ``0.22.4`` or ``0.23.0``, then you should tag
-  it with ``releases/latest``, as these are higher than ``0.22.3``.
+* If you are releasing ``0.15.4`` or ``0.16.0``, then you should tag
+  it with ``releases/latest``, as these are higher than ``0.15.3``.

 * If you are making a new release of an **older** major version of
-  Spack, e.g. ``0.21.4``, then you should not tag it as
+  Spack, e.g. ``0.14.4``, then you should not tag it as
   ``releases/latest`` (as there are newer major versions).

-To do so, first fetch the latest tag created on GitHub, since you may not have it locally:
+To tag ``releases/latest``, do this:

 .. code-block:: console

-   $ git fetch --force git@github.com:spack/spack vX.Y.Z
-
-Then tag ``vX.Y.Z`` as ``releases/latest`` and push the individual tag to GitHub.
-
-.. code-block:: console
-
-   $ git tag --force releases/latest vX.Y.Z
-   $ git push --force git@github.com:spack/spack releases/latest
-
-The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing ``releases/latest``
-tag with the new one. Do **not** use the ``--tags`` flag when pushing, since this will push *all*
-local tags.
+   $ git checkout releases/vX.Y    # vX.Y is the new release's branch
+   $ git tag --force releases/latest
+   $ git push --force --tags
+
+The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing
+``releases/latest`` tag with the new one.

 .. _announcing-releases:

@@ -5,56 +5,49 @@

 .. _environments:

-=====================================
-Environments (spack.yaml, spack.lock)
-=====================================
+=========================
+Environments (spack.yaml)
+=========================

-An environment is used to group a set of specs intended for some purpose
-to be built, rebuilt, and deployed in a coherent fashion. Environments
-define aspects of the installation of the software, such as:
-
-#. *which* specs to install;
-#. *how* those specs are configured; and
-#. *where* the concretized software will be installed.
-
-Aggregating this information into an environment for processing has advantages
-over the *à la carte* approach of building and loading individual Spack modules.
-
-With environments, you concretize, install, or load (activate) all of the
-specs with a single command. Concretization fully configures the specs
-and dependencies of the environment in preparation for installing the
-software. This is a more robust solution than ad-hoc installation scripts.
-And you can share an environment or even re-use it on a different computer.
-
-Environment definitions, especially *how* specs are configured, allow the
-software to remain stable and repeatable even when Spack packages are upgraded.
-Changes are only picked up when the environment is explicitly re-concretized.
-
-Defining *where* specs are installed supports a filesystem view of the
-environment. Yet Spack maintains a single installation of the software that
-can be re-used across multiple environments.
-
-Activating an environment determines *when* all of the associated (and
-installed) specs are loaded, so it limits the software loaded to those specs
-actually needed by the environment. Spack can even generate a script to
-load all modules related to an environment.
+An environment is used to group together a set of specs for the
+purpose of building, rebuilding and deploying in a coherent fashion.
+Environments provide a number of advantages over the *à la carte*
+approach of building and loading individual Spack modules:
+
+#. Environments separate the steps of (a) choosing what to
+   install, (b) concretizing, and (c) installing. This allows
+   Environments to remain stable and repeatable, even if Spack packages
+   are upgraded: specs are only re-concretized when the user
+   explicitly asks for it. It is even possible to reliably
+   transport environments between different computers running
+   different versions of Spack!
+#. Environments allow several specs to be built at once; a more robust
+   solution than ad-hoc scripts making multiple calls to ``spack
+   install``.
+#. An Environment that is built as a whole can be loaded as a whole
+   into the user environment. An Environment can be built to maintain
+   a filesystem view of its packages, and the environment can load
+   that view into the user environment at activation time. Spack can
+   also generate a script to load all modules related to an
+   environment.

 Other packaging systems also provide environments that are similar in
 some ways to Spack environments; for example, `Conda environments
 <https://conda.io/docs/user-guide/tasks/manage-environments.html>`_ or
 `Python Virtual Environments
 <https://docs.python.org/3/tutorial/venv.html>`_. Spack environments
-provide some distinctive features though:
+provide some distinctive features:

 #. A spec installed "in" an environment is no different from the same
-   spec installed anywhere else in Spack.
-#. Spack environments may contain more than one spec of the same
+   spec installed anywhere else in Spack. Environments are assembled
+   simply by collecting together a set of specs.
+#. Spack Environments may contain more than one spec of the same
    package.

 Spack uses a "manifest and lock" model similar to `Bundler gemfiles
-<https://bundler.io/man/gemfile.5.html>`_ and other package managers.
-The environment's user input file (or manifest) is named ``spack.yaml``.
-The lock file, which contains the fully configured and concretized specs,
-is named ``spack.lock``.
+<https://bundler.io/man/gemfile.5.html>`_ and other package
+managers. The user input file is named ``spack.yaml`` and the lock
+file is named ``spack.lock``.

 .. _environments-using:
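To ground the manifest-and-lock model, a minimal ``spack.yaml`` sketch (the specs are arbitrary examples); concretizing the environment then writes the corresponding ``spack.lock``:

.. code-block:: yaml

   spack:
     specs:
     - zlib
     - hdf5 +mpi
     view: true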

@@ -75,60 +68,55 @@ An environment is created by:

    $ spack env create myenv

-The directory ``$SPACK_ROOT/var/spack/environments/myenv`` is created
-to manage the environment.
+Spack then creates the directory ``var/spack/environments/myenv``.

 .. note::

-   All managed environments by default are stored in the
-   ``$SPACK_ROOT/var/spack/environments`` folder. This location can be changed
-   by setting the ``environments_root`` variable in ``config.yaml``.
+   All managed environments by default are stored in the ``var/spack/environments`` folder.
+   This location can be changed by setting the ``environments_root`` variable in ``config.yaml``.

-Spack creates the file ``spack.yaml``, hidden directory ``.spack-env``, and
-``spack.lock`` file under ``$SPACK_ROOT/var/spack/environments/myenv``. User
-interaction occurs through the ``spack.yaml`` file and the Spack commands
-that affect it. Metadata and, by default, the view are stored in the
-``.spack-env`` directory. When the environment is concretized, Spack creates
-the ``spack.lock`` file with the fully configured specs and dependencies for
+In the ``var/spack/environments/myenv`` directory, Spack creates the
+file ``spack.yaml`` and the hidden directory ``.spack-env``.
+
+Spack stores metadata in the ``.spack-env`` directory. User
+interaction will occur through the ``spack.yaml`` file and the Spack
+commands that affect it. When the environment is concretized, Spack
+will create a file ``spack.lock`` with the concrete information for
 the environment.

-The ``.spack-env`` subdirectory also contains:
+In addition to being the default location for the view associated with
+an Environment, the ``.spack-env`` directory also contains:

-* ``repo/``: A subdirectory acting as the repo consisting of the Spack
-  packages used in the environment. It allows the environment to build
-  the same, in theory, even on different versions of Spack with different
+* ``repo/``: A repo consisting of the Spack packages used in this
+  environment. This allows the environment to build the same, in
+  theory, even on different versions of Spack with different
   packages!
-* ``logs/``: A subdirectory containing the build logs for the packages
-  in this environment.
+* ``logs/``: A directory containing the build logs for the packages
+  in this Environment.

-Spack Environments can also be created from either the user input, or
-manifest, file or the lockfile. Create an environment from a manifest using:
+Spack Environments can also be created from either a manifest file
+(usually but not necessarily named ``spack.yaml``) or a lockfile.
+To create an Environment from a manifest:

 .. code-block:: console

    $ spack env create myenv spack.yaml

-The resulting environment is guaranteed to have the same root specs as
-the original but may concretize differently in the presence of different
-explicit or default configuration settings (e.g., a different version of
-Spack or for a different user account).
-
-Create an environment from a ``spack.lock`` file using:
+To create an Environment from a ``spack.lock`` lockfile:

 .. code-block:: console

    $ spack env create myenv spack.lock

-The resulting environment, when on the same or a compatible machine, is
-guaranteed to initially have the same concrete specs as the original.
-
-.. note::
-
-   Environment creation also accepts a full path to the file.
-
-   If the path is not under the ``$SPACK_ROOT/var/spack/environments``
-   directory then the source is referred to as an
-   :ref:`independent environment <independent_environments>`.
+Either of these commands can also take a full path to the
+initialization file.
+
+A Spack Environment created from a ``spack.yaml`` manifest is
+guaranteed to have the same root specs as the original Environment,
+but may concretize differently. A Spack Environment created from a
+``spack.lock`` lockfile is guaranteed to have the same concrete specs
+as the original Environment. Either may obviously then differ as the
+user modifies it.

 ^^^^^^^^^^^^^^^^^^^^^^^^^
 Activating an Environment

@@ -141,7 +129,7 @@ To activate an environment, use the following command:
    $ spack env activate myenv

 By default, the ``spack env activate`` will load the view associated
-with the environment into the user environment. The ``-v,
+with the Environment into the user environment. The ``-v,
 --with-view`` argument ensures this behavior, and the ``-V,
 --without-view`` argument activates the environment without changing
 the user environment variables.
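For instance, a sketch of the two activation modes side by side:

.. code-block:: console

   $ spack env activate -v myenv    # activate and load the environment's view
   $ spack env activate -V myenv    # activate without changing user environment variables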

@@ -154,11 +142,8 @@ user's prompt to begin with the environment name in brackets.
$ spack env activate -p myenv
|
$ spack env activate -p myenv
|
||||||
[myenv] $ ...
|
[myenv] $ ...
|
||||||
|
|
||||||
The ``activate`` command can also be used to create a new environment, if it is
|
The ``activate`` command can also be used to create a new environment if it does not already
|
||||||
not already defined, by adding the ``--create`` flag. Managed and independent
|
exist.
|
||||||
environments can both be created using the same flags that ``spack env create``
|
|
||||||
accepts. If an environment already exists then spack will simply activate it
|
|
||||||
and ignore the create-specific flags.
|
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
@@ -183,50 +168,49 @@ or the shortcut alias
|
|||||||
If the environment was activated with its view, deactivating the
|
If the environment was activated with its view, deactivating the
|
||||||
environment will remove the view from the user environment.
|
environment will remove the view from the user environment.
|
||||||
|
|
||||||
.. _independent_environments:
|
^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
Anonymous Environments
|
||||||
|
^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
Apart from managed environments, Spack also supports anonymous environments.
|
||||||
Independent Environments
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Independent environments can be located in any directory outside of Spack.
|
Anonymous environments can be placed in any directory of choice.
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
When uninstalling packages, Spack asks the user to confirm the removal of packages
|
When uninstalling packages, Spack asks the user to confirm the removal of packages
|
||||||
that are still used in a managed environment. This is not the case for independent
|
that are still used in a managed environment. This is not the case for anonymous
|
||||||
environments.
|
environments.
|
||||||
|
|
||||||
To create an independent environment, use one of the following commands:
|
To create an anonymous environment, use one of the following commands:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack env create --dir my_env
|
$ spack env create --dir my_env
|
||||||
$ spack env create ./my_env
|
$ spack env create ./my_env
|
||||||
|
|
||||||
As a shorthand, you can also create an independent environment upon activation if it does not
|
As a shorthand, you can also create an anonymous environment upon activation if it does not
|
||||||
already exist:
|
already exist:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack env activate --create ./my_env
|
$ spack env activate --create ./my_env
|
||||||
|
|
||||||
For convenience, Spack can also place an independent environment in a temporary directory for you:
|
For convenience, Spack can also place an anonymous environment in a temporary directory for you:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack env activate --temp
|
$ spack env activate --temp
|
||||||
|
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
Environment-Aware Commands
|
Environment Sensitive Commands
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Spack commands are environment-aware. For example, the ``find``
|
Spack commands are environment sensitive. For example, the ``find``
|
||||||
command shows only the specs in the active environment if an
|
command shows only the specs in the active Environment if an
|
||||||
environment has been activated. Otherwise it shows all specs in
|
Environment has been activated. Similarly, the ``install`` and
|
||||||
the Spack instance. The same rule applies to the ``install`` and
|
``uninstall`` commands act on the active environment.
|
||||||
``uninstall`` commands.
|
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
@@ -271,33 +255,32 @@ the Spack instance. The same rule applies to the ``install`` and
|
|||||||
|
|
||||||
|
|
||||||
Note that when we installed the abstract spec ``zlib@1.2.8``, it was
|
Note that when we installed the abstract spec ``zlib@1.2.8``, it was
|
||||||
presented as a root of the environment. All explicitly installed
|
presented as a root of the Environment. All explicitly installed
|
||||||
packages will be listed as roots of the environment.
|
packages will be listed as roots of the Environment.
|
||||||
|
|
||||||
All of the Spack commands that act on the list of installed specs are
|
All of the Spack commands that act on the list of installed specs are
|
||||||
environment-aware in this way, including ``install``,
|
Environment-sensitive in this way, including ``install``,
|
||||||
``uninstall``, ``find``, ``extensions``, etc. In the
|
``uninstall``, ``find``, ``extensions``, and more. In the
|
||||||
:ref:`environment-configuration` section we will discuss
|
:ref:`environment-configuration` section we will discuss
|
||||||
environment-aware commands further.
|
Environment-sensitive commands further.
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^
|
||||||
Adding Abstract Specs
|
Adding Abstract Specs
|
||||||
^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
An abstract spec is the user-specified spec before Spack applies
|
An abstract spec is the user-specified spec before Spack has applied
|
||||||
defaults or dependency information.
|
any defaults or dependency information.
|
||||||
|
|
||||||
Users can add abstract specs to an environment using the ``spack add``
|
Users can add abstract specs to an Environment using the ``spack add``
|
||||||
command. The most important component of an environment is a list of
|
command. The most important component of an Environment is a list of
|
||||||
abstract specs.
|
abstract specs.
|
||||||
|
|
||||||
Adding a spec adds it as a root spec of the environment in the user
|
Adding a spec adds to the manifest (the ``spack.yaml`` file), which is
|
||||||
input file (``spack.yaml``). It does not affect the concrete specs
|
used to define the roots of the Environment, but does not affect the
|
||||||
in the lock file (``spack.lock``) and it does not install the spec.
|
concrete specs in the lockfile, nor does it install the spec.
|
||||||
|
|
||||||
The ``spack add`` command is environment-aware. It adds the spec to the
|
The ``spack add`` command is environment-aware. It adds to the
|
||||||
currently active environment. An error is generated if there isn't an
|
currently active environment. All environment-aware commands can also
|
||||||
active environment. All environment-aware commands can also
|
|
||||||
be called using the ``spack -e`` flag to specify the environment.
|
be called using the ``spack -e`` flag to specify the environment.
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
@@ -317,11 +300,11 @@ or
|
|||||||
Concretizing
|
Concretizing
|
||||||
^^^^^^^^^^^^
|
^^^^^^^^^^^^
|
||||||
|
|
||||||
Once user specs have been added to an environment, they can be concretized.
|
Once some user specs have been added to an environment, they can be concretized.
|
||||||
There are three different modes of operation to concretize an environment,
|
There are at the moment three different modes of operation to concretize an environment,
|
||||||
explained in detail in :ref:`environments_concretization_config`.
|
which are explained in detail in :ref:`environments_concretization_config`.
|
||||||
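As a sketch, the mode is selected with the ``concretizer:unify`` setting in the
manifest, which accepts ``true``, ``false``, or ``when_possible``:

.. code-block:: yaml

   spack:
     specs: [hdf5, zlib]
     concretizer:
       unify: when_possible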
Regardless of which mode of operation is chosen, the following
|
Regardless of which mode of operation has been chosen, the following
|
||||||
command will ensure all of the root specs are concretized according to the
|
command will ensure all the root specs are concretized according to the
|
||||||
constraints that are prescribed in the configuration:
|
constraints that are prescribed in the configuration:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
@@ -330,15 +313,16 @@ constraints that are prescribed in the configuration:
|
|||||||
|
|
||||||
In the case of specs that are not concretized together, the command
|
In the case of specs that are not concretized together, the command
|
||||||
above will concretize only the specs that were added and not yet
|
above will concretize only the specs that were added and not yet
|
||||||
concretized. Forcing a re-concretization of all of the specs can be done
|
concretized. Forcing a re-concretization of all the specs can be done
|
||||||
by adding the ``-f`` option:
|
instead with this command:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
[myenv]$ spack concretize -f
|
[myenv]$ spack concretize -f
|
||||||
|
|
||||||
Without the option, Spack guarantees that already concretized specs are
|
When the ``-f`` flag is not used to reconcretize all specs, Spack
|
||||||
unchanged in the environment.
|
guarantees that already concretized specs are unchanged in the
|
||||||
|
environment.
|
||||||
|
|
||||||
The ``concretize`` command does not install any packages. For packages
|
The ``concretize`` command does not install any packages. For packages
|
||||||
that have already been installed outside of the environment, the
|
that have already been installed outside of the environment, the
|
||||||
@@ -371,16 +355,16 @@ installed specs using the ``-c`` (``--concretized``) flag.
|
|||||||
Installing an Environment
|
Installing an Environment
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
In addition to adding individual specs to an environment, one
|
In addition to installing individual specs into an Environment, one
|
||||||
can install the entire environment at once using the command
|
can install the entire Environment at once using the command
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
[myenv]$ spack install
|
[myenv]$ spack install
|
||||||
|
|
||||||
If the environment has been concretized, Spack will install the
|
If the Environment has been concretized, Spack will install the
|
||||||
concretized specs. Otherwise, ``spack install`` will concretize
|
concretized specs. Otherwise, ``spack install`` will first concretize
|
||||||
the environment before installing the concretized specs.
|
the Environment and then install the concretized specs.
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
@@ -401,17 +385,17 @@ the environment before installing the concretized specs.
|
|||||||
|
|
||||||
|
|
||||||
As it installs, ``spack install`` creates symbolic links in the
|
As it installs, ``spack install`` creates symbolic links in the
|
||||||
``logs/`` directory in the environment, allowing for easy inspection
|
``logs/`` directory in the Environment, allowing for easy inspection
|
||||||
of build logs related to that environment. The ``spack install``
|
of build logs related to that environment. The ``spack install``
|
||||||
command also stores a Spack repo containing the ``package.py`` file
|
command also stores a Spack repo containing the ``package.py`` file
|
||||||
used at install time for each package in the ``repos/`` directory in
|
used at install time for each package in the ``repos/`` directory in
|
||||||
the environment.
|
the Environment.
|
||||||
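For example, the symlinked logs can be listed directly from the environment
directory (file names depend on the installed specs):

.. code-block:: console

   [myenv]$ ls logs/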
|
|
||||||
The ``--no-add`` option can be used in a concrete environment to tell
|
The ``--no-add`` option can be used in a concrete environment to tell
|
||||||
spack to install specs already present in the environment but not to
|
spack to install specs already present in the environment but not to
|
||||||
add any new root specs to the environment. For root specs provided
|
add any new root specs to the environment. For root specs provided
|
||||||
to ``spack install`` on the command line, ``--no-add`` is the default,
|
to ``spack install`` on the command line, ``--no-add`` is the default,
|
||||||
while for dependency specs, it is optional. In other
|
while for dependency specs on the other hand, it is optional. In other
|
||||||
words, if there is an unambiguous match in the active concrete environment
|
words, if there is an unambiguous match in the active concrete environment
|
||||||
for a root spec provided to ``spack install`` on the command line, spack
|
for a root spec provided to ``spack install`` on the command line, spack
|
||||||
does not require you to specify the ``--no-add`` option to prevent the spec
|
does not require you to specify the ``--no-add`` option to prevent the spec
|
||||||
@@ -425,22 +409,12 @@ Developing Packages in a Spack Environment
|
|||||||
|
|
||||||
The ``spack develop`` command allows one to develop Spack packages in
|
The ``spack develop`` command allows one to develop Spack packages in
|
||||||
an environment. It requires a spec containing a concrete version, and
|
an environment. It requires a spec containing a concrete version, and
|
||||||
will configure Spack to install the package from local source.
|
will configure Spack to install the package from local source. By
|
||||||
If a version is not provided from the command line interface then spack
|
default, it will also clone the package to a subdirectory in the
|
||||||
will automatically pick the highest version the package has defined.
|
environment. This package will have a special variant ``dev_path``
|
||||||
This means any infinity versions (``develop``, ``main``, ``stable``) will be
|
|
||||||
preferred in this selection process.
|
|
||||||
By default, ``spack develop`` will also clone the package to a subdirectory in the
|
|
||||||
environment for the local source. This package will have a special variant ``dev_path``
|
|
||||||
set, and Spack will ensure the package and its dependents are rebuilt
|
set, and Spack will ensure the package and its dependents are rebuilt
|
||||||
any time the environment is installed if the package's local source
|
any time the environment is installed if the package's local source
|
||||||
code has been modified. Spack's native check for source modifications
|
code has been modified. Spack ensures that all instances of a
|
||||||
is whether the ``mtime`` is newer than the installation.
|
|
||||||
A custom check can be created by overriding the ``detect_dev_src_change`` method
|
|
||||||
in your package class. This is particularly useful for projects that use custom
|
|
||||||
Spack repos to drive development and want to optimize performance.
|
|
||||||
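A minimal sketch of such an override follows; the exact signature and the
attribute used to locate the local source are assumptions, so check the base
class in your version of Spack:

.. code-block:: python

   class Mylib(Package):
       """Hypothetical package with a custom source-change check."""

       def detect_dev_src_change(self) -> bool:
           # Assumption: returning True causes this package (and its
           # dependents) to be rebuilt on the next environment install.
           import subprocess

           # Assumption: stage.source_path points at the local dev source.
           # Ask git whether the tree is dirty instead of comparing mtimes.
           result = subprocess.run(
               ["git", "-C", self.stage.source_path, "status", "--porcelain"],
               capture_output=True,
               text=True,
               check=False,
           )
           return bool(result.stdout.strip())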
|
|
||||||
Spack ensures that all instances of a
|
|
||||||
developed package in the environment are concretized to match the
|
developed package in the environment are concretized to match the
|
||||||
version (and other constraints) passed as the spec argument to the
|
version (and other constraints) passed as the spec argument to the
|
||||||
``spack develop`` command.
|
``spack develop`` command.
|
||||||
@@ -450,7 +424,7 @@ also be used as valid concrete versions (see :ref:`version-specifier`).
|
|||||||
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
|
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
|
||||||
the ``main`` branch of the package, and ``spack install`` will install from
|
the ``main`` branch of the package, and ``spack install`` will install from
|
||||||
that git clone if ``foo`` is in the environment.
|
that git clone if ``foo`` is in the environment.
|
||||||
Further development on ``foo`` can be tested by re-installing the environment,
|
Further development on ``foo`` can be tested by reinstalling the environment,
|
||||||
and eventually committed and pushed to the upstream git repo.
|
and eventually committed and pushed to the upstream git repo.
|
||||||
|
|
||||||
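A sketch of that loop (``foo`` stands in for a real package):

.. code-block:: console

   [myenv]$ spack develop foo@git.main
   [myenv]$ spack install
   # edit the cloned sources, then re-install to rebuild foo and its dependents
   [myenv]$ spack install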
If the package being developed supports out-of-source builds then users can use the
|
If the package being developed supports out-of-source builds then users can use the
|
||||||
@@ -635,7 +609,7 @@ manipulate configuration inline in the ``spack.yaml`` file.
|
|||||||
Inline configurations
|
Inline configurations
|
||||||
^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Inline environment-scope configuration is done using the same yaml
|
Inline Environment-scope configuration is done using the same yaml
|
||||||
format as standard Spack configuration scopes, covered in the
|
format as standard Spack configuration scopes, covered in the
|
||||||
:ref:`configuration` section. Each section is contained under a
|
:ref:`configuration` section. Each section is contained under a
|
||||||
top-level yaml object with its name. For example, a ``spack.yaml``
|
top-level yaml object with its name. For example, a ``spack.yaml``
|
||||||
@@ -660,7 +634,7 @@ Included configurations
|
|||||||
|
|
||||||
Spack environments allow an ``include`` heading in their yaml
|
Spack environments allow an ``include`` heading in their yaml
|
||||||
schema. This heading pulls in external configuration files and applies
|
schema. This heading pulls in external configuration files and applies
|
||||||
them to the environment.
|
them to the Environment.
|
||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
@@ -673,9 +647,6 @@ them to the environment.
|
|||||||
Environments can include files or URLs. File paths can be relative or
|
Environments can include files or URLs. File paths can be relative or
|
||||||
absolute. URLs include the path to the text for individual files or
|
absolute. URLs include the path to the text for individual files or
|
||||||
can be the path to a directory containing configuration files.
|
can be the path to a directory containing configuration files.
|
||||||
Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
|
|
||||||
schemes). Spack-specific, environment and user path variables may be
|
|
||||||
used in these paths. See :ref:`config-file-variables` for more information.
|
|
||||||
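A sketch mixing both forms (the path and URL are hypothetical):

.. code-block:: yaml

   spack:
     include:
     - /absolute/path/to/packages.yaml
     - https://example.com/spack/config.yaml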
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
Configuration precedence
|
Configuration precedence
|
||||||
@@ -690,7 +661,7 @@ have higher precedence, as the included configs are applied in reverse order.
|
|||||||
Manually Editing the Specs List
|
Manually Editing the Specs List
|
||||||
-------------------------------
|
-------------------------------
|
||||||
|
|
||||||
The list of abstract/root specs in the environment is maintained in
|
The list of abstract/root specs in the Environment is maintained in
|
||||||
the ``spack.yaml`` manifest under the heading ``specs``.
|
the ``spack.yaml`` manifest under the heading ``specs``.
|
||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
@@ -798,7 +769,7 @@ evaluates to the cross-product of those specs. Spec matrices also
|
|||||||
contain an ``excludes`` directive, which eliminates certain
|
contain an ``excludes`` directive, which eliminates certain
|
||||||
combinations from the evaluated result.
|
combinations from the evaluated result.
|
||||||
|
|
||||||
The following two environment manifests are identical:
|
The following two Environment manifests are identical:
|
||||||
|
|
||||||
.. code-block:: yaml
|
.. code-block:: yaml
|
||||||
|
|
||||||
@@ -873,7 +844,7 @@ files are identical.
|
|||||||
In short files like the example, it may be easier to simply list the
|
In short files like the example, it may be easier to simply list the
|
||||||
included specs. However for more complicated examples involving many
|
included specs. However for more complicated examples involving many
|
||||||
packages across many toolchains, separately factored lists make
|
packages across many toolchains, separately factored lists make
|
||||||
environments substantially more manageable.
|
Environments substantially more manageable.
|
||||||
|
|
||||||
Additionally, the ``-l`` option to the ``spack add`` command allows
|
Additionally, the ``-l`` option to the ``spack add`` command allows
|
||||||
one to add to named lists in the definitions section of the manifest
|
one to add to named lists in the definitions section of the manifest
|
||||||
@@ -922,8 +893,9 @@ The valid variables for a ``when`` clause are:
|
|||||||
|
|
||||||
#. ``env``. The user environment (usually ``os.environ`` in Python).
|
#. ``env``. The user environment (usually ``os.environ`` in Python).
|
||||||
|
|
||||||
#. ``hostname``. The hostname of the system (if ``hostname`` is an
|
#. ``hostname``. The hostname of the system.
|
||||||
executable in the user's PATH).
|
|
||||||
|
#. ``full_hostname``. The fully qualified hostname of the system.
|
||||||
|
|
||||||
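For example, a definition can be guarded with a ``when`` expression over these
variables (the hostname value is hypothetical):

.. code-block:: yaml

   spack:
     definitions:
     - compilers: ['%gcc']
     - compilers: ['%intel']
       when: hostname == 'login1'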
^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
SpecLists as Constraints
|
SpecLists as Constraints
|
||||||
@@ -1042,7 +1014,7 @@ file snippet we define a view named ``mpis``, rooted at
|
|||||||
``/path/to/view`` in which all projections use the package name,
|
``/path/to/view`` in which all projections use the package name,
|
||||||
version, and compiler name to determine the path for a given
|
version, and compiler name to determine the path for a given
|
||||||
package. This view selects all packages that depend on MPI, and
|
package. This view selects all packages that depend on MPI, and
|
||||||
excludes those built with the GCC compiler at version 18.5.
|
excludes those built with the PGI compiler at version 18.5.
|
||||||
The root specs with their (transitive) link and run type dependencies
|
The root specs with their (transitive) link and run type dependencies
|
||||||
will be put in the view due to the ``link: all`` option,
|
will be put in the view due to the ``link: all`` option,
|
||||||
and the files in the view will be symlinks to the spack install
|
and the files in the view will be symlinks to the spack install
|
||||||
@@ -1056,7 +1028,7 @@ directories.
|
|||||||
mpis:
|
mpis:
|
||||||
root: /path/to/view
|
root: /path/to/view
|
||||||
select: [^mpi]
|
select: [^mpi]
|
||||||
exclude: ['%gcc@18.5']
|
exclude: ['%pgi@18.5']
|
||||||
projections:
|
projections:
|
||||||
all: '{name}/{version}-{compiler.name}'
|
all: '{name}/{version}-{compiler.name}'
|
||||||
link: all
|
link: all
|
||||||
@@ -1089,7 +1061,7 @@ true``). The argument ``--without-view`` can be used to create an
|
|||||||
environment without any view configured.
|
environment without any view configured.
|
||||||
|
|
||||||
The ``spack env view`` command can be used to manage the views
|
The ``spack env view`` command can be used to manage the views
|
||||||
of an environment. The subcommand ``spack env view enable`` will add a
|
of an Environment. The subcommand ``spack env view enable`` will add a
|
||||||
view named ``default`` to an environment. It takes an optional
|
view named ``default`` to an environment. It takes an optional
|
||||||
argument to specify the path for the new default view. The subcommand
|
argument to specify the path for the new default view. The subcommand
|
||||||
``spack env view disable`` will remove the view named ``default`` from
|
``spack env view disable`` will remove the view named ``default`` from
|
||||||
@@ -1257,7 +1229,7 @@ gets installed and is available for use in the ``env`` target.
|
|||||||
$(SPACK) -e . env depfile -o $@ --make-prefix spack
|
$(SPACK) -e . env depfile -o $@ --make-prefix spack
|
||||||
|
|
||||||
env: spack/env
|
env: spack/env
|
||||||
$(info environment installed!)
|
$(info Environment installed!)
|
||||||
|
|
||||||
clean:
|
clean:
|
||||||
rm -rf spack.lock env.mk spack/
|
rm -rf spack.lock env.mk spack/
|
||||||
|
|||||||
@@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown belo
|
|||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
apt update
|
apt update
|
||||||
apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd
|
apt install build-essential ca-certificates coreutils curl environment-modules gfortran git gpg lsb-release python3 python3-distutils python3-venv unzip zip
|
||||||
|
|
||||||
.. tab-item:: RHEL
|
.. tab-item:: RHEL
|
||||||
|
|
||||||
@@ -43,14 +43,14 @@ A build matrix showing which packages are working on which systems is shown belo
|
|||||||
|
|
||||||
dnf install epel-release
|
dnf install epel-release
|
||||||
dnf group install "Development Tools"
|
dnf group install "Development Tools"
|
||||||
dnf install gcc-gfortran redhat-lsb-core python3 unzip
|
dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3
|
||||||
|
|
||||||
.. tab-item:: macOS Brew
|
.. tab-item:: macOS Brew
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
brew update
|
brew update
|
||||||
brew install gcc git zip
|
brew install curl gcc git gnupg zip
|
||||||
|
|
||||||
------------
|
------------
|
||||||
Installation
|
Installation
|
||||||
@@ -61,15 +61,10 @@ Getting Spack is easy. You can clone it from the `github repository
|
|||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
|
$ git clone -c feature.manyFiles=true https://github.com/spack/spack.git
|
||||||
|
|
||||||
This will create a directory called ``spack``.
|
This will create a directory called ``spack``.
|
||||||
|
|
||||||
.. note::
|
|
||||||
``-c feature.manyFiles=true`` improves git's performance on repositories with 1,000+ files.
|
|
||||||
|
|
||||||
``--depth=2`` prunes the git history to reduce the size of the Spack installation.
|
|
||||||
|
|
||||||
.. _shell-support:
|
.. _shell-support:
|
||||||
|
|
||||||
^^^^^^^^^^^^^
|
^^^^^^^^^^^^^
|
||||||
@@ -283,6 +278,10 @@ compilers`` or ``spack compiler list``:
|
|||||||
intel@14.0.1 intel@13.0.1 intel@12.1.2 intel@10.1
|
intel@14.0.1 intel@13.0.1 intel@12.1.2 intel@10.1
|
||||||
-- clang -------------------------------------------------------
|
-- clang -------------------------------------------------------
|
||||||
clang@3.4 clang@3.3 clang@3.2 clang@3.1
|
clang@3.4 clang@3.3 clang@3.2 clang@3.1
|
||||||
|
-- pgi ---------------------------------------------------------
|
||||||
|
pgi@14.3-0 pgi@13.2-0 pgi@12.1-0 pgi@10.9-0 pgi@8.0-1
|
||||||
|
pgi@13.10-0 pgi@13.1-1 pgi@11.10-0 pgi@10.2-0 pgi@7.1-3
|
||||||
|
pgi@13.6-0 pgi@12.8-0 pgi@11.1-0 pgi@9.0-4 pgi@7.0-6
|
||||||
|
|
||||||
Any of these compilers can be used to build Spack packages. More on
|
Any of these compilers can be used to build Spack packages. More on
|
||||||
how this is done is in :ref:`sec-specs`.
|
how this is done is in :ref:`sec-specs`.
|
||||||
@@ -802,6 +801,65 @@ flags to the ``icc`` command:
|
|||||||
spec: intel@15.0.24.4.9.3
|
spec: intel@15.0.24.4.9.3
|
||||||
|
|
||||||
|
|
||||||
|
^^^
|
||||||
|
PGI
|
||||||
|
^^^
|
||||||
|
|
||||||
|
PGI comes with two sets of compilers for C++ and Fortran,
|
||||||
|
distinguishable by their names. "Old" compilers:
|
||||||
|
|
||||||
|
.. code-block:: yaml
|
||||||
|
|
||||||
|
cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
|
||||||
|
cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgCC
|
||||||
|
f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgf77
|
||||||
|
fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgf90
|
||||||
|
|
||||||
|
"New" compilers:
|
||||||
|
|
||||||
|
.. code-block:: yaml
|
||||||
|
|
||||||
|
cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
|
||||||
|
cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgc++
|
||||||
|
f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
|
||||||
|
fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
|
||||||
|
|
||||||
|
Older installations of PGI contain just the old compilers, whereas
|
||||||
|
newer installations contain the old and the new. The new compiler is
|
||||||
|
considered preferable, as some packages
|
||||||
|
(``hdf``) will not build with the old compiler.
|
||||||
|
|
||||||
|
When auto-detecting a PGI compiler, there are cases where Spack will
|
||||||
|
find the old compilers, when you really want it to find the new
|
||||||
|
compilers. It is best to check this in ``compilers.yaml``; if the old
|
||||||
|
compilers are being used, change ``pgf77`` and ``pgf90`` to
|
||||||
|
``pgfortran``.
|
||||||
|
|
||||||
|
Other issues:
|
||||||
|
|
||||||
|
* There are reports that some packages will not build with PGI,
|
||||||
|
including ``libpciaccess`` and ``openssl``. A workaround is to
|
||||||
|
build these packages with another compiler and then use them as
|
||||||
|
dependencies for PGI-build packages. For example:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ spack install openmpi%pgi ^libpciaccess%gcc
|
||||||
|
|
||||||
|
|
||||||
|
* PGI requires a license to use; see :ref:`licensed-compilers` for more
|
||||||
|
information on installation.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
It is believed the problem with HDF 4 is that everything is
|
||||||
|
compiled with the ``F77`` compiler, but at some point some Fortran
|
||||||
|
90 code slipped in there. So compilers that can handle both FORTRAN
|
||||||
|
77 and Fortran 90 (``gfortran``, ``pgfortran``, etc) are fine. But
|
||||||
|
compilers specific to one or the other (``pgf77``, ``pgf90``) won't
|
||||||
|
work.
|
||||||
|
|
||||||
|
|
||||||
^^^
|
^^^
|
||||||
NAG
|
NAG
|
||||||
^^^
|
^^^
|
||||||
@@ -1326,7 +1384,6 @@ Required:
|
|||||||
* Microsoft Visual Studio
|
* Microsoft Visual Studio
|
||||||
* Python
|
* Python
|
||||||
* Git
|
* Git
|
||||||
* 7z
|
|
||||||
|
|
||||||
Optional:
|
Optional:
|
||||||
* Intel Fortran (needed for some packages)
|
* Intel Fortran (needed for some packages)
|
||||||
@@ -1392,13 +1449,6 @@ as the project providing Git support on Windows. This is additionally the recomm
|
|||||||
for installing Git on Windows, a link to which can be found above. Spack requires the
|
for installing Git on Windows, a link to which can be found above. Spack requires the
|
||||||
utilities vendored by this project.
|
utilities vendored by this project.
|
||||||
|
|
||||||
"""
|
|
||||||
7zip
|
|
||||||
"""
|
|
||||||
|
|
||||||
A tool for extracting ``.xz`` files is required for extracting source tarballs. The latest 7zip
|
|
||||||
can be located at https://sourceforge.net/projects/sevenzip/.
|
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
Step 2: Install and setup Spack
|
Step 2: Install and setup Spack
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
@@ -1425,14 +1475,16 @@ in a Windows CMD prompt.
|
|||||||
Step 3: Run and configure Spack
|
Step 3: Run and configure Spack
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
On Windows, Spack supports both primary native shells: PowerShell and the traditional command prompt.
|
To use Spack, run ``bin\spack_cmd.bat`` (you may need to Run as Administrator) from the top-level spack
|
||||||
To use Spack, pick your favorite shell and run ``bin\spack_cmd.bat`` or ``share/spack/setup-env.ps1``
|
directory. This will provide a Windows command prompt with an environment properly set up with Spack
|
||||||
(you may need to Run as Administrator) from the top-level spack
|
and its prerequisites. If you receive a warning message that Python is not in your ``PATH``
|
||||||
directory. This will provide a Spack-enabled shell. If you receive a warning message that Python is not in your ``PATH``
|
|
||||||
(which may happen if you installed Python from the website and not the Windows Store) add the location
|
(which may happen if you installed Python from the website and not the Windows Store) add the location
|
||||||
of the Python executable to your ``PATH`` now. You can permanently add Python to your ``PATH`` variable
|
of the Python executable to your ``PATH`` now. You can permanently add Python to your ``PATH`` variable
|
||||||
by using the ``Edit the system environment variables`` utility in Windows Control Panel.
|
by using the ``Edit the system environment variables`` utility in Windows Control Panel.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
Alternatively, Powershell can be used in place of CMD
|
||||||
|
|
||||||
To configure Spack, first run the following command inside the Spack console:
|
To configure Spack, first run the following command inside the Spack console:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
@@ -1497,7 +1549,7 @@ and not tabs, so ensure that this is the case when editing one directly.
|
|||||||
|
|
||||||
.. note:: Cygwin
|
.. note:: Cygwin
|
||||||
The use of Cygwin is not officially supported by Spack and is not tested.
|
The use of Cygwin is not officially supported by Spack and is not tested.
|
||||||
However, Spack will not prevent this; if you choose to use Spack
|
However, Spack will not throw an error; if you choose to use Spack
|
||||||
with Cygwin, know that no functionality is guaranteed.
|
with Cygwin, know that no functionality is guaranteed.
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^
|
||||||
@@ -1511,12 +1563,21 @@ Spack console via:
|
|||||||
|
|
||||||
spack install cpuinfo
|
spack install cpuinfo
|
||||||
|
|
||||||
If, in the previous step, you did not have CMake or Ninja installed, running the command above should install both packages.
|
If, in the previous step, you did not have CMake or Ninja installed, running the command above should bootstrap both packages.
|
||||||
|
|
||||||
.. note:: Spec Syntax Caveats
|
"""""""""""""""""""""""""""
|
||||||
Windows has a few idiosyncrasies when it comes to the Spack spec syntax and the use of certain shells.
|
Windows Compatible Packages
|
||||||
See the Spack spec syntax doc for more information.
|
"""""""""""""""""""""""""""
|
||||||
|
|
||||||
|
Not all spack packages currently have Windows support. Some are inherently incompatible with the
|
||||||
|
platform, and others simply have yet to be ported. To view the current set of packages with Windows
|
||||||
|
support, the list command should be used via ``spack list -t windows``. If there's a package you'd like
|
||||||
|
to install on Windows that is not in that list, feel free to reach out to request the port or contribute
|
||||||
|
the port yourself.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
This is by no means a comprehensive list; some packages may have ports that were not tagged
|
||||||
|
while others may just work out of the box on Windows and have not been tagged as such.
|
||||||
|
|
||||||
^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^
|
||||||
For developers
|
For developers
|
||||||
@@ -1526,3 +1587,6 @@ The intent is to provide a Windows installer that will automatically set up
|
|||||||
Python, Git, and Spack, instead of requiring the user to do so manually.
|
Python, Git, and Spack, instead of requiring the user to do so manually.
|
||||||
Instructions for creating the installer are at
|
Instructions for creating the installer are at
|
||||||
https://github.com/spack/spack/blob/develop/lib/spack/spack/cmd/installer/README.md
|
https://github.com/spack/spack/blob/develop/lib/spack/spack/cmd/installer/README.md
|
||||||
|
|
||||||
|
Alternatively, a pre-built copy of the Windows installer is available as an artifact of Spack's Windows CI
|
||||||
|
at each run of the CI on develop or any PR.
|
||||||
|
|||||||
BIN
lib/spack/docs/images/pr-commit.png
Normal file
BIN
lib/spack/docs/images/pr-commit.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 44 KiB |
BIN
lib/spack/docs/images/projects.png
Normal file
BIN
lib/spack/docs/images/projects.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 68 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 358 KiB |
@@ -12,6 +12,10 @@
|
|||||||
Spack
|
Spack
|
||||||
===================
|
===================
|
||||||
|
|
||||||
|
.. epigraph::
|
||||||
|
|
||||||
|
`These are docs for the Spack package manager. For sphere packing, see` `pyspack <https://pyspack.readthedocs.io>`_.
|
||||||
|
|
||||||
Spack is a package management tool designed to support multiple
|
Spack is a package management tool designed to support multiple
|
||||||
versions and configurations of software on a wide variety of platforms
|
versions and configurations of software on a wide variety of platforms
|
||||||
and environments. It was designed for large supercomputing centers,
|
and environments. It was designed for large supercomputing centers,
|
||||||
@@ -35,15 +39,10 @@ package:
|
|||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
|
$ git clone -c feature.manyFiles=true https://github.com/spack/spack.git
|
||||||
$ cd spack/bin
|
$ cd spack/bin
|
||||||
$ ./spack install libelf
|
$ ./spack install libelf
|
||||||
|
|
||||||
.. note::
|
|
||||||
``-c feature.manyFiles=true`` improves git's performance on repositories with 1,000+ files.
|
|
||||||
|
|
||||||
``--depth=2`` prunes the git history to reduce the size of the Spack installation.
|
|
||||||
|
|
||||||
If you're new to spack and want to start using it, see :doc:`getting_started`,
|
If you're new to spack and want to start using it, see :doc:`getting_started`,
|
||||||
or refer to the full manual below.
|
or refer to the full manual below.
|
||||||
|
|
||||||
|
|||||||
@@ -457,11 +457,11 @@ For instance, the following config options,
|
|||||||
tcl:
|
tcl:
|
||||||
all:
|
all:
|
||||||
suffixes:
|
suffixes:
|
||||||
^python@3: 'python{^python.version}'
|
^python@3.12: 'python-3.12'
|
||||||
^openblas: 'openblas'
|
^openblas: 'openblas'
|
||||||
|
|
||||||
will add a ``python-3.12.1`` version string to any packages compiled with
|
will add a ``python-3.12`` version string to any packages compiled with
|
||||||
Python matching the spec, ``python@3``. This is useful to know which
|
Python matching the spec, ``python@3.12``. This is useful to know which
|
||||||
version of Python a set of Python extensions is associated with. Likewise, the
|
version of Python a set of Python extensions is associated with. Likewise, the
|
||||||
``openblas`` string is attached to any program that has openblas in the spec,
|
``openblas`` string is attached to any program that has openblas in the spec,
|
||||||
most likely via the ``+blas`` variant specification.
|
most likely via the ``+blas`` variant specification.
|
||||||
|
|||||||
@@ -1267,7 +1267,7 @@ Git fetching supports the following parameters to ``version``:
|
|||||||
This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
|
This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
|
||||||
large repositories that have separate portions that can be built independently.
|
large repositories that have separate portions that can be built independently.
|
||||||
If paths provided are directories then all the subdirectories and associated files
|
If paths provided are directories then all the subdirectories and associated files
|
||||||
will also be cloned.
|
will also be cloned.
|
||||||
|
|
||||||
Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
|
Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
|
||||||
|
|
||||||
@@ -1367,8 +1367,8 @@ Submodules
|
|||||||
git-submodule``.
|
git-submodule``.
|
||||||
|
|
||||||
Sparse-Checkout
|
Sparse-Checkout
|
||||||
You can supply ``git_sparse_paths`` at the package or version level to utilize git's
|
You can supply ``git_sparse_paths`` at the package or version level to utilize git's
|
||||||
sparse-checkout feature. This will only clone the paths that are specified in the
|
sparse-checkout feature. This will only clone the paths that are specified in the
|
||||||
``git_sparse_paths`` attribute for the package along with the files in the top level directory.
|
``git_sparse_paths`` attribute for the package along with the files in the top level directory.
|
||||||
This feature allows you to only clone what you need from a large repository.
|
This feature allows you to only clone what you need from a large repository.
|
||||||
Note that this is a newer feature in git and requires git ``2.25.0`` or greater.
|
Note that this is a newer feature in git and requires git ``2.25.0`` or greater.
|
||||||
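A package-level sketch (the repository URL and paths are hypothetical):

.. code-block:: python

   class Monorepo(Package):
       """Hypothetical package fetched from a large monorepo."""

       git = "https://github.com/example/monorepo.git"

       # Only these paths, plus the files in the top-level directory,
       # are checked out.
       git_sparse_paths = ["tools/libfoo", "cmake"]

       version("main", branch="main")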
@@ -1928,29 +1928,71 @@ to the empty list.
|
|||||||
String. A URL pointing to license setup instructions for the software.
|
String. A URL pointing to license setup instructions for the software.
|
||||||
Defaults to the empty string.
|
Defaults to the empty string.
|
||||||
|
|
||||||
For example, let's take a look at the Arm Forge package.
|
For example, let's take a look at the package for the PGI compilers.
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
# Licensing
|
# Licensing
|
||||||
license_required = True
|
license_required = True
|
||||||
license_comment = "#"
|
license_comment = "#"
|
||||||
license_files = ["licences/Licence"]
|
license_files = ["license.dat"]
|
||||||
license_vars = [
|
license_vars = ["PGROUPD_LICENSE_FILE", "LM_LICENSE_FILE"]
|
||||||
"ALLINEA_LICENSE_DIR",
|
license_url = "http://www.pgroup.com/doc/pgiinstall.pdf"
|
||||||
"ALLINEA_LICENCE_DIR",
|
|
||||||
"ALLINEA_LICENSE_FILE",
|
|
||||||
"ALLINEA_LICENCE_FILE",
|
|
||||||
]
|
|
||||||
license_url = "https://developer.arm.com/documentation/101169/latest/Use-Arm-Licence-Server"
|
|
||||||
|
|
||||||
Arm Forge requires a license. Its license manager uses the ``#`` symbol to denote a comment.
|
As you can see, PGI requires a license. Its license manager, FlexNet, uses
|
||||||
It expects the license file to be named ``Licence`` and to be located in a ``licences`` directory
|
the ``#`` symbol to denote a comment. It expects the license file to be
|
||||||
in the installation prefix.
|
named ``license.dat`` and to be located directly in the installation prefix.
|
||||||
|
If you would like the license file to be located elsewhere, simply set
|
||||||
|
``PGROUPD_LICENSE_FILE`` or ``LM_LICENSE_FILE`` after installation. For
|
||||||
|
further instructions on installation and licensing, see the URL provided.
|
||||||
|
|
||||||
If you would like the license file to be located elsewhere, simply set ``ALLINEA_LICENSE_DIR`` or
|
Let's walk through a sample PGI installation to see exactly what Spack is
|
||||||
one of the other license variables after installation. For further instructions on installation and
|
and isn't capable of. Since PGI does not provide a download URL, it must
|
||||||
licensing, see the URL provided.
|
be downloaded manually. It can either be added to a mirror or located in
|
||||||
|
the current directory when ``spack install pgi`` is run. See :ref:`mirrors`
|
||||||
|
for instructions on setting up a mirror.
|
||||||
|
|
||||||
|
After running ``spack install pgi``, the first thing that will happen is
|
||||||
|
Spack will create a global license file located at
|
||||||
|
``$SPACK_ROOT/etc/spack/licenses/pgi/license.dat``. It will then open up the
|
||||||
|
file using :ref:`your favorite editor <controlling-the-editor>`. It will look like
|
||||||
|
this:
|
||||||
|
|
||||||
|
.. code-block:: sh
|
||||||
|
|
||||||
|
# A license is required to use pgi.
|
||||||
|
#
|
||||||
|
# The recommended solution is to store your license key in this global
|
||||||
|
# license file. After installation, the following symlink(s) will be
|
||||||
|
# added to point to this file (relative to the installation prefix):
|
||||||
|
#
|
||||||
|
# license.dat
|
||||||
|
#
|
||||||
|
# Alternatively, use one of the following environment variable(s):
|
||||||
|
#
|
||||||
|
# PGROUPD_LICENSE_FILE
|
||||||
|
# LM_LICENSE_FILE
|
||||||
|
#
|
||||||
|
# If you choose to store your license in a non-standard location, you may
|
||||||
|
# set one of these variable(s) to the full pathname to the license file, or
|
||||||
|
# port@host if you store your license keys on a dedicated license server.
|
||||||
|
# You will likely want to set this variable in a module file so that it
|
||||||
|
# gets loaded every time someone tries to use pgi.
|
||||||
|
#
|
||||||
|
# For further information on how to acquire a license, please refer to:
|
||||||
|
#
|
||||||
|
# http://www.pgroup.com/doc/pgiinstall.pdf
|
||||||
|
#
|
||||||
|
# You may enter your license below.
|
||||||
|
|
||||||
|
You can add your license directly to this file, or tell FlexNet to use a
|
||||||
|
license stored on a separate license server. Here is an example that
|
||||||
|
points to a license server called licman1:
|
||||||
|
|
||||||
|
.. code-block:: none
|
||||||
|
|
||||||
|
SERVER licman1.mcs.anl.gov 00163eb7fba5 27200
|
||||||
|
USE_SERVER
|
||||||
|
|
||||||
If your package requires the license to install, you can reference the
|
If your package requires the license to install, you can reference the
|
||||||
location of this global license using ``self.global_license_file``.
|
location of this global license using ``self.global_license_file``.
|
||||||
@@ -2350,7 +2392,7 @@ by the ``--jobs`` option:
|
|||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
:emphasize-lines: 7, 11
|
:emphasize-lines: 7, 11
|
||||||
:linenos:
|
:linenos:
|
||||||
|
|
||||||
class Xios(Package):
|
class Xios(Package):
|
||||||
...
|
...
|
||||||
def install(self, spec, prefix):
|
def install(self, spec, prefix):
|
||||||
@@ -2461,14 +2503,15 @@ with. For example, suppose that in the ``libdwarf`` package you write:
|
|||||||
|
|
||||||
depends_on("libelf@0.8")
|
depends_on("libelf@0.8")
|
||||||
|
|
||||||
Now ``libdwarf`` will require ``libelf`` in the range ``0.8``, which
|
Now ``libdwarf`` will require ``libelf`` at *exactly* version ``0.8``.
|
||||||
includes patch versions ``0.8.1``, ``0.8.2``, etc. Apart from version
|
You can also specify a requirement for a particular variant or for
|
||||||
restrictions, you can also specify variants if this package requires
|
specific compiler flags:
|
||||||
optional features of the dependency.
|
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
depends_on("libelf@0.8 +parser +pic")
|
depends_on("libelf@0.8+debug")
|
||||||
|
depends_on("libelf debug=True")
|
||||||
|
depends_on("libelf cppflags='-fPIC'")
|
||||||
|
|
||||||
Both users *and* package authors can use the same spec syntax to refer
|
Both users *and* package authors can use the same spec syntax to refer
|
||||||
to different package configurations. Users use the spec syntax on the
|
to different package configurations. Users use the spec syntax on the
|
||||||
@@ -2476,82 +2519,46 @@ command line to find installed packages or to install packages with
|
|||||||
particular constraints, and package authors can use specs to describe
|
particular constraints, and package authors can use specs to describe
|
||||||
relationships between packages.
|
relationships between packages.
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^
|
||||||
Specifying backward and forward compatibility
|
Version ranges
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Packages are often compatible with a range of versions of their
|
Although some packages require a specific version for their dependencies,
|
||||||
dependencies. This is typically referred to as backward and forward
|
most can be built with a range of versions. For example, if you are
|
||||||
compatibility. Spack allows you to specify this in the ``depends_on``
|
writing a package for a legacy Python module that only works with Python
|
||||||
directive using version ranges.
|
2.4 through 2.6, this would look like:
|
||||||
|
|
||||||
**Backwards compatibility** means that the package requires at least a
|
|
||||||
certain version of its dependency:
|
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
depends_on("python@3.10:")
|
depends_on("python@2.4:2.6")
|
||||||
|
|
||||||
In this case, the package requires Python 3.10 or newer.
|
Version ranges in Spack are *inclusive*, so ``2.4:2.6`` means any version
|
||||||
|
greater than or equal to ``2.4`` and up to and including any ``2.6.x``. If
|
||||||
Commonly, packages drop support for older versions of a dependency as
|
you want to specify that a package works with any version of Python 3 (or
|
||||||
they release new versions. In Spack you can conveniently add every
|
higher), this would look like:
|
||||||
backward compatibility rule as a separate line:
|
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
# backward compatibility with Python
|
depends_on("python@3:")
|
||||||
depends_on("python@3.8:")
|
|
||||||
depends_on("python@3.9:", when="@1.2:")
|
|
||||||
depends_on("python@3.10:", when="@1.4:")
|
|
||||||
|
|
||||||
This means that in general we need Python 3.8 or newer; from version
|
Here we leave out the upper bound. If you want to say that a package
|
||||||
1.2 onwards we need Python 3.9 or newer; from version 1.4 onwards we
|
requires Python 2, you can similarly leave out the lower bound:
|
||||||
need Python 3.10 or newer. Notice that it's fine to have overlapping
|
|
||||||
ranges in the ``when`` clauses.
|
|
||||||
|
|
||||||
**Forward compatibility** means that the package requires at most a
|
|
||||||
certain version of its dependency. Forward compatibility rules are
|
|
||||||
necessary when there are breaking changes in the dependency that the
|
|
||||||
package cannot handle. In Spack we often add forward compatibility
|
|
||||||
bounds only at the time a new, breaking version of a dependency is
|
|
||||||
released. As with backward compatibility, it is typical to see a list
|
|
||||||
of forward compatibility bounds in a package file as separate lines:
|
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
# forward compatibility with Python
|
depends_on("python@:2")
|
||||||
depends_on("python@:3.12", when="@:1.10")
|
|
||||||
depends_on("python@:3.13", when="@:1.12")
|
|
||||||
|
|
||||||
Notice how the ``:`` now appears before the version number both in the
|
Notice that we didn't use ``@:3``. Version ranges are *inclusive*, so
|
||||||
dependency and in the ``when`` clause. This tells Spack that in general
|
``@:3`` means "up to and including any 3.x version".
|
||||||
we need Python 3.13 or older up to version ``1.12.x``, and up to version
|
|
||||||
``1.10.x`` we need Python 3.12 or older. Said differently, forward compatibility
|
|
||||||
with Python 3.13 was added in version 1.11, while version 1.13 added forward
|
|
||||||
compatibility with Python 3.14.
|
|
||||||
|
|
||||||
Notice that a version range ``@:3.12`` includes *any* patch version
|
You can also simply write
|
||||||
number ``3.12.x``, which is often useful when specifying forward compatibility
|
|
||||||
bounds.
|
|
||||||
|
|
||||||
So far we have seen open-ended version ranges, which is by far the most
|
|
||||||
common use case. It is also possible to specify both a lower and an upper bound
|
|
||||||
on the version of a dependency, like this:
|
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
depends_on("python@3.10:3.12")
|
depends_on("python@2.7")
|
||||||
|
|
||||||
There is a short syntax to specify that a package is compatible with, say, any
|
to tell Spack that the package needs Python 2.7.x. This is equivalent to
|
||||||
``3.x`` version:
|
``@2.7:2.7``.
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
depends_on("python@3")
|
|
||||||
|
|
||||||
The above is equivalent to ``depends_on("python@3:3")``, which means at least
|
|
||||||
Python version 3 and at most any version ``3.x.y``.
|
|
||||||
|
|
||||||
In very rare cases, you may need to specify an exact version, for example
|
In very rare cases, you may need to specify an exact version, for example
|
||||||
if you need to distinguish between ``3.2`` and ``3.2.1``:
|
if you need to distinguish between ``3.2`` and ``3.2.1``:
|
||||||
@@ -2925,9 +2932,9 @@ make sense during the build phase may not be needed at runtime, and vice versa.
|
|||||||
it makes sense to let a dependency set the environment variables for its dependents. To allow all
|
it makes sense to let a dependency set the environment variables for its dependents. To allow all
|
||||||
this, Spack provides four different methods that can be overridden in a package:
|
this, Spack provides four different methods that can be overridden in a package:
|
||||||
|
|
||||||
1. :meth:`setup_build_environment <spack.builder.BaseBuilder.setup_build_environment>`
|
1. :meth:`setup_build_environment <spack.builder.Builder.setup_build_environment>`
|
||||||
2. :meth:`setup_run_environment <spack.package_base.PackageBase.setup_run_environment>`
|
2. :meth:`setup_run_environment <spack.package_base.PackageBase.setup_run_environment>`
|
||||||
3. :meth:`setup_dependent_build_environment <spack.builder.BaseBuilder.setup_dependent_build_environment>`
|
3. :meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
|
||||||
4. :meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
|
4. :meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
|
||||||
|
|
||||||
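As a sketch, each hook receives an object on which environment modifications
are recorded (the package and variable names are hypothetical):

.. code-block:: python

   from spack.package import *

   class Mylib(Package):
       """Hypothetical package."""

       def setup_build_environment(self, env):
           # Applied while this package itself is being built.
           env.set("MYLIB_BUILD_MODE", "release")

       def setup_dependent_build_environment(self, env, dependent_spec):
           # Applied while a dependent of this package is being built.
           env.set("MYLIB_ROOT", self.prefix)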
The Qt package, for instance, uses this call:
|
The Qt package, for instance, uses this call:
|
||||||
@@ -5378,7 +5385,7 @@ by build recipes. Examples of checking :ref:`variant settings <variants>` and
|
|||||||
determine whether it needs to also set up build dependencies (see
|
determine whether it needs to also set up build dependencies (see
|
||||||
:ref:`test-build-tests`).
|
:ref:`test-build-tests`).
|
||||||
|
|
||||||
The ``MyPackage`` package below provides two basic test examples:
|
The ``MyPackage`` package below provides two basic test examples:
|
||||||
``test_example`` and ``test_example2``. The first runs the installed
|
``test_example`` and ``test_example2``. The first runs the installed
|
||||||
``example`` and ensures its output contains an expected string. The second
|
``example`` and ensures its output contains an expected string. The second
|
||||||
runs ``example2`` without checking output so is only concerned with confirming
|
runs ``example2`` without checking output so is only concerned with confirming
|
||||||
@@ -5695,7 +5702,7 @@ subdirectory of the installation prefix. They are automatically copied to
|
|||||||
the appropriate relative paths under the test stage directory prior to
|
the appropriate relative paths under the test stage directory prior to
|
||||||
executing stand-alone tests.
|
executing stand-alone tests.
|
||||||
|
|
||||||
.. tip::
|
.. tip::
|
||||||
|
|
||||||
*Perform test-related conversions once when copying files.*
|
*Perform test-related conversions once when copying files.*
|
||||||
|
|
||||||
@@ -7071,46 +7078,6 @@ might write:
|
|||||||
CXXFLAGS += -I$DWARF_PREFIX/include
|
CXXFLAGS += -I$DWARF_PREFIX/include
|
||||||
CXXFLAGS += -L$DWARF_PREFIX/lib
|
CXXFLAGS += -L$DWARF_PREFIX/lib
|
||||||
|
|
||||||
.. _abi_compatibility:
|
|
||||||
|
|
||||||
----------------------------
|
|
||||||
Specifying ABI Compatibility
|
|
||||||
----------------------------
|
|
||||||
|
|
||||||
Packages can include ABI-compatibility information using the
|
|
||||||
``can_splice`` directive. For example, if ``Foo`` version 1.1 can
|
|
||||||
always replace version 1.0, then the package could have:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
can_splice("foo@1.0", when="@1.1")
|
|
||||||
|
|
||||||
For virtual packages, packages can also specify ABI-compatibility with
|
|
||||||
other packages providing the same virtual. For example, ``zlib-ng``
|
|
||||||
could specify:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
can_splice("zlib@1.3.1", when="@2.2+compat")
|
|
||||||
|
|
||||||
Some packages have ABI-compatibility that is dependent on matching
|
|
||||||
variant values, either for all variants or for some set of
|
|
||||||
ABI-relevant variants. In those cases, it is not necessary to specify
|
|
||||||
the full combinatorial explosion. The ``match_variants`` keyword can
|
|
||||||
cover all single-value variants.
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
can_splice("foo@1.1", when="@1.2", match_variants=["bar"]) # any value for bar as long as they're the same
|
|
||||||
can_splice("foo@1.2", when="@1.3", match_variants="*") # any variant values if all single-value variants match
|
|
||||||
|
|
||||||
The concretizer will use ABI compatibility to determine automatic
|
|
||||||
splices when :ref:`automatic splicing<automatic_splicing>` is enabled.
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
The ``can_splice`` directive is experimental, and may be replaced
|
|
||||||
by a higher-level interface in future versions of Spack.
|
|
||||||
|
|
||||||
.. _package_class_structure:
|
.. _package_class_structure:
|
||||||
|
|
||||||
|
|||||||
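The removed section shows ``can_splice`` calls in isolation; in a package they sit at class level next to ``version`` and ``variant`` directives. A hypothetical sketch of that placement (the package name, versions, and the ``bar`` variant are invented for illustration, and checksums are omitted):

.. code-block:: python

   from spack.package import *


   class Foo(Package):
       """Hypothetical package illustrating where can_splice sits in a recipe."""

       version("1.2")  # checksums omitted for brevity
       version("1.1")

       variant("bar", default=True, description="ABI-relevant feature toggle")

       # An installed foo@1.1 may be replaced by foo@1.2 without rebuilding
       # dependents, provided both specs agree on the ABI-relevant variant "bar".
       can_splice("foo@1.1", when="@1.2", match_variants=["bar"])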
@@ -59,7 +59,7 @@ Functional Example
 ------------------

 The simplest fully functional standalone example of a working pipeline can be
-examined live at this example `project <https://gitlab.com/spack/pipeline-quickstart>`_
+examined live at this example `project <https://gitlab.com/scott.wittenburg/spack-pipeline-demo>`_
 on gitlab.com.

 Here's the ``.gitlab-ci.yml`` file from that example that builds and runs the
@@ -67,46 +67,39 @@ pipeline:

 .. code-block:: yaml

-   stages: [ "generate", "build" ]
+   stages: [generate, build]

    variables:
-     SPACK_REPOSITORY: "https://github.com/spack/spack.git"
-     SPACK_REF: "develop-2024-10-06"
-     SPACK_USER_CONFIG_PATH: ${CI_PROJECT_DIR}
-     SPACK_BACKTRACE: 1
+     SPACK_REPO: https://github.com/scottwittenburg/spack.git
+     SPACK_REF: pipelines-reproducible-builds

    generate-pipeline:
-     tags:
-       - saas-linux-small-amd64
      stage: generate
+     tags:
+       - docker
      image:
-       name: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
-     script:
-       - git clone ${SPACK_REPOSITORY}
-       - cd spack && git checkout ${SPACK_REF} && cd ../
+       name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01
+       entrypoint: [""]
+     before_script:
+       - git clone ${SPACK_REPO}
+       - pushd spack && git checkout ${SPACK_REF} && popd
        - . "./spack/share/spack/setup-env.sh"
-       - spack --version
+     script:
        - spack env activate --without-view .
-       - spack -d -v --color=always
-         ci generate
-         --check-index-only
+       - spack -d ci generate
          --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
-         --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
+         --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml"
      artifacts:
        paths:
          - "${CI_PROJECT_DIR}/jobs_scratch_dir"

-   build-pipeline:
+   build-jobs:
      stage: build
      trigger:
        include:
-         - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
+         - artifact: "jobs_scratch_dir/pipeline.yml"
            job: generate-pipeline
        strategy: depend
-     needs:
-       - artifacts: True
-         job: generate-pipeline


 The key thing to note above is that there are two jobs: The first job to run,
 ``generate-pipeline``, runs the ``spack ci generate`` command to generate a
@@ -121,93 +114,82 @@ And here's the spack environment built by the pipeline represented as a
    spack:
      view: false
      concretizer:
-       unify: true
-       reuse: false
+       unify: false

      definitions:
        - pkgs:
          - zlib
-         - bzip2 ~debug
-       - compiler:
-         - '%gcc'
+         - bzip2
+       - arch:
+         - '%gcc@7.5.0 arch=linux-ubuntu18.04-x86_64'

      specs:
        - matrix:
          - - $pkgs
-         - - $compiler
+         - - $arch

+     mirrors: { "mirror": "s3://spack-public/mirror" }

      ci:
-       target: gitlab
+       enable-artifacts-buildcache: True
+       rebuild-index: False
        pipeline-gen:
        - any-job:
-           tags:
-             - saas-linux-small-amd64
-           image:
-             name: ghcr.io/spack/ubuntu20.04-runner-x86_64:2023-01-01
            before_script:
-             - git clone ${SPACK_REPOSITORY}
-             - cd spack && git checkout ${SPACK_REF} && cd ../
+             - git clone ${SPACK_REPO}
+             - pushd spack && git checkout ${SPACK_CHECKOUT_VERSION} && popd
              - . "./spack/share/spack/setup-env.sh"
-             - spack --version
-             - export SPACK_USER_CONFIG_PATH=${CI_PROJECT_DIR}
-             - spack config blame mirrors
+       - build-job:
+           tags: [docker]
+           image:
+             name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01
+             entrypoint: [""]


+The elements of this file important to spack ci pipelines are described in more
+detail below, but there are a couple of things to note about the above working
+example:

 .. note::
-   The use of ``reuse: false`` in spack environments used for pipelines is
-   almost always what you want, as without it your pipelines will not rebuild
-   packages even if package hashes have changed. This is due to the concretizer
-   strongly preferring known hashes when ``reuse: true``.
+   There is no ``script`` attribute specified for here. The reason for this is
+   Spack CI will automatically generate reasonable default scripts. More
+   detail on what is in these scripts can be found below.

-The ``ci`` section in the above environment file contains the bare minimum
-configuration required for ``spack ci generate`` to create a working pipeline.
-The ``target: gitlab`` tells spack that the desired pipeline output is for
-gitlab. However, this isn't strictly required, as currently gitlab is the
-only possible output format for pipelines. The ``pipeline-gen`` section
-contains the key information needed to specify attributes for the generated
-jobs. Notice that it contains a list which has only a single element in
-this case. In real pipelines it will almost certainly have more elements,
-and in those cases, order is important: spack starts at the bottom of the
-list and works upwards when applying attributes.
+   Also notice the ``before_script`` section. It is required when using any of the
+   default scripts to source the ``setup-env.sh`` script in order to inform
+   the default scripts where to find the ``spack`` executable.

-But in this simple case, we use only the special key ``any-job`` to
-indicate that spack should apply the specified attributes (``tags``, ``image``,
-and ``before_script``) to any job it generates. This includes jobs for
-building/pushing all packages, a ``rebuild-index`` job at the end of the
-pipeline, as well as any ``noop`` jobs that might be needed by gitlab when
-no rebuilds are required.
+Normally ``enable-artifacts-buildcache`` is not recommended in production as it
+results in large binary artifacts getting transferred back and forth between
+gitlab and the runners. But in this example on gitlab.com where there is no
+shared, persistent file system, and where no secrets are stored for giving
+permission to write to an S3 bucket, ``enabled-buildcache-artifacts`` is the only
+way to propagate binaries from jobs to their dependents.

-Something to note is that in this simple case, we rely on spack to
-generate a reasonable script for the package build jobs (it just creates
-a script that invokes ``spack ci rebuild``).
+Also, it is usually a good idea to let the pipeline generate a final "rebuild the
+buildcache index" job, so that subsequent pipeline generation can quickly determine
+which specs are up to date and which need to be rebuilt (it's a good idea for other
+reasons as well, but those are out of scope for this discussion). In this case we
+have disabled it (using ``rebuild-index: False``) because the index would only be
+generated in the artifacts mirror anyway, and consequently would not be available
+during subsequent pipeline runs.

-Another thing to note is the use of the ``SPACK_USER_CONFIG_DIR`` environment
-variable in any generated jobs. The purpose of this is to make spack
-aware of one final file in the example, the one that contains the mirror
-configuration. This file, ``mirrors.yaml`` looks like this:
+.. note::
+   With the addition of reproducible builds (#22887) a previously working
+   pipeline will require some changes:

-.. code-block:: yaml
+   * In the build-jobs, the environment location changed.
+     This will typically show as a ``KeyError`` in the failing job. Be sure to
+     point to ``${SPACK_CONCRETE_ENV_DIR}``.

-   mirrors:
-     buildcache-destination:
-       url: oci://registry.gitlab.com/spack/pipeline-quickstart
-       binary: true
-       access_pair:
-         id_variable: CI_REGISTRY_USER
-         secret_variable: CI_REGISTRY_PASSWORD
+   * When using ``include`` in your environment, be sure to make the included
+     files available in the build jobs. This means adding those files to the
+     artifact directory. Those files will also be missing in the reproducibility
+     artifact.

-Note the name of the mirror is ``buildcache-destination``, which is required
-as of Spack 0.23 (see below for more information). The mirror url simply
-points to the container registry associated with the project, while
-``id_variable`` and ``secret_variable`` refer to to environment variables
-containing the access credentials for the mirror.
+   * Because the location of the environment changed, including files with
+     relative path may have to be adapted to work both in the project context
+     (generation job) and in the concrete env dir context (build job).

-When spack builds packages for this example project, they will be pushed to
-the project container registry, where they will be available for subsequent
-jobs to install as dependencies, or for other pipelines to use to build runnable
-container images.

 -----------------------------------
 Spack commands supporting pipelines
@@ -435,6 +417,15 @@ configuration with a ``script`` attribute. Specifying a signing job without a sc
 does not create a signing job and the job configuration attributes will be ignored.
 Signing jobs are always assigned the runner tags ``aws``, ``protected``, and ``notary``.

+^^^^^^^^^^^^^^^^^
+Cleanup (cleanup)
+^^^^^^^^^^^^^^^^^
+
+When using ``temporary-storage-url-prefix`` the cleanup job will destroy the mirror
+created for the associated Gitlab pipeline. Cleanup jobs do not allow modifying the
+script, but do expect that the spack command is in the path and require a
+``before_script`` to be specified that sources the ``setup-env.sh`` script.
+
 .. _noop_jobs:

 ^^^^^^^^^^^^
@@ -601,77 +592,6 @@ the attributes will be merged starting from the bottom match going up to the top

 In the case that no match is found in a submapping section, no additional attributes will be applied.

-^^^^^^^^^^^^^^^^^^^^^^^^
-Dynamic Mapping Sections
-^^^^^^^^^^^^^^^^^^^^^^^^
-
-For large scale CI where cost optimization is required, dynamic mapping allows for the use of real-time
-mapping schemes served by a web service. This type of mapping does not support the ``-remove`` type
-behavior, but it does follow the rest of the merge rules for configurations.
-
-The dynamic mapping service needs to implement a single REST API interface for getting
-requests ``GET <URL>[:PORT][/PATH]?spec=<pkg_name@pkg_version +variant1+variant2%compiler@compiler_version>``.
-
-example request.
-
-.. code-block::
-
-   https://my-dyn-mapping.spack.io/allocation?spec=zlib-ng@2.1.6 +compat+opt+shared+pic+new_strategies arch=linux-ubuntu20.04-x86_64_v3%gcc@12.0.0
-
-
-With an example response the updates kubernetes request variables, overrides the max retries for gitlab,
-and prepends a note about the modifications made by the my-dyn-mapping.spack.io service.
-
-.. code-block::
-
-   200 OK
-
-   {
-       "variables":
-       {
-          "KUBERNETES_CPU_REQUEST": "500m",
-          "KUBERNETES_MEMORY_REQUEST": "2G",
-       },
-       "retry": { "max:": "1"}
-       "script+:":
-       [
-          "echo \"Job modified by my-dyn-mapping.spack.io\""
-       ]
-   }
-
-
-The ci.yaml configuration section takes the URL endpoint as well as a number of options to configure how responses are handled.
-
-It is possible to specify a list of allowed and ignored configuration attributes under ``allow`` and ``ignore``
-respectively. It is also possible to configure required attributes under ``required`` section.
-
-Options to configure the client timeout and SSL verification using the ``timeout`` and ``verify_ssl`` options.
-By default, the ``timeout`` is set to the option in ``config:timeout`` and ``veryify_ssl`` is set the the option in ``config::verify_ssl``.
-
-Passing header parameters to the request can be achieved through the ``header`` section. The values of the variables passed to the
-header may be environment variables that are expanded at runtime, such as a private token configured on the runner.
-
-Here is an example configuration pointing to ``my-dyn-mapping.spack.io/allocation``.
-
-
-.. code-block:: yaml
-
-   ci:
-   - dynamic-mapping:
-       endpoint: my-dyn-mapping.spack.io/allocation
-       timeout: 10
-       verify_ssl: True
-       header:
-         PRIVATE_TOKEN: ${MY_PRIVATE_TOKEN}
-         MY_CONFIG: "fuzz_allocation:false"
-       allow:
-       - variables
-       ignore:
-       - script
-       require: []
-
-
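To make the removed description concrete: the mapping service sees a plain HTTP ``GET`` with the spec in a query parameter and answers with job attributes to merge into the generated job. A minimal, hypothetical client-side sketch of that exchange (the endpoint and token variable mirror the example above and do not point at a real service):

.. code-block:: python

   import json
   import os
   import urllib.parse
   import urllib.request

   # Hypothetical endpoint and header, modeled on the removed example above.
   endpoint = "https://my-dyn-mapping.spack.io/allocation"
   spec = "zlib-ng@2.1.6 +compat+opt+shared+pic arch=linux-ubuntu20.04-x86_64_v3%gcc@12.0.0"

   request = urllib.request.Request(
       f"{endpoint}?{urllib.parse.urlencode({'spec': spec})}",
       headers={"PRIVATE_TOKEN": os.environ.get("MY_PRIVATE_TOKEN", "")},
   )
   with urllib.request.urlopen(request, timeout=10) as response:
       job_attributes = json.load(response)

   # A 200 response carries attributes merged into the generated job, e.g.
   # {"variables": {"KUBERNETES_CPU_REQUEST": "500m", ...}, "script+:": [...]}
   print(job_attributes.get("variables", {}))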
 ^^^^^^^^^^^^^
 Bootstrapping
 ^^^^^^^^^^^^^

@@ -750,6 +670,15 @@ environment/stack file, and in that case no bootstrapping will be done (only the
 specs will be staged for building) and the runners will be expected to already
 have all needed compilers installed and configured for spack to use.

+^^^^^^^^^^^^^^^^^^^
+Pipeline Buildcache
+^^^^^^^^^^^^^^^^^^^
+
+The ``enable-artifacts-buildcache`` key
+takes a boolean and determines whether the pipeline uses artifacts to store and
+pass along the buildcaches from one stage to the next (the default if you don't
+provide this option is ``False``).
+
 ^^^^^^^^^^^^^^^^
 Broken Specs URL
 ^^^^^^^^^^^^^^^^

@@ -1,13 +1,13 @@
-sphinx==8.1.3
+sphinx==7.4.7
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.6.1
-sphinx-rtd-theme==3.0.2
+sphinx-rtd-theme==2.0.0
-python-levenshtein==0.26.1
+python-levenshtein==0.25.1
-docutils==0.21.2
+docutils==0.20.1
 pygments==2.18.0
 urllib3==2.2.3
-pytest==8.3.4
+pytest==8.3.3
 isort==5.13.2
-black==24.10.0
+black==24.8.0
 flake8==7.1.1
 mypy==1.11.1

238 lib/spack/env/cc vendored

@@ -101,9 +101,10 @@ setsep() {
     esac
 }

-# prepend LISTNAME ELEMENT
+# prepend LISTNAME ELEMENT [SEP]
 #
-# Prepend ELEMENT to the list stored in the variable LISTNAME.
+# Prepend ELEMENT to the list stored in the variable LISTNAME,
+# assuming the list is separated by SEP.
 # Handles empty lists and single-element lists.
 prepend() {
     varname="$1"
@@ -237,36 +238,6 @@ esac
 }
 "

-# path_list functions. Path_lists have 3 parts: spack_store_<list>, <list> and system_<list>,
-# which are used to prioritize paths when assembling the final command line.
-
-# init_path_lists LISTNAME
-# Set <LISTNAME>, spack_store_<LISTNAME>, and system_<LISTNAME> to "".
-init_path_lists() {
-    eval "spack_store_$1=\"\""
-    eval "$1=\"\""
-    eval "system_$1=\"\""
-}
-
-# assign_path_lists LISTNAME1 LISTNAME2
-# Copy contents of LISTNAME2 into LISTNAME1, for each path_list prefix.
-assign_path_lists() {
-    eval "spack_store_$1=\"\${spack_store_$2}\""
-    eval "$1=\"\${$2}\""
-    eval "system_$1=\"\${system_$2}\""
-}
-
-# append_path_lists LISTNAME ELT
-# Append the provided ELT to the appropriate list, based on the result of path_order().
-append_path_lists() {
-    path_order "$2"
-    case $? in
-        0) eval "append spack_store_$1 \"\$2\"" ;;
-        1) eval "append $1 \"\$2\"" ;;
-        2) eval "append system_$1 \"\$2\"" ;;
-    esac
-}
-
 # Check if optional parameters are defined
 # If we aren't asking for debug flags, don't add them
 if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
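The removed helpers above all funnel through ``path_order``, which classifies a path as Spack-store (exit status 0), ordinary (1), or system (2), so that each flag family keeps three priority buckets. A rough Python analogue of that routing (the store prefix and system directories are illustrative stand-ins for the wrapper's real ``SPACK_STORE``/``SPACK_SYSTEM_DIRS`` inputs):

.. code-block:: python

   SPACK_STORE = "/opt/spack/store"                       # illustrative stand-in
   SYSTEM_DIRS = ("/lib", "/usr/lib", "/usr/local/lib")   # illustrative stand-in


   def path_order(path: str) -> int:
       """0 = spack store, 1 = ordinary, 2 = system (mirrors the shell exit codes)."""
       if path == SPACK_STORE or path.startswith(SPACK_STORE + "/"):
           return 0
       if any(path == d or path.startswith(d + "/") for d in SYSTEM_DIRS):
           return 2
       return 1


   def append_path_lists(lists: dict, path: str) -> None:
       # Route the path into the bucket chosen by path_order, like the shell helper.
       lists[("spack_store", "ordinary", "system")[path_order(path)]].append(path)


   lists = {"spack_store": [], "ordinary": [], "system": []}
   for p in ("/usr/lib", "/opt/spack/store/gcc-13/lib", "/home/me/project/lib"):
       append_path_lists(lists, p)
   # lists["spack_store"] == ["/opt/spack/store/gcc-13/lib"], etc.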
@@ -499,7 +470,12 @@ input_command="$*"
 parse_Wl() {
     while [ $# -ne 0 ]; do
         if [ "$wl_expect_rpath" = yes ]; then
-            append_path_lists return_rpath_dirs_list "$1"
+            path_order "$1"
+            case $? in
+                0) append return_spack_store_rpath_dirs_list "$1" ;;
+                1) append return_rpath_dirs_list "$1" ;;
+                2) append return_system_rpath_dirs_list "$1" ;;
+            esac
             wl_expect_rpath=no
         else
             case "$1" in
@@ -508,14 +484,24 @@ parse_Wl() {
                     if [ -z "$arg" ]; then
                         shift; continue
                     fi
-                    append_path_lists return_rpath_dirs_list "$arg"
+                    path_order "$arg"
+                    case $? in
+                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                        1) append return_rpath_dirs_list "$arg" ;;
+                        2) append return_system_rpath_dirs_list "$arg" ;;
+                    esac
                     ;;
                 --rpath=*)
                     arg="${1#--rpath=}"
                     if [ -z "$arg" ]; then
                         shift; continue
                     fi
-                    append_path_lists return_rpath_dirs_list "$arg"
+                    path_order "$arg"
+                    case $? in
+                        0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                        1) append return_rpath_dirs_list "$arg" ;;
+                        2) append return_system_rpath_dirs_list "$arg" ;;
+                    esac
                     ;;
                 -rpath|--rpath)
                     wl_expect_rpath=yes
@@ -523,7 +509,8 @@ parse_Wl() {
                 "$dtags_to_strip")
                     ;;
                 -Wl)
-                    # Nested -Wl,-Wl means we're in NAG compiler territory. We don't support it.
+                    # Nested -Wl,-Wl means we're in NAG compiler territory, we don't support
+                    # it.
                     return 1
                     ;;
                 *)
@@ -542,10 +529,21 @@ categorize_arguments() {
     return_other_args_list=""
     return_isystem_was_used=""

-    init_path_lists return_isystem_include_dirs_list
-    init_path_lists return_include_dirs_list
-    init_path_lists return_lib_dirs_list
-    init_path_lists return_rpath_dirs_list
+    return_isystem_spack_store_include_dirs_list=""
+    return_isystem_system_include_dirs_list=""
+    return_isystem_include_dirs_list=""
+
+    return_spack_store_include_dirs_list=""
+    return_system_include_dirs_list=""
+    return_include_dirs_list=""
+
+    return_spack_store_lib_dirs_list=""
+    return_system_lib_dirs_list=""
+    return_lib_dirs_list=""
+
+    return_spack_store_rpath_dirs_list=""
+    return_system_rpath_dirs_list=""
+    return_rpath_dirs_list=""

     # Global state for keeping track of -Wl,-rpath -Wl,/path
     wl_expect_rpath=no
@@ -611,17 +609,32 @@ categorize_arguments() {
                 arg="${1#-isystem}"
                 return_isystem_was_used=true
                 if [ -z "$arg" ]; then shift; arg="$1"; fi
-                append_path_lists return_isystem_include_dirs_list "$arg"
+                path_order "$arg"
+                case $? in
+                    0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
+                    1) append return_isystem_include_dirs_list "$arg" ;;
+                    2) append return_isystem_system_include_dirs_list "$arg" ;;
+                esac
                 ;;
             -I*)
                 arg="${1#-I}"
                 if [ -z "$arg" ]; then shift; arg="$1"; fi
-                append_path_lists return_include_dirs_list "$arg"
+                path_order "$arg"
+                case $? in
+                    0) append return_spack_store_include_dirs_list "$arg" ;;
+                    1) append return_include_dirs_list "$arg" ;;
+                    2) append return_system_include_dirs_list "$arg" ;;
+                esac
                 ;;
             -L*)
                 arg="${1#-L}"
                 if [ -z "$arg" ]; then shift; arg="$1"; fi
-                append_path_lists return_lib_dirs_list "$arg"
+                path_order "$arg"
+                case $? in
+                    0) append return_spack_store_lib_dirs_list "$arg" ;;
+                    1) append return_lib_dirs_list "$arg" ;;
+                    2) append return_system_lib_dirs_list "$arg" ;;
+                esac
                 ;;
             -l*)
                 # -loopopt=0 is generated erroneously in autoconf <= 2.69,
@@ -654,17 +667,32 @@ categorize_arguments() {
                     break
                 elif [ "$xlinker_expect_rpath" = yes ]; then
                     # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
-                    append_path_lists return_rpath_dirs_list "$1"
+                    path_order "$1"
+                    case $? in
+                        0) append return_spack_store_rpath_dirs_list "$1" ;;
+                        1) append return_rpath_dirs_list "$1" ;;
+                        2) append return_system_rpath_dirs_list "$1" ;;
+                    esac
                     xlinker_expect_rpath=no
                 else
                     case "$1" in
                         -rpath=*)
                             arg="${1#-rpath=}"
-                            append_path_lists return_rpath_dirs_list "$arg"
+                            path_order "$arg"
+                            case $? in
+                                0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                                1) append return_rpath_dirs_list "$arg" ;;
+                                2) append return_system_rpath_dirs_list "$arg" ;;
+                            esac
                             ;;
                         --rpath=*)
                             arg="${1#--rpath=}"
-                            append_path_lists return_rpath_dirs_list "$arg"
+                            path_order "$arg"
+                            case $? in
+                                0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                                1) append return_rpath_dirs_list "$arg" ;;
+                                2) append return_system_rpath_dirs_list "$arg" ;;
+                            esac
                             ;;
                         -rpath|--rpath)
                             xlinker_expect_rpath=yes
@@ -681,36 +709,7 @@ categorize_arguments() {
                 "$dtags_to_strip")
                     ;;
                 *)
-                    # if mode is not ld, we can just add to other args
-                    if [ "$mode" != "ld" ]; then
-                        append return_other_args_list "$1"
-                        shift
-                        continue
-                    fi
-
-                    # if we're in linker mode, we need to parse raw RPATH args
-                    case "$1" in
-                        -rpath=*)
-                            arg="${1#-rpath=}"
-                            append_path_lists return_rpath_dirs_list "$arg"
-                            ;;
-                        --rpath=*)
-                            arg="${1#--rpath=}"
-                            append_path_lists return_rpath_dirs_list "$arg"
-                            ;;
-                        -rpath|--rpath)
-                            if [ $# -eq 1 ]; then
-                                # -rpath without value: let the linker raise an error.
-                                append return_other_args_list "$1"
-                                break
-                            fi
-                            shift
-                            append_path_lists return_rpath_dirs_list "$1"
-                            ;;
-                        *)
-                            append return_other_args_list "$1"
-                            ;;
-                    esac
+                    append return_other_args_list "$1"
                     ;;
             esac
             shift
@@ -732,10 +731,21 @@ categorize_arguments() {

 categorize_arguments "$@"

-assign_path_lists isystem_include_dirs_list return_isystem_include_dirs_list
-assign_path_lists include_dirs_list return_include_dirs_list
-assign_path_lists lib_dirs_list return_lib_dirs_list
-assign_path_lists rpath_dirs_list return_rpath_dirs_list
+spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
+system_include_dirs_list="$return_system_include_dirs_list"
+include_dirs_list="$return_include_dirs_list"
+
+spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
+system_lib_dirs_list="$return_system_lib_dirs_list"
+lib_dirs_list="$return_lib_dirs_list"
+
+spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
+system_rpath_dirs_list="$return_system_rpath_dirs_list"
+rpath_dirs_list="$return_rpath_dirs_list"
+
+isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
+isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
+isystem_include_dirs_list="$return_isystem_include_dirs_list"

 isystem_was_used="$return_isystem_was_used"
 other_args_list="$return_other_args_list"
@@ -811,10 +821,21 @@ IFS="$lsep"
     categorize_arguments $spack_flags_list
 unset IFS

-assign_path_lists spack_flags_isystem_include_dirs_list return_isystem_include_dirs_list
-assign_path_lists spack_flags_include_dirs_list return_include_dirs_list
-assign_path_lists spack_flags_lib_dirs_list return_lib_dirs_list
-assign_path_lists spack_flags_rpath_dirs_list return_rpath_dirs_list
+spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
+spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
+spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
+
+spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
+spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
+spack_flags_include_dirs_list="$return_include_dirs_list"
+
+spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
+spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
+spack_flags_lib_dirs_list="$return_lib_dirs_list"
+
+spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
+spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
+spack_flags_rpath_dirs_list="$return_rpath_dirs_list"

 spack_flags_isystem_was_used="$return_isystem_was_used"
 spack_flags_other_args_list="$return_other_args_list"
@@ -873,7 +894,7 @@ esac
 case "$mode" in
     cpp|cc|as|ccld)
         if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
-            extend spack_store_isystem_include_dirs_list SPACK_STORE_INCLUDE_DIRS
+            extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
             extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
         else
             extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
@@ -889,63 +910,64 @@ args_list="$flags_list"

 # Include search paths partitioned by (in store, non-sytem, system)
 # NOTE: adding ${lsep} to the prefix here turns every added element into two
-extend args_list spack_store_spack_flags_include_dirs_list -I
+extend args_list spack_flags_spack_store_include_dirs_list -I
 extend args_list spack_store_include_dirs_list -I

 extend args_list spack_flags_include_dirs_list -I
 extend args_list include_dirs_list -I

-extend args_list spack_store_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
-extend args_list spack_store_isystem_include_dirs_list "-isystem${lsep}"
+extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
+extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"

 extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
 extend args_list isystem_include_dirs_list "-isystem${lsep}"

-extend args_list system_spack_flags_include_dirs_list -I
+extend args_list spack_flags_system_include_dirs_list -I
 extend args_list system_include_dirs_list -I

-extend args_list system_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
-extend args_list system_isystem_include_dirs_list "-isystem${lsep}"
+extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
+extend args_list isystem_system_include_dirs_list "-isystem${lsep}"

 # Library search paths partitioned by (in store, non-sytem, system)
-extend args_list spack_store_spack_flags_lib_dirs_list "-L"
+extend args_list spack_flags_spack_store_lib_dirs_list "-L"
 extend args_list spack_store_lib_dirs_list "-L"

 extend args_list spack_flags_lib_dirs_list "-L"
 extend args_list lib_dirs_list "-L"

-extend args_list system_spack_flags_lib_dirs_list "-L"
+extend args_list spack_flags_system_lib_dirs_list "-L"
 extend args_list system_lib_dirs_list "-L"

 # RPATHs arguments
-rpath_prefix=""
 case "$mode" in
     ccld)
         if [ -n "$dtags_to_add" ] ; then
             append args_list "$linker_arg$dtags_to_add"
         fi
-        rpath_prefix="$rpath"
+        extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
+        extend args_list spack_store_rpath_dirs_list "$rpath"
+
+        extend args_list spack_flags_rpath_dirs_list "$rpath"
+        extend args_list rpath_dirs_list "$rpath"
+
+        extend args_list spack_flags_system_rpath_dirs_list "$rpath"
+        extend args_list system_rpath_dirs_list "$rpath"
         ;;
     ld)
         if [ -n "$dtags_to_add" ] ; then
             append args_list "$dtags_to_add"
         fi
-        rpath_prefix="-rpath${lsep}"
+        extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
+        extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"
+
+        extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
+        extend args_list rpath_dirs_list "-rpath${lsep}"
+
+        extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
+        extend args_list system_rpath_dirs_list "-rpath${lsep}"
         ;;
 esac

-# if mode is ccld or ld, extend RPATH lists with the prefix determined above
-if [ -n "$rpath_prefix" ]; then
-    extend args_list spack_store_spack_flags_rpath_dirs_list "$rpath_prefix"
-    extend args_list spack_store_rpath_dirs_list "$rpath_prefix"
-
-    extend args_list spack_flags_rpath_dirs_list "$rpath_prefix"
-    extend args_list rpath_dirs_list "$rpath_prefix"
-
-    extend args_list system_spack_flags_rpath_dirs_list "$rpath_prefix"
-    extend args_list system_rpath_dirs_list "$rpath_prefix"
-fi
-
 # Other arguments from the input command
 extend args_list other_args_list
 extend args_list spack_flags_other_args_list

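Both sides of the final hunk assemble the command line the same way: within each flag family the store bucket is flushed first and the system bucket last, so Spack-built paths shadow system ones; only the rpath prefix differs between ``ccld`` and ``ld`` modes. A compact sketch of that concatenation (the list contents are invented; the ordering is the point):

.. code-block:: python

   def extend(args, paths, prefix):
       """Append prefix+path for every path, like the wrapper's extend."""
       args.extend(prefix + p for p in paths)


   args = []
   # Highest priority first: spack store, then ordinary, then system paths.
   for paths in (
       ["/opt/spack/store/zlib-1.3/include"],   # invented store path
       ["/home/me/project/include"],            # invented ordinary path
       ["/usr/include"],                        # system path
   ):
       extend(args, paths, "-I")
   print(args)
   # ['-I/opt/spack/store/zlib-1.3/include', '-I/home/me/project/include', '-I/usr/include']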
2 lib/spack/external/__init__.py vendored

@@ -18,7 +18,7 @@

 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)
+* Version: 0.2.5-dev (commit 7e6740012b897ae4a950f0bba7e9726b767e921f)

 astunparse
 ----------------

@@ -81,13 +81,8 @@ def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu
         self.generation = generation
         # Only relevant for AArch64
         self.cpu_part = cpu_part
-        # Cache the "ancestor" computation
+        # Cache the ancestor computation
         self._ancestors = None
-        # Cache the "generic" computation
-        self._generic = None
-        # Cache the "family" computation
-        self._family = None

     @property
     def ancestors(self):
@@ -120,9 +115,6 @@ def __eq__(self, other):
             and self.cpu_part == other.cpu_part
         )

-    def __hash__(self):
-        return hash(self.name)
-
     @coerce_target_names
     def __ne__(self, other):
         return not self == other
@@ -179,22 +171,18 @@ def __contains__(self, feature):
     @property
     def family(self):
         """Returns the architecture family a given target belongs to"""
-        if self._family is None:
-            roots = [x for x in [self] + self.ancestors if not x.ancestors]
-            msg = "a target is expected to belong to just one architecture family"
-            msg += f"[found {', '.join(str(x) for x in roots)}]"
-            assert len(roots) == 1, msg
-            self._family = roots.pop()
-
-        return self._family
+        roots = [x for x in [self] + self.ancestors if not x.ancestors]
+        msg = "a target is expected to belong to just one architecture family"
+        msg += f"[found {', '.join(str(x) for x in roots)}]"
+        assert len(roots) == 1, msg
+
+        return roots.pop()

     @property
     def generic(self):
         """Returns the best generic architecture that is compatible with self"""
-        if self._generic is None:
-            generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
-            self._generic = max(generics, key=lambda x: len(x.ancestors))
-        return self._generic
+        generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
+        return max(generics, key=lambda x: len(x.ancestors))

     def to_dict(self):
         """Returns a dictionary representation of this object."""

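The left side of these hunks memoizes ``family`` and ``generic`` in ``None``-initialized slots so repeated property reads don't re-walk the ancestor list. The pattern in miniature (a toy class, not archspec's real ``Target``):

.. code-block:: python

   class Target:
       """Toy illustration of the None-sentinel property cache used above."""

       def __init__(self, name, ancestors):
           self.name = name
           self.ancestors = ancestors
           self._family = None  # computed lazily, at most once per instance

       @property
       def family(self):
           if self._family is None:
               roots = [x for x in [self] + self.ancestors if not x.ancestors]
               assert len(roots) == 1, "expected exactly one architecture family"
               self._family = roots.pop()
           return self._family


   x86_64 = Target("x86_64", [])
   haswell = Target("haswell", [x86_64])
   assert haswell.family is x86_64  # second read returns the cached root

On Python 3.8+ the same effect can be had with ``functools.cached_property``; the sentinel form keeps the attribute visible in ``__init__`` and works on older interpreters.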
@@ -1482,6 +1482,7 @@
             "cldemote",
             "movdir64b",
             "movdiri",
+            "pdcm",
             "serialize",
             "waitpkg"
           ],
@@ -2236,84 +2237,6 @@
         ]
       }
     },
-    "zen5": {
-      "from": ["zen4"],
-      "vendor": "AuthenticAMD",
-      "features": [
-        "abm",
-        "aes",
-        "avx",
-        "avx2",
-        "avx512_bf16",
-        "avx512_bitalg",
-        "avx512bw",
-        "avx512cd",
-        "avx512dq",
-        "avx512f",
-        "avx512ifma",
-        "avx512vbmi",
-        "avx512_vbmi2",
-        "avx512vl",
-        "avx512_vnni",
-        "avx512_vp2intersect",
-        "avx512_vpopcntdq",
-        "avx_vnni",
-        "bmi1",
-        "bmi2",
-        "clflushopt",
-        "clwb",
-        "clzero",
-        "cppc",
-        "cx16",
-        "f16c",
-        "flush_l1d",
-        "fma",
-        "fsgsbase",
-        "gfni",
-        "ibrs_enhanced",
-        "mmx",
-        "movbe",
-        "movdir64b",
-        "movdiri",
-        "pclmulqdq",
-        "popcnt",
-        "rdseed",
-        "sse",
-        "sse2",
-        "sse4_1",
-        "sse4_2",
-        "sse4a",
-        "ssse3",
-        "tsc_adjust",
-        "vaes",
-        "vpclmulqdq",
-        "xsavec",
-        "xsaveopt"
-      ],
-      "compilers": {
-        "gcc": [
-          {
-            "versions": "14.1:",
-            "name": "znver5",
-            "flags": "-march={name} -mtune={name}"
-          }
-        ],
-        "aocc": [
-          {
-            "versions": "5.0:",
-            "name": "znver5",
-            "flags": "-march={name} -mtune={name}"
-          }
-        ],
-        "clang": [
-          {
-            "versions": "19.1:",
-            "name": "znver5",
-            "flags": "-march={name} -mtune={name}"
-          }
-        ]
-      }
-    },
     "ppc64": {
       "from": [],
       "vendor": "generic",
@@ -2921,7 +2844,8 @@
       "asimdrdm",
       "lrcpc",
       "dcpop",
-      "asimddp"
+      "asimddp",
+      "ssbs"
     ],
     "compilers" : {
       "gcc": [
@@ -3018,6 +2942,7 @@
       "uscat",
       "ilrcpc",
       "flagm",
+      "ssbs",
       "dcpodp",
       "svei8mm",
       "svebf16",
@@ -3085,7 +3010,7 @@
     },
     {
       "versions": "11:",
-      "flags" : "-march=armv8.4-a+sve+fp16+bf16+crypto+i8mm+rng"
+      "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
     },
     {
       "versions": "12:",
@@ -3141,6 +3066,7 @@
       "uscat",
       "ilrcpc",
       "flagm",
+      "ssbs",
       "sb",
       "dcpodp",
       "sve2",
@@ -3253,6 +3179,7 @@
       "uscat",
       "ilrcpc",
       "flagm",
+      "ssbs",
       "sb",
       "dcpodp",
       "sve2",

@@ -41,20 +41,6 @@ def comma_and(sequence: List[str]) -> str:
     return comma_list(sequence, "and")


-def ordinal(number: int) -> str:
-    """Return the ordinal representation (1st, 2nd, 3rd, etc.) for the provided number.
-
-    Args:
-        number: int to convert to ordinal number
-
-    Returns: number's corresponding ordinal
-    """
-    idx = (number % 10) << 1
-    tens = number % 100 // 10
-    suffix = "th" if tens == 1 or idx > 6 else "thstndrd"[idx : idx + 2]
-    return f"{number}{suffix}"
-
-
 def quote(sequence: List[str], q: str = "'") -> List[str]:
     """Quotes each item in the input list with the quote character passed as second argument."""
     return [f"{q}{e}{q}" for e in sequence]

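The removed ``ordinal`` helper is compact enough to be cryptic: ``"thstndrd"`` is the four suffixes ``th``/``st``/``nd``/``rd`` packed back to back, ``(number % 10) << 1`` selects a two-character window into it, and the ``tens == 1`` guard keeps 11 through 13 on ``th``. A quick self-check of that table trick (plain Python, logic copied from the removed lines):

.. code-block:: python

   def ordinal(number: int) -> str:
       # Same logic as the removed helper above.
       idx = (number % 10) << 1
       tens = number % 100 // 10
       suffix = "th" if tens == 1 or idx > 6 else "thstndrd"[idx : idx + 2]
       return f"{number}{suffix}"


   assert [ordinal(n) for n in (1, 2, 3, 4, 11, 12, 13, 21, 102, 113)] == [
       "1st", "2nd", "3rd", "4th", "11th", "12th", "13th", "21st", "102nd", "113th"
   ]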
@@ -20,24 +20,11 @@
|
|||||||
import tempfile
|
import tempfile
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
from itertools import accumulate
|
from itertools import accumulate
|
||||||
from typing import (
|
from typing import Callable, Iterable, List, Match, Optional, Tuple, Union
|
||||||
Callable,
|
|
||||||
Deque,
|
|
||||||
Dict,
|
|
||||||
Generator,
|
|
||||||
Iterable,
|
|
||||||
List,
|
|
||||||
Match,
|
|
||||||
Optional,
|
|
||||||
Sequence,
|
|
||||||
Set,
|
|
||||||
Tuple,
|
|
||||||
Union,
|
|
||||||
)
|
|
||||||
|
|
||||||
import llnl.util.symlink
|
import llnl.util.symlink
|
||||||
from llnl.util import tty
|
from llnl.util import tty
|
||||||
from llnl.util.lang import dedupe, fnmatch_translate_multiple, memoized
|
from llnl.util.lang import dedupe, memoized
|
||||||
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink
|
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink
|
||||||
|
|
||||||
from ..path import path_to_os_path, system_path_filter
|
from ..path import path_to_os_path, system_path_filter
|
||||||
@@ -60,7 +47,6 @@
|
|||||||
"copy_mode",
|
"copy_mode",
|
||||||
"filter_file",
|
"filter_file",
|
||||||
"find",
|
"find",
|
||||||
"find_first",
|
|
||||||
"find_headers",
|
"find_headers",
|
||||||
"find_all_headers",
|
"find_all_headers",
|
||||||
"find_libraries",
|
"find_libraries",
|
||||||
@@ -98,8 +84,6 @@
|
|||||||
"visit_directory_tree",
|
"visit_directory_tree",
|
||||||
]
|
]
|
||||||
|
|
||||||
Path = Union[str, pathlib.Path]
|
|
||||||
|
|
||||||
if sys.version_info < (3, 7, 4):
|
if sys.version_info < (3, 7, 4):
|
||||||
# monkeypatch shutil.copystat to fix PermissionError when copying read-only
|
# monkeypatch shutil.copystat to fix PermissionError when copying read-only
|
||||||
# files on Lustre when using Python < 3.7.4
|
# files on Lustre when using Python < 3.7.4
|
||||||
@@ -1688,203 +1672,105 @@ def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2)
|
|||||||
return FindFirstFile(root, *files, bfs_depth=bfs_depth).find()
|
return FindFirstFile(root, *files, bfs_depth=bfs_depth).find()
|
||||||
|
|
||||||
|
|
||||||
def find(
|
def find(root, files, recursive=True):
|
||||||
root: Union[Path, Sequence[Path]],
|
"""Search for ``files`` starting from the ``root`` directory.
|
||||||
files: Union[str, Sequence[str]],
|
|
||||||
recursive: bool = True,
|
Like GNU/BSD find but written entirely in Python.
|
||||||
max_depth: Optional[int] = None,
|
|
||||||
) -> List[str]:
|
Examples:
|
||||||
"""Finds all files matching the patterns from ``files`` starting from ``root``. This function
|
|
||||||
returns a deterministic result for the same input and directory structure when run multiple
|
.. code-block:: console
|
||||||
times. Symlinked directories are followed, and unique directories are searched only once. Each
|
|
||||||
matching file is returned only once at lowest depth in case multiple paths exist due to
|
$ find /usr -name python
|
||||||
symlinked directories.
|
|
||||||
|
is equivalent to:
|
||||||
|
|
||||||
|
>>> find('/usr', 'python')
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ find /usr/local/bin -maxdepth 1 -name python
|
||||||
|
|
||||||
|
is equivalent to:
|
||||||
|
|
||||||
|
>>> find('/usr/local/bin', 'python', recursive=False)
|
||||||
|
|
||||||
Accepts any glob characters accepted by fnmatch:
|
Accepts any glob characters accepted by fnmatch:
|
||||||
|
|
||||||
========== ====================================
|
========== ====================================
|
||||||
Pattern Meaning
|
Pattern Meaning
|
||||||
========== ====================================
|
========== ====================================
|
||||||
``*`` matches one or more characters
|
``*`` matches everything
|
||||||
``?`` matches any single character
|
``?`` matches any single character
|
||||||
``[seq]`` matches any character in ``seq``
|
``[seq]`` matches any character in ``seq``
|
||||||
``[!seq]`` matches any character not in ``seq``
|
``[!seq]`` matches any character not in ``seq``
|
||||||
========== ====================================
|
========== ====================================
|
||||||
|
|
||||||
Examples:
|
|
||||||
|
|
||||||
>>> find("/usr", "*.txt", recursive=True, max_depth=2)
|
|
||||||
|
|
||||||
finds all files with the extension ``.txt`` in the directory ``/usr`` and subdirectories up to
|
|
||||||
depth 2.
|
|
||||||
|
|
||||||
>>> find(["/usr", "/var"], ["*.txt", "*.log"], recursive=True)
|
|
||||||
|
|
||||||
finds all files with the extension ``.txt`` or ``.log`` in the directories ``/usr`` and
|
|
||||||
``/var`` at any depth.
|
|
||||||
|
|
||||||
>>> find("/usr", "GL/*.h", recursive=True)
|
|
||||||
|
|
||||||
finds all header files in a directory GL at any depth in the directory ``/usr``.
|
|
||||||
|
|
||||||
Parameters:
|
Parameters:
|
||||||
root: One or more root directories to start searching from
|
root (str): The root directory to start searching from
|
||||||
files: One or more filename patterns to search for
|
files (str or collections.abc.Sequence): Library name(s) to search for
|
||||||
recursive: if False search only root, if True descends from roots. Defaults to True.
|
recursive (bool): if False search only root folder,
|
||||||
max_depth: if set, don't search below this depth. Cannot be set if recursive is False
|
if True descends top-down from the root. Defaults to True.
|
||||||
|
|
||||||
Returns a list of absolute, matching file paths.
|
Returns:
|
||||||
|
list: The files that have been found
|
||||||
"""
|
"""
|
||||||
if isinstance(root, (str, pathlib.Path)):
|
|
||||||
root = [root]
|
|
||||||
elif not isinstance(root, collections.abc.Sequence):
|
|
||||||
raise TypeError(f"'root' arg must be a path or a sequence of paths, not '{type(root)}']")
|
|
||||||
|
|
||||||
if isinstance(files, str):
|
if isinstance(files, str):
|
||||||
files = [files]
|
files = [files]
|
||||||
elif not isinstance(files, collections.abc.Sequence):
|
|
||||||
raise TypeError(f"'files' arg must be str or a sequence of str, not '{type(files)}']")
|
|
||||||
|
|
||||||
# If recursive is false, max_depth can only be None or 0
|
if recursive:
|
||||||
if max_depth and not recursive:
|
tty.debug(f"Find (recursive): {root} {str(files)}")
|
||||||
raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")
|
result = _find_recursive(root, files)
|
||||||
|
else:
|
||||||
|
tty.debug(f"Find (not recursive): {root} {str(files)}")
|
||||||
|
result = _find_non_recursive(root, files)
|
||||||
|
|
||||||
tty.debug(f"Find (max depth = {max_depth}): {root} {files}")
|
tty.debug(f"Find complete: {root} {str(files)}")
|
||||||
if not recursive:
|
|
||||||
max_depth = 0
|
|
||||||
elif max_depth is None:
|
|
||||||
max_depth = sys.maxsize
|
|
||||||
result = _find_max_depth(root, files, max_depth)
|
|
||||||
tty.debug(f"Find complete: {root} {files}")
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def _log_file_access_issue(e: OSError, path: str) -> None:
|
@system_path_filter
|
||||||
errno_name = errno.errorcode.get(e.errno, "UNKNOWN")
|
def _find_recursive(root, search_files):
|
||||||
tty.debug(f"find must skip {path}: {errno_name} {e}")
|
# The variable here is **on purpose** a defaultdict. The idea is that
|
||||||
|
# we want to poke the filesystem as little as possible, but still maintain
|
||||||
|
# stability in the order of the answer. Thus we are recording each library
|
||||||
|
# found in a key, and reconstructing the stable order later.
|
||||||
|
found_files = collections.defaultdict(list)
|
||||||
|
|
||||||
|
# Make the path absolute to have os.walk also return an absolute path
|
||||||
|
root = os.path.abspath(root)
|
||||||
|
for path, _, list_files in os.walk(root):
|
||||||
|
for search_file in search_files:
|
||||||
|
matches = glob.glob(os.path.join(path, search_file))
|
||||||
|
matches = [os.path.join(path, x) for x in matches]
|
||||||
|
found_files[search_file].extend(matches)
|
||||||
|
|
||||||
|
answer = []
|
||||||
|
for search_file in search_files:
|
||||||
|
answer.extend(found_files[search_file])
|
||||||
|
|
||||||
|
return answer
|
||||||
|
|
||||||
|
|
||||||
def _file_id(s: os.stat_result) -> Tuple[int, int]:
|
@system_path_filter
|
||||||
# Note: on windows, st_ino is the file index and st_dev is the volume serial number. See
|
def _find_non_recursive(root, search_files):
|
||||||
# https://github.com/python/cpython/blob/3.9/Python/fileutils.c
|
# The variable here is **on purpose** a defaultdict as os.list_dir
|
||||||
return (s.st_ino, s.st_dev)
|
-def _dedupe_files(paths: List[str]) -> List[str]:
-    """Deduplicate files by inode and device, dropping files that cannot be accessed."""
-    unique_files: List[str] = []
-    # tuple of (inode, device) for each file without following symlinks
-    visited: Set[Tuple[int, int]] = set()
-    for path in paths:
-        try:
-            stat_info = os.lstat(path)
-        except OSError as e:
-            _log_file_access_issue(e, path)
-            continue
-        file_id = _file_id(stat_info)
-        if file_id not in visited:
-            unique_files.append(path)
-            visited.add(file_id)
-    return unique_files
-
-
-def _find_max_depth(
-    roots: Sequence[Path], globs: Sequence[str], max_depth: int = sys.maxsize
-) -> List[str]:
-    """See ``find`` for the public API."""
-    # We optimize for the common case of simple filename only patterns: a single, combined regex
-    # is used. For complex patterns that include path components, we use a slower glob call from
-    # every directory we visit within max_depth.
-    filename_only_patterns = {
-        f"pattern_{i}": os.path.normcase(x) for i, x in enumerate(globs) if "/" not in x
-    }
-    complex_patterns = {f"pattern_{i}": x for i, x in enumerate(globs) if "/" in x}
-    regex = re.compile(fnmatch_translate_multiple(filename_only_patterns))
-    # Ordered dictionary that keeps track of what pattern found which files
-    matched_paths: Dict[str, List[str]] = {f"pattern_{i}": [] for i, _ in enumerate(globs)}
-    # Ensure returned paths are always absolute
-    roots = [os.path.abspath(r) for r in roots]
-    # Breadth-first search queue. Each element is a tuple of (depth, dir)
-    dir_queue: Deque[Tuple[int, str]] = collections.deque()
-    # Set of visited directories. Each element is a tuple of (inode, device)
-    visited_dirs: Set[Tuple[int, int]] = set()
-
-    for root in roots:
-        try:
-            stat_root = os.stat(root)
-        except OSError as e:
-            _log_file_access_issue(e, root)
-            continue
-        dir_id = _file_id(stat_root)
-        if dir_id not in visited_dirs:
-            dir_queue.appendleft((0, root))
-            visited_dirs.add(dir_id)
-
-    while dir_queue:
-        depth, curr_dir = dir_queue.pop()
-        try:
-            dir_iter = os.scandir(curr_dir)
-        except OSError as e:
-            _log_file_access_issue(e, curr_dir)
-            continue
-
-        # Use glob.glob for complex patterns.
-        for pattern_name, pattern in complex_patterns.items():
-            matched_paths[pattern_name].extend(
-                path for path in glob.glob(os.path.join(curr_dir, pattern))
-            )
-
-        # List of subdirectories by path and (inode, device) tuple
-        subdirs: List[Tuple[str, Tuple[int, int]]] = []
-
-        with dir_iter:
-            for dir_entry in dir_iter:
-
-                # Match filename only patterns
-                if filename_only_patterns:
-                    m = regex.match(os.path.normcase(dir_entry.name))
-                    if m:
-                        for pattern_name in filename_only_patterns:
-                            if m.group(pattern_name):
-                                matched_paths[pattern_name].append(dir_entry.path)
-                                break
-
-                # Collect subdirectories
-                if depth >= max_depth:
-                    continue
-
-                try:
-                    if not dir_entry.is_dir(follow_symlinks=True):
-                        continue
-                    if sys.platform == "win32":
-                        # Note: st_ino/st_dev on DirEntry.stat are not set on Windows, so we have
-                        # to call os.stat
-                        stat_info = os.stat(dir_entry.path, follow_symlinks=True)
-                    else:
-                        stat_info = dir_entry.stat(follow_symlinks=True)
-                except OSError as e:
-                    # Possible permission issue, or a symlink that cannot be resolved (ELOOP).
-                    _log_file_access_issue(e, dir_entry.path)
-                    continue
-
-                subdirs.append((dir_entry.path, _file_id(stat_info)))
-
-        # Enqueue subdirectories in a deterministic order
-        if subdirs:
-            subdirs.sort(key=lambda s: os.path.basename(s[0]))
-            for subdir, subdir_id in subdirs:
-                if subdir_id not in visited_dirs:
-                    dir_queue.appendleft((depth + 1, subdir))
-                    visited_dirs.add(subdir_id)
-
-    # Sort the matched paths for deterministic output
-    for paths in matched_paths.values():
-        paths.sort()
-    all_matching_paths = [path for paths in matched_paths.values() for path in paths]
-
-    # We only dedupe files if we have any complex patterns, since only they can match the same file
-    # multiple times
-    return _dedupe_files(all_matching_paths) if complex_patterns else all_matching_paths
+    # can return files in any order (does not preserve stability)
+    found_files = collections.defaultdict(list)
+
+    # Make the path absolute to have absolute path returned
+    root = os.path.abspath(root)
+
+    for search_file in search_files:
+        matches = glob.glob(os.path.join(root, search_file))
+        matches = [os.path.join(root, x) for x in matches]
+        found_files[search_file].extend(matches)
+
+    answer = []
+    for search_file in search_files:
+        answer.extend(found_files[search_file])
+
+    return answer


 # Utilities for libraries and headers
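Note: the removed branch above dedupes matches by (inode, device) rather than by string path, since two patterns with path components can reach the same file through different spellings. A minimal standalone sketch of that idea (hypothetical helper, not code from either branch):

    import os
    from typing import List, Set, Tuple

    def dedupe_by_inode(paths: List[str]) -> List[str]:
        """Keep the first path per (inode, device); drop unreadable paths."""
        unique: List[str] = []
        seen: Set[Tuple[int, int]] = set()
        for path in paths:
            try:
                st = os.lstat(path)  # lstat: do not follow symlinks
            except OSError:
                continue
            key = (st.st_ino, st.st_dev)
            if key not in seen:
                seen.add(key)
                unique.append(path)
        return unique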
@@ -2323,9 +2209,7 @@ def find_system_libraries(libraries, shared=True):
     return libraries_found


-def find_libraries(
-    libraries, root, shared=True, recursive=False, runtime=True, max_depth: Optional[int] = None
-):
+def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
     """Returns an iterable of full paths to libraries found in a root dir.

     Accepts any glob characters accepted by fnmatch:
@@ -2346,8 +2230,6 @@ def find_libraries(
         otherwise for static. Defaults to True.
         recursive (bool): if False search only root folder,
             if True descends top-down from the root. Defaults to False.
-        max_depth (int): if set, don't search below this depth. Cannot be set
-            if recursive is False
         runtime (bool): Windows only option, no-op elsewhere. If true,
             search for runtime shared libs (.DLL), otherwise, search
             for .Lib files. If shared is false, this has no meaning.
@@ -2356,7 +2238,6 @@ def find_libraries(
     Returns:
         LibraryList: The libraries that have been found
     """
-
     if isinstance(libraries, str):
         libraries = [libraries]
     elif not isinstance(libraries, collections.abc.Sequence):
@@ -2389,10 +2270,8 @@ def find_libraries(
     libraries = ["{0}.{1}".format(lib, suffix) for lib in libraries for suffix in suffixes]

     if not recursive:
-        if max_depth:
-            raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")
         # If not recursive, look for the libraries directly in root
-        return LibraryList(find(root, libraries, recursive=False))
+        return LibraryList(find(root, libraries, False))

     # To speedup the search for external packages configured e.g. in /usr,
     # perform first non-recursive search in root/lib then in root/lib64 and
@@ -2410,7 +2289,7 @@ def find_libraries(
         if found_libs:
             break
     else:
-        found_libs = find(root, libraries, recursive=True, max_depth=max_depth)
+        found_libs = find(root, libraries, True)

     return LibraryList(found_libs)

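Note: a hedged usage sketch of `find_libraries` as documented above. The `max_depth` keyword exists only on the `-` side of this hunk, and the root path here is hypothetical:

    from llnl.util.filesystem import find_libraries

    # Full recursive search (both sides support this form)
    libs = find_libraries(["libhdf5", "libz"], root="/opt/software", recursive=True)

    # Depth-limited search ('-' side only); max_depth with recursive=False raises
    shallow = find_libraries("libhdf5", root="/opt/software", recursive=True, max_depth=2)
    print(libs.joined(";"))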
@@ -2773,6 +2652,22 @@ def prefixes(path):
     return paths


+@system_path_filter
+def md5sum(file):
+    """Compute the MD5 sum of a file.
+
+    Args:
+        file (str): file to be checksummed
+
+    Returns:
+        MD5 sum of the file's content
+    """
+    md5 = hashlib.md5()
+    with open(file, "rb") as f:
+        md5.update(f.read())
+    return md5.digest()
+
+
 @system_path_filter
 def remove_directory_contents(dir):
     """Remove all contents of a directory."""
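Note: the re-added `md5sum` returns raw digest bytes (`md5.digest()`), not a hex string, so callers that want a printable checksum need `.hex()`. Hedged usage sketch with a hypothetical path:

    from llnl.util.filesystem import md5sum

    digest = md5sum("/tmp/example.txt")  # bytes, not a hex string
    print(digest.hex())                  # printable form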
@@ -2823,25 +2718,6 @@ def temporary_dir(
     remove_directory_contents(tmp_dir)


-@contextmanager
-def edit_in_place_through_temporary_file(file_path: str) -> Generator[str, None, None]:
-    """Context manager for modifying ``file_path`` in place, preserving its inode and hardlinks,
-    for functions or external tools that do not support in-place editing. Notice that this function
-    is unsafe in that it works with paths instead of a file descriptors, but this is by design,
-    since we assume the call site will create a new inode at the same path."""
-    tmp_fd, tmp_path = tempfile.mkstemp(
-        dir=os.path.dirname(file_path), prefix=f"{os.path.basename(file_path)}."
-    )
-    # windows cannot replace a file with open fds, so close since the call site needs to replace.
-    os.close(tmp_fd)
-    try:
-        shutil.copyfile(file_path, tmp_path, follow_symlinks=True)
-        yield tmp_path
-        shutil.copyfile(tmp_path, file_path, follow_symlinks=True)
-    finally:
-        os.unlink(tmp_path)
-
-
 def filesummary(path, print_bytes=16) -> Tuple[int, bytes]:
     """Create a small summary of the given file. Does not error
     when file does not exist.
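Note: the removed context manager copies the file out, yields the copy, and copies it back, so the original inode and any hardlinks survive tools that replace files wholesale. A hedged call-pattern sketch (hypothetical path):

    from llnl.util.filesystem import edit_in_place_through_temporary_file

    with edit_in_place_through_temporary_file("/opt/app/config.ini") as tmp_path:
        with open(tmp_path, "a") as f:
            f.write("# appended via the temporary copy\n")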
@@ -5,17 +5,14 @@

 import collections.abc
 import contextlib
-import fnmatch
 import functools
 import itertools
 import os
 import re
 import sys
 import traceback
-import typing
-import warnings
 from datetime import datetime, timedelta
-from typing import Callable, Dict, Iterable, List, Tuple, TypeVar
+from typing import Any, Callable, Iterable, List, Tuple

 # Ignore emacs backups when listing modules
 ignore_modules = r"^\.#|~$"
@@ -861,19 +858,6 @@ def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
     return line_list


-if sys.version_info >= (3, 9):
-    PatternStr = re.Pattern[str]
-else:
-    PatternStr = typing.Pattern[str]
-
-
-def fnmatch_translate_multiple(named_patterns: Dict[str, str]) -> str:
-    """Similar to ``fnmatch.translate``, but takes an ordered dictionary where keys are pattern
-    names, and values are filename patterns. The output is a regex that matches any of the
-    patterns in order, and named capture groups are used to identify which pattern matched."""
-    return "|".join(f"(?P<{n}>{fnmatch.translate(p)})" for n, p in named_patterns.items())
-
-
 @contextlib.contextmanager
 def nullcontext(*args, **kwargs):
     """Empty context manager.
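Note: what the removed `fnmatch_translate_multiple` builds, shown standalone — one alternation regex with a named group per pattern, so a single `regex.match` identifies which glob matched (hypothetical equivalent, not the library code):

    import fnmatch
    import re

    named = {"pattern_0": "*.so", "pattern_1": "lib*.a"}
    combined = "|".join(f"(?P<{n}>{fnmatch.translate(p)})" for n, p in named.items())
    m = re.match(combined, "libz.a")
    assert m is not None and m.group("pattern_1") == "libz.a"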
@@ -886,12 +870,18 @@ class UnhashableArguments(TypeError):
     """Raise when an @memoized function receives unhashable arg or kwarg values."""


-T = TypeVar("T")
+def enum(**kwargs):
+    """Return an enum-like class.
+
+    Args:
+        **kwargs: explicit dictionary of enums
+    """
+    return type("Enum", (object,), kwargs)


 def stable_partition(
-    input_iterable: Iterable[T], predicate_fn: Callable[[T], bool]
-) -> Tuple[List[T], List[T]]:
+    input_iterable: Iterable, predicate_fn: Callable[[Any], bool]
+) -> Tuple[List[Any], List[Any]]:
     """Partition the input iterable according to a custom predicate.

     Args:
@@ -903,13 +893,12 @@ def stable_partition(
         Tuple of the list of elements evaluating to True, and
         list of elements evaluating to False.
     """
-    true_items: List[T] = []
-    false_items: List[T] = []
+    true_items, false_items = [], []
     for item in input_iterable:
         if predicate_fn(item):
             true_items.append(item)
-        else:
-            false_items.append(item)
+            continue
+        false_items.append(item)
     return true_items, false_items

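Note: both versions of `stable_partition` keep input order within each output list; a quick check of the shared contract:

    from llnl.util.lang import stable_partition

    evens, odds = stable_partition([3, 1, 4, 1, 5, 9, 2, 6], lambda x: x % 2 == 0)
    assert evens == [4, 2, 6] and odds == [3, 1, 1, 5, 9]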
@@ -921,21 +910,6 @@ def ensure_last(lst, *elements):
         lst.append(lst.pop(lst.index(elt)))


-class Const:
-    """Class level constant, raises when trying to set the attribute"""
-
-    __slots__ = ["value"]
-
-    def __init__(self, value):
-        self.value = value
-
-    def __get__(self, instance, owner):
-        return self.value
-
-    def __set__(self, instance, value):
-        raise TypeError(f"Const value does not support assignment [value={self.value}]")
-
-
 class TypedMutableSequence(collections.abc.MutableSequence):
     """Base class that behaves like a list, just with a different type.

@@ -1040,42 +1014,3 @@ def __init__(self, callback):

     def __get__(self, instance, owner):
         return self.callback(owner)
-
-
-class DeprecatedProperty:
-    """Data descriptor to error or warn when a deprecated property is accessed.
-
-    Derived classes must define a factory method to return an adaptor for the deprecated
-    property, if the descriptor is not set to error.
-    """
-
-    __slots__ = ["name"]
-
-    #: 0 - Nothing
-    #: 1 - Warning
-    #: 2 - Error
-    error_lvl = 0
-
-    def __init__(self, name: str) -> None:
-        self.name = name
-
-    def __get__(self, instance, owner):
-        if instance is None:
-            return self
-
-        if self.error_lvl == 1:
-            warnings.warn(
-                f"accessing the '{self.name}' property of '{instance}', which is deprecated"
-            )
-        elif self.error_lvl == 2:
-            raise AttributeError(f"cannot access the '{self.name}' attribute of '{instance}'")
-
-        return self.factory(instance, owner)
-
-    def __set__(self, instance, value):
-        raise TypeError(
-            f"the deprecated property '{self.name}' of '{instance}' does not support assignment"
-        )
-
-    def factory(self, instance, owner):
-        raise NotImplementedError("must be implemented by derived classes")
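Note: `Const` (removed above) is a data descriptor, so assignment through an instance is intercepted by `__set__`. A hedged sketch against the `-` side that still defines it:

    from llnl.util.lang import Const

    class Settings:
        retries = Const(3)

    s = Settings()
    assert s.retries == 3
    try:
        s.retries = 5
    except TypeError:
        pass  # Const rejects assignment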
@@ -263,9 +263,7 @@ def match_to_ansi(match):
                f"Incomplete color format: '{match.group(0)}' in '{match.string}'"
            )

-        color_number = colors.get(color_code, "")
-        semi = ";" if color_number else ""
-        ansi_code = _escape(f"{styles[style]}{semi}{color_number}", color, enclose, zsh)
+        ansi_code = _escape(f"{styles[style]};{colors.get(color_code, '')}", color, enclose, zsh)
         if text:
            return f"{ansi_code}{text}{_escape(0, color, enclose, zsh)}"
        else:
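Note: the `-` side drops the stray `;` when no color number is present, since `ESC[1;m` and `ESC[1m` are not the same escape sequence to every terminal. Runnable illustration with hypothetical values:

    style, color_number = "1", ""  # bold, but no color code matched
    semi = ";" if color_number else ""
    print(f"\033[{style}{semi}{color_number}mhello\033[0m")  # emits ESC[1m, not ESC[1;m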
@@ -10,6 +10,7 @@
 import errno
 import io
 import multiprocessing
+import multiprocessing.connection
 import os
 import re
 import select
@@ -18,10 +19,9 @@
 import threading
 import traceback
 from contextlib import contextmanager
-from multiprocessing.connection import Connection
 from threading import Thread
 from types import ModuleType
-from typing import Callable, Optional
+from typing import Optional

 import llnl.util.tty as tty

@@ -345,6 +345,49 @@ def close(self):
         self.file.close()


+class MultiProcessFd:
+    """Return an object which stores a file descriptor and can be passed as an
+    argument to a function run with ``multiprocessing.Process``, such that
+    the file descriptor is available in the subprocess."""
+
+    def __init__(self, fd):
+        self._connection = None
+        self._fd = None
+        if sys.version_info >= (3, 8):
+            self._connection = multiprocessing.connection.Connection(fd)
+        else:
+            self._fd = fd
+
+    @property
+    def fd(self):
+        if self._connection:
+            return self._connection._handle
+        else:
+            return self._fd
+
+    def close(self):
+        if self._connection:
+            self._connection.close()
+        else:
+            os.close(self._fd)
+
+
+def close_connection_and_file(multiprocess_fd, file):
+    # MultiprocessFd is intended to transmit a FD
+    # to a child process, this FD is then opened to a Python File object
+    # (using fdopen). In >= 3.8, MultiprocessFd encapsulates a
+    # multiprocessing.connection.Connection; Connection closes the FD
+    # when it is deleted, and prints a warning about duplicate closure if
+    # it is not explicitly closed. In < 3.8, MultiprocessFd encapsulates a
+    # simple FD; closing the FD here appears to conflict with
+    # closure of the File object (in < 3.8 that is). Therefore this needs
+    # to choose whether to close the File or the Connection.
+    if sys.version_info >= (3, 8):
+        multiprocess_fd.close()
+    else:
+        file.close()
+
+
 @contextmanager
 def replace_environment(env):
     """Replace the current environment (`os.environ`) with `env`.
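Note: a hedged sketch of the pattern `MultiProcessFd` (re-added above) enables — handing an OS-level file descriptor to a `multiprocessing` child. It assumes the wrapper can be sent as a `Process` argument under the active start method (true for fork; spawn relies on multiprocessing's fd-reduction support):

    import multiprocessing
    import os

    def child(mp_fd):
        with os.fdopen(mp_fd.fd, "r") as f:
            print("child read:", f.readline().strip())

    if __name__ == "__main__":
        read_fd, write_fd = os.pipe()
        os.write(write_fd, b"hello\n")
        os.close(write_fd)
        p = multiprocessing.Process(target=child, args=(MultiProcessFd(read_fd),))
        p.start()
        p.join()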
@@ -502,20 +545,22 @@ def __enter__(self):
            # forcing debug output.
            self._saved_debug = tty._debug

-            # Pipe for redirecting output to logger
-            read_fd, self.write_fd = multiprocessing.Pipe(duplex=False)
+            # OS-level pipe for redirecting output to logger
+            read_fd, write_fd = os.pipe()

-            # Pipe for communication back from the daemon
+            read_multiprocess_fd = MultiProcessFd(read_fd)
+
+            # Multiprocessing pipe for communication back from the daemon
            # Currently only used to save echo value between uses
-            self.parent_pipe, child_pipe = multiprocessing.Pipe(duplex=False)
+            self.parent_pipe, child_pipe = multiprocessing.Pipe()

            # Sets a daemon that writes to file what it reads from a pipe
            try:
                # need to pass this b/c multiprocessing closes stdin in child.
-                input_fd = None
+                input_multiprocess_fd = None
                try:
                    if sys.stdin.isatty():
-                        input_fd = Connection(os.dup(sys.stdin.fileno()))
+                        input_multiprocess_fd = MultiProcessFd(os.dup(sys.stdin.fileno()))
                except BaseException:
                    # just don't forward input if this fails
                    pass
@@ -524,9 +569,9 @@ def __enter__(self):
                self.process = multiprocessing.Process(
                    target=_writer_daemon,
                    args=(
-                        input_fd,
-                        read_fd,
-                        self.write_fd,
+                        input_multiprocess_fd,
+                        read_multiprocess_fd,
+                        write_fd,
                        self.echo,
                        self.log_file,
                        child_pipe,
@@ -537,9 +582,9 @@ def __enter__(self):
                self.process.start()

            finally:
-                if input_fd:
-                    input_fd.close()
-                read_fd.close()
+                if input_multiprocess_fd:
+                    input_multiprocess_fd.close()
+                read_multiprocess_fd.close()

        # Flush immediately before redirecting so that anything buffered
        # goes to the original stream
@@ -557,9 +602,9 @@ def __enter__(self):
            self._saved_stderr = os.dup(sys.stderr.fileno())

            # redirect to the pipe we created above
-            os.dup2(self.write_fd.fileno(), sys.stdout.fileno())
-            os.dup2(self.write_fd.fileno(), sys.stderr.fileno())
-            self.write_fd.close()
+            os.dup2(write_fd, sys.stdout.fileno())
+            os.dup2(write_fd, sys.stderr.fileno())
+            os.close(write_fd)

        else:
            # Handle I/O the Python way. This won't redirect lower-level
@@ -572,7 +617,7 @@ def __enter__(self):
            self._saved_stderr = sys.stderr

            # create a file object for the pipe; redirect to it.
-            pipe_fd_out = os.fdopen(self.write_fd.fileno(), "w", closefd=False)
+            pipe_fd_out = os.fdopen(write_fd, "w")
            sys.stdout = pipe_fd_out
            sys.stderr = pipe_fd_out

@@ -608,7 +653,6 @@ def __exit__(self, exc_type, exc_val, exc_tb):
        else:
            sys.stdout = self._saved_stdout
            sys.stderr = self._saved_stderr
-            self.write_fd.close()

        # print log contents in parent if needed.
        if self.log_file.write_in_parent:
@@ -822,14 +866,14 @@ def force_echo(self):


 def _writer_daemon(
-    stdin_fd: Optional[Connection],
-    read_fd: Connection,
-    write_fd: Connection,
-    echo: bool,
-    log_file_wrapper: FileWrapper,
-    control_fd: Connection,
-    filter_fn: Optional[Callable[[str], str]],
-) -> None:
+    stdin_multiprocess_fd,
+    read_multiprocess_fd,
+    write_fd,
+    echo,
+    log_file_wrapper,
+    control_pipe,
+    filter_fn,
+):
     """Daemon used by ``log_output`` to write to a log file and to ``stdout``.

     The daemon receives output from the parent process and writes it both
@@ -866,37 +910,43 @@ def _writer_daemon(
    ``StringIO`` in the parent. This is mainly for testing.

    Arguments:
-        stdin_fd: optional input from the terminal
-        read_fd: pipe for reading from parent's redirected stdout
-        echo: initial echo setting -- controlled by user and preserved across multiple writer
-            daemons
-        log_file_wrapper: file to log all output
-        control_pipe: multiprocessing pipe on which to send control information to the parent
-        filter_fn: optional function to filter each line of output
+        stdin_multiprocess_fd (int): input from the terminal
+        read_multiprocess_fd (int): pipe for reading from parent's redirected
+            stdout
+        echo (bool): initial echo setting -- controlled by user and
+            preserved across multiple writer daemons
+        log_file_wrapper (FileWrapper): file to log all output
+        control_pipe (Pipe): multiprocessing pipe on which to send control
+            information to the parent
+        filter_fn (callable, optional): function to filter each line of output

    """
-    # This process depends on closing all instances of write_pipe to terminate the reading loop
-    write_fd.close()
+    # If this process was forked, then it will inherit file descriptors from
+    # the parent process. This process depends on closing all instances of
+    # write_fd to terminate the reading loop, so we close the file descriptor
+    # here. Forking is the process spawning method everywhere except Mac OS
+    # for Python >= 3.8 and on Windows
+    if sys.version_info < (3, 8) or sys.platform != "darwin":
+        os.close(write_fd)

    # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
    #    that prevents unbuffered text I/O.
    # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
-    # 3. closefd=False because Connection has "ownership"
-    read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)
+    in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")

-    if stdin_fd:
-        stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
+    if stdin_multiprocess_fd:
+        stdin = os.fdopen(stdin_multiprocess_fd.fd)
    else:
-        stdin_file = None
+        stdin = None

    # list of streams to select from
-    istreams = [read_file, stdin_file] if stdin_file else [read_file]
+    istreams = [in_pipe, stdin] if stdin else [in_pipe]
    force_echo = False  # parent can force echo for certain output

    log_file = log_file_wrapper.unwrap()

    try:
-        with keyboard_input(stdin_file) as kb:
+        with keyboard_input(stdin) as kb:
            while True:
                # fix the terminal settings if we recently came to
                # the foreground
@@ -909,12 +959,12 @@ def _writer_daemon(
                # Allow user to toggle echo with 'v' key.
                # Currently ignores other chars.
                # only read stdin if we're in the foreground
-                if stdin_file and stdin_file in rlist and not _is_background_tty(stdin_file):
+                if stdin in rlist and not _is_background_tty(stdin):
                    # it's possible to be backgrounded between the above
                    # check and the read, so we ignore SIGTTIN here.
                    with ignore_signal(signal.SIGTTIN):
                        try:
-                            if stdin_file.read(1) == "v":
+                            if stdin.read(1) == "v":
                                echo = not echo
                        except IOError as e:
                            # If SIGTTIN is ignored, the system gives EIO
@@ -923,13 +973,13 @@ def _writer_daemon(
                        if e.errno != errno.EIO:
                            raise

-                if read_file in rlist:
+                if in_pipe in rlist:
                    line_count = 0
                    try:
                        while line_count < 100:
                            # Handle output from the calling process.
                            try:
-                                line = _retry(read_file.readline)()
+                                line = _retry(in_pipe.readline)()
                            except UnicodeDecodeError:
                                # installs like --test=root gpgme produce non-UTF8 logs
                                line = "<line lost: output was not encoded as UTF-8>\n"
@@ -958,7 +1008,7 @@ def _writer_daemon(
                            if xoff in controls:
                                force_echo = False

-                            if not _input_available(read_file):
+                            if not _input_available(in_pipe):
                                break
                    finally:
                        if line_count > 0:
@@ -973,14 +1023,14 @@ def _writer_daemon(
    finally:
        # send written data back to parent if we used a StringIO
        if isinstance(log_file, io.StringIO):
-            control_fd.send(log_file.getvalue())
+            control_pipe.send(log_file.getvalue())
        log_file_wrapper.close()
-        read_fd.close()
-        if stdin_fd:
-            stdin_fd.close()
+        close_connection_and_file(read_multiprocess_fd, in_pipe)
+        if stdin_multiprocess_fd:
+            close_connection_and_file(stdin_multiprocess_fd, stdin)

        # send echo value back to the parent so it can be preserved.
-        control_fd.send(echo)
+        control_pipe.send(echo)


 def _retry(function):
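Note: the core of the fd branch above, in isolation — `os.dup2` rebinding the process-level stdout to a pipe captures output that bypasses Python's `sys.stdout`, which is why the daemon reads from an OS pipe at all. Hedged minimal sketch:

    import os
    import sys

    read_fd, write_fd = os.pipe()
    saved = os.dup(sys.stdout.fileno())

    os.dup2(write_fd, sys.stdout.fileno())  # stdout now feeds the pipe
    os.close(write_fd)
    print("captured")
    sys.stdout.flush()

    os.dup2(saved, sys.stdout.fileno())     # restore the real stdout
    os.close(saved)
    print(os.read(read_fd, 1024).decode(), end="")  # -> captured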
@@ -3,15 +3,8 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import os
-import re
-from typing import Optional
-
-import spack.paths
-import spack.util.git
-
 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.24.0.dev0"
+__version__ = "0.23.0.dev0"
 spack_version = __version__
@@ -26,58 +19,4 @@ def __try_int(v):
 spack_version_info = tuple([__try_int(v) for v in __version__.split(".")])


-def get_spack_commit() -> Optional[str]:
-    """Get the Spack git commit sha.
-
-    Returns:
-        (str or None) the commit sha if available, otherwise None
-    """
-    git_path = os.path.join(spack.paths.prefix, ".git")
-    if not os.path.exists(git_path):
-        return None
-
-    git = spack.util.git.git()
-    if not git:
-        return None
-
-    rev = git(
-        "-C",
-        spack.paths.prefix,
-        "rev-parse",
-        "HEAD",
-        output=str,
-        error=os.devnull,
-        fail_on_error=False,
-    )
-    if git.returncode != 0:
-        return None
-
-    match = re.match(r"[a-f\d]{7,}$", rev)
-    return match.group(0) if match else None
-
-
-def get_version() -> str:
-    """Get a descriptive version of this instance of Spack.
-
-    Outputs '<PEP440 version> (<git commit sha>)'.
-
-    The commit sha is only added when available.
-    """
-    commit = get_spack_commit()
-    if commit:
-        return f"{spack_version} ({commit})"
-    return spack_version
-
-
-def get_short_version() -> str:
-    """Short Spack version."""
-    return f"{spack_version_info[0]}.{spack_version_info[1]}"
-
-
-__all__ = [
-    "spack_version_info",
-    "spack_version",
-    "get_version",
-    "get_spack_commit",
-    "get_short_version",
-]
+__all__ = ["spack_version_info", "spack_version"]
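Note: both sides keep `spack_version_info` as a tuple of ints, so version gates compare naturally. Hedged sketch (assumes a Spack checkout on the import path):

    import spack

    if spack.spack_version_info[:2] >= (0, 23):
        print("running", spack.spack_version)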
@@ -39,7 +39,6 @@ def _search_duplicate_compilers(error_cls):
 import collections
 import collections.abc
 import glob
-import inspect
 import io
 import itertools
 import os
@@ -51,12 +50,10 @@ def _search_duplicate_compilers(error_cls):
 from urllib.request import urlopen

 import llnl.util.lang
-from llnl.string import plural

-import spack.builder
 import spack.config
-import spack.fetch_strategy
 import spack.patch
+import spack.paths
 import spack.repo
 import spack.spec
 import spack.util.crypto
@@ -284,7 +281,7 @@ def _avoid_mismatched_variants(error_cls):
            pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
            for variant in current_spec.variants.values():
                # Variant does not exist at all
-                if variant.name not in pkg_cls.variant_names():
+                if variant.name not in pkg_cls.variants:
                    summary = (
                        f"Setting a preference for the '{pkg_name}' package to the "
                        f"non-existing variant '{variant.name}'"
@@ -293,8 +290,9 @@ def _avoid_mismatched_variants(error_cls):
                    continue

                # Variant cannot accept this value
+                s = spack.spec.Spec(pkg_name)
                try:
-                    spack.variant.prevalidate_variant_value(pkg_cls, variant, strict=True)
+                    s.update_variant_validate(variant.name, variant.value)
                except Exception:
                    summary = (
                        f"Setting the variant '{variant.name}' of the '{pkg_name}' package "
@@ -388,14 +386,6 @@ def _make_config_error(config_data, summary, error_cls):
 )


-package_deprecated_attributes = AuditClass(
-    group="packages",
-    tag="PKG-DEPRECATED-ATTRIBUTES",
-    description="Sanity checks to preclude use of deprecated package attributes",
-    kwargs=("pkgs",),
-)
-
-
 package_properties = AuditClass(
     group="packages",
     tag="PKG-PROPERTIES",
@@ -414,23 +404,22 @@ def _make_config_error(config_data, summary, error_cls):
 )


-@package_properties
+@package_directives
 def _check_build_test_callbacks(pkgs, error_cls):
-    """Ensure stand-alone test methods are not included in build-time callbacks.
-
-    Test methods are for checking the installed software as stand-alone tests.
-    They could also be called during the post-install phase of a build.
-    """
+    """Ensure stand-alone test method is not included in build-time callbacks"""
     errors = []
     for pkg_name in pkgs:
         pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
         test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)

-        has_test_method = test_callbacks and any([m.startswith("test_") for m in test_callbacks])
+        # TODO (post-34236): "test*"->"test_*" once remove deprecated methods
+        # TODO (post-34236): "test"->"test_" once remove deprecated methods
+        has_test_method = test_callbacks and any([m.startswith("test") for m in test_callbacks])
         if has_test_method:
-            msg = f"Package {pkg_name} includes stand-alone test methods in build-time checks."
-            callbacks = ", ".join(test_callbacks)
-            instr = f"Remove the following from 'build_time_test_callbacks': {callbacks}"
+            msg = '{0} package contains "test*" method(s) in ' "build_time_test_callbacks"
+            instr = 'Remove all methods whose names start with "test" from: [{0}]'.format(
+                ", ".join(test_callbacks)
+            )
             errors.append(error_cls(msg.format(pkg_name), [instr]))

     return errors
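Note: every audit in this file follows the same shape on either side of the diff — a decorator registers the callback under a group, and the callback returns a list of `error_cls` items. A hedged, hypothetical example in that shape (not a real Spack audit):

    @package_properties
    def _example_underscore_check(pkgs, error_cls):  # hypothetical audit
        errors = []
        for pkg_name in pkgs:
            if "_" in pkg_name:
                errors.append(error_cls(f"'{pkg_name}' contains an underscore", []))
        return errors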
@@ -528,56 +517,11 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
     return errors


-@package_deprecated_attributes
-def _search_for_deprecated_package_methods(pkgs, error_cls):
-    """Ensure the package doesn't define or use deprecated methods"""
-    DEPRECATED_METHOD = (("test", "a name starting with 'test_'"),)
-    DEPRECATED_USE = (
-        ("self.cache_extra_test_sources(", "cache_extra_test_sources(self, ..)"),
-        ("self.install_test_root(", "install_test_root(self, ..)"),
-        ("self.run_test(", "test_part(self, ..)"),
-    )
-    errors = []
-    for pkg_name in pkgs:
-        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
-        methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
-        method_errors = collections.defaultdict(list)
-        for name, function in methods:
-            for deprecated_name, alternate in DEPRECATED_METHOD:
-                if name == deprecated_name:
-                    msg = f"Rename '{deprecated_name}' method to {alternate} instead."
-                    method_errors[name].append(msg)
-
-            source = inspect.getsource(function)
-            for deprecated_name, alternate in DEPRECATED_USE:
-                if deprecated_name in source:
-                    msg = f"Change '{deprecated_name}' to '{alternate}' in '{name}' method."
-                    method_errors[name].append(msg)
-
-        num_methods = len(method_errors)
-        if num_methods > 0:
-            methods = plural(num_methods, "method", show_n=False)
-            error_msg = (
-                f"Package '{pkg_name}' implements or uses unsupported deprecated {methods}."
-            )
-            instr = [f"Make changes to '{pkg_cls.__module__}':"]
-            for name in sorted(method_errors):
-                instr.extend([f"    {msg}" for msg in method_errors[name]])
-            errors.append(error_cls(error_msg, instr))
-
-    return errors
-
-
 @package_properties
 def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
     """Ensure package names are lowercase and consistent"""
-    reserved_names = ("all",)
     badname_regex, errors = re.compile(r"[_A-Z]"), []
     for pkg_name in pkgs:
-        if pkg_name in reserved_names:
-            error_msg = f"The name '{pkg_name}' is reserved, and cannot be used for packages"
-            errors.append(error_cls(error_msg, []))
-
         if badname_regex.search(pkg_name):
             error_msg = f"Package name '{pkg_name}' should be lowercase and must not contain '_'"
             errors.append(error_cls(error_msg, []))
@@ -693,19 +637,19 @@ def invalid_sha256_digest(fetcher):
            return h, True
        return None, False

-    error_msg = f"Package '{pkg_name}' does not use sha256 checksum"
+    error_msg = "Package '{}' does not use sha256 checksum".format(pkg_name)
    details = []
    for v, args in pkg.versions.items():
        fetcher = spack.fetch_strategy.for_package_version(pkg, v)
        digest, is_bad = invalid_sha256_digest(fetcher)
        if is_bad:
-            details.append(f"{pkg_name}@{v} uses {digest}")
+            details.append("{}@{} uses {}".format(pkg_name, v, digest))

    for _, resources in pkg.resources.items():
        for resource in resources:
            digest, is_bad = invalid_sha256_digest(resource.fetcher)
            if is_bad:
-                details.append(f"Resource in '{pkg_name}' uses {digest}")
+                details.append("Resource in '{}' uses {}".format(pkg_name, digest))
    if details:
        errors.append(error_cls(error_msg, details))

@@ -718,17 +662,12 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
     errors = []
     for pkg_name in pkgs:
         pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
-        # values are either ConditionalValue objects or the values themselves
-        build_system_names = set(
-            v.value if isinstance(v, spack.variant.ConditionalValue) else v
-            for _, variant in pkg_cls.variant_definitions("build_system")
-            for v in variant.values
-        )
-        builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in build_system_names]
-
+        buildsystem_variant, _ = pkg_cls.variants["build_system"]
+        buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
+        builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]
+        module = pkg_cls.module
         has_builders_in_package_py = any(
-            spack.builder.get_builder_class(pkg_cls, name) for name in builder_cls_names
+            getattr(module, name, False) for name in builder_cls_names
         )
         if not has_builders_in_package_py:
             continue
@@ -810,7 +749,7 @@ def _uses_deprecated_globals(pkgs, error_cls):

         file = spack.repo.PATH.filename_for_package_name(pkg_name)
         tree = ast.parse(open(file).read())
-        visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args", "std_pip_args"))
+        visitor = DeprecatedMagicGlobals(("std_cmake_args",))
         visitor.visit(tree)
         if visitor.references_to_globals:
             errors.append(
@@ -826,89 +765,6 @@ def _uses_deprecated_globals(pkgs, error_cls):
     return errors


-@package_properties
-def _ensure_test_docstring(pkgs, error_cls):
-    """Ensure stand-alone test methods have a docstring.
-
-    The docstring of a test method is implicitly used as the description of
-    the corresponding test part during test results reporting.
-    """
-    doc_regex = r'\s+("""[^"]+""")'
-
-    errors = []
-    for pkg_name in pkgs:
-        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
-        methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
-        method_names = []
-        for name, test_fn in methods:
-            if not name.startswith("test_"):
-                continue
-
-            # Ensure the test method has a docstring
-            source = inspect.getsource(test_fn)
-            match = re.search(doc_regex, source)
-            if match is None or len(match.group(0).replace('"', "").strip()) == 0:
-                method_names.append(name)
-
-        num_methods = len(method_names)
-        if num_methods > 0:
-            methods = plural(num_methods, "method", show_n=False)
-            docstrings = plural(num_methods, "docstring", show_n=False)
-            msg = f"Package {pkg_name} has test {methods} with empty or missing {docstrings}."
-            names = ", ".join(method_names)
-            instr = [
-                "Docstrings are used as descriptions in test outputs.",
-                f"Add a concise summary to the following {methods} in '{pkg_cls.__module__}':",
-                f"{names}",
-            ]
-            errors.append(error_cls(msg, instr))
-
-    return errors
-
-
-@package_properties
-def _ensure_test_implemented(pkgs, error_cls):
-    """Ensure stand-alone test methods are implemented.
-
-    The test method is also required to be non-empty.
-    """
-
-    def skip(line):
-        ln = line.strip()
-        return ln.startswith("#") or "pass" in ln
-
-    doc_regex = r'\s+("""[^"]+""")'
-
-    errors = []
-    for pkg_name in pkgs:
-        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
-        methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
-        method_names = []
-        for name, test_fn in methods:
-            if not name.startswith("test_"):
-                continue
-
-            source = inspect.getsource(test_fn)
-
-            # Attempt to ensure the test method is implemented.
-            impl = re.sub(doc_regex, r"", source).splitlines()[1:]
-            lines = [ln.strip() for ln in impl if not skip(ln)]
-            if not lines:
-                method_names.append(name)
-
-        num_methods = len(method_names)
-        if num_methods > 0:
-            methods = plural(num_methods, "method", show_n=False)
-            msg = f"Package {pkg_name} has empty or missing test {methods}."
-            names = ", ".join(method_names)
-            instr = [
-                f"Implement or remove the following {methods} from '{pkg_cls.__module__}': {names}"
-            ]
-            errors.append(error_cls(msg, instr))
-
-    return errors
-
-
 @package_https_directives
 def _linting_package_file(pkgs, error_cls):
     """Check for correctness of links"""
@@ -1075,22 +931,20 @@ def check_virtual_with_variants(spec, msg):

                # check variants
                dependency_variants = dep.spec.variants
-                for name, variant in dependency_variants.items():
+                for name, value in dependency_variants.items():
                    try:
-                        spack.variant.prevalidate_variant_value(
-                            dependency_pkg_cls, variant, dep.spec, strict=True
-                        )
+                        v, _ = dependency_pkg_cls.variants[name]
+                        v.validate_or_raise(value, pkg_cls=dependency_pkg_cls)
                    except Exception as e:
                        summary = (
                            f"{pkg_name}: wrong variant used for dependency in 'depends_on()'"
                        )

-                        error_msg = str(e)
                        if isinstance(e, KeyError):
                            error_msg = (
                                f"variant {str(e).strip()} does not exist in package {dep_name}"
-                                f" in package '{dep_name}'"
                            )
+                        error_msg += f" in package '{dep_name}'"

                        errors.append(
                            error_cls(summary=summary, details=[error_msg, f"in {filename}"])
@@ -1102,38 +956,39 @@ def check_virtual_with_variants(spec, msg):
 @package_directives
 def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
     """Ensures that variant defaults are present and parsable from cli"""

-    def check_variant(pkg_cls, variant, vname):
-        # bool is a subclass of int in python. Permitting a default that is an instance
-        # of 'int' means both foo=false and foo=0 are accepted. Other falsish values are
-        # not allowed, since they can't be parsed from CLI ('foo=')
-        default_is_parsable = isinstance(variant.default, int) or variant.default
-
-        if not default_is_parsable:
-            msg = f"Variant '{vname}' of package '{pkg_cls.name}' has an unparsable default value"
-            return [error_cls(msg, [])]
-
-        try:
-            vspec = variant.make_default()
-        except spack.variant.MultipleValuesInExclusiveVariantError:
-            msg = f"Can't create default value for variant '{vname}' in package '{pkg_cls.name}'"
-            return [error_cls(msg, [])]
-
-        try:
-            variant.validate_or_raise(vspec, pkg_cls.name)
-        except spack.variant.InvalidVariantValueError:
-            msg = "Default value of variant '{vname}' in package '{pkg.name}' is invalid"
-            question = "Is it among the allowed values?"
-            return [error_cls(msg, [question])]
-
-        return []
-
     errors = []
     for pkg_name in pkgs:
         pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
-        for vname in pkg_cls.variant_names():
-            for _, variant_def in pkg_cls.variant_definitions(vname):
-                errors.extend(check_variant(pkg_cls, variant_def, vname))
+        for variant_name, entry in pkg_cls.variants.items():
+            variant, _ = entry
+            default_is_parsable = (
+                # Permitting a default that is an instance on 'int' permits
+                # to have foo=false or foo=0. Other falsish values are
+                # not allowed, since they can't be parsed from cli ('foo=')
+                isinstance(variant.default, int)
+                or variant.default
+            )
+            if not default_is_parsable:
+                error_msg = "Variant '{}' of package '{}' has a bad default value"
+                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
+                continue
+
+            try:
+                vspec = variant.make_default()
+            except spack.variant.MultipleValuesInExclusiveVariantError:
+                error_msg = "Cannot create a default value for the variant '{}' in package '{}'"
+                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
+                continue
+
+            try:
+                variant.validate_or_raise(vspec, pkg_cls=pkg_cls)
+            except spack.variant.InvalidVariantValueError:
+                error_msg = (
+                    "The default value of the variant '{}' in package '{}' failed validation"
+                )
+                question = "Is it among the allowed values?"
+                errors.append(error_cls(error_msg.format(variant_name, pkg_name), [question]))
+
     return errors
@@ -1143,11 +998,11 @@ def _ensure_variants_have_descriptions(pkgs, error_cls):
     errors = []
     for pkg_name in pkgs:
         pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
-        for name in pkg_cls.variant_names():
-            for when, variant in pkg_cls.variant_definitions(name):
-                if not variant.description:
-                    msg = f"Variant '{name}' in package '{pkg_name}' is missing a description"
-                    errors.append(error_cls(msg, []))
+        for variant_name, entry in pkg_cls.variants.items():
+            variant, _ = entry
+            if not variant.description:
+                error_msg = "Variant '{}' in package '{}' is missing a description"
+                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))

     return errors

@@ -1204,26 +1059,29 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls


 def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
+    variant_exceptions = (
+        spack.variant.InconsistentValidationError,
+        spack.variant.MultipleValuesInExclusiveVariantError,
+        spack.variant.InvalidVariantValueError,
+        KeyError,
+    )
     errors = []
-    variant_names = pkg.variant_names()
-    summary = f"{pkg.name}: wrong variant in '{directive}' directive"
-    filename = spack.repo.PATH.filename_for_package_name(pkg.name)
-
     for name, v in constraint.variants.items():
-        if name not in variant_names:
-            msg = f"variant {name} does not exist in {pkg.name}"
-            errors.append(error_cls(summary=summary, details=[msg, f"in {filename}"]))
-            continue
-
         try:
-            spack.variant.prevalidate_variant_value(pkg, v, constraint, strict=True)
-        except (
-            spack.variant.InconsistentValidationError,
-            spack.variant.MultipleValuesInExclusiveVariantError,
-            spack.variant.InvalidVariantValueError,
-        ) as e:
-            msg = str(e).strip()
-            errors.append(error_cls(summary=summary, details=[msg, f"in {filename}"]))
+            variant, _ = pkg.variants[name]
+            variant.validate_or_raise(v, pkg_cls=pkg)
+        except variant_exceptions as e:
+            summary = pkg.name + ': wrong variant in "{0}" directive'
+            summary = summary.format(directive)
+            filename = spack.repo.PATH.filename_for_package_name(pkg.name)
+
+            error_msg = str(e).strip()
+            if isinstance(e, KeyError):
+                error_msg = "the variant {0} does not exist".format(error_msg)
+
+            err = error_cls(summary=summary, details=[error_msg, "in " + filename])
+
+            errors.append(err)

     return errors

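Note: the `+` side's `isinstance(e, KeyError)` branch exists because a missing variant surfaces as a `KeyError` whose `str()` is just the quoted key. Runnable illustration:

    variants = {"shared": object()}
    try:
        variants["static"]
    except KeyError as e:
        print("the variant {0} does not exist".format(str(e).strip()))  # -> 'static'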
@@ -1261,10 +1119,9 @@ def _extracts_errors(triggers, summary):
            for dname in dnames
        )

-    for when, variants_by_name in pkg_cls.variants.items():
-        for vname, variant in variants_by_name.items():
-            summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant"
-            errors.extend(_extracts_errors([when], summary))
+    for vname, (variant, triggers) in pkg_cls.variants.items():
+        summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant"
+        errors.extend(_extracts_errors(triggers, summary))

    for when, providers, details in _error_items(pkg_cls.provided):
        errors.extend(
@@ -33,20 +33,20 @@
|
|||||||
from llnl.util.symlink import readlink
|
from llnl.util.symlink import readlink
|
||||||
|
|
||||||
import spack.caches
|
import spack.caches
|
||||||
|
import spack.cmd
|
||||||
import spack.config as config
|
import spack.config as config
|
||||||
import spack.database as spack_db
|
import spack.database as spack_db
|
||||||
import spack.deptypes as dt
|
|
||||||
import spack.error
|
import spack.error
|
||||||
import spack.hash_types as ht
|
import spack.hash_types as ht
|
||||||
import spack.hooks
|
import spack.hooks
|
||||||
import spack.hooks.sbang
|
import spack.hooks.sbang
|
||||||
import spack.mirrors.mirror
|
import spack.mirror
|
||||||
import spack.oci.image
|
import spack.oci.image
|
||||||
import spack.oci.oci
|
import spack.oci.oci
|
||||||
import spack.oci.opener
|
import spack.oci.opener
|
||||||
import spack.paths
|
|
||||||
import spack.platforms
|
import spack.platforms
|
||||||
import spack.relocate as relocate
|
import spack.relocate as relocate
|
||||||
|
import spack.repo
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.stage
|
import spack.stage
|
||||||
import spack.store
|
import spack.store
|
||||||
@@ -87,8 +87,6 @@
 from spack.stage import Stage
 from spack.util.executable import which

-from .enums import InstallRecordStatus
-
 BUILD_CACHE_RELATIVE_PATH = "build_cache"
 BUILD_CACHE_KEYS_RELATIVE_PATH = "_pgp"

@@ -254,7 +252,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):

         spec_list = [
             s
-            for s in db.query_local(installed=InstallRecordStatus.ANY)
+            for s in db.query_local(installed=any, in_buildcache=any)
             if s.external or db.query_local_by_spec_hash(s.dag_hash()).in_buildcache
         ]

@@ -369,7 +367,7 @@ def update(self, with_cooldown=False):
         on disk under ``_index_cache_root``)."""
         self._init_local_index_cache()
         configured_mirror_urls = [
-            m.fetch_url for m in spack.mirrors.mirror.MirrorCollection(binary=True).values()
+            m.fetch_url for m in spack.mirror.MirrorCollection(binary=True).values()
         ]
         items_to_remove = []
         spec_cache_clear_needed = False
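Note: the recurring one-line changes in this file are a module rename. The newer side of the comparison keeps Mirror and MirrorCollection in spack.mirrors.mirror, while the older side uses the flat spack.mirror module. A minimal compatibility shim, assuming only the two import paths visible in the hunks above (this is a sketch, not code from either branch):

    # Hypothetical shim: prefer the newer package layout, fall back to the old one.
    try:
        from spack.mirrors.mirror import Mirror, MirrorCollection
    except ImportError:
        from spack.mirror import Mirror, MirrorCollection

    binary_mirrors = MirrorCollection(binary=True)  # same call on both sides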
@@ -715,32 +713,15 @@ def get_buildfile_manifest(spec):
     return data


-def deps_to_relocate(spec):
-    """Return the transitive link and direct run dependencies of the spec.
-
-    This is a special traversal for dependencies we need to consider when relocating a package.
-
-    Package binaries, scripts, and other files may refer to the prefixes of dependencies, so
-    we need to rewrite those locations when dependencies are in a different place at install time
-    than they were at build time.
-
-    This traversal covers transitive link dependencies and direct run dependencies because:
-
-    1. Spack adds RPATHs for transitive link dependencies so that packages can find needed
-       dependency libraries.
-    2. Packages may call any of their *direct* run dependencies (and may bake their paths into
-       binaries or scripts), so we also need to search for run dependency prefixes when relocating.
-
-    This returns a deduplicated list of transitive link dependencies and direct run dependencies.
-    """
-    deps = [
-        s
+def hashes_to_prefixes(spec):
+    """Return a dictionary of hashes to prefixes for a spec and its deps, excluding externals"""
+    return {
+        s.dag_hash(): str(s.prefix)
         for s in itertools.chain(
             spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
         )
         if not s.external
-    ]
-    return llnl.util.lang.dedupe(deps, key=lambda s: s.dag_hash())
+    }


 def get_buildinfo_dict(spec):
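Note: both versions of this function walk the same dependency set, transitive link dependencies plus direct run dependencies with externals excluded, and differ only in what they return (a deduplicated list of specs vs. a hash-to-prefix dict). A sketch of the shared traversal, under a hypothetical name so it clashes with neither function:

    import itertools

    def relocation_candidates(spec):
        # Transitive link deps plus direct run deps, externals excluded;
        # this mirrors the comprehension both sides of the hunk share.
        return [
            s
            for s in itertools.chain(
                spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
            )
            if not s.external
        ]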
@@ -756,7 +737,7 @@ def get_buildinfo_dict(spec):
         "relocate_binaries": manifest["binary_to_relocate"],
         "relocate_links": manifest["link_to_relocate"],
         "hardlinks_deduped": manifest["hardlinks_deduped"],
-        "hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in deps_to_relocate(spec)},
+        "hash_to_prefix": hashes_to_prefixes(spec),
     }


@@ -1176,7 +1157,7 @@ def _url_upload_tarball_and_specfile(


 class Uploader:
-    def __init__(self, mirror: spack.mirrors.mirror.Mirror, force: bool, update_index: bool):
+    def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool):
         self.mirror = mirror
         self.force = force
         self.update_index = update_index
@@ -1184,9 +1165,6 @@ def __init__(self, mirror: spack.mirrors.mirror.Mirror, force: bool, update_inde
         self.tmpdir: str
         self.executor: concurrent.futures.Executor

-        # Verify if the mirror meets the requirements to push
-        self.mirror.ensure_mirror_usable("push")
-
     def __enter__(self):
         self._tmpdir = tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root())
         self._executor = spack.util.parallel.make_concurrent_executor()
@@ -1224,7 +1202,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
 class OCIUploader(Uploader):
     def __init__(
         self,
-        mirror: spack.mirrors.mirror.Mirror,
+        mirror: spack.mirror.Mirror,
         force: bool,
         update_index: bool,
         base_image: Optional[str],
@@ -1273,7 +1251,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
 class URLUploader(Uploader):
     def __init__(
         self,
-        mirror: spack.mirrors.mirror.Mirror,
+        mirror: spack.mirror.Mirror,
         force: bool,
         update_index: bool,
         signing_key: Optional[str],
@@ -1297,7 +1275,7 @@ def push(


 def make_uploader(
-    mirror: spack.mirrors.mirror.Mirror,
+    mirror: spack.mirror.Mirror,
     force: bool = False,
     update_index: bool = False,
     signing_key: Optional[str] = None,
@@ -1469,9 +1447,7 @@ def _oci_push_pkg_blob(
     filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")

     # Create an oci.image.layer aka tarball of the package
-    compressed_tarfile_checksum, tarfile_checksum = _do_create_tarball(
-        filename, spec.prefix, get_buildinfo_dict(spec)
-    )
+    compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename)

     blob = spack.oci.oci.Blob(
         Digest.from_sha256(compressed_tarfile_checksum),
@@ -1654,6 +1630,7 @@ def _oci_push(
     Dict[str, spack.oci.oci.Blob],
     List[Tuple[Spec, BaseException]],
 ]:
+
     # Spec dag hash -> blob
     checksums: Dict[str, spack.oci.oci.Blob] = {}

@@ -1953,9 +1930,9 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No
         "signature_verified": "true-if-binary-pkg-was-already-verified"
     }
     """
-    configured_mirrors: Iterable[spack.mirrors.mirror.Mirror] = (
-        spack.mirrors.mirror.MirrorCollection(binary=True).values()
-    )
+    configured_mirrors: Iterable[spack.mirror.Mirror] = spack.mirror.MirrorCollection(
+        binary=True
+    ).values()
     if not configured_mirrors:
         tty.die("Please add a spack mirror to allow download of pre-compiled packages.")

@@ -1980,7 +1957,7 @@ def fetch_url_to_mirror(url):
         for mirror in configured_mirrors:
             if mirror.fetch_url == url:
                 return mirror
-        return spack.mirrors.mirror.Mirror(url)
+        return spack.mirror.Mirror(url)

     mirrors = [fetch_url_to_mirror(url) for url in mirror_urls]

@@ -2223,36 +2200,11 @@ def relocate_package(spec):
     # First match specific prefix paths. Possibly the *local* install prefix
     # of some dependency is in an upstream, so we cannot assume the original
     # spack store root can be mapped uniformly to the new spack store root.
-    #
-    # If the spec is spliced, we need to handle the simultaneous mapping
-    # from the old install_tree to the new install_tree and from the build_spec
-    # to the spliced spec.
-    # Because foo.build_spec is foo for any non-spliced spec, we can simplify
-    # by checking for spliced-in nodes by checking for nodes not in the build_spec
-    # without any explicit check for whether the spec is spliced.
-    # An analog in this algorithm is any spec that shares a name or provides the same virtuals
-    # in the context of the relevant root spec. This ensures that the analog for a spec s
-    # is the spec that s replaced when we spliced.
-    relocation_specs = deps_to_relocate(spec)
-    build_spec_ids = set(id(s) for s in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD))
-    for s in relocation_specs:
-        analog = s
-        if id(s) not in build_spec_ids:
-            analogs = [
-                d
-                for d in spec.build_spec.traverse(deptype=dt.ALL & ~dt.BUILD)
-                if s._splice_match(d, self_root=spec, other_root=spec.build_spec)
-            ]
-            if analogs:
-                # Prefer same-name analogs and prefer higher versions
-                # This matches the preferences in Spec.splice, so we will find same node
-                analog = max(analogs, key=lambda a: (a.name == s.name, a.version))
-
-        lookup_dag_hash = analog.dag_hash()
-        if lookup_dag_hash in hash_to_old_prefix:
-            old_dep_prefix = hash_to_old_prefix[lookup_dag_hash]
-            prefix_to_prefix_bin[old_dep_prefix] = str(s.prefix)
-            prefix_to_prefix_text[old_dep_prefix] = str(s.prefix)
+    for dag_hash, new_dep_prefix in hashes_to_prefixes(spec).items():
+        if dag_hash in hash_to_old_prefix:
+            old_dep_prefix = hash_to_old_prefix[dag_hash]
+            prefix_to_prefix_bin[old_dep_prefix] = new_dep_prefix
+            prefix_to_prefix_text[old_dep_prefix] = new_dep_prefix

     # Only then add the generic fallback of install prefix -> install prefix.
     prefix_to_prefix_text[old_prefix] = new_prefix
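Note: both sides populate prefix_to_prefix_bin/prefix_to_prefix_text with specific dependency prefixes first and only then add the generic install-root mapping, because a longer, more specific prefix must win over the store-root fallback. A self-contained sketch of applying such a mapping (hypothetical helper, not code from either branch):

    def apply_prefix_map(text: str, prefix_map: dict) -> str:
        # Rewrite the longest prefixes first so the generic store-root entry
        # cannot shadow a more specific dependency prefix.
        for old, new in sorted(prefix_map.items(), key=lambda kv: -len(kv[0])):
            text = text.replace(old, new)
        return text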
@@ -2334,9 +2286,7 @@ def is_backup_file(file):
     if not codesign:
         return
     for binary in changed_files:
-        # preserve the original inode by running codesign on a copy
-        with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
-            codesign("-fs-", tmp_binary)
+        codesign("-fs-", binary)

     # If we are installing back to the same location
     # relocate the sbang location if the spack directory changed
@@ -2569,13 +2519,7 @@ def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
     return pkg_prefix


-def install_root_node(
-    spec: spack.spec.Spec,
-    unsigned=False,
-    force: bool = False,
-    sha256: Optional[str] = None,
-    allow_missing: bool = False,
-) -> None:
+def install_root_node(spec, unsigned=False, force=False, sha256=None):
     """Install the root node of a concrete spec from a buildcache.

     Checking the sha256 sum of a node before installation is usually needed only
@@ -2584,10 +2528,11 @@ def install_root_node(

     Args:
         spec: spec to be installed (note that only the root node will be installed)
-        unsigned: if True allows installing unsigned binaries
-        force: force installation if the spec is already present in the local store
-        sha256: optional sha256 of the binary package, to be checked before installation
-        allow_missing: when true, allows installing a node with missing dependencies
+        unsigned (bool): if True allows installing unsigned binaries
+        force (bool): force installation if the spec is already present in the
+            local store
+        sha256 (str): optional sha256 of the binary package, to be checked
+            before installation
     """
     # Early termination
     if spec.external or spec.virtual:
@@ -2597,10 +2542,10 @@ def install_root_node(
         warnings.warn("Package for spec {0} already installed.".format(spec.format()))
         return

-    download_result = download_tarball(spec.build_spec, unsigned)
+    download_result = download_tarball(spec, unsigned)
     if not download_result:
         msg = 'download of binary cache file for spec "{0}" failed'
-        raise RuntimeError(msg.format(spec.build_spec.format()))
+        raise RuntimeError(msg.format(spec.format()))

     if sha256:
         checker = spack.util.crypto.Checker(sha256)
@@ -2619,13 +2564,8 @@ def install_root_node(
     with spack.util.path.filter_padding():
         tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
         extract_tarball(spec, download_result, force)
-        spec.package.windows_establish_runtime_linkage()
-        if spec.spliced:  # overwrite old metadata with new
-            spack.store.STORE.layout.write_spec(
-                spec, spack.store.STORE.layout.spec_file_path(spec)
-            )
         spack.hooks.post_install(spec, False)
-        spack.store.STORE.db.add(spec, allow_missing=allow_missing)
+        spack.store.STORE.db.add(spec)


 def install_single_spec(spec, unsigned=False, force=False):
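Note: the two install_root_node signatures are not call-compatible: the newer side adds allow_missing and downloads spec.build_spec (so spliced specs fetch the tarball of the spec they were built from), while the older side takes the plain positional form. A hedged call sketch against the newer keyword signature (expected_sha256 is a hypothetical variable):

    install_root_node(
        spec,
        unsigned=True,           # skip signature verification
        force=True,              # reinstall even if already present
        sha256=expected_sha256,  # checksum to verify before install
        allow_missing=True,      # tolerate nodes with missing deps
    )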
@@ -2650,7 +2590,7 @@ def try_direct_fetch(spec, mirrors=None):
     specfile_is_signed = False
     found_specs = []

-    binary_mirrors = spack.mirrors.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()
+    binary_mirrors = spack.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()

     for mirror in binary_mirrors:
         buildcache_fetch_url_json = url_util.join(
@@ -2711,7 +2651,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
     if spec is None:
         return []

-    if not spack.mirrors.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
+    if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
         tty.debug("No Spack mirrors are currently configured")
         return {}

@@ -2750,16 +2690,13 @@ def clear_spec_cache():

 def get_keys(install=False, trust=False, force=False, mirrors=None):
     """Get pgp public keys available on mirror with suffix .pub"""
-    mirror_collection = mirrors or spack.mirrors.mirror.MirrorCollection(binary=True)
+    mirror_collection = mirrors or spack.mirror.MirrorCollection(binary=True)

     if not mirror_collection:
         tty.die("Please add a spack mirror to allow " + "download of build caches.")

     for mirror in mirror_collection.values():
         fetch_url = mirror.fetch_url
-        # TODO: oci:// does not support signing.
-        if fetch_url.startswith("oci://"):
-            continue
         keys_url = url_util.join(
             fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH
         )
@@ -2805,7 +2742,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):


 def _url_push_keys(
-    *mirrors: Union[spack.mirrors.mirror.Mirror, str],
+    *mirrors: Union[spack.mirror.Mirror, str],
     keys: List[str],
     tmpdir: str,
     update_index: bool = False,
@@ -2872,7 +2809,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):

     """
     rebuilds = {}
-    for mirror in spack.mirrors.mirror.MirrorCollection(mirrors, binary=True).values():
+    for mirror in spack.mirror.MirrorCollection(mirrors, binary=True).values():
         tty.debug("Checking for built specs at {0}".format(mirror.fetch_url))

         rebuild_list = []
@@ -2916,7 +2853,7 @@ def _download_buildcache_entry(mirror_root, descriptions):


 def download_buildcache_entry(file_descriptions, mirror_url=None):
-    if not mirror_url and not spack.mirrors.mirror.MirrorCollection(binary=True):
+    if not mirror_url and not spack.mirror.MirrorCollection(binary=True):
         tty.die(
             "Please provide or add a spack mirror to allow " + "download of buildcache entries."
         )
@@ -2925,7 +2862,7 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
         mirror_root = os.path.join(mirror_url, BUILD_CACHE_RELATIVE_PATH)
         return _download_buildcache_entry(mirror_root, file_descriptions)

-    for mirror in spack.mirrors.mirror.MirrorCollection(binary=True).values():
+    for mirror in spack.mirror.MirrorCollection(binary=True).values():
         mirror_root = os.path.join(mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH)

         if _download_buildcache_entry(mirror_root, file_descriptions):
@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Common basic functions used through the spack.bootstrap package"""
 import fnmatch
-import glob
 import importlib
 import os.path
 import re
@@ -61,19 +60,10 @@ def _try_import_from_store(
             python, *_ = candidate_spec.dependencies("python-venv")
         else:
             python, *_ = candidate_spec.dependencies("python")
-
-        # if python is installed, ask it for the layout
-        if python.installed:
-            module_paths = [
-                os.path.join(candidate_spec.prefix, python.package.purelib),
-                os.path.join(candidate_spec.prefix, python.package.platlib),
-            ]
-        # otherwise search for the site-packages directory
-        # (clingo from binaries with truncated python-venv runtime)
-        else:
-            module_paths = glob.glob(
-                os.path.join(candidate_spec.prefix, "lib", "python*", "site-packages")
-            )
+        module_paths = [
+            os.path.join(candidate_spec.prefix, python.package.purelib),
+            os.path.join(candidate_spec.prefix, python.package.platlib),
+        ]
         path_before = list(sys.path)

         # NOTE: try module_paths first and last, last allows an existing version in path
@@ -14,7 +14,6 @@
 import spack.compilers
 import spack.config
 import spack.environment
-import spack.modules
 import spack.paths
 import spack.platforms
 import spack.repo
@@ -35,19 +35,23 @@
 from llnl.util.lang import GroupedExceptionHandler

 import spack.binary_distribution
-import spack.concretize
 import spack.config
 import spack.detection
-import spack.mirrors.mirror
+import spack.environment
+import spack.modules
+import spack.paths
 import spack.platforms
+import spack.platforms.linux
+import spack.repo
 import spack.spec
 import spack.store
 import spack.user_environment
+import spack.util.environment
 import spack.util.executable
 import spack.util.path
 import spack.util.spack_yaml
+import spack.util.url
 import spack.version
-from spack.installer import PackageInstaller

 from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
 from .clingo import ClingoBootstrapConcretizer
@@ -92,7 +96,12 @@ def __init__(self, conf: ConfigDictionary) -> None:
         self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

         # Promote (relative) paths to file urls
-        self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url
+        url = conf["info"]["url"]
+        if spack.util.url.is_path_instead_of_url(url):
+            if not os.path.isabs(url):
+                url = os.path.join(self.metadata_dir, url)
+            url = spack.util.url.path_to_file_url(url)
+        self.url = url

     @property
     def mirror_scope(self) -> spack.config.InternalConfigScope:
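Note: both sides promote a possibly-relative metadata path to a fetchable URL; the newer branch delegates to Mirror(...).fetch_url, the older one spells the promotion out with spack.util.url helpers. A standalone sketch using only the calls visible in this hunk:

    import os.path

    import spack.util.url

    def promote_to_url(url: str, metadata_dir: str) -> str:
        # Resolve relative paths against the metadata directory, then turn
        # plain paths into file:// URLs; real URLs pass through untouched.
        if spack.util.url.is_path_instead_of_url(url):
            if not os.path.isabs(url):
                url = os.path.join(metadata_dir, url)
            url = spack.util.url.path_to_file_url(url)
        return url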
@@ -171,15 +180,7 @@ def _install_by_hash(
         query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
         for match in spack.store.find([f"/{pkg_hash}"], multiple=False, query_fn=query):
             spack.binary_distribution.install_root_node(
-                # allow_missing is true since when bootstrapping clingo we truncate runtime
-                # deps such as gcc-runtime, since we link libstdc++ statically, and the other
-                # further runtime deps are loaded by the Python interpreter. This just silences
-                # warnings about missing dependencies.
-                match,
-                unsigned=True,
-                force=True,
-                sha256=pkg_sha256,
-                allow_missing=True,
+                match, unsigned=True, force=True, sha256=pkg_sha256
             )

     def _install_and_test(
@@ -272,17 +273,17 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
             bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
             concrete_spec = bootstrapper.concretize()
         else:
-            abstract_spec = spack.spec.Spec(
+            concrete_spec = spack.spec.Spec(
                 abstract_spec_str + " ^" + spec_for_current_python()
             )
-            concrete_spec = spack.concretize.concretized(abstract_spec)
+            concrete_spec.concretize()

         msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
         tty.debug(msg.format(module, abstract_spec_str))

         # Install the spec that should make the module importable
         with spack.config.override(self.mirror_scope):
-            PackageInstaller([concrete_spec.package], fail_fast=True).install()
+            concrete_spec.package.do_install(fail_fast=True)

         if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
             self.last_search = info
@@ -301,11 +302,11 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bo
         # might reduce compilation time by a fair amount
         _add_externals_if_missing()

-        concrete_spec = spack.concretize.concretized(spack.spec.Spec(abstract_spec_str))
+        concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
         msg = "[BOOTSTRAP] Try installing '{0}' from sources"
         tty.debug(msg.format(abstract_spec_str))
         with spack.config.override(self.mirror_scope):
-            PackageInstaller([concrete_spec.package], fail_fast=True).install()
+            concrete_spec.package.do_install()
         if _executables_in_store(executables, concrete_spec, query_info=info):
             self.last_search = info
             return True
@@ -603,10 +604,7 @@ def bootstrapping_sources(scope: Optional[str] = None):
         current = copy.copy(entry)
         metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
         metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
-        try:
-            with open(metadata_yaml, encoding="utf-8") as stream:
-                current.update(spack.util.spack_yaml.load(stream))
-            list_of_sources.append(current)
-        except OSError:
-            pass
+        with open(metadata_yaml, encoding="utf-8") as stream:
+            current.update(spack.util.spack_yaml.load(stream))
+        list_of_sources.append(current)
     return list_of_sources
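Note: the newer side wraps the metadata read in try/except OSError, so a bootstrapping source whose metadata.yaml is missing or unreadable is skipped instead of aborting the whole listing. The pattern in isolation (hypothetical helper name):

    import spack.util.spack_yaml

    def try_load_yaml(path):
        # Return None instead of raising when the file cannot be opened.
        try:
            with open(path, encoding="utf-8") as stream:
                return spack.util.spack_yaml.load(stream)
        except OSError:
            return None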
@@ -14,9 +14,9 @@
 from llnl.util import tty

 import spack.environment
-import spack.spec
 import spack.tengine
-import spack.util.path
+import spack.util.cpus
+import spack.util.executable

 from ._common import _root_spec
 from .config import root_path, spec_for_current_python, store_path
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -37,17 +37,13 @@
 import multiprocessing
 import os
 import re
-import stat
 import sys
 import traceback
 import types
 from collections import defaultdict
 from enum import Flag, auto
 from itertools import chain
-from multiprocessing.connection import Connection
-from typing import Callable, Dict, List, Optional, Set, Tuple
+from typing import Dict, List, Set, Tuple

-import archspec.cpu
-
 import llnl.util.tty as tty
 from llnl.string import plural
@@ -55,6 +51,7 @@
 from llnl.util.lang import dedupe, stable_partition
 from llnl.util.symlink import symlink
 from llnl.util.tty.color import cescape, colorize
+from llnl.util.tty.log import MultiProcessFd

 import spack.build_systems.cmake
 import spack.build_systems.meson
@@ -64,21 +61,26 @@
 import spack.config
 import spack.deptypes as dt
 import spack.error
-import spack.multimethod
+import spack.main
 import spack.package_base
 import spack.paths
 import spack.platforms
+import spack.repo
 import spack.schema.environment
 import spack.spec
 import spack.stage
 import spack.store
 import spack.subprocess_context
+import spack.user_environment
 import spack.util.executable
-import spack.util.libc
+import spack.util.path
+import spack.util.pattern
 from spack import traverse
 from spack.context import Context
-from spack.error import InstallError, NoHeadersError, NoLibrariesError
+from spack.error import NoHeadersError, NoLibrariesError
 from spack.install_test import spack_install_test_log
+from spack.installer import InstallError
+from spack.util.cpus import determine_number_of_jobs
 from spack.util.environment import (
     SYSTEM_DIR_CASE_ENTRY,
     EnvironmentModifications,
@@ -90,7 +92,7 @@
 )
 from spack.util.executable import Executable
 from spack.util.log_parse import make_log_context, parse_log_events
-from spack.util.module_cmd import load_module
+from spack.util.module_cmd import load_module, path_from_modules

 #
 # This can be set by the user to globally disable parallel builds.
@@ -361,7 +363,7 @@ def set_compiler_environment_variables(pkg, env):
         _add_werror_handling(keep_werror, env)

     # Set the target parameters that the compiler will add
-    isa_arg = optimization_flags(compiler, spec.target)
+    isa_arg = spec.architecture.target.optimization_flags(compiler)
     env.set("SPACK_TARGET_ARGS", isa_arg)

     # Trap spack-tracked compiler flags as appropriate.
@@ -406,65 +408,6 @@ def set_compiler_environment_variables(pkg, env):
     return env


-def optimization_flags(compiler, target):
-    if spack.compilers.is_mixed_toolchain(compiler):
-        msg = (
-            "microarchitecture specific optimizations are not "
-            "supported yet on mixed compiler toolchains [check"
-            f" {compiler.name}@{compiler.version} for further details]"
-        )
-        tty.debug(msg)
-        return ""
-
-    # Try to check if the current compiler comes with a version number or
-    # has an unexpected suffix. If so, treat it as a compiler with a
-    # custom spec.
-    compiler_version = compiler.version
-    version_number, suffix = archspec.cpu.version_components(compiler.version)
-    if not version_number or suffix:
-        try:
-            compiler_version = compiler.real_version
-        except spack.util.executable.ProcessError as e:
-            # log this and just return compiler.version instead
-            tty.debug(str(e))
-
-    try:
-        result = target.optimization_flags(compiler.name, compiler_version.dotted_numeric_string)
-    except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
-        result = ""
-
-    return result
-
-
-class FilterDefaultDynamicLinkerSearchPaths:
-    """Remove rpaths to directories that are default search paths of the dynamic linker."""
-
-    def __init__(self, dynamic_linker: Optional[str]) -> None:
-        # Identify directories by (inode, device) tuple, which handles symlinks too.
-        self.default_path_identifiers: Set[Tuple[int, int]] = set()
-        if not dynamic_linker:
-            return
-        for path in spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker):
-            try:
-                s = os.stat(path)
-                if stat.S_ISDIR(s.st_mode):
-                    self.default_path_identifiers.add((s.st_ino, s.st_dev))
-            except OSError:
-                continue
-
-    def is_dynamic_loader_default_path(self, p: str) -> bool:
-        try:
-            s = os.stat(p)
-            return (s.st_ino, s.st_dev) in self.default_path_identifiers
-        except OSError:
-            return False
-
-    def __call__(self, dirs: List[str]) -> List[str]:
-        if not self.default_path_identifiers:
-            return dirs
-        return [p for p in dirs if not self.is_dynamic_loader_default_path(p)]
-
-
 def set_wrapper_variables(pkg, env):
     """Set environment variables used by the Spack compiler wrapper (which have the prefix
     `SPACK_`) and also add the compiler wrappers to PATH.
@@ -512,7 +455,7 @@ def set_wrapper_variables(pkg, env):
     env.set(SPACK_DEBUG, "TRUE")
     env.set(SPACK_SHORT_SPEC, pkg.spec.short_spec)
     env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
-    env.set(SPACK_DEBUG_LOG_DIR, spack.paths.spack_working_dir)
+    env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)

     if spack.config.get("config:ccache"):
         # Enable ccache in the compiler wrapper
@@ -522,71 +465,69 @@ def set_wrapper_variables(pkg, env):
         env.set("CCACHE_DISABLE", "1")

     # Gather information about various types of dependencies
-    rpath_hashes = set(s.dag_hash() for s in get_rpath_deps(pkg))
-    link_deps = pkg.spec.traverse(root=False, order="topo", deptype=dt.LINK)
-    external_link_deps, nonexternal_link_deps = stable_partition(link_deps, lambda d: d.external)
+    link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
+    rpath_deps = get_rpath_deps(pkg)

     link_dirs = []
     include_dirs = []
     rpath_dirs = []

-    for dep in chain(external_link_deps, nonexternal_link_deps):
-        # TODO: is_system_path is wrong, but even if we knew default -L, -I flags from the compiler
-        # and default search dirs from the dynamic linker, it's not obvious how to avoid a possibly
-        # expensive search in `query.libs.directories` and `query.headers.directories`, which is
-        # what this branch is trying to avoid.
-        if is_system_path(dep.prefix):
-            continue
-        # TODO: as of Spack 0.22, multiple instances of the same package may occur among the link
-        # deps, so keying by name is wrong. In practice it is not problematic: we obtain the same
-        # gcc-runtime / glibc here, and repeatedly add the same dirs that are later deduped.
-        query = pkg.spec[dep.name]
-        dep_link_dirs = []
-        try:
-            # Locating libraries can be time consuming, so log start and finish.
-            tty.debug(f"Collecting libraries for {dep.name}")
-            dep_link_dirs.extend(query.libs.directories)
-            tty.debug(f"Libraries for {dep.name} have been collected.")
-        except NoLibrariesError:
-            tty.debug(f"No libraries found for {dep.name}")
+    def _prepend_all(list_to_modify, items_to_add):
+        # Update the original list (creating a new list would be faster but
+        # may not be convenient)
+        for item in reversed(list(items_to_add)):
+            list_to_modify.insert(0, item)

-        for default_lib_dir in ("lib", "lib64"):
-            default_lib_prefix = os.path.join(dep.prefix, default_lib_dir)
-            if os.path.isdir(default_lib_prefix):
-                dep_link_dirs.append(default_lib_prefix)
+    def update_compiler_args_for_dep(dep):
+        if dep in link_deps and (not is_system_path(dep.prefix)):
+            query = pkg.spec[dep.name]
+            dep_link_dirs = list()
+            try:
+                # In some circumstances (particularly for externals) finding
+                # libraries packages can be time consuming, so indicate that
+                # we are performing this operation (and also report when it
+                # finishes).
+                tty.debug("Collecting libraries for {0}".format(dep.name))
+                dep_link_dirs.extend(query.libs.directories)
+                tty.debug("Libraries for {0} have been collected.".format(dep.name))
+            except NoLibrariesError:
+                tty.debug("No libraries found for {0}".format(dep.name))

-        link_dirs[:0] = dep_link_dirs
-        if dep.dag_hash() in rpath_hashes:
-            rpath_dirs[:0] = dep_link_dirs
+            for default_lib_dir in ["lib", "lib64"]:
+                default_lib_prefix = os.path.join(dep.prefix, default_lib_dir)
+                if os.path.isdir(default_lib_prefix):
+                    dep_link_dirs.append(default_lib_prefix)

-        try:
-            tty.debug(f"Collecting headers for {dep.name}")
-            include_dirs[:0] = query.headers.directories
-            tty.debug(f"Headers for {dep.name} have been collected.")
-        except NoHeadersError:
-            tty.debug(f"No headers found for {dep.name}")
+            _prepend_all(link_dirs, dep_link_dirs)
+            if dep in rpath_deps:
+                _prepend_all(rpath_dirs, dep_link_dirs)

-    # The top-level package is heuristically rpath'ed.
-    for libdir in ("lib64", "lib"):
+            try:
+                _prepend_all(include_dirs, query.headers.directories)
+            except NoHeadersError:
+                tty.debug("No headers found for {0}".format(dep.name))
+
+    for dspec in pkg.spec.traverse(root=False, order="post"):
+        if dspec.external:
+            update_compiler_args_for_dep(dspec)
+
+    # Just above, we prepended entries for -L/-rpath for externals. We
+    # now do this for non-external packages so that Spack-built packages
+    # are searched first for libraries etc.
+    for dspec in pkg.spec.traverse(root=False, order="post"):
+        if not dspec.external:
+            update_compiler_args_for_dep(dspec)
+
+    # The top-level package is always RPATHed. It hasn't been installed yet
+    # so the RPATHs are added unconditionally (e.g. even though lib64/ may
+    # not be created for the install).
+    for libdir in ["lib64", "lib"]:
         lib_path = os.path.join(pkg.prefix, libdir)
         rpath_dirs.insert(0, lib_path)

-    filter_default_dynamic_linker_search_paths = FilterDefaultDynamicLinkerSearchPaths(
-        pkg.compiler.default_dynamic_linker
-    )
-
-    # TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path
-    # branch above). link_dirs should be filtered with entries from _parse_link_paths.
     link_dirs = list(dedupe(filter_system_paths(link_dirs)))
     include_dirs = list(dedupe(filter_system_paths(include_dirs)))
     rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
-    rpath_dirs = filter_default_dynamic_linker_search_paths(rpath_dirs)

-    # TODO: implicit_rpaths is prefiltered by is_system_path, that should be removed in favor of
-    # just this filter.
-    implicit_rpaths = filter_default_dynamic_linker_search_paths(pkg.compiler.implicit_rpaths())
-    if implicit_rpaths:
-        env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))

     # Spack managed directories include the stage, store and upstream stores. We extend this with
     # their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
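Note: the older side prepends dependency directories through the _prepend_all helper, the newer side through slice assignment; for in-place prepending the two are equivalent, which is easy to check in isolation:

    dirs = ["c"]
    # Older helper style: insert items at index 0, one at a time, in reverse.
    for item in reversed(["a", "b"]):
        dirs.insert(0, item)
    assert dirs == ["a", "b", "c"]

    dirs = ["c"]
    # Newer style: slice assignment performs the same prepend in one step.
    dirs[:0] = ["a", "b"]
    assert dirs == ["a", "b", "c"]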
@@ -616,12 +557,14 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
     """
     module = ModuleChangePropagator(pkg)

-    jobs = spack.config.determine_number_of_jobs(parallel=pkg.parallel)
-    module.make_jobs = jobs
     if context == Context.BUILD:
+        module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
         module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
         module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

+    jobs = determine_number_of_jobs(parallel=pkg.parallel)
+    module.make_jobs = jobs
+
     # TODO: make these build deps that can be installed if not found.
     module.make = MakeExecutable("make", jobs)
     module.gmake = MakeExecutable("gmake", jobs)
@@ -789,6 +732,21 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]
     return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)


+def get_rpaths(pkg):
+    """Get a list of all the rpaths for a package."""
+    rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
+    deps = get_rpath_deps(pkg)
+    rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))
+    rpaths.extend(d.prefix.lib64 for d in deps if os.path.isdir(d.prefix.lib64))
+    # Second module is our compiler mod name. We use that to get rpaths from
+    # module show output.
+    if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
+        mod_rpath = path_from_modules([pkg.compiler.modules[1]])
+        if mod_rpath:
+            rpaths.append(mod_rpath)
+    return list(dedupe(filter_system_paths(rpaths)))
+
+
 def load_external_modules(pkg):
     """Traverse a package's spec DAG and load any external modules.

@@ -830,6 +788,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
     # Platform specific setup goes before package specific setup. This is for setting
     # defaults like MACOSX_DEPLOYMENT_TARGET on macOS.
     platform = spack.platforms.by_name(pkg.spec.architecture.platform)
+    target = platform.target(pkg.spec.architecture.target)
     platform.setup_platform_environment(pkg, env_mods)

     tty.debug("setup_package: grabbing modifications from dependencies")
@@ -854,8 +813,15 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
         for mod in pkg.compiler.modules:
             load_module(mod)

+    if target and target.module_name:
+        load_module(target.module_name)
+
     load_external_modules(pkg)

+    implicit_rpaths = pkg.compiler.implicit_rpaths()
+    if implicit_rpaths:
+        env_mods.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))
+
     # Make sure nothing's strange about the Spack environment.
     validate(env_mods, tty.warn)
     env_mods.apply_modifications()
@@ -882,9 +848,6 @@ def __init__(self, *roots: spack.spec.Spec, context: Context):
         elif context == Context.RUN:
             self.root_depflag = dt.RUN | dt.LINK

-    def accept(self, item):
-        return True
-
     def neighbors(self, item):
         spec = item.edge.spec
         if spec.dag_hash() in self.root_hashes:
@@ -922,19 +885,19 @@ def effective_deptypes(
     a flag specifying in what way they do so. The list is ordered topologically
     from root to leaf, meaning that environment modifications should be applied
     in reverse so that dependents override dependencies, not the other way around."""
-    topo_sorted_edges = traverse.traverse_topo_edges_generator(
-        traverse.with_artificial_edges(specs),
-        visitor=EnvironmentVisitor(*specs, context=context),
-        key=traverse.by_dag_hash,
+    visitor = traverse.TopoVisitor(
+        EnvironmentVisitor(*specs, context=context),
+        key=lambda x: x.dag_hash(),
         root=True,
         all_edges=True,
     )
+    traverse.traverse_depth_first_with_visitor(traverse.with_artificial_edges(specs), visitor)

     # Dictionary with "no mode" as default value, so it's easy to write modes[x] |= flag.
     use_modes = defaultdict(lambda: UseMode(0))
     nodes_with_type = []

-    for edge in topo_sorted_edges:
+    for edge in visitor.edges:
         parent, child, depflag = edge.parent, edge.spec, edge.depflag

         # Mark the starting point
@@ -1048,12 +1011,6 @@ def set_all_package_py_globals(self):
             # This includes runtime dependencies, also runtime deps of direct build deps.
             set_package_py_globals(pkg, context=Context.RUN)

-        # Looping over the set of packages a second time
-        # ensures all globals are loaded into the module space prior to
-        # any package setup. This guarantees package setup methods have
-        # access to expected module level definitions such as "spack_cc"
-        for dspec, flag in chain(self.external, self.nonexternal):
-            pkg = dspec.package
             for spec in dspec.dependents():
                 # Note: some specs have dependents that are unreachable from the root, so avoid
                 # setting globals for those.
@@ -1063,15 +1020,6 @@ def set_all_package_py_globals(self):
                 pkg.setup_dependent_package(dependent_module, spec)
                 dependent_module.propagate_changes_to_mro()

-        if self.context == Context.BUILD:
-            pkg = self.specs[0].package
-            module = ModuleChangePropagator(pkg)
-            # std_cmake_args is not sufficiently static to be defined
-            # in set_package_py_globals and is deprecated so its handled
-            # here as a special case
-            module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
-            module.propagate_changes_to_mro()
-
     def get_env_modifications(self) -> EnvironmentModifications:
         """Returns the environment variable modifications for the given input specs and context.
         Environment modifications include:
@@ -1141,61 +1089,45 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
|
|||||||
env.prepend_path("PATH", bin_dir)
|
env.prepend_path("PATH", bin_dir)
|
||||||
|
|
||||||
|
|
||||||
|
def get_cmake_prefix_path(pkg):
|
||||||
|
# Note that unlike modifications_from_dependencies, this does not include
|
||||||
|
# any edits to CMAKE_PREFIX_PATH defined in custom
|
||||||
|
# setup_dependent_build_environment implementations of dependency packages
|
||||||
|
build_deps = set(pkg.spec.dependencies(deptype=("build", "test")))
|
||||||
|
link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
|
||||||
|
build_link_deps = build_deps | link_deps
|
||||||
|
spack_built = []
|
||||||
|
externals = []
|
||||||
|
# modifications_from_dependencies updates CMAKE_PREFIX_PATH by first
|
||||||
|
# prepending all externals and then all non-externals
|
||||||
|
for dspec in pkg.spec.traverse(root=False, order="post"):
|
||||||
|
if dspec in build_link_deps:
|
||||||
|
if dspec.external:
|
||||||
|
externals.insert(0, dspec)
|
||||||
|
else:
|
||||||
|
spack_built.insert(0, dspec)
|
||||||
|
|
||||||
|
ordered_build_link_deps = spack_built + externals
|
||||||
|
cmake_prefix_path_entries = []
|
||||||
|
for spec in ordered_build_link_deps:
|
||||||
|
cmake_prefix_path_entries.extend(spec.package.cmake_prefix_paths)
|
||||||
|
|
||||||
|
return filter_system_paths(cmake_prefix_path_entries)
|
||||||
|
|
||||||
|
|
||||||
 def _setup_pkg_and_run(
-    serialized_pkg: "spack.subprocess_context.PackageInstallContext",
-    function: Callable,
-    kwargs: Dict,
-    write_pipe: Connection,
-    input_pipe: Optional[Connection],
-    jsfd1: Optional[Connection],
-    jsfd2: Optional[Connection],
+    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
 ):
-    """Main entry point in the child process for Spack builds.
-
-    ``_setup_pkg_and_run`` is called by the child process created in
-    ``start_build_process()``, and its main job is to run ``function()`` on behalf of
-    some Spack installation (see :ref:`spack.installer.PackageInstaller._install_task`).
-
-    The child process is passed a ``write_pipe``, on which it's expected to send one of
-    the following:
-
-    * ``StopPhase``: error raised by a build process indicating it's stopping at a
-      particular build phase.
-
-    * ``BaseException``: any exception raised by a child build process, which will be
-      wrapped in ``ChildError`` (which adds a bunch of debug info and log context) and
-      raised in the parent.
-
-    * The return value of ``function()``, which can be anything (except an exception).
-      This is returned to the caller.
-
-    Note: ``jsfd1`` and ``jsfd2`` are passed solely to ensure that the child process
-    does not close these file descriptors. Some ``multiprocessing`` backends will close
-    them automatically in the child if they are not passed at process creation time.
-
-    Arguments:
-        serialized_pkg: Spack package install context object (serialized form of the
-            package that we'll build in the child process).
-        function: function to call in the child process; serialized_pkg is passed to
-            this as the first argument.
-        kwargs: additional keyword arguments to pass to ``function()``.
-        write_pipe: multiprocessing ``Connection`` to the parent process, to which the
-            child *must* send a result (or an error) back to parent on.
-        input_multiprocess_fd: stdin from the parent (not passed currently on Windows)
-        jsfd1: gmake Jobserver file descriptor 1.
-        jsfd2: gmake Jobserver file descriptor 2.
-    """
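The removed docstring above describes a simple protocol: the child sends exactly one object over the pipe — either `function()`'s return value or the exception itself — and the parent decides what to raise. A minimal sketch of that protocol, with a stand-in for `function(pkg, kwargs)`:

import multiprocessing


def child(write_pipe):
    try:
        write_pipe.send(21 * 2)  # stand-in for function(pkg, kwargs)
    except BaseException as e:
        write_pipe.send(e)  # exceptions travel over the same pipe
    finally:
        write_pipe.close()


if __name__ == "__main__":
    read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
    p = multiprocessing.Process(target=child, args=(write_pipe,))
    p.start()
    result = read_pipe.recv()
    p.join()
    print(result)  # 42, or an exception object the parent may re-raise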

     context: str = kwargs.get("context", "build")

     try:
-        # We are in the child process. Python sets sys.stdin to open(os.devnull) to prevent our
-        # process and its parent from simultaneously reading from the original stdin. But, we
-        # assume that the parent process is not going to read from it till we are done with the
-        # child, so we undo Python's precaution. closefd=False since Connection has ownership.
-        if input_pipe is not None:
-            sys.stdin = os.fdopen(input_pipe.fileno(), closefd=False)
+        # We are in the child process. Python sets sys.stdin to
+        # open(os.devnull) to prevent our process and its parent from
+        # simultaneously reading from the original stdin. But, we assume
+        # that the parent process is not going to read from it till we
+        # are done with the child, so we undo Python's precaution.
+        if input_multiprocess_fd is not None:
+            sys.stdin = os.fdopen(input_multiprocess_fd.fd)

         pkg = serialized_pkg.restore()

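The `closefd=False` comment on the removed side is about file-descriptor ownership: wrapping an fd without taking ownership means closing the wrapper leaves the fd open for its real owner (here, the parent-held `Connection`). A small runnable sketch of that behavior:

import os

r, w = os.pipe()
os.write(w, b"hi\n")
os.close(w)

f = os.fdopen(r, closefd=False)  # wrapper does not own the fd
print(f.readline())              # 'hi'
f.close()                        # closes the wrapper only...
os.close(r)                      # ...the fd itself is still ours to close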
@@ -1207,18 +1139,17 @@ def _setup_pkg_and_run(
         return_value = function(pkg, kwargs)
         write_pipe.send(return_value)

-    except spack.error.StopPhase as e:
+    except StopPhase as e:
         # Do not create a full ChildError from this, it's not an error
         # it's a control statement.
         write_pipe.send(e)
-    except BaseException as e:
+    except BaseException:
         # catch ANYTHING that goes wrong in the child process
+        exc_type, exc, tb = sys.exc_info()

         # Need to unwind the traceback in the child because traceback
         # objects can't be sent to the parent.
-        exc_type = type(e)
-        tb = e.__traceback__
-        tb_string = "".join(traceback.format_exception(exc_type, e, tb))
+        tb_string = traceback.format_exc()

         # build up some context from the offending package so we can
         # show that, too.
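Both sides of this hunk flatten the traceback to a string because traceback objects hold frame references and cannot be pickled across the pipe. The two spellings are equivalent inside an `except` block:

import traceback

try:
    1 / 0
except BaseException as e:
    # newer spelling: explicit exception attributes
    tb_string = "".join(traceback.format_exception(type(e), e, e.__traceback__))
    # older spelling: the implicit "current" exception
    assert tb_string == traceback.format_exc()

print(tb_string.splitlines()[-1])  # ZeroDivisionError: division by zero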
@@ -1235,8 +1166,8 @@ def _setup_pkg_and_run(
         elif context == "test":
             logfile = os.path.join(pkg.test_suite.stage, pkg.test_suite.test_log_name(pkg.spec))

-        error_msg = str(e)
-        if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
+        error_msg = str(exc)
+        if isinstance(exc, (spack.multimethod.NoSuchMethodError, AttributeError)):
             process = "test the installation" if context == "test" else "build from sources"
             error_msg = (
                 "The '{}' package cannot find an attribute while trying to {}. "
@@ -1246,7 +1177,7 @@ def _setup_pkg_and_run(
                 "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
             ).format(pkg.name, process, context)
             error_msg = colorize("@*R{{{}}}".format(error_msg))
-            error_msg = "{}\n\n{}".format(str(e), error_msg)
+            error_msg = "{}\n\n{}".format(str(exc), error_msg)

         # make a pickleable exception to send to parent.
         msg = "%s: %s" % (exc_type.__name__, error_msg)
@@ -1264,8 +1195,8 @@ def _setup_pkg_and_run(

     finally:
         write_pipe.close()
-        if input_pipe is not None:
-            input_pipe.close()
+        if input_multiprocess_fd is not None:
+            input_multiprocess_fd.close()


 def start_build_process(pkg, function, kwargs):
@@ -1292,9 +1223,23 @@ def child_fun():
     If something goes wrong, the child process catches the error and
     passes it to the parent wrapped in a ChildError. The parent is
     expected to handle (or re-raise) the ChildError.

+    This uses `multiprocessing.Process` to create the child process. The
+    mechanism used to create the process differs on different operating
+    systems and for different versions of Python. In some cases "fork"
+    is used (i.e. the "fork" system call) and some cases it starts an
+    entirely new Python interpreter process (in the docs this is referred
+    to as the "spawn" start method). Breaking it down by OS:
+
+    - Linux always uses fork.
+    - Mac OS uses fork before Python 3.8 and "spawn" for 3.8 and after.
+    - Windows always uses the "spawn" start method.
+
+    For more information on `multiprocessing` child process creation
+    mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
     """
     read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
-    input_fd = None
+    input_multiprocess_fd = None
     jobserver_fd1 = None
     jobserver_fd2 = None

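The fork/spawn breakdown in the added docstring can be checked directly, since `multiprocessing` exposes the default start method:

import multiprocessing

# "fork" on Linux, "spawn" on Windows and on macOS for Python 3.8+
print(multiprocessing.get_start_method())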
@@ -1303,13 +1248,14 @@ def child_fun():
     try:
         # Forward sys.stdin when appropriate, to allow toggling verbosity
         if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
-            input_fd = Connection(os.dup(sys.stdin.fileno()))
+            input_fd = os.dup(sys.stdin.fileno())
+            input_multiprocess_fd = MultiProcessFd(input_fd)
         mflags = os.environ.get("MAKEFLAGS", False)
         if mflags:
             m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
             if m:
-                jobserver_fd1 = Connection(int(m.group(1)))
-                jobserver_fd2 = Connection(int(m.group(2)))
+                jobserver_fd1 = MultiProcessFd(int(m.group(1)))
+                jobserver_fd2 = MultiProcessFd(int(m.group(2)))

         p = multiprocessing.Process(
             target=_setup_pkg_and_run,
@@ -1318,7 +1264,7 @@ def child_fun():
             function,
             kwargs,
             write_pipe,
-            input_fd,
+            input_multiprocess_fd,
             jobserver_fd1,
             jobserver_fd2,
         ),
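The regex in the hunk above picks the two inherited file descriptors out of a GNU make jobserver announcement in MAKEFLAGS. A standalone check with a hypothetical MAKEFLAGS value:

import re

mflags = "-j8 --jobserver-auth=3,4"  # hypothetical MAKEFLAGS contents
m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
if m:
    print(int(m.group(1)), int(m.group(2)))  # 3 4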
@@ -1338,8 +1284,8 @@ def child_fun():

     finally:
         # Close the input stream in the parent process
-        if input_fd is not None:
-            input_fd.close()
+        if input_multiprocess_fd is not None:
+            input_multiprocess_fd.close()

     def exitcode_msg(p):
         typ = "exit" if p.exitcode >= 0 else "signal"
@@ -1354,7 +1300,7 @@ def exitcode_msg(p):
     p.join()

     # If returns a StopPhase, raise it
-    if isinstance(child_result, spack.error.StopPhase):
+    if isinstance(child_result, StopPhase):
         # do not print
         raise child_result

@@ -1377,7 +1323,7 @@ def exitcode_msg(p):
     return child_result


-CONTEXT_BASES = (spack.package_base.PackageBase, spack.builder.Builder)
+CONTEXT_BASES = (spack.package_base.PackageBase, spack.build_systems._checks.BaseBuilder)


 def get_package_context(traceback, context=3):
@@ -1426,20 +1372,27 @@ def make_stack(tb, stack=None):
     # We found obj, the Package implementation we care about.
     # Point out the location in the install method where we failed.
     filename = inspect.getfile(frame.f_code)
-    lines = [f"{filename}:{frame.f_lineno}, in {frame.f_code.co_name}:"]
+    lineno = frame.f_lineno
+    if os.path.basename(filename) == "package.py":
+        # subtract 1 because we inject a magic import at the top of package files.
+        # TODO: get rid of the magic import.
+        lineno -= 1
+
+    lines = ["{0}:{1:d}, in {2}:".format(filename, lineno, frame.f_code.co_name)]

     # Build a message showing context in the install method.
     sourcelines, start = inspect.getsourcelines(frame)

     # Calculate lineno of the error relative to the start of the function.
-    fun_lineno = frame.f_lineno - start
+    fun_lineno = lineno - start
     start_ctx = max(0, fun_lineno - context)
     sourcelines = sourcelines[start_ctx : fun_lineno + context + 1]

     for i, line in enumerate(sourcelines):
         is_error = start_ctx + i == fun_lineno
+        mark = ">> " if is_error else "   "
         # Add start to get lineno relative to start of file, not function.
-        marked = f"  {'>> ' if is_error else '   '}{start + start_ctx + i:-6d}{line.rstrip()}"
+        marked = "  {0}{1:-6d}{2}".format(mark, start + start_ctx + i, line.rstrip())
         if is_error:
             marked = colorize("@R{%s}" % cescape(marked))
         lines.append(marked)
@@ -1556,6 +1509,17 @@ def _make_child_error(msg, module, name, traceback, log, log_type, context):
     return ChildError(msg, module, name, traceback, log, log_type, context)


+class StopPhase(spack.error.SpackError):
+    """Pickle-able exception to control stopped builds."""
+
+    def __reduce__(self):
+        return _make_stop_phase, (self.message, self.long_message)
+
+
+def _make_stop_phase(msg, long_msg):
+    return StopPhase(msg, long_msg)
+
+
 def write_log_summary(out, log_type, log, last=None):
     errors, warnings = parse_log_events(log)
     nerr = len(errors)
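The `__reduce__` method added here is what lets the exception survive the trip through the pipe: pickle calls it to get a (callable, args) recipe for rebuilding the object on the other side. A self-contained sketch of the same pattern, with a simplified stand-in for `spack.error.SpackError`:

import pickle


class StopPhase(Exception):  # simplified stand-in for spack.error.SpackError
    def __init__(self, message, long_message=None):
        super().__init__(message)
        self.message = message
        self.long_message = long_message

    def __reduce__(self):
        return _make_stop_phase, (self.message, self.long_message)


def _make_stop_phase(msg, long_msg):
    return StopPhase(msg, long_msg)


err = pickle.loads(pickle.dumps(StopPhase("stopping at configure")))
print(err.message)  # round-trips through pickle intact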
@@ -8,8 +8,7 @@
 import llnl.util.lang

 import spack.builder
-import spack.error
-import spack.phase_callbacks
+import spack.installer
 import spack.relocate
 import spack.spec
 import spack.store
@@ -35,7 +34,7 @@ def check_paths(path_list, filetype, predicate):
             if not predicate(abs_path):
                 msg = "Install failed for {0}. No such {1} in prefix: {2}"
                 msg = msg.format(pkg.name, filetype, path)
-                raise spack.error.InstallError(msg)
+                raise spack.installer.InstallError(msg)

     check_paths(pkg.sanity_check_is_file, "file", os.path.isfile)
     check_paths(pkg.sanity_check_is_dir, "directory", os.path.isdir)
@@ -43,7 +42,7 @@ def check_paths(path_list, filetype, predicate):
     ignore_file = llnl.util.lang.match_predicate(spack.store.STORE.layout.hidden_file_regexes)
     if all(map(ignore_file, os.listdir(pkg.prefix))):
         msg = "Install failed for {0}. Nothing was installed!"
-        raise spack.error.InstallError(msg.format(pkg.name))
+        raise spack.installer.InstallError(msg.format(pkg.name))


 def apply_macos_rpath_fixups(builder: spack.builder.Builder):
@@ -64,7 +63,7 @@ def apply_macos_rpath_fixups(builder: spack.builder.Builder):


 def ensure_build_dependencies_or_raise(
-    spec: spack.spec.Spec, dependencies: List[str], error_msg: str
+    spec: spack.spec.Spec, dependencies: List[spack.spec.Spec], error_msg: str
 ):
     """Ensure that some build dependencies are present in the concrete spec.

@@ -72,7 +71,7 @@ def ensure_build_dependencies_or_raise(

     Args:
         spec: concrete spec to be checked.
-        dependencies: list of package names of required build dependencies
+        dependencies: list of abstract specs to be satisfied
         error_msg: brief error message to be prepended to a longer description

     Raises:
@@ -128,8 +127,8 @@ def execute_install_time_tests(builder: spack.builder.Builder):
     builder.pkg.tester.phase_tests(builder, "install", builder.install_time_test_callbacks)


-class BuilderWithDefaults(spack.builder.Builder):
-    """Base class for all specific builders with common callbacks registered."""
+class BaseBuilder(spack.builder.Builder):
+    """Base class for builders to register common checks"""

     # Check that self.prefix is there after installation
-    spack.phase_callbacks.run_after("install")(sanity_check_prefix)
+    spack.builder.run_after("install")(sanity_check_prefix)
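The class-body line `spack.builder.run_after("install")(sanity_check_prefix)` is the un-sugared form of a decorator: the factory is called with a phase name and the result is applied to an existing function. A hypothetical mini-registry illustrating the shape (this hunk appears to come from Spack's `_checks.py` build-system module, judging by its content):

_callbacks = []


def run_after(phase):
    def decorator(fn):
        _callbacks.append((phase, fn))
        return fn
    return decorator


def sanity_check_prefix():
    print("checking prefix...")


run_after("install")(sanity_check_prefix)  # same call shape as the class body above

for phase, fn in _callbacks:
    if phase == "install":
        fn()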
@@ -2,11 +2,10 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os

 import llnl.util.filesystem as fs

 import spack.directives
+import spack.package_base
 import spack.util.executable

 from .autotools import AutotoolsBuilder, AutotoolsPackage
@@ -47,12 +46,18 @@ class AspellDictPackage(AutotoolsPackage):
     #: Override the default autotools builder
     AutotoolsBuilder = AspellBuilder

-    def patch(self):
+    def view_destination(self, view):
         aspell_spec = self.spec["aspell"]
+        if view.get_projection_for_spec(aspell_spec) != aspell_spec.prefix:
+            raise spack.package_base.ExtensionError(
+                "aspell does not support non-global extensions"
+            )
         aspell = aspell_spec.command
-        dictdir = aspell("dump", "config", "dict-dir", output=str).strip()
-        datadir = aspell("dump", "config", "data-dir", output=str).strip()
-        dictdir = os.path.relpath(dictdir, aspell_spec.prefix)
-        datadir = os.path.relpath(datadir, aspell_spec.prefix)
-        fs.filter_file(r"^dictdir=.*$", f"dictdir=/{dictdir}", "configure")
-        fs.filter_file(r"^datadir=.*$", f"datadir=/{datadir}", "configure")
+        return aspell("dump", "config", "dict-dir", output=str).strip()
+
+    def view_source(self):
+        return self.prefix.lib
+
+    def patch(self):
+        fs.filter_file(r"^dictdir=.*$", "dictdir=/lib", "configure")
+        fs.filter_file(r"^datadir=.*$", "datadir=/lib", "configure")
@@ -6,18 +6,14 @@
 import os.path
 import stat
 import subprocess
-from typing import Callable, List, Optional, Set, Tuple, Union
+from typing import List

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty

 import spack.build_environment
 import spack.builder
-import spack.error
 import spack.package_base
-import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, conflicts, depends_on
 from spack.multimethod import when
 from spack.operating_systems.mac_os import macos_version
@@ -25,7 +21,7 @@
 from spack.version import Version

 from ._checks import (
-    BuilderWithDefaults,
+    BaseBuilder,
     apply_macos_rpath_fixups,
     ensure_build_dependencies_or_raise,
     execute_build_time_tests,
@@ -72,14 +68,14 @@ def flags_to_build_system_args(self, flags):
     # Legacy methods (used by too many packages to change them,
     # need to forward to the builder)
     def enable_or_disable(self, *args, **kwargs):
-        return spack.builder.create(self).enable_or_disable(*args, **kwargs)
+        return self.builder.enable_or_disable(*args, **kwargs)

     def with_or_without(self, *args, **kwargs):
-        return spack.builder.create(self).with_or_without(*args, **kwargs)
+        return self.builder.with_or_without(*args, **kwargs)


 @spack.builder.builder("autotools")
-class AutotoolsBuilder(BuilderWithDefaults):
+class AutotoolsBuilder(BaseBuilder):
     """The autotools builder encodes the default way of installing software built
     with autotools. It has four phases that can be overridden, if need be:

@@ -160,7 +156,7 @@ class AutotoolsBuilder(BuilderWithDefaults):
     install_libtool_archives = False

     @property
-    def patch_config_files(self) -> bool:
+    def patch_config_files(self):
         """Whether to update old ``config.guess`` and ``config.sub`` files
         distributed with the tarball.

@@ -180,7 +176,7 @@ def patch_config_files(self) -> bool:
         )

     @property
-    def _removed_la_files_log(self) -> str:
+    def _removed_la_files_log(self):
         """File containing the list of removed libtool archives"""
         build_dir = self.build_directory
         if not os.path.isabs(self.build_directory):
@@ -188,15 +184,15 @@ def _removed_la_files_log(self) -> str:
         return os.path.join(build_dir, "removed_la_files.txt")

     @property
-    def archive_files(self) -> List[str]:
+    def archive_files(self):
         """Files to archive for packages based on autotools"""
         files = [os.path.join(self.build_directory, "config.log")]
         if not self.install_libtool_archives:
             files.append(self._removed_la_files_log)
         return files

-    @spack.phase_callbacks.run_after("autoreconf")
-    def _do_patch_config_files(self) -> None:
+    @spack.builder.run_after("autoreconf")
+    def _do_patch_config_files(self):
         """Some packages ship with older config.guess/config.sub files and need to
         have these updated when installed on a newer architecture.

@@ -252,7 +248,7 @@ def runs_ok(script_abs_path):

         # An external gnuconfig may not not have a prefix.
         if gnuconfig_dir is None:
-            raise spack.error.InstallError(
+            raise spack.build_environment.InstallError(
                 "Spack could not find substitutes for GNU config files because no "
                 "prefix is available for the `gnuconfig` package. Make sure you set a "
                 "prefix path instead of modules for external `gnuconfig`."
@@ -272,7 +268,7 @@ def runs_ok(script_abs_path):
             msg += (
                 " or the `gnuconfig` package prefix is misconfigured as" " an external package"
             )
-            raise spack.error.InstallError(msg)
+            raise spack.build_environment.InstallError(msg)

         # Filter working substitutes
         candidates = [f for f in candidates if runs_ok(f)]
@@ -297,7 +293,9 @@ def runs_ok(script_abs_path):
            and set the prefix to the directory containing the `config.guess` and
            `config.sub` files.
            """
-            raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.pkg.name))
+            raise spack.build_environment.InstallError(
+                msg.format(", ".join(to_be_found), self.name)
+            )

         # Copy the good files over the bad ones
         for abs_path in to_be_patched:
@@ -307,8 +305,8 @@ def runs_ok(script_abs_path):
             fs.copy(substitutes[name], abs_path)
             os.chmod(abs_path, mode)

-    @spack.phase_callbacks.run_before("configure")
-    def _patch_usr_bin_file(self) -> None:
+    @spack.builder.run_before("configure")
+    def _patch_usr_bin_file(self):
         """On NixOS file is not available in /usr/bin/file. Patch configure
         scripts to use file from path."""

@@ -319,8 +317,8 @@ def _patch_usr_bin_file(self) -> None:
             with fs.keep_modification_time(*x.filenames):
                 x.filter(regex="/usr/bin/file", repl="file", string=True)

-    @spack.phase_callbacks.run_before("configure")
-    def _set_autotools_environment_variables(self) -> None:
+    @spack.builder.run_before("configure")
+    def _set_autotools_environment_variables(self):
         """Many autotools builds use a version of mknod.m4 that fails when
         running as root unless FORCE_UNSAFE_CONFIGURE is set to 1.

@@ -333,8 +331,8 @@ def _set_autotools_environment_variables(self) -> None:
         """
         os.environ["FORCE_UNSAFE_CONFIGURE"] = "1"

-    @spack.phase_callbacks.run_before("configure")
-    def _do_patch_libtool_configure(self) -> None:
+    @spack.builder.run_before("configure")
+    def _do_patch_libtool_configure(self):
         """Patch bugs that propagate from libtool macros into "configure" and
         further into "libtool". Note that patches that can be fixed by patching
         "libtool" directly should be implemented in the _do_patch_libtool method
@@ -361,8 +359,8 @@ def _do_patch_libtool_configure(self) -> None:
         # Support Libtool 2.4.2 and older:
         x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')

-    @spack.phase_callbacks.run_after("configure")
-    def _do_patch_libtool(self) -> None:
+    @spack.builder.run_after("configure")
+    def _do_patch_libtool(self):
         """If configure generates a "libtool" script that does not correctly
         detect the compiler (and patch_libtool is set), patch in the correct
         values for libtool variables.
@@ -510,64 +508,27 @@ def _do_patch_libtool(self) -> None:
         )

     @property
-    def configure_directory(self) -> str:
+    def configure_directory(self):
         """Return the directory where 'configure' resides."""
         return self.pkg.stage.source_path

     @property
-    def configure_abs_path(self) -> str:
+    def configure_abs_path(self):
         # Absolute path to configure
         configure_abs_path = os.path.join(os.path.abspath(self.configure_directory), "configure")
         return configure_abs_path

     @property
-    def build_directory(self) -> str:
+    def build_directory(self):
         """Override to provide another place to build the package"""
         return self.configure_directory

-    @spack.phase_callbacks.run_before("autoreconf")
-    def delete_configure_to_force_update(self) -> None:
+    @spack.builder.run_before("autoreconf")
+    def delete_configure_to_force_update(self):
         if self.force_autoreconf:
             fs.force_remove(self.configure_abs_path)

-    @property
-    def autoreconf_search_path_args(self) -> List[str]:
-        """Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
-        of build deps, skips the default path of automake, move external include
-        flags to the back, since they might pull in unrelated m4 files shadowing
-        spack dependencies."""
-        return _autoreconf_search_path_args(self.spec)
-
-    @spack.phase_callbacks.run_after("autoreconf")
-    def set_configure_or_die(self) -> None:
-        """Ensure the presence of a "configure" script, or raise. If the "configure"
-        is found, a module level attribute is set.
-
-        Raises:
-            RuntimeError: if the "configure" script is not found
-        """
-        # Check if the "configure" script is there. If not raise a RuntimeError.
-        if not os.path.exists(self.configure_abs_path):
-            msg = "configure script not found in {0}"
-            raise RuntimeError(msg.format(self.configure_directory))
-
-        # Monkey-patch the configure script in the corresponding module
-        globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
-        globals_for_pkg.configure = Executable(self.configure_abs_path)
-        globals_for_pkg.propagate_changes_to_mro()
-
-    def configure_args(self) -> List[str]:
-        """Return the list of all the arguments that must be passed to configure,
-        except ``--prefix`` which will be pre-pended to the list.
-        """
-        return []
-
-    def autoreconf(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def autoreconf(self, pkg, spec, prefix):
         """Not needed usually, configure should be already there"""

         # If configure exists nothing needs to be done
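`set_configure_or_die` (relocated within this file by the hunks above and below) monkey-patches a `configure` callable into the package's module so that build phases can invoke it like a global. A hypothetical mini-model of that arrangement:

import types

pkg_module = types.ModuleType("hypothetical_package_module")
pkg_module.configure = lambda *args: print("./configure", *args)

# a build phase can now call it as if it were a module-level global
pkg_module.configure("--prefix=/opt/example")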
@@ -594,12 +555,39 @@ def autoreconf(
         autoreconf_args += self.autoreconf_extra_args
         self.pkg.module.autoreconf(*autoreconf_args)

-    def configure(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    @property
+    def autoreconf_search_path_args(self):
+        """Search path includes for autoreconf. Add an -I flag for all `aclocal` dirs
+        of build deps, skips the default path of automake, move external include
+        flags to the back, since they might pull in unrelated m4 files shadowing
+        spack dependencies."""
+        return _autoreconf_search_path_args(self.spec)
+
+    @spack.builder.run_after("autoreconf")
+    def set_configure_or_die(self):
+        """Ensure the presence of a "configure" script, or raise. If the "configure"
+        is found, a module level attribute is set.
+
+        Raises:
+            RuntimeError: if the "configure" script is not found
+        """
+        # Check if the "configure" script is there. If not raise a RuntimeError.
+        if not os.path.exists(self.configure_abs_path):
+            msg = "configure script not found in {0}"
+            raise RuntimeError(msg.format(self.configure_directory))
+
+        # Monkey-patch the configure script in the corresponding module
+        globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
+        globals_for_pkg.configure = Executable(self.configure_abs_path)
+        globals_for_pkg.propagate_changes_to_mro()
+
+    def configure_args(self):
+        """Return the list of all the arguments that must be passed to configure,
+        except ``--prefix`` which will be pre-pended to the list.
+        """
+        return []
+
+    def configure(self, pkg, spec, prefix):
         """Run "configure", with the arguments specified by the builder and an
         appropriately set prefix.
         """
@@ -610,12 +598,7 @@ def configure(
         with fs.working_dir(self.build_directory, create=True):
             pkg.module.configure(*options)

-    def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Run "make" on the build targets specified by the builder."""
         # See https://autotools.io/automake/silent.html
         params = ["V=1"]
@@ -623,49 +606,41 @@ def build(
         with fs.working_dir(self.build_directory):
             pkg.module.make(*params)

-    def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.make(*self.install_targets)

-    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
+    spack.builder.run_after("build")(execute_build_time_tests)

-    def check(self) -> None:
+    def check(self):
         """Run "make" on the ``test`` and ``check`` targets, if found."""
         with fs.working_dir(self.build_directory):
             self.pkg._if_make_target_execute("test")
             self.pkg._if_make_target_execute("check")

     def _activate_or_not(
-        self,
-        name: str,
-        activation_word: str,
-        deactivation_word: str,
-        activation_value: Optional[Union[Callable, str]] = None,
-        variant=None,
-    ) -> List[str]:
+        self, name, activation_word, deactivation_word, activation_value=None, variant=None
+    ):
         """This function contain the current implementation details of
         :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without` and
         :meth:`~spack.build_systems.autotools.AutotoolsBuilder.enable_or_disable`.

         Args:
-            name: name of the option that is being activated or not
-            activation_word: the default activation word ('with' in the case of
-                ``with_or_without``)
-            deactivation_word: the default deactivation word ('without' in the case of
-                ``with_or_without``)
-            activation_value: callable that accepts a single value. This value is either one of the
-                allowed values for a multi-valued variant or the name of a bool-valued variant.
+            name (str): name of the option that is being activated or not
+            activation_word (str): the default activation word ('with' in the
+                case of ``with_or_without``)
+            deactivation_word (str): the default deactivation word ('without'
+                in the case of ``with_or_without``)
+            activation_value (typing.Callable): callable that accepts a single
+                value. This value is either one of the allowed values for a
+                multi-valued variant or the name of a bool-valued variant.

                 Returns the parameter to be used when the value is activated.

-                The special value "prefix" can also be assigned and will return
+                The special value 'prefix' can also be assigned and will return
                 ``spec[name].prefix`` as activation parameter.
-            variant: name of the variant that is being processed (if different from option name)
+            variant (str): name of the variant that is being processed
+                (if different from option name)

         Examples:

@@ -673,19 +648,19 @@ def _activate_or_not(

         .. code-block:: python

-            variant("foo", values=("x", "y"), description="")
-            variant("bar", default=True, description="")
-            variant("ba_z", default=True, description="")
+            variant('foo', values=('x', 'y'), description='')
+            variant('bar', default=True, description='')
+            variant('ba_z', default=True, description='')

         calling this function like:

         .. code-block:: python

             _activate_or_not(
-                "foo", "with", "without", activation_value="prefix"
+                'foo', 'with', 'without', activation_value='prefix'
             )
-            _activate_or_not("bar", "with", "without")
-            _activate_or_not("ba-z", "with", "without", variant="ba_z")
+            _activate_or_not('bar', 'with', 'without')
+            _activate_or_not('ba-z', 'with', 'without', variant='ba_z')

         will generate the following configuration options:

@@ -705,16 +680,17 @@ def _activate_or_not(
         Raises:
             KeyError: if name is not among known variants
         """
-        spec: spack.spec.Spec = self.pkg.spec
-        args: List[str] = []
+        spec = self.pkg.spec
+        args = []

         if activation_value == "prefix":
             activation_value = lambda x: spec[x].prefix

         variant = variant or name

-        # Defensively look that the name passed as argument is among variants
-        if not self.pkg.has_variant(variant):
+        # Defensively look that the name passed as argument is among
+        # variants
+        if variant not in self.pkg.variants:
             msg = '"{0}" is not a variant of "{1}"'
             raise KeyError(msg.format(variant, self.pkg.name))

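Read together with the docstring example above, the generated options are just `--with-X[=value]` / `--without-X` strings keyed on what is present in the spec. A standalone rendition under hypothetical spec state (the token set and helper below are illustrative, not Spack API):

spec_tokens = {"foo=x", "+bar"}  # hypothetical: foo=x active, +bar true, ~ba_z false


def with_or_without(variant, values, value_of=None):
    args = []
    for v in values:
        if "{0}={1}".format(variant, v) in spec_tokens:
            arg = "--with-{0}".format(v)
            args.append(arg + "=" + value_of(v) if value_of else arg)
        else:
            args.append("--without-{0}".format(v))
    return args


print(with_or_without("foo", ["x", "y"], value_of=lambda v: "/prefix/" + v))
# ['--with-x=/prefix/x', '--without-y']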
@@ -723,24 +699,34 @@ def _activate_or_not(

         # Create a list of pairs. Each pair includes a configuration
         # option and whether or not that option is activated
-        vdef = self.pkg.get_variant(variant)
-        if set(vdef.values) == set((True, False)):  # type: ignore
+        variant_desc, _ = self.pkg.variants[variant]
+        if set(variant_desc.values) == set((True, False)):
             # BoolValuedVariant carry information about a single option.
             # Nonetheless, for uniformity of treatment we'll package them
             # in an iterable of one element.
-            options = [(name, f"+{variant}" in spec)]
+            condition = "+{name}".format(name=variant)
+            options = [(name, condition in spec)]
         else:
+            condition = "{variant}={value}"
             # "feature_values" is used to track values which correspond to
             # features which can be enabled or disabled as understood by the
             # package's build system. It excludes values which have special
             # meanings and do not correspond to features (e.g. "none")
-            feature_values = getattr(vdef.values, "feature_values", None) or vdef.values
-            options = [(v, f"{variant}={v}" in spec) for v in feature_values]  # type: ignore
+            feature_values = (
+                getattr(variant_desc.values, "feature_values", None) or variant_desc.values
+            )
+
+            options = [
+                (value, condition.format(variant=variant, value=value) in spec)
+                for value in feature_values
+            ]

         # For each allowed value in the list of values
         for option_value, activated in options:
             # Search for an override in the package for this value
-            override_name = f"{activation_word}_or_{deactivation_word}_{option_value}"
+            override_name = "{0}_or_{1}_{2}".format(
+                activation_word, deactivation_word, option_value
+            )
             line_generator = getattr(self, override_name, None) or getattr(
                 self.pkg, override_name, None
             )
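The `getattr` chain above lets a builder or package override the default flag for one specific value by defining a method named `<activation>_or_<deactivation>_<value>`. A tiny sketch of the lookup with a hypothetical builder:

class HypotheticalBuilder:
    def with_or_without_mpi(self, activated):
        return "--enable-mpi" if activated else "--disable-mpi"


builder = HypotheticalBuilder()
override = getattr(builder, "with_or_without_mpi", None) or (
    lambda activated: "--with-mpi" if activated else "--without-mpi"
)
print(override(True))  # --enable-mpi (the override wins over the default)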
@@ -749,24 +735,19 @@ def _activate_or_not(

             def _default_generator(is_activated):
                 if is_activated:
-                    line = f"--{activation_word}-{option_value}"
+                    line = "--{0}-{1}".format(activation_word, option_value)
                     if activation_value is not None and activation_value(
                         option_value
                     ):  # NOQA=ignore=E501
-                        line = f"{line}={activation_value(option_value)}"
+                        line += "={0}".format(activation_value(option_value))
                     return line
-                return f"--{deactivation_word}-{option_value}"
+                return "--{0}-{1}".format(deactivation_word, option_value)

             line_generator = _default_generator
         args.append(line_generator(activated))
         return args

-    def with_or_without(
-        self,
-        name: str,
-        activation_value: Optional[Union[Callable, str]] = None,
-        variant: Optional[str] = None,
-    ) -> List[str]:
+    def with_or_without(self, name, activation_value=None, variant=None):
         """Inspects a variant and returns the arguments that activate
         or deactivate the selected feature(s) for the configure options.

@@ -781,11 +762,12 @@ def with_or_without(
         ``variant=value`` is in the spec.

         Args:
-            name: name of a valid multi-valued variant
-            activation_value: callable that accepts a single value and returns the parameter to be
-                used leading to an entry of the type ``--with-{name}={parameter}``.
+            name (str): name of a valid multi-valued variant
+            activation_value (typing.Callable): callable that accepts a single
+                value and returns the parameter to be used leading to an entry
+                of the type ``--with-{name}={parameter}``.

-                The special value "prefix" can also be assigned and will return
+                The special value 'prefix' can also be assigned and will return
                 ``spec[name].prefix`` as activation parameter.

         Returns:
@@ -793,22 +775,18 @@ def with_or_without(
         """
         return self._activate_or_not(name, "with", "without", activation_value, variant)

-    def enable_or_disable(
-        self,
-        name: str,
-        activation_value: Optional[Union[Callable, str]] = None,
-        variant: Optional[str] = None,
-    ) -> List[str]:
+    def enable_or_disable(self, name, activation_value=None, variant=None):
         """Same as
         :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`
         but substitute ``with`` with ``enable`` and ``without`` with ``disable``.

         Args:
-            name: name of a valid multi-valued variant
-            activation_value: if present accepts a single value and returns the parameter to be
-                used leading to an entry of the type ``--enable-{name}={parameter}``
+            name (str): name of a valid multi-valued variant
+            activation_value (typing.Callable): if present accepts a single value
+                and returns the parameter to be used leading to an entry of the
+                type ``--enable-{name}={parameter}``

-                The special value "prefix" can also be assigned and will return
+                The special value 'prefix' can also be assigned and will return
                 ``spec[name].prefix`` as activation parameter.

         Returns:
@@ -816,15 +794,15 @@ def enable_or_disable(
         """
         return self._activate_or_not(name, "enable", "disable", activation_value, variant)

-    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+    spack.builder.run_after("install")(execute_install_time_tests)

-    def installcheck(self) -> None:
+    def installcheck(self):
         """Run "make" on the ``installcheck`` target, if found."""
         with fs.working_dir(self.build_directory):
             self.pkg._if_make_target_execute("installcheck")

-    @spack.phase_callbacks.run_after("install")
-    def remove_libtool_archives(self) -> None:
+    @spack.builder.run_after("install")
+    def remove_libtool_archives(self):
         """Remove all .la files in prefix sub-folders if the package sets
         ``install_libtool_archives`` to be False.
         """
@@ -846,13 +824,12 @@ def setup_build_environment(self, env):
         env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")

     # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
-    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
+    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)


-def _autoreconf_search_path_args(spec: spack.spec.Spec) -> List[str]:
-    dirs_seen: Set[Tuple[int, int]] = set()
-    flags_spack: List[str] = []
-    flags_external: List[str] = []
+def _autoreconf_search_path_args(spec):
+    dirs_seen = set()
+    flags_spack, flags_external = [], []

     # We don't want to add an include flag for automake's default search path.
     for automake in spec.dependencies(name="automake", deptype="build"):
@@ -10,7 +10,8 @@
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty

-import spack.phase_callbacks
+import spack.build_environment
+import spack.builder

 from .cmake import CMakeBuilder, CMakePackage

@@ -88,7 +89,7 @@ def define_cmake_cache_from_variant(self, cmake_var, variant=None, comment=""):
         if variant is None:
             variant = cmake_var.lower()

-        if not self.pkg.has_variant(variant):
+        if variant not in self.pkg.variants:
             raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))

         if variant not in self.pkg.spec.variants:
@@ -192,10 +193,7 @@ def initconfig_mpi_entries(self):

         entries.append(cmake_cache_path("MPI_C_COMPILER", spec["mpi"].mpicc))
         entries.append(cmake_cache_path("MPI_CXX_COMPILER", spec["mpi"].mpicxx))
-
-        # not all MPIs have Fortran wrappers
-        if hasattr(spec["mpi"], "mpifc"):
-            entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))
+        entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))

         # Check for slurm
         using_slurm = False
@@ -299,6 +297,18 @@ def initconfig_hardware_entries(self):
     def std_initconfig_entries(self):
         cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
         cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
+        cmake_rpaths_env = spack.build_environment.get_rpaths(self.pkg)
+        cmake_rpaths_path = ";".join(cmake_rpaths_env)
+        complete_rpath_list = cmake_rpaths_path
+        if "SPACK_COMPILER_EXTRA_RPATHS" in os.environ:
+            spack_extra_rpaths_env = os.environ["SPACK_COMPILER_EXTRA_RPATHS"]
+            spack_extra_rpaths_path = spack_extra_rpaths_env.replace(os.pathsep, ";")
+            complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_extra_rpaths_path)
+
+        if "SPACK_COMPILER_IMPLICIT_RPATHS" in os.environ:
+            spack_implicit_rpaths_env = os.environ["SPACK_COMPILER_IMPLICIT_RPATHS"]
+            spack_implicit_rpaths_path = spack_implicit_rpaths_env.replace(os.pathsep, ";")
+            complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_implicit_rpaths_path)
+
         return [
             "#------------------{0}".format("-" * 60),
@@ -308,6 +318,8 @@ def std_initconfig_entries(self):
             "#------------------{0}\n".format("-" * 60),
             cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
             cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
+            cmake_cache_string("CMAKE_BUILD_RPATH", complete_rpath_list),
+            cmake_cache_string("CMAKE_INSTALL_RPATH", complete_rpath_list),
             self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
         ]

@@ -335,7 +347,7 @@ def std_cmake_args(self):
         args.extend(["-C", self.cache_path])
         return args

-    @spack.phase_callbacks.run_after("install")
+    @spack.builder.run_after("install")
     def install_cmake_cache(self):
         fs.mkdirp(self.pkg.spec.prefix.share.cmake)
         fs.install(self.cache_path, self.pkg.spec.prefix.share.cmake)
@@ -7,11 +7,10 @@
|
|||||||
|
|
||||||
import spack.builder
|
import spack.builder
|
||||||
import spack.package_base
|
import spack.package_base
|
||||||
import spack.phase_callbacks
|
|
||||||
from spack.directives import build_system, depends_on
|
from spack.directives import build_system, depends_on
|
||||||
from spack.multimethod import when
|
from spack.multimethod import when
|
||||||
|
|
||||||
from ._checks import BuilderWithDefaults, execute_install_time_tests
|
from ._checks import BaseBuilder, execute_install_time_tests
|
||||||
|
|
||||||
|
|
||||||
class CargoPackage(spack.package_base.PackageBase):
|
class CargoPackage(spack.package_base.PackageBase):
|
||||||
@@ -28,7 +27,7 @@ class CargoPackage(spack.package_base.PackageBase):
|
|||||||
|
|
||||||
|
|
||||||
@spack.builder.builder("cargo")
|
@spack.builder.builder("cargo")
|
||||||
class CargoBuilder(BuilderWithDefaults):
|
class CargoBuilder(BaseBuilder):
|
||||||
"""The Cargo builder encodes the most common way of building software with
|
"""The Cargo builder encodes the most common way of building software with
|
||||||
a rust Cargo.toml file. It has two phases that can be overridden, if need be:
|
a rust Cargo.toml file. It has two phases that can be overridden, if need be:
|
||||||
|
|
||||||
@@ -78,7 +77,7 @@ def install(self, pkg, spec, prefix):
|
|||||||
with fs.working_dir(self.build_directory):
|
with fs.working_dir(self.build_directory):
|
||||||
fs.install_tree("out", prefix)
|
fs.install_tree("out", prefix)
|
||||||
|
|
||||||
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
|
spack.builder.run_after("install")(execute_install_time_tests)
|
||||||
|
|
||||||
def check(self):
|
def check(self):
|
||||||
"""Run "cargo test"."""
|
"""Run "cargo test"."""
|
||||||
|
|||||||
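The cargo.py hunks only swap the builder base class and the module providing ``run_after``; the packager-facing interface is untouched on both sides. A sketch of a package that would use this build system — the name, URL, and checksum are placeholders, not a real package:

.. code-block:: python

    from spack.package import *


    class MyRustTool(CargoPackage):
        """Hypothetical Cargo-based package."""

        homepage = "https://example.org/my-rust-tool"
        url = "https://example.org/my-rust-tool-1.0.0.tar.gz"

        version("1.0.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")

        # CargoBuilder supplies build() and install() -- the latter copies the
        # local "out" tree into the prefix, per the hunk above -- so a simple
        # package has nothing to override.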
--- a/lib/spack/spack/build_systems/cmake.py
+++ b/lib/spack/spack/build_systems/cmake.py
@@ -8,25 +8,18 @@
 import platform
 import re
 import sys
-from itertools import chain
-from typing import Any, List, Optional, Tuple
+from typing import List, Optional, Tuple

 import llnl.util.filesystem as fs
-from llnl.util.lang import stable_partition

+import spack.build_environment
 import spack.builder
 import spack.deptypes as dt
-import spack.error
 import spack.package_base
-import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
-from spack import traverse
 from spack.directives import build_system, conflicts, depends_on, variant
 from spack.multimethod import when
-from spack.util.environment import filter_system_paths

-from ._checks import BuilderWithDefaults, execute_build_time_tests
+from ._checks import BaseBuilder, execute_build_time_tests

 # Regex to extract the primary generator from the CMake generator
 # string.
@@ -52,9 +45,9 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
     python_executable = pkg.spec["python"].command.path
     args.extend(
         [
-            define("PYTHON_EXECUTABLE", python_executable),
-            define("Python_EXECUTABLE", python_executable),
-            define("Python3_EXECUTABLE", python_executable),
+            CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
+            CMakeBuilder.define("Python_EXECUTABLE", python_executable),
+            CMakeBuilder.define("Python3_EXECUTABLE", python_executable),
         ]
     )

@@ -89,7 +82,7 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
     ipo = False

     if cmake.satisfies("@3.9:"):
-        args.append(define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
+        args.append(CMakeBuilder.define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))

     # Disable Package Registry: export(PACKAGE) may put files in the user's home directory, and
     # find_package may search there. This is not what we want.
@@ -97,36 +90,30 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
     # Do not populate CMake User Package Registry
     if cmake.satisfies("@3.15:"):
         # see https://cmake.org/cmake/help/latest/policy/CMP0090.html
-        args.append(define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
+        args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0090", "NEW"))
     elif cmake.satisfies("@3.1:"):
         # see https://cmake.org/cmake/help/latest/variable/CMAKE_EXPORT_NO_PACKAGE_REGISTRY.html
-        args.append(define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))
+        args.append(CMakeBuilder.define("CMAKE_EXPORT_NO_PACKAGE_REGISTRY", True))

     # Do not use CMake User/System Package Registry
     # https://cmake.org/cmake/help/latest/manual/cmake-packages.7.html#disabling-the-package-registry
     if cmake.satisfies("@3.16:"):
-        args.append(define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
+        args.append(CMakeBuilder.define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False))
     elif cmake.satisfies("@3.1:3.15"):
-        args.append(define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
-        args.append(define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))
+        args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY", False))
+        args.append(CMakeBuilder.define("CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY", False))

     # Export a compilation database if supported.
     if _supports_compilation_databases(pkg):
-        args.append(define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
+        args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))

     # Enable MACOSX_RPATH by default when cmake_minimum_required < 3
     # https://cmake.org/cmake/help/latest/policy/CMP0042.html
     if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
-        args.append(define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))
+        args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))

-    # Disable find package's config mode for versions of Boost that
-    # didn't provide it. See https://github.com/spack/spack/issues/20169
-    # and https://cmake.org/cmake/help/latest/module/FindBoost.html
-    if pkg.spec.satisfies("^boost@:1.69.0"):
-        args.append(define("Boost_NO_BOOST_CMAKE", True))


-def generator(*names: str, default: Optional[str] = None) -> None:
+def generator(*names: str, default: Optional[str] = None):
     """The build system generator to use.

     See ``cmake --help`` for a list of valid generators.
@@ -158,33 +145,11 @@ def _values(x):
         default=default,
         values=_values,
         description="the build system generator to use",
-        when="build_system=cmake",
     )
     for x in not_used:
         conflicts(f"generator={x}")


-def get_cmake_prefix_path(pkg: spack.package_base.PackageBase) -> List[str]:
-    """Obtain the CMAKE_PREFIX_PATH entries for a package, based on the cmake_prefix_path package
-    attribute of direct build/test and transitive link dependencies."""
-    edges = traverse.traverse_topo_edges_generator(
-        traverse.with_artificial_edges([pkg.spec]),
-        visitor=traverse.MixedDepthVisitor(
-            direct=dt.BUILD | dt.TEST, transitive=dt.LINK, key=traverse.by_dag_hash
-        ),
-        key=traverse.by_dag_hash,
-        root=False,
-        all_edges=False,  # cover all nodes, not all edges
-    )
-    ordered_specs = [edge.spec for edge in edges]
-    # Separate out externals so they do not shadow Spack prefixes
-    externals, spack_built = stable_partition((s for s in ordered_specs), lambda x: x.external)
-
-    return filter_system_paths(
-        path for spec in chain(spack_built, externals) for path in spec.package.cmake_prefix_paths
-    )
-
-
 class CMakePackage(spack.package_base.PackageBase):
     """Specialized class for packages built using CMake

@@ -276,15 +241,15 @@ def flags_to_build_system_args(self, flags):

     # Legacy methods (used by too many packages to change them,
     # need to forward to the builder)
-    def define(self, cmake_var: str, value: Any) -> str:
-        return define(cmake_var, value)
+    def define(self, *args, **kwargs):
+        return self.builder.define(*args, **kwargs)

-    def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
-        return define_from_variant(self, cmake_var, variant)
+    def define_from_variant(self, *args, **kwargs):
+        return self.builder.define_from_variant(*args, **kwargs)


 @spack.builder.builder("cmake")
-class CMakeBuilder(BuilderWithDefaults):
+class CMakeBuilder(BaseBuilder):
     """The cmake builder encodes the default way of building software with CMake. IT
     has three phases that can be overridden:

@@ -334,15 +299,15 @@ class CMakeBuilder(BuilderWithDefaults):
     build_time_test_callbacks = ["check"]

     @property
-    def archive_files(self) -> List[str]:
+    def archive_files(self):
         """Files to archive for packages based on CMake"""
         files = [os.path.join(self.build_directory, "CMakeCache.txt")]
-        if _supports_compilation_databases(self.pkg):
+        if _supports_compilation_databases(self):
             files.append(os.path.join(self.build_directory, "compile_commands.json"))
         return files

     @property
-    def root_cmakelists_dir(self) -> str:
+    def root_cmakelists_dir(self):
         """The relative path to the directory containing CMakeLists.txt

         This path is relative to the root of the extracted tarball,
@@ -351,17 +316,16 @@ def root_cmakelists_dir(self) -> str:
         return self.pkg.stage.source_path

     @property
-    def generator(self) -> str:
+    def generator(self):
         if self.spec.satisfies("generator=make"):
             return "Unix Makefiles"
         if self.spec.satisfies("generator=ninja"):
             return "Ninja"
-        raise ValueError(
-            f'{self.spec.format()} has an unsupported value for the "generator" variant'
-        )
+        msg = f'{self.spec.format()} has an unsupported value for the "generator" variant'
+        raise ValueError(msg)

     @property
-    def std_cmake_args(self) -> List[str]:
+    def std_cmake_args(self):
         """Standard cmake arguments provided as a property for
         convenience of package writers
         """
@@ -370,9 +334,7 @@ def std_cmake_args(self) -> List[str]:
         return args

     @staticmethod
-    def std_args(
-        pkg: spack.package_base.PackageBase, generator: Optional[str] = None
-    ) -> List[str]:
+    def std_args(pkg, generator=None):
         """Computes the standard cmake arguments for a generic package"""
         default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
         generator = generator or default_generator
@@ -382,27 +344,18 @@ def std_args(
             msg = "Invalid CMake generator: '{0}'\n".format(generator)
             msg += "CMakePackage currently supports the following "
             msg += "primary generators: '{0}'".format("', '".join(valid_primary_generators))
-            raise spack.error.InstallError(msg)
+            raise spack.package_base.InstallError(msg)

         try:
             build_type = pkg.spec.variants["build_type"].value
         except KeyError:
             build_type = "RelWithDebInfo"

+        define = CMakeBuilder.define
         args = [
             "-G",
             generator,
             define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
-            define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
-            # only include the install prefix lib dirs; rpaths for deps are added by USE_LINK_PATH
-            define(
-                "CMAKE_INSTALL_RPATH",
-                [
-                    pathlib.Path(pkg.prefix, "lib").as_posix(),
-                    pathlib.Path(pkg.prefix, "lib64").as_posix(),
-                ],
-            ),
-            define("CMAKE_PREFIX_PATH", get_cmake_prefix_path(pkg)),
             define("CMAKE_BUILD_TYPE", build_type),
         ]

@@ -417,34 +370,164 @@ def std_args(
         _conditional_cmake_defaults(pkg, args)
         _maybe_set_python_hints(pkg, args)

+        # Set up CMake rpath
+        args.extend(
+            [
+                define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
+                define("CMAKE_INSTALL_RPATH", spack.build_environment.get_rpaths(pkg)),
+                define("CMAKE_PREFIX_PATH", spack.build_environment.get_cmake_prefix_path(pkg)),
+            ]
+        )
+
         return args

     @staticmethod
-    def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
-        return define_cuda_architectures(pkg)
+    def define_cuda_architectures(pkg):
+        """Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.
+
+        ``cuda_arch`` is variant composed of a list of target CUDA architectures and
+        it is declared in the cuda package.
+
+        This method is no-op for cmake<3.18 and when ``cuda_arch`` variant is not set.
+
+        """
+        cmake_flag = str()
+        if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
+            cmake_flag = CMakeBuilder.define(
+                "CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value
+            )
+
+        return cmake_flag

     @staticmethod
-    def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
-        return define_hip_architectures(pkg)
+    def define_hip_architectures(pkg):
+        """Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.
+
+        ``amdgpu_target`` is variant composed of a list of the target HIP
+        architectures and it is declared in the rocm package.
+
+        This method is no-op for cmake<3.18 and when ``amdgpu_target`` variant is
+        not set.
+
+        """
+        cmake_flag = str()
+        if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
+            cmake_flag = CMakeBuilder.define(
+                "CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value
+            )
+
+        return cmake_flag

     @staticmethod
-    def define(cmake_var: str, value: Any) -> str:
-        return define(cmake_var, value)
-
-    def define_from_variant(self, cmake_var: str, variant: Optional[str] = None) -> str:
-        return define_from_variant(self.pkg, cmake_var, variant)
+    def define(cmake_var, value):
+        """Return a CMake command line argument that defines a variable.
+
+        The resulting argument will convert boolean values to OFF/ON
+        and lists/tuples to CMake semicolon-separated string lists. All other
+        values will be interpreted as strings.
+
+        Examples:
+
+            .. code-block:: python
+
+                [define('BUILD_SHARED_LIBS', True),
+                 define('CMAKE_CXX_STANDARD', 14),
+                 define('swr', ['avx', 'avx2'])]
+
+            will generate the following configuration options:
+
+            .. code-block:: console
+
+                ["-DBUILD_SHARED_LIBS:BOOL=ON",
+                 "-DCMAKE_CXX_STANDARD:STRING=14",
+                 "-DSWR:STRING=avx;avx2]
+
+        """
+        # Create a list of pairs. Each pair includes a configuration
+        # option and whether or not that option is activated
+        if isinstance(value, bool):
+            kind = "BOOL"
+            value = "ON" if value else "OFF"
+        else:
+            kind = "STRING"
+            if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
+                value = ";".join(str(v) for v in value)
+            else:
+                value = str(value)
+
+        return "".join(["-D", cmake_var, ":", kind, "=", value])
+
+    def define_from_variant(self, cmake_var, variant=None):
+        """Return a CMake command line argument from the given variant's value.
+
+        The optional ``variant`` argument defaults to the lower-case transform
+        of ``cmake_var``.
+
+        This utility function is similar to
+        :meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`.
+
+        Examples:
+
+            Given a package with:
+
+            .. code-block:: python
+
+                variant('cxxstd', default='11', values=('11', '14'),
+                        multi=False, description='')
+                variant('shared', default=True, description='')
+                variant('swr', values=any_combination_of('avx', 'avx2'),
+                        description='')
+
+            calling this function like:
+
+            .. code-block:: python
+
+                [self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
+                 self.define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
+                 self.define_from_variant('SWR')]
+
+            will generate the following configuration options:
+
+            .. code-block:: console
+
+                ["-DBUILD_SHARED_LIBS:BOOL=ON",
+                 "-DCMAKE_CXX_STANDARD:STRING=14",
+                 "-DSWR:STRING=avx;avx2]
+
+            for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
+
+        Note: if the provided variant is conditional, and the condition is not met,
+        this function returns an empty string. CMake discards empty strings
+        provided on the command line.
+        """
+        if variant is None:
+            variant = cmake_var.lower()
+
+        if variant not in self.pkg.variants:
+            raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))
+
+        if variant not in self.pkg.spec.variants:
+            return ""
+
+        value = self.pkg.spec.variants[variant].value
+        if isinstance(value, (tuple, list)):
+            # Sort multi-valued variants for reproducibility
+            value = sorted(value)
+
+        return self.define(cmake_var, value)

     @property
-    def build_dirname(self) -> str:
+    def build_dirname(self):
         """Directory name to use when building the package."""
-        return f"spack-build-{self.pkg.spec.dag_hash(7)}"
+        return "spack-build-%s" % self.pkg.spec.dag_hash(7)

     @property
-    def build_directory(self) -> str:
+    def build_directory(self):
         """Full-path to the directory to use when building the package."""
         return os.path.join(self.pkg.stage.path, self.build_dirname)

-    def cmake_args(self) -> List[str]:
+    def cmake_args(self):
         """List of all the arguments that must be passed to cmake, except:

         * CMAKE_INSTALL_PREFIX
@@ -454,32 +537,15 @@ def cmake_args(self) -> List[str]:
         """
         return []

-    def cmake(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def cmake(self, pkg, spec, prefix):
         """Runs ``cmake`` in the build directory"""
-
-        # skip cmake phase if it is an incremental develop build
-        if spec.is_develop and os.path.isfile(
-            os.path.join(self.build_directory, "CMakeCache.txt")
-        ):
-            return
-
         options = self.std_cmake_args
         options += self.cmake_args()
         options.append(os.path.abspath(self.root_cmakelists_dir))
         with fs.working_dir(self.build_directory, create=True):
             pkg.module.cmake(*options)

-    def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Make the build targets"""
         with fs.working_dir(self.build_directory):
             if self.generator == "Unix Makefiles":
@@ -488,12 +554,7 @@ def build(
                 self.build_targets.append("-v")
             pkg.module.ninja(*self.build_targets)

-    def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
             if self.generator == "Unix Makefiles":
@@ -501,9 +562,9 @@ def install(
             elif self.generator == "Ninja":
                 pkg.module.ninja(*self.install_targets)

-    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
+    spack.builder.run_after("build")(execute_build_time_tests)

-    def check(self) -> None:
+    def check(self):
         """Search the CMake-generated files for the targets ``test`` and ``check``,
         and runs them if found.
         """
@@ -514,133 +575,3 @@ def check(self) -> None:
         elif self.generator == "Ninja":
             self.pkg._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
             self.pkg._if_ninja_target_execute("check")
-
-
-def define(cmake_var: str, value: Any) -> str:
-    """Return a CMake command line argument that defines a variable.
-
-    The resulting argument will convert boolean values to OFF/ON and lists/tuples to CMake
-    semicolon-separated string lists. All other values will be interpreted as strings.
-
-    Examples:
-
-        .. code-block:: python
-
-            [define("BUILD_SHARED_LIBS", True),
-             define("CMAKE_CXX_STANDARD", 14),
-             define("swr", ["avx", "avx2"])]
-
-        will generate the following configuration options:
-
-        .. code-block:: console
-
-            ["-DBUILD_SHARED_LIBS:BOOL=ON",
-             "-DCMAKE_CXX_STANDARD:STRING=14",
-             "-DSWR:STRING=avx;avx2]
-
-    """
-    # Create a list of pairs. Each pair includes a configuration
-    # option and whether or not that option is activated
-    if isinstance(value, bool):
-        kind = "BOOL"
-        value = "ON" if value else "OFF"
-    else:
-        kind = "STRING"
-        if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
-            value = ";".join(str(v) for v in value)
-        else:
-            value = str(value)
-
-    return "".join(["-D", cmake_var, ":", kind, "=", value])
-
-
-def define_from_variant(
-    pkg: spack.package_base.PackageBase, cmake_var: str, variant: Optional[str] = None
-) -> str:
-    """Return a CMake command line argument from the given variant's value.
-
-    The optional ``variant`` argument defaults to the lower-case transform
-    of ``cmake_var``.
-
-    Examples:
-
-        Given a package with:
-
-        .. code-block:: python
-
-            variant("cxxstd", default="11", values=("11", "14"),
-                    multi=False, description="")
-            variant("shared", default=True, description="")
-            variant("swr", values=any_combination_of("avx", "avx2"),
-                    description="")
-
-        calling this function like:
-
-        .. code-block:: python
-
-            [
-                self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
-                self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
-                self.define_from_variant("SWR"),
-            ]
-
-        will generate the following configuration options:
-
-        .. code-block:: console
-
-            [
-                "-DBUILD_SHARED_LIBS:BOOL=ON",
-                "-DCMAKE_CXX_STANDARD:STRING=14",
-                "-DSWR:STRING=avx;avx2",
-            ]
-
-        for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
-
-    Note: if the provided variant is conditional, and the condition is not met, this function
-    returns an empty string. CMake discards empty strings provided on the command line.
-    """
-    if variant is None:
-        variant = cmake_var.lower()
-
-    if not pkg.has_variant(variant):
-        raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, pkg.name))
-
-    if variant not in pkg.spec.variants:
-        return ""
-
-    value = pkg.spec.variants[variant].value
-    if isinstance(value, (tuple, list)):
-        # Sort multi-valued variants for reproducibility
-        value = sorted(value)
-
-    return define(cmake_var, value)
-
-
-def define_hip_architectures(pkg: spack.package_base.PackageBase) -> str:
-    """Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.
-
-    ``amdgpu_target`` is variant composed of a list of the target HIP
-    architectures and it is declared in the rocm package.
-
-    This method is no-op for cmake<3.18 and when ``amdgpu_target`` variant is
-    not set.
-
-    """
-    if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
-        return define("CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value)
-
-    return ""
-
-
-def define_cuda_architectures(pkg: spack.package_base.PackageBase) -> str:
-    """Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.
-
-    ``cuda_arch`` is variant composed of a list of target CUDA architectures and
-    it is declared in the cuda package.
-
-    This method is no-op for cmake<3.18 and when ``cuda_arch`` variant is not set.
-
-    """
-    if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
-        return define("CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value)
-    return ""
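Although ``define``/``define_from_variant`` move between module scope and the builder class in this diff, their contract — documented in the docstrings above — is the same on both sides, so package code written against either revision looks identical. A sketch of the usual call sites, with package and variant names that are purely illustrative:

.. code-block:: python

    from spack.package import *


    class MyLib(CMakePackage):
        """Hypothetical CMake-based package."""

        variant("shared", default=True, description="Build shared libraries")
        variant("cxxstd", default="14", values=("11", "14", "17"), multi=False,
                description="C++ standard")

        def cmake_args(self):
            return [
                self.define("BUILD_TESTING", False),  # -> -DBUILD_TESTING:BOOL=OFF
                self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
                self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
            ]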
--- a/lib/spack/spack/build_systems/cuda.py
+++ b/lib/spack/spack/build_systems/cuda.py
@@ -14,7 +14,6 @@

 import spack.compiler
 import spack.package_base
-import spack.util.executable

 # Local "type" for type hints
 Path = Union[str, pathlib.Path]
@@ -110,8 +110,8 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:

     depends_on("cuda@5.0:10.2", when="cuda_arch=30")
     depends_on("cuda@5.0:10.2", when="cuda_arch=32")
-    depends_on("cuda@5.0:11.8", when="cuda_arch=35")
-    depends_on("cuda@6.5:11.8", when="cuda_arch=37")
+    depends_on("cuda@5.0:", when="cuda_arch=35")
+    depends_on("cuda@6.5:", when="cuda_arch=37")

     depends_on("cuda@6.0:", when="cuda_arch=50")
     depends_on("cuda@6.5:", when="cuda_arch=52")
@@ -131,7 +131,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     depends_on("cuda@11.8:", when="cuda_arch=89")

     depends_on("cuda@12.0:", when="cuda_arch=90")
-    depends_on("cuda@12.0:", when="cuda_arch=90a")

     # From the NVIDIA install guide we know of conflicts for particular
     # platforms (linux, darwin), architectures (x86, powerpc) and compilers
@@ -150,6 +149,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     # minimum supported versions
     conflicts("%gcc@:4", when="+cuda ^cuda@11.0:")
     conflicts("%gcc@:5", when="+cuda ^cuda@11.4:")
+    conflicts("%gcc@:7.2", when="+cuda ^cuda@12.4:")
     conflicts("%clang@:6", when="+cuda ^cuda@12.2:")

     # maximum supported version
@@ -180,6 +180,13 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%gcc@7:", when="+cuda ^cuda@:9.1 target=x86_64:")
     conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=x86_64:")
     conflicts("%gcc@9:", when="+cuda ^cuda@:10.2.89 target=x86_64:")
+    conflicts("%pgi@:14.8", when="+cuda ^cuda@:7.0.27 target=x86_64:")
+    conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
+    conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
+    conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
+    conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10.0 target=x86_64:")
+    conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
+    conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
     conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")
     conflicts("%clang@:3.7,4:", when="+cuda ^cuda@8.0:9.0 target=x86_64:")
     conflicts("%clang@:3.7,4.1:", when="+cuda ^cuda@9.1 target=x86_64:")
@@ -205,6 +212,9 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=ppc64le:")
     conflicts("%gcc@9:", when="+cuda ^cuda@:10.1.243 target=ppc64le:")
     # officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
+    conflicts("%pgi", when="+cuda ^cuda@:8 target=ppc64le:")
+    conflicts("%pgi@:16", when="+cuda ^cuda@:9.1.185 target=ppc64le:")
+    conflicts("%pgi@:17", when="+cuda ^cuda@:10 target=ppc64le:")
     conflicts("%clang@4:", when="+cuda ^cuda@:9.0.176 target=ppc64le:")
     conflicts("%clang@5:", when="+cuda ^cuda@:9.1 target=ppc64le:")
     conflicts("%clang@6:", when="+cuda ^cuda@:9.2 target=ppc64le:")
@@ -231,11 +241,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%intel@19.2:", when="+cuda ^cuda@:11.1.0")
     conflicts("%intel@2021:", when="+cuda ^cuda@:11.4.0")

-    # ARM
-    # https://github.com/spack/spack/pull/39666#issuecomment-2377609263
-    # Might need to be expanded to other gcc versions
-    conflicts("%gcc@13.2.0", when="+cuda ^cuda@:12.4 target=aarch64:")
-
     # XL is mostly relevant for ppc64le Linux
     conflicts("%xl@:12,14:", when="+cuda ^cuda@:9.1")
     conflicts("%xl@:12,14:15,17:", when="+cuda ^cuda@9.2")
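All of the cuda.py constraints above are guarded by ``+cuda``, so they only bind for packages that mix in CudaPackage and enable its variant. For reference, a sketch of how such a package consumes the ``cuda_arch`` variant that the ``depends_on``/``conflicts`` directives constrain — the class name is hypothetical:

.. code-block:: python

    from spack.package import *


    class MySolver(CMakePackage, CudaPackage):
        """Hypothetical package using the CudaPackage mixin."""

        def cmake_args(self):
            args = []
            if self.spec.satisfies("+cuda"):
                # cuda_arch is the multi-valued variant declared by CudaPackage
                arch_list = self.spec.variants["cuda_arch"].value
                args.append(self.define("CMAKE_CUDA_ARCHITECTURES", arch_list))
            return args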
--- a/lib/spack/spack/build_systems/generic.py
+++ b/lib/spack/spack/build_systems/generic.py
@@ -7,9 +7,8 @@
 import spack.builder
 import spack.directives
 import spack.package_base
-import spack.phase_callbacks

-from ._checks import BuilderWithDefaults, apply_macos_rpath_fixups, execute_install_time_tests
+from ._checks import BaseBuilder, apply_macos_rpath_fixups, execute_install_time_tests


 class Package(spack.package_base.PackageBase):
@@ -27,7 +26,7 @@ class Package(spack.package_base.PackageBase):


 @spack.builder.builder("generic")
-class GenericBuilder(BuilderWithDefaults):
+class GenericBuilder(BaseBuilder):
     """A builder for a generic build system, that require packagers
     to implement an "install" phase.
     """
@@ -45,7 +44,7 @@ class GenericBuilder(BuilderWithDefaults):
     install_time_test_callbacks = []

     # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
-    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
+    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)

     # unconditionally perform any post-install phase tests
-    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+    spack.builder.run_after("install")(execute_install_time_tests)
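generic.py shows the callback mechanism in miniature: a builder registers check functions to run before or after a phase, either functionally (as above) or as a decorator on a method. A sketch using the right-hand side's ``spack.builder`` spelling — the builder name, phase list, and message are invented for illustration:

.. code-block:: python

    import llnl.util.tty as tty

    import spack.builder

    from ._checks import BaseBuilder


    @spack.builder.builder("generic")
    class NoisyGenericBuilder(BaseBuilder):
        """Hypothetical builder illustrating phase callbacks."""

        phases = ("install",)

        # decorator form: runs once the "install" phase has finished
        @spack.builder.run_after("install")
        def announce(self):
            tty.msg("install phase finished for {0}".format(self.pkg.name))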
--- a/lib/spack/spack/build_systems/go.py
+++ b/lib/spack/spack/build_systems/go.py
@@ -7,11 +7,10 @@

 import spack.builder
 import spack.package_base
-import spack.phase_callbacks
 from spack.directives import build_system, extends
 from spack.multimethod import when

-from ._checks import BuilderWithDefaults, execute_install_time_tests
+from ._checks import BaseBuilder, execute_install_time_tests


 class GoPackage(spack.package_base.PackageBase):
@@ -33,7 +32,7 @@ class GoPackage(spack.package_base.PackageBase):


 @spack.builder.builder("go")
-class GoBuilder(BuilderWithDefaults):
+class GoBuilder(BaseBuilder):
     """The Go builder encodes the most common way of building software with
     a golang go.mod file. It has two phases that can be overridden, if need be:

@@ -45,27 +44,16 @@ class GoBuilder(BuilderWithDefaults):
     +-----------------------------------------------+--------------------+
     | **Method**                                    | **Purpose**        |
     +===============================================+====================+
-    | :py:attr:`~.GoBuilder.build_args`             | Specify arguments  |
+    | :py:meth:`~.GoBuilder.build_args`             | Specify arguments  |
     |                                               | to ``go build``    |
     +-----------------------------------------------+--------------------+
-    | :py:attr:`~.GoBuilder.check_args`             | Specify arguments  |
+    | :py:meth:`~.GoBuilder.check_args`             | Specify arguments  |
     |                                               | to ``go test``     |
     +-----------------------------------------------+--------------------+
     """

     phases = ("build", "install")

-    #: Names associated with package methods in the old build-system format
-    legacy_methods = ("check", "installcheck")
-
-    #: Names associated with package attributes in the old build-system format
-    legacy_attributes = (
-        "build_args",
-        "check_args",
-        "build_directory",
-        "install_time_test_callbacks",
-    )
-
     #: Callback names for install-time test
     install_time_test_callbacks = ["check"]

@@ -100,7 +88,7 @@ def install(self, pkg, spec, prefix):
         fs.mkdirp(prefix.bin)
         fs.install(pkg.name, prefix.bin)

-    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+    spack.builder.run_after("install")(execute_install_time_tests)

     def check(self):
         """Run ``go test .`` in the source directory"""
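The go.py hunk drops the ``legacy_*`` compatibility tables and re-labels ``build_args``/``check_args`` in the docstring; the override points themselves are unchanged. A package-side sketch — the tool name and flags are placeholders, and it assumes the usual forwarding of these attributes from the package to its builder:

.. code-block:: python

    from spack.package import *


    class MyGoTool(GoPackage):
        """Hypothetical Go package."""

        @property
        def build_args(self):
            # replaces GoBuilder's default arguments to "go build"
            return ["-ldflags", "-s -w"]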
--- a/lib/spack/spack/build_systems/intel.py
+++ b/lib/spack/spack/build_systems/intel.py
@@ -23,9 +23,8 @@
 )

 import spack.error
-import spack.phase_callbacks
 from spack.build_environment import dso_suffix
-from spack.error import InstallError
+from spack.package_base import InstallError
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
 from spack.util.prefix import Prefix
@@ -1163,7 +1162,7 @@ def _determine_license_type(self):
         debug_print(license_type)
         return license_type

-    @spack.phase_callbacks.run_before("install")
+    @spack.builder.run_before("install")
     def configure(self):
         """Generates the silent.cfg file to pass to installer.sh.

@@ -1250,7 +1249,7 @@ def install(self, spec, prefix):
         for f in glob.glob("%s/intel*log" % tmpdir):
             install(f, dst)

-    @spack.phase_callbacks.run_after("install")
+    @spack.builder.run_after("install")
     def validate_install(self):
         # Sometimes the installer exits with an error but doesn't pass a
         # non-zero exit code to spack. Check for the existence of a 'bin'
@@ -1258,7 +1257,7 @@ def validate_install(self):
         if not os.path.exists(self.prefix.bin):
             raise InstallError("The installer has failed to install anything.")

-    @spack.phase_callbacks.run_after("install")
+    @spack.builder.run_after("install")
     def configure_rpath(self):
         if "+rpath" not in self.spec:
             return
@@ -1276,7 +1275,7 @@ def configure_rpath(self):
         with open(compiler_cfg, "w") as fh:
             fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir))

-    @spack.phase_callbacks.run_after("install")
+    @spack.builder.run_after("install")
     def configure_auto_dispatch(self):
         if self._has_compilers:
             if "auto_dispatch=none" in self.spec:
@@ -1300,7 +1299,7 @@ def configure_auto_dispatch(self):
         with open(compiler_cfg, "a") as fh:
             fh.write("-ax{0}\n".format(",".join(ad)))

-    @spack.phase_callbacks.run_after("install")
+    @spack.builder.run_after("install")
     def filter_compiler_wrappers(self):
         if ("+mpi" in self.spec or self.provides("mpi")) and "~newdtags" in self.spec:
             bin_dir = self.component_bin_dir("mpi")
@@ -1308,7 +1307,7 @@ def filter_compiler_wrappers(self):
             f = os.path.join(bin_dir, f)
             filter_file("-Xlinker --enable-new-dtags", " ", f, string=True)

-    @spack.phase_callbacks.run_after("install")
+    @spack.builder.run_after("install")
     def uninstall_ism(self):
         # The "Intel(R) Software Improvement Program" [ahem] gets installed,
         # apparently regardless of PHONEHOME_SEND_USAGE_DATA.
@@ -1340,7 +1339,7 @@ def base_lib_dir(self):
         debug_print(d)
         return d

-    @spack.phase_callbacks.run_after("install")
+    @spack.builder.run_after("install")
     def modify_LLVMgold_rpath(self):
         """Add libimf.so and other required libraries to the RUNPATH of LLVMgold.so.

--- a/lib/spack/spack/build_systems/makefile.py
+++ b/lib/spack/spack/build_systems/makefile.py
@@ -8,14 +8,11 @@

 import spack.builder
 import spack.package_base
-import spack.phase_callbacks
-import spack.spec
-import spack.util.prefix
 from spack.directives import build_system, conflicts, depends_on
 from spack.multimethod import when

 from ._checks import (
-    BuilderWithDefaults,
+    BaseBuilder,
     apply_macos_rpath_fixups,
     execute_build_time_tests,
     execute_install_time_tests,
@@ -39,7 +36,7 @@ class MakefilePackage(spack.package_base.PackageBase):


 @spack.builder.builder("makefile")
-class MakefileBuilder(BuilderWithDefaults):
+class MakefileBuilder(BaseBuilder):
     """The Makefile builder encodes the most common way of building software with
     Makefiles. It has three phases that can be overridden, if need be:

@@ -94,50 +91,35 @@ class MakefileBuilder(BuilderWithDefaults):
     install_time_test_callbacks = ["installcheck"]

     @property
-    def build_directory(self) -> str:
+    def build_directory(self):
         """Return the directory containing the main Makefile."""
         return self.pkg.stage.source_path

-    def edit(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def edit(self, pkg, spec, prefix):
         """Edit the Makefile before calling make. The default is a no-op."""
         pass

-    def build(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def build(self, pkg, spec, prefix):
         """Run "make" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.make(*self.build_targets)

-    def install(
-        self,
-        pkg: spack.package_base.PackageBase,
-        spec: spack.spec.Spec,
-        prefix: spack.util.prefix.Prefix,
-    ) -> None:
+    def install(self, pkg, spec, prefix):
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
             pkg.module.make(*self.install_targets)

-    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
+    spack.builder.run_after("build")(execute_build_time_tests)

-    def check(self) -> None:
+    def check(self):
         """Run "make" on the ``test`` and ``check`` targets, if found."""
         with fs.working_dir(self.build_directory):
             self.pkg._if_make_target_execute("test")
             self.pkg._if_make_target_execute("check")

-    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+    spack.builder.run_after("install")(execute_install_time_tests)

-    def installcheck(self) -> None:
+    def installcheck(self):
         """Searches the Makefile for an ``installcheck`` target
         and runs it if found.
         """
@@ -145,4 +127,4 @@ def installcheck(self) -> None:
         self.pkg._if_make_target_execute("installcheck")

     # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
-    spack.phase_callbacks.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
+    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
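For makefile.py only the annotations change: the workflow stays edit, build, install, with ``edit()`` as the customization hook for Makefiles that were not written with configurable prefixes in mind. A typical package-side override — the file layout and variable name are illustrative:

.. code-block:: python

    from spack.package import *


    class MyMakeTool(MakefilePackage):
        """Hypothetical Makefile-based package."""

        def edit(self, spec, prefix):
            # point the stock Makefile at the Spack prefix before "make" runs
            makefile = FileFilter("Makefile")
            makefile.filter(r"^PREFIX\s*=.*", "PREFIX = {0}".format(prefix))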
--- a/lib/spack/spack/build_systems/maven.py
+++ b/lib/spack/spack/build_systems/maven.py
@@ -10,7 +10,7 @@
 from spack.multimethod import when
 from spack.util.executable import which

-from ._checks import BuilderWithDefaults
+from ._checks import BaseBuilder


 class MavenPackage(spack.package_base.PackageBase):
@@ -34,7 +34,7 @@ class MavenPackage(spack.package_base.PackageBase):


 @spack.builder.builder("maven")
-class MavenBuilder(BuilderWithDefaults):
+class MavenBuilder(BaseBuilder):
     """The Maven builder encodes the default way to build software with Maven.
     It has two phases that can be overridden, if need be:

@@ -9,13 +9,10 @@
|
|||||||
|
|
||||||
import spack.builder
|
import spack.builder
|
||||||
import spack.package_base
|
import spack.package_base
|
||||||
import spack.phase_callbacks
|
|
||||||
import spack.spec
|
|
||||||
import spack.util.prefix
|
|
||||||
from spack.directives import build_system, conflicts, depends_on, variant
|
from spack.directives import build_system, conflicts, depends_on, variant
|
||||||
from spack.multimethod import when
|
from spack.multimethod import when
|
||||||
|
|
||||||
from ._checks import BuilderWithDefaults, execute_build_time_tests
|
from ._checks import BaseBuilder, execute_build_time_tests
|
||||||
|
|
||||||
|
|
||||||
class MesonPackage(spack.package_base.PackageBase):
|
class MesonPackage(spack.package_base.PackageBase):
|
||||||
@@ -65,7 +62,7 @@ def flags_to_build_system_args(self, flags):
|
|||||||
|
|
||||||
|
|
||||||
@spack.builder.builder("meson")
|
@spack.builder.builder("meson")
|
||||||
class MesonBuilder(BuilderWithDefaults):
|
class MesonBuilder(BaseBuilder):
|
||||||
"""The Meson builder encodes the default way to build software with Meson.
|
"""The Meson builder encodes the default way to build software with Meson.
|
||||||
The builder has three phases that can be overridden, if need be:
|
The builder has three phases that can be overridden, if need be:
|
||||||
|
|
||||||
@@ -115,7 +112,7 @@ def archive_files(self):
|
|||||||
return [os.path.join(self.build_directory, "meson-logs", "meson-log.txt")]
|
return [os.path.join(self.build_directory, "meson-logs", "meson-log.txt")]
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def root_mesonlists_dir(self) -> str:
|
def root_mesonlists_dir(self):
|
||||||
"""Relative path to the directory containing meson.build
|
"""Relative path to the directory containing meson.build
|
||||||
|
|
||||||
This path is relative to the root of the extracted tarball,
|
This path is relative to the root of the extracted tarball,
|
||||||
@@ -124,7 +121,7 @@ def root_mesonlists_dir(self) -> str:
|
|||||||
return self.pkg.stage.source_path
|
return self.pkg.stage.source_path
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def std_meson_args(self) -> List[str]:
|
def std_meson_args(self):
|
||||||
"""Standard meson arguments provided as a property for convenience
|
"""Standard meson arguments provided as a property for convenience
|
||||||
of package writers.
|
of package writers.
|
||||||
"""
|
"""
|
||||||
@@ -135,7 +132,7 @@ def std_meson_args(self) -> List[str]:
|
|||||||
return std_meson_args
|
return std_meson_args
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def std_args(pkg) -> List[str]:
|
def std_args(pkg):
|
||||||
"""Standard meson arguments for a generic package."""
|
"""Standard meson arguments for a generic package."""
|
||||||
try:
|
try:
|
||||||
build_type = pkg.spec.variants["buildtype"].value
|
build_type = pkg.spec.variants["buildtype"].value
|
||||||
@@ -175,7 +172,7 @@ def build_directory(self):
|
|||||||
"""Directory to use when building the package."""
|
"""Directory to use when building the package."""
|
||||||
return os.path.join(self.pkg.stage.path, self.build_dirname)
|
return os.path.join(self.pkg.stage.path, self.build_dirname)
|
||||||
|
|
||||||
def meson_args(self) -> List[str]:
|
def meson_args(self):
|
||||||
"""List of arguments that must be passed to meson, except:
|
"""List of arguments that must be passed to meson, except:
|
||||||
|
|
||||||
* ``--prefix``
|
* ``--prefix``
|
||||||
@@ -188,12 +185,7 @@ def meson_args(self) -> List[str]:
|
|||||||
"""
|
"""
|
||||||
return []
|
return []
|
||||||
|
|
||||||
def meson(
|
def meson(self, pkg, spec, prefix):
|
||||||
self,
|
|
||||||
pkg: spack.package_base.PackageBase,
|
|
||||||
spec: spack.spec.Spec,
|
|
||||||
prefix: spack.util.prefix.Prefix,
|
|
||||||
) -> None:
|
|
||||||
"""Run ``meson`` in the build directory"""
|
"""Run ``meson`` in the build directory"""
|
||||||
options = []
|
options = []
|
||||||
if self.spec["meson"].satisfies("@0.64:"):
|
if self.spec["meson"].satisfies("@0.64:"):
|
||||||
@@ -204,31 +196,21 @@ def meson(
|
|||||||
with fs.working_dir(self.build_directory, create=True):
|
with fs.working_dir(self.build_directory, create=True):
|
||||||
pkg.module.meson(*options)
|
pkg.module.meson(*options)
|
||||||
|
|
||||||
def build(
|
def build(self, pkg, spec, prefix):
|
||||||
self,
|
|
||||||
pkg: spack.package_base.PackageBase,
|
|
||||||
spec: spack.spec.Spec,
|
|
||||||
prefix: spack.util.prefix.Prefix,
|
|
||||||
) -> None:
|
|
||||||
"""Make the build targets"""
|
"""Make the build targets"""
|
||||||
options = ["-v"]
|
options = ["-v"]
|
||||||
options += self.build_targets
|
options += self.build_targets
|
||||||
with fs.working_dir(self.build_directory):
|
with fs.working_dir(self.build_directory):
|
||||||
pkg.module.ninja(*options)
|
pkg.module.ninja(*options)
|
||||||
|
|
||||||
def install(
|
def install(self, pkg, spec, prefix):
|
||||||
self,
|
|
||||||
pkg: spack.package_base.PackageBase,
|
|
||||||
spec: spack.spec.Spec,
|
|
||||||
prefix: spack.util.prefix.Prefix,
|
|
||||||
) -> None:
|
|
||||||
"""Make the install targets"""
|
"""Make the install targets"""
|
||||||
with fs.working_dir(self.build_directory):
|
with fs.working_dir(self.build_directory):
|
||||||
pkg.module.ninja(*self.install_targets)
|
pkg.module.ninja(*self.install_targets)
|
||||||
|
|
||||||
spack.phase_callbacks.run_after("build")(execute_build_time_tests)
|
spack.builder.run_after("build")(execute_build_time_tests)
|
||||||
|
|
||||||
def check(self) -> None:
|
def check(self):
|
||||||
"""Search Meson-generated files for the target ``test`` and run it if found."""
|
"""Search Meson-generated files for the target ``test`` and run it if found."""
|
||||||
with fs.working_dir(self.build_directory):
|
with fs.working_dir(self.build_directory):
|
||||||
self.pkg._if_ninja_target_execute("test")
|
self.pkg._if_ninja_target_execute("test")
|
||||||
|
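Note: a package that uses this build system normally overrides only meson_args(); std_args() keeps handling --prefix and --buildtype. A minimal sketch, with a hypothetical package name and option (not taken from this diff):

    # Hypothetical package.py built with Meson.
    from spack.package import *


    class Example(MesonPackage):
        """Example package built with Meson."""

        homepage = "https://example.org"
        url = "https://example.org/example-1.0.tar.gz"

        version("1.0", sha256="0" * 64)

        def meson_args(self):
            # Everything except --prefix and --buildtype, which std_args adds.
            return ["-Dtests=disabled"]
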
@@ -10,7 +10,7 @@
 import spack.package_base
 from spack.directives import build_system, conflicts

-from ._checks import BuilderWithDefaults
+from ._checks import BaseBuilder


 class MSBuildPackage(spack.package_base.PackageBase):
@@ -26,7 +26,7 @@ class MSBuildPackage(spack.package_base.PackageBase):


 @spack.builder.builder("msbuild")
-class MSBuildBuilder(BuilderWithDefaults):
+class MSBuildBuilder(BaseBuilder):
     """The MSBuild builder encodes the most common way of building software with
     Mircosoft's MSBuild tool. It has two phases that can be overridden, if need be:

@@ -10,7 +10,7 @@
 import spack.package_base
 from spack.directives import build_system, conflicts

-from ._checks import BuilderWithDefaults
+from ._checks import BaseBuilder


 class NMakePackage(spack.package_base.PackageBase):
@@ -26,7 +26,7 @@ class NMakePackage(spack.package_base.PackageBase):


 @spack.builder.builder("nmake")
-class NMakeBuilder(BuilderWithDefaults):
+class NMakeBuilder(BaseBuilder):
     """The NMake builder encodes the most common way of building software with
     Mircosoft's NMake tool. It has two phases that can be overridden, if need be:

@@ -7,7 +7,7 @@
 from spack.directives import build_system, extends
 from spack.multimethod import when

-from ._checks import BuilderWithDefaults
+from ._checks import BaseBuilder


 class OctavePackage(spack.package_base.PackageBase):
@@ -29,7 +29,7 @@ class OctavePackage(spack.package_base.PackageBase):


 @spack.builder.builder("octave")
-class OctaveBuilder(BuilderWithDefaults):
+class OctaveBuilder(BaseBuilder):
    """The octave builder provides the following phases that can be overridden:

    1. :py:meth:`~.OctaveBuilder.install`

@@ -15,7 +15,7 @@
 import spack.util.path
 from spack.build_environment import dso_suffix
 from spack.directives import conflicts, license, redistribute, variant
-from spack.error import InstallError
+from spack.package_base import InstallError
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable

@@ -255,7 +255,7 @@ def libs(self):
         return find_libraries("*", root=self.component_prefix.lib, recursive=not self.v2_layout)


-class IntelOneApiLibraryPackageWithSdk(IntelOneApiLibraryPackage):
+class IntelOneApiLibraryPackageWithSdk(IntelOneApiPackage):
     """Base class for Intel oneAPI library packages with SDK components.

     Contains some convenient default implementations for libraries

@@ -10,12 +10,11 @@

 import spack.builder
 import spack.package_base
-import spack.phase_callbacks
 from spack.directives import build_system, extends
 from spack.install_test import SkipTest, test_part
 from spack.util.executable import Executable

-from ._checks import BuilderWithDefaults, execute_build_time_tests
+from ._checks import BaseBuilder, execute_build_time_tests


 class PerlPackage(spack.package_base.PackageBase):
@@ -85,7 +84,7 @@ def test_use(self):


 @spack.builder.builder("perl")
-class PerlBuilder(BuilderWithDefaults):
+class PerlBuilder(BaseBuilder):
     """The perl builder provides four phases that can be overridden, if required:

     1. :py:meth:`~.PerlBuilder.configure`
@@ -164,7 +163,7 @@ def configure(self, pkg, spec, prefix):
     # Build.PL may be too long causing the build to fail. Patching the shebang
     # does not happen until after install so set '/usr/bin/env perl' here in
     # the Build script.
-    @spack.phase_callbacks.run_after("configure")
+    @spack.builder.run_after("configure")
     def fix_shebang(self):
         if self.build_method == "Build.PL":
             pattern = "#!{0}".format(self.spec["perl"].command.path)
@@ -176,7 +175,7 @@ def build(self, pkg, spec, prefix):
         self.build_executable()

     # Ensure that tests run after build (if requested):
-    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
+    spack.builder.run_after("build")(execute_build_time_tests)

     def check(self):
         """Runs built-in tests of a Perl package."""

@@ -24,9 +24,6 @@
 import spack.detection
 import spack.multimethod
 import spack.package_base
-import spack.phase_callbacks
-import spack.platforms
-import spack.repo
 import spack.spec
 import spack.store
 from spack.directives import build_system, depends_on, extends
@@ -35,7 +32,7 @@
 from spack.spec import Spec
 from spack.util.prefix import Prefix

-from ._checks import BuilderWithDefaults, execute_install_time_tests
+from ._checks import BaseBuilder, execute_install_time_tests


 def _flatten_dict(dictionary: Mapping[str, object]) -> Iterable[str]:
@@ -340,7 +337,7 @@ class PythonPackage(PythonExtension):
     legacy_buildsystem = "python_pip"

     #: Callback names for install-time test
-    install_time_test_callbacks = ["test_imports"]
+    install_time_test_callbacks = ["test"]

     build_system("python_pip")

@@ -375,7 +372,7 @@ def list_url(cls) -> Optional[str]:  # type: ignore[override]
         return None

     @property
-    def python_spec(self) -> Spec:
+    def python_spec(self):
         """Get python-venv if it exists or python otherwise."""
         python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
         return python
@@ -426,11 +423,11 @@ def libs(self) -> LibraryList:


 @spack.builder.builder("python_pip")
-class PythonPipBuilder(BuilderWithDefaults):
+class PythonPipBuilder(BaseBuilder):
     phases = ("install",)

     #: Names associated with package methods in the old build-system format
-    legacy_methods = ("test_imports",)
+    legacy_methods = ("test",)

     #: Same as legacy_methods, but the signature is different
     legacy_long_methods = ("install_options", "global_options", "config_settings")
@@ -439,7 +436,7 @@ class PythonPipBuilder(BuilderWithDefaults):
     legacy_attributes = ("archive_files", "build_directory", "install_time_test_callbacks")

     #: Callback names for install-time test
-    install_time_test_callbacks = ["test_imports"]
+    install_time_test_callbacks = ["test"]

     @staticmethod
     def std_args(cls) -> List[str]:
@@ -544,4 +541,4 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
         with fs.working_dir(self.build_directory):
             pip(*args)

-    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+    spack.builder.run_after("install")(execute_install_time_tests)

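Note: install_time_test_callbacks names methods that execute_install_time_tests invokes after the install phase when tests are requested; the left side of this hunk points it at test_imports, the right side at the older test entry point. A hedged sketch of such a package-level callback (the package and module names are illustrative):

    # Hypothetical PythonPackage relying on the install-time test callback.
    class PyExample(PythonPackage):
        version("1.0", sha256="0" * 64)

        def test_imports(self):
            # Runs automatically after "install" when tests are enabled,
            # because the builder lists it in install_time_test_callbacks.
            python = self.spec["python"].command
            python("-c", "import example")
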
@@ -6,10 +6,9 @@

 import spack.builder
 import spack.package_base
-import spack.phase_callbacks
 from spack.directives import build_system, depends_on

-from ._checks import BuilderWithDefaults, execute_build_time_tests
+from ._checks import BaseBuilder, execute_build_time_tests


 class QMakePackage(spack.package_base.PackageBase):
@@ -31,7 +30,7 @@ class QMakePackage(spack.package_base.PackageBase):


 @spack.builder.builder("qmake")
-class QMakeBuilder(BuilderWithDefaults):
+class QMakeBuilder(BaseBuilder):
     """The qmake builder provides three phases that can be overridden:

     1. :py:meth:`~.QMakeBuilder.qmake`
@@ -82,4 +81,4 @@ def check(self):
         with working_dir(self.build_directory):
             self.pkg._if_make_target_execute("check")

-    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
+    spack.builder.run_after("build")(execute_build_time_tests)

|
|||||||
@@ -11,9 +11,9 @@
|
|||||||
|
|
||||||
import spack.builder
|
import spack.builder
|
||||||
from spack.build_environment import SPACK_NO_PARALLEL_MAKE
|
from spack.build_environment import SPACK_NO_PARALLEL_MAKE
|
||||||
from spack.config import determine_number_of_jobs
|
|
||||||
from spack.directives import build_system, extends, maintainers
|
from spack.directives import build_system, extends, maintainers
|
||||||
from spack.package_base import PackageBase
|
from spack.package_base import PackageBase
|
||||||
|
from spack.util.cpus import determine_number_of_jobs
|
||||||
from spack.util.environment import env_flag
|
from spack.util.environment import env_flag
|
||||||
from spack.util.executable import Executable, ProcessError
|
from spack.util.executable import Executable, ProcessError
|
||||||
|
|
||||||
|
@@ -8,7 +8,7 @@
 import spack.package_base
 from spack.directives import build_system, extends, maintainers

-from ._checks import BuilderWithDefaults
+from ._checks import BaseBuilder


 class RubyPackage(spack.package_base.PackageBase):
@@ -28,7 +28,7 @@ class RubyPackage(spack.package_base.PackageBase):


 @spack.builder.builder("ruby")
-class RubyBuilder(BuilderWithDefaults):
+class RubyBuilder(BaseBuilder):
     """The Ruby builder provides two phases that can be overridden if required:

     #. :py:meth:`~.RubyBuilder.build`

@@ -4,10 +4,9 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import spack.builder
 import spack.package_base
-import spack.phase_callbacks
 from spack.directives import build_system, depends_on

-from ._checks import BuilderWithDefaults, execute_build_time_tests
+from ._checks import BaseBuilder, execute_build_time_tests


 class SConsPackage(spack.package_base.PackageBase):
@@ -29,7 +28,7 @@ class SConsPackage(spack.package_base.PackageBase):


 @spack.builder.builder("scons")
-class SConsBuilder(BuilderWithDefaults):
+class SConsBuilder(BaseBuilder):
     """The Scons builder provides the following phases that can be overridden:

     1. :py:meth:`~.SConsBuilder.build`
@@ -80,4 +79,4 @@ def build_test(self):
         """
         pass

-    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
+    spack.builder.run_after("build")(execute_build_time_tests)

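Note: the run_after("build")(execute_build_time_tests) line closing these builder classes is just the decorator applied as a plain call, so one helper shared across build systems can be registered without redefining it in each class:

    # The call form used at the bottom of these builder classes ...
    spack.builder.run_after("build")(execute_build_time_tests)

    # ... is the decorator syntax applied by hand, equivalent to writing
    #
    #     @spack.builder.run_after("build")
    #     def execute_build_time_tests(builder): ...
    #
    # at the helper's definition; either way the callback is staged and then
    # attached to the class by the metaclass machinery in spack.builder.
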
|
|||||||
@@ -11,12 +11,11 @@
|
|||||||
import spack.builder
|
import spack.builder
|
||||||
import spack.install_test
|
import spack.install_test
|
||||||
import spack.package_base
|
import spack.package_base
|
||||||
import spack.phase_callbacks
|
|
||||||
from spack.directives import build_system, depends_on, extends
|
from spack.directives import build_system, depends_on, extends
|
||||||
from spack.multimethod import when
|
from spack.multimethod import when
|
||||||
from spack.util.executable import Executable
|
from spack.util.executable import Executable
|
||||||
|
|
||||||
from ._checks import BuilderWithDefaults, execute_install_time_tests
|
from ._checks import BaseBuilder, execute_install_time_tests
|
||||||
|
|
||||||
|
|
||||||
class SIPPackage(spack.package_base.PackageBase):
|
class SIPPackage(spack.package_base.PackageBase):
|
||||||
@@ -104,7 +103,7 @@ def test_imports(self):
|
|||||||
|
|
||||||
|
|
||||||
@spack.builder.builder("sip")
|
@spack.builder.builder("sip")
|
||||||
class SIPBuilder(BuilderWithDefaults):
|
class SIPBuilder(BaseBuilder):
|
||||||
"""The SIP builder provides the following phases that can be overridden:
|
"""The SIP builder provides the following phases that can be overridden:
|
||||||
|
|
||||||
* configure
|
* configure
|
||||||
@@ -171,4 +170,4 @@ def install_args(self):
|
|||||||
"""Arguments to pass to install."""
|
"""Arguments to pass to install."""
|
||||||
return []
|
return []
|
||||||
|
|
||||||
spack.phase_callbacks.run_after("install")(execute_install_time_tests)
|
spack.builder.run_after("install")(execute_install_time_tests)
|
||||||
|
@@ -6,10 +6,9 @@

 import spack.builder
 import spack.package_base
-import spack.phase_callbacks
 from spack.directives import build_system, depends_on

-from ._checks import BuilderWithDefaults, execute_build_time_tests, execute_install_time_tests
+from ._checks import BaseBuilder, execute_build_time_tests, execute_install_time_tests


 class WafPackage(spack.package_base.PackageBase):
@@ -31,7 +30,7 @@ class WafPackage(spack.package_base.PackageBase):


 @spack.builder.builder("waf")
-class WafBuilder(BuilderWithDefaults):
+class WafBuilder(BaseBuilder):
     """The WAF builder provides the following phases that can be overridden:

     * configure
@@ -137,7 +136,7 @@ def build_test(self):
         """
         pass

-    spack.phase_callbacks.run_after("build")(execute_build_time_tests)
+    spack.builder.run_after("build")(execute_build_time_tests)

     def install_test(self):
         """Run unit tests after install.
@@ -147,4 +146,4 @@ def install_test(self):
         """
         pass

-    spack.phase_callbacks.run_after("install")(execute_install_time_tests)
+    spack.builder.run_after("install")(execute_install_time_tests)

@@ -6,30 +6,43 @@
 import collections.abc
 import copy
 import functools
-from typing import Dict, List, Optional, Tuple, Type
+from typing import List, Optional, Tuple

-import spack.error
+from llnl.util import lang

+import spack.build_environment
 import spack.multimethod
-import spack.package_base
-import spack.phase_callbacks
-import spack.repo
-import spack.spec
-import spack.util.environment

 #: Builder classes, as registered by the "builder" decorator
-BUILDER_CLS: Dict[str, Type["Builder"]] = {}
+BUILDER_CLS = {}

+#: An object of this kind is a shared global state used to collect callbacks during
+#: class definition time, and is flushed when the class object is created at the end
+#: of the class definition
+#:
+#: Args:
+#:     attribute_name (str): name of the attribute that will be attached to the builder
+#:     callbacks (list): container used to temporarily aggregate the callbacks
+CallbackTemporaryStage = collections.namedtuple(
+    "CallbackTemporaryStage", ["attribute_name", "callbacks"]
+)
+
+#: Shared global state to aggregate "@run_before" callbacks
+_RUN_BEFORE = CallbackTemporaryStage(attribute_name="run_before_callbacks", callbacks=[])
+#: Shared global state to aggregate "@run_after" callbacks
+_RUN_AFTER = CallbackTemporaryStage(attribute_name="run_after_callbacks", callbacks=[])
+
 #: Map id(pkg) to a builder, to avoid creating multiple
 #: builders for the same package object.
-_BUILDERS: Dict[int, "Builder"] = {}
+_BUILDERS = {}


-def builder(build_system_name: str):
+def builder(build_system_name):
     """Class decorator used to register the default builder
     for a given build-system.

     Args:
-        build_system_name: name of the build-system
+        build_system_name (str): name of the build-system
     """

     def _decorator(cls):
@@ -40,9 +53,13 @@ def _decorator(cls):
     return _decorator


-def create(pkg: spack.package_base.PackageBase) -> "Builder":
-    """Given a package object with an associated concrete spec, return the builder object that can
-    install it."""
+def create(pkg):
+    """Given a package object with an associated concrete spec,
+    return the builder object that can install it.
+
+    Args:
+        pkg (spack.package_base.PackageBase): package for which we want the builder
+    """
     if id(pkg) not in _BUILDERS:
         _BUILDERS[id(pkg)] = _create(pkg)
     return _BUILDERS[id(pkg)]
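Note: create memoizes by id(pkg), so repeated lookups during a single install return the same builder. A usage sketch, assuming pkg is a concretized package object:

    import spack.builder

    b1 = spack.builder.create(pkg)
    b2 = spack.builder.create(pkg)
    assert b1 is b2  # cached in _BUILDERS[id(pkg)]
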
@@ -57,15 +74,7 @@ def __call__(self, spec, prefix):
         return self.phase_fn(self.builder.pkg, spec, prefix)


-def get_builder_class(pkg, name: str) -> Optional[Type["Builder"]]:
-    """Return the builder class if a package module defines it."""
-    cls = getattr(pkg.module, name, None)
-    if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
-        return cls
-    return None
-
-
-def _create(pkg: spack.package_base.PackageBase) -> "Builder":
+def _create(pkg):
     """Return a new builder object for the package object being passed as argument.

     The function inspects the build-system used by the package object and try to:
@@ -85,15 +94,14 @@ class hierarchy (look at AspellDictPackage for an example of that)
     to look for build-related methods in the ``*Package``.

     Args:
-        pkg: package object for which we need a builder
+        pkg (spack.package_base.PackageBase): package object for which we need a builder
     """
     package_buildsystem = buildsystem_name(pkg)
     default_builder_cls = BUILDER_CLS[package_buildsystem]
     builder_cls_name = default_builder_cls.__name__
-    builder_class = get_builder_class(pkg, builder_cls_name)
-
-    if builder_class:
-        return builder_class(pkg)
+    builder_cls = getattr(pkg.module, builder_cls_name, None)
+    if builder_cls:
+        return builder_cls(pkg)

     # Specialized version of a given buildsystem can subclass some
     # base classes and specialize certain phases or methods or attributes.
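Note: the lookup by class name (get_builder_class on the left, the plain getattr on the right) is what lets a package.py override the default builder simply by defining a class with the expected name. A hedged sketch of such an override (hypothetical package):

    # Hypothetical package.py: because the module defines a class named
    # "CMakeBuilder", _create() returns it instead of the registered default.
    from spack.package import *
    from spack.build_systems import cmake


    class Example(CMakePackage):
        version("1.0", sha256="0" * 64)


    class CMakeBuilder(cmake.CMakeBuilder):
        def cmake_args(self):
            return ["-DBUILD_TESTING=OFF"]
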
@@ -150,8 +158,8 @@ def __forward(self, *args, **kwargs):
     # with the same name is defined in the Package, it will override this definition
     # (when _ForwardToBaseBuilder is initialized)
     for method_name in (
-        base_cls.phases  # type: ignore
-        + base_cls.legacy_methods  # type: ignore
+        base_cls.phases
+        + base_cls.legacy_methods
         + getattr(base_cls, "legacy_long_methods", tuple())
         + ("setup_build_environment", "setup_dependent_build_environment")
     ):
@@ -163,14 +171,14 @@ def __forward(self):

         return __forward

-    for attribute_name in base_cls.legacy_attributes:  # type: ignore
+    for attribute_name in base_cls.legacy_attributes:
         setattr(
             _ForwardToBaseBuilder,
             attribute_name,
             property(forward_property_to_getattr(attribute_name)),
         )

-    class Adapter(base_cls, metaclass=_PackageAdapterMeta):  # type: ignore
+    class Adapter(base_cls, metaclass=_PackageAdapterMeta):
         def __init__(self, pkg):
             # Deal with custom phases in packages here
             if hasattr(pkg, "phases"):
@@ -195,18 +203,99 @@ def setup_dependent_build_environment(self, env, dependent_spec):
     return Adapter(pkg)


-def buildsystem_name(pkg: spack.package_base.PackageBase) -> str:
+def buildsystem_name(pkg):
     """Given a package object with an associated concrete spec,
-    return the name of its build system."""
+    return the name of its build system.
+
+    Args:
+        pkg (spack.package_base.PackageBase): package for which we want
+            the build system name
+    """
     try:
         return pkg.spec.variants["build_system"].value
     except KeyError:
         # We are reading an old spec without the build_system variant
-        return pkg.legacy_buildsystem  # type: ignore
+        return pkg.legacy_buildsystem


+class PhaseCallbacksMeta(type):
+    """Permit to register arbitrary functions during class definition and run them
+    later, before or after a given install phase.
+
+    Each method decorated with ``run_before`` or ``run_after`` gets temporarily
+    stored in a global shared state when a class being defined is parsed by the Python
+    interpreter. At class definition time that temporary storage gets flushed and a list
+    of callbacks is attached to the class being defined.
+    """
+
+    def __new__(mcs, name, bases, attr_dict):
+        for temporary_stage in (_RUN_BEFORE, _RUN_AFTER):
+            staged_callbacks = temporary_stage.callbacks
+
+            # Here we have an adapter from an old-style package. This means there is no
+            # hierarchy of builders, and every callback that had to be combined between
+            # *Package and *Builder has been combined already by _PackageAdapterMeta
+            if name == "Adapter":
+                continue
+
+            # If we are here we have callbacks. To get a complete list, we accumulate all the
+            # callbacks from base classes, we deduplicate them, then prepend what we have
+            # registered here.
+            #
+            # The order should be:
+            # 1. Callbacks are registered in order within the same class
+            # 2. Callbacks defined in derived classes precede those defined in base
+            #    classes
+            callbacks_from_base = []
+            for base in bases:
+                current_callbacks = getattr(base, temporary_stage.attribute_name, None)
+                if not current_callbacks:
+                    continue
+                callbacks_from_base.extend(current_callbacks)
+            callbacks_from_base = list(lang.dedupe(callbacks_from_base))
+            # Set the callbacks in this class and flush the temporary stage
+            attr_dict[temporary_stage.attribute_name] = staged_callbacks[:] + callbacks_from_base
+            del temporary_stage.callbacks[:]
+
+        return super(PhaseCallbacksMeta, mcs).__new__(mcs, name, bases, attr_dict)
+
+    @staticmethod
+    def run_after(phase, when=None):
+        """Decorator to register a function for running after a given phase.
+
+        Args:
+            phase (str): phase after which the function must run.
+            when (str): condition under which the function is run (if None, it is always run).
+        """
+
+        def _decorator(fn):
+            key = (phase, when)
+            item = (key, fn)
+            _RUN_AFTER.callbacks.append(item)
+            return fn
+
+        return _decorator
+
+    @staticmethod
+    def run_before(phase, when=None):
+        """Decorator to register a function for running before a given phase.
+
+        Args:
+            phase (str): phase before which the function must run.
+            when (str): condition under which the function is run (if None, it is always run).
+        """
+
+        def _decorator(fn):
+            key = (phase, when)
+            item = (key, fn)
+            _RUN_BEFORE.callbacks.append(item)
+            return fn
+
+        return _decorator
+
+
 class BuilderMeta(
-    spack.phase_callbacks.PhaseCallbacksMeta,
+    PhaseCallbacksMeta,
     spack.multimethod.MultiMethodMeta,
     type(collections.abc.Sequence),  # type: ignore
 ):
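Note: the metaclass above is invisible to builder authors: decorating a method stages it in _RUN_BEFORE/_RUN_AFTER, and PhaseCallbacksMeta.__new__ flushes the stage onto the class being defined. A schematic sketch of the author-facing side (not a complete builder; the phases and conditions are illustrative):

    import os

    from spack.builder import Builder, run_after, run_before


    class ExampleBuilder(Builder):
        phases = ("configure", "build", "install")

        @run_before("configure")
        def ensure_sources(self):
            # Staged in _RUN_BEFORE while the class body executes, then
            # flushed onto ExampleBuilder by PhaseCallbacksMeta.__new__.
            assert os.path.exists("configure")

        @run_after("install", when="+docs")
        def check_docs(self):
            # The `when` condition is matched against the spec at run time.
            assert os.path.isdir(self.prefix.share)
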
@@ -301,12 +390,8 @@ def __new__(mcs, name, bases, attr_dict):
         )

         combine_callbacks = _PackageAdapterMeta.combine_callbacks
-        attr_dict[spack.phase_callbacks._RUN_BEFORE.attribute_name] = combine_callbacks(
-            spack.phase_callbacks._RUN_BEFORE.attribute_name
-        )
-        attr_dict[spack.phase_callbacks._RUN_AFTER.attribute_name] = combine_callbacks(
-            spack.phase_callbacks._RUN_AFTER.attribute_name
-        )
+        attr_dict[_RUN_BEFORE.attribute_name] = combine_callbacks(_RUN_BEFORE.attribute_name)
+        attr_dict[_RUN_AFTER.attribute_name] = combine_callbacks(_RUN_AFTER.attribute_name)

         return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict)
@@ -326,8 +411,8 @@ def __init__(self, name, builder):
         self.name = name
         self.builder = builder
         self.phase_fn = self._select_phase_fn()
-        self.run_before = self._make_callbacks(spack.phase_callbacks._RUN_BEFORE.attribute_name)
-        self.run_after = self._make_callbacks(spack.phase_callbacks._RUN_AFTER.attribute_name)
+        self.run_before = self._make_callbacks(_RUN_BEFORE.attribute_name)
+        self.run_after = self._make_callbacks(_RUN_AFTER.attribute_name)

     def _make_callbacks(self, callbacks_attribute):
         result = []
@@ -376,115 +461,29 @@ def _on_phase_start(self, instance):
         # If a phase has a matching stop_before_phase attribute,
         # stop the installation process raising a StopPhase
         if getattr(instance, "stop_before_phase", None) == self.name:
-            raise spack.error.StopPhase("Stopping before '{0}' phase".format(self.name))
+            raise spack.build_environment.StopPhase(
+                "Stopping before '{0}' phase".format(self.name)
+            )

     def _on_phase_exit(self, instance):
         # If a phase has a matching last_phase attribute,
         # stop the installation process raising a StopPhase
         if getattr(instance, "last_phase", None) == self.name:
-            raise spack.error.StopPhase("Stopping at '{0}' phase".format(self.name))
+            raise spack.build_environment.StopPhase("Stopping at '{0}' phase".format(self.name))

     def copy(self):
         return copy.deepcopy(self)


-class BaseBuilder(metaclass=BuilderMeta):
-    """An interface for builders, without any phases defined. This class is exposed in the package
-    API, so that packagers can create a single class to define ``setup_build_environment`` and
-    ``@run_before`` and ``@run_after`` callbacks that can be shared among different builders.
-
-    Example:
-
-    .. code-block:: python
-
-        class AnyBuilder(BaseBuilder):
-            @run_after("install")
-            def fixup_install(self):
-                # do something after the package is installed
-                pass
-
-            def setup_build_environment(self, env):
-                env.set("MY_ENV_VAR", "my_value")
-
-        class CMakeBuilder(cmake.CMakeBuilder, AnyBuilder):
-            pass
-
-        class AutotoolsBuilder(autotools.AutotoolsBuilder, AnyBuilder):
-            pass
-    """
-
-    def __init__(self, pkg: spack.package_base.PackageBase) -> None:
-        self.pkg = pkg
-
-    @property
-    def spec(self) -> spack.spec.Spec:
-        return self.pkg.spec
-
-    @property
-    def stage(self):
-        return self.pkg.stage
-
-    @property
-    def prefix(self):
-        return self.pkg.prefix
-
-    def setup_build_environment(
-        self, env: spack.util.environment.EnvironmentModifications
-    ) -> None:
-        """Sets up the build environment for a package.
-
-        This method will be called before the current package prefix exists in
-        Spack's store.
-
-        Args:
-            env: environment modifications to be applied when the package is built. Package authors
-                can call methods on it to alter the build environment.
-        """
-        if not hasattr(super(), "setup_build_environment"):
-            return
-        super().setup_build_environment(env)  # type: ignore
-
-    def setup_dependent_build_environment(
-        self, env: spack.util.environment.EnvironmentModifications, dependent_spec: spack.spec.Spec
-    ) -> None:
-        """Sets up the build environment of a package that depends on this one.
-
-        This is similar to ``setup_build_environment``, but it is used to modify the build
-        environment of a package that *depends* on this one.
-
-        This gives packages the ability to set environment variables for the build of the
-        dependent, which can be useful to provide search hints for headers or libraries if they are
-        not in standard locations.
-
-        This method will be called before the dependent package prefix exists in Spack's store.
-
-        Args:
-            env: environment modifications to be applied when the dependent package is built.
-                Package authors can call methods on it to alter the build environment.
-
-            dependent_spec: the spec of the dependent package about to be built. This allows the
-                extendee (self) to query the dependent's state. Note that *this* package's spec is
-                available as ``self.spec``
-        """
-        if not hasattr(super(), "setup_dependent_build_environment"):
-            return
-        super().setup_dependent_build_environment(env, dependent_spec)  # type: ignore
-
-    def __repr__(self):
-        fmt = "{name}{/hash:7}"
-        return f"{self.__class__.__name__}({self.spec.format(fmt)})"
-
-    def __str__(self):
-        fmt = "{name}{/hash:7}"
-        return f'"{self.__class__.__name__}" builder for "{self.spec.format(fmt)}"'
-
-
-class Builder(BaseBuilder, collections.abc.Sequence):
-    """A builder is a class that, given a package object (i.e. associated with concrete spec),
-    knows how to install it.
-
-    The builder behaves like a sequence, and when iterated over return the "phases" of the
-    installation in the correct order.
+class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
+    """A builder is a class that, given a package object (i.e. associated with
+    concrete spec), knows how to install it.
+
+    The builder behaves like a sequence, and when iterated over return the
+    "phases" of the installation in the correct order.
+
+    Args:
+        pkg (spack.package_base.PackageBase): package object to be built
     """

     #: Sequence of phases. Must be defined in derived classes
@@ -499,22 +498,99 @@ class Builder(BaseBuilder, collections.abc.Sequence):
     build_time_test_callbacks: List[str]
     install_time_test_callbacks: List[str]

-    #: List of glob expressions. Each expression must either be absolute or relative to the package
-    #: source path. Matching artifacts found at the end of the build process will be copied in the
-    #: same directory tree as _spack_build_logfile and _spack_build_envfile.
-    @property
-    def archive_files(self) -> List[str]:
-        return []
+    #: List of glob expressions. Each expression must either be
+    #: absolute or relative to the package source path.
+    #: Matching artifacts found at the end of the build process will be
+    #: copied in the same directory tree as _spack_build_logfile and
+    #: _spack_build_envfile.
+    archive_files: List[str] = []

-    def __init__(self, pkg: spack.package_base.PackageBase) -> None:
-        super().__init__(pkg)
+    def __init__(self, pkg):
+        self.pkg = pkg
         self.callbacks = {}
         for phase in self.phases:
            self.callbacks[phase] = InstallationPhase(phase, self)

+    @property
+    def spec(self):
+        return self.pkg.spec
+
+    @property
+    def stage(self):
+        return self.pkg.stage
+
+    @property
+    def prefix(self):
+        return self.pkg.prefix
+
+    def test(self):
+        # Defer tests to virtual and concrete packages
+        pass
+
+    def setup_build_environment(self, env):
+        """Sets up the build environment for a package.
+
+        This method will be called before the current package prefix exists in
+        Spack's store.
+
+        Args:
+            env (spack.util.environment.EnvironmentModifications): environment
+                modifications to be applied when the package is built. Package authors
+                can call methods on it to alter the build environment.
+        """
+        if not hasattr(super(), "setup_build_environment"):
+            return
+        super().setup_build_environment(env)
+
+    def setup_dependent_build_environment(self, env, dependent_spec):
+        """Sets up the build environment of packages that depend on this one.
+
+        This is similar to ``setup_build_environment``, but it is used to
+        modify the build environments of packages that *depend* on this one.
+
+        This gives packages like Python and others that follow the extension
+        model a way to implement common environment or compile-time settings
+        for dependencies.
+
+        This method will be called before the dependent package prefix exists
+        in Spack's store.
+
+        Examples:
+            1. Installing python modules generally requires ``PYTHONPATH``
+            to point to the ``lib/pythonX.Y/site-packages`` directory in the
+            module's install prefix. This method could be used to set that
+            variable.
+
+        Args:
+            env (spack.util.environment.EnvironmentModifications): environment
+                modifications to be applied when the dependent package is built.
+                Package authors can call methods on it to alter the build environment.
+
+            dependent_spec (spack.spec.Spec): the spec of the dependent package
+                about to be built. This allows the extendee (self) to query
+                the dependent's state. Note that *this* package's spec is
+                available as ``self.spec``
+        """
+        if not hasattr(super(), "setup_dependent_build_environment"):
+            return
+        super().setup_dependent_build_environment(env, dependent_spec)
+
     def __getitem__(self, idx):
         key = self.phases[idx]
         return self.callbacks[key]

     def __len__(self):
         return len(self.phases)
+
+    def __repr__(self):
+        msg = "{0}({1})"
+        return msg.format(type(self).__name__, self.pkg.spec.format("{name}/{hash:7}"))
+
+    def __str__(self):
+        msg = '"{0}" builder for "{1}"'
+        return msg.format(type(self).build_system, self.pkg.spec.format("{name}/{hash:7}"))
+
+
+# Export these names as standalone to be used in packages
+run_after = PhaseCallbacksMeta.run_after
+run_before = PhaseCallbacksMeta.run_before

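Note: since Builder is a Sequence over its phases, the installer can drive a build by iterating the builder; each item is an InstallationPhase that wraps the phase function together with its run_before/run_after callbacks. A minimal sketch, assuming pkg is a concretized package object and that InstallationPhase.execute takes no arguments:

    import spack.builder

    builder = spack.builder.create(pkg)
    for name, phase in zip(builder.phases, builder):
        print(f"running phase {name}")
        phase.execute()
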
@@ -5,12 +5,15 @@

 """Caches used by Spack to store data"""
 import os
+from typing import Union

 import llnl.util.lang
 from llnl.util.filesystem import mkdirp

 import spack.config
+import spack.error
 import spack.fetch_strategy
+import spack.mirror
 import spack.paths
 import spack.util.file_cache
 import spack.util.path
@@ -31,8 +34,12 @@ def _misc_cache():
     return spack.util.file_cache.FileCache(path)


+FileCacheType = Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton]
+
 #: Spack's cache for small data
-MISC_CACHE: spack.util.file_cache.FileCache = llnl.util.lang.Singleton(_misc_cache)  # type: ignore
+MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
+    llnl.util.lang.Singleton(_misc_cache)
+)


 def fetch_cache_location():
@@ -69,4 +76,6 @@ def store(self, fetcher, relative_dest):


 #: Spack's local cache for downloaded source archives
-FETCH_CACHE: spack.fetch_strategy.FsCache = llnl.util.lang.Singleton(_fetch_cache)  # type: ignore
+FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (
+    llnl.util.lang.Singleton(_fetch_cache)
+)
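Note: both sides here are annotating the same lazy-initialization trick: the module globals are llnl.util.lang.Singleton proxies, so the declared type is either asserted with a "# type: ignore" (left) or widened to a Union (right). The proxy only builds the real object on first use; a sketch, assuming Singleton forwards attribute access to the lazily-built instance:

    import llnl.util.lang


    def _make_cache():
        print("building cache")  # runs only on first access
        return {}


    CACHE = llnl.util.lang.Singleton(_make_cache)
    # Nothing constructed yet; attribute access through the proxy
    # triggers _make_cache() exactly once:
    CACHE.update(hits=0)
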
@@ -10,7 +10,6 @@
 import os
 import re
 import shutil
-import ssl
 import stat
 import subprocess
 import sys
@@ -20,28 +19,26 @@
 from collections import defaultdict, namedtuple
 from typing import Dict, List, Optional, Set, Tuple
 from urllib.error import HTTPError, URLError
-from urllib.parse import quote, urlencode, urlparse
-from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
+from urllib.parse import urlencode
+from urllib.request import HTTPHandler, Request, build_opener

 import ruamel.yaml

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.lang import Singleton, memoized
+from llnl.util.lang import memoized
 from llnl.util.tty.color import cescape, colorize

 import spack
 import spack.binary_distribution as bindist
-import spack.builder
-import spack.concretize
 import spack.config as cfg
-import spack.error
+import spack.environment as ev
 import spack.main
-import spack.mirrors.mirror
-import spack.mirrors.utils
+import spack.mirror
 import spack.paths
 import spack.repo
 import spack.spec
+import spack.stage
 import spack.util.git
 import spack.util.gpg as gpg_util
 import spack.util.spack_yaml as syaml
@@ -53,31 +50,6 @@
 from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
 from spack.reporters.cdash import build_stamp as cdash_build_stamp


-def _urlopen():
-    error_handler = web_util.SpackHTTPDefaultErrorHandler()
-
-    # One opener with HTTPS ssl enabled
-    with_ssl = build_opener(
-        HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
-    )
-
-    # One opener with HTTPS ssl disabled
-    without_ssl = build_opener(
-        HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
-    )
-
-    # And dynamically dispatch based on the config:verify_ssl.
-    def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
-        opener = with_ssl if verify_ssl else without_ssl
-        timeout = timeout or spack.config.get("config:connect_timeout", 1)
-        return opener.open(fullurl, data, timeout)
-
-    return dispatch_open
-
-
-_dyn_mapping_urlopener = Singleton(_urlopen)
-
-
 # See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
 JOB_RETRY_CONDITIONS = [
     # "always",
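Note: the deleted _urlopen helper is a standard dispatch pattern: build one SSL-verifying and one non-verifying opener up front, then choose per request; wrapping the factory in Singleton postpones even that setup until the first call. A condensed sketch of the idea (simplified, without Spack's error handler):

    import ssl
    from urllib.request import HTTPHandler, HTTPSHandler, build_opener

    with_ssl = build_opener(HTTPHandler(), HTTPSHandler())
    without_ssl = build_opener(
        HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context())
    )


    def dispatch_open(url, verify_ssl=True, timeout=10):
        opener = with_ssl if verify_ssl else without_ssl
        return opener.open(url, timeout=timeout)
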
@@ -97,6 +69,8 @@ def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):

 TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
 SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
+# TODO: Remove this in Spack 0.23
+SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
 JOB_NAME_FORMAT = (
     "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{ arch=architecture}"
 )
@@ -201,11 +175,11 @@ def _remove_satisfied_deps(deps, satisfied_list):
     return nodes, edges, stages


-def _print_staging_summary(spec_labels, stages, rebuild_decisions):
+def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):
     if not stages:
         return

-    mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
+    mirrors = spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True)
     tty.msg("Checked the following mirrors for binaries:")
     for m in mirrors.values():
         tty.msg(f"  {m.fetch_url}")
@@ -252,14 +226,21 @@ def _spec_matches(spec, match_string):
     return spec.intersects(match_string)


-def _format_job_needs(dep_jobs, build_group, prune_dag, rebuild_decisions):
+def _format_job_needs(
+    dep_jobs, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
+):
     needs_list = []
     for dep_job in dep_jobs:
         dep_spec_key = _spec_ci_label(dep_job)
         rebuild = rebuild_decisions[dep_spec_key].rebuild

         if not prune_dag or rebuild:
-            needs_list.append({"job": get_job_name(dep_job, build_group), "artifacts": False})
+            needs_list.append(
+                {
+                    "job": get_job_name(dep_job, build_group),
+                    "artifacts": enable_artifacts_buildcache,
+                }
+            )
     return needs_list

@@ -403,6 +384,12 @@ def __init__(self, ci_config, spec_labels, stages):

         self.ir = {
             "jobs": {},
+            "temporary-storage-url-prefix": self.ci_config.get(
+                "temporary-storage-url-prefix", None
+            ),
+            "enable-artifacts-buildcache": self.ci_config.get(
+                "enable-artifacts-buildcache", False
+            ),
             "rebuild-index": self.ci_config.get("rebuild-index", True),
             "broken-specs-url": self.ci_config.get("broken-specs-url", None),
             "broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
@@ -418,20 +405,9 @@ def __init__(self, ci_config, spec_labels, stages):
             if name not in ["any", "build"]:
                 jobs[name] = self.__init_job("")

-    def __init_job(self, release_spec):
+    def __init_job(self, spec):
         """Initialize job object"""
-        job_object = {"spec": release_spec, "attributes": {}}
-        if release_spec:
-            job_vars = job_object["attributes"].setdefault("variables", {})
-            job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec.dag_hash()
-            job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
-            job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
-            job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
-            job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
-            job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
-            job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")
-
-        return job_object
+        return {"spec": spec, "attributes": {}}

     def __is_named(self, section):
         """Check if a pipeline-gen configuration section is for a named job,
@@ -524,7 +500,6 @@ def generate_ir(self):
|
|||||||
for section in reversed(pipeline_gen):
|
for section in reversed(pipeline_gen):
|
||||||
name = self.__is_named(section)
|
name = self.__is_named(section)
|
||||||
has_submapping = "submapping" in section
|
has_submapping = "submapping" in section
|
||||||
has_dynmapping = "dynamic-mapping" in section
|
|
||||||
section = cfg.InternalConfigScope._process_dict_keyname_overrides(section)
|
section = cfg.InternalConfigScope._process_dict_keyname_overrides(section)
|
||||||
|
|
||||||
if name:
|
if name:
|
||||||
@@ -567,108 +542,6 @@ def _apply_section(dest, src):
                        job["attributes"] = self.__apply_submapping(
                            job["attributes"], job["spec"], section
                        )
            elif has_dynmapping:
                mapping = section["dynamic-mapping"]

                dynmap_name = mapping.get("name")

                # Check if this section should be skipped
                dynmap_skip = os.environ.get("SPACK_CI_SKIP_DYNAMIC_MAPPING")
                if dynmap_name and dynmap_skip:
                    if re.match(dynmap_skip, dynmap_name):
                        continue

                # Get the endpoint
                endpoint = mapping["endpoint"]
                endpoint_url = urlparse(endpoint)

                # Configure the request header
                header = {"User-Agent": web_util.SPACK_USER_AGENT}
                header.update(mapping.get("header", {}))

                # Expand header environment variables
                # ie. if tokens are passed
                for value in header.values():
                    value = os.path.expandvars(value)

                verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
                timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))

                required = mapping.get("require", [])
                allowed = mapping.get("allow", [])
                ignored = mapping.get("ignore", [])

                # required keys are implicitly allowed
                allowed = sorted(set(allowed + required))
                ignored = sorted(set(ignored))
                required = sorted(set(required))

                # Make sure required things are not also ignored
                assert not any([ikey in required for ikey in ignored])

                def job_query(job):
                    job_vars = job["attributes"]["variables"]
                    query = (
                        "{SPACK_JOB_SPEC_PKG_NAME}@{SPACK_JOB_SPEC_PKG_VERSION}"
                        # The preceding spaces are required (ref. https://github.com/spack/spack-gantry/blob/develop/docs/api.md#allocation)
                        " {SPACK_JOB_SPEC_VARIANTS}"
                        " arch={SPACK_JOB_SPEC_ARCH}"
                        "%{SPACK_JOB_SPEC_COMPILER_NAME}@{SPACK_JOB_SPEC_COMPILER_VERSION}"
                    ).format_map(job_vars)
                    return f"spec={quote(query)}"

                for job in jobs.values():
                    if not job["spec"]:
                        continue

                    # Create request for this job
                    query = job_query(job)
                    request = Request(
                        endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
                    )
                    try:
                        response = _dyn_mapping_urlopener(
                            request, verify_ssl=verify_ssl, timeout=timeout
                        )
                    except Exception as e:
                        # For now just ignore any errors from dynamic mapping and continue
                        # This is still experimental, and failures should not stop CI
                        # from running normally
                        tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
                        tty.warn(f"{e}")
                        continue

                    config = json.load(codecs.getreader("utf-8")(response))

                    # Strip ignore keys
                    if ignored:
                        for key in ignored:
                            if key in config:
                                config.pop(key)

                    # Only keep allowed keys
                    clean_config = {}
                    if allowed:
                        for key in allowed:
                            if key in config:
                                clean_config[key] = config[key]
                    else:
                        clean_config = config

                    # Verify all of the required keys are present
                    if required:
                        missing_keys = []
                        for key in required:
                            if key not in clean_config.keys():
                                missing_keys.append(key)

                        if missing_keys:
                            tty.warn(f"Response missing required keys: {missing_keys}")

                    if clean_config:
                        job["attributes"] = spack.config.merge_yaml(
                            job.get("attributes", {}), clean_config
                        )

        for _, job in jobs.items():
            if job["spec"]:
@@ -685,13 +558,14 @@ def generate_gitlab_ci_yaml(
    prune_dag=False,
    check_index_only=False,
    artifacts_root=None,
    remote_mirror_override=None,
):
    """Generate a gitlab yaml file to run a dynamic child pipeline from
    the spec matrix in the active environment.

    Arguments:
        env (spack.environment.Environment): Activated environment object
            which must contain a ci section describing how to map
            which must contain a gitlab-ci section describing how to map
            specs to runners
        print_summary (bool): Should we print a summary of all the jobs in
            the stages in which they were placed.
@@ -706,21 +580,39 @@ def generate_gitlab_ci_yaml(
        artifacts_root (str): Path where artifacts like logs, environment
            files (spack.yaml, spack.lock), etc should be written. GitLab
            requires this to be within the project directory.
        remote_mirror_override (str): Typically only needed when one spack.yaml
            is used to populate several mirrors with binaries, based on some
            criteria. Spack protected pipelines populate different mirrors based
            on branch name, facilitated by this option. DEPRECATED
    """
    with spack.concretize.disable_compiler_existence_check():
        with env.write_transaction():
            env.concretize()
            env.write()

    yaml_root = env.manifest[ev.TOP_LEVEL_KEY]

    # Get the joined "ci" config with all of the current scopes resolved
    ci_config = cfg.get("ci")

    config_deprecated = False
    if not ci_config:
        raise SpackCIError("Environment does not have a `ci` configuration")
        tty.warn("Environment does not have a `ci` configuration")
        gitlabci_config = yaml_root.get("gitlab-ci")
        if not gitlabci_config:
            tty.die("Environment yaml does not have `gitlab-ci` config section. Cannot recover.")

        tty.warn(
            "The `gitlab-ci` configuration is deprecated in favor of `ci`.\n",
            "To update run \n\t$ spack env update /path/to/ci/spack.yaml",
        )
        translate_deprecated_config(gitlabci_config)
        ci_config = gitlabci_config
        config_deprecated = True

    # Default target is gitlab...and only target is gitlab
    if not ci_config.get("target", "gitlab") == "gitlab":
        raise SpackCIError('Spack CI module only generates target "gitlab"')
        tty.die('Spack CI module only generates target "gitlab"')

    cdash_config = cfg.get("cdash")
    cdash_handler = CDashHandler(cdash_config) if "build-group" in cdash_config else None
@@ -781,6 +673,12 @@ def generate_gitlab_ci_yaml(
    spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None)

    copy_only_pipeline = spack_pipeline_type == "spack_copy_only"
    if copy_only_pipeline and config_deprecated:
        tty.warn(
            "SPACK_PIPELINE_TYPE=spack_copy_only is not supported when using\n",
            "deprecated ci configuration, a no-op pipeline will be generated\n",
            "instead.",
        )

    def ensure_expected_target_path(path):
        """Returns passed paths with all Windows path separators exchanged
@@ -798,17 +696,39 @@ def ensure_expected_target_path(path):
        path = path.replace("\\", "/")
        return path

    pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
    pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
    deprecated_mirror_config = False
    buildcache_destination = None
    if "buildcache-destination" not in pipeline_mirrors:
    if "buildcache-destination" in pipeline_mirrors:
        raise SpackCIError("spack ci generate requires a mirror named 'buildcache-destination'")
        if remote_mirror_override:
            tty.die(
                "Using the deprecated --buildcache-destination cli option and "
                "having a mirror named 'buildcache-destination' at the same time "
                "is not allowed"
            )
        buildcache_destination = pipeline_mirrors["buildcache-destination"]
    else:
        deprecated_mirror_config = True
        # TODO: This will be an error in Spack 0.23

    buildcache_destination = pipeline_mirrors["buildcache-destination"]
    # TODO: Remove this block in spack 0.23
    remote_mirror_url = None
    if deprecated_mirror_config:
        if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
            tty.die("spack ci generate requires an env containing a mirror")

        ci_mirrors = yaml_root["mirrors"]
        mirror_urls = [url for url in ci_mirrors.values()]
        remote_mirror_url = mirror_urls[0]

    spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
    if spack_buildcache_copy:
        buildcache_copies = {}
        buildcache_copy_src_prefix = buildcache_destination.fetch_url
        buildcache_copy_src_prefix = (
            buildcache_destination.fetch_url
            if buildcache_destination
            else remote_mirror_override or remote_mirror_url
        )
        buildcache_copy_dest_prefix = spack_buildcache_copy

    # Check for a list of "known broken" specs that we should not bother
@@ -818,10 +738,55 @@ def ensure_expected_target_path(path):
    if "broken-specs-url" in ci_config:
        broken_specs_url = ci_config["broken-specs-url"]

    enable_artifacts_buildcache = False
    if "enable-artifacts-buildcache" in ci_config:
        tty.warn("Support for enable-artifacts-buildcache will be removed in Spack 0.23")
        enable_artifacts_buildcache = ci_config["enable-artifacts-buildcache"]

    rebuild_index_enabled = True
    if "rebuild-index" in ci_config and ci_config["rebuild-index"] is False:
        rebuild_index_enabled = False

    temp_storage_url_prefix = None
    if "temporary-storage-url-prefix" in ci_config:
        tty.warn("Support for temporary-storage-url-prefix will be removed in Spack 0.23")
        temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]

    # If a remote mirror override (alternate buildcache destination) was
    # specified, add it here in case it has already built hashes we might
    # generate.
    # TODO: Remove this block in Spack 0.23
    mirrors_to_check = None
    if deprecated_mirror_config and remote_mirror_override:
        if spack_pipeline_type == "spack_protected_branch":
            # Overriding the main mirror in this case might result
            # in skipping jobs on a release pipeline because specs are
            # up to date in develop. Eventually we want to notice and take
            # advantage of this by scheduling a job to copy the spec from
            # develop to the release, but until we have that, this makes
            # sure we schedule a rebuild job if the spec isn't already in
            # override mirror.
            mirrors_to_check = {"override": remote_mirror_override}

        # If we have a remote override and we want to generate the pipeline using
        # --check-index-only, then the override mirror needs to be added to
        # the configured mirrors when bindist.update() is run, or else we
        # won't fetch its index and include it in our local cache.
        spack.mirror.add(
            spack.mirror.Mirror(remote_mirror_override, name="ci_pr_mirror"),
            cfg.default_modify_scope(),
        )

    # TODO: Remove this block in Spack 0.23
    shared_pr_mirror = None
    if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
        stack_name = os.environ.get("SPACK_CI_STACK_NAME", "")
        shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name)
        spack.mirror.add(
            spack.mirror.Mirror(shared_pr_mirror, name="ci_shared_pr_mirror"),
            cfg.default_modify_scope(),
        )

    pipeline_artifacts_dir = artifacts_root
    if not pipeline_artifacts_dir:
        proj_dir = os.environ.get("CI_PROJECT_DIR", os.getcwd())
@@ -830,8 +795,9 @@ def ensure_expected_target_path(path):
    pipeline_artifacts_dir = os.path.abspath(pipeline_artifacts_dir)
    concrete_env_dir = os.path.join(pipeline_artifacts_dir, "concrete_environment")

    # Copy the environment manifest file into the concrete environment directory,
    # along with the spack.lock file.
    # Now that we've added the mirrors we know about, they should be properly
    # reflected in the environment manifest file, so copy that into the
    # concrete environment directory, along with the spack.lock file.
    if not os.path.exists(concrete_env_dir):
        os.makedirs(concrete_env_dir)
    shutil.copyfile(env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml"))
@@ -856,12 +822,18 @@ def ensure_expected_target_path(path):
    env_includes.extend(include_scopes)
    env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes]

    if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
        env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
        translate_deprecated_config(env_yaml_root["spack"]["ci"])

    with open(os.path.join(concrete_env_dir, "spack.yaml"), "w") as fd:
        fd.write(syaml.dump_config(env_yaml_root, default_flow_style=False))

    job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
    job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
    job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
    # TODO: Remove this line in Spack 0.23
    local_mirror_dir = os.path.join(pipeline_artifacts_dir, "mirror")
    user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")

    # We communicate relative paths to the downstream jobs to avoid issues in
@@ -875,6 +847,8 @@ def ensure_expected_target_path(path):
    rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
    rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
    rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
    # TODO: Remove this line in Spack 0.23
    rel_local_mirror_dir = os.path.join(local_mirror_dir, ci_project_dir)
    rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)

    # Speed up staging by first fetching binary indices from all mirrors
@@ -936,7 +910,7 @@ def ensure_expected_target_path(path):
            continue

        up_to_date_mirrors = bindist.get_mirrors_for_spec(
            spec=release_spec, index_only=check_index_only
            spec=release_spec, mirrors_to_check=mirrors_to_check, index_only=check_index_only
        )

        spec_record.rebuild = not up_to_date_mirrors
@@ -978,16 +952,36 @@ def main_script_replacements(cmd):

            job_name = get_job_name(release_spec, build_group)

            job_vars = job_object.setdefault("variables", {})
            job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
            job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
            job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
            job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
            job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
            job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
            job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")

            job_object["needs"] = []
            if spec_label in dependencies:
                # In this case, "needs" is only used for scheduling
                # purposes, so we only get the direct dependencies.
                dep_jobs = []
                for dep_label in dependencies[spec_label]:
                    dep_jobs.append(spec_labels[dep_label])
                if enable_artifacts_buildcache:
                    # Get dependencies transitively, so they're all
                    # available in the artifacts buildcache.
                    dep_jobs = [d for d in release_spec.traverse(deptype="all", root=False)]
                else:
                    # In this case, "needs" is only used for scheduling
                    # purposes, so we only get the direct dependencies.
                    dep_jobs = []
                    for dep_label in dependencies[spec_label]:
                        dep_jobs.append(spec_labels[dep_label])

                job_object["needs"].extend(
                    _format_job_needs(dep_jobs, build_group, prune_dag, rebuild_decisions)
                    _format_job_needs(
                        dep_jobs,
                        build_group,
                        prune_dag,
                        rebuild_decisions,
                        enable_artifacts_buildcache,
                    )
                )

            rebuild_spec = spec_record.rebuild
@@ -1044,7 +1038,6 @@ def main_script_replacements(cmd):

            # Let downstream jobs know whether the spec needed rebuilding, regardless
            # whether DAG pruning was enabled or not.
            job_vars = job_object["variables"]
            job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)

            if cdash_handler:
@@ -1069,6 +1062,19 @@ def main_script_replacements(cmd):
                    },
                )

            # TODO: Remove this block in Spack 0.23
            if enable_artifacts_buildcache:
                bc_root = os.path.join(local_mirror_dir, "build_cache")
                job_object["artifacts"]["paths"].extend(
                    [
                        os.path.join(bc_root, p)
                        for p in [
                            bindist.tarball_name(release_spec, ".spec.json"),
                            bindist.tarball_directory_name(release_spec),
                        ]
                    ]
                )

            job_object["stage"] = stage_name
            job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
            job_object["interruptible"] = True
@@ -1083,7 +1089,15 @@ def main_script_replacements(cmd):
            job_id += 1

    if print_summary:
        _print_staging_summary(spec_labels, stages, rebuild_decisions)
        _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions)

    # Clean up remote mirror override if enabled
    # TODO: Remove this block in Spack 0.23
    if deprecated_mirror_config:
        if remote_mirror_override:
            spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
        if spack_pipeline_type == "spack_pull_request":
            spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())

    tty.debug(f"{job_id} build jobs generated in {stage_id} stages")

@@ -1105,7 +1119,7 @@ def main_script_replacements(cmd):
        "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
    }

    if copy_only_pipeline:
    if copy_only_pipeline and not config_deprecated:
        stage_names.append("copy")
        sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
        sync_job["stage"] = "copy"
@@ -1115,12 +1129,17 @@ def main_script_replacements(cmd):
        if "variables" not in sync_job:
            sync_job["variables"] = {}

        sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = buildcache_destination.fetch_url
        sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = (
            buildcache_destination.fetch_url
            if buildcache_destination
            else remote_mirror_override or remote_mirror_url
        )

        if "buildcache-source" not in pipeline_mirrors:
            raise SpackCIError("Copy-only pipelines require a mirror named 'buildcache-source'")
        buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
        if "buildcache-source" in pipeline_mirrors:
            buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
        else:
            # TODO: Remove this condition in Spack 0.23
            buildcache_source = os.environ.get("SPACK_SOURCE_MIRROR", None)
        sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source
        sync_job["dependencies"] = []

@@ -1128,6 +1147,27 @@ def main_script_replacements(cmd):
        job_id += 1

    if job_id > 0:
        # TODO: Remove this block in Spack 0.23
        if temp_storage_url_prefix:
            # There were some rebuild jobs scheduled, so we will need to
            # schedule a job to clean up the temporary storage location
            # associated with this pipeline.
            stage_names.append("cleanup-temp-storage")
            cleanup_job = copy.deepcopy(spack_ci_ir["jobs"]["cleanup"]["attributes"])

            cleanup_job["stage"] = "cleanup-temp-storage"
            cleanup_job["when"] = "always"
            cleanup_job["retry"] = service_job_retries
            cleanup_job["interruptible"] = True

            cleanup_job["script"] = _unpack_script(
                cleanup_job["script"],
                op=lambda cmd: cmd.replace("mirror_prefix", temp_storage_url_prefix),
            )

            cleanup_job["dependencies"] = []
            output_object["cleanup"] = cleanup_job

        if (
            "script" in spack_ci_ir["jobs"]["signing"]["attributes"]
            and spack_pipeline_type == "spack_protected_branch"
@@ -1144,9 +1184,11 @@ def main_script_replacements(cmd):
            signing_job["interruptible"] = True
            if "variables" not in signing_job:
                signing_job["variables"] = {}
            signing_job["variables"][
                "SPACK_BUILDCACHE_DESTINATION"
            ] = buildcache_destination.push_url
            signing_job["variables"]["SPACK_BUILDCACHE_DESTINATION"] = (
                buildcache_destination.push_url  # need the s3 url for aws s3 sync
                if buildcache_destination
                else remote_mirror_override or remote_mirror_url
            )
            signing_job["dependencies"] = []

            output_object["sign-pkgs"] = signing_job
@@ -1157,7 +1199,9 @@ def main_script_replacements(cmd):
        final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]

        final_job["stage"] = "stage-rebuild-index"
        target_mirror = buildcache_destination.push_url
        target_mirror = remote_mirror_override or remote_mirror_url
        if buildcache_destination:
            target_mirror = buildcache_destination.push_url
        final_job["script"] = _unpack_script(
            final_job["script"],
            op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror),
@@ -1175,19 +1219,25 @@ def main_script_replacements(cmd):
    # Capture the version of Spack used to generate the pipeline, that can be
    # passed to `git checkout` for version consistency. If we aren't in a Git
    # repository, presume we are a Spack release and use the Git tag instead.
    spack_version = spack.get_version()
    spack_version = spack.main.get_version()
    version_to_clone = spack.get_spack_commit() or f"v{spack.spack_version}"
    version_to_clone = spack.main.get_spack_commit() or f"v{spack.spack_version}"

    output_object["variables"] = {
        "SPACK_ARTIFACTS_ROOT": rel_artifacts_root,
        "SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
        "SPACK_VERSION": spack_version,
        "SPACK_CHECKOUT_VERSION": version_to_clone,
        # TODO: Remove this line in Spack 0.23
        "SPACK_REMOTE_MIRROR_URL": remote_mirror_url,
        "SPACK_JOB_LOG_DIR": rel_job_log_dir,
        "SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
        "SPACK_JOB_TEST_DIR": rel_job_test_dir,
        # TODO: Remove this line in Spack 0.23
        "SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
        "SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
        "SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
        # TODO: Remove this line in Spack 0.23
        "SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None",
        "SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
        "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
        "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
@@ -1196,6 +1246,10 @@ def main_script_replacements(cmd):
    for item, val in output_vars.items():
        output_vars[item] = ensure_expected_target_path(val)

    # TODO: Remove this block in Spack 0.23
    if deprecated_mirror_config and remote_mirror_override:
        (output_object["variables"]["SPACK_REMOTE_MIRROR_OVERRIDE"]) = remote_mirror_override

    spack_stack_name = os.environ.get("SPACK_CI_STACK_NAME", None)
    if spack_stack_name:
        output_object["variables"]["SPACK_CI_STACK_NAME"] = spack_stack_name
@@ -1218,12 +1272,17 @@ def main_script_replacements(cmd):
    else:
        # No jobs were generated
        noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
        # If this job fails ignore the status and carry on
        noop_job["retry"] = 0
        noop_job["allow_failure"] = True
        noop_job["retry"] = service_job_retries

        tty.debug("No specs to rebuild, generating no-op job")
        output_object = {"no-specs-to-rebuild": noop_job}
        if copy_only_pipeline and config_deprecated:
            tty.debug("Generating no-op job as copy-only is unsupported here.")
            noop_job["script"] = [
                'echo "copy-only pipelines are not supported with deprecated ci configs"'
            ]
            output_object = {"unsupported-copy": noop_job}
        else:
            tty.debug("No specs to rebuild, generating no-op job")
            output_object = {"no-specs-to-rebuild": noop_job}

    # Ensure the child pipeline always runs
    output_object["workflow"] = {"rules": [{"when": "always"}]}
@@ -1324,7 +1383,7 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: b
    """
    tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
    signing_key = bindist.select_signing_key() if sign_binaries else None
    mirror = spack.mirrors.mirror.Mirror.from_url(mirror_url)
    mirror = spack.mirror.Mirror.from_url(mirror_url)
    try:
        with bindist.make_uploader(mirror, signing_key=signing_key) as uploader:
            uploader.push_or_raise([spec])
@@ -1344,7 +1403,7 @@ def remove_other_mirrors(mirrors_to_keep, scope=None):
            mirrors_to_remove.append(name)

    for mirror_name in mirrors_to_remove:
        spack.mirrors.utils.remove(mirror_name, scope)
        spack.mirror.remove(mirror_name, scope)


def copy_files_to_artifacts(src, artifacts_dir):
@@ -1389,11 +1448,7 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->

        stage_dir = job_pkg.stage.path
        tty.debug(f"stage dir: {stage_dir}")
        for file in [
            job_pkg.log_path,
            job_pkg.env_mods_path,
            *spack.builder.create(job_pkg).archive_files,
        ]:
        for file in [job_pkg.log_path, job_pkg.env_mods_path, *job_pkg.builder.archive_files]:
            copy_files_to_artifacts(file, job_log_dir)


@@ -2265,6 +2320,83 @@ def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optiona
        reporter.test_skipped_report(report_dir, spec, reason)


class SpackCIError(spack.error.SpackError):
    def __init__(self, msg):
        super().__init__(msg)
def translate_deprecated_config(config):
    # Remove all deprecated keys from config
    mappings = config.pop("mappings", [])
    match_behavior = config.pop("match_behavior", "first")

    build_job = {}
    if "image" in config:
        build_job["image"] = config.pop("image")
    if "tags" in config:
        build_job["tags"] = config.pop("tags")
    if "variables" in config:
        build_job["variables"] = config.pop("variables")

    # Scripts always override in old CI
    if "before_script" in config:
        build_job["before_script:"] = config.pop("before_script")
    if "script" in config:
        build_job["script:"] = config.pop("script")
    if "after_script" in config:
        build_job["after_script:"] = config.pop("after_script")

    signing_job = None
    if "signing-job-attributes" in config:
        signing_job = {"signing-job": config.pop("signing-job-attributes")}

    service_job_attributes = None
    if "service-job-attributes" in config:
        service_job_attributes = config.pop("service-job-attributes")

    # If this config already has pipeline-gen, do no more
    if "pipeline-gen" in config:
        return True if mappings or build_job or signing_job or service_job_attributes else False

    config["target"] = "gitlab"

    config["pipeline-gen"] = []
    pipeline_gen = config["pipeline-gen"]

    # Build Job
    submapping = []
    for section in mappings:
        submapping_section = {"match": section["match"]}
        if "runner-attributes" in section:
            remapped_attributes = {}
            if match_behavior == "first":
                for key, value in section["runner-attributes"].items():
                    # Scripts always override in old CI
                    if key == "script":
                        remapped_attributes["script:"] = value
                    elif key == "before_script":
                        remapped_attributes["before_script:"] = value
                    elif key == "after_script":
                        remapped_attributes["after_script:"] = value
                    else:
                        remapped_attributes[key] = value
            else:
                # Handle "merge" behavior by allowing scripts to merge in the submapping section
                remapped_attributes = section["runner-attributes"]
            submapping_section["build-job"] = remapped_attributes

        if "remove-attributes" in section:
            # Old format only allowed tags in this section, so no extra checks are needed
            submapping_section["build-job-remove"] = section["remove-attributes"]
        submapping.append(submapping_section)
    pipeline_gen.append({"submapping": submapping, "match_behavior": match_behavior})

    if build_job:
        pipeline_gen.append({"build-job": build_job})

    # Signing Job
    if signing_job:
        pipeline_gen.append(signing_job)

    # Service Jobs
    if service_job_attributes:
        pipeline_gen.append({"reindex-job": service_job_attributes})
        pipeline_gen.append({"noop-job": service_job_attributes})
        pipeline_gen.append({"cleanup-job": service_job_attributes})

    return True
@@ -4,13 +4,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import argparse
import difflib
import importlib
import os
import re
import sys
from collections import Counter
from typing import List, Optional, Union
from typing import List, Union

import llnl.string
import llnl.util.tty as tty
@@ -19,14 +17,12 @@
from llnl.util.tty.colify import colify
from llnl.util.tty.color import colorize

import spack.concretize
import spack.config  # breaks a cycle.
import spack.config
import spack.environment as ev
import spack.error
import spack.extensions
import spack.parser
import spack.paths
import spack.repo
import spack.spec
import spack.store
import spack.traverse as traverse
@@ -34,8 +30,6 @@
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml

from ..enums import InstallRecordStatus

# cmd has a submodule called "list" so preserve the python list module
python_list = list

@@ -126,8 +120,6 @@ def get_module(cmd_name):
        tty.debug("Imported {0} from built-in commands".format(pname))
    except ImportError:
        module = spack.extensions.get_module(cmd_name)
        if not module:
            raise CommandNotFoundError(cmd_name)

    attr_setdefault(module, SETUP_PARSER, lambda *args: None)  # null-op
    attr_setdefault(module, DESCRIPTION, "")
@@ -181,66 +173,10 @@ def parse_specs(
    arg_string = " ".join([quote_kvp(arg) for arg in args])

    specs = spack.parser.parse(arg_string)
    if not concretize:
        return specs

    to_concretize = [(s, None) for s in specs]
    return _concretize_spec_pairs(to_concretize, tests=tests)
    for spec in specs:
        if concretize:
            spec.concretize(tests=tests)
    return specs


def _concretize_spec_pairs(to_concretize, tests=False):
    """Helper method that concretizes abstract specs from a list of abstract,concrete pairs.

    Any spec with a concrete spec associated with it will concretize to that spec. Any spec
    with ``None`` for its concrete spec will be newly concretized. This method respects unification
    rules from config."""
    unify = spack.config.get("concretizer:unify", False)

    # Special case for concretizing a single spec
    if len(to_concretize) == 1:
        abstract, concrete = to_concretize[0]
        return [concrete or spack.concretize.concretized(abstract)]

    # Special case if every spec is either concrete or has an abstract hash
    if all(
        concrete or abstract.concrete or abstract.abstract_hash
        for abstract, concrete in to_concretize
    ):
        # Get all the concrete specs
        ret = [
            concrete or (abstract if abstract.concrete else abstract.lookup_hash())
            for abstract, concrete in to_concretize
        ]

        # If unify: true, check that specs don't conflict
        # Since all concrete, "when_possible" is not relevant
        if unify is True:  # True, "when_possible", False are possible values
            runtimes = spack.repo.PATH.packages_with_tags("runtime")
            specs_per_name = Counter(
                spec.name
                for spec in traverse.traverse_nodes(
                    ret, deptype=("link", "run"), key=traverse.by_dag_hash
                )
                if spec.name not in runtimes  # runtimes are allowed multiple times
            )

            conflicts = sorted(name for name, count in specs_per_name.items() if count > 1)
            if conflicts:
                raise spack.error.SpecError(
                    "Specs conflict and `concretizer:unify` is configured true.",
                    f" specs depend on multiple versions of {', '.join(conflicts)}",
                )
        return ret

    # Standard case
    concretize_method = spack.concretize.concretize_separately  # unify: false
    if unify is True:
        concretize_method = spack.concretize.concretize_together
    elif unify == "when_possible":
        concretize_method = spack.concretize.concretize_together_when_possible

    concretized = concretize_method(to_concretize, tests=tests)
    return [concrete for _, concrete in concretized]


def matching_spec_from_env(spec):
@@ -251,69 +187,44 @@ def matching_spec_from_env(spec):
    """
    env = ev.active_environment()
    if env:
        return env.matching_spec(spec) or spack.concretize.concretized(spec)
        return env.matching_spec(spec) or spec.concretized()
    else:
        return spack.concretize.concretized(spec)
        return spec.concretized()


def matching_specs_from_env(specs):
    """
    Same as ``matching_spec_from_env`` but respects spec unification rules.

    For each spec, if there is a matching spec in the environment it is used. If no
    matching spec is found, this will return the given spec but concretized in the
    context of the active environment and other given specs, with unification rules applied.
    """
    env = ev.active_environment()
    spec_pairs = [(spec, env.matching_spec(spec) if env else None) for spec in specs]
    additional_concrete_specs = (
        [(concrete, concrete) for _, concrete in env.concretized_specs()] if env else []
    )
    return _concretize_spec_pairs(spec_pairs + additional_concrete_specs)[: len(spec_pairs)]


def disambiguate_spec(
    spec: spack.spec.Spec,
    env: Optional[ev.Environment],
    local: bool = False,
    installed: Union[bool, InstallRecordStatus] = True,
    first: bool = False,
) -> spack.spec.Spec:
def disambiguate_spec(spec, env, local=False, installed=True, first=False):
    """Given a spec, figure out which installed package it refers to.

    Args:
        spec: a spec to disambiguate
        env: a spack environment, if one is active, or None if no environment is active
        local: do not search chained spack instances
        installed: install status argument passed to database query.
        first: returns the first matching spec, even if more than one match is found
    Arguments:
        spec (spack.spec.Spec): a spec to disambiguate
        env (spack.environment.Environment): a spack environment,
            if one is active, or None if no environment is active
        local (bool): do not search chained spack instances
        installed (bool or spack.database.InstallStatus or typing.Iterable):
            install status argument passed to database query.
            See ``spack.database.Database._query`` for details.
    """
    hashes = env.all_hashes() if env else None
    return disambiguate_spec_from_hashes(spec, hashes, local, installed, first)


def disambiguate_spec_from_hashes(
    spec: spack.spec.Spec,
    hashes: List[str],
    local: bool = False,
    installed: Union[bool, InstallRecordStatus] = True,
    first: bool = False,
) -> spack.spec.Spec:
def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True, first=False):
    """Given a spec and a list of hashes, get concrete spec the spec refers to.

    Arguments:
        spec: a spec to disambiguate
        hashes: a set of hashes of specs among which to disambiguate
        local: if True, do not search chained spack instances
        installed: install status argument passed to database query.
        first: returns the first matching spec, even if more than one match is found
        spec (spack.spec.Spec): a spec to disambiguate
        hashes (typing.Iterable): a set of hashes of specs among which to disambiguate
        local (bool): do not search chained spack instances
        installed (bool or spack.database.InstallStatus or typing.Iterable):
            install status argument passed to database query.
            See ``spack.database.Database._query`` for details.
    """
    if local:
        matching_specs = spack.store.STORE.db.query_local(spec, hashes=hashes, installed=installed)
    else:
        matching_specs = spack.store.STORE.db.query(spec, hashes=hashes, installed=installed)
    if not matching_specs:
        tty.die(f"Spec '{spec}' matches no installed packages.")
        tty.die("Spec '%s' matches no installed packages." % spec)

    elif first:
        return matching_specs[0]
@@ -598,18 +509,6 @@ def __init__(self, name):
        super().__init__("{0} is not a permissible Spack command name.".format(name))


class MultipleSpecsMatch(Exception):
    """Raised when multiple specs match a constraint, in a context where
    this is not allowed.
    """


class NoSpecMatches(Exception):
    """Raised when no spec matches a constraint, in a context where
    this is not allowed.
    """


########################################
# argparse types for argument validation
########################################
@@ -694,24 +593,3 @@ def find_environment(args):
def first_line(docstring):
    """Return the first line of the docstring."""
    return docstring.split("\n")[0]


class CommandNotFoundError(spack.error.SpackError):
    """Exception class thrown when a requested command is not recognized as
    such.
    """

    def __init__(self, cmd_name):
        msg = (
            f"{cmd_name} is not a recognized Spack command or extension command; "
            "check with `spack commands`."
        )
        long_msg = None

        similar = difflib.get_close_matches(cmd_name, all_commands())

        if 1 <= len(similar) <= 5:
            long_msg = "\nDid you mean one of the following commands?\n "
            long_msg += "\n ".join(similar)

        super().__init__(msg, long_msg)
@@ -11,7 +11,6 @@
import llnl.util.tty.color as color

import spack.platforms
import spack.spec

description = "print architecture information about this machine"
section = "system"
@@ -19,23 +18,12 @@


def setup_parser(subparser):
    # DEPRECATED: equivalent to --generic --target
    subparser.add_argument(
        "-g",
        "--generic-target",
        action="store_true",
        help="show the best generic target (deprecated)",
        "-g", "--generic-target", action="store_true", help="show the best generic target"
    )
    subparser.add_argument(
        "--known-targets", action="store_true", help="show a list of all known targets and exit"
    )
    target_type = subparser.add_mutually_exclusive_group()
    target_type.add_argument(
        "--family", action="store_true", help="print generic ISA (x86_64, aarch64, ppc64le, ...)"
    )
    target_type.add_argument(
        "--generic", action="store_true", help="print feature level (x86_64_v3, armv8.4a, ...)"
    )
    parts = subparser.add_mutually_exclusive_group()
    parts2 = subparser.add_mutually_exclusive_group()
    parts.add_argument(
@@ -91,7 +79,6 @@ def display_target_group(header, target_group):

def arch(parser, args):
    if args.generic_target:
        # TODO: add deprecation warning in 0.24
        print(archspec.cpu.host().generic)
        return

@@ -108,10 +95,6 @@ def arch(parser, args):
    host_platform = spack.platforms.host()
    host_os = host_platform.operating_system(os_args)
    host_target = host_platform.target(target_args)
    if args.family:
        host_target = host_target.family
    elif args.generic:
        host_target = host_target.generic
    architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))

    if args.platform:
@@ -15,13 +15,12 @@
|
|||||||
import spack.bootstrap
|
import spack.bootstrap
|
||||||
import spack.bootstrap.config
|
import spack.bootstrap.config
|
||||||
import spack.bootstrap.core
|
import spack.bootstrap.core
|
||||||
import spack.concretize
|
|
||||||
import spack.config
|
import spack.config
|
||||||
import spack.mirrors.utils
|
import spack.main
|
||||||
|
import spack.mirror
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.stage
|
import spack.stage
|
||||||
import spack.util.path
|
import spack.util.path
|
||||||
import spack.util.spack_yaml
|
|
||||||
from spack.cmd.common import arguments
|
from spack.cmd.common import arguments
|
||||||
|
|
||||||
description = "manage bootstrap configuration"
|
description = "manage bootstrap configuration"
|
||||||
@@ -399,9 +398,9 @@ def _mirror(args):
|
|||||||
llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
|
llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
|
||||||
# Suppress tty from the call below for terser messages
|
# Suppress tty from the call below for terser messages
|
||||||
llnl.util.tty.set_msg_enabled(False)
|
llnl.util.tty.set_msg_enabled(False)
|
||||||
spec = spack.concretize.concretized(spack.spec.Spec(spec_str))
|
spec = spack.spec.Spec(spec_str).concretized()
|
||||||
for node in spec.traverse():
|
for node in spec.traverse():
|
||||||
spack.mirrors.utils.create(mirror_dir, [node])
|
spack.mirror.create(mirror_dir, [node])
|
||||||
llnl.util.tty.set_msg_enabled(True)
|
llnl.util.tty.set_msg_enabled(True)
|
||||||
|
|
||||||
if args.binary_packages:
|
if args.binary_packages:
|
||||||
lib/spack/spack/cmd/buildcache.py
@@ -17,16 +17,20 @@
 
 import spack.binary_distribution as bindist
 import spack.cmd
-import spack.concretize
 import spack.config
 import spack.deptypes as dt
 import spack.environment as ev
 import spack.error
-import spack.mirrors.mirror
+import spack.mirror
 import spack.oci.oci
+import spack.oci.opener
+import spack.relocate
+import spack.repo
 import spack.spec
 import spack.stage
 import spack.store
+import spack.user_environment
+import spack.util.crypto
 import spack.util.parallel
 import spack.util.url as url_util
 import spack.util.web as web_util
@@ -35,8 +39,6 @@
 from spack.cmd.common import arguments
 from spack.spec import Spec, save_dependency_specfiles
 
-from ..enums import InstallRecordStatus
-
 description = "create, download and install binary packages"
 section = "packaging"
 level = "long"
@@ -311,10 +313,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
 
 def _matching_specs(specs: List[Spec]) -> List[Spec]:
     """Disambiguate specs and return a list of matching specs"""
-    return [
-        spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=InstallRecordStatus.ANY)
-        for s in specs
-    ]
+    return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]
 
 
 def _format_spec(spec: Spec) -> str:
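The installed=any form predates the InstallRecordStatus enum: the builtin any is used as an "any install status" sentinel that the database query layer special-cases. A hypothetical minimal analogue of the enum side, for illustration only:

    from enum import Flag, auto

    class InstallRecordStatus(Flag):
        INSTALLED = auto()
        DEPRECATED = auto()
        MISSING = auto()
        ANY = INSTALLED | DEPRECATED | MISSING

    def query(installed=InstallRecordStatus.ANY):
        # An explicit flag replaces the builtin-`any` sentinel, so
        # "match every status" is spelled out rather than implied.
        return installed

    print(query())  # InstallRecordStatus.ANY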
@@ -393,7 +392,7 @@ def push_fn(args):
         roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
 
     mirror = args.mirror
-    assert isinstance(mirror, spack.mirrors.mirror.Mirror)
+    assert isinstance(mirror, spack.mirror.Mirror)
 
     push_url = mirror.push_url
 
@@ -556,7 +555,8 @@ def check_fn(args: argparse.Namespace):
         tty.msg("No specs provided, exiting.")
         return
 
-    specs = [spack.concretize.concretized(s) for s in specs]
+    for spec in specs:
+        spec.concretize()
 
     # Next see if there are any configured binary mirrors
     configured_mirrors = spack.config.get("mirrors", scope=args.scope)
@@ -624,7 +624,7 @@ def save_specfile_fn(args):
     root = specs[0]
 
     if not root.concrete:
-        root = spack.concretize.concretized(root)
+        root.concretize()
 
     save_dependency_specfiles(
         root, args.specfile_dir, dependencies=spack.cmd.parse_specs(args.specs)
@@ -750,7 +750,7 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
             copy_buildcache_file(copy_file["src"], dest)
 
 
-def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False):
+def update_index(mirror: spack.mirror.Mirror, update_keys=False):
     # Special case OCI images for now.
     try:
         image_ref = spack.oci.oci.image_from_mirror(mirror)
lib/spack/spack/cmd/change.py
@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 import spack.cmd
-import spack.spec
 from spack.cmd.common import arguments
 
 description = "change an existing spec in an environment"
lib/spack/spack/cmd/checksum.py
@@ -15,6 +15,7 @@
 import spack.repo
 import spack.spec
 import spack.stage
+import spack.util.crypto
 import spack.util.web as web_util
 from spack.cmd.common import arguments
 from spack.package_base import (
lib/spack/spack/cmd/ci.py
@@ -19,8 +19,9 @@
 import spack.cmd.buildcache as buildcache
 import spack.config as cfg
 import spack.environment as ev
+import spack.environment.depfile
 import spack.hash_types as ht
-import spack.mirrors.mirror
+import spack.mirror
 import spack.util.gpg as gpg_util
 import spack.util.timer as timer
 import spack.util.url as url_util
@@ -62,6 +63,13 @@ def setup_parser(subparser):
         "path to the file where generated jobs file should be written. "
         "default is .gitlab-ci.yml in the root of the repository",
     )
+    generate.add_argument(
+        "--copy-to",
+        default=None,
+        help="path to additional directory for job files\n\n"
+        "this option provides an absolute path to a directory where the generated "
+        "jobs yaml file should be copied. default is not to copy",
+    )
     generate.add_argument(
         "--optimize",
         action="store_true",
@@ -76,6 +84,12 @@ def setup_parser(subparser):
         default=False,
         help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
     )
+    generate.add_argument(
+        "--buildcache-destination",
+        default=None,
+        help="override the mirror configured in the environment\n\n"
+        "allows for pushing binaries from the generated pipeline to a different location",
+    )
     prune_group = generate.add_mutually_exclusive_group()
     prune_group.add_argument(
         "--prune-dag",
@@ -201,10 +215,20 @@ def ci_generate(args):
 
     env = spack.cmd.require_active_env(cmd_name="ci generate")
 
+    if args.copy_to:
+        tty.warn("The flag --copy-to is deprecated and will be removed in Spack 0.23")
+
+    if args.buildcache_destination:
+        tty.warn(
+            "The flag --buildcache-destination is deprecated and will be removed in Spack 0.23"
+        )
+
     output_file = args.output_file
+    copy_yaml_to = args.copy_to
     prune_dag = args.prune_dag
     index_only = args.index_only
     artifacts_root = args.artifacts_root
+    buildcache_destination = args.buildcache_destination
 
     if not output_file:
         output_file = os.path.abspath(".gitlab-ci.yml")
@@ -222,8 +246,15 @@ def ci_generate(args):
         prune_dag=prune_dag,
         check_index_only=index_only,
         artifacts_root=artifacts_root,
+        remote_mirror_override=buildcache_destination,
     )
 
+    if copy_yaml_to:
+        copy_to_dir = os.path.dirname(copy_yaml_to)
+        if not os.path.exists(copy_to_dir):
+            os.makedirs(copy_to_dir)
+        shutil.copyfile(output_file, copy_yaml_to)
+
 
 def ci_reindex(args):
     """rebuild the buildcache index for the remote mirror
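Both restored flags only warn before their removal. A minimal sketch of driving them through Spack's Python command wrapper rather than a shell, assuming an active environment and an illustrative output path:

    import spack.main

    ci = spack.main.SpackCommand("ci")
    # Emits the deprecation warning, then copies the generated YAML:
    ci("generate", "--output-file", "jobs.yml", "--copy-to", "/tmp/ci/jobs.yml")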
@@ -240,7 +271,7 @@ def ci_reindex(args):
     ci_mirrors = yaml_root["mirrors"]
     mirror_urls = [url for url in ci_mirrors.values()]
     remote_mirror_url = mirror_urls[0]
-    mirror = spack.mirrors.mirror.Mirror(remote_mirror_url)
+    mirror = spack.mirror.Mirror(remote_mirror_url)
 
     buildcache.update_index(mirror, update_keys=True)
 
@@ -268,13 +299,22 @@ def ci_rebuild(args):
     job_log_dir = os.environ.get("SPACK_JOB_LOG_DIR")
     job_test_dir = os.environ.get("SPACK_JOB_TEST_DIR")
     repro_dir = os.environ.get("SPACK_JOB_REPRO_DIR")
+    # TODO: Remove this in Spack 0.23
+    local_mirror_dir = os.environ.get("SPACK_LOCAL_MIRROR_DIR")
     concrete_env_dir = os.environ.get("SPACK_CONCRETE_ENV_DIR")
+    ci_pipeline_id = os.environ.get("CI_PIPELINE_ID")
     ci_job_name = os.environ.get("CI_JOB_NAME")
     signing_key = os.environ.get("SPACK_SIGNING_KEY")
     job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
     job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
     spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
+    # TODO: Remove this in Spack 0.23
+    remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
+    # TODO: Remove this in Spack 0.23
+    remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
     spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME")
+    # TODO: Remove this in Spack 0.23
+    shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL")
     rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
     require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")
 
@@ -294,10 +334,12 @@ def ci_rebuild(args):
     job_log_dir = os.path.join(ci_project_dir, job_log_dir)
     job_test_dir = os.path.join(ci_project_dir, job_test_dir)
     repro_dir = os.path.join(ci_project_dir, repro_dir)
+    local_mirror_dir = os.path.join(ci_project_dir, local_mirror_dir)
     concrete_env_dir = os.path.join(ci_project_dir, concrete_env_dir)
 
     # Debug print some of the key environment variables we should have received
     tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
+    tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
     tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
 
     # Query the environment manifest to find out whether we're reporting to a
@@ -328,12 +370,52 @@ def ci_rebuild(args):
 
     full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False
 
-    pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
+    pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
+    deprecated_mirror_config = False
     buildcache_destination = None
-    if "buildcache-destination" not in pipeline_mirrors:
-        tty.die("spack ci rebuild requires a mirror named 'buildcache-destination")
-
-    buildcache_destination = pipeline_mirrors["buildcache-destination"]
+    if "buildcache-destination" in pipeline_mirrors:
+        buildcache_destination = pipeline_mirrors["buildcache-destination"]
+    else:
+        deprecated_mirror_config = True
+        # TODO: This will be an error in Spack 0.23
+
+    # If no override url exists, then just push binary package to the
+    # normal remote mirror url.
+    # TODO: Remove in Spack 0.23
+    buildcache_mirror_url = remote_mirror_override or remote_mirror_url
+    if buildcache_destination:
+        buildcache_mirror_url = buildcache_destination.push_url
+
+    # Figure out what is our temporary storage mirror: Is it artifacts
+    # buildcache? Or temporary-storage-url-prefix? In some cases we need to
+    # force something or pipelines might not have a way to propagate build
+    # artifacts from upstream to downstream jobs.
+    # TODO: Remove this in Spack 0.23
+    pipeline_mirror_url = None
+
+    # TODO: Remove this in Spack 0.23
+    temp_storage_url_prefix = None
+    if "temporary-storage-url-prefix" in ci_config:
+        temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
+        pipeline_mirror_url = url_util.join(temp_storage_url_prefix, ci_pipeline_id)
+
+    # TODO: Remove this in Spack 0.23
+    enable_artifacts_mirror = False
+    if "enable-artifacts-buildcache" in ci_config:
+        enable_artifacts_mirror = ci_config["enable-artifacts-buildcache"]
+    if enable_artifacts_mirror or (
+        spack_is_pr_pipeline and not enable_artifacts_mirror and not temp_storage_url_prefix
+    ):
+        # If you explicitly enabled the artifacts buildcache feature, or
+        # if this is a PR pipeline but you did not enable either of the
+        # per-pipeline temporary storage features, we force the use of
+        # artifacts buildcache. Otherwise jobs will not have binary
+        # dependencies from previous stages available since we do not
+        # allow pushing binaries to the remote mirror during PR pipelines.
+        enable_artifacts_mirror = True
+        pipeline_mirror_url = url_util.path_to_file_url(local_mirror_dir)
+        mirror_msg = "artifact buildcache enabled, mirror url: {0}".format(pipeline_mirror_url)
+        tty.debug(mirror_msg)
 
     # Get the concrete spec to be built by this job.
     try:
@@ -408,7 +490,48 @@ def ci_rebuild(args):
         fd.write(spack_info.encode("utf8"))
         fd.write(b"\n")
 
-    matches = None if full_rebuild else bindist.get_mirrors_for_spec(job_spec, index_only=False)
+    pipeline_mirrors = []
+
+    # If we decided there should be a temporary storage mechanism, add that
+    # mirror now so it's used when we check for a hash match already
+    # built for this spec.
+    # TODO: Remove this block in Spack 0.23
+    if pipeline_mirror_url:
+        mirror = spack.mirror.Mirror(pipeline_mirror_url, name=spack_ci.TEMP_STORAGE_MIRROR_NAME)
+        spack.mirror.add(mirror, cfg.default_modify_scope())
+        pipeline_mirrors.append(pipeline_mirror_url)
+
+    # Check configured mirrors for a built spec with a matching hash
+    # TODO: Remove this block in Spack 0.23
+    mirrors_to_check = None
+    if remote_mirror_override:
+        if spack_pipeline_type == "spack_protected_branch":
+            # Passing "mirrors_to_check" below means we *only* look in the override
+            # mirror to see if we should skip building, which is what we want.
+            mirrors_to_check = {"override": remote_mirror_override}
+
+        # Adding this mirror to the list of configured mirrors means dependencies
+        # could be installed from either the override mirror or any other configured
+        # mirror (e.g. remote_mirror_url which is defined in the environment or
+        # pipeline_mirror_url), which is also what we want.
+        spack.mirror.add(
+            spack.mirror.Mirror(remote_mirror_override, name="mirror_override"),
+            cfg.default_modify_scope(),
+        )
+        pipeline_mirrors.append(remote_mirror_override)
+
+    # TODO: Remove this in Spack 0.23
+    if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
+        if shared_pr_mirror_url != "None":
+            pipeline_mirrors.append(shared_pr_mirror_url)
+
+    matches = (
+        None
+        if full_rebuild
+        else bindist.get_mirrors_for_spec(
+            job_spec, mirrors_to_check=mirrors_to_check, index_only=False
+        )
+    )
 
     if matches:
         # Got a hash match on at least one configured mirror. All
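The skip-or-rebuild decision above reduces to a small predicate; a hedged standalone sketch (names mirror the diff, not a public API):

    def needs_rebuild(full_rebuild, matches):
        if full_rebuild:
            return True     # "rebuild everything" ignores existing binaries
        return not matches  # rebuild only when no mirror has a hash match

    assert needs_rebuild(True, ["mirror-a"])       # forced rebuild
    assert not needs_rebuild(False, ["mirror-a"])  # hash match, skip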
@@ -420,10 +543,25 @@ def ci_rebuild(args):
         tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name))
         for match in matches:
             tty.msg(" {0}".format(match["mirror_url"]))
+        # TODO: Remove this block in Spack 0.23
+        if enable_artifacts_mirror:
+            matching_mirror = matches[0]["mirror_url"]
+            build_cache_dir = os.path.join(local_mirror_dir, "build_cache")
+            tty.debug("Getting {0} buildcache from {1}".format(job_spec_pkg_name, matching_mirror))
+            tty.debug("Downloading to {0}".format(build_cache_dir))
+            bindist.download_single_spec(job_spec, build_cache_dir, mirror_url=matching_mirror)
 
         # Now we are done and successful
         return 0
 
+    # Before beginning the install, if this is a "rebuild everything" pipeline, we
+    # only want to keep the mirror being used by the current pipeline as it's binary
+    # package destination. This ensures that the when we rebuild everything, we only
+    # consume binary dependencies built in this pipeline.
+    # TODO: Remove this in Spack 0.23
+    if deprecated_mirror_config and full_rebuild:
+        spack_ci.remove_other_mirrors(pipeline_mirrors, cfg.default_modify_scope())
+
     # No hash match anywhere means we need to rebuild spec
 
     # Start with spack arguments
@@ -544,11 +682,17 @@ def ci_rebuild(args):
             cdash_handler.copy_test_results(reports_dir, job_test_dir)
 
     if install_exit_code == 0:
-        # If the install succeeded, push it to the buildcache destination. Failure to push
+        # If the install succeeded, push it to one or more mirrors. Failure to push to any mirror
        # will result in a non-zero exit code. Pushing is best-effort.
+        mirror_urls = [buildcache_mirror_url]
+
+        # TODO: Remove this block in Spack 0.23
+        if pipeline_mirror_url:
+            mirror_urls.append(pipeline_mirror_url)
+
         for result in spack_ci.create_buildcache(
             input_spec=job_spec,
-            destination_mirror_urls=[buildcache_destination.push_url],
+            destination_mirror_urls=mirror_urls,
             sign_binaries=spack_ci.can_sign_binaries(),
         ):
             if not result.success:
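The push above is best-effort across every mirror in the list; a hedged standalone sketch of that shape, with stand-in names rather than Spack's API:

    def push_all(mirror_urls, push_one):
        exit_code = 0
        for url in mirror_urls:
            try:
                push_one(url)
            except Exception as err:
                # keep pushing to the remaining mirrors, but fail the job
                print(f"push to {url} failed: {err}")
                exit_code = 1
        return exit_code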
lib/spack/spack/cmd/clean.py
@@ -10,9 +10,11 @@
 import llnl.util.filesystem
 import llnl.util.tty as tty
 
+import spack.bootstrap
 import spack.caches
-import spack.cmd
+import spack.cmd.test
 import spack.config
+import spack.repo
 import spack.stage
 import spack.store
 import spack.util.path
@@ -105,8 +107,7 @@ def clean(parser, args):
     # Then do the cleaning falling through the cases
     if args.specs:
         specs = spack.cmd.parse_specs(args.specs, concretize=False)
-        specs = spack.cmd.matching_specs_from_env(specs)
-
+        specs = list(spack.cmd.matching_spec_from_env(x) for x in specs)
         for spec in specs:
             msg = "Cleaning build stage [{0}]"
             tty.msg(msg.format(spec.short_spec))
lib/spack/spack/cmd/commands.py
@@ -17,7 +17,6 @@
 from llnl.util.tty.colify import colify
 
 import spack.cmd
-import spack.config
 import spack.main
 import spack.paths
 import spack.platforms
Some files were not shown because too many files have changed in this diff