Compare commits

1 commit

bdcd817233 | Wouter Deconinck | containers: update docs from centos:7 to almalinux:9 | 2024-07-04 19:40:24 -05:00

4944 changed files with 25210 additions and 55585 deletions

View File

@@ -5,7 +5,7 @@ coverage:
   status:
     project:
       default:
-        threshold: 2.0%
+        threshold: 0.2%

 ignore:
 - lib/spack/spack/test/.*

View File

@@ -1,5 +1,4 @@
{ {
"name": "Ubuntu 20.04",
"image": "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01", "image": "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01",
"postCreateCommand": "./.devcontainer/postCreateCommand.sh" "postCreateCommand": "./.devcontainer/postCreateCommand.sh"
} }

View File

@@ -1,5 +0,0 @@
-{
-  "name": "Ubuntu 22.04",
-  "image": "ghcr.io/spack/ubuntu-22.04:v2024-05-07",
-  "postCreateCommand": "./.devcontainer/postCreateCommand.sh"
-}

View File

@@ -5,10 +5,13 @@ updates:
     directory: "/"
     schedule:
       interval: "daily"
-  # Requirements to run style checks and build documentation
+  # Requirements to build documentation
   - package-ecosystem: "pip"
-    directories:
-    - "/.github/workflows/requirements/style/*"
-    - "/lib/spack/docs"
+    directory: "/lib/spack/docs"
+    schedule:
+      interval: "daily"
+  # Requirements to run style checks
+  - package-ecosystem: "pip"
+    directory: "/.github/workflows/style"
     schedule:
       interval: "daily"

View File

@@ -28,8 +28,8 @@ jobs:
       run:
         shell: ${{ matrix.system.shell }}
     steps:
-    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
-    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: ${{inputs.python_version}}
     - name: Install Python packages
@@ -40,35 +40,30 @@ jobs:
       run: |
         python -m pip install --upgrade pywin32
     - name: Package audits (with coverage)
-      env:
-        COVERAGE_FILE: coverage/.coverage-audits-${{ matrix.system.os }}
       if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
       run: |
         . share/spack/setup-env.sh
         coverage run $(which spack) audit packages
-        coverage run $(which spack) audit configs
         coverage run $(which spack) -d audit externals
         coverage combine
+        coverage xml
     - name: Package audits (without coverage)
       if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
       run: |
         . share/spack/setup-env.sh
         spack -d audit packages
-        spack -d audit configs
         spack -d audit externals
     - name: Package audits (without coverage)
       if: ${{ runner.os == 'Windows' }}
       run: |
         . share/spack/setup-env.sh
         spack -d audit packages
         ./share/spack/qa/validate_last_exit.ps1
-        spack -d audit configs
-        ./share/spack/qa/validate_last_exit.ps1
         spack -d audit externals
         ./share/spack/qa/validate_last_exit.ps1
-    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
-      if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
+    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+      if: ${{ inputs.with_coverage == 'true' }}
      with:
-        name: coverage-audits-${{ matrix.system.os }}
-        path: coverage
-        include-hidden-files: true
+        flags: unittests,audits
+        token: ${{ secrets.CODECOV_TOKEN }}
+        verbose: true
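
Note: the audit steps in this hunk can be reproduced locally with the same commands the workflow runs (shown verbatim above):

    $ . share/spack/setup-env.sh
    $ spack -d audit packages
    $ spack -d audit externals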

View File

@@ -37,7 +37,7 @@ jobs:
             make patch unzip which xz python3 python3-devel tree \
             cmake bison
     - name: Checkout
-      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+      uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
     - name: Bootstrap clingo
@@ -53,27 +53,31 @@ jobs:
     runs-on: ${{ matrix.runner }}
     strategy:
       matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
+        runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
     steps:
     - name: Setup macOS
-      if: ${{ matrix.runner != 'ubuntu-latest' }}
+      if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest' }}
       run: |
         brew install cmake bison tree
     - name: Checkout
-      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+      uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: "3.12"
     - name: Bootstrap clingo
+      env:
+        SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
+        SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
+        USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
       run: |
-        source share/spack/setup-env.sh
+        ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
         spack bootstrap disable github-actions-v0.5
         spack bootstrap disable github-actions-v0.4
         spack external find --not-buildable cmake bison
         spack -d solve zlib
-        tree $HOME/.spack/bootstrap/store/
+        tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/

   gnupg-sources:
     runs-on: ${{ matrix.runner }}
@@ -90,7 +94,7 @@ jobs:
       if: ${{ matrix.runner == 'ubuntu-latest' }}
       run: sudo rm -rf $(command -v gpg gpg2 patchelf)
     - name: Checkout
-      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+      uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
     - name: Bootstrap GnuPG
@@ -119,10 +123,10 @@ jobs:
       run: |
         sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
     - name: Checkout
-      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+      uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: |
           3.8
@@ -148,7 +152,7 @@ jobs:
             not_found=0
             old_path="$PATH"
             export PATH="$ver_dir:$PATH"
-            ./bin/spack-tmpconfig -b ./.github/workflows/bin/bootstrap-test.sh
+            ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
             export PATH="$old_path"
           fi
         fi
@@ -161,45 +165,5 @@ jobs:
       run: |
         source share/spack/setup-env.sh
         spack -d gpg list
-        tree $HOME/.spack/bootstrap/store/
-    - name: Bootstrap File
-      run: |
-        source share/spack/setup-env.sh
-        spack -d python share/spack/qa/bootstrap-file.py
-        tree $HOME/.spack/bootstrap/store/
-
-  windows:
-    runs-on: "windows-latest"
-    steps:
-    - name: Checkout
-      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
-      with:
-        python-version: "3.12"
-    - name: Setup Windows
-      run: |
-        Remove-Item -Path (Get-Command gpg).Path
-        Remove-Item -Path (Get-Command file).Path
-    - name: Bootstrap clingo
-      run: |
-        ./share/spack/setup-env.ps1
-        spack bootstrap disable github-actions-v0.5
-        spack bootstrap disable github-actions-v0.4
-        spack external find --not-buildable cmake bison
-        spack -d solve zlib
-        ./share/spack/qa/validate_last_exit.ps1
-        tree $env:userprofile/.spack/bootstrap/store/
-    - name: Bootstrap GnuPG
-      run: |
-        ./share/spack/setup-env.ps1
-        spack -d gpg list
-        ./share/spack/qa/validate_last_exit.ps1
-        tree $env:userprofile/.spack/bootstrap/store/
-    - name: Bootstrap File
-      run: |
-        ./share/spack/setup-env.ps1
-        spack -d python share/spack/qa/bootstrap-file.py
-        ./share/spack/qa/validate_last_exit.ps1
-        tree $env:userprofile/.spack/bootstrap/store/
+        tree ~/.spack/bootstrap/store/

View File

@@ -55,7 +55,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
     - name: Checkout
-      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+      uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
     - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
       id: docker_meta
@@ -76,7 +76,7 @@ jobs:
       env:
         SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
       run: |
-        .github/workflows/bin/generate_spack_yaml_containerize.sh
+        .github/workflows/generate_spack_yaml_containerize.sh
         . share/spack/setup-env.sh
         mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
         spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile
@@ -87,19 +87,19 @@ jobs:
         fi
     - name: Upload Dockerfile
-      uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
+      uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
       with:
         name: dockerfiles_${{ matrix.dockerfile[0] }}
         path: dockerfiles
     - name: Set up QEMU
-      uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf
+      uses: docker/setup-qemu-action@5927c834f5b4fdf503fca6f4c7eccda82949e1ee
     - name: Set up Docker Buildx
-      uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349
+      uses: docker/setup-buildx-action@4fd812986e6c8c2a69e18311145f9371337f27d4
     - name: Log in to GitHub Container Registry
-      uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
+      uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
       with:
         registry: ghcr.io
         username: ${{ github.actor }}
@@ -107,13 +107,13 @@ jobs:
     - name: Log in to DockerHub
       if: github.event_name != 'pull_request'
-      uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
+      uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
       with:
         username: ${{ secrets.DOCKERHUB_USERNAME }}
         password: ${{ secrets.DOCKERHUB_TOKEN }}
     - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-      uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75
+      uses: docker/build-push-action@1a162644f9a7e87d8f4b053101d1d9a712edc18c
       with:
         context: dockerfiles/${{ matrix.dockerfile[0] }}
         platforms: ${{ matrix.dockerfile[1] }}
@@ -126,7 +126,7 @@ jobs:
     needs: deploy-images
     steps:
     - name: Merge Artifacts
-      uses: actions/upload-artifact/merge@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
+      uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
       with:
         name: dockerfiles
         pattern: dockerfiles_*

View File

@@ -15,6 +15,18 @@ concurrency:
   cancel-in-progress: true

 jobs:
+  prechecks:
+    needs: [ changes ]
+    uses: ./.github/workflows/valid-style.yml
+    secrets: inherit
+    with:
+      with_coverage: ${{ needs.changes.outputs.core }}
+  all-prechecks:
+    needs: [ prechecks ]
+    runs-on: ubuntu-latest
+    steps:
+    - name: Success
+      run: "true"
   # Check which files have been updated by the PR
   changes:
     runs-on: ubuntu-latest
@@ -24,7 +36,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       if: ${{ github.event_name == 'push' }}
       with:
         fetch-depth: 0
@@ -67,34 +79,13 @@ jobs:
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/bootstrap.yml
     secrets: inherit

   unit-tests:
     if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/unit_tests.yaml
     secrets: inherit
-  prechecks:
-    needs: [ changes ]
-    uses: ./.github/workflows/valid-style.yml
-    secrets: inherit
-    with:
-      with_coverage: ${{ needs.changes.outputs.core }}
-  all-prechecks:
-    needs: [ prechecks ]
-    runs-on: ubuntu-latest
-    steps:
-    - name: Success
-      run: "true"
-  coverage:
-    needs: [ unit-tests, prechecks ]
-    uses: ./.github/workflows/coverage.yml
-    secrets: inherit
   all:
-    needs: [ coverage, bootstrap ]
+    needs: [ unit-tests, bootstrap ]
     runs-on: ubuntu-latest
     steps:
     - name: Success
View File

@@ -1,34 +0,0 @@
-name: coverage
-
-on:
-  workflow_call:
-
-jobs:
-  # Upload coverage reports to codecov once as a single bundle
-  upload:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
-    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
-      with:
-        python-version: '3.11'
-        cache: 'pip'
-    - name: Install python dependencies
-      run: pip install -r .github/workflows/requirements/coverage/requirements.txt
-    - name: Download coverage artifact files
-      uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16
-      with:
-        pattern: coverage-*
-        path: coverage
-        merge-multiple: true
-    - run: ls -la coverage
-    - run: coverage combine -a coverage/.coverage*
-    - run: coverage xml
-    - name: "Upload coverage report to CodeCov"
-      uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238
-      with:
-        verbose: true

.github/workflows/install_spack.sh (vendored executable file, +8)
View File

@@ -0,0 +1,8 @@
+#!/usr/bin/env sh
+. share/spack/setup-env.sh
+echo -e "config:\n  build_jobs: 2" > etc/spack/config.yaml
+spack config add "packages:all:target:[x86_64]"
+spack compiler find
+spack compiler info apple-clang
+spack debug report
+spack solve zlib

View File

@@ -14,10 +14,10 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: 3.9
     - name: Install Python packages

View File

@@ -1 +0,0 @@
-coverage==7.6.1

View File

@@ -1,6 +1,6 @@
-black==24.8.0
+black==24.4.2
 clingo==5.7.1
-flake8==7.1.1
+flake8==7.1.0
 isort==5.13.2
 mypy==1.8.0
 types-six==1.16.21.20240513

View File

@@ -16,34 +16,45 @@ jobs:
       matrix:
         os: [ubuntu-latest]
         python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
+        concretizer: ['clingo']
         on_develop:
         - ${{ github.ref == 'refs/heads/develop' }}
         include:
+        - python-version: '3.11'
+          os: ubuntu-latest
+          concretizer: original
+          on_develop: ${{ github.ref == 'refs/heads/develop' }}
         - python-version: '3.6'
           os: ubuntu-20.04
+          concretizer: clingo
           on_develop: ${{ github.ref == 'refs/heads/develop' }}
         exclude:
         - python-version: '3.7'
           os: ubuntu-latest
+          concretizer: 'clingo'
           on_develop: false
         - python-version: '3.8'
           os: ubuntu-latest
+          concretizer: 'clingo'
           on_develop: false
         - python-version: '3.9'
           os: ubuntu-latest
+          concretizer: 'clingo'
           on_develop: false
         - python-version: '3.10'
           os: ubuntu-latest
+          concretizer: 'clingo'
           on_develop: false
         - python-version: '3.11'
           os: ubuntu-latest
+          concretizer: 'clingo'
           on_develop: false
     steps:
-    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install System packages
@@ -61,7 +72,7 @@ jobs:
       run: |
         # Need this for the git tests to succeed.
         git --version
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
     - name: Bootstrap clingo
       if: ${{ matrix.concretizer == 'clingo' }}
       env:
@@ -74,25 +85,25 @@ jobs:
     - name: Run unit tests
       env:
         SPACK_PYTHON: python
+        SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
         SPACK_TEST_PARALLEL: 2
         COVERAGE: true
-        COVERAGE_FILE: coverage/.coverage-${{ matrix.os }}-python${{ matrix.python-version }}
         UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
       run: |
         share/spack/qa/run-unit-tests
-    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
+    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
       with:
-        name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
-        path: coverage
-        include-hidden-files: true
+        flags: unittests,linux,${{ matrix.concretizer }}
+        token: ${{ secrets.CODECOV_TOKEN }}
+        verbose: true
   # Test shell integration
   shell:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
     - name: Install System packages
@@ -107,17 +118,17 @@ jobs:
       run: |
         # Need this for the git tests to succeed.
         git --version
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
     - name: Run shell tests
       env:
         COVERAGE: true
       run: |
         share/spack/qa/run-shell-tests
-    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
+    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
       with:
-        name: coverage-shell
-        path: coverage
-        include-hidden-files: true
+        flags: shelltests,linux
+        token: ${{ secrets.CODECOV_TOKEN }}
+        verbose: true
   # Test RHEL8 UBI with platform Python. This job is run
   # only on PRs modifying core Spack
@@ -130,13 +141,13 @@ jobs:
         dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
     - name: Setup repo and non-root user
       run: |
         git --version
         git config --global --add safe.directory /__w/spack/spack
         git fetch --unshallow
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
         useradd spack-test
         chown -R spack-test .
     - name: Run unit tests
@@ -149,37 +160,36 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
-        python-version: '3.13'
+        python-version: '3.11'
     - name: Install System packages
       run: |
         sudo apt-get -y update
-        sudo apt-get -y install coreutils gfortran graphviz gnupg2
+        sudo apt-get -y install coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build kcov
     - name: Install Python packages
       run: |
-        pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo
+        pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
         pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
+    - name: Setup git configuration
+      run: |
+        # Need this for the git tests to succeed.
+        git --version
+        . .github/workflows/setup_git.sh
     - name: Run unit tests (full suite with coverage)
       env:
         COVERAGE: true
-        COVERAGE_FILE: coverage/.coverage-clingo-cffi
+        SPACK_TEST_SOLVER: clingo
       run: |
-        . share/spack/setup-env.sh
-        spack bootstrap disable spack-install
-        spack bootstrap disable github-actions-v0.4
-        spack bootstrap disable github-actions-v0.5
-        spack bootstrap status
-        spack solve zlib
-        spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretize.py
-    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
+        share/spack/qa/run-unit-tests
+    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
       with:
-        name: coverage-clingo-cffi
-        path: coverage
-        include-hidden-files: true
+        flags: unittests,linux,clingo
+        token: ${{ secrets.CODECOV_TOKEN }}
+        verbose: true
   # Run unit tests on MacOS
   macos:
     runs-on: ${{ matrix.os }}
@@ -188,10 +198,10 @@ jobs:
       os: [macos-13, macos-14]
       python-version: ["3.11"]
     steps:
-    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install Python packages
@@ -200,24 +210,24 @@ jobs:
         pip install --upgrade pytest coverage[toml] pytest-xdist pytest-cov
     - name: Setup Homebrew packages
       run: |
-        brew install dash fish gcc gnupg kcov
+        brew install dash fish gcc gnupg2 kcov
     - name: Run unit tests
       env:
+        SPACK_TEST_SOLVER: clingo
         SPACK_TEST_PARALLEL: 4
-        COVERAGE_FILE: coverage/.coverage-${{ matrix.os }}-python${{ matrix.python-version }}
       run: |
         git --version
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
         . share/spack/setup-env.sh
         $(which spack) bootstrap disable spack-install
         $(which spack) solve zlib
         common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
         $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
+    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
       with:
-        name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
-        path: coverage
-        include-hidden-files: true
+        flags: unittests,macos
+        token: ${{ secrets.CODECOV_TOKEN }}
+        verbose: true
   # Run unit tests on Windows
   windows:
     defaults:
@@ -226,10 +236,10 @@ jobs:
         powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: 3.9
     - name: Install Python packages
@@ -237,15 +247,15 @@ jobs:
         python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
     - name: Create local develop
       run: |
-        ./.github/workflows/bin/setup_git.ps1
+        ./.github/workflows/setup_git.ps1
     - name: Unit Test
-      env:
-        COVERAGE_FILE: coverage/.coverage-windows
       run: |
         spack unit-test -x --verbose --cov --cov-config=pyproject.toml
         ./share/spack/qa/validate_last_exit.ps1
-    - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
+        coverage combine -a
+        coverage xml
+    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
       with:
-        name: coverage-windows
-        path: coverage
-        include-hidden-files: true
+        flags: unittests,windows
+        token: ${{ secrets.CODECOV_TOKEN }}
+        verbose: true

View File

@@ -18,15 +18,15 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
-    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
         cache: 'pip'
     - name: Install Python Packages
       run: |
         pip install --upgrade pip setuptools
-        pip install -r .github/workflows/requirements/style/requirements.txt
+        pip install -r .github/workflows/style/requirements.txt
     - name: vermin (Spack's Core)
       run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
     - name: vermin (Repositories)
@@ -35,22 +35,22 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
         cache: 'pip'
     - name: Install Python packages
       run: |
         pip install --upgrade pip setuptools
-        pip install -r .github/workflows/requirements/style/requirements.txt
+        pip install -r .github/workflows/style/requirements.txt
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
         git --version
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
     - name: Run style tests
       run: |
         share/spack/qa/run-style-tests
@@ -70,13 +70,13 @@ jobs:
         dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-    - uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
     - name: Setup repo and non-root user
       run: |
         git --version
         git config --global --add safe.directory /__w/spack/spack
         git fetch --unshallow
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
         useradd spack-test
         chown -R spack-test .
     - name: Bootstrap Spack development environment
@@ -85,64 +85,5 @@ jobs:
         source share/spack/setup-env.sh
         spack debug report
         spack -d bootstrap now --dev
-        spack -d style -t black
+        spack style -t black
         spack unit-test -V
-  import-check:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: julia-actions/setup-julia@v2
-      with:
-        version: '1.10'
-    - uses: julia-actions/cache@v2
-    # PR: use the base of the PR as the old commit
-    - name: Checkout PR base commit
-      if: github.event_name == 'pull_request'
-      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
-      with:
-        ref: ${{ github.event.pull_request.base.sha }}
-        path: old
-    # not a PR: use the previous commit as the old commit
-    - name: Checkout previous commit
-      if: github.event_name != 'pull_request'
-      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
-      with:
-        fetch-depth: 2
-        path: old
-    - name: Checkout previous commit
-      if: github.event_name != 'pull_request'
-      run: git -C old reset --hard HEAD^
-    - name: Checkout new commit
-      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
-      with:
-        path: new
-    - name: Install circular import checker
-      uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871
-      with:
-        repository: haampie/circular-import-fighter
-        ref: 555519c6fd5564fd2eb844e7b87e84f4d12602e2
-        path: circular-import-fighter
-    - name: Install dependencies
-      working-directory: circular-import-fighter
-      run: make -j dependencies
-    - name: Import cycles before
-      working-directory: circular-import-fighter
-      run: make SPACK_ROOT=../old && cp solution solution.old
-    - name: Import cycles after
-      working-directory: circular-import-fighter
-      run: make clean-graph && make SPACK_ROOT=../new && cp solution solution.new
-    - name: Compare import cycles
-      working-directory: circular-import-fighter
-      run: |
-        edges_before="$(grep -oP 'edges to delete: \K\d+' solution.old)"
-        edges_after="$(grep -oP 'edges to delete: \K\d+' solution.new)"
-        if [ "$edges_after" -gt "$edges_before" ]; then
-          printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
-          printf 'previously this was %s\033[0m\n' "$edges_before"
-          printf 'Compare \033[1;97m"Import cycles before"\033[0m and '
-          printf '\033[1;97m"Import cycles after"\033[0m to see problematic imports.\n'
-          exit 1
-        else
-          printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
-        fi

View File

@@ -1,64 +1,3 @@
-# v0.22.2 (2024-09-21)
-
-## Bugfixes
-- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
-- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
-- Bump vendored `archspec` for better aarch64 support (#45721, #46445)
-- Support macOS Sequoia (#45018, #45127)
-- Fix regression in `{variants.X}` and `{variants.X.value}` format strings (#46206)
-- Ensure shell escaping of environment variable values in load and activate commands (#42780)
-- Fix an issue where `spec[pkg]` considers specs outside the current DAG (#45090)
-- Do not halt concretization on unknown variants in externals (#45326)
-- Improve validation of `develop` config section (#46485)
-- Explicitly disable `ccache` if turned off in config, to avoid cache pollution (#45275)
-- Improve backwards compatibility in `include_concrete` (#45766)
-- Fix issue where package tags were sometimes repeated (#45160)
-- Make `setup-env.sh` "sourced only" by dropping execution bits (#45641)
-- Make certain source/binary fetch errors recoverable instead of a hard error (#45683)
-- Remove debug statements in package hash computation (#45235)
-- Remove redundant clingo warnings (#45269)
-- Remove hard-coded layout version (#45645)
-- Do not initialize previous store state in `use_store` (#45268)
-- Docs improvements (#46475)
-
-## Package updates
-- `chapel` major update (#42197, #44931, #45304)
-
-# v0.22.1 (2024-07-04)
-
-## Bugfixes
-- Fix reuse of externals on Linux (#44316)
-- Ensure parent gcc-runtime version >= child (#44834, #44870)
-- Ensure the latest gcc-runtime is rpath'ed when multiple exist among link deps (#44219)
-- Improve version detection of glibc (#44154)
-- Improve heuristics for solver (#44893, #44976, #45023)
-- Make strong preferences override reuse (#44373)
-- Reduce verbosity when C compiler is missing (#44182)
-- Make missing ccache executable an error when required (#44740)
-- Make every environment view containing `python` a `venv` (#44382)
-- Fix external detection for compilers with os but no target (#44156)
-- Fix version optimization for roots (#44272)
-- Handle common implementations of pagination of tags in OCI build caches (#43136)
-- Apply fetched patches to develop specs (#44950)
-- Avoid Windows wrappers for filesystem utilities on non-Windows (#44126)
-- Fix issue with long filenames in build caches on Windows (#43851)
-- Fix formatting issue in `spack audit` (#45045)
-- CI fixes (#44582, #43965, #43967, #44279, #44213)
-
-## Package updates
-- protobuf: fix 3.4:3.21 patch checksum (#44443)
-- protobuf: update hash for patch needed when="@3.4:3.21" (#44210)
-- git: bump v2.39 to 2.45; deprecate unsafe versions (#44248)
-- gcc: use -rpath {rpath_dir} not -rpath={rpath dir} (#44315)
-- Remove mesa18 and libosmesa (#44264)
-- Enforce consistency of `gl` providers (#44307)
-- Require libiconv for iconv (#44335, #45026).
-  Notice that glibc/musl also provide iconv, but are not guaranteed to be
-  complete. Set `packages:iconv:require:[glibc]` to restore the old behavior.
-- py-matplotlib: qualify when to do a post install (#44191)
-- rust: fix v1.78.0 instructions (#44127)
-- suite-sparse: improve setting of the `libs` property (#44214)
-- netlib-lapack: provide blas and lapack together (#44981)
-
 # v0.22.0 (2024-05-12)
@@ -380,16 +319,6 @@
 * 344 committers to packages
 * 45 committers to core

-# v0.21.3 (2024-10-02)
-
-## Bugfixes
-- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
-- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
-- Bump `archspec` to 0.2.5-dev for better aarch64 and Windows support (#42854, #44005,
-  #45721, #46445)
-- Support macOS Sequoia (#45018, #45127, #43862)
-- CI and test maintenance (#42909, #42728, #46711, #41943, #43363)
-
 # v0.21.2 (2024-03-01)

 ## Bugfixes

View File

@@ -46,18 +46,13 @@ See the
 [Feature Overview](https://spack.readthedocs.io/en/latest/features.html)
 for examples and highlights.

-To install spack and your first package, make sure you have Python & Git.
+To install spack and your first package, make sure you have Python.
 Then:

-    $ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
+    $ git clone -c feature.manyFiles=true https://github.com/spack/spack.git
     $ cd spack/bin
     $ ./spack install zlib

-> [!TIP]
-> `-c feature.manyFiles=true` improves git's performance on repositories with 1,000+ files.
->
-> `--depth=2` prunes the git history to reduce the size of the Spack installation.
-
 Documentation
 ----------------

View File

@@ -115,6 +115,12 @@ config:
   suppress_gpg_warnings: false

+  # If set to true, Spack will attempt to build any compiler on the spec
+  # that is not already available. If set to False, Spack will only use
+  # compilers already configured in compilers.yaml
+  install_missing_compilers: false
+
   # If set to true, Spack will always check checksums after downloading
   # archives. If false, Spack skips the checksum step.
   checksum: true
@@ -164,6 +170,23 @@ config:
   # If set to true, Spack will use ccache to cache C compiles.
   ccache: false

+  # The concretization algorithm to use in Spack. Options are:
+  #
+  # 'clingo': Uses a logic solver under the hood to solve DAGs with full
+  #           backtracking and optimization for user preferences. Spack will
+  #           try to bootstrap the logic solver, if not already available.
+  #
+  # 'original': Spack's original greedy, fixed-point concretizer. This
+  #             algorithm can make decisions too early and will not backtrack
+  #             sufficiently for many specs. This will soon be deprecated in
+  #             favor of clingo.
+  #
+  # See `concretizer.yaml` for more settings you can fine-tune when
+  # using clingo.
+  concretizer: clingo
+
   # How long to wait to lock the Spack installation database. This lock is used
   # when Spack needs to manage its own package metadata and all operations are
   # expected to complete within the default time limit. The timeout should
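
For readers trying out the ``concretizer`` setting shown in this hunk, the value can also be inspected and changed from the command line; a minimal sketch using standard ``spack config`` subcommands (the exact output format is not part of this diff):

    $ spack config get config                     # print the merged 'config' section
    $ spack config add config:concretizer:clingo  # set it in the user scope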

View File

@@ -0,0 +1,3 @@
+packages:
+  iconv:
+    require: [libiconv]

View File

@@ -20,14 +20,11 @@ packages:
     awk: [gawk]
     armci: [armcimpi]
     blas: [openblas, amdblis]
-    c: [gcc]
-    cxx: [gcc]
     D: [ldc]
     daal: [intel-oneapi-daal]
     elf: [elfutils]
     fftw-api: [fftw, amdfftw]
     flame: [libflame, amdlibflame]
-    fortran: [gcc]
     fortran-rt: [gcc-runtime, intel-oneapi-runtime]
     fuse: [libfuse]
     gl: [glx, osmesa]
@@ -64,7 +61,6 @@ packages:
     tbb: [intel-tbb]
     unwind: [libunwind]
     uuid: [util-linux-uuid, libuuid]
-    wasi-sdk: [wasi-sdk-prebuilt]
     xxd: [xxd-standalone, vim]
     yacc: [bison, byacc]
     ziglang: [zig]
@@ -72,13 +68,3 @@ packages:
   permissions:
     read: world
     write: user
-  cray-mpich:
-    buildable: false
-  cray-mvapich2:
-    buildable: false
-  fujitsu-mpi:
-    buildable: false
-  hpcx-mpi:
-    buildable: false
-  spectrum-mpi:
-    buildable: false
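
A user-level counterpart to the provider preferences above would look like the following; a minimal sketch (the ``mpi`` preference order is a hypothetical example, not taken from this diff):

    packages:
      all:
        providers:
          # prefer mpich over other MPI implementations when concretizing
          mpi: [mpich, openmpi]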

View File

@@ -1,5 +1,6 @@
 config:
   locks: false
+  concretizer: clingo
   build_stage::
   - '$spack/.staging'
   stage_name: '{name}-{version}-{hash:7}'

View File

@@ -1175,17 +1175,6 @@ unspecified version, but packages can depend on other packages with
 could depend on ``mpich@1.2:`` if it can only build with version
 ``1.2`` or higher of ``mpich``.

-.. note:: Windows Spec Syntax Caveats
-
-   Windows has a few idiosyncrasies when it comes to the Spack spec syntax and the use of certain shells.
-   Spack's spec dependency syntax uses the caret (``^``) character; however, this is an escape character in CMD,
-   so it must be escaped with an additional caret (i.e. ``^^``).
-   CMD will also attempt to interpret strings with ``=`` characters in them, so any spec including this symbol
-   must be double quoted.
-
-   Note: All of these issues are unique to CMD; they can be avoided by using PowerShell.
-
-   For more context on these caveats see the related issues: `caret <https://github.com/spack/spack/issues/42833>`_ and `equals <https://github.com/spack/spack/issues/43348>`_.
-
 Below are more details about the specifiers that you can add to specs.

 .. _version-specifier:
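
As a concrete illustration of the CMD caveats in the note removed above (hypothetical sessions; the spec names are placeholders): in CMD the caret must be doubled and specs containing ``=`` must be quoted, while PowerShell needs neither:

    C:\> spack install hdf5^^mpich
    C:\> spack install "hdf5 cflags=-O2"
    PS C:\> spack install hdf5^mpich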

View File

@@ -166,74 +166,3 @@ while `py-numpy` still needs an older version:
 Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the
 default behavior is ``duplicates:strategy:minimal``.

---------
-Splicing
---------
-
-The ``splice`` key covers config attributes for splicing specs in the solver.
-
-"Splicing" is a method for replacing a dependency with another spec
-that provides the same package or virtual. There are two types of
-splices, referring to different behaviors for shared dependencies
-between the root spec and the new spec replacing a dependency:
-"transitive" and "intransitive". A "transitive" splice is one that
-resolves all conflicts by taking the dependency from the new node. An
-"intransitive" splice is one that resolves all conflicts by taking the
-dependency from the original root. From a theory perspective, hybrid
-splices are possible but are not modeled by Spack.
-
-All spliced specs retain a ``build_spec`` attribute that points to the
-original Spec before any splice occurred. The ``build_spec`` for a
-non-spliced spec is itself.
-
-The figure below shows examples of transitive and intransitive splices:
-
-.. figure:: images/splices.png
-   :align: center
-
-The concretizer can be configured to explicitly splice particular
-replacements for a target spec. Splicing will allow the user to make
-use of generically built public binary caches, while swapping in
-highly optimized local builds for performance critical components
-and/or components that interact closely with the specific hardware
-details of the system. The most prominent candidate for splicing is
-MPI providers. MPI packages have relatively well-understood ABI
-characteristics, and most High Performance Computing facilities deploy
-highly optimized MPI packages tailored to their particular
-hardware. The following config block configures Spack to replace
-whatever MPI provider each spec was concretized to use with the
-particular package of ``mpich`` with the hash that begins ``abcdef``.
-
-.. code-block:: yaml
-
-   concretizer:
-     splice:
-       explicit:
-       - target: mpi
-         replacement: mpich/abcdef
-         transitive: false
-
-.. warning::
-
-   When configuring an explicit splice, you as the user take on the
-   responsibility for ensuring ABI compatibility between the specs
-   matched by the target and the replacement you provide. If they are
-   not compatible, Spack will not warn you and your application will
-   fail to run.
-
-The ``target`` field of an explicit splice can be any abstract
-spec. The ``replacement`` field must be a spec that includes the hash
-of a concrete spec, and the replacement must either be the same
-package as the target, provide the virtual that is the target, or
-provide a virtual that the target provides. The ``transitive`` field
-is optional -- by default, splices will be transitive.
-
-.. note::
-
-   With explicit splices configured, it is possible for Spack to
-   concretize to a spec that does not satisfy the input. For example,
-   with the config above ``hdf5 ^mvapich2`` will concretize to use
-   ``mpich/abcdef`` instead of ``mvapich2`` as the MPI provider. Spack
-   will warn the user in this case, but will not fail the
-   concretization.
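
The ``duplicates:strategy`` setting referenced at the top of this hunk lives in ``concretizer.yaml``; a minimal sketch of selecting it explicitly (values as described in the surviving text):

    concretizer:
      duplicates:
        # 'minimal' is the default from v0.21 on; 'none' restores the
        # pre-v0.21 single-configuration-per-package behavior
        strategy: minimal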

View File

@@ -130,19 +130,14 @@ before or after a particular phase. For example, in ``perl``, we see:

     @run_after("install")
     def install_cpanm(self):
         spec = self.spec
-        maker = make
-        cpan_dir = join_path("cpanm", "cpanm")
-        if sys.platform == "win32":
-            maker = nmake
-        cpan_dir = join_path(self.stage.source_path, cpan_dir)
-        cpan_dir = windows_sfn(cpan_dir)
-        if "+cpanm" in spec:
-            with working_dir(cpan_dir):
+        if spec.satisfies("+cpanm"):
+            with working_dir(join_path("cpanm", "cpanm")):
                 perl = spec["perl"].command
                 perl("Makefile.PL")
-                maker()
-                maker("install")
+                make()
+                make("install")

 This extra step automatically installs ``cpanm`` in addition to the
 base Perl installation.
@@ -181,14 +176,8 @@ In the ``perl`` package, we can see:

     @run_after("build")
     @on_package_attributes(run_tests=True)
-    def build_test(self):
-        if sys.platform == "win32":
-            win32_dir = os.path.join(self.stage.source_path, "win32")
-            win32_dir = windows_sfn(win32_dir)
-            with working_dir(win32_dir):
-                nmake("test", ignore_quotes=True)
-        else:
-            make("test")
+    def test(self):
+        make("test")

 As you can guess, this runs ``make test`` *after* building the package,
 if and only if testing is requested. Again, this is not specific to

View File

@@ -49,14 +49,14 @@ following phases:

 #. ``install`` - install the package

 Package developers often add unit tests that can be invoked with
-``scons test`` or ``scons check``. Spack provides a ``build_test`` method
+``scons test`` or ``scons check``. Spack provides a ``test`` method
 to handle this. Since we don't know which one the package developer
-chose, the ``build_test`` method does nothing by default, but can be easily
+chose, the ``test`` method does nothing by default, but can be easily
 overridden like so:

 .. code-block:: python

-   def build_test(self):
+   def test(self):
        scons("check")

View File

@@ -5,9 +5,9 @@

 .. chain:

-=============================================
-Chaining Spack Installations (upstreams.yaml)
-=============================================
+============================
+Chaining Spack Installations
+============================

 You can point your Spack installation to another installation to use any
 packages that are installed there. To register the other Spack instance,
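
For context on the ``upstreams.yaml`` dropped from the old title: registering another Spack instance is done with a small config file; a sketch (instance name and path are placeholders):

    upstreams:
      upstream-spack:
        install_tree: /path/to/other/spack/opt/spack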

View File

@@ -206,7 +206,6 @@ def setup(sphinx):
     ("py:class", "six.moves.urllib.parse.ParseResult"),
     ("py:class", "TextIO"),
     ("py:class", "hashlib._Hash"),
-    ("py:class", "concurrent.futures._base.Executor"),
     # Spack classes that are private and we don't want to expose
     ("py:class", "spack.provider_index._IndexBase"),
     ("py:class", "spack.repo._PrependFileLoader"),
@@ -218,10 +217,6 @@ def setup(sphinx):
     ("py:class", "spack.spec.SpecfileReaderBase"),
     ("py:class", "spack.install_test.Pb"),
     ("py:class", "spack.filesystem_view.SimpleFilesystemView"),
-    ("py:class", "spack.traverse.EdgeAndDepth"),
-    ("py:class", "archspec.cpu.microarchitecture.Microarchitecture"),
-    # TypeVar that is not handled correctly
-    ("py:class", "llnl.util.lang.T"),
 ]

 # The reST default role (used for this markup: `text`) to use for all documents.

View File

@@ -281,7 +281,7 @@ When spack queries for configuration parameters, it searches in
 higher-precedence scopes first. So, settings in a higher-precedence file
 can override those with the same key in a lower-precedence one. For
 list-valued settings, Spack *prepends* higher-precedence settings to
-lower-precedence settings. Completely ignoring lower-precedence configuration
+lower-precedence settings. Completely ignoring higher-level configuration
 options is supported with the ``::`` notation for keys (see
 :ref:`config-overrides` below).
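
A short illustration of the ``::`` notation mentioned above; a sketch of a higher-precedence ``config.yaml`` (the ``build_jobs`` value is a hypothetical example):

    # 'config::' discards the 'config' section from lower-precedence scopes
    # instead of merging with it
    config::
      build_jobs: 4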

View File

@@ -270,7 +270,7 @@ under the ``container`` attribute of environments:

      # Sets the base images for the stages where Spack builds the
      # software or where the software gets installed after being built..
      images:
-       os: "centos:7"
+       os: "almalinux:9"
        spack: develop

      # Whether or not to strip binaries
@@ -321,32 +321,33 @@ following ``spack.yaml``:

    container:
      images:
-       os: centos:7
-       spack: 0.15.4
+       os: almalinux:9
+       spack: 0.22.0

-uses ``spack/centos7:0.15.4`` and ``centos:7`` for the stages where the
+uses ``spack/almalinux9:0.22.0`` and ``almalinux:9`` for the stages where the
 software is respectively built and installed:

 .. code-block:: docker

    # Build stage with Spack pre-installed and ready to be used
-   FROM spack/centos7:0.15.4 as builder
+   FROM spack/almalinux9:0.22.0 AS builder

    # What we want to install and how we want to install it
    # is specified in a manifest file (spack.yaml)
-   RUN mkdir /opt/spack-environment \
-   &&  (echo "spack:" \
-   &&   echo "  specs:" \
-   &&   echo "  - gromacs+mpi" \
-   &&   echo "  - mpich" \
-   &&   echo "  concretizer:" \
-   &&   echo "    unify: true" \
-   &&   echo "  config:" \
-   &&   echo "    install_tree: /opt/software" \
-   &&   echo "  view: /opt/view") > /opt/spack-environment/spack.yaml
+   RUN mkdir -p /opt/spack-environment && \
+       set -o noclobber \
+   &&  (echo spack: \
+   &&   echo '  specs:' \
+   &&   echo '  - gromacs+mpi' \
+   &&   echo '  - mpich' \
+   &&   echo '  concretizer:' \
+   &&   echo '    unify: true' \
+   &&   echo '  config:' \
+   &&   echo '    install_tree: /opt/software' \
+   &&   echo '  view: /opt/views/view') > /opt/spack-environment/spack.yaml

    [ ... ]

    # Bare OS image to run the installed executables
-   FROM centos:7
+   FROM quay.io/almalinuxorg/almalinux:9

    COPY --from=builder /opt/spack-environment /opt/spack-environment
    COPY --from=builder /opt/software /opt/software
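
The Dockerfiles shown above are generated rather than written by hand; with such a ``spack.yaml`` in the current directory, the documented flow is roughly (the image tag is a placeholder):

    $ spack containerize > Dockerfile
    $ docker build -t my-image .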

View File

@@ -316,215 +316,6 @@ documentation tests to make sure there are no errors. Documentation changes can
in some obfuscated warning messages. If you don't understand what they mean, feel free in some obfuscated warning messages. If you don't understand what they mean, feel free
to ask when you submit your PR. to ask when you submit your PR.
.. _spack-builders-and-pipelines:
^^^^^^^^^
GitLab CI
^^^^^^^^^
""""""""""""""""""
Build Cache Stacks
""""""""""""""""""
Spack welcomes the contribution of software stacks of interest to the community. These
stacks are used to test package recipes and generate publicly available build caches.
Spack uses GitLab CI for managing the orchestration of build jobs.
GitLab Entry Point
~~~~~~~~~~~~~~~~~~
Add stack entrypoint to the ``share/spack/gitlab/cloud_pipelines/.gitlab-ci.yml``. There
are two stages required for each new stack, the generation stage and the build stage.
The generate stage is defined using the job template ``.generate`` configured with
environment variables defining the name of the stack in ``SPACK_CI_STACK_NAME`` and the
platform (``SPACK_TARGET_PLATFORM``) and architecture (``SPACK_TARGET_ARCH``) configuration,
and the tags associated with the class of runners to build on.
.. note::
The ``SPACK_CI_STACK_NAME`` must match the name of the directory containing the
stacks ``spack.yaml``.
.. note::
The platform and architecture variables are specified in order to select the
correct configurations from the generic configurations used in Spack CI. The
configurations currently available are:
* ``.cray_rhel_zen4``
* ``.cray_sles_zen4``
* ``.darwin_aarch64``
* ``.darwin_x86_64``
* ``.linux_aarch64``
* ``.linux_icelake``
* ``.linux_neoverse_n1``
* ``.linux_neoverse_v1``
* ``.linux_neoverse_v2``
* ``.linux_power``
* ``.linux_skylake``
* ``.linux_x86_64``
* ``.linux_x86_64_v4``
New configurations can be added to accommodate new platforms and architectures.
The build stage is defined as a trigger job that consumes the GitLab CI pipeline generated in
the generate stage for this stack. Build stage jobs use the ``.build`` job template which
handles the basic configuration.
An example entry point for a new stack called ``my-super-cool-stack``
.. code-block:: yaml
.my-super-cool-stack:
extends: [ ".linux_x86_64_v3" ]
variables:
SPACK_CI_STACK_NAME: my-super-cool-stack
tags: [ "all", "tags", "your", "job", "needs"]
my-super-cool-stack-generate:
extends: [ ".generate", ".my-super-cool-stack" ]
image: my-super-cool-stack-image:0.0.1
my-super-cool-stack-build:
extends: [ ".build", ".my-super-cool-stack" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: my-super-cool-stack-generate
strategy: depend
needs:
- artifacts: True
job: my-super-cool-stack-generate
Stack Configuration
~~~~~~~~~~~~~~~~~~~
The stack configuration is a spack environment file with two additional sections added.
Stack configurations should be located in ``share/spack/gitlab/cloud_pipelines/stacks/<stack_name>/spack.yaml``.
The ``ci`` section is generally used to define stack specific mappings such as image or tags.
For more information on what can go into the ``ci`` section refer to the docs on pipelines.
The ``cdash`` section is used for defining where to upload the results of builds. Spack configures
most of the details for posting pipeline results to
`cdash.spack.io <https://cdash.spack.io/index.php?project=Spack+Testing>`_. The only
requirement in the stack configuration is to define a ``build-group`` that is unique;
this is usually the long name of the stack.
An example stack that builds ``zlib``.
.. code-block:: yaml
spack:
view: false
packages:
all:
require: ["%gcc", "target=x86_64_v3"]
specs:
- zlib
ci:
pipeline-gen:
- build-job:
image: my-super-cool-stack-image:0.0.1
cdash:
build-group: My Super Cool Stack
.. note::
The ``image`` used in the ``*-generate`` job must match exactly the ``image`` used in the ``build-job``.
When the images do not match, the build job may fail.
"""""""""""""""""""
Registering Runners
"""""""""""""""""""
Contributing computational resources to Spack's CI build farm is one way to help expand the
capabilities and offerings of the public Spack build caches. Currently, Spack utilizes linux runners
from AWS, Google, and the University of Oregon (UO).
Runners require four key pieces:
* Runner Registration Token
* Accurate tags
* OIDC Authentication script
* GPG keys
Minimum GitLab Runner Version: ``16.1.0``
`Installation instructions <https://docs.gitlab.com/runner/install/>`_
Registration Token
~~~~~~~~~~~~~~~~~~
The first step to contribute new runners is to open an issue in the `spack infrastructure <https://github.com/spack/spack-infrastructure/issues/new?assignees=&labels=runner-registration&projects=&template=runner_registration.yml>`_
project. This will be reported to the Spack infrastructure team, who will guide users through the process
of registering new runners for Spack CI.
The information needed to register a runner is the motivation for the new resources, a semi-detailed description of
the runner, and finally the point of contact for maintaining the software on the runner.

The point of contact will then work with the infrastructure team to obtain runner registration token(s) for interacting
with Spack's GitLab instance. Once the runner is active, this point of contact will also be responsible for updating the
GitLab runner software to keep pace with Spack's GitLab.
Tagging
~~~~~~~
In the initial stages of runner registration it is important to **exclude** the special tag ``spack``. This will prevent
the new runner(s) from being picked up for production CI jobs while they are configured and evaluated. Once it is determined
that a runner is ready for production use, the ``spack`` tag will be added.
Because GitLab has no concept of tag exclusion, runners that provide specialized resources also require specialized tags.
For example, a basic CPU-only x86_64 runner may have a tag ``x86_64`` associated with it. However, a runner containing a
CUDA-capable GPU may have the tag ``x86_64-cuda`` to denote that it should only be used for packages that will benefit from
a CUDA-capable resource.
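For illustration, a pipeline job would select such a runner through its tags; the
job name and script below are hypothetical:

.. code-block:: yaml

   my-cuda-build:
     tags: [ "x86_64-cuda" ]
     script:
       - spack ci rebuild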
OIDC
~~~~
Spack runners use OIDC authentication for connecting to the appropriate AWS bucket
which is used for coordinating the communication of binaries between build jobs. In
order to configure OIDC authentication, Spack CI runners use a Python script with minimal
dependencies. This script can be configured for runners as seen here using the ``pre_build_script``.
.. code-block:: toml
[[runners]]
pre_build_script = """
echo 'Executing Spack pre-build setup script'
for cmd in "${PY3:-}" python3 python; do
if command -v > /dev/null "$cmd"; then
export PY3="$(command -v "$cmd")"
break
fi
done
if [ -z "${PY3:-}" ]; then
echo "Unable to find python3 executable"
exit 1
fi
$PY3 -c "import urllib.request;urllib.request.urlretrieve('https://raw.githubusercontent.com/spack/spack-infrastructure/main/scripts/gitlab_runner_pre_build/pre_build.py', 'pre_build.py')"
$PY3 pre_build.py > envvars
. ./envvars
rm -f envvars
unset GITLAB_OIDC_TOKEN
"""
GPG Keys
~~~~~~~~
Runners that may be utilized for ``protected`` CI require the registration of an intermediate signing key that
can be used to sign packages. For more information on package signing read :ref:`key_architecture`.
--------
Coverage
--------

View File

@@ -181,6 +181,10 @@ Spec-related modules
:mod:`spack.parser`
  Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.
:mod:`spack.concretize`
Contains :class:`~spack.concretize.Concretizer` implementation,
which allows site administrators to change Spack's :ref:`concretization-policies`.
:mod:`spack.version`
  Implements a simple :class:`~spack.version.Version` class with simple
  comparison semantics. Also implements :class:`~spack.version.VersionRange`
@@ -712,27 +716,27 @@ Release branches
^^^^^^^^^^^^^^^^

There are currently two types of Spack releases: :ref:`major releases
<major-releases>` (``0.21.0``, ``0.22.0``, etc.) and :ref:`patch releases
<patch-releases>` (``0.22.1``, ``0.22.2``, ``0.22.3``, etc.). Here is a
diagram of how Spack release branches work::
   o    branch: develop  (latest version, v0.23.0.dev0)
   |
   o
   | o  branch: releases/v0.22, tag: v0.22.1
   o |
   | o  tag: v0.22.0
   o |
   | o
   |/
   o
   |
   o
   | o  branch: releases/v0.21, tag: v0.21.2
   o |
   | o  tag: v0.21.1
   o |
   | o  tag: v0.21.0
   o |
   | o
   |/
@@ -743,8 +747,8 @@ requests target ``develop``. The ``develop`` branch will report that its
version is that of the next **major** release with a ``.dev0`` suffix.

Each Spack release series also has a corresponding branch, e.g.
``releases/v0.22`` has ``v0.22.x`` versions of Spack, and
``releases/v0.21`` has ``v0.21.x`` versions. A major release is the first
tagged version on a release branch. Minor releases are back-ported from
develop onto release branches. This is typically done by cherry-picking
bugfix commits off of ``develop``.
@@ -774,40 +778,27 @@ for more details.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Scheduling work for releases
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

We schedule work for **major releases** through `milestones
<https://github.com/spack/spack/milestones>`_ and `GitHub Projects
<https://github.com/spack/spack/projects>`_, while **patch releases** use `labels
<https://github.com/spack/spack/labels>`_.

There is only one milestone open at a time. Its name corresponds to the next major version, for
example ``v0.23``. Important issues and pull requests should be assigned to this milestone by
core developers, so that they are not forgotten at the time of release. The milestone is closed
when the release is made, and a new milestone is created for the next major release.

Bug reports in GitHub issues are automatically labelled ``bug`` and ``triage``. Spack developers
assign one of the labels ``impact-low``, ``impact-medium`` or ``impact-high``. This will make the
issue appear in the `Triaged bugs <https://github.com/orgs/spack/projects/6>`_ project board.
Important issues should be assigned to the next milestone as well, so they appear at the top of
the project board.

Spack's milestones are not firm commitments, so we move work between releases frequently. If we
need to make a release and some tasks are not yet done, we will simply move them to the next major
release milestone, rather than delaying the release to complete them.
^^^^^^^^^^^^^^^^^^^^^
Backporting bug fixes
^^^^^^^^^^^^^^^^^^^^^
When a bug is fixed in the ``develop`` branch, it is often necessary to backport the fix to one
(or more) of the ``release/vX.Y`` branches. Only the release manager is responsible for doing
backports, but Spack maintainers are responsible for labelling pull requests (and issues if no bug
fix is available yet) with ``vX.Y.Z`` labels. The label should correspond to the next patch version
that the bug fix should be backported to.
Backports are done publicly by the release manager using a pull request named ``Backports vX.Y.Z``.
This pull request is opened from the ``backports/vX.Y.Z`` branch, targets the ``releases/vX.Y``
branch and contains a (growing) list of cherry-picked commits from the ``develop`` branch.
Typically there are one or two backport pull requests open at any given time.
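In practice, backporting a squashed pull request looks roughly like this (the
commit hash and version number are illustrative):

.. code-block:: console

   $ git checkout backports/v0.22.1
   $ git cherry-pick 7e46da7   # the squashed commit of the labelled PR on develop
   $ git push origin backports/v0.22.1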
.. _major-releases:
@@ -815,21 +806,25 @@ Typically there are one or two backport pull requests open at any given time.
^^^^^^^^^^^^^^^^^^^^^
Making major releases
^^^^^^^^^^^^^^^^^^^^^

Assuming all required work from the milestone is completed, the steps to make the major release
are:

#. `Create a new milestone <https://github.com/spack/spack/milestones>`_ for the next major
   release.

#. `Create a new label <https://github.com/spack/spack/labels>`_ for the next patch release.

#. Move any optional tasks that are not done to the next milestone.

#. Create a branch for the release, based on ``develop``:

   .. code-block:: console

      $ git checkout -b releases/v0.23 develop

   For a version ``vX.Y.Z``, the branch's name should be
   ``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
@@ -865,8 +860,8 @@ are:
Create a pull request targeting the ``develop`` branch, bumping the major
version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
For instance when you have just released ``v0.23.0``, set the version
to ``(0, 24, 0, 'dev0')`` on ``develop``.
#. Follow the steps in :ref:`publishing-releases`.
@@ -875,52 +870,82 @@ are:
#. Follow the steps in :ref:`announcing-releases`.

.. _patch-releases:

^^^^^^^^^^^^^^^^^^^^^
Making patch releases
^^^^^^^^^^^^^^^^^^^^^

To make the patch release process both efficient and transparent, we use a *backports pull request*
which contains cherry-picked commits from the ``develop`` branch. The majority of the work is to
cherry-pick the bug fixes, which ideally should be done as soon as they land on ``develop``:
this ensures cherry-picking happens in order, and makes conflicts easier to resolve since the
changes are fresh in the mind of the developer.

The backports pull request is always titled ``Backports vX.Y.Z`` and is labelled ``backports``. It
is opened from a branch named ``backports/vX.Y.Z`` and targets the ``releases/vX.Y`` branch.

Whenever a pull request labelled ``vX.Y.Z`` is merged, cherry-pick the associated squashed commit
on ``develop`` to the ``backports/vX.Y.Z`` branch. For pull requests that were rebased (or not
squashed), cherry-pick each associated commit individually. Never force push to the
``backports/vX.Y.Z`` branch.
.. warning::

   Sometimes you may **still** get merge conflicts even if you have
   cherry-picked all the commits in order. This generally means there
   is some other intervening pull request that the one you're trying
   to pick depends on. In these cases, you'll need to make a judgment
   call regarding those pull requests. Consider the number of affected
   files and/or the resulting differences.

   1. If the changes are small, you might just cherry-pick it.

   2. If the changes are large, then you may decide that this fix is not
      worth including in a patch release, in which case you should remove
      the label from the pull request. Remember that large, manual backports
      are seldom the right choice for a patch release.
When all commits are cherry-picked in the ``backports/vX.Y.Z`` branch, make the patch
release as follows:

#. `Create a new label <https://github.com/spack/spack/labels>`_ ``vX.Y.{Z+1}`` for the next patch
   release.

#. Replace the label ``vX.Y.Z`` with ``vX.Y.{Z+1}`` for all PRs and issues that are not done.

#. Manually push a single commit with commit message ``Set version to vX.Y.Z`` to the
   ``backports/vX.Y.Z`` branch, that both bumps the Spack version number and updates the changelog:

   1. Bump the version in ``lib/spack/spack/__init__.py``.
   2. Update ``CHANGELOG.md`` with a list of the changes.
@@ -929,22 +954,20 @@ release as follows:
   release branch. See `the changelog from 0.14.1
   <https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.
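   In practice, producing that final commit looks roughly like this (the version
   number is illustrative):

   .. code-block:: console

      $ git checkout backports/v0.22.1
      $ git add lib/spack/spack/__init__.py CHANGELOG.md
      $ git commit -m "Set version to v0.22.1"
      $ git push origin backports/v0.22.1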
#. Make sure CI passes on the **backports pull request**, including:

   * Regular unit tests
   * Build tests
   * The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_

#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
   is needed to keep track in the release branch of all the commits that were
   cherry-picked.

#. Make sure CI passes on the last commit of the **release branch**.

#. In the rare case you need to include additional commits in the patch release after the backports
   PR is merged, it is best to delete the last commit ``Set version to vX.Y.Z`` from the release
   branch with a single force push, open a new backports PR named ``Backports vX.Y.Z (2)``, and
   repeat the process. Avoid repeated force pushes to the release branch.
#. Follow the steps in :ref:`publishing-releases`.
@@ -1019,31 +1042,25 @@ Updating `releases/latest`
If the new release is the **highest** Spack release yet, you should
also tag it as ``releases/latest``. For example, suppose the highest
release is currently ``0.22.3``:

* If you are releasing ``0.22.4`` or ``0.23.0``, then you should tag
  it with ``releases/latest``, as these are higher than ``0.22.3``.

* If you are making a new release of an **older** major version of
  Spack, e.g. ``0.21.4``, then you should not tag it as
  ``releases/latest`` (as there are newer major versions).

To do so, first fetch the latest tag created on GitHub, since you may not have it locally:

.. code-block:: console

   $ git fetch --force git@github.com:spack/spack vX.Y.Z

Then tag ``vX.Y.Z`` as ``releases/latest`` and push the individual tag to GitHub.

.. code-block:: console

   $ git tag --force releases/latest vX.Y.Z
   $ git push --force git@github.com:spack/spack releases/latest

The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing ``releases/latest``
tag with the new one. Do **not** use the ``--tags`` flag when pushing, since this will push *all*
local tags.
.. _announcing-releases:

View File

@@ -5,56 +5,49 @@
.. _environments:

=====================================
Environments (spack.yaml, spack.lock)
=====================================

An environment is used to group a set of specs intended for some purpose
to be built, rebuilt, and deployed in a coherent fashion. Environments
define aspects of the installation of the software, such as:

#. *which* specs to install;
#. *how* those specs are configured; and
#. *where* the concretized software will be installed.

Aggregating this information into an environment for processing has advantages
over the *à la carte* approach of building and loading individual Spack modules.

With environments, you concretize, install, or load (activate) all of the
specs with a single command. Concretization fully configures the specs
and dependencies of the environment in preparation for installing the
software. This is a more robust solution than ad-hoc installation scripts.
And you can share an environment or even re-use it on a different computer.

Environment definitions, especially *how* specs are configured, allow the
software to remain stable and repeatable even when Spack packages are upgraded.
Changes are only picked up when the environment is explicitly re-concretized.

Defining *where* specs are installed supports a filesystem view of the
environment. Yet Spack maintains a single installation of the software that
can be re-used across multiple environments.

Activating an environment determines *when* all of the associated (and
installed) specs are loaded, limiting the software loaded to those specs
actually needed by the environment. Spack can even generate a script to
load all modules related to an environment.
Other packaging systems also provide environments that are similar in
some ways to Spack environments; for example, `Conda environments
<https://conda.io/docs/user-guide/tasks/manage-environments.html>`_ or
`Python Virtual Environments
<https://docs.python.org/3/tutorial/venv.html>`_. Spack environments
provide some distinctive features though:
#. A spec installed "in" an environment is no different from the same
   spec installed anywhere else in Spack.
#. Spack environments may contain more than one spec of the same
   package.
Spack uses a "manifest and lock" model similar to `Bundler gemfiles
<https://bundler.io/man/gemfile.5.html>`_ and other package managers.
The environment's user input file (or manifest) is named ``spack.yaml``.
The lock file, which contains the fully configured and concretized specs,
is named ``spack.lock``.
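For orientation, a minimal manifest might look like the following sketch (the
spec list is purely illustrative):

.. code-block:: yaml

   spack:
     specs:
     - zlib
     - hdf5 +mpi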
.. _environments-using:
@@ -75,60 +68,55 @@ An environment is created by:
   $ spack env create myenv

The directory ``$SPACK_ROOT/var/spack/environments/myenv`` is created
to manage the environment.
.. note::

   All managed environments by default are stored in the
   ``$SPACK_ROOT/var/spack/environments`` folder. This location can be changed
   by setting the ``environments_root`` variable in ``config.yaml``.
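For example, such an override could be sketched in ``config.yaml`` like this (the
path is hypothetical):

.. code-block:: yaml

   config:
     environments_root: ~/my-spack-environments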
Spack creates the file ``spack.yaml``, hidden directory ``.spack-env``, and
``spack.lock`` file under ``$SPACK_ROOT/var/spack/environments/myenv``. User
interaction occurs through the ``spack.yaml`` file and the Spack commands
that affect it. Metadata and, by default, the view are stored in the
``.spack-env`` directory. When the environment is concretized, Spack creates
the ``spack.lock`` file with the fully configured specs and dependencies for
the environment.
The ``.spack-env`` subdirectory also contains:

* ``repo/``: A subdirectory acting as the repo consisting of the Spack
  packages used in the environment. It allows the environment to build
  the same, in theory, even on different versions of Spack with different
  packages!
* ``logs/``: A subdirectory containing the build logs for the packages
  in this environment.
Spack environments can also be created from either the user input (or
manifest) file or the lockfile. Create an environment from a manifest using:
.. code-block:: console

   $ spack env create myenv spack.yaml
The resulting environment is guaranteed to have the same root specs as
the original but may concretize differently in the presence of different
explicit or default configuration settings (e.g., a different version of
Spack or for a different user account).

Create an environment from a ``spack.lock`` file using:
.. code-block:: console

   $ spack env create myenv spack.lock
The resulting environment, when on the same or a compatible machine, is
guaranteed to initially have the same concrete specs as the original.

.. note::

   Environment creation also accepts a full path to the file.

   If the path is not under the ``$SPACK_ROOT/var/spack/environments``
   directory then the source is referred to as an
   :ref:`independent environment <independent_environments>`.
^^^^^^^^^^^^^^^^^^^^^^^^^
Activating an Environment
^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -141,7 +129,7 @@ To activate an environment, use the following command:
   $ spack env activate myenv

By default, ``spack env activate`` will load the view associated
with the environment into the user environment. The ``-v,
--with-view`` argument ensures this behavior, and the ``-V,
--without-view`` argument activates the environment without changing
the user environment variables.
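For instance, to activate for Spack commands only, without touching environment
variables (the environment name is illustrative):

.. code-block:: console

   $ spack env activate --without-view myenv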
@@ -154,11 +142,8 @@ user's prompt to begin with the environment name in brackets.
   $ spack env activate -p myenv
   [myenv] $ ...
The ``activate`` command can also be used to create a new environment, if it is
not already defined, by adding the ``--create`` flag. Managed and independent
environments can both be created using the same flags that ``spack env create``
accepts. If an environment already exists then Spack will simply activate it
and ignore the create-specific flags.
.. code-block:: console
@@ -183,50 +168,49 @@ or the shortcut alias
If the environment was activated with its view, deactivating the
environment will remove the view from the user environment.
.. _independent_environments:

^^^^^^^^^^^^^^^^^^^^^^^^
Independent Environments
^^^^^^^^^^^^^^^^^^^^^^^^
Independent environments can be located in any directory outside of Spack.

.. note::

   When uninstalling packages, Spack asks the user to confirm the removal of packages
   that are still used in a managed environment. This is not the case for independent
   environments.

To create an independent environment, use one of the following commands:
.. code-block:: console

   $ spack env create --dir my_env
   $ spack env create ./my_env
As a shorthand, you can also create an independent environment upon activation if it does not
already exist:
.. code-block:: console

   $ spack env activate --create ./my_env
For convenience, Spack can also place an independent environment in a temporary directory for you:

.. code-block:: console

   $ spack env activate --temp
^^^^^^^^^^^^^^^^^^^^^^^^^^
Environment-Aware Commands
^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack commands are environment-aware. For example, the ``find``
command shows only the specs in the active environment if an
environment has been activated. Otherwise it shows all specs in
the Spack instance. The same rule applies to the ``install`` and
``uninstall`` commands.
.. code-block:: console
@@ -271,33 +255,32 @@ the Spack instance. The same rule applies to the ``install`` and
Note that when we installed the abstract spec ``zlib@1.2.8``, it was
presented as a root of the environment. All explicitly installed
packages will be listed as roots of the environment.
All of the Spack commands that act on the list of installed specs are
environment-aware in this way, including ``install``,
``uninstall``, ``find``, ``extensions``, etcetera. In the
:ref:`environment-configuration` section we will discuss
environment-aware commands further.
^^^^^^^^^^^^^^^^^^^^^
Adding Abstract Specs
^^^^^^^^^^^^^^^^^^^^^
An abstract spec is the user-specified spec before Spack applies
defaults or dependency information.

Users can add abstract specs to an environment using the ``spack add``
command. The most important component of an environment is a list of
abstract specs.

Adding a spec adds it as a root spec of the environment in the user
input file (``spack.yaml``). It does not affect the concrete specs
in the lock file (``spack.lock``) and it does not install the spec.

The ``spack add`` command is environment-aware. It adds the spec to the
currently active environment. An error is generated if there isn't an
active environment. All environment-aware commands can also
be called using the ``spack -e`` flag to specify the environment.
.. code-block:: console
@@ -317,11 +300,11 @@ or
^^^^^^^^^^^^
Concretizing
^^^^^^^^^^^^

Once user specs have been added to an environment, they can be concretized.
There are three different modes of operation to concretize an environment,
explained in detail in :ref:`environments_concretization_config`.

Regardless of which mode of operation is chosen, the following
command will ensure all of the root specs are concretized according to the
constraints that are prescribed in the configuration:

.. code-block:: console
@@ -330,15 +313,16 @@ constraints that are prescribed in the configuration:
In the case of specs that are not concretized together, the command
above will concretize only the specs that were added and not yet
concretized. Forcing a re-concretization of all of the specs can be done
by adding the ``-f`` option:

.. code-block:: console

   [myenv]$ spack concretize -f

Without the option, Spack guarantees that already concretized specs are
unchanged in the environment.

The ``concretize`` command does not install any packages. For packages
that have already been installed outside of the environment, the
@@ -371,16 +355,16 @@ installed specs using the ``-c`` (``--concretized``) flag.
^^^^^^^^^^^^^^^^^^^^^^^^^
Installing an Environment
^^^^^^^^^^^^^^^^^^^^^^^^^

In addition to adding individual specs to an environment, one
can install the entire environment at once using the command

.. code-block:: console

   [myenv]$ spack install

If the environment has been concretized, Spack will install the
concretized specs. Otherwise, ``spack install`` will concretize
the environment before installing the concretized specs.
.. note::
@@ -401,17 +385,17 @@ the environment before installing the concretized specs.
As it installs, ``spack install`` creates symbolic links in the
``logs/`` directory in the environment, allowing for easy inspection
of build logs related to that environment. The ``spack install``
command also stores a Spack repo containing the ``package.py`` file
used at install time for each package in the ``repos/`` directory in
the environment.
The ``--no-add`` option can be used in a concrete environment to tell
Spack to install specs already present in the environment but not to
add any new root specs to the environment. For root specs provided
to ``spack install`` on the command line, ``--no-add`` is the default,
while for dependency specs, it is optional. In other
words, if there is an unambiguous match in the active concrete environment
for a root spec provided to ``spack install`` on the command line, Spack
does not require you to specify the ``--no-add`` option to prevent the spec
@@ -425,22 +409,12 @@ Developing Packages in a Spack Environment
The ``spack develop`` command allows one to develop Spack packages in
an environment. It requires a spec containing a concrete version, and
will configure Spack to install the package from local source.
If a version is not provided from the command line interface then Spack
will automatically pick the highest version the package has defined.
This means any infinity versions (``develop``, ``main``, ``stable``) will be
preferred in this selection process.

By default, ``spack develop`` will also clone the package to a subdirectory in the
environment for the local source. This package will have a special variant ``dev_path``
set, and Spack will ensure the package and its dependents are rebuilt
any time the environment is installed if the package's local source
code has been modified. Spack's native implementation to check for modifications
is to check if ``mtime`` is newer than the installation.
A custom check can be created by overriding the ``detect_dev_src_change`` method
in your package class. This is particularly useful for projects that use custom
Spack repos to drive development and want to optimize performance.

Spack ensures that all instances of a
developed package in the environment are concretized to match the
version (and other constraints) passed as the spec argument to the
``spack develop`` command.
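As an illustration, a package could base the check on ``git`` status rather than
``mtime``; the following is a minimal sketch under that assumption, not Spack's
built-in behavior:

.. code-block:: python

   from spack.package import *


   class MyApp(Package):
       """Sketch of a package with a custom dev-source change check."""

       def detect_dev_src_change(self) -> bool:
           # Illustrative only: report a change when git sees modified
           # files in the development source tree.
           import subprocess

           result = subprocess.run(
               ["git", "-C", self.stage.source_path, "status", "--porcelain"],
               capture_output=True,
               text=True,
               check=False,
           )
           return bool(result.stdout.strip())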
@@ -450,7 +424,7 @@ also be used as valid concrete versions (see :ref:`version-specifier`).
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
the ``main`` branch of the package, and ``spack install`` will install from
that git clone if ``foo`` is in the environment.
Further development on ``foo`` can be tested by re-installing the environment,
and eventually committed and pushed to the upstream git repo.

If the package being developed supports out-of-source builds then users can use the
@@ -635,7 +609,7 @@ manipulate configuration inline in the ``spack.yaml`` file.
^^^^^^^^^^^^^^^^^^^^^
Inline configurations
^^^^^^^^^^^^^^^^^^^^^

Inline environment-scope configuration is done using the same yaml
format as standard Spack configuration scopes, covered in the
:ref:`configuration` section. Each section is contained under a
top-level yaml object with its name. For example, a ``spack.yaml``
@@ -660,7 +634,7 @@ Included configurations
Spack environments allow an ``include`` heading in their yaml
schema. This heading pulls in external configuration files and applies
them to the environment.

.. code-block:: yaml
@@ -673,9 +647,6 @@ them to the environment.
Environments can include files or URLs. File paths can be relative or
absolute. URLs include the path to the text for individual files or
can be the path to a directory containing configuration files.
Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
schemes). Spack-specific, environment and user path variables may be
used in these paths. See :ref:`config-file-variables` for more information.
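A sketch of such an ``include`` section (the paths and URL are illustrative):

.. code-block:: yaml

   spack:
     include:
     - relative/path/to/includes.yaml
     - /absolute/path/to/packages.yaml
     - https://example.com/spack/configs/compilers.yaml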
^^^^^^^^^^^^^^^^^^^^^^^^
Configuration precedence
^^^^^^^^^^^^^^^^^^^^^^^^
@@ -690,7 +661,7 @@ have higher precedence, as the included configs are applied in reverse order.
-------------------------------
Manually Editing the Specs List
-------------------------------

The list of abstract/root specs in the environment is maintained in
the ``spack.yaml`` manifest under the heading ``specs``.
.. code-block:: yaml
@@ -798,7 +769,7 @@ evaluates to the cross-product of those specs. Spec matrices also
contain an ``excludes`` directive, which eliminates certain
combinations from the evaluated result.

The following two environment manifests are identical:

.. code-block:: yaml
@@ -873,7 +844,7 @@ files are identical.
In short files like the example, it may be easier to simply list the
included specs. However, for more complicated examples involving many
packages across many toolchains, separately factored lists make
environments substantially more manageable.

Additionally, the ``-l`` option to the ``spack add`` command allows
one to add to named lists in the definitions section of the manifest
@@ -892,7 +863,7 @@ named list ``compilers`` is ``['%gcc', '%clang', '%intel']`` on
   spack:
     definitions:
     - compilers: ['%gcc', '%clang']
     - when: arch.satisfies('target=x86_64:')
       compilers: ['%intel']

.. note::
@@ -960,84 +931,32 @@ This allows for a much-needed reduction in redundancy between packages
and constraints.

-----------------
Environment Views
-----------------
Spack Environments can have an associated filesystem view, which is a directory
with a more traditional structure ``<view>/bin``, ``<view>/lib``, ``<view>/include``
in which all files of the installed packages are linked.

By default a view is created for each environment, thanks to the ``view: true``
option in the ``spack.yaml`` manifest file:

.. code-block:: yaml

   spack:
     specs: [perl, python]
     view: true

The view is created in a hidden directory ``.spack-env/view`` relative to the environment.
If you've used ``spack env activate``, you may have already interacted with this view. Spack
prepends its ``<view>/bin`` dir to ``PATH`` when the environment is activated, so that
you can directly run executables from all installed packages in the environment.

Views are highly customizable: you can control where they are put, modify their structure,
include and exclude specs, change how files are linked, and you can even generate multiple
views for a single environment.
.. _configuring_environment_views:
^^^^^^^^^^^^^^^^^^^^^^^^^^
Minimal view configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^
The minimal configuration

.. code-block:: yaml

   spack:
     # ...
     view: true

lets Spack generate a single view with default settings under the
``.spack-env/view`` directory of the environment.

Another short way to configure a view is to specify just where to put it:

.. code-block:: yaml

   spack:
     # ...
     view: /path/to/view

Views can also be disabled by setting ``view: false``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^
Advanced view configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^^

One or more **view descriptors** can be defined under ``view``, keyed by a name.
The example from the previous section with ``view: /path/to/view`` is equivalent
to defining a view descriptor named ``default`` with a ``root`` attribute:

.. code-block:: yaml

   spack:
     # ...
     view:
       default:                 # name of the view
         root: /path/to/view    # view descriptor attribute

The ``default`` view descriptor name is special: when you ``spack env activate`` your
environment, this view will be used to update (among other things) your ``PATH``
variable.

View descriptors must contain the root of the view, and optionally projections,
``select`` and ``exclude`` lists and link information via ``link`` and
``link_type``.
As a more advanced example, in the following manifest
file snippet we define a view named ``mpis``, rooted at
``/path/to/view`` in which all projections use the package name,
version, and compiler name to determine the path for a given
@@ -1082,14 +1001,63 @@ of ``hardlink`` or ``copy``.
when the environment is not activated, and linked libraries will be located
*outside* of the view thanks to rpaths.
From the command line, the ``spack env create`` command takes an
argument ``--with-view [PATH]`` that sets the path for a single, default
view. If no path is specified, the default path is used (``view:
true``). The argument ``--without-view`` can be used to create an
environment without any view configured.
The ``spack env view`` command can be used to manage the views
of an environment. The subcommand ``spack env view enable`` will add a
view named ``default`` to an environment. It takes an optional
argument to specify the path for the new default view. The subcommand
``spack env view disable`` will remove the view named ``default`` from
@@ -1151,18 +1119,11 @@ the projection under ``all`` before reaching those entries.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Activating environment views
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The ``spack env activate <env>`` has two effects: The ``spack env activate`` command will put the default view for the
environment into the user's path, in addition to activating the
1. It activates the environment so that further Spack commands such environment for Spack commands. The arguments ``-v,--with-view`` and
as ``spack install`` will run in the context of the environment. ``-V,--without-view`` can be used to tune this behavior. The default
2. It activates the view so that environment variables such as behavior is to activate with the environment view if there is one.
``PATH`` are updated to include the view.
Without further arguments, the ``default`` view of the environment is
activated. If a view with a different name has to be activated,
``spack env activate --with-view <name> <env>`` can be
used instead. You can also activate the environment without modifying
further environment variables using ``--without-view``.
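For example, assuming an environment named ``myenv`` (the name is illustrative):

.. code-block:: console

   $ spack env activate myenv                       # activate with the default view
   $ spack env activate --with-view default myenv   # select a view by name
   $ spack env activate --without-view myenv        # do not modify PATH etc.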
The environment variables affected by the ``spack env activate`` The environment variables affected by the ``spack env activate``
command and the paths that are used to update them are determined by command and the paths that are used to update them are determined by
@@ -1185,8 +1146,8 @@ relevant variable if the path exists. For this reason, it is not
recommended to use non-default projections with the default view of an recommended to use non-default projections with the default view of an
environment. environment.
The ``spack env deactivate`` command will remove the active view of The ``spack env deactivate`` command will remove the default view of
the Spack environment from the user's environment variables. the environment from the user's path.
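For example:

.. code-block:: console

   $ spack env deactivate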
.. _env-generate-depfile: .. _env-generate-depfile:
@@ -1257,7 +1218,7 @@ gets installed and is available for use in the ``env`` target.
$(SPACK) -e . env depfile -o $@ --make-prefix spack $(SPACK) -e . env depfile -o $@ --make-prefix spack
env: spack/env env: spack/env
$(info environment installed!) $(info Environment installed!)
clean: clean:
rm -rf spack.lock env.mk spack/ rm -rf spack.lock env.mk spack/
@@ -1345,7 +1306,7 @@ index once every package is pushed. Note how this target uses the generated
example/push/%: example/install/% example/push/%: example/install/%
@mkdir -p $(dir $@) @mkdir -p $(dir $@)
$(info About to push $(SPEC) to a buildcache) $(info About to push $(SPEC) to a buildcache)
$(SPACK) -e . buildcache push --only=package $(BUILDCACHE_DIR) /$(HASH) $(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
@touch $@ @touch $@
push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS)) push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
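With the targets above in place, the pushes can then be driven in parallel with
``make`` (the job count is illustrative):

.. code-block:: console

   $ make -j8 push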
@@ -61,15 +61,10 @@ Getting Spack is easy. You can clone it from the `github repository
.. code-block:: console .. code-block:: console
$ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git $ git clone -c feature.manyFiles=true https://github.com/spack/spack.git
This will create a directory called ``spack``. This will create a directory called ``spack``.
.. note::
``-c feature.manyFiles=true`` improves git's performance on repositories with 1,000+ files.
``--depth=2`` prunes the git history to reduce the size of the Spack installation.
.. _shell-support: .. _shell-support:
^^^^^^^^^^^^^ ^^^^^^^^^^^^^
@@ -1480,14 +1475,16 @@ in a Windows CMD prompt.
Step 3: Run and configure Spack Step 3: Run and configure Spack
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
On Windows, Spack supports both primary native shells: PowerShell and the traditional command prompt. To use Spack, run ``bin\spack_cmd.bat`` (you may need to Run as Administrator) from the top-level spack
To use Spack, pick your favorite shell, and run ``bin\spack_cmd.bat`` or ``share/spack/setup-env.ps1`` directory. This will provide a Windows command prompt with an environment properly set up with Spack
(you may need to Run as Administrator) from the top-level spack and its prerequisites. If you receive a warning message that Python is not in your ``PATH``
directory. This will provide a Spack-enabled shell. If you receive a warning message that Python is not in your ``PATH``
(which may happen if you installed Python from the website and not the Windows Store) add the location (which may happen if you installed Python from the website and not the Windows Store) add the location
of the Python executable to your ``PATH`` now. You can permanently add Python to your ``PATH`` variable of the Python executable to your ``PATH`` now. You can permanently add Python to your ``PATH`` variable
by using the ``Edit the system environment variables`` utility in Windows Control Panel. by using the ``Edit the system environment variables`` utility in Windows Control Panel.
.. note::
Alternatively, Powershell can be used in place of CMD
To configure Spack, first run the following command inside the Spack console: To configure Spack, first run the following command inside the Spack console:
.. code-block:: console .. code-block:: console
@@ -1552,7 +1549,7 @@ and not tabs, so ensure that this is the case when editing one directly.
.. note:: Cygwin .. note:: Cygwin
The use of Cygwin is not officially supported by Spack and is not tested. The use of Cygwin is not officially supported by Spack and is not tested.
However, Spack will not prevent this; if you choose to use Spack However, Spack will not throw an error; if you choose to use Spack
with Cygwin, know that no functionality is guaranteed. with Cygwin, know that no functionality is guaranteed.
^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^
@@ -1566,12 +1563,21 @@ Spack console via:
spack install cpuinfo spack install cpuinfo
If, in the previous step, you did not have CMake or Ninja installed, running the command above should install both packages. If, in the previous step, you did not have CMake or Ninja installed, running the command above should bootstrap both packages.
.. note:: Spec Syntax Caveats """""""""""""""""""""""""""
Windows has a few idiosyncrasies when it comes to the Spack spec syntax and the use of certain shells Windows Compatible Packages
See the Spack spec syntax doc for more information """""""""""""""""""""""""""
Not all Spack packages currently have Windows support. Some are inherently incompatible with the
platform, and others simply have yet to be ported. To view the current set of packages with Windows
support, use ``spack list -t windows``. If there's a package you'd like
to install on Windows but it is not in that list, feel free to reach out to request the port or contribute
the port yourself.
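For example:

.. code-block:: console

   $ spack list -t windows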
.. note::
This is by no means a comprehensive list; some packages may have ports that were not tagged,
while others may just work out of the box on Windows and simply have not been tagged as such.
^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^
For developers For developers
@@ -1581,3 +1587,6 @@ The intent is to provide a Windows installer that will automatically set up
Python, Git, and Spack, instead of requiring the user to do so manually. Python, Git, and Spack, instead of requiring the user to do so manually.
Instructions for creating the installer are at Instructions for creating the installer are at
https://github.com/spack/spack/blob/develop/lib/spack/spack/cmd/installer/README.md https://github.com/spack/spack/blob/develop/lib/spack/spack/cmd/installer/README.md
Alternatively, a pre-built copy of the Windows installer is available as an artifact of Spack's Windows CI,
produced by each run of the CI on develop or any PR.
@@ -39,15 +39,10 @@ package:
.. code-block:: console .. code-block:: console
$ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git $ git clone -c feature.manyFiles=true https://github.com/spack/spack.git
$ cd spack/bin $ cd spack/bin
$ ./spack install libelf $ ./spack install libelf
.. note::
``-c feature.manyFiles=true`` improves git's performance on repositories with 1,000+ files.
``--depth=2`` prunes the git history to reduce the size of the Spack installation.
If you're new to spack and want to start using it, see :doc:`getting_started`, If you're new to spack and want to start using it, see :doc:`getting_started`,
or refer to the full manual below. or refer to the full manual below.
@@ -457,11 +457,11 @@ For instance, the following config options,
tcl: tcl:
all: all:
suffixes: suffixes:
^python@3: 'python{^python.version}' ^python@3.12: 'python-3.12'
^openblas: 'openblas' ^openblas: 'openblas'
will add a ``python-3.12.1`` version string to any packages compiled with will add a ``python-3.12`` version string to any packages compiled with
Python matching the spec, ``python@3``. This is useful to know which Python matching the spec, ``python@3.12``. This is useful to know which
version of Python a set of Python extensions is associated with. Likewise, the version of Python a set of Python extensions is associated with. Likewise, the
``openblas`` string is attached to any program that has openblas in the spec, ``openblas`` string is attached to any program that has openblas in the spec,
most likely via the ``+blas`` variant specification. most likely via the ``+blas`` variant specification.
@@ -1263,11 +1263,6 @@ Git fetching supports the following parameters to ``version``:
option ``--depth 1`` will be used if the version of git and the specified option ``--depth 1`` will be used if the version of git and the specified
transport protocol support it, and ``--single-branch`` will be used if the transport protocol support it, and ``--single-branch`` will be used if the
version of git supports it. version of git supports it.
* ``git_sparse_paths``: Use ``sparse-checkout`` to only clone these relative paths.
This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
large repositories that have separate portions that can be built independently.
If the paths provided are directories, then all of their subdirectories and associated files
will also be cloned.
Only one of ``tag``, ``branch``, or ``commit`` can be used at a time. Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
@@ -1366,41 +1361,6 @@ Submodules
For more information about git submodules see the manpage of git: ``man For more information about git submodules see the manpage of git: ``man
git-submodule``. git-submodule``.
Sparse-Checkout
"""""""""""""""
You can supply ``git_sparse_paths`` at the package or version level to utilize git's
sparse-checkout feature. This will only clone the paths that are specified in the
``git_sparse_paths`` attribute for the package, along with the files in the top-level directory.
This feature allows you to only clone what you need from a large repository.
Note that this is a newer feature in git and requires git ``2.25.0`` or greater.
If ``git_sparse_paths`` is supplied and the git version is too old,
a warning will be issued and that package will use the standard cloning operations instead.
``git_sparse_paths`` should be supplied as a list of paths, a callable function for versions,
or a more complex package attribute using the ``@property`` decorator; a callable
implementation of ``git_sparse_paths`` should return a list of paths.
.. code-block:: python
def sparse_path_function(package):
    """A callable function that can be used inside a version."""
    # Paths can be directories or files; all subdirectories and files are included.
    paths = ["doe", "rae", "me/file.cpp"]
    if package.spec.version > Version("1.2.0"):
        paths.extend(["fae"])
    return paths

class MyPackage(Package):
    # Can also be a package attribute that will be used if not specified in versions.
    git_sparse_paths = ["doe", "rae"]

    # Use the package attribute.
    version("1.0.0")
    version("1.1.0")
    # Use the function.
    version("1.1.5", git_sparse_paths=sparse_path_function)
    version("1.2.0", git_sparse_paths=sparse_path_function)
    version("1.2.5", git_sparse_paths=sparse_path_function)
.. _github-fetch: .. _github-fetch:
^^^^^^ ^^^^^^
@@ -592,77 +592,6 @@ the attributes will be merged starting from the bottom match going up to the top
In the case that no match is found in a submapping section, no additional attributes will be applied. In the case that no match is found in a submapping section, no additional attributes will be applied.
^^^^^^^^^^^^^^^^^^^^^^^^
Dynamic Mapping Sections
^^^^^^^^^^^^^^^^^^^^^^^^
For large-scale CI where cost optimization is required, dynamic mapping allows for the use of real-time
mapping schemes served by a web service. This type of mapping does not support the ``-remove`` type
behavior, but it does follow the rest of the merge rules for configurations.
The dynamic mapping service needs to implement a single REST API interface for GET
requests: ``GET <URL>[:PORT][/PATH]?spec=<pkg_name@pkg_version +variant1+variant2%compiler@compiler_version>``.
An example request:
.. code-block::
https://my-dyn-mapping.spack.io/allocation?spec=zlib-ng@2.1.6 +compat+opt+shared+pic+new_strategies arch=linux-ubuntu20.04-x86_64_v3%gcc@12.0.0
An example response that updates the Kubernetes request variables, overrides the max retries for GitLab,
and prepends a note about the modifications made by the my-dyn-mapping.spack.io service:
.. code-block::
200 OK
{
    "variables": {
        "KUBERNETES_CPU_REQUEST": "500m",
        "KUBERNETES_MEMORY_REQUEST": "2G"
    },
    "retry": {"max": "1"},
    "script+:": [
        "echo \"Job modified by my-dyn-mapping.spack.io\""
    ]
}
The ci.yaml configuration section takes the URL endpoint as well as a number of options to configure how responses are handled.
It is possible to specify a list of allowed and ignored configuration attributes under ``allow`` and ``ignore``,
respectively. It is also possible to configure required attributes under the ``require`` section.
The client timeout and SSL verification can be configured using the ``timeout`` and ``verify_ssl`` options.
By default, ``timeout`` is set to the option in ``config:timeout`` and ``verify_ssl`` is set to the option in ``config:verify_ssl``.
Passing header parameters to the request can be achieved through the ``header`` section. The values of the variables passed to the
header may be environment variables that are expanded at runtime, such as a private token configured on the runner.
Here is an example configuration pointing to ``my-dyn-mapping.spack.io/allocation``.
.. code-block:: yaml
ci:
- dynamic-mapping:
endpoint: my-dyn-mapping.spack.io/allocation
timeout: 10
verify_ssl: True
header:
PRIVATE_TOKEN: ${MY_PRIVATE_TOKEN}
MY_CONFIG: "fuzz_allocation:false"
allow:
- variables
ignore:
- script
require: []
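For reference, the request such a configuration produces can be reproduced by hand
with ``curl`` (a sketch against the hypothetical endpoint above; the token variable
is assumed to be set on the runner):

.. code-block:: console

   $ curl -G -H "PRIVATE_TOKEN: ${MY_PRIVATE_TOKEN}" -H "MY_CONFIG: fuzz_allocation:false" \
          --data-urlencode "spec=zlib-ng@2.1.6 +compat+opt+shared+pic+new_strategies arch=linux-ubuntu20.04-x86_64_v3%gcc@12.0.0" \
          https://my-dyn-mapping.spack.io/allocation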
^^^^^^^^^^^^^ ^^^^^^^^^^^^^
Bootstrapping Bootstrapping
^^^^^^^^^^^^^ ^^^^^^^^^^^^^
@@ -734,7 +663,11 @@ build the package.
When including a bootstrapping phase as in the example above, the result is that When including a bootstrapping phase as in the example above, the result is that
the bootstrapped compiler packages will be pushed to the binary mirror (and the the bootstrapped compiler packages will be pushed to the binary mirror (and the
local artifacts mirror) before the actual release specs are built. local artifacts mirror) before the actual release specs are built. In this case,
the jobs corresponding to subsequent release specs are configured to
``install_missing_compilers``, so that if spack is asked to install a package
with a compiler it doesn't know about, it can be quickly installed from the
binary mirror first.
Since bootstrapping compilers is optional, those items can be left out of the Since bootstrapping compilers is optional, those items can be left out of the
environment/stack file, and in that case no bootstrapping will be done (only the environment/stack file, and in that case no bootstrapping will be done (only the
@@ -1,13 +1,13 @@
sphinx==7.4.7 sphinx==7.2.6
sphinxcontrib-programoutput==0.17 sphinxcontrib-programoutput==0.17
sphinx_design==0.6.1 sphinx_design==0.6.0
sphinx-rtd-theme==3.0.1 sphinx-rtd-theme==2.0.0
python-levenshtein==0.26.0 python-levenshtein==0.25.1
docutils==0.20.1 docutils==0.20.1
pygments==2.18.0 pygments==2.18.0
urllib3==2.2.3 urllib3==2.2.2
pytest==8.3.3 pytest==8.2.2
isort==5.13.2 isort==5.13.2
black==24.8.0 black==24.4.2
flake8==7.1.1 flake8==7.1.0
mypy==1.11.1 mypy==1.10.1
@@ -174,46 +174,6 @@ preextend() {
unset IFS unset IFS
} }
execute() {
# dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
if [ -n "${SPACK_TEST_COMMAND=}" ]; then
case "$SPACK_TEST_COMMAND" in
dump-args)
IFS="$lsep"
for arg in $full_command_list; do
echo "$arg"
done
unset IFS
exit
;;
dump-env-*)
var=${SPACK_TEST_COMMAND#dump-env-}
eval "printf '%s\n' \"\$0: \$var: \$$var\""
;;
*)
die "Unknown test command: '$SPACK_TEST_COMMAND'"
;;
esac
fi
#
# Write the input and output commands to debug logs if it's asked for.
#
if [ "$SPACK_DEBUG" = TRUE ]; then
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
echo "[$mode] $command $input_command" >> "$input_log"
IFS="$lsep"
echo "[$mode] "$full_command_list >> "$output_log"
unset IFS
fi
# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list
exit
}
# Fail with a clear message if the input contains any bell characters. # Fail with a clear message if the input contains any bell characters.
if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
die "Compiler command line contains our separator ('${lsep}'). Cannot parse." die "Compiler command line contains our separator ('${lsep}'). Cannot parse."
@@ -271,17 +231,12 @@ fi
# ld link # ld link
# ccld compile & link # ccld compile & link
# Note. SPACK_ALWAYS_XFLAGS are applied for all compiler invocations,
# including version checks (SPACK_XFLAGS variants are not applied
# for version checks).
command="${0##*/}" command="${0##*/}"
comp="CC" comp="CC"
vcheck_flags=""
case "$command" in case "$command" in
cpp) cpp)
mode=cpp mode=cpp
debug_flags="-g" debug_flags="-g"
vcheck_flags="${SPACK_ALWAYS_CPPFLAGS}"
;; ;;
cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe|craycc) cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe|craycc)
command="$SPACK_CC" command="$SPACK_CC"
@@ -289,7 +244,6 @@ case "$command" in
comp="CC" comp="CC"
lang_flags=C lang_flags=C
debug_flags="-g" debug_flags="-g"
vcheck_flags="${SPACK_ALWAYS_CFLAGS}"
;; ;;
c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC) c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
command="$SPACK_CXX" command="$SPACK_CXX"
@@ -297,7 +251,6 @@ case "$command" in
comp="CXX" comp="CXX"
lang_flags=CXX lang_flags=CXX
debug_flags="-g" debug_flags="-g"
vcheck_flags="${SPACK_ALWAYS_CXXFLAGS}"
;; ;;
ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang|crayftn) ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang|crayftn)
command="$SPACK_FC" command="$SPACK_FC"
@@ -305,7 +258,6 @@ case "$command" in
comp="FC" comp="FC"
lang_flags=F lang_flags=F
debug_flags="-g" debug_flags="-g"
vcheck_flags="${SPACK_ALWAYS_FFLAGS}"
;; ;;
f77|xlf|xlf_r|pgf77) f77|xlf|xlf_r|pgf77)
command="$SPACK_F77" command="$SPACK_F77"
@@ -313,7 +265,6 @@ case "$command" in
comp="F77" comp="F77"
lang_flags=F lang_flags=F
debug_flags="-g" debug_flags="-g"
vcheck_flags="${SPACK_ALWAYS_FFLAGS}"
;; ;;
ld|ld.gold|ld.lld) ld|ld.gold|ld.lld)
mode=ld mode=ld
@@ -414,11 +365,7 @@ unset IFS
export PATH="$new_dirs" export PATH="$new_dirs"
if [ "$mode" = vcheck ]; then if [ "$mode" = vcheck ]; then
full_command_list="$command" exec "${command}" "$@"
args="$@"
extend full_command_list vcheck_flags
extend full_command_list args
execute
fi fi
# Darwin's linker has a -r argument that merges object files together. # Darwin's linker has a -r argument that merges object files together.
@@ -775,7 +722,6 @@ case "$mode" in
cc|ccld) cc|ccld)
case $lang_flags in case $lang_flags in
F) F)
extend spack_flags_list SPACK_ALWAYS_FFLAGS
extend spack_flags_list SPACK_FFLAGS extend spack_flags_list SPACK_FFLAGS
;; ;;
esac esac
@@ -785,7 +731,6 @@ esac
# C preprocessor flags come before any C/CXX flags # C preprocessor flags come before any C/CXX flags
case "$mode" in case "$mode" in
cpp|as|cc|ccld) cpp|as|cc|ccld)
extend spack_flags_list SPACK_ALWAYS_CPPFLAGS
extend spack_flags_list SPACK_CPPFLAGS extend spack_flags_list SPACK_CPPFLAGS
;; ;;
esac esac
@@ -796,11 +741,9 @@ case "$mode" in
cc|ccld) cc|ccld)
case $lang_flags in case $lang_flags in
C) C)
extend spack_flags_list SPACK_ALWAYS_CFLAGS
extend spack_flags_list SPACK_CFLAGS extend spack_flags_list SPACK_CFLAGS
;; ;;
CXX) CXX)
extend spack_flags_list SPACK_ALWAYS_CXXFLAGS
extend spack_flags_list SPACK_CXXFLAGS extend spack_flags_list SPACK_CXXFLAGS
;; ;;
esac esac
@@ -990,4 +933,39 @@ if [ -n "$SPACK_CCACHE_BINARY" ]; then
esac esac
fi fi
execute # dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
if [ -n "${SPACK_TEST_COMMAND=}" ]; then
case "$SPACK_TEST_COMMAND" in
dump-args)
IFS="$lsep"
for arg in $full_command_list; do
echo "$arg"
done
unset IFS
exit
;;
dump-env-*)
var=${SPACK_TEST_COMMAND#dump-env-}
eval "printf '%s\n' \"\$0: \$var: \$$var\""
;;
*)
die "Unknown test command: '$SPACK_TEST_COMMAND'"
;;
esac
fi
#
# Write the input and output commands to debug logs if it's asked for.
#
if [ "$SPACK_DEBUG" = TRUE ]; then
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
echo "[$mode] $command $input_command" >> "$input_log"
IFS="$lsep"
echo "[$mode] "$full_command_list >> "$output_log"
unset IFS
fi
# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list
@@ -18,7 +18,7 @@
* Homepage: https://pypi.python.org/pypi/archspec * Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures * Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47) * Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)
astunparse astunparse
---------------- ----------------
@@ -1265,29 +1265,27 @@ def _distro_release_info(self) -> Dict[str, str]:
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
else: else:
try: try:
with os.scandir(self.etc_dir) as it: basenames = [
etc_files = [ basename
p.path for p in it for basename in os.listdir(self.etc_dir)
if p.is_file() and p.name not in _DISTRO_RELEASE_IGNORE_BASENAMES if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
] and os.path.isfile(os.path.join(self.etc_dir, basename))
]
# We sort for repeatability in cases where there are multiple # We sort for repeatability in cases where there are multiple
# distro specific files; e.g. CentOS, Oracle, Enterprise all # distro specific files; e.g. CentOS, Oracle, Enterprise all
# containing `redhat-release` on top of their own. # containing `redhat-release` on top of their own.
etc_files.sort() basenames.sort()
except OSError: except OSError:
# This may occur when /etc is not readable but we can't be # This may occur when /etc is not readable but we can't be
# sure about the *-release files. Check common entries of # sure about the *-release files. Check common entries of
# /etc for information. If they turn out to not be there the # /etc for information. If they turn out to not be there the
# error is handled in `_parse_distro_release_file()`. # error is handled in `_parse_distro_release_file()`.
etc_files = [ basenames = _DISTRO_RELEASE_BASENAMES
os.path.join(self.etc_dir, basename) for basename in basenames:
for basename in _DISTRO_RELEASE_BASENAMES match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
]
for filepath in etc_files:
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(os.path.basename(filepath))
if match is None: if match is None:
continue continue
filepath = os.path.join(self.etc_dir, basename)
distro_info = self._parse_distro_release_file(filepath) distro_info = self._parse_distro_release_file(filepath)
# The name is always present if the pattern matches. # The name is always present if the pattern matches.
if "name" not in distro_info: if "name" not in distro_info:
@@ -231,6 +231,96 @@ def is_host_name(instance):
return True return True
try:
# The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
import idna
except ImportError:
pass
else:
@_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
def is_idn_host_name(instance):
if not isinstance(instance, str_types):
return True
idna.encode(instance)
return True
try:
import rfc3987
except ImportError:
try:
from rfc3986_validator import validate_rfc3986
except ImportError:
pass
else:
@_checks_drafts(name="uri")
def is_uri(instance):
if not isinstance(instance, str_types):
return True
return validate_rfc3986(instance, rule="URI")
@_checks_drafts(
draft6="uri-reference",
draft7="uri-reference",
raises=ValueError,
)
def is_uri_reference(instance):
if not isinstance(instance, str_types):
return True
return validate_rfc3986(instance, rule="URI_reference")
else:
@_checks_drafts(draft7="iri", raises=ValueError)
def is_iri(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="IRI")
@_checks_drafts(draft7="iri-reference", raises=ValueError)
def is_iri_reference(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="IRI_reference")
@_checks_drafts(name="uri", raises=ValueError)
def is_uri(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="URI")
@_checks_drafts(
draft6="uri-reference",
draft7="uri-reference",
raises=ValueError,
)
def is_uri_reference(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="URI_reference")
try:
from strict_rfc3339 import validate_rfc3339
except ImportError:
try:
from rfc3339_validator import validate_rfc3339
except ImportError:
validate_rfc3339 = None
if validate_rfc3339:
@_checks_drafts(name="date-time")
def is_datetime(instance):
if not isinstance(instance, str_types):
return True
return validate_rfc3339(instance)
@_checks_drafts(draft7="time")
def is_time(instance):
if not isinstance(instance, str_types):
return True
return is_datetime("1970-01-01T" + instance)
@_checks_drafts(name="regex", raises=re.error) @_checks_drafts(name="regex", raises=re.error)
def is_regex(instance): def is_regex(instance):
if not isinstance(instance, str_types): if not isinstance(instance, str_types):
@@ -250,3 +340,86 @@ def is_draft3_time(instance):
if not isinstance(instance, str_types): if not isinstance(instance, str_types):
return True return True
return datetime.datetime.strptime(instance, "%H:%M:%S") return datetime.datetime.strptime(instance, "%H:%M:%S")
try:
import webcolors
except ImportError:
pass
else:
def is_css_color_code(instance):
return webcolors.normalize_hex(instance)
@_checks_drafts(draft3="color", raises=(ValueError, TypeError))
def is_css21_color(instance):
if (
not isinstance(instance, str_types) or
instance.lower() in webcolors.css21_names_to_hex
):
return True
return is_css_color_code(instance)
def is_css3_color(instance):
if instance.lower() in webcolors.css3_names_to_hex:
return True
return is_css_color_code(instance)
try:
import jsonpointer
except ImportError:
pass
else:
@_checks_drafts(
draft6="json-pointer",
draft7="json-pointer",
raises=jsonpointer.JsonPointerException,
)
def is_json_pointer(instance):
if not isinstance(instance, str_types):
return True
return jsonpointer.JsonPointer(instance)
# TODO: I don't want to maintain this, so it
# needs to go either into jsonpointer (pending
# https://github.com/stefankoegl/python-json-pointer/issues/34) or
# into a new external library.
@_checks_drafts(
draft7="relative-json-pointer",
raises=jsonpointer.JsonPointerException,
)
def is_relative_json_pointer(instance):
# Definition taken from:
# https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
if not isinstance(instance, str_types):
return True
non_negative_integer, rest = [], ""
for i, character in enumerate(instance):
if character.isdigit():
non_negative_integer.append(character)
continue
if not non_negative_integer:
return False
rest = instance[i:]
break
return (rest == "#") or jsonpointer.JsonPointer(rest)
try:
import uritemplate.exceptions
except ImportError:
pass
else:
@_checks_drafts(
draft6="uri-template",
draft7="uri-template",
raises=uritemplate.exceptions.InvalidTemplate,
)
def is_uri_template(
instance,
template_validator=uritemplate.Validator().force_balanced_braces(),
):
template = uritemplate.URITemplate(instance)
return template_validator.validate(template)
@@ -47,11 +47,7 @@ def decorator(factory):
def partial_uarch( def partial_uarch(
name: str = "", name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
vendor: str = "",
features: Optional[Set[str]] = None,
generation: int = 0,
cpu_part: str = "",
) -> Microarchitecture: ) -> Microarchitecture:
"""Construct a partial microarchitecture, from information gathered during system scan.""" """Construct a partial microarchitecture, from information gathered during system scan."""
return Microarchitecture( return Microarchitecture(
@@ -61,7 +57,6 @@ def partial_uarch(
features=features or set(), features=features or set(),
compilers={}, compilers={},
generation=generation, generation=generation,
cpu_part=cpu_part,
) )
@@ -95,7 +90,6 @@ def proc_cpuinfo() -> Microarchitecture:
return partial_uarch( return partial_uarch(
vendor=_canonicalize_aarch64_vendor(data), vendor=_canonicalize_aarch64_vendor(data),
features=_feature_set(data, key="Features"), features=_feature_set(data, key="Features"),
cpu_part=data.get("CPU part", ""),
) )
if architecture in (PPC64LE, PPC64): if architecture in (PPC64LE, PPC64):
@@ -351,10 +345,6 @@ def sorting_fn(item):
generic_candidates = [c for c in candidates if c.vendor == "generic"] generic_candidates = [c for c in candidates if c.vendor == "generic"]
best_generic = max(generic_candidates, key=sorting_fn) best_generic = max(generic_candidates, key=sorting_fn)
# Relevant for AArch64. Filter on "cpu_part" if we have any match
if info.cpu_part != "" and any(c for c in candidates if info.cpu_part == c.cpu_part):
candidates = [c for c in candidates if info.cpu_part == c.cpu_part]
# Filter the candidates to be descendant of the best generic candidate. # Filter the candidates to be descendant of the best generic candidate.
# This is to avoid that the lack of a niche feature that can be disabled # This is to avoid that the lack of a niche feature that can be disabled
# from e.g. BIOS prevents detection of a reasonably performant architecture # from e.g. BIOS prevents detection of a reasonably performant architecture
@@ -2,7 +2,9 @@
# Archspec Project Developers. See the top-level COPYRIGHT file for details. # Archspec Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Types and functions to manage information on CPU microarchitectures.""" """Types and functions to manage information
on CPU microarchitectures.
"""
import functools import functools
import platform import platform
import re import re
@@ -63,31 +65,23 @@ class Microarchitecture:
passed in as argument above. passed in as argument above.
* versions: versions that support this micro-architecture. * versions: versions that support this micro-architecture.
generation (int): generation of the micro-architecture, if relevant. generation (int): generation of the micro-architecture, if
cpu_part (str): cpu part of the architecture, if relevant. relevant.
""" """
# pylint: disable=too-many-arguments,too-many-instance-attributes # pylint: disable=too-many-arguments
#: Aliases for micro-architecture's features #: Aliases for micro-architecture's features
feature_aliases = FEATURE_ALIASES feature_aliases = FEATURE_ALIASES
def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu_part=""): def __init__(self, name, parents, vendor, features, compilers, generation=0):
self.name = name self.name = name
self.parents = parents self.parents = parents
self.vendor = vendor self.vendor = vendor
self.features = features self.features = features
self.compilers = compilers self.compilers = compilers
# Only relevant for PowerPC
self.generation = generation self.generation = generation
# Only relevant for AArch64 # Cache the ancestor computation
self.cpu_part = cpu_part
# Cache the "ancestor" computation
self._ancestors = None self._ancestors = None
# Cache the "generic" computation
self._generic = None
# Cache the "family" computation
self._family = None
@property @property
def ancestors(self): def ancestors(self):
@@ -117,12 +111,8 @@ def __eq__(self, other):
and self.parents == other.parents # avoid ancestors here and self.parents == other.parents # avoid ancestors here
and self.compilers == other.compilers and self.compilers == other.compilers
and self.generation == other.generation and self.generation == other.generation
and self.cpu_part == other.cpu_part
) )
def __hash__(self):
return hash(self.name)
@coerce_target_names @coerce_target_names
def __ne__(self, other): def __ne__(self, other):
return not self == other return not self == other
@@ -153,8 +143,7 @@ def __repr__(self):
cls_name = self.__class__.__name__ cls_name = self.__class__.__name__
fmt = ( fmt = (
cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, " cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, "
"{0.features!r}, {0.compilers!r}, generation={0.generation!r}, " "{0.features!r}, {0.compilers!r}, {0.generation!r})"
"cpu_part={0.cpu_part!r})"
) )
return fmt.format(self) return fmt.format(self)
@@ -179,22 +168,18 @@ def __contains__(self, feature):
@property @property
def family(self): def family(self):
"""Returns the architecture family a given target belongs to""" """Returns the architecture family a given target belongs to"""
if self._family is None: roots = [x for x in [self] + self.ancestors if not x.ancestors]
roots = [x for x in [self] + self.ancestors if not x.ancestors] msg = "a target is expected to belong to just one architecture family"
msg = "a target is expected to belong to just one architecture family" msg += f"[found {', '.join(str(x) for x in roots)}]"
msg += f"[found {', '.join(str(x) for x in roots)}]" assert len(roots) == 1, msg
assert len(roots) == 1, msg
self._family = roots.pop()
return self._family return roots.pop()
@property @property
def generic(self): def generic(self):
"""Returns the best generic architecture that is compatible with self""" """Returns the best generic architecture that is compatible with self"""
if self._generic is None: generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
generics = [x for x in [self] + self.ancestors if x.vendor == "generic"] return max(generics, key=lambda x: len(x.ancestors))
self._generic = max(generics, key=lambda x: len(x.ancestors))
return self._generic
def to_dict(self): def to_dict(self):
"""Returns a dictionary representation of this object.""" """Returns a dictionary representation of this object."""
@@ -205,7 +190,6 @@ def to_dict(self):
"generation": self.generation, "generation": self.generation,
"parents": [str(x) for x in self.parents], "parents": [str(x) for x in self.parents],
"compilers": self.compilers, "compilers": self.compilers,
"cpupart": self.cpu_part,
} }
@staticmethod @staticmethod
@@ -218,7 +202,6 @@ def from_dict(data) -> "Microarchitecture":
features=set(data["features"]), features=set(data["features"]),
compilers=data.get("compilers", {}), compilers=data.get("compilers", {}),
generation=data.get("generation", 0), generation=data.get("generation", 0),
cpu_part=data.get("cpupart", ""),
) )
def optimization_flags(self, compiler, version): def optimization_flags(self, compiler, version):
@@ -377,11 +360,8 @@ def fill_target_from_dict(name, data, targets):
features = set(values["features"]) features = set(values["features"])
compilers = values.get("compilers", {}) compilers = values.get("compilers", {})
generation = values.get("generation", 0) generation = values.get("generation", 0)
cpu_part = values.get("cpupart", "")
targets[name] = Microarchitecture( targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)
name, parents, vendor, features, compilers, generation=generation, cpu_part=cpu_part
)
known_targets = {} known_targets = {}
data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"] data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]
@@ -1482,6 +1482,7 @@
"cldemote", "cldemote",
"movdir64b", "movdir64b",
"movdiri", "movdiri",
"pdcm",
"serialize", "serialize",
"waitpkg" "waitpkg"
], ],
@@ -2224,96 +2225,14 @@
], ],
"nvhpc": [ "nvhpc": [
{ {
"versions": "21.11:23.8", "versions": "21.11:",
"name": "zen3", "name": "zen3",
"flags": "-tp {name}", "flags": "-tp {name}",
"warnings": "zen4 is not fully supported by nvhpc versions < 23.9, falling back to zen3" "warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
},
{
"versions": "23.9:",
"flags": "-tp {name}"
} }
] ]
} }
}, },
"zen5": {
"from": ["zen4"],
"vendor": "AuthenticAMD",
"features": [
"abm",
"aes",
"avx",
"avx2",
"avx512_bf16",
"avx512_bitalg",
"avx512bw",
"avx512cd",
"avx512dq",
"avx512f",
"avx512ifma",
"avx512vbmi",
"avx512_vbmi2",
"avx512vl",
"avx512_vnni",
"avx512_vp2intersect",
"avx512_vpopcntdq",
"avx_vnni",
"bmi1",
"bmi2",
"clflushopt",
"clwb",
"clzero",
"cppc",
"cx16",
"f16c",
"flush_l1d",
"fma",
"fsgsbase",
"gfni",
"ibrs_enhanced",
"mmx",
"movbe",
"movdir64b",
"movdiri",
"pclmulqdq",
"popcnt",
"rdseed",
"sse",
"sse2",
"sse4_1",
"sse4_2",
"sse4a",
"ssse3",
"tsc_adjust",
"vaes",
"vpclmulqdq",
"xsavec",
"xsaveopt"
],
"compilers": {
"gcc": [
{
"versions": "14.1:",
"name": "znver5",
"flags": "-march={name} -mtune={name}"
}
],
"aocc": [
{
"versions": "5.0:",
"name": "znver5",
"flags": "-march={name} -mtune={name}"
}
],
"clang": [
{
"versions": "19.1:",
"name": "znver5",
"flags": "-march={name} -mtune={name}"
}
]
}
},
"ppc64": { "ppc64": {
"from": [], "from": [],
"vendor": "generic", "vendor": "generic",
@@ -2792,8 +2711,7 @@
"flags": "-mcpu=thunderx2t99" "flags": "-mcpu=thunderx2t99"
} }
] ]
}, }
"cpupart": "0x0af"
}, },
"a64fx": { "a64fx": {
"from": ["armv8.2a"], "from": ["armv8.2a"],
@@ -2861,8 +2779,7 @@
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve" "flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
} }
] ]
}, }
"cpupart": "0x001"
}, },
"cortex_a72": { "cortex_a72": {
"from": ["aarch64"], "from": ["aarch64"],
@@ -2899,8 +2816,7 @@
"flags" : "-mcpu=cortex-a72" "flags" : "-mcpu=cortex-a72"
} }
] ]
}, }
"cpupart": "0xd08"
}, },
"neoverse_n1": { "neoverse_n1": {
"from": ["cortex_a72", "armv8.2a"], "from": ["cortex_a72", "armv8.2a"],
@@ -2921,7 +2837,8 @@
"asimdrdm", "asimdrdm",
"lrcpc", "lrcpc",
"dcpop", "dcpop",
"asimddp" "asimddp",
"ssbs"
], ],
"compilers" : { "compilers" : {
"gcc": [ "gcc": [
@@ -2985,8 +2902,7 @@
"flags": "-tp {name}" "flags": "-tp {name}"
} }
] ]
}, }
"cpupart": "0xd0c"
}, },
"neoverse_v1": { "neoverse_v1": {
"from": ["neoverse_n1", "armv8.4a"], "from": ["neoverse_n1", "armv8.4a"],
@@ -3010,6 +2926,8 @@
"lrcpc", "lrcpc",
"dcpop", "dcpop",
"sha3", "sha3",
"sm3",
"sm4",
"asimddp", "asimddp",
"sha512", "sha512",
"sve", "sve",
@@ -3018,6 +2936,7 @@
"uscat", "uscat",
"ilrcpc", "ilrcpc",
"flagm", "flagm",
"ssbs",
"dcpodp", "dcpodp",
"svei8mm", "svei8mm",
"svebf16", "svebf16",
@@ -3085,7 +3004,7 @@
}, },
{ {
"versions": "11:", "versions": "11:",
"flags" : "-march=armv8.4-a+sve+fp16+bf16+crypto+i8mm+rng" "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
}, },
{ {
"versions": "12:", "versions": "12:",
@@ -3109,8 +3028,7 @@
"flags": "-tp {name}" "flags": "-tp {name}"
} }
] ]
}, }
"cpupart": "0xd40"
}, },
"neoverse_v2": { "neoverse_v2": {
"from": ["neoverse_n1", "armv9.0a"], "from": ["neoverse_n1", "armv9.0a"],
@@ -3134,22 +3052,32 @@
"lrcpc", "lrcpc",
"dcpop", "dcpop",
"sha3", "sha3",
"sm3",
"sm4",
"asimddp", "asimddp",
"sha512", "sha512",
"sve", "sve",
"asimdfhm", "asimdfhm",
"dit",
"uscat", "uscat",
"ilrcpc", "ilrcpc",
"flagm", "flagm",
"ssbs",
"sb", "sb",
"dcpodp", "dcpodp",
"sve2", "sve2",
"sveaes",
"svepmull",
"svebitperm",
"svesha3",
"svesm4",
"flagm2", "flagm2",
"frint", "frint",
"svei8mm", "svei8mm",
"svebf16", "svebf16",
"i8mm", "i8mm",
"bf16" "bf16",
"dgh"
], ],
"compilers" : { "compilers" : {
"gcc": [ "gcc": [
@@ -3174,19 +3102,15 @@
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76" "flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
}, },
{ {
"versions": "10.0:11.3.99", "versions": "10.0:11.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77" "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
}, },
{
"versions": "11.4:11.99",
"flags" : "-mcpu=neoverse-v2"
},
{ {
"versions": "12.0:12.2.99", "versions": "12.0:12.99",
"flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710" "flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
}, },
{ {
"versions": "12.3:", "versions": "13.0:",
"flags" : "-mcpu=neoverse-v2" "flags" : "-mcpu=neoverse-v2"
} }
], ],
@@ -3221,112 +3145,7 @@
"flags": "-tp {name}" "flags": "-tp {name}"
} }
] ]
}, }
"cpupart": "0xd4f"
},
"neoverse_n2": {
"from": ["neoverse_n1", "armv9.0a"],
"vendor": "ARM",
"features": [
"fp",
"asimd",
"evtstrm",
"aes",
"pmull",
"sha1",
"sha2",
"crc32",
"atomics",
"fphp",
"asimdhp",
"cpuid",
"asimdrdm",
"jscvt",
"fcma",
"lrcpc",
"dcpop",
"sha3",
"asimddp",
"sha512",
"sve",
"asimdfhm",
"uscat",
"ilrcpc",
"flagm",
"sb",
"dcpodp",
"sve2",
"flagm2",
"frint",
"svei8mm",
"svebf16",
"i8mm",
"bf16"
],
"compilers" : {
"gcc": [
{
"versions": "4.8:5.99",
"flags": "-march=armv8-a"
},
{
"versions": "6:6.99",
"flags" : "-march=armv8.1-a"
},
{
"versions": "7.0:7.99",
"flags" : "-march=armv8.2-a -mtune=cortex-a72"
},
{
"versions": "8.0:8.99",
"flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
},
{
"versions": "9.0:9.99",
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
},
{
"versions": "10.0:10.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
},
{
"versions": "11.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"clang" : [
{
"versions": "9.0:10.99",
"flags" : "-march=armv8.5-a+sve"
},
{
"versions": "11.0:13.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
},
{
"versions": "14.0:15.99",
"flags" : "-march=armv9-a+i8mm+bf16"
},
{
"versions": "16.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"arm" : [
{
"versions": "23.04.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"nvhpc" : [
{
"versions": "23.3:",
"name": "neoverse-n1",
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd49"
}, },
"m1": { "m1": {
"from": ["armv8.4a"], "from": ["armv8.4a"],
@@ -3392,8 +3211,7 @@
"flags" : "-mcpu=apple-m1" "flags" : "-mcpu=apple-m1"
} }
] ]
}, }
"cpupart": "0x022"
}, },
"m2": { "m2": {
"from": ["m1", "armv8.5a"], "from": ["m1", "armv8.5a"],
@@ -3471,8 +3289,7 @@
"flags" : "-mcpu=apple-m2" "flags" : "-mcpu=apple-m2"
} }
] ]
}, }
"cpupart": "0x032"
}, },
"arm": { "arm": {
"from": [], "from": [],
@@ -52,9 +52,6 @@
} }
} }
} }
},
"cpupart": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -110,4 +107,4 @@
"additionalProperties": false "additionalProperties": false
} }
} }
} }
@@ -1,45 +0,0 @@
diff --git a/lib/spack/external/_vendoring/distro/distro.py b/lib/spack/external/_vendoring/distro/distro.py
index 89e1868047..50c3b18d4d 100644
--- a/lib/spack/external/_vendoring/distro/distro.py
+++ b/lib/spack/external/_vendoring/distro/distro.py
@@ -1265,27 +1265,29 @@ def _distro_release_info(self) -> Dict[str, str]:
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
else:
try:
- basenames = [
- basename
- for basename in os.listdir(self.etc_dir)
- if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
- and os.path.isfile(os.path.join(self.etc_dir, basename))
- ]
+ with os.scandir(self.etc_dir) as it:
+ etc_files = [
+ p.path for p in it
+ if p.is_file() and p.name not in _DISTRO_RELEASE_IGNORE_BASENAMES
+ ]
# We sort for repeatability in cases where there are multiple
# distro specific files; e.g. CentOS, Oracle, Enterprise all
# containing `redhat-release` on top of their own.
- basenames.sort()
+ etc_files.sort()
except OSError:
# This may occur when /etc is not readable but we can't be
# sure about the *-release files. Check common entries of
# /etc for information. If they turn out to not be there the
# error is handled in `_parse_distro_release_file()`.
- basenames = _DISTRO_RELEASE_BASENAMES
- for basename in basenames:
- match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+ etc_files = [
+ os.path.join(self.etc_dir, basename)
+ for basename in _DISTRO_RELEASE_BASENAMES
+ ]
+
+ for filepath in etc_files:
+ match = _DISTRO_RELEASE_BASENAME_PATTERN.match(os.path.basename(filepath))
if match is None:
continue
- filepath = os.path.join(self.etc_dir, basename)
distro_info = self._parse_distro_release_file(filepath)
# The name is always present if the pattern matches.
if "name" not in distro_info:
@@ -13,191 +13,3 @@ index 6b630cdfbb..1791fe7fbf 100644
-__version__ = metadata.version("jsonschema") -__version__ = metadata.version("jsonschema")
+ +
+__version__ = "3.2.0" +__version__ = "3.2.0"
diff --git a/lib/spack/external/_vendoring/jsonschema/_format.py b/lib/spack/external/_vendoring/jsonschema/_format.py
index 281a7cfcff..29061e3661 100644
--- a/lib/spack/external/_vendoring/jsonschema/_format.py
+++ b/lib/spack/external/_vendoring/jsonschema/_format.py
@@ -231,96 +231,6 @@ def is_host_name(instance):
return True
-try:
- # The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
- import idna
-except ImportError:
- pass
-else:
- @_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
- def is_idn_host_name(instance):
- if not isinstance(instance, str_types):
- return True
- idna.encode(instance)
- return True
-
-
-try:
- import rfc3987
-except ImportError:
- try:
- from rfc3986_validator import validate_rfc3986
- except ImportError:
- pass
- else:
- @_checks_drafts(name="uri")
- def is_uri(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3986(instance, rule="URI")
-
- @_checks_drafts(
- draft6="uri-reference",
- draft7="uri-reference",
- raises=ValueError,
- )
- def is_uri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3986(instance, rule="URI_reference")
-
-else:
- @_checks_drafts(draft7="iri", raises=ValueError)
- def is_iri(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="IRI")
-
- @_checks_drafts(draft7="iri-reference", raises=ValueError)
- def is_iri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="IRI_reference")
-
- @_checks_drafts(name="uri", raises=ValueError)
- def is_uri(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="URI")
-
- @_checks_drafts(
- draft6="uri-reference",
- draft7="uri-reference",
- raises=ValueError,
- )
- def is_uri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="URI_reference")
-
-
-try:
- from strict_rfc3339 import validate_rfc3339
-except ImportError:
- try:
- from rfc3339_validator import validate_rfc3339
- except ImportError:
- validate_rfc3339 = None
-
-if validate_rfc3339:
- @_checks_drafts(name="date-time")
- def is_datetime(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3339(instance)
-
- @_checks_drafts(draft7="time")
- def is_time(instance):
- if not isinstance(instance, str_types):
- return True
- return is_datetime("1970-01-01T" + instance)
-
-
@_checks_drafts(name="regex", raises=re.error)
def is_regex(instance):
if not isinstance(instance, str_types):
@@ -340,86 +250,3 @@ def is_draft3_time(instance):
if not isinstance(instance, str_types):
return True
return datetime.datetime.strptime(instance, "%H:%M:%S")
-
-
-try:
- import webcolors
-except ImportError:
- pass
-else:
- def is_css_color_code(instance):
- return webcolors.normalize_hex(instance)
-
- @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
- def is_css21_color(instance):
- if (
- not isinstance(instance, str_types) or
- instance.lower() in webcolors.css21_names_to_hex
- ):
- return True
- return is_css_color_code(instance)
-
- def is_css3_color(instance):
- if instance.lower() in webcolors.css3_names_to_hex:
- return True
- return is_css_color_code(instance)
-
-
-try:
- import jsonpointer
-except ImportError:
- pass
-else:
- @_checks_drafts(
- draft6="json-pointer",
- draft7="json-pointer",
- raises=jsonpointer.JsonPointerException,
- )
- def is_json_pointer(instance):
- if not isinstance(instance, str_types):
- return True
- return jsonpointer.JsonPointer(instance)
-
- # TODO: I don't want to maintain this, so it
- # needs to go either into jsonpointer (pending
- # https://github.com/stefankoegl/python-json-pointer/issues/34) or
- # into a new external library.
- @_checks_drafts(
- draft7="relative-json-pointer",
- raises=jsonpointer.JsonPointerException,
- )
- def is_relative_json_pointer(instance):
- # Definition taken from:
- # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
- if not isinstance(instance, str_types):
- return True
- non_negative_integer, rest = [], ""
- for i, character in enumerate(instance):
- if character.isdigit():
- non_negative_integer.append(character)
- continue
-
- if not non_negative_integer:
- return False
-
- rest = instance[i:]
- break
- return (rest == "#") or jsonpointer.JsonPointer(rest)
-
-
-try:
- import uritemplate.exceptions
-except ImportError:
- pass
-else:
- @_checks_drafts(
- draft6="uri-template",
- draft7="uri-template",
- raises=uritemplate.exceptions.InvalidTemplate,
- )
- def is_uri_template(
- instance,
- template_validator=uritemplate.Validator().force_balanced_braces(),
- ):
- template = uritemplate.URITemplate(instance)
- return template_validator.validate(template)
@@ -41,20 +41,6 @@ def comma_and(sequence: List[str]) -> str:
return comma_list(sequence, "and") return comma_list(sequence, "and")
def ordinal(number: int) -> str:
"""Return the ordinal representation (1st, 2nd, 3rd, etc.) for the provided number.
Args:
number: int to convert to ordinal number
Returns: number's corresponding ordinal
"""
idx = (number % 10) << 1
tens = number % 100 // 10
suffix = "th" if tens == 1 or idx > 6 else "thstndrd"[idx : idx + 2]
return f"{number}{suffix}"
def quote(sequence: List[str], q: str = "'") -> List[str]: def quote(sequence: List[str], q: str = "'") -> List[str]:
"""Quotes each item in the input list with the quote character passed as second argument.""" """Quotes each item in the input list with the quote character passed as second argument."""
return [f"{q}{e}{q}" for e in sequence] return [f"{q}{e}{q}" for e in sequence]
@@ -27,6 +27,8 @@
from llnl.util.lang import dedupe, memoized from llnl.util.lang import dedupe, memoized
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink
from spack.util.executable import Executable, which
from ..path import path_to_os_path, system_path_filter from ..path import path_to_os_path, system_path_filter
if sys.platform != "win32": if sys.platform != "win32":
@@ -47,11 +49,11 @@
"copy_mode", "copy_mode",
"filter_file", "filter_file",
"find", "find",
"find_first",
"find_headers", "find_headers",
"find_all_headers", "find_all_headers",
"find_libraries", "find_libraries",
"find_system_libraries", "find_system_libraries",
"fix_darwin_install_name",
"force_remove", "force_remove",
"force_symlink", "force_symlink",
"getuid", "getuid",
@@ -246,6 +248,42 @@ def path_contains_subdirectory(path, root):
return norm_path.startswith(norm_root) return norm_path.startswith(norm_root)
@memoized
def file_command(*args):
"""Creates entry point to `file` system command with provided arguments"""
file_cmd = which("file", required=True)
for arg in args:
file_cmd.add_default_arg(arg)
return file_cmd
@memoized
def _get_mime_type():
"""Generate method to call `file` system command to aquire mime type
for a specified path
"""
if sys.platform == "win32":
# -h option (no-dereference) does not exist in Windows
return file_command("-b", "--mime-type")
else:
return file_command("-b", "-h", "--mime-type")
def mime_type(filename):
"""Returns the mime type and subtype of a file.
Args:
filename: file to be analyzed
Returns:
Tuple containing the MIME type and subtype
"""
output = _get_mime_type()(filename, output=str, error=str).strip()
tty.debug("==> " + output)
type, _, subtype = output.partition("/")
return type, subtype
#: This generates the library filenames that may appear on any OS. #: This generates the library filenames that may appear on any OS.
library_extensions = ["a", "la", "so", "tbd", "dylib"] library_extensions = ["a", "la", "so", "tbd", "dylib"]
@@ -1586,12 +1624,6 @@ def remove_linked_tree(path):
shutil.rmtree(os.path.realpath(path), **kwargs) shutil.rmtree(os.path.realpath(path), **kwargs)
os.unlink(path) os.unlink(path)
else: else:
if sys.platform == "win32":
# Adding this prefix allows shutil to remove long paths on windows
# https://learn.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=registry
long_path_pfx = "\\\\?\\"
if not path.startswith(long_path_pfx):
path = long_path_pfx + path
shutil.rmtree(path, **kwargs) shutil.rmtree(path, **kwargs)
@@ -1641,6 +1673,41 @@ def safe_remove(*files_or_dirs):
raise raise
@system_path_filter
def fix_darwin_install_name(path):
"""Fix install name of dynamic libraries on Darwin to have full path.
There are two parts of this task:
1. Use ``install_name('-id', ...)`` to change install name of a single lib
2. Use ``install_name('-change', ...)`` to change the cross linking between
libs. The function assumes that all libraries are in one folder and
currently won't follow subfolders.
Parameters:
path (str): directory in which .dylib files are located
"""
libs = glob.glob(join_path(path, "*.dylib"))
for lib in libs:
# fix install name first:
install_name_tool = Executable("install_name_tool")
install_name_tool("-id", lib, lib)
otool = Executable("otool")
long_deps = otool("-L", lib, output=str).split("\n")
deps = [dep.partition(" ")[0][1::] for dep in long_deps[2:-1]]
# fix all dependencies:
for dep in deps:
for loc in libs:
# We really want to check for either
# dep == os.path.basename(loc) or
# dep == join_path(builddir, os.path.basename(loc)),
# but we don't know builddir (nor how symbolic links look
# in builddir). We thus only compare the basenames.
if os.path.basename(dep) == os.path.basename(loc):
install_name_tool("-change", dep, loc, lib)
break
def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2) -> Optional[str]: def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2) -> Optional[str]:
"""Find the first file matching a pattern. """Find the first file matching a pattern.
@@ -6,16 +6,17 @@
import collections.abc import collections.abc
import contextlib import contextlib
import functools import functools
import inspect
import itertools import itertools
import os import os
import re import re
import sys import sys
import traceback import traceback
from datetime import datetime, timedelta from datetime import datetime, timedelta
from typing import Callable, Iterable, List, Tuple, TypeVar from typing import Any, Callable, Iterable, List, Tuple
# Ignore emacs backups when listing modules # Ignore emacs backups when listing modules
ignore_modules = r"^\.#|~$" ignore_modules = [r"^\.#", "~$"]
def index_by(objects, *funcs): def index_by(objects, *funcs):
@@ -83,6 +84,20 @@ def index_by(objects, *funcs):
return result return result
def caller_locals():
"""This will return the locals of the *parent* of the caller.
This allows a function to insert variables into its caller's
scope. Yes, this is some black magic, and yes it's useful
for implementing things like depends_on and provides.
"""
# Passing zero here skips line context for speed.
stack = inspect.stack(0)
try:
return stack[2][0].f_locals
finally:
del stack
def attr_setdefault(obj, name, value): def attr_setdefault(obj, name, value):
"""Like dict.setdefault, but for objects.""" """Like dict.setdefault, but for objects."""
if not hasattr(obj, name): if not hasattr(obj, name):
@@ -90,6 +105,15 @@ def attr_setdefault(obj, name, value):
return getattr(obj, name) return getattr(obj, name)
def has_method(cls, name):
for base in inspect.getmro(cls):
if base is object:
continue
if name in base.__dict__:
return True
return False
def union_dicts(*dicts): def union_dicts(*dicts):
"""Use update() to combine all dicts into one. """Use update() to combine all dicts into one.
@@ -154,22 +178,19 @@ def list_modules(directory, **kwargs):
order.""" order."""
list_directories = kwargs.setdefault("directories", True) list_directories = kwargs.setdefault("directories", True)
ignore = re.compile(ignore_modules) for name in os.listdir(directory):
if name == "__init__.py":
continue
with os.scandir(directory) as it: path = os.path.join(directory, name)
for entry in it: if list_directories and os.path.isdir(path):
if entry.name == "__init__.py" or entry.name == "__pycache__": init_py = os.path.join(path, "__init__.py")
continue if os.path.isfile(init_py):
yield name
if ( elif name.endswith(".py"):
list_directories if not any(re.search(pattern, name) for pattern in ignore_modules):
and entry.is_dir() yield re.sub(".py$", "", name)
and os.path.isfile(os.path.join(entry.path, "__init__.py"))
):
yield entry.name
elif entry.name.endswith(".py") and entry.is_file() and not ignore.search(entry.name):
yield entry.name[:-3] # strip .py
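For a hypothetical package layout, both the old and the new implementation yield the same module names:
# plugins/
#   __init__.py          (skipped)
#   alpha.py             -> "alpha"
#   beta/__init__.py     -> "beta"   (directories need an __init__.py)
#   .#alpha.py           (emacs backup, matches ignore_modules)
for module_name in list_modules("plugins"):
    print(module_name)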
def decorator_with_or_without_args(decorator): def decorator_with_or_without_args(decorator):
@@ -216,8 +237,8 @@ def setter(name, value):
value.__name__ = name value.__name__ = name
setattr(cls, name, value) setattr(cls, name, value)
if not hasattr(cls, "_cmp_key"): if not has_method(cls, "_cmp_key"):
raise TypeError(f"'{cls.__name__}' doesn't define _cmp_key().") raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)
setter("__eq__", lambda s, o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key())) setter("__eq__", lambda s, o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
setter("__lt__", lambda s, o: o is not None and s._cmp_key() < o._cmp_key()) setter("__lt__", lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
@@ -367,8 +388,8 @@ def cd_fun():
TypeError: If the class does not have a ``_cmp_iter`` method TypeError: If the class does not have a ``_cmp_iter`` method
""" """
if not hasattr(cls, "_cmp_iter"): if not has_method(cls, "_cmp_iter"):
raise TypeError(f"'{cls.__name__}' doesn't define _cmp_iter().") raise TypeError("'%s' doesn't define _cmp_iter()." % cls.__name__)
# comparison operators are implemented in terms of lazy_eq and lazy_lt # comparison operators are implemented in terms of lazy_eq and lazy_lt
def eq(self, other): def eq(self, other):
@@ -843,19 +864,20 @@ def uniq(sequence):
return uniq_list return uniq_list
def elide_list(line_list: List[str], max_num: int = 10) -> List[str]: def elide_list(line_list, max_num=10):
"""Takes a long list and limits it to a smaller number of elements, """Takes a long list and limits it to a smaller number of elements,
replacing intervening elements with '...'. For example:: replacing intervening elements with '...'. For example::
elide_list(["1", "2", "3", "4", "5", "6"], 4) elide_list([1,2,3,4,5,6], 4)
gives:: gives::
["1", "2", "3", "...", "6"] [1, 2, 3, '...', 6]
""" """
if len(line_list) > max_num: if len(line_list) > max_num:
return [*line_list[: max_num - 1], "...", line_list[-1]] return line_list[: max_num - 1] + ["..."] + line_list[-1:]
return line_list else:
return line_list
@contextlib.contextmanager @contextlib.contextmanager
@@ -879,12 +901,9 @@ def enum(**kwargs):
return type("Enum", (object,), kwargs) return type("Enum", (object,), kwargs)
T = TypeVar("T")
def stable_partition( def stable_partition(
input_iterable: Iterable[T], predicate_fn: Callable[[T], bool] input_iterable: Iterable, predicate_fn: Callable[[Any], bool]
) -> Tuple[List[T], List[T]]: ) -> Tuple[List[Any], List[Any]]:
"""Partition the input iterable according to a custom predicate. """Partition the input iterable according to a custom predicate.
Args: Args:
@@ -896,13 +915,12 @@ def stable_partition(
Tuple of the list of elements evaluating to True, and Tuple of the list of elements evaluating to True, and
list of elements evaluating to False. list of elements evaluating to False.
""" """
true_items: List[T] = [] true_items, false_items = [], []
false_items: List[T] = []
for item in input_iterable: for item in input_iterable:
if predicate_fn(item): if predicate_fn(item):
true_items.append(item) true_items.append(item)
else: continue
false_items.append(item) false_items.append(item)
return true_items, false_items return true_items, false_items
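A short usage example (order within each half is preserved, hence "stable"):
evens, odds = stable_partition([3, 1, 4, 1, 5, 9, 2, 6], lambda n: n % 2 == 0)
# evens == [4, 2, 6], odds == [3, 1, 1, 5, 9]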

View File

@@ -10,7 +10,6 @@
import errno import errno
import io import io
import multiprocessing import multiprocessing
import multiprocessing.connection
import os import os
import re import re
import select import select

View File

@@ -3,13 +3,6 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import re
from typing import Optional
import spack.paths
import spack.util.git
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.23.0.dev0" __version__ = "0.23.0.dev0"
spack_version = __version__ spack_version = __version__
@@ -26,47 +19,4 @@ def __try_int(v):
spack_version_info = tuple([__try_int(v) for v in __version__.split(".")]) spack_version_info = tuple([__try_int(v) for v in __version__.split(".")])
def get_spack_commit() -> Optional[str]: __all__ = ["spack_version_info", "spack_version"]
"""Get the Spack git commit sha.
Returns:
(str or None) the commit sha if available, otherwise None
"""
git_path = os.path.join(spack.paths.prefix, ".git")
if not os.path.exists(git_path):
return None
git = spack.util.git.git()
if not git:
return None
rev = git(
"-C",
spack.paths.prefix,
"rev-parse",
"HEAD",
output=str,
error=os.devnull,
fail_on_error=False,
)
if git.returncode != 0:
return None
match = re.match(r"[a-f\d]{7,}$", rev)
return match.group(0) if match else None
def get_version() -> str:
"""Get a descriptive version of this instance of Spack.
Outputs '<PEP440 version> (<git commit sha>)'.
The commit sha is only added when available.
"""
commit = get_spack_commit()
if commit:
return f"{spack_version} ({commit})"
return spack_version
__all__ = ["spack_version_info", "spack_version", "get_version", "get_spack_commit"]

131
lib/spack/spack/abi.py Normal file
View File

@@ -0,0 +1,131 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from llnl.util.lang import memoized
import spack.spec
import spack.version
from spack.compilers.clang import Clang
from spack.util.executable import Executable, ProcessError
class ABI:
"""This class provides methods to test ABI compatibility between specs.
The current implementation is rather rough and could be improved."""
def architecture_compatible(
self, target: spack.spec.Spec, constraint: spack.spec.Spec
) -> bool:
"""Return true if architecture of target spec is ABI compatible
to the architecture of constraint spec. If either the target
or constraint specs have no architecture, target is also defined
as architecture ABI compatible to constraint."""
return (
not target.architecture
or not constraint.architecture
or target.architecture.intersects(constraint.architecture)
)
@memoized
def _gcc_get_libstdcxx_version(self, version):
"""Returns gcc ABI compatibility info by getting the library version of
a compiler's libstdc++ or libgcc_s"""
from spack.build_environment import dso_suffix
spec = spack.spec.CompilerSpec("gcc", version)
compilers = spack.compilers.compilers_for_spec(spec)
if not compilers:
return None
compiler = compilers[0]
rungcc = None
libname = None
output = None
if compiler.cxx:
rungcc = Executable(compiler.cxx)
libname = "libstdc++." + dso_suffix
elif compiler.cc:
rungcc = Executable(compiler.cc)
libname = "libgcc_s." + dso_suffix
else:
return None
try:
# Some gcc's are actually clang and don't respond properly to
# --print-file-name (they just print the filename, not the
# full path). Ignore these and expect them to be handled as clang.
if Clang.default_version(rungcc.exe[0]) != "unknown":
return None
output = rungcc("--print-file-name=%s" % libname, output=str)
except ProcessError:
return None
if not output:
return None
libpath = os.path.realpath(output.strip())
if not libpath:
return None
return os.path.basename(libpath)
@memoized
def _gcc_compiler_compare(self, pversion, cversion):
"""Returns true iff the gcc version pversion and cversion
are ABI compatible."""
plib = self._gcc_get_libstdcxx_version(pversion)
clib = self._gcc_get_libstdcxx_version(cversion)
if not plib or not clib:
return False
return plib == clib
def _intel_compiler_compare(
self, pversion: spack.version.ClosedOpenRange, cversion: spack.version.ClosedOpenRange
) -> bool:
"""Returns true iff the intel version pversion and cversion
are ABI compatible"""
# Test major and minor versions. Ignore build version.
pv = pversion.lo
cv = cversion.lo
return pv.up_to(2) == cv.up_to(2)
def compiler_compatible(
self, parent: spack.spec.Spec, child: spack.spec.Spec, loose: bool = False
) -> bool:
"""Return true if compilers for parent and child are ABI compatible."""
if not parent.compiler or not child.compiler:
return True
if parent.compiler.name != child.compiler.name:
# Different compiler families are assumed ABI incompatible
return False
if loose:
return True
# TODO: Can we move the specialized ABI matching stuff
# TODO: into compiler classes?
for pversion in parent.compiler.versions:
for cversion in child.compiler.versions:
# For a few compilers use specialized comparisons.
# Otherwise match on version match.
if pversion.intersects(cversion):
return True
elif parent.compiler.name == "gcc" and self._gcc_compiler_compare(
pversion, cversion
):
return True
elif parent.compiler.name == "intel" and self._intel_compiler_compare(
pversion, cversion
):
return True
return False
def compatible(
self, target: spack.spec.Spec, constraint: spack.spec.Spec, loose: bool = False
) -> bool:
"""Returns true if target spec is ABI compatible to constraint spec"""
return self.architecture_compatible(target, constraint) and self.compiler_compatible(
target, constraint, loose=loose
)
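A minimal usage sketch, assuming abstract specs that carry compiler constraints (the spec strings are illustrative):
import spack.spec
from spack.abi import ABI
a = spack.spec.Spec("mpich %gcc@12.3.0")
b = spack.spec.Spec("hdf5 %gcc@12.3.0")
# loose=True only requires matching compiler families; the strict path also
# compares libstdc++/libgcc_s versions for gcc, or major.minor for intel.
ABI().compatible(a, b, loose=True)  # True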

View File

@@ -42,20 +42,15 @@ def _search_duplicate_compilers(error_cls):
import inspect import inspect
import io import io
import itertools import itertools
import os
import pathlib import pathlib
import pickle import pickle
import re import re
import warnings import warnings
from typing import Iterable, List, Set, Tuple
from urllib.request import urlopen from urllib.request import urlopen
import llnl.util.lang import llnl.util.lang
from llnl.string import plural
import spack.builder
import spack.config import spack.config
import spack.fetch_strategy
import spack.patch import spack.patch
import spack.repo import spack.repo
import spack.spec import spack.spec
@@ -78,9 +73,7 @@ def __init__(self, summary, details):
self.details = tuple(details) self.details = tuple(details)
def __str__(self): def __str__(self):
if self.details: return self.summary + "\n" + "\n".join([" " + detail for detail in self.details])
return f"{self.summary}\n" + "\n".join(f" {detail}" for detail in self.details)
return self.summary
def __eq__(self, other): def __eq__(self, other):
if self.summary != other.summary or self.details != other.details: if self.summary != other.summary or self.details != other.details:
@@ -217,11 +210,6 @@ def _search_duplicate_compilers(error_cls):
group="configs", tag="CFG-PACKAGES", description="Sanity checks on packages.yaml", kwargs=() group="configs", tag="CFG-PACKAGES", description="Sanity checks on packages.yaml", kwargs=()
) )
#: Sanity checks on packages.yaml
config_repos = AuditClass(
group="configs", tag="CFG-REPOS", description="Sanity checks on repositories", kwargs=()
)
@config_packages @config_packages
def _search_duplicate_specs_in_externals(error_cls): def _search_duplicate_specs_in_externals(error_cls):
@@ -264,6 +252,40 @@ def _search_duplicate_specs_in_externals(error_cls):
return errors return errors
@config_packages
def _deprecated_preferences(error_cls):
"""Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
# TODO (v0.23): remove this audit as the attributes will not be allowed in config
errors = []
packages_yaml = spack.config.CONFIG.get_config("packages")
def make_error(attribute_name, config_data, summary):
s = io.StringIO()
s.write("Occurring in the following file:\n")
dict_view = syaml.syaml_dict((k, v) for k, v in config_data.items() if k == attribute_name)
syaml.dump_config(dict_view, stream=s, blame=True)
return error_cls(summary=summary, details=[s.getvalue()])
if "all" in packages_yaml and "version" in packages_yaml["all"]:
summary = "Using the deprecated 'version' attribute under 'packages:all'"
errors.append(make_error("version", packages_yaml["all"], summary))
for package_name in packages_yaml:
if package_name == "all":
continue
package_conf = packages_yaml[package_name]
for attribute in ("compiler", "providers", "target"):
if attribute not in package_conf:
continue
summary = (
f"Using the deprecated '{attribute}' attribute " f"under 'packages:{package_name}'"
)
errors.append(make_error(attribute, package_conf, summary))
return errors
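For reference, a packages.yaml shape that would trip this audit, written as the equivalent Python dict (package names and values are illustrative):
packages_yaml = {
    "all": {"version": ["1.2.3"]},  # 'version' under 'all' is deprecated
    "mpich": {
        "compiler": ["gcc@12"],     # 'compiler', 'providers' and 'target'
        "providers": {},            # are deprecated under a named package
        "target": ["x86_64"],
    },
}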
@config_packages @config_packages
def _avoid_mismatched_variants(error_cls): def _avoid_mismatched_variants(error_cls):
"""Warns if variant preferences have mismatched types or names.""" """Warns if variant preferences have mismatched types or names."""
@@ -284,7 +306,7 @@ def _avoid_mismatched_variants(error_cls):
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name) pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
for variant in current_spec.variants.values(): for variant in current_spec.variants.values():
# Variant does not exist at all # Variant does not exist at all
if variant.name not in pkg_cls.variant_names(): if variant.name not in pkg_cls.variants:
summary = ( summary = (
f"Setting a preference for the '{pkg_name}' package to the " f"Setting a preference for the '{pkg_name}' package to the "
f"non-existing variant '{variant.name}'" f"non-existing variant '{variant.name}'"
@@ -293,8 +315,9 @@ def _avoid_mismatched_variants(error_cls):
continue continue
# Variant cannot accept this value # Variant cannot accept this value
s = spack.spec.Spec(pkg_name)
try: try:
spack.variant.prevalidate_variant_value(pkg_cls, variant, strict=True) s.update_variant_validate(variant.name, variant.value)
except Exception: except Exception:
summary = ( summary = (
f"Setting the variant '{variant.name}' of the '{pkg_name}' package " f"Setting the variant '{variant.name}' of the '{pkg_name}' package "
@@ -328,43 +351,6 @@ def _wrongly_named_spec(error_cls):
return errors return errors
@config_packages
def _ensure_all_virtual_packages_have_default_providers(error_cls):
"""All virtual packages must have a default provider explicitly set."""
configuration = spack.config.create()
defaults = configuration.get("packages", scope="defaults")
default_providers = defaults["all"]["providers"]
virtuals = spack.repo.PATH.provider_index.providers
default_providers_filename = configuration.scopes["defaults"].get_section_filename("packages")
return [
error_cls(f"'{virtual}' must have a default provider in {default_providers_filename}", [])
for virtual in virtuals
if virtual not in default_providers
]
@config_repos
def _ensure_no_folders_without_package_py(error_cls):
"""Check that we don't leave any folder without a package.py in repos"""
errors = []
for repository in spack.repo.PATH.repos:
missing = []
for entry in os.scandir(repository.packages_path):
if not entry.is_dir():
continue
package_py = pathlib.Path(entry.path) / spack.repo.package_file_name
if not package_py.exists():
missing.append(entry.path)
if missing:
summary = (
f"The '{repository.namespace}' repository misses a package.py file"
f" in the following folders"
)
errors.append(error_cls(summary=summary, details=[f"{x}" for x in missing]))
return errors
def _make_config_error(config_data, summary, error_cls): def _make_config_error(config_data, summary, error_cls):
s = io.StringIO() s = io.StringIO()
s.write("Occurring in the following file:\n") s.write("Occurring in the following file:\n")
@@ -388,14 +374,6 @@ def _make_config_error(config_data, summary, error_cls):
) )
package_deprecated_attributes = AuditClass(
group="packages",
tag="PKG-DEPRECATED-ATTRIBUTES",
description="Sanity checks to preclude use of deprecated package attributes",
kwargs=("pkgs",),
)
package_properties = AuditClass( package_properties = AuditClass(
group="packages", group="packages",
tag="PKG-PROPERTIES", tag="PKG-PROPERTIES",
@@ -414,23 +392,22 @@ def _make_config_error(config_data, summary, error_cls):
) )
@package_properties @package_directives
def _check_build_test_callbacks(pkgs, error_cls): def _check_build_test_callbacks(pkgs, error_cls):
"""Ensure stand-alone test methods are not included in build-time callbacks. """Ensure stand-alone test method is not included in build-time callbacks"""
Test methods are for checking the installed software as stand-alone tests.
They could also be called during the post-install phase of a build.
"""
errors = [] errors = []
for pkg_name in pkgs: for pkg_name in pkgs:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name) pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None) test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)
has_test_method = test_callbacks and any([m.startswith("test_") for m in test_callbacks]) # TODO (post-34236): "test*"->"test_*" once remove deprecated methods
# TODO (post-34236): "test"->"test_" once remove deprecated methods
has_test_method = test_callbacks and any([m.startswith("test") for m in test_callbacks])
if has_test_method: if has_test_method:
msg = f"Package {pkg_name} includes stand-alone test methods in build-time checks." msg = '{0} package contains "test*" method(s) in ' "build_time_test_callbacks"
callbacks = ", ".join(test_callbacks) instr = 'Remove all methods whose names start with "test" from: [{0}]'.format(
instr = f"Remove the following from 'build_time_test_callbacks': {callbacks}" ", ".join(test_callbacks)
)
errors.append(error_cls(msg.format(pkg_name), [instr])) errors.append(error_cls(msg.format(pkg_name), [instr]))
return errors return errors
@@ -505,7 +482,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
name_definitions = collections.defaultdict(list) name_definitions = collections.defaultdict(list)
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name) pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
for cls_item in pkg_cls.__mro__: for cls_item in inspect.getmro(pkg_cls):
for name in RESERVED_NAMES: for name in RESERVED_NAMES:
current_value = cls_item.__dict__.get(name) current_value = cls_item.__dict__.get(name)
if current_value is None: if current_value is None:
@@ -528,53 +505,13 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
return errors return errors
@package_deprecated_attributes
def _search_for_deprecated_package_methods(pkgs, error_cls):
"""Ensure the package doesn't define or use deprecated methods"""
DEPRECATED_METHOD = (("test", "a name starting with 'test_'"),)
DEPRECATED_USE = (
("self.cache_extra_test_sources(", "cache_extra_test_sources(self, ..)"),
("self.install_test_root(", "install_test_root(self, ..)"),
("self.run_test(", "test_part(self, ..)"),
)
errors = []
for pkg_name in pkgs:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
method_errors = collections.defaultdict(list)
for name, function in methods:
for deprecated_name, alternate in DEPRECATED_METHOD:
if name == deprecated_name:
msg = f"Rename '{deprecated_name}' method to {alternate} instead."
method_errors[name].append(msg)
source = inspect.getsource(function)
for deprecated_name, alternate in DEPRECATED_USE:
if deprecated_name in source:
msg = f"Change '{deprecated_name}' to '{alternate}' in '{name}' method."
method_errors[name].append(msg)
num_methods = len(method_errors)
if num_methods > 0:
methods = plural(num_methods, "method", show_n=False)
error_msg = (
f"Package '{pkg_name}' implements or uses unsupported deprecated {methods}."
)
instr = [f"Make changes to '{pkg_cls.__module__}':"]
for name in sorted(method_errors):
instr.extend([f" {msg}" for msg in method_errors[name]])
errors.append(error_cls(error_msg, instr))
return errors
@package_properties @package_properties
def _ensure_all_package_names_are_lowercase(pkgs, error_cls): def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
"""Ensure package names are lowercase and consistent""" """Ensure package names are lowercase and consistent"""
badname_regex, errors = re.compile(r"[_A-Z]"), [] badname_regex, errors = re.compile(r"[_A-Z]"), []
for pkg_name in pkgs: for pkg_name in pkgs:
if badname_regex.search(pkg_name): if badname_regex.search(pkg_name):
error_msg = f"Package name '{pkg_name}' should be lowercase and must not contain '_'" error_msg = "Package name '{}' is either lowercase or conatine '_'".format(pkg_name)
errors.append(error_cls(error_msg, [])) errors.append(error_cls(error_msg, []))
return errors return errors
@@ -713,15 +650,9 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
errors = [] errors = []
for pkg_name in pkgs: for pkg_name in pkgs:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name) pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
buildsystem_variant, _ = pkg_cls.variants["build_system"]
# values are either Value objects (for conditional values) or the values themselves buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
build_system_names = set( builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]
v.value if isinstance(v, spack.variant.Value) else v
for _, variant in pkg_cls.variant_definitions("build_system")
for v in variant.values
)
builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in build_system_names]
module = pkg_cls.module module = pkg_cls.module
has_builders_in_package_py = any( has_builders_in_package_py = any(
getattr(module, name, False) for name in builder_cls_names getattr(module, name, False) for name in builder_cls_names
@@ -740,171 +671,6 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
return errors return errors
class DeprecatedMagicGlobals(ast.NodeVisitor):
def __init__(self, magic_globals: Iterable[str]):
super().__init__()
self.magic_globals: Set[str] = set(magic_globals)
# State to track whether we're in a class function
self.depth: int = 0
self.in_function: bool = False
self.path = (ast.Module, ast.ClassDef, ast.FunctionDef)
# Defined locals in the current function (heuristically at least)
self.locals: Set[str] = set()
# List of (name, lineno) tuples for references to magic globals
self.references_to_globals: List[Tuple[str, int]] = []
def descend_in_function_def(self, node: ast.AST) -> None:
if not isinstance(node, self.path[self.depth]):
return
self.depth += 1
if self.depth == len(self.path):
self.in_function = True
super().generic_visit(node)
if self.depth == len(self.path):
self.in_function = False
self.locals.clear()
self.depth -= 1
def generic_visit(self, node: ast.AST) -> None:
# Recurse into function definitions
if self.depth < len(self.path):
return self.descend_in_function_def(node)
elif not self.in_function:
return
elif isinstance(node, ast.Global):
for name in node.names:
if name in self.magic_globals:
self.references_to_globals.append((name, node.lineno))
elif isinstance(node, ast.Assign):
# visit the rhs before lhs
super().visit(node.value)
for target in node.targets:
super().visit(target)
elif isinstance(node, ast.Name) and node.id in self.magic_globals:
if isinstance(node.ctx, ast.Load) and node.id not in self.locals:
self.references_to_globals.append((node.id, node.lineno))
elif isinstance(node.ctx, ast.Store):
self.locals.add(node.id)
else:
super().generic_visit(node)
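A usage sketch of the visitor on a synthetic package source (the source string is illustrative):
import ast
source = '''
class Example:
    def cmake_args(self):
        return list(std_cmake_args)  # reads the deprecated global
'''
visitor = DeprecatedMagicGlobals(("std_cmake_args",))
visitor.visit(ast.parse(source))
print(visitor.references_to_globals)  # [('std_cmake_args', 4)]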
@package_properties
def _uses_deprecated_globals(pkgs, error_cls):
"""Ensure that packages do not use deprecated globals"""
errors = []
for pkg_name in pkgs:
# some packages scheduled to be removed in v0.23 are not worth fixing.
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
if all(v.get("deprecated", False) for v in pkg_cls.versions.values()):
continue
file = spack.repo.PATH.filename_for_package_name(pkg_name)
tree = ast.parse(open(file).read())
visitor = DeprecatedMagicGlobals(("std_cmake_args",))
visitor.visit(tree)
if visitor.references_to_globals:
errors.append(
error_cls(
f"Package '{pkg_name}' uses deprecated globals",
[
f"{file}:{line} references '{name}'"
for name, line in visitor.references_to_globals
],
)
)
return errors
@package_properties
def _ensure_test_docstring(pkgs, error_cls):
"""Ensure stand-alone test methods have a docstring.
The docstring of a test method is implicitly used as the description of
the corresponding test part during test results reporting.
"""
doc_regex = r'\s+("""[^"]+""")'
errors = []
for pkg_name in pkgs:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
method_names = []
for name, test_fn in methods:
if not name.startswith("test_"):
continue
# Ensure the test method has a docstring
source = inspect.getsource(test_fn)
match = re.search(doc_regex, source)
if match is None or len(match.group(0).replace('"', "").strip()) == 0:
method_names.append(name)
num_methods = len(method_names)
if num_methods > 0:
methods = plural(num_methods, "method", show_n=False)
docstrings = plural(num_methods, "docstring", show_n=False)
msg = f"Package {pkg_name} has test {methods} with empty or missing {docstrings}."
names = ", ".join(method_names)
instr = [
"Docstrings are used as descriptions in test outputs.",
f"Add a concise summary to the following {methods} in '{pkg_cls.__module__}':",
f"{names}",
]
errors.append(error_cls(msg, instr))
return errors
@package_properties
def _ensure_test_implemented(pkgs, error_cls):
"""Ensure stand-alone test methods are implemented.
The test method is also required to be non-empty.
"""
def skip(line):
ln = line.strip()
return ln.startswith("#") or "pass" in ln
doc_regex = r'\s+("""[^"]+""")'
errors = []
for pkg_name in pkgs:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
methods = inspect.getmembers(pkg_cls, predicate=lambda x: inspect.isfunction(x))
method_names = []
for name, test_fn in methods:
if not name.startswith("test_"):
continue
source = inspect.getsource(test_fn)
# Attempt to ensure the test method is implemented.
impl = re.sub(doc_regex, r"", source).splitlines()[1:]
lines = [ln.strip() for ln in impl if not skip(ln)]
if not lines:
method_names.append(name)
num_methods = len(method_names)
if num_methods > 0:
methods = plural(num_methods, "method", show_n=False)
msg = f"Package {pkg_name} has empty or missing test {methods}."
names = ", ".join(method_names)
instr = [
f"Implement or remove the following {methods} from '{pkg_cls.__module__}': {names}"
]
errors.append(error_cls(msg, instr))
return errors
@package_https_directives @package_https_directives
def _linting_package_file(pkgs, error_cls): def _linting_package_file(pkgs, error_cls):
"""Check for correctness of links""" """Check for correctness of links"""
@@ -1071,22 +837,20 @@ def check_virtual_with_variants(spec, msg):
# check variants # check variants
dependency_variants = dep.spec.variants dependency_variants = dep.spec.variants
for name, variant in dependency_variants.items(): for name, value in dependency_variants.items():
try: try:
spack.variant.prevalidate_variant_value( v, _ = dependency_pkg_cls.variants[name]
dependency_pkg_cls, variant, dep.spec, strict=True v.validate_or_raise(value, pkg_cls=dependency_pkg_cls)
)
except Exception as e: except Exception as e:
summary = ( summary = (
f"{pkg_name}: wrong variant used for dependency in 'depends_on()'" f"{pkg_name}: wrong variant used for dependency in 'depends_on()'"
) )
error_msg = str(e)
if isinstance(e, KeyError): if isinstance(e, KeyError):
error_msg = ( error_msg = (
f"variant {str(e).strip()} does not exist in package {dep_name}" f"variant {str(e).strip()} does not exist in package {dep_name}"
f" in package '{dep_name}'"
) )
error_msg += f" in package '{dep_name}'"
errors.append( errors.append(
error_cls(summary=summary, details=[error_msg, f"in {filename}"]) error_cls(summary=summary, details=[error_msg, f"in {filename}"])
@@ -1098,38 +862,39 @@ def check_virtual_with_variants(spec, msg):
@package_directives @package_directives
def _ensure_variant_defaults_are_parsable(pkgs, error_cls): def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
"""Ensures that variant defaults are present and parsable from cli""" """Ensures that variant defaults are present and parsable from cli"""
def check_variant(pkg_cls, variant, vname):
# bool is a subclass of int in python. Permitting a default that is an instance
# of 'int' means both foo=false and foo=0 are accepted. Other falsish values are
# not allowed, since they can't be parsed from CLI ('foo=')
default_is_parsable = isinstance(variant.default, int) or variant.default
if not default_is_parsable:
msg = f"Variant '{vname}' of package '{pkg_cls.name}' has an unparsable default value"
return [error_cls(msg, [])]
try:
vspec = variant.make_default()
except spack.variant.MultipleValuesInExclusiveVariantError:
msg = f"Can't create default value for variant '{vname}' in package '{pkg_cls.name}'"
return [error_cls(msg, [])]
try:
variant.validate_or_raise(vspec, pkg_cls.name)
except spack.variant.InvalidVariantValueError:
msg = "Default value of variant '{vname}' in package '{pkg.name}' is invalid"
question = "Is it among the allowed values?"
return [error_cls(msg, [question])]
return []
errors = [] errors = []
for pkg_name in pkgs: for pkg_name in pkgs:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name) pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
for vname in pkg_cls.variant_names(): for variant_name, entry in pkg_cls.variants.items():
for _, variant_def in pkg_cls.variant_definitions(vname): variant, _ = entry
errors.extend(check_variant(pkg_cls, variant_def, vname)) default_is_parsable = (
# Permitting a default that is an instance on 'int' permits
# to have foo=false or foo=0. Other falsish values are
# not allowed, since they can't be parsed from cli ('foo=')
isinstance(variant.default, int)
or variant.default
)
if not default_is_parsable:
error_msg = "Variant '{}' of package '{}' has a bad default value"
errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
continue
try:
vspec = variant.make_default()
except spack.variant.MultipleValuesInExclusiveVariantError:
error_msg = "Cannot create a default value for the variant '{}' in package '{}'"
errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
continue
try:
variant.validate_or_raise(vspec, pkg_cls=pkg_cls)
except spack.variant.InvalidVariantValueError:
error_msg = (
"The default value of the variant '{}' in package '{}' failed validation"
)
question = "Is it among the allowed values?"
errors.append(error_cls(error_msg.format(variant_name, pkg_name), [question]))
return errors return errors
@@ -1139,11 +904,11 @@ def _ensure_variants_have_descriptions(pkgs, error_cls):
errors = [] errors = []
for pkg_name in pkgs: for pkg_name in pkgs:
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name) pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
for name in pkg_cls.variant_names(): for variant_name, entry in pkg_cls.variants.items():
for when, variant in pkg_cls.variant_definitions(name): variant, _ = entry
if not variant.description: if not variant.description:
msg = f"Variant '{name}' in package '{pkg_name}' is missing a description" error_msg = "Variant '{}' in package '{}' is missing a description"
errors.append(error_cls(msg, [])) errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
return errors return errors
@@ -1200,26 +965,29 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
def _analyze_variants_in_directive(pkg, constraint, directive, error_cls): def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
variant_exceptions = (
spack.variant.InconsistentValidationError,
spack.variant.MultipleValuesInExclusiveVariantError,
spack.variant.InvalidVariantValueError,
KeyError,
)
errors = [] errors = []
variant_names = pkg.variant_names()
summary = f"{pkg.name}: wrong variant in '{directive}' directive"
filename = spack.repo.PATH.filename_for_package_name(pkg.name)
for name, v in constraint.variants.items(): for name, v in constraint.variants.items():
if name not in variant_names:
msg = f"variant {name} does not exist in {pkg.name}"
errors.append(error_cls(summary=summary, details=[msg, f"in {filename}"]))
continue
try: try:
spack.variant.prevalidate_variant_value(pkg, v, constraint, strict=True) variant, _ = pkg.variants[name]
except ( variant.validate_or_raise(v, pkg_cls=pkg)
spack.variant.InconsistentValidationError, except variant_exceptions as e:
spack.variant.MultipleValuesInExclusiveVariantError, summary = pkg.name + ': wrong variant in "{0}" directive'
spack.variant.InvalidVariantValueError, summary = summary.format(directive)
) as e: filename = spack.repo.PATH.filename_for_package_name(pkg.name)
msg = str(e).strip()
errors.append(error_cls(summary=summary, details=[msg, f"in {filename}"])) error_msg = str(e).strip()
if isinstance(e, KeyError):
error_msg = "the variant {0} does not exist".format(error_msg)
err = error_cls(summary=summary, details=[error_msg, "in " + filename])
errors.append(err)
return errors return errors
@@ -1257,10 +1025,9 @@ def _extracts_errors(triggers, summary):
for dname in dnames for dname in dnames
) )
for when, variants_by_name in pkg_cls.variants.items(): for vname, (variant, triggers) in pkg_cls.variants.items():
for vname, variant in variants_by_name.items(): summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant"
summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant" errors.extend(_extracts_errors(triggers, summary))
errors.extend(_extracts_errors([when], summary))
for when, providers, details in _error_items(pkg_cls.provided): for when, providers, details in _error_items(pkg_cls.provided):
errors.extend( errors.extend(

File diff suppressed because it is too large

View File

@@ -9,7 +9,6 @@
all_core_root_specs, all_core_root_specs,
ensure_clingo_importable_or_raise, ensure_clingo_importable_or_raise,
ensure_core_dependencies, ensure_core_dependencies,
ensure_file_in_path_or_raise,
ensure_gpg_in_path_or_raise, ensure_gpg_in_path_or_raise,
ensure_patchelf_in_path_or_raise, ensure_patchelf_in_path_or_raise,
) )
@@ -20,7 +19,6 @@
"is_bootstrapping", "is_bootstrapping",
"ensure_bootstrap_configuration", "ensure_bootstrap_configuration",
"ensure_core_dependencies", "ensure_core_dependencies",
"ensure_file_in_path_or_raise",
"ensure_gpg_in_path_or_raise", "ensure_gpg_in_path_or_raise",
"ensure_clingo_importable_or_raise", "ensure_clingo_importable_or_raise",
"ensure_patchelf_in_path_or_raise", "ensure_patchelf_in_path_or_raise",

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common basic functions used through the spack.bootstrap package""" """Common basic functions used through the spack.bootstrap package"""
import fnmatch import fnmatch
import importlib
import os.path import os.path
import re import re
import sys import sys
@@ -29,7 +28,7 @@
def _python_import(module: str) -> bool: def _python_import(module: str) -> bool:
try: try:
importlib.import_module(module) __import__(module)
except ImportError: except ImportError:
return False return False
return True return True

View File

@@ -1,154 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap concrete specs for clingo
Spack uses clingo to concretize specs. When clingo itself needs to be bootstrapped from sources,
we need to rely on another mechanism to get a concrete spec that fits the current host.
This module contains the logic to get a concrete spec for clingo, starting from a prototype
JSON file for a similar platform.
"""
import pathlib
import sys
from typing import Dict, Optional, Tuple
import archspec.cpu
import spack.compiler
import spack.compilers
import spack.platforms
import spack.spec
import spack.traverse
from .config import spec_for_current_python
class ClingoBootstrapConcretizer:
def __init__(self, configuration):
self.host_platform = spack.platforms.host()
self.host_os = self.host_platform.operating_system("frontend")
self.host_target = archspec.cpu.host().family
self.host_architecture = spack.spec.ArchSpec.frontend_arch()
self.host_architecture.target = str(self.host_target)
self.host_compiler = self._valid_compiler_or_raise()
self.host_python = self.python_external_spec()
if str(self.host_platform) == "linux":
self.host_libc = self.libc_external_spec()
self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)
def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
if str(self.host_platform) == "linux":
compiler_name = "gcc"
elif str(self.host_platform) == "darwin":
compiler_name = "apple-clang"
elif str(self.host_platform) == "windows":
compiler_name = "msvc"
elif str(self.host_platform) == "freebsd":
compiler_name = "clang"
else:
raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
candidates = spack.compilers.compilers_for_spec(
compiler_name, arch_spec=self.host_architecture
)
if not candidates:
raise RuntimeError(
f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
)
candidates.sort(key=lambda x: x.spec.version, reverse=True)
return candidates[0]
def _externals_from_yaml(
self, configuration: "spack.config.Configuration"
) -> Tuple[Optional["spack.spec.Spec"], Optional["spack.spec.Spec"]]:
packages_yaml = configuration.get("packages")
requirements = {"cmake": "@3.20:", "bison": "@2.5:"}
selected: Dict[str, Optional["spack.spec.Spec"]] = {"cmake": None, "bison": None}
for pkg_name in ["cmake", "bison"]:
if pkg_name not in packages_yaml:
continue
candidates = packages_yaml[pkg_name].get("externals", [])
for candidate in candidates:
s = spack.spec.Spec(candidate["spec"], external_path=candidate["prefix"])
if not s.satisfies(requirements[pkg_name]):
continue
if not s.intersects(f"%{self.host_compiler.spec}"):
continue
if not s.intersects(f"arch={self.host_architecture}"):
continue
selected[pkg_name] = self._external_spec(s)
break
return selected["cmake"], selected["bison"]
def prototype_path(self) -> pathlib.Path:
"""Path to a prototype concrete specfile for clingo"""
parent_dir = pathlib.Path(__file__).parent
result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-{self.host_target}.json"
if str(self.host_platform) == "linux":
# Using aarch64 as a fallback, since it has gnuconfig (x86_64 doesn't have it)
if not result.exists():
result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-aarch64.json"
elif str(self.host_platform) == "freebsd":
result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-amd64.json"
elif not result.exists():
raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
return result
def concretize(self) -> "spack.spec.Spec":
# Read the prototype and mark it NOT concrete
s = spack.spec.Spec.from_specfile(str(self.prototype_path()))
s._mark_concrete(False)
# Tweak it to conform to the host architecture
for node in s.traverse():
node.architecture.os = str(self.host_os)
node.compiler = self.host_compiler.spec
node.architecture = self.host_architecture
if node.name == "gcc-runtime":
node.versions = self.host_compiler.spec.versions
for edge in spack.traverse.traverse_edges([s], cover="edges"):
if edge.spec.name == "python":
edge.spec = self.host_python
if edge.spec.name == "bison" and self.external_bison:
edge.spec = self.external_bison
if edge.spec.name == "cmake" and self.external_cmake:
edge.spec = self.external_cmake
if "libc" in edge.virtuals:
edge.spec = self.host_libc
s._finalize_concretization()
# Work around the fact that the installer calls Spec.dependents() and
# we modified edges inconsistently
return s.copy()
def python_external_spec(self) -> "spack.spec.Spec":
"""Python external spec corresponding to the current running interpreter"""
result = spack.spec.Spec(spec_for_current_python(), external_path=sys.exec_prefix)
return self._external_spec(result)
def libc_external_spec(self) -> "spack.spec.Spec":
result = self.host_compiler.default_libc
return self._external_spec(result)
def _external_spec(self, initial_spec) -> "spack.spec.Spec":
initial_spec.namespace = "builtin"
initial_spec.compiler = self.host_compiler.spec
initial_spec.architecture = self.host_architecture
for flag_type in spack.spec.FlagMap.valid_compiler_flags():
initial_spec.compiler_flags[flag_type] = []
return spack.spec.parse_with_version_concrete(initial_spec)
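Its call pattern, mirroring the bootstrap code that consumed it:
import spack.config
bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
clingo_spec = bootstrapper.concretize()  # prototype JSON adapted to this host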

View File

@@ -14,7 +14,6 @@
import spack.compilers import spack.compilers
import spack.config import spack.config
import spack.environment import spack.environment
import spack.modules
import spack.paths import spack.paths
import spack.platforms import spack.platforms
import spack.repo import spack.repo
@@ -130,10 +129,10 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path())) configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
for name, path in configuration_paths: for name, path in configuration_paths:
platform = spack.platforms.host().name platform = spack.platforms.host().name
platform_scope = spack.config.DirectoryConfigScope( platform_scope = spack.config.ConfigScope(
f"{name}/{platform}", os.path.join(path, platform) "/".join([name, platform]), os.path.join(path, platform)
) )
generic_scope = spack.config.DirectoryConfigScope(name, path) generic_scope = spack.config.ConfigScope(name, path)
config_scopes.extend([generic_scope, platform_scope]) config_scopes.extend([generic_scope, platform_scope])
msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}" msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}"
tty.debug(msg.format(generic_scope.name, generic_scope.path)) tty.debug(msg.format(generic_scope.name, generic_scope.path))
@@ -144,7 +143,11 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None: def _add_compilers_if_missing() -> None:
arch = spack.spec.ArchSpec.frontend_arch() arch = spack.spec.ArchSpec.frontend_arch()
if not spack.compilers.compilers_for_arch(arch): if not spack.compilers.compilers_for_arch(arch):
spack.compilers.find_compilers() new_compilers = spack.compilers.find_new_compilers(
mixed_toolchain=sys.platform == "darwin"
)
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers)
@contextlib.contextmanager @contextlib.contextmanager
@@ -153,7 +156,7 @@ def _ensure_bootstrap_configuration() -> Generator:
bootstrap_store_path = store_path() bootstrap_store_path = store_path()
user_configuration = _read_and_sanitize_configuration() user_configuration = _read_and_sanitize_configuration()
with spack.environment.no_active_environment(): with spack.environment.no_active_environment():
with spack.platforms.use_platform( with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform(
spack.platforms.real_host() spack.platforms.real_host()
), spack.repo.use_repositories(spack.paths.packages_path): ), spack.repo.use_repositories(spack.paths.packages_path):
# Default configuration scopes excluding command line # Default configuration scopes excluding command line

View File

@@ -37,19 +37,23 @@
import spack.binary_distribution import spack.binary_distribution
import spack.config import spack.config
import spack.detection import spack.detection
import spack.environment
import spack.modules
import spack.paths
import spack.platforms import spack.platforms
import spack.platforms.linux
import spack.repo
import spack.spec import spack.spec
import spack.store import spack.store
import spack.user_environment import spack.user_environment
import spack.util.environment
import spack.util.executable import spack.util.executable
import spack.util.path import spack.util.path
import spack.util.spack_yaml import spack.util.spack_yaml
import spack.util.url import spack.util.url
import spack.version import spack.version
from spack.installer import PackageInstaller
from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
from .clingo import ClingoBootstrapConcretizer
from .config import spack_python_interpreter, spec_for_current_python from .config import spack_python_interpreter, spec_for_current_python
#: Name of the file containing metadata about the bootstrapping source #: Name of the file containing metadata about the bootstrapping source
@@ -264,13 +268,15 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
# Try to build and install from sources # Try to build and install from sources
with spack_python_interpreter(): with spack_python_interpreter():
# Add hint to use frontend operating system on Cray
concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
if module == "clingo": if module == "clingo":
bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG) # TODO: remove when the old concretizer is deprecated # pylint: disable=fixme
concrete_spec = bootstrapper.concretize() concrete_spec._old_concretize( # pylint: disable=protected-access
else: deprecation_warning=False
concrete_spec = spack.spec.Spec(
abstract_spec_str + " ^" + spec_for_current_python()
) )
else:
concrete_spec.concretize() concrete_spec.concretize()
msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources" msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
@@ -278,7 +284,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
# Install the spec that should make the module importable # Install the spec that should make the module importable
with spack.config.override(self.mirror_scope): with spack.config.override(self.mirror_scope):
PackageInstaller([concrete_spec.package], fail_fast=True).install() concrete_spec.package.do_install(fail_fast=True)
if _try_import_from_store(module, query_spec=concrete_spec, query_info=info): if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
self.last_search = info self.last_search = info
@@ -297,11 +303,18 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bo
# might reduce compilation time by a fair amount # might reduce compilation time by a fair amount
_add_externals_if_missing() _add_externals_if_missing()
concrete_spec = spack.spec.Spec(abstract_spec_str).concretized() concrete_spec = spack.spec.Spec(abstract_spec_str)
if concrete_spec.name == "patchelf":
concrete_spec._old_concretize( # pylint: disable=protected-access
deprecation_warning=False
)
else:
concrete_spec.concretize()
msg = "[BOOTSTRAP] Try installing '{0}' from sources" msg = "[BOOTSTRAP] Try installing '{0}' from sources"
tty.debug(msg.format(abstract_spec_str)) tty.debug(msg.format(abstract_spec_str))
with spack.config.override(self.mirror_scope): with spack.config.override(self.mirror_scope):
PackageInstaller([concrete_spec.package], fail_fast=True).install() concrete_spec.package.do_install()
if _executables_in_store(executables, concrete_spec, query_info=info): if _executables_in_store(executables, concrete_spec, query_info=info):
self.last_search = info self.last_search = info
return True return True
@@ -467,8 +480,7 @@ def ensure_clingo_importable_or_raise() -> None:
def gnupg_root_spec() -> str: def gnupg_root_spec() -> str:
"""Return the root spec used to bootstrap GnuPG""" """Return the root spec used to bootstrap GnuPG"""
root_spec_name = "win-gpg" if IS_WINDOWS else "gnupg" return _root_spec("gnupg@2.3:")
return _root_spec(f"{root_spec_name}@2.3:")
def ensure_gpg_in_path_or_raise() -> None: def ensure_gpg_in_path_or_raise() -> None:
@@ -478,19 +490,6 @@ def ensure_gpg_in_path_or_raise() -> None:
) )
def file_root_spec() -> str:
"""Return the root spec used to bootstrap file"""
root_spec_name = "win-file" if IS_WINDOWS else "file"
return _root_spec(root_spec_name)
def ensure_file_in_path_or_raise() -> None:
"""Ensure file is in the PATH or raise"""
return ensure_executables_in_path_or_raise(
executables=["file"], abstract_spec=file_root_spec()
)
def patchelf_root_spec() -> str: def patchelf_root_spec() -> str:
"""Return the root spec used to bootstrap patchelf""" """Return the root spec used to bootstrap patchelf"""
# 0.13.1 is the last version not to require C++17. # 0.13.1 is the last version not to require C++17.
@@ -574,15 +573,14 @@ def ensure_core_dependencies() -> None:
"""Ensure the presence of all the core dependencies.""" """Ensure the presence of all the core dependencies."""
if sys.platform.lower() == "linux": if sys.platform.lower() == "linux":
ensure_patchelf_in_path_or_raise() ensure_patchelf_in_path_or_raise()
elif sys.platform == "win32": if not IS_WINDOWS:
ensure_file_in_path_or_raise() ensure_gpg_in_path_or_raise()
ensure_gpg_in_path_or_raise()
ensure_clingo_importable_or_raise() ensure_clingo_importable_or_raise()
def all_core_root_specs() -> List[str]: def all_core_root_specs() -> List[str]:
"""Return a list of all the core root specs that may be used to bootstrap Spack""" """Return a list of all the core root specs that may be used to bootstrap Spack"""
return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec(), file_root_spec()] return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
def bootstrapping_sources(scope: Optional[str] = None): def bootstrapping_sources(scope: Optional[str] = None):

View File

@@ -14,9 +14,9 @@
from llnl.util import tty from llnl.util import tty
import spack.environment import spack.environment
import spack.spec
import spack.tengine import spack.tengine
import spack.util.path import spack.util.cpus
import spack.util.executable
from ._common import _root_spec from ._common import _root_spec
from .config import root_path, spec_for_current_python, store_path from .config import root_path, spec_for_current_python, store_path

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -88,7 +88,7 @@ def _core_requirements() -> List[RequiredResponseType]:
def _buildcache_requirements() -> List[RequiredResponseType]: def _buildcache_requirements() -> List[RequiredResponseType]:
_buildcache_exes = { _buildcache_exes = {
"file": _missing("file", "required to analyze files for buildcaches", system_only=False), "file": _missing("file", "required to analyze files for buildcaches"),
("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False), ("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
} }
if platform.system().lower() == "darwin": if platform.system().lower() == "darwin":
@@ -124,7 +124,7 @@ def _development_requirements() -> List[RequiredResponseType]:
# Ensure we trigger environment modifications if we have an environment # Ensure we trigger environment modifications if we have an environment
if BootstrapEnvironment.spack_yaml().exists(): if BootstrapEnvironment.spack_yaml().exists():
with BootstrapEnvironment() as env: with BootstrapEnvironment() as env:
env.load() env.update_syspath_and_environ()
return [ return [
_required_executable( _required_executable(

View File

@@ -37,16 +37,13 @@
import multiprocessing import multiprocessing
import os import os
import re import re
import stat
import sys import sys
import traceback import traceback
import types import types
from collections import defaultdict from collections import defaultdict
from enum import Flag, auto from enum import Flag, auto
from itertools import chain from itertools import chain
from typing import Callable, Dict, List, Optional, Set, Tuple from typing import Dict, List, Set, Tuple
import archspec.cpu
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.string import plural from llnl.string import plural
@@ -56,7 +53,6 @@
from llnl.util.tty.color import cescape, colorize from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.log import MultiProcessFd from llnl.util.tty.log import MultiProcessFd
import spack.build_systems._checks
import spack.build_systems.cmake import spack.build_systems.cmake
import spack.build_systems.meson import spack.build_systems.meson
import spack.build_systems.python import spack.build_systems.python
@@ -65,21 +61,26 @@
import spack.config import spack.config
import spack.deptypes as dt import spack.deptypes as dt
import spack.error import spack.error
import spack.multimethod import spack.main
import spack.package_base import spack.package_base
import spack.paths import spack.paths
import spack.platforms import spack.platforms
import spack.repo
import spack.schema.environment import spack.schema.environment
import spack.spec import spack.spec
import spack.stage import spack.stage
import spack.store import spack.store
import spack.subprocess_context import spack.subprocess_context
import spack.user_environment
import spack.util.executable import spack.util.executable
import spack.util.libc import spack.util.path
import spack.util.pattern
from spack import traverse from spack import traverse
from spack.context import Context from spack.context import Context
from spack.error import InstallError, NoHeadersError, NoLibrariesError from spack.error import NoHeadersError, NoLibrariesError
from spack.install_test import spack_install_test_log from spack.install_test import spack_install_test_log
from spack.installer import InstallError
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import ( from spack.util.environment import (
SYSTEM_DIR_CASE_ENTRY, SYSTEM_DIR_CASE_ENTRY,
EnvironmentModifications, EnvironmentModifications,
@@ -91,7 +92,7 @@
) )
from spack.util.executable import Executable from spack.util.executable import Executable
from spack.util.log_parse import make_log_context, parse_log_events from spack.util.log_parse import make_log_context, parse_log_events
from spack.util.module_cmd import load_module from spack.util.module_cmd import load_module, path_from_modules
# #
# This can be set by the user to globally disable parallel builds. # This can be set by the user to globally disable parallel builds.
@@ -362,7 +363,7 @@ def set_compiler_environment_variables(pkg, env):
_add_werror_handling(keep_werror, env) _add_werror_handling(keep_werror, env)
# Set the target parameters that the compiler will add # Set the target parameters that the compiler will add
isa_arg = optimization_flags(compiler, spec.target) isa_arg = spec.architecture.target.optimization_flags(compiler)
env.set("SPACK_TARGET_ARGS", isa_arg) env.set("SPACK_TARGET_ARGS", isa_arg)
# Trap spack-tracked compiler flags as appropriate. # Trap spack-tracked compiler flags as appropriate.
@@ -407,65 +408,6 @@ def set_compiler_environment_variables(pkg, env):
return env return env
def optimization_flags(compiler, target):
if spack.compilers.is_mixed_toolchain(compiler):
msg = (
"microarchitecture specific optimizations are not "
"supported yet on mixed compiler toolchains [check"
f" {compiler.name}@{compiler.version} for further details]"
)
tty.debug(msg)
return ""
# Try to check if the current compiler comes with a version number or
# has an unexpected suffix. If so, treat it as a compiler with a
# custom spec.
compiler_version = compiler.version
version_number, suffix = archspec.cpu.version_components(compiler.version)
if not version_number or suffix:
try:
compiler_version = compiler.real_version
except spack.util.executable.ProcessError as e:
# log this and just return compiler.version instead
tty.debug(str(e))
try:
result = target.optimization_flags(compiler.name, compiler_version.dotted_numeric_string)
except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
result = ""
return result
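The underlying archspec call, as a sketch (the exact flags depend on compiler and target; the output shown is illustrative):
import archspec.cpu
target = archspec.cpu.TARGETS["haswell"]
target.optimization_flags("gcc", "12.3.0")  # e.g. "-march=haswell -mtune=haswell"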
-class FilterDefaultDynamicLinkerSearchPaths:
-    """Remove rpaths to directories that are default search paths of the dynamic linker."""
-
-    def __init__(self, dynamic_linker: Optional[str]) -> None:
-        # Identify directories by (inode, device) tuple, which handles symlinks too.
-        self.default_path_identifiers: Set[Tuple[int, int]] = set()
-        if not dynamic_linker:
-            return
-        for path in spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker):
-            try:
-                s = os.stat(path)
-                if stat.S_ISDIR(s.st_mode):
-                    self.default_path_identifiers.add((s.st_ino, s.st_dev))
-            except OSError:
-                continue
-
-    def is_dynamic_loader_default_path(self, p: str) -> bool:
-        try:
-            s = os.stat(p)
-            return (s.st_ino, s.st_dev) in self.default_path_identifiers
-        except OSError:
-            return False
-
-    def __call__(self, dirs: List[str]) -> List[str]:
-        if not self.default_path_identifiers:
-            return dirs
-        return [p for p in dirs if not self.is_dynamic_loader_default_path(p)]
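A side note on the (inode, device) identity used above: stat() of two paths that resolve to the same directory yields the same pair even when one of them is a symlink, which is why the class compares identifiers rather than path strings. A self-contained illustration (POSIX only; the temp paths are invented here):

import os
import stat
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    real = os.path.join(tmp, "lib")
    link = os.path.join(tmp, "lib-alias")
    os.mkdir(real)
    os.symlink(real, link)

    def ident(p):
        s = os.stat(p)  # follows symlinks
        return (s.st_ino, s.st_dev) if stat.S_ISDIR(s.st_mode) else None

    assert ident(real) == ident(link)  # same directory, same identity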
 def set_wrapper_variables(pkg, env):
     """Set environment variables used by the Spack compiler wrapper (which have the prefix
     `SPACK_`) and also add the compiler wrappers to PATH.

@@ -513,81 +455,76 @@ def set_wrapper_variables(pkg, env):
     env.set(SPACK_DEBUG, "TRUE")
     env.set(SPACK_SHORT_SPEC, pkg.spec.short_spec)
     env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
-    env.set(SPACK_DEBUG_LOG_DIR, spack.paths.spack_working_dir)
+    env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)

+    # Find ccache binary and hand it to build environment
     if spack.config.get("config:ccache"):
-        # Enable ccache in the compiler wrapper
         env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
-    else:
-        # Avoid cache pollution if a build system forces `ccache <compiler wrapper invocation>`.
-        env.set("CCACHE_DISABLE", "1")

     # Gather information about various types of dependencies
-    rpath_hashes = set(s.dag_hash() for s in get_rpath_deps(pkg))
-    link_deps = pkg.spec.traverse(root=False, order="topo", deptype=dt.LINK)
-    external_link_deps, nonexternal_link_deps = stable_partition(link_deps, lambda d: d.external)
+    link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
+    rpath_deps = get_rpath_deps(pkg)

     link_dirs = []
     include_dirs = []
     rpath_dirs = []

-    for dep in chain(external_link_deps, nonexternal_link_deps):
-        # TODO: is_system_path is wrong, but even if we knew default -L, -I flags from the compiler
-        # and default search dirs from the dynamic linker, it's not obvious how to avoid a possibly
-        # expensive search in `query.libs.directories` and `query.headers.directories`, which is
-        # what this branch is trying to avoid.
-        if is_system_path(dep.prefix):
-            continue
-        # TODO: as of Spack 0.22, multiple instances of the same package may occur among the link
-        # deps, so keying by name is wrong. In practice it is not problematic: we obtain the same
-        # gcc-runtime / glibc here, and repeatedly add the same dirs that are later deduped.
-        query = pkg.spec[dep.name]
-        dep_link_dirs = []
-        try:
-            # Locating libraries can be time consuming, so log start and finish.
-            tty.debug(f"Collecting libraries for {dep.name}")
-            dep_link_dirs.extend(query.libs.directories)
-            tty.debug(f"Libraries for {dep.name} have been collected.")
-        except NoLibrariesError:
-            tty.debug(f"No libraries found for {dep.name}")
-
-        for default_lib_dir in ("lib", "lib64"):
-            default_lib_prefix = os.path.join(dep.prefix, default_lib_dir)
-            if os.path.isdir(default_lib_prefix):
-                dep_link_dirs.append(default_lib_prefix)
-
-        link_dirs[:0] = dep_link_dirs
-        if dep.dag_hash() in rpath_hashes:
-            rpath_dirs[:0] = dep_link_dirs
-
-        try:
-            tty.debug(f"Collecting headers for {dep.name}")
-            include_dirs[:0] = query.headers.directories
-            tty.debug(f"Headers for {dep.name} have been collected.")
-        except NoHeadersError:
-            tty.debug(f"No headers found for {dep.name}")
-
-    # The top-level package is heuristically rpath'ed.
-    for libdir in ("lib64", "lib"):
+    def _prepend_all(list_to_modify, items_to_add):
+        # Update the original list (creating a new list would be faster but
+        # may not be convenient)
+        for item in reversed(list(items_to_add)):
+            list_to_modify.insert(0, item)
+
+    def update_compiler_args_for_dep(dep):
+        if dep in link_deps and (not is_system_path(dep.prefix)):
+            query = pkg.spec[dep.name]
+            dep_link_dirs = list()
+            try:
+                # In some circumstances (particularly for externals) finding
+                # libraries packages can be time consuming, so indicate that
+                # we are performing this operation (and also report when it
+                # finishes).
+                tty.debug("Collecting libraries for {0}".format(dep.name))
+                dep_link_dirs.extend(query.libs.directories)
+                tty.debug("Libraries for {0} have been collected.".format(dep.name))
+            except NoLibrariesError:
+                tty.debug("No libraries found for {0}".format(dep.name))
+
+            for default_lib_dir in ["lib", "lib64"]:
+                default_lib_prefix = os.path.join(dep.prefix, default_lib_dir)
+                if os.path.isdir(default_lib_prefix):
+                    dep_link_dirs.append(default_lib_prefix)
+
+            _prepend_all(link_dirs, dep_link_dirs)
+            if dep in rpath_deps:
+                _prepend_all(rpath_dirs, dep_link_dirs)
+
+            try:
+                _prepend_all(include_dirs, query.headers.directories)
+            except NoHeadersError:
+                tty.debug("No headers found for {0}".format(dep.name))
+
+    for dspec in pkg.spec.traverse(root=False, order="post"):
+        if dspec.external:
+            update_compiler_args_for_dep(dspec)
+
+    # Just above, we prepended entries for -L/-rpath for externals. We
+    # now do this for non-external packages so that Spack-built packages
+    # are searched first for libraries etc.
+    for dspec in pkg.spec.traverse(root=False, order="post"):
+        if not dspec.external:
+            update_compiler_args_for_dep(dspec)
+
+    # The top-level package is always RPATHed. It hasn't been installed yet
+    # so the RPATHs are added unconditionally (e.g. even though lib64/ may
+    # not be created for the install).
+    for libdir in ["lib64", "lib"]:
         lib_path = os.path.join(pkg.prefix, libdir)
         rpath_dirs.insert(0, lib_path)

-    filter_default_dynamic_linker_search_paths = FilterDefaultDynamicLinkerSearchPaths(
-        pkg.compiler.default_dynamic_linker
-    )
-
-    # TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path
-    # branch above). link_dirs should be filtered with entries from _parse_link_paths.
     link_dirs = list(dedupe(filter_system_paths(link_dirs)))
     include_dirs = list(dedupe(filter_system_paths(include_dirs)))
     rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
-    rpath_dirs = filter_default_dynamic_linker_search_paths(rpath_dirs)
-
-    # TODO: implicit_rpaths is prefiltered by is_system_path, that should be removed in favor of
-    # just this filter.
-    implicit_rpaths = filter_default_dynamic_linker_search_paths(pkg.compiler.implicit_rpaths())
-    if implicit_rpaths:
-        env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))
     # Spack managed directories include the stage, store and upstream stores. We extend this with
     # their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).

@@ -617,12 +554,14 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
     """
     module = ModuleChangePropagator(pkg)

-    jobs = spack.config.determine_number_of_jobs(parallel=pkg.parallel)
-    module.make_jobs = jobs
     if context == Context.BUILD:
+        module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
         module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
         module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

+    jobs = determine_number_of_jobs(parallel=pkg.parallel)
+    module.make_jobs = jobs
+
     # TODO: make these build deps that can be installed if not found.
     module.make = MakeExecutable("make", jobs)
     module.gmake = MakeExecutable("gmake", jobs)

@@ -790,6 +729,21 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]
     return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)

+def get_rpaths(pkg):
+    """Get a list of all the rpaths for a package."""
+    rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
+    deps = get_rpath_deps(pkg)
+    rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))
+    rpaths.extend(d.prefix.lib64 for d in deps if os.path.isdir(d.prefix.lib64))
+    # Second module is our compiler mod name. We use that to get rpaths from
+    # module show output.
+    if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
+        mod_rpath = path_from_modules([pkg.compiler.modules[1]])
+        if mod_rpath:
+            rpaths.append(mod_rpath)
+    return list(dedupe(filter_system_paths(rpaths)))
 def load_external_modules(pkg):
     """Traverse a package's spec DAG and load any external modules.

@@ -831,6 +785,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
     # Platform specific setup goes before package specific setup. This is for setting
     # defaults like MACOSX_DEPLOYMENT_TARGET on macOS.
     platform = spack.platforms.by_name(pkg.spec.architecture.platform)
+    target = platform.target(pkg.spec.architecture.target)
     platform.setup_platform_environment(pkg, env_mods)

     tty.debug("setup_package: grabbing modifications from dependencies")

@@ -855,8 +810,15 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
         for mod in pkg.compiler.modules:
             load_module(mod)

+    if target and target.module_name:
+        load_module(target.module_name)
+
     load_external_modules(pkg)

+    implicit_rpaths = pkg.compiler.implicit_rpaths()
+    if implicit_rpaths:
+        env_mods.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))
+
     # Make sure nothing's strange about the Spack environment.
     validate(env_mods, tty.warn)
     env_mods.apply_modifications()
@@ -1046,12 +1008,6 @@ def set_all_package_py_globals(self):
                     # This includes runtime dependencies, also runtime deps of direct build deps.
                     set_package_py_globals(pkg, context=Context.RUN)

-        # Looping over the set of packages a second time
-        # ensures all globals are loaded into the module space prior to
-        # any package setup. This guarantees package setup methods have
-        # access to expected module level definitions such as "spack_cc"
-        for dspec, flag in chain(self.external, self.nonexternal):
-            pkg = dspec.package
             for spec in dspec.dependents():
                 # Note: some specs have dependents that are unreachable from the root, so avoid
                 # setting globals for those.

@@ -1061,15 +1017,6 @@ def set_all_package_py_globals(self):
                     pkg.setup_dependent_package(dependent_module, spec)
                     dependent_module.propagate_changes_to_mro()

-        pkg = self.specs[0].package
-        if self.context == Context.BUILD:
-            module = ModuleChangePropagator(pkg)
-            # std_cmake_args is not sufficiently static to be defined
-            # in set_package_py_globals and is deprecated so its handled
-            # here as a special case
-            module.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
-            module.propagate_changes_to_mro()
-
     def get_env_modifications(self) -> EnvironmentModifications:
         """Returns the environment variable modifications for the given input specs and context.
         Environment modifications include:
@@ -1139,52 +1086,35 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
             env.prepend_path("PATH", bin_dir)

+def get_cmake_prefix_path(pkg):
+    # Note that unlike modifications_from_dependencies, this does not include
+    # any edits to CMAKE_PREFIX_PATH defined in custom
+    # setup_dependent_build_environment implementations of dependency packages
+    build_deps = set(pkg.spec.dependencies(deptype=("build", "test")))
+    link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
+    build_link_deps = build_deps | link_deps
+    spack_built = []
+    externals = []
+    # modifications_from_dependencies updates CMAKE_PREFIX_PATH by first
+    # prepending all externals and then all non-externals
+    for dspec in pkg.spec.traverse(root=False, order="post"):
+        if dspec in build_link_deps:
+            if dspec.external:
+                externals.insert(0, dspec)
+            else:
+                spack_built.insert(0, dspec)
+
+    ordered_build_link_deps = spack_built + externals
+    cmake_prefix_path_entries = []
+    for spec in ordered_build_link_deps:
+        cmake_prefix_path_entries.extend(spec.package.cmake_prefix_paths)
+
+    return filter_system_paths(cmake_prefix_path_entries)
+
 def _setup_pkg_and_run(
-    serialized_pkg: "spack.subprocess_context.PackageInstallContext",
-    function: Callable,
-    kwargs: Dict,
-    write_pipe: multiprocessing.connection.Connection,
-    input_multiprocess_fd: Optional[MultiProcessFd],
-    jsfd1: Optional[MultiProcessFd],
-    jsfd2: Optional[MultiProcessFd],
+    serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
 ):
-    """Main entry point in the child process for Spack builds.
-
-    ``_setup_pkg_and_run`` is called by the child process created in
-    ``start_build_process()``, and its main job is to run ``function()`` on behalf of
-    some Spack installation (see :ref:`spack.installer.PackageInstaller._install_task`).
-
-    The child process is passed a ``write_pipe``, on which it's expected to send one of
-    the following:
-
-    * ``StopPhase``: error raised by a build process indicating it's stopping at a
-      particular build phase.
-
-    * ``BaseException``: any exception raised by a child build process, which will be
-      wrapped in ``ChildError`` (which adds a bunch of debug info and log context) and
-      raised in the parent.
-
-    * The return value of ``function()``, which can be anything (except an exception).
-      This is returned to the caller.
-
-    Note: ``jsfd1`` and ``jsfd2`` are passed solely to ensure that the child process
-    does not close these file descriptors. Some ``multiprocessing`` backends will close
-    them automatically in the child if they are not passed at process creation time.
-
-    Arguments:
-        serialized_pkg: Spack package install context object (serialized form of the
-            package that we'll build in the child process).
-        function: function to call in the child process; serialized_pkg is passed to
-            this as the first argument.
-        kwargs: additional keyword arguments to pass to ``function()``.
-        write_pipe: multiprocessing ``Connection`` to the parent process, to which the
-            child *must* send a result (or an error) back to parent on.
-        input_multiprocess_fd: stdin from the parent (not passed currently on Windows)
-        jsfd1: gmake Jobserver file descriptor 1.
-        jsfd2: gmake Jobserver file descriptor 2.
-    """
     context: str = kwargs.get("context", "build")

     try:

@@ -1206,18 +1136,17 @@ def _setup_pkg_and_run(
         return_value = function(pkg, kwargs)
         write_pipe.send(return_value)

-    except spack.error.StopPhase as e:
+    except StopPhase as e:
         # Do not create a full ChildError from this, it's not an error
         # it's a control statement.
         write_pipe.send(e)
-    except BaseException as e:
+    except BaseException:
         # catch ANYTHING that goes wrong in the child process
+        exc_type, exc, tb = sys.exc_info()

         # Need to unwind the traceback in the child because traceback
         # objects can't be sent to the parent.
-        exc_type = type(e)
-        tb = e.__traceback__
-        tb_string = "".join(traceback.format_exception(exc_type, e, tb))
+        tb_string = traceback.format_exc()

         # build up some context from the offending package so we can
         # show that, too.

@@ -1234,8 +1163,8 @@ def _setup_pkg_and_run(
         elif context == "test":
             logfile = os.path.join(pkg.test_suite.stage, pkg.test_suite.test_log_name(pkg.spec))

-        error_msg = str(e)
-        if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
+        error_msg = str(exc)
+        if isinstance(exc, (spack.multimethod.NoSuchMethodError, AttributeError)):
             process = "test the installation" if context == "test" else "build from sources"
             error_msg = (
                 "The '{}' package cannot find an attribute while trying to {}. "

@@ -1245,7 +1174,7 @@ def _setup_pkg_and_run(
                 "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
             ).format(pkg.name, process, context)
             error_msg = colorize("@*R{{{}}}".format(error_msg))
-            error_msg = "{}\n\n{}".format(str(e), error_msg)
+            error_msg = "{}\n\n{}".format(str(exc), error_msg)

         # make a pickleable exception to send to parent.
         msg = "%s: %s" % (exc_type.__name__, error_msg)
@@ -1368,7 +1297,7 @@ def exitcode_msg(p):
     p.join()

     # If returns a StopPhase, raise it
-    if isinstance(child_result, spack.error.StopPhase):
+    if isinstance(child_result, StopPhase):
         # do not print
         raise child_result

@@ -1544,7 +1473,7 @@ def long_message(self):
             out.write("  {0}\n".format(self.log_name))

         # Also output the test log path IF it exists
-        if self.context != "test" and have_log:
+        if self.context != "test":
             test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
             if os.path.isfile(test_log):
                 out.write("\nSee test log for details:\n")

@@ -1577,6 +1506,17 @@ def _make_child_error(msg, module, name, traceback, log, log_type, context):
     return ChildError(msg, module, name, traceback, log, log_type, context)

+class StopPhase(spack.error.SpackError):
+    """Pickle-able exception to control stopped builds."""
+
+    def __reduce__(self):
+        return _make_stop_phase, (self.message, self.long_message)
+
+
+def _make_stop_phase(msg, long_msg):
+    return StopPhase(msg, long_msg)
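A quick demonstration of what the __reduce__ above buys: pickle rebuilds the exception through the returned callable/arguments pair, so both message fields survive the trip across the parent/child pipe. The class below is a standalone stand-in for illustration, not Spack's actual SpackError hierarchy:

import pickle


class StopPhase(Exception):
    def __init__(self, msg, long_msg=None):
        super().__init__(msg)
        self.message = msg
        self.long_message = long_msg

    def __reduce__(self):
        # Tell pickle exactly how to reconstruct this object.
        return _make_stop_phase, (self.message, self.long_message)


def _make_stop_phase(msg, long_msg):
    return StopPhase(msg, long_msg)


err = pickle.loads(pickle.dumps(StopPhase("stop", "after configure")))
assert (err.message, err.long_message) == ("stop", "after configure")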
 def write_log_summary(out, log_type, log, last=None):
     errors, warnings = parse_log_events(log)
     nerr = len(errors)

@@ -1610,21 +1550,21 @@ class ModuleChangePropagator:
     _PROTECTED_NAMES = ("package", "current_module", "modules_in_mro", "_set_attributes")

-    def __init__(self, package: spack.package_base.PackageBase) -> None:
+    def __init__(self, package):
         self._set_self_attributes("package", package)
         self._set_self_attributes("current_module", package.module)

         #: Modules for the classes in the MRO up to PackageBase
         modules_in_mro = []
-        for cls in package.__class__.__mro__:
-            module = getattr(cls, "module", None)
-            if module is None or module is spack.package_base:
-                break
-            if module is self.current_module:
+        for cls in inspect.getmro(type(package)):
+            module = cls.module
+            if module == self.current_module:
                 continue
+            if module == spack.package_base:
+                break
             modules_in_mro.append(module)
         self._set_self_attributes("modules_in_mro", modules_in_mro)
         self._set_self_attributes("_set_attributes", {})


@@ -8,7 +8,7 @@
 import llnl.util.lang

 import spack.builder
-import spack.error
+import spack.installer
 import spack.relocate
 import spack.spec
 import spack.store
@@ -34,7 +34,7 @@ def check_paths(path_list, filetype, predicate):
             if not predicate(abs_path):
                 msg = "Install failed for {0}. No such {1} in prefix: {2}"
                 msg = msg.format(pkg.name, filetype, path)
-                raise spack.error.InstallError(msg)
+                raise spack.installer.InstallError(msg)

     check_paths(pkg.sanity_check_is_file, "file", os.path.isfile)
     check_paths(pkg.sanity_check_is_dir, "directory", os.path.isdir)

@@ -42,7 +42,7 @@ def check_paths(path_list, filetype, predicate):
     ignore_file = llnl.util.lang.match_predicate(spack.store.STORE.layout.hidden_file_regexes)
     if all(map(ignore_file, os.listdir(pkg.prefix))):
         msg = "Install failed for {0}. Nothing was installed!"
-        raise spack.error.InstallError(msg.format(pkg.name))
+        raise spack.installer.InstallError(msg.format(pkg.name))

 def apply_macos_rpath_fixups(builder: spack.builder.Builder):


@@ -2,11 +2,10 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import os
-
 import llnl.util.filesystem as fs

 import spack.directives
+import spack.package_base
 import spack.util.executable

 from .autotools import AutotoolsBuilder, AutotoolsPackage

@@ -47,12 +46,18 @@ class AspellDictPackage(AutotoolsPackage):
     #: Override the default autotools builder
     AutotoolsBuilder = AspellBuilder

-    def patch(self):
+    def view_destination(self, view):
         aspell_spec = self.spec["aspell"]
+        if view.get_projection_for_spec(aspell_spec) != aspell_spec.prefix:
+            raise spack.package_base.ExtensionError(
+                "aspell does not support non-global extensions"
+            )
         aspell = aspell_spec.command
-        dictdir = aspell("dump", "config", "dict-dir", output=str).strip()
-        datadir = aspell("dump", "config", "data-dir", output=str).strip()
-        dictdir = os.path.relpath(dictdir, aspell_spec.prefix)
-        datadir = os.path.relpath(datadir, aspell_spec.prefix)
-        fs.filter_file(r"^dictdir=.*$", f"dictdir=/{dictdir}", "configure")
-        fs.filter_file(r"^datadir=.*$", f"datadir=/{datadir}", "configure")
+        return aspell("dump", "config", "dict-dir", output=str).strip()
+
+    def view_source(self):
+        return self.prefix.lib
+
+    def patch(self):
+        fs.filter_file(r"^dictdir=.*$", "dictdir=/lib", "configure")
+        fs.filter_file(r"^datadir=.*$", "datadir=/lib", "configure")


@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
 import os
 import os.path
 import stat

@@ -13,7 +14,6 @@
 import spack.build_environment
 import spack.builder
-import spack.error
 import spack.package_base
 from spack.directives import build_system, conflicts, depends_on
 from spack.multimethod import when

@@ -249,7 +249,7 @@ def runs_ok(script_abs_path):
         # An external gnuconfig may not not have a prefix.
         if gnuconfig_dir is None:
-            raise spack.error.InstallError(
+            raise spack.build_environment.InstallError(
                 "Spack could not find substitutes for GNU config files because no "
                 "prefix is available for the `gnuconfig` package. Make sure you set a "
                 "prefix path instead of modules for external `gnuconfig`."

@@ -269,7 +269,7 @@ def runs_ok(script_abs_path):
             msg += (
                 " or the `gnuconfig` package prefix is misconfigured as" " an external package"
             )
-            raise spack.error.InstallError(msg)
+            raise spack.build_environment.InstallError(msg)

         # Filter working substitutes
         candidates = [f for f in candidates if runs_ok(f)]

@@ -294,7 +294,9 @@ def runs_ok(script_abs_path):
            and set the prefix to the directory containing the `config.guess` and
            `config.sub` files.
            """
-            raise spack.error.InstallError(msg.format(", ".join(to_be_found), self.name))
+            raise spack.build_environment.InstallError(
+                msg.format(", ".join(to_be_found), self.name)
+            )

         # Copy the good files over the bad ones
         for abs_path in to_be_patched:

@@ -547,12 +549,13 @@ def autoreconf(self, pkg, spec, prefix):
         tty.warn("* a custom AUTORECONF phase in the package *")
         tty.warn("*********************************************************")
         with fs.working_dir(self.configure_directory):
+            m = inspect.getmodule(self.pkg)
             # This line is what is needed most of the time
             # --install, --verbose, --force
             autoreconf_args = ["-ivf"]
             autoreconf_args += self.autoreconf_search_path_args
             autoreconf_args += self.autoreconf_extra_args
-            self.pkg.module.autoreconf(*autoreconf_args)
+            m.autoreconf(*autoreconf_args)

     @property
     def autoreconf_search_path_args(self):

@@ -576,9 +579,7 @@ def set_configure_or_die(self):
             raise RuntimeError(msg.format(self.configure_directory))

         # Monkey-patch the configure script in the corresponding module
-        globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
-        globals_for_pkg.configure = Executable(self.configure_abs_path)
-        globals_for_pkg.propagate_changes_to_mro()
+        inspect.getmodule(self.pkg).configure = Executable(self.configure_abs_path)

     def configure_args(self):
         """Return the list of all the arguments that must be passed to configure,

@@ -595,7 +596,7 @@ def configure(self, pkg, spec, prefix):
         options += self.configure_args()

         with fs.working_dir(self.build_directory, create=True):
-            pkg.module.configure(*options)
+            inspect.getmodule(self.pkg).configure(*options)

     def build(self, pkg, spec, prefix):
         """Run "make" on the build targets specified by the builder."""

@@ -603,12 +604,12 @@ def build(self, pkg, spec, prefix):
         params = ["V=1"]
         params += self.build_targets
         with fs.working_dir(self.build_directory):
-            pkg.module.make(*params)
+            inspect.getmodule(self.pkg).make(*params)

     def install(self, pkg, spec, prefix):
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
-            pkg.module.make(*self.install_targets)
+            inspect.getmodule(self.pkg).make(*self.install_targets)

 spack.builder.run_after("build")(execute_build_time_tests)
@@ -687,8 +688,9 @@ def _activate_or_not(
         variant = variant or name

-        # Defensively look that the name passed as argument is among variants
-        if not self.pkg.has_variant(variant):
+        # Defensively look that the name passed as argument is among
+        # variants
+        if variant not in self.pkg.variants:
             msg = '"{0}" is not a variant of "{1}"'
             raise KeyError(msg.format(variant, self.pkg.name))

@@ -697,19 +699,27 @@ def _activate_or_not(
         # Create a list of pairs. Each pair includes a configuration
         # option and whether or not that option is activated
-        vdef = self.pkg.get_variant(variant)
-        if set(vdef.values) == set((True, False)):
+        variant_desc, _ = self.pkg.variants[variant]
+        if set(variant_desc.values) == set((True, False)):
             # BoolValuedVariant carry information about a single option.
             # Nonetheless, for uniformity of treatment we'll package them
             # in an iterable of one element.
-            options = [(name, f"+{variant}" in spec)]
+            condition = "+{name}".format(name=variant)
+            options = [(name, condition in spec)]
         else:
+            condition = "{variant}={value}"
             # "feature_values" is used to track values which correspond to
             # features which can be enabled or disabled as understood by the
             # package's build system. It excludes values which have special
             # meanings and do not correspond to features (e.g. "none")
-            feature_values = getattr(vdef.values, "feature_values", None) or vdef.values
-            options = [(value, f"{variant}={value}" in spec) for value in feature_values]
+            feature_values = (
+                getattr(variant_desc.values, "feature_values", None) or variant_desc.values
+            )
+
+            options = [
+                (value, condition.format(variant=variant, value=value) in spec)
+                for value in feature_values
+            ]

         # For each allowed value in the list of values
         for option_value, activated in options:
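A condensed model of the pairing logic above: a boolean variant produces a single (name, active) pair, while a multi-valued variant produces one pair per allowed value. Real Spack tests membership against a Spec object; here a set of spec tokens stands in for that, and all names are invented:

def option_pairs(variant, values, spec_tokens):
    if set(values) == {True, False}:
        # Boolean variant: one option, active iff "+<variant>" is in the spec.
        return [(variant, f"+{variant}" in spec_tokens)]
    # Multi-valued variant: one option per value.
    return [(v, f"{variant}={v}" in spec_tokens) for v in values]


print(option_pairs("shared", (True, False), {"+shared"}))
# [('shared', True)]
print(option_pairs("io", ("hdf5", "netcdf"), {"io=hdf5"}))
# [('hdf5', True), ('netcdf', False)]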


@@ -10,6 +10,7 @@
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty

+import spack.build_environment
 import spack.builder

 from .cmake import CMakeBuilder, CMakePackage

@@ -88,7 +89,7 @@ def define_cmake_cache_from_variant(self, cmake_var, variant=None, comment=""):
         if variant is None:
             variant = cmake_var.lower()

-        if not self.pkg.has_variant(variant):
+        if variant not in self.pkg.variants:
             raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))

         if variant not in self.pkg.spec.variants:

@@ -296,6 +297,18 @@ def initconfig_hardware_entries(self):
     def std_initconfig_entries(self):
         cmake_prefix_path_env = os.environ["CMAKE_PREFIX_PATH"]
         cmake_prefix_path = cmake_prefix_path_env.replace(os.pathsep, ";")
+        cmake_rpaths_env = spack.build_environment.get_rpaths(self.pkg)
+        cmake_rpaths_path = ";".join(cmake_rpaths_env)
+        complete_rpath_list = cmake_rpaths_path
+        if "SPACK_COMPILER_EXTRA_RPATHS" in os.environ:
+            spack_extra_rpaths_env = os.environ["SPACK_COMPILER_EXTRA_RPATHS"]
+            spack_extra_rpaths_path = spack_extra_rpaths_env.replace(os.pathsep, ";")
+            complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_extra_rpaths_path)
+
+        if "SPACK_COMPILER_IMPLICIT_RPATHS" in os.environ:
+            spack_implicit_rpaths_env = os.environ["SPACK_COMPILER_IMPLICIT_RPATHS"]
+            spack_implicit_rpaths_path = spack_implicit_rpaths_env.replace(os.pathsep, ";")
+            complete_rpath_list = "{0};{1}".format(complete_rpath_list, spack_implicit_rpaths_path)
+
         return [
             "#------------------{0}".format("-" * 60),

@@ -305,6 +318,8 @@ def std_initconfig_entries(self):
             "#------------------{0}\n".format("-" * 60),
             cmake_cache_string("CMAKE_PREFIX_PATH", cmake_prefix_path),
             cmake_cache_string("CMAKE_INSTALL_RPATH_USE_LINK_PATH", "ON"),
+            cmake_cache_string("CMAKE_BUILD_RPATH", complete_rpath_list),
+            cmake_cache_string("CMAKE_INSTALL_RPATH", complete_rpath_list),
             self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
         ]
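The entries appended above render as plain CMake cache sets. A hedged sketch of the likely output shape, assuming cmake_cache_string(name, value, comment="") produces a set(... CACHE STRING ...) line; the real helper is defined elsewhere in this file and may differ in detail:

def cmake_cache_string(name, value, comment=""):
    # Emit one line of a CMake initial-cache (-C) file.
    return 'set({0} "{1}" CACHE STRING "{2}")\n'.format(name, value, comment)


print(cmake_cache_string("CMAKE_BUILD_RPATH", "/opt/view/lib;/opt/view/lib64"))
# set(CMAKE_BUILD_RPATH "/opt/view/lib;/opt/view/lib64" CACHE STRING "")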


@@ -3,6 +3,8 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
+
 import llnl.util.filesystem as fs

 import spack.builder

@@ -70,7 +72,9 @@ def check_args(self):
     def build(self, pkg, spec, prefix):
         """Runs ``cargo install`` in the source directory"""
         with fs.working_dir(self.build_directory):
-            pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)
+            inspect.getmodule(pkg).cargo(
+                "install", "--root", "out", "--path", ".", *self.build_args
+            )

     def install(self, pkg, spec, prefix):
         """Copy build files into package prefix."""

@@ -82,4 +86,4 @@ def install(self, pkg, spec, prefix):
     def check(self):
         """Run "cargo test"."""
         with fs.working_dir(self.build_directory):
-            self.pkg.module.cargo("test", *self.check_args)
+            inspect.getmodule(self.pkg).cargo("test", *self.check_args)


@@ -3,24 +3,22 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import collections.abc
+import inspect
 import os
 import pathlib
 import platform
 import re
 import sys
-from itertools import chain
-from typing import List, Optional, Set, Tuple
+from typing import List, Optional, Tuple

 import llnl.util.filesystem as fs
-from llnl.util.lang import stable_partition

+import spack.build_environment
 import spack.builder
 import spack.deptypes as dt
-import spack.error
 import spack.package_base
 from spack.directives import build_system, conflicts, depends_on, variant
 from spack.multimethod import when
-from spack.util.environment import filter_system_paths

 from ._checks import BaseBuilder, execute_build_time_tests

@@ -110,11 +108,6 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
     if _supports_compilation_databases(pkg):
         args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))

-    # Enable MACOSX_RPATH by default when cmake_minimum_required < 3
-    # https://cmake.org/cmake/help/latest/policy/CMP0042.html
-    if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
-        args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))

 def generator(*names: str, default: Optional[str] = None):
     """The build system generator to use.

@@ -148,30 +141,11 @@ def _values(x):
         default=default,
         values=_values,
         description="the build system generator to use",
-        when="build_system=cmake",
     )
     for x in not_used:
         conflicts(f"generator={x}")

-def get_cmake_prefix_path(pkg: spack.package_base.PackageBase) -> List[str]:
-    """Obtain the CMAKE_PREFIX_PATH entries for a package, based on the cmake_prefix_path package
-    attribute of direct build/test and transitive link dependencies."""
-    # Add direct build/test deps
-    selected: Set[str] = {s.dag_hash() for s in pkg.spec.dependencies(deptype=dt.BUILD | dt.TEST)}
-    # Add transitive link deps
-    selected.update(s.dag_hash() for s in pkg.spec.traverse(root=False, deptype=dt.LINK))
-    # Separate out externals so they do not shadow Spack prefixes
-    externals, spack_built = stable_partition(
-        (s for s in pkg.spec.traverse(root=False, order="topo") if s.dag_hash() in selected),
-        lambda x: x.external,
-    )
-    return filter_system_paths(
-        path for spec in chain(spack_built, externals) for path in spec.package.cmake_prefix_paths
-    )
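What stable_partition contributes in the deleted function above, sketched with a hand-rolled equivalent (the real one lives in llnl.util.lang): it splits a sequence by a predicate while preserving relative order, which is what lets Spack-built prefixes keep precedence over externals. The dependency list is made up:

from itertools import chain


def stable_partition(items, predicate):
    # Two-bucket split that never reorders items within a bucket.
    true_items, false_items = [], []
    for item in items:
        (true_items if predicate(item) else false_items).append(item)
    return true_items, false_items


deps = [("zlib", False), ("cmake", True), ("hdf5", False)]  # (name, external?)
externals, spack_built = stable_partition(deps, lambda d: d[1])
print([d[0] for d in chain(spack_built, externals)])
# ['zlib', 'hdf5', 'cmake']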
 class CMakePackage(spack.package_base.PackageBase):
     """Specialized class for packages built using CMake

@@ -366,7 +340,7 @@ def std_args(pkg, generator=None):
             msg = "Invalid CMake generator: '{0}'\n".format(generator)
             msg += "CMakePackage currently supports the following "
             msg += "primary generators: '{0}'".format("', '".join(valid_primary_generators))
-            raise spack.error.InstallError(msg)
+            raise spack.package_base.InstallError(msg)

         try:
             build_type = pkg.spec.variants["build_type"].value

@@ -378,16 +352,6 @@ def std_args(pkg, generator=None):
             "-G",
             generator,
             define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
-            define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
-            # only include the install prefix lib dirs; rpaths for deps are added by USE_LINK_PATH
-            define(
-                "CMAKE_INSTALL_RPATH",
-                [
-                    pathlib.Path(pkg.prefix, "lib").as_posix(),
-                    pathlib.Path(pkg.prefix, "lib64").as_posix(),
-                ],
-            ),
-            define("CMAKE_PREFIX_PATH", get_cmake_prefix_path(pkg)),
             define("CMAKE_BUILD_TYPE", build_type),
         ]

@@ -402,6 +366,15 @@ def std_args(pkg, generator=None):
         _conditional_cmake_defaults(pkg, args)
         _maybe_set_python_hints(pkg, args)

+        # Set up CMake rpath
+        args.extend(
+            [
+                define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
+                define("CMAKE_INSTALL_RPATH", spack.build_environment.get_rpaths(pkg)),
+                define("CMAKE_PREFIX_PATH", spack.build_environment.get_cmake_prefix_path(pkg)),
+            ]
+        )
+
         return args

     @staticmethod

@@ -527,7 +500,7 @@ def define_from_variant(self, cmake_var, variant=None):
         if variant is None:
             variant = cmake_var.lower()

-        if not self.pkg.has_variant(variant):
+        if variant not in self.pkg.variants:
             raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))

         if variant not in self.pkg.spec.variants:

@@ -562,35 +535,28 @@ def cmake_args(self):
     def cmake(self, pkg, spec, prefix):
         """Runs ``cmake`` in the build directory"""
-        # skip cmake phase if it is an incremental develop build
-        if spec.is_develop and os.path.isfile(
-            os.path.join(self.build_directory, "CMakeCache.txt")
-        ):
-            return

         options = self.std_cmake_args
         options += self.cmake_args()
         options.append(os.path.abspath(self.root_cmakelists_dir))
         with fs.working_dir(self.build_directory, create=True):
-            pkg.module.cmake(*options)
+            inspect.getmodule(self.pkg).cmake(*options)

     def build(self, pkg, spec, prefix):
         """Make the build targets"""
         with fs.working_dir(self.build_directory):
             if self.generator == "Unix Makefiles":
-                pkg.module.make(*self.build_targets)
+                inspect.getmodule(self.pkg).make(*self.build_targets)
             elif self.generator == "Ninja":
                 self.build_targets.append("-v")
-                pkg.module.ninja(*self.build_targets)
+                inspect.getmodule(self.pkg).ninja(*self.build_targets)

     def install(self, pkg, spec, prefix):
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
             if self.generator == "Unix Makefiles":
-                pkg.module.make(*self.install_targets)
+                inspect.getmodule(self.pkg).make(*self.install_targets)
             elif self.generator == "Ninja":
-                pkg.module.ninja(*self.install_targets)
+                inspect.getmodule(self.pkg).ninja(*self.install_targets)

 spack.builder.run_after("build")(execute_build_time_tests)


@@ -14,7 +14,6 @@
 import spack.compiler
 import spack.package_base
-import spack.util.executable

 # Local "type" for type hints
 Path = Union[str, pathlib.Path]


@@ -3,9 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import re
-from typing import Iterable, List
-
 import spack.variant
 from spack.directives import conflicts, depends_on, variant
 from spack.multimethod import when

@@ -47,7 +44,6 @@ class CudaPackage(PackageBase):
         "87",
         "89",
         "90",
-        "90a",
     )

     # FIXME: keep cuda and cuda_arch separate to make usage easier until

@@ -74,27 +70,6 @@ def cuda_flags(arch_list):
             for s in arch_list
         ]

-    @staticmethod
-    def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
-        """Adds a decimal place to each CUDA arch.
-
-        >>> compute_capabilities(['90', '90a'])
-        ['9.0', '9.0a']
-
-        Args:
-            arch_list: A list of integer strings, optionally followed by a suffix.
-
-        Returns:
-            A list of float strings, optionally followed by a suffix
-        """
-        pattern = re.compile(r"(\d+)")
-        capabilities = []
-        for arch in arch_list:
-            _, number, letter = re.split(pattern, arch)
-            number = "{0:.1f}".format(float(number) / 10.0)
-            capabilities.append(number + letter)
-        return capabilities
-
     depends_on("cuda", when="+cuda")

     # CUDA version vs Architecture

@@ -110,8 +85,8 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     depends_on("cuda@5.0:10.2", when="cuda_arch=30")
     depends_on("cuda@5.0:10.2", when="cuda_arch=32")
-    depends_on("cuda@5.0:11.8", when="cuda_arch=35")
-    depends_on("cuda@6.5:11.8", when="cuda_arch=37")
+    depends_on("cuda@5.0:", when="cuda_arch=35")
+    depends_on("cuda@6.5:", when="cuda_arch=37")

     depends_on("cuda@6.0:", when="cuda_arch=50")
     depends_on("cuda@6.5:", when="cuda_arch=52")

@@ -131,7 +106,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     depends_on("cuda@11.8:", when="cuda_arch=89")
     depends_on("cuda@12.0:", when="cuda_arch=90")
-    depends_on("cuda@12.0:", when="cuda_arch=90a")

     # From the NVIDIA install guide we know of conflicts for particular
     # platforms (linux, darwin), architectures (x86, powerpc) and compilers

@@ -150,7 +124,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     # minimum supported versions
     conflicts("%gcc@:4", when="+cuda ^cuda@11.0:")
     conflicts("%gcc@:5", when="+cuda ^cuda@11.4:")
-    conflicts("%clang@:6", when="+cuda ^cuda@12.2:")

     # maximum supported version
     # NOTE:

@@ -163,7 +136,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
     conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
     conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
-    conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
+    conflicts("%gcc@14:", when="+cuda ^cuda@:12.5")
     conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
     conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
     conflicts("%clang@14:", when="+cuda ^cuda@:11.7")

@@ -171,7 +144,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
     conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
     conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
-    conflicts("%clang@19:", when="+cuda ^cuda@:12.6")

     # https://gist.github.com/ax3l/9489132#gistcomment-3860114
     conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")

@@ -239,21 +211,12 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%intel@19.0:", when="+cuda ^cuda@:10.0")
     conflicts("%intel@19.1:", when="+cuda ^cuda@:10.1")
     conflicts("%intel@19.2:", when="+cuda ^cuda@:11.1.0")
-    conflicts("%intel@2021:", when="+cuda ^cuda@:11.4.0")
-
-    # ARM
-    # https://github.com/spack/spack/pull/39666#issuecomment-2377609263
-    # Might need to be expanded to other gcc versions
-    conflicts("%gcc@13.2.0", when="+cuda ^cuda@:12.4 target=aarch64:")

     # XL is mostly relevant for ppc64le Linux
     conflicts("%xl@:12,14:", when="+cuda ^cuda@:9.1")
     conflicts("%xl@:12,14:15,17:", when="+cuda ^cuda@9.2")
     conflicts("%xl@:12,17:", when="+cuda ^cuda@:11.1.0")
-
-    # PowerPC.
-    conflicts("target=ppc64le", when="+cuda ^cuda@12.5:")

     # Darwin.
     # TODO: add missing conflicts for %apple-clang cuda@:10
-    conflicts("platform=darwin", when="+cuda ^cuda@11.0.2:")
+    conflicts("platform=darwin", when="+cuda ^cuda@11.0.2: ")


@@ -3,6 +3,8 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
+
 import llnl.util.filesystem as fs

 import spack.builder

@@ -44,27 +46,16 @@ class GoBuilder(BaseBuilder):
     +-----------------------------------------------+--------------------+
     | **Method**                                    | **Purpose**        |
     +===============================================+====================+
-    | :py:attr:`~.GoBuilder.build_args`             | Specify arguments  |
+    | :py:meth:`~.GoBuilder.build_args`             | Specify arguments  |
     |                                               | to ``go build``    |
     +-----------------------------------------------+--------------------+
-    | :py:attr:`~.GoBuilder.check_args`             | Specify arguments  |
+    | :py:meth:`~.GoBuilder.check_args`             | Specify arguments  |
     |                                               | to ``go test``     |
     +-----------------------------------------------+--------------------+
     """

     phases = ("build", "install")

-    #: Names associated with package methods in the old build-system format
-    legacy_methods = ("check", "installcheck")
-
-    #: Names associated with package attributes in the old build-system format
-    legacy_attributes = (
-        "build_args",
-        "check_args",
-        "build_directory",
-        "install_time_test_callbacks",
-    )
-
     #: Callback names for install-time test
     install_time_test_callbacks = ["check"]

@@ -81,7 +72,7 @@ def build_directory(self):
     def build_args(self):
         """Arguments for ``go build``."""
         # Pass ldflags -s = --strip-all and -w = --no-warnings by default
-        return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
+        return ["-ldflags", "-s -w", "-o", f"{self.pkg.name}"]

     @property
     def check_args(self):

@@ -91,7 +82,7 @@ def check_args(self):
     def build(self, pkg, spec, prefix):
         """Runs ``go build`` in the source directory"""
         with fs.working_dir(self.build_directory):
-            pkg.module.go("build", *self.build_args)
+            inspect.getmodule(pkg).go("build", *self.build_args)

     def install(self, pkg, spec, prefix):
         """Install built binaries into prefix bin."""

@@ -104,4 +95,4 @@ def install(self, pkg, spec, prefix):
     def check(self):
         """Run ``go test .`` in the source directory"""
         with fs.working_dir(self.build_directory):
-            self.pkg.module.go("test", *self.check_args)
+            inspect.getmodule(self.pkg).go("test", *self.check_args)


@@ -22,10 +22,9 @@
     install,
 )

-import spack.builder
 import spack.error
 from spack.build_environment import dso_suffix
-from spack.error import InstallError
+from spack.package_base import InstallError
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
 from spack.util.prefix import Prefix


@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
 from typing import List

 import llnl.util.filesystem as fs

@@ -102,12 +103,12 @@ def edit(self, pkg, spec, prefix):
     def build(self, pkg, spec, prefix):
         """Run "make" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
-            pkg.module.make(*self.build_targets)
+            inspect.getmodule(self.pkg).make(*self.build_targets)

     def install(self, pkg, spec, prefix):
         """Run "make" on the install targets specified by the builder."""
         with fs.working_dir(self.build_directory):
-            pkg.module.make(*self.install_targets)
+            inspect.getmodule(self.pkg).make(*self.install_targets)

 spack.builder.run_after("build")(execute_build_time_tests)


@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
 import os
 from typing import List
@@ -194,19 +195,19 @@ def meson(self, pkg, spec, prefix):
         options += self.std_meson_args
         options += self.meson_args()
         with fs.working_dir(self.build_directory, create=True):
-            pkg.module.meson(*options)
+            inspect.getmodule(self.pkg).meson(*options)

     def build(self, pkg, spec, prefix):
         """Make the build targets"""
         options = ["-v"]
         options += self.build_targets
         with fs.working_dir(self.build_directory):
-            pkg.module.ninja(*options)
+            inspect.getmodule(self.pkg).ninja(*options)

     def install(self, pkg, spec, prefix):
         """Make the install targets"""
         with fs.working_dir(self.build_directory):
-            pkg.module.ninja(*self.install_targets)
+            inspect.getmodule(self.pkg).ninja(*self.install_targets)

     spack.builder.run_after("build")(execute_build_time_tests)
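The `meson_args()` hook whose result is appended after `std_meson_args` above is the usual customization point; a minimal sketch of a package overriding it (the option shown is illustrative):

    from spack.package import *

    class Demo(MesonPackage):
        def meson_args(self):
            # extra options appended after the standard prefix/buildtype set
            return ["-Ddefault_library=shared"]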

View File

@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
 from typing import List  # novm

 import llnl.util.filesystem as fs
@@ -103,7 +104,7 @@ def msbuild_install_args(self):
     def build(self, pkg, spec, prefix):
         """Run "msbuild" on the build targets specified by the builder."""
         with fs.working_dir(self.build_directory):
-            pkg.module.msbuild(
+            inspect.getmodule(self.pkg).msbuild(
                 *self.std_msbuild_args,
                 *self.msbuild_args(),
                 self.define_targets(*self.build_targets),
@@ -113,6 +114,6 @@ def install(self, pkg, spec, prefix):
         """Run "msbuild" on the install targets specified by the builder.

         This is INSTALL by default"""
         with fs.working_dir(self.build_directory):
-            pkg.module.msbuild(
+            inspect.getmodule(self.pkg).msbuild(
                 *self.msbuild_install_args(), self.define_targets(*self.install_targets)
             )

View File

@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
 from typing import List  # novm

 import llnl.util.filesystem as fs
@@ -131,7 +132,9 @@ def build(self, pkg, spec, prefix):
         if self.makefile_name:
             opts.append("/F{}".format(self.makefile_name))
         with fs.working_dir(self.build_directory):
-            pkg.module.nmake(*opts, *self.build_targets, ignore_quotes=self.ignore_quotes)
+            inspect.getmodule(self.pkg).nmake(
+                *opts, *self.build_targets, ignore_quotes=self.ignore_quotes
+            )

     def install(self, pkg, spec, prefix):
         """Run "nmake" on the install targets specified by the builder.
@@ -143,4 +146,6 @@ def install(self, pkg, spec, prefix):
             opts.append("/F{}".format(self.makefile_name))
         opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
         with fs.working_dir(self.build_directory):
-            pkg.module.nmake(*opts, *self.install_targets, ignore_quotes=self.ignore_quotes)
+            inspect.getmodule(self.pkg).nmake(
+                *opts, *self.install_targets, ignore_quotes=self.ignore_quotes
+            )

View File

@@ -2,6 +2,8 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
+
 import spack.builder
 import spack.package_base
 from spack.directives import build_system, extends
@@ -45,7 +47,7 @@ class OctaveBuilder(BaseBuilder):
     def install(self, pkg, spec, prefix):
         """Install the package from the archive file"""
-        pkg.module.octave(
+        inspect.getmodule(self.pkg).octave(
             "--quiet",
             "--norc",
             "--built-in-docstrings-file=/dev/null",

View File

@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Common utilities for managing intel oneapi packages."""
+import getpass
 import os
 import platform
 import shutil
@@ -12,10 +13,9 @@
 from llnl.util.filesystem import HeaderList, LibraryList, find_libraries, join_path, mkdirp
 from llnl.util.link_tree import LinkTree

-import spack.util.path
 from spack.build_environment import dso_suffix
 from spack.directives import conflicts, license, redistribute, variant
-from spack.error import InstallError
+from spack.package_base import InstallError
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
@@ -99,7 +99,7 @@ def install_component(self, installer_path):
         # with other install depends on the userid. For root, we
         # delete the installercache before and after install. For
         # non root we redefine the HOME environment variable.
-        if spack.util.path.get_user() == "root":
+        if getpass.getuser() == "root":
             shutil.rmtree("/var/intel/installercache", ignore_errors=True)

         bash = Executable("bash")
@@ -122,7 +122,7 @@ def install_component(self, installer_path):
             self.prefix,
         )

-        if spack.util.path.get_user() == "root":
+        if getpass.getuser() == "root":
             shutil.rmtree("/var/intel/installercache", ignore_errors=True)

         # Some installers have a bug and do not return an error code when failing
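The comment in this hunk explains the userid-dependent installer-cache workaround; distilled into a standalone helper it reads roughly as follows (the function name and `tmp_home` parameter are hypothetical):

    import getpass
    import os
    import shutil

    def isolate_installer_cache(tmp_home):
        # Root shares /var/intel/installercache across installs, so wipe it;
        # other users instead get a private HOME for the installer to cache in.
        if getpass.getuser() == "root":
            shutil.rmtree("/var/intel/installercache", ignore_errors=True)
        else:
            os.environ["HOME"] = tmp_home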

View File

@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
 import os
 from typing import Iterable
@@ -133,7 +134,7 @@ def build_method(self):
     def build_executable(self):
         """Returns the executable method to build the perl package"""
         if self.build_method == "Makefile.PL":
-            build_executable = self.pkg.module.make
+            build_executable = inspect.getmodule(self.pkg).make
         elif self.build_method == "Build.PL":
             build_executable = Executable(os.path.join(self.pkg.stage.source_path, "Build"))
         return build_executable
@@ -157,7 +158,7 @@ def configure(self, pkg, spec, prefix):
             options = ["Build.PL", "--install_base", prefix]
         options += self.configure_args()

-        pkg.module.perl(*options)
+        inspect.getmodule(self.pkg).perl(*options)

         # It is possible that the shebang in the Build script that is created from
         # Build.PL may be too long causing the build to fail. Patching the shebang
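The `build_method` dispatch above picks both the configure invocation and the executable that drives the build; condensed into one function it looks roughly like this (a sketch, with the helper name assumed):

    import os
    from spack.util.executable import Executable

    def pick_build_executable(build_method, source_path, make):
        # ``make`` is the module-level executable Spack injects for
        # Makefile.PL flows; Build.PL flows run the generated ./Build script.
        if build_method == "Makefile.PL":
            return make
        return Executable(os.path.join(source_path, "Build"))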

View File

@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import functools
+import inspect
 import operator
 import os
 import re
@@ -16,7 +17,7 @@
 import llnl.util.filesystem as fs
 import llnl.util.lang as lang
 import llnl.util.tty as tty
-from llnl.util.filesystem import HeaderList, LibraryList, join_path
+from llnl.util.filesystem import HeaderList, LibraryList

 import spack.builder
 import spack.config
@@ -24,8 +25,6 @@
 import spack.detection
 import spack.multimethod
 import spack.package_base
-import spack.platforms
-import spack.repo
 import spack.spec
 import spack.store
 from spack.directives import build_system, depends_on, extends
@@ -121,12 +120,6 @@ def skip_modules(self) -> Iterable[str]:
         """
         return []

-    @property
-    def bindir(self) -> str:
-        """Path to Python package's bindir, bin on unix like OS's Scripts on Windows"""
-        windows = self.spec.satisfies("platform=windows")
-        return join_path(self.spec.prefix, "Scripts" if windows else "bin")
-
     def view_file_conflicts(self, view, merge_map):
         """Report all file conflicts, excepting special cases for python.
         Specifically, this does not report errors for duplicate
@@ -229,7 +222,7 @@ def test_imports(self) -> None:
         # Make sure we are importing the installed modules,
         # not the ones in the source directory
-        python = self.module.python
+        python = inspect.getmodule(self).python  # type: ignore[union-attr]

         for module in self.import_modules:
             with test_part(
                 self,
@@ -316,9 +309,9 @@ def get_external_python_for_prefix(self):
         )

         python_externals_detected = [
-            spec
-            for spec in python_externals_detection.get("python", [])
-            if spec.external_path == self.spec.external_path
+            d.spec
+            for d in python_externals_detection.get("python", [])
+            if d.prefix == self.spec.external_path
         ]
         if python_externals_detected:
             return python_externals_detected[0]
@@ -339,7 +332,7 @@ class PythonPackage(PythonExtension):
     legacy_buildsystem = "python_pip"

     #: Callback names for install-time test
-    install_time_test_callbacks = ["test_imports"]
+    install_time_test_callbacks = ["test"]

     build_system("python_pip")
@@ -429,7 +422,7 @@ class PythonPipBuilder(BaseBuilder):
     phases = ("install",)

     #: Names associated with package methods in the old build-system format
-    legacy_methods = ("test_imports",)
+    legacy_methods = ("test",)

     #: Same as legacy_methods, but the signature is different
     legacy_long_methods = ("install_options", "global_options", "config_settings")
@@ -438,7 +431,7 @@ class PythonPipBuilder(BaseBuilder):
     legacy_attributes = ("archive_files", "build_directory", "install_time_test_callbacks")

     #: Callback names for install-time test
-    install_time_test_callbacks = ["test_imports"]
+    install_time_test_callbacks = ["test"]

     @staticmethod
     def std_args(cls) -> List[str]:
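The `test_imports` change above only affects how the installed interpreter is looked up; the test itself stays a simple import loop. Stripped to its core it is roughly (a sketch; `python` is the installed interpreter wrapped as an Executable):

    def smoke_test_imports(python, import_modules):
        # each failed import raises, which fails the corresponding test part
        for module in import_modules:
            python("-c", "import {0}".format(module))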

View File

@@ -2,6 +2,8 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
+
 from llnl.util.filesystem import working_dir

 import spack.builder
@@ -64,17 +66,17 @@ def qmake_args(self):
     def qmake(self, pkg, spec, prefix):
         """Run ``qmake`` to configure the project and generate a Makefile."""
         with working_dir(self.build_directory):
-            pkg.module.qmake(*self.qmake_args())
+            inspect.getmodule(self.pkg).qmake(*self.qmake_args())

     def build(self, pkg, spec, prefix):
         """Make the build targets"""
         with working_dir(self.build_directory):
-            pkg.module.make()
+            inspect.getmodule(self.pkg).make()

     def install(self, pkg, spec, prefix):
         """Make the install targets"""
         with working_dir(self.build_directory):
-            pkg.module.make("install")
+            inspect.getmodule(self.pkg).make("install")

     def check(self):
         """Search the Makefile for a ``check:`` target and runs it if found."""

View File

@@ -2,10 +2,10 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
 from typing import Optional, Tuple

 import llnl.util.lang as lang
-from llnl.util.filesystem import mkdirp

 from spack.directives import extends
@@ -37,7 +37,6 @@ def configure_vars(self):
     def install(self, pkg, spec, prefix):
         """Installs an R package."""
-        mkdirp(pkg.module.r_lib_dir)

         config_args = self.configure_args()
         config_vars = self.configure_vars()
@@ -45,14 +44,14 @@ def install(self, pkg, spec, prefix):
         args = ["--vanilla", "CMD", "INSTALL"]

         if config_args:
-            args.append(f"--configure-args={' '.join(config_args)}")
+            args.append("--configure-args={0}".format(" ".join(config_args)))

         if config_vars:
-            args.append(f"--configure-vars={' '.join(config_vars)}")
+            args.append("--configure-vars={0}".format(" ".join(config_vars)))

-        args.extend([f"--library={pkg.module.r_lib_dir}", self.stage.source_path])
+        args.extend(["--library={0}".format(self.pkg.module.r_lib_dir), self.stage.source_path])

-        pkg.module.R(*args)
+        inspect.getmodule(self.pkg).R(*args)


 class RPackage(Package):
@@ -81,21 +80,27 @@ class RPackage(Package):
     @lang.classproperty
     def homepage(cls):
         if cls.cran:
-            return f"https://cloud.r-project.org/package={cls.cran}"
+            return "https://cloud.r-project.org/package=" + cls.cran
         elif cls.bioc:
-            return f"https://bioconductor.org/packages/{cls.bioc}"
+            return "https://bioconductor.org/packages/" + cls.bioc

     @lang.classproperty
     def url(cls):
         if cls.cran:
-            return f"https://cloud.r-project.org/src/contrib/{cls.cran}_{str(list(cls.versions)[0])}.tar.gz"
+            return (
+                "https://cloud.r-project.org/src/contrib/"
+                + cls.cran
+                + "_"
+                + str(list(cls.versions)[0])
+                + ".tar.gz"
+            )

     @lang.classproperty
     def list_url(cls):
         if cls.cran:
-            return f"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
+            return "https://cloud.r-project.org/src/contrib/Archive/" + cls.cran + "/"

     @property
     def git(self):
         if self.bioc:
-            return f"https://git.bioconductor.org/packages/{self.bioc}"
+            return "https://git.bioconductor.org/packages/" + self.bioc

Some files were not shown because too many files have changed in this diff