Compare commits


2 Commits

| Author | SHA1 | Message | Date |
|---|---|---|---|
| wdconinc | b7a2045c79 | [@spackbot] updating style on behalf of wdconinc | 2024-06-01 17:59:02 +00:00 |
| Wouter Deconinck | 2632106f1e | feat: concretizer: prefer_older to force oldest dependency optimization | 2024-06-01 09:46:56 -07:00 |
4484 changed files with 23663 additions and 38194 deletions


@@ -5,10 +5,13 @@ updates:
directory: "/"
schedule:
interval: "daily"
# Requirements to run style checks and build documentation
# Requirements to build documentation
- package-ecosystem: "pip"
directories:
- "/.github/workflows/requirements/style/*"
- "/lib/spack/docs"
directory: "/lib/spack/docs"
schedule:
interval: "daily"
# Requirements to run style checks
- package-ecosystem: "pip"
directory: "/.github/workflows/style"
schedule:
interval: "daily"


@@ -28,8 +28,8 @@ jobs:
run:
shell: ${{ matrix.system.shell }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: ${{inputs.python_version}}
- name: Install Python packages
@@ -44,7 +44,6 @@ jobs:
run: |
. share/spack/setup-env.sh
coverage run $(which spack) audit packages
coverage run $(which spack) audit configs
coverage run $(which spack) -d audit externals
coverage combine
coverage xml
@@ -53,7 +52,6 @@ jobs:
run: |
. share/spack/setup-env.sh
spack -d audit packages
spack -d audit configs
spack -d audit externals
- name: Package audits (without coverage)
if: ${{ runner.os == 'Windows' }}
@@ -61,11 +59,9 @@ jobs:
. share/spack/setup-env.sh
spack -d audit packages
./share/spack/qa/validate_last_exit.ps1
spack -d audit configs
./share/spack/qa/validate_last_exit.ps1
spack -d audit externals
./share/spack/qa/validate_last_exit.ps1
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
if: ${{ inputs.with_coverage == 'true' }}
with:
flags: unittests,audits


@@ -37,7 +37,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- name: Bootstrap clingo
@@ -53,33 +53,27 @@ jobs:
runs-on: ${{ matrix.runner }}
strategy:
matrix:
runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
runner: ['macos-13', 'macos-14', "ubuntu-latest"]
steps:
- name: Setup macOS
if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest' }}
if: ${{ matrix.runner != 'ubuntu-latest' }}
run: |
brew install cmake bison tree
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: "3.12"
- name: Bootstrap clingo
env:
SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
run: |
${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack external find --not-buildable cmake bison
spack -d solve zlib
${{ env.VALIDATE_LAST_EXIT }}
tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
tree ~/.spack/bootstrap/store/
gnupg-sources:
runs-on: ${{ matrix.runner }}
@@ -96,7 +90,7 @@ jobs:
if: ${{ matrix.runner == 'ubuntu-latest' }}
run: sudo rm -rf $(command -v gpg gpg2 patchelf)
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- name: Bootstrap GnuPG
@@ -125,10 +119,10 @@ jobs:
run: |
sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: |
3.8
@@ -154,7 +148,7 @@ jobs:
not_found=0
old_path="$PATH"
export PATH="$ver_dir:$PATH"
./bin/spack-tmpconfig -b ./.github/workflows/bin/bootstrap-test.sh
./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
export PATH="$old_path"
fi
fi
@@ -168,3 +162,4 @@ jobs:
source share/spack/setup-env.sh
spack -d gpg list
tree ~/.spack/bootstrap/store/


@@ -40,7 +40,8 @@ jobs:
# 1: Platforms to build for
# 2: Base image (e.g. ubuntu:22.04)
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
[centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
[centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
[centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
@@ -55,7 +56,7 @@ jobs:
if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
id: docker_meta
@@ -76,7 +77,7 @@ jobs:
env:
SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
run: |
.github/workflows/bin/generate_spack_yaml_containerize.sh
.github/workflows/generate_spack_yaml_containerize.sh
. share/spack/setup-env.sh
mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile
@@ -87,19 +88,19 @@ jobs:
fi
- name: Upload Dockerfile
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
with:
name: dockerfiles_${{ matrix.dockerfile[0] }}
path: dockerfiles
- name: Set up QEMU
uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db
uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb
- name: Log in to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -107,13 +108,13 @@ jobs:
- name: Log in to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85
uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}
@@ -126,7 +127,7 @@ jobs:
needs: deploy-images
steps:
- name: Merge Artifacts
uses: actions/upload-artifact/merge@834a144ee995460fba8ed112a2fc961b36a5ec5a
uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
with:
name: dockerfiles
pattern: dockerfiles_*


@@ -36,7 +36,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0
@@ -53,13 +53,6 @@ jobs:
- 'var/spack/repos/builtin/packages/clingo/**'
- 'var/spack/repos/builtin/packages/python/**'
- 'var/spack/repos/builtin/packages/re2c/**'
- 'var/spack/repos/builtin/packages/gnupg/**'
- 'var/spack/repos/builtin/packages/libassuan/**'
- 'var/spack/repos/builtin/packages/libgcrypt/**'
- 'var/spack/repos/builtin/packages/libgpg-error/**'
- 'var/spack/repos/builtin/packages/libksba/**'
- 'var/spack/repos/builtin/packages/npth/**'
- 'var/spack/repos/builtin/packages/pinentry/**'
- 'lib/spack/**'
- 'share/spack/**'
- '.github/workflows/bootstrap.yml'
@@ -84,8 +77,13 @@ jobs:
needs: [ prechecks, changes ]
uses: ./.github/workflows/unit_tests.yaml
secrets: inherit
windows:
if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
needs: [ prechecks ]
uses: ./.github/workflows/windows_python.yml
secrets: inherit
all:
needs: [ unit-tests, bootstrap ]
needs: [ windows, unit-tests, bootstrap ]
runs-on: ubuntu-latest
steps:
- name: Success

.github/workflows/install_spack.sh (new executable file, 8 lines)

@@ -0,0 +1,8 @@
#!/usr/bin/env sh
. share/spack/setup-env.sh
echo -e "config:\n build_jobs: 2" > etc/spack/config.yaml
spack config add "packages:all:target:[x86_64]"
spack compiler find
spack compiler info apple-clang
spack debug report
spack solve zlib


@@ -14,10 +14,10 @@ jobs:
build-paraview-deps:
runs-on: windows-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: 3.9
- name: Install Python packages


@@ -1,6 +1,6 @@
black==24.8.0
black==24.4.2
clingo==5.7.1
flake8==7.1.1
flake8==7.0.0
isort==5.13.2
mypy==1.8.0
types-six==1.16.21.20240513


@@ -16,34 +16,45 @@ jobs:
matrix:
os: [ubuntu-latest]
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
concretizer: ['clingo']
on_develop:
- ${{ github.ref == 'refs/heads/develop' }}
include:
- python-version: '3.11'
os: ubuntu-latest
concretizer: original
on_develop: ${{ github.ref == 'refs/heads/develop' }}
- python-version: '3.6'
os: ubuntu-20.04
concretizer: clingo
on_develop: ${{ github.ref == 'refs/heads/develop' }}
exclude:
- python-version: '3.7'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.8'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.9'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.10'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.11'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages
@@ -61,7 +72,7 @@ jobs:
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
- name: Bootstrap clingo
if: ${{ matrix.concretizer == 'clingo' }}
env:
@@ -74,12 +85,13 @@ jobs:
- name: Run unit tests
env:
SPACK_PYTHON: python
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
SPACK_TEST_PARALLEL: 2
COVERAGE: true
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
with:
flags: unittests,linux,${{ matrix.concretizer }}
token: ${{ secrets.CODECOV_TOKEN }}
@@ -88,10 +100,10 @@ jobs:
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: '3.11'
- name: Install System packages
@@ -106,13 +118,13 @@ jobs:
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
- name: Run shell tests
env:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
with:
flags: shelltests,linux
token: ${{ secrets.CODECOV_TOKEN }}
@@ -129,13 +141,13 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- name: Setup repo and non-root user
run: |
git --version
git config --global --add safe.directory /__w/spack/spack
git fetch --unshallow
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
useradd spack-test
chown -R spack-test .
- name: Run unit tests
@@ -148,10 +160,10 @@ jobs:
clingo-cffi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: '3.11'
- name: Install System packages
@@ -166,13 +178,14 @@ jobs:
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
- name: Run unit tests (full suite with coverage)
env:
COVERAGE: true
SPACK_TEST_SOLVER: clingo
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
with:
flags: unittests,linux,clingo
token: ${{ secrets.CODECOV_TOKEN }}
@@ -185,10 +198,10 @@ jobs:
os: [macos-13, macos-14]
python-version: ["3.11"]
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: ${{ matrix.python-version }}
- name: Install Python packages
@@ -200,48 +213,18 @@ jobs:
brew install dash fish gcc gnupg2 kcov
- name: Run unit tests
env:
SPACK_TEST_SOLVER: clingo
SPACK_TEST_PARALLEL: 4
run: |
git --version
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
. share/spack/setup-env.sh
$(which spack) bootstrap disable spack-install
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
with:
flags: unittests,macos
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
# Run unit tests on Windows
windows:
defaults:
run:
shell:
powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
runs-on: windows-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
- name: Create local develop
run: |
./.github/workflows/bin/setup_git.ps1
- name: Unit Test
run: |
spack unit-test -x --verbose --cov --cov-config=pyproject.toml
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true


@@ -18,15 +18,15 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: '3.11'
cache: 'pip'
- name: Install Python Packages
run: |
pip install --upgrade pip setuptools
pip install -r .github/workflows/requirements/style/requirements.txt
pip install -r .github/workflows/style/requirements.txt
- name: vermin (Spack's Core)
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
- name: vermin (Repositories)
@@ -35,22 +35,22 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: '3.11'
cache: 'pip'
- name: Install Python packages
run: |
pip install --upgrade pip setuptools
pip install -r .github/workflows/requirements/style/requirements.txt
pip install -r .github/workflows/style/requirements.txt
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
- name: Run style tests
run: |
share/spack/qa/run-style-tests
@@ -70,13 +70,13 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- name: Setup repo and non-root user
run: |
git --version
git config --global --add safe.directory /__w/spack/spack
git fetch --unshallow
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
useradd spack-test
chown -R spack-test .
- name: Bootstrap Spack development environment

.github/workflows/windows_python.yml (new file, 83 lines)

@@ -0,0 +1,83 @@
name: windows
on:
workflow_call:
concurrency:
group: windows-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
cancel-in-progress: true
defaults:
run:
shell:
powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
jobs:
unit-tests:
runs-on: windows-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
- name: Create local develop
run: |
./.github/workflows/setup_git.ps1
- name: Unit Test
run: |
spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
unit-tests-cmd:
runs-on: windows-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip pywin32 setuptools coverage pytest-cov clingo
- name: Create local develop
run: |
./.github/workflows/setup_git.ps1
- name: Command Unit Test
run: |
spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
build-abseil:
runs-on: windows-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip pywin32 setuptools coverage
- name: Build Test
run: |
spack compiler find
spack -d external find cmake ninja
spack -d install abseil-cpp


@@ -1,324 +1,3 @@
# v0.22.0 (2024-05-12)
`v0.22.0` is a major feature release.
## Features in this release
1. **Compiler dependencies**
We are in the process of making compilers proper dependencies in Spack, and a number
of changes in `v0.22` support that effort. You may notice nodes in your dependency
graphs for compiler runtime libraries like `gcc-runtime` or `libgfortran`, and you
may notice that Spack graphs now include `libc`. We've also begun moving compiler
configuration from `compilers.yaml` to `packages.yaml` to make it consistent with
other externals. We are trying to do this with the least disruption possible, so
your existing `compilers.yaml` files should still work. We expect to be done with
this transition by the `v0.23` release in November.
* #41104: Packages compiled with `%gcc` on Linux, macOS and FreeBSD now depend on a
new package `gcc-runtime`, which contains a copy of the shared compiler runtime
libraries. This enables gcc runtime libraries to be installed and relocated when
using a build cache. When building minimal Spack-generated container images it is
no longer necessary to install libgfortran, libgomp etc. using the system package
manager.
* #42062: Packages compiled with `%oneapi` now depend on a new package
`intel-oneapi-runtime`. This is similar to `gcc-runtime`, and the runtimes can
provide virtuals and compilers can inject dependencies on virtuals into compiled
packages. This allows us to model library soname compatibility and allows
compilers like `%oneapi` to provide virtuals like `sycl` (which can also be
provided by standalone libraries). Note that until we have an agreement in place
with intel, Intel packages are marked `redistribute(source=False, binary=False)`
and must be downloaded outside of Spack.
* #43272: changes to the optimization criteria of the solver improve the hit-rate of
buildcaches by a fair amount. The solver now uses more relaxed compatibility rules and will
not try to strictly match compilers or targets of reused specs. Users can still
enforce the previous strict behavior with `require:` sections in `packages.yaml`.
Note that to enforce correct linking, Spack will *not* reuse old `%gcc` and
`%oneapi` specs that do not have the runtime libraries as a dependency.
* #43539: Spack will reuse specs built with compilers that are *not* explicitly
configured in `compilers.yaml`. Because we can now keep runtime libraries in build
cache, we do not require you to also have a local configured compiler to *use* the
runtime libraries. This improves reuse in buildcaches and avoids conflicts with OS
updates that happen underneath Spack.
* #43190: binary compatibility on `linux` is now based on the `libc` version,
instead of on the `os` tag. Spack builds now detect the host `libc` (`glibc` or
`musl`) and add it as an implicit external node in the dependency graph. Binaries
with a `libc` with the same name and a version less than or equal to that of the
detected `libc` can be reused. This is only on `linux`, not `macos` or `Windows`.
* #43464: each package that can provide a compiler is now detectable using `spack
external find`. External packages defining compiler paths are effectively used as
compilers, and `spack external find -t compiler` can be used as a substitute for
`spack compiler find`. More details on this transition are in
[the docs](https://spack.readthedocs.io/en/latest/getting_started.html#manual-compiler-configuration)
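As an illustration of the new model, here is a minimal sketch of an external
compiler entry in `packages.yaml` (the paths and version here are hypothetical,
not taken from this changeset):
```yaml
packages:
  gcc:
    externals:
    - spec: gcc@12.3.0 languages=c,c++,fortran  # assumed system compiler
      prefix: /usr
      extra_attributes:
        compilers:  # per-language paths to the compiler executables
          c: /usr/bin/gcc
          cxx: /usr/bin/g++
          fortran: /usr/bin/gfortran
```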
2. **Improved `spack find` UI for Environments**
If you're working in an environment, you likely care about:
* What are the roots
* Which ones are installed / not installed
* What's been added that still needs to be concretized
We've tweaked `spack find` in environments to show this information much more
clearly. Installation status is shown next to each root, so you can see what is
installed. Roots are also shown in bold in the list of installed packages. There is
also a new option for `spack find -r` / `--only-roots` that will only show env
roots, if you don't want to look at all the installed specs.
More details in #42334.
3. **Improved command-line string quoting**
We are making some breaking changes to how Spack parses specs on the CLI in order to
respect shell quoting instead of trying to fight it. If you (sadly) had to write
something like this on the command line:
```
spack install zlib cflags=\"-O2 -g\"
```
That will now result in an error, but you can now write what you probably expected
to work in the first place:
```
spack install zlib cflags="-O2 -g"
```
Quoted strings can also now include special characters, so you can supply flags like:
```
spack install zlib ldflags='-Wl,-rpath=$ORIGIN/_libs'
```
To reduce ambiguity in parsing, we now require that you *not* put spaces around `=`
and `==` for flags or variants. This would not have broken before but will now
result in an error:
```
spack install zlib cflags = "-O2 -g"
```
More details and discussion in #30634.
4. **Revert default `spack install` behavior to `--reuse`**
We changed the default concretizer behavior from `--reuse` to `--reuse-deps` in
#30990 (in `v0.20`), which meant that *every* `spack install` invocation would
attempt to build a new version of the requested package / any environment roots.
While this is a common ask for *upgrading* and for *developer* workflows, we don't
think it should be the default for a package manager.
We are going to try to stick to this policy:
1. Prioritize reuse and build as little as possible by default.
2. Only upgrade or install duplicates if they are explicitly asked for, or if there
is a known security issue that necessitates an upgrade.
With the install command you now have three options:
* `--reuse` (default): reuse as many existing installations as possible.
* `--reuse-deps` / `--fresh-roots`: upgrade (freshen) roots but reuse dependencies if possible.
* `--fresh`: install fresh versions of requested packages (roots) and their dependencies.
We've also introduced `--fresh-roots` as an alias for `--reuse-deps` to make it more clear
that it may give you fresh versions. More details in #41302 and #43988.
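The same choice can also be made the default through configuration rather than
per-invocation flags. A minimal sketch, assuming the `concretizer:reuse` key
accepts `dependencies` as the config equivalent of `--reuse-deps`:
```yaml
concretizer:
  reuse: dependencies  # freshen roots, but reuse dependencies where possible
```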
5. **More control over reused specs**
You can now control which packages to reuse and how. There is a new
`concretizer:reuse` config option, which accepts the following properties:
- `roots`: `true` to reuse roots, `false` to reuse just dependencies
- `exclude`: list of constraints used to select which specs *not* to reuse
- `include`: list of constraints used to select which specs *to* reuse
- `from`: list of sources for reused specs (some combination of `local`,
`buildcache`, or `external`)
For example, to reuse only specs compiled with GCC, you could write:
```yaml
concretizer:
reuse:
roots: true
include:
- "%gcc"
```
Or, if `openmpi` must be used from externals, and it must be the only external used:
```yaml
concretizer:
reuse:
roots: true
from:
- type: local
exclude: ["openmpi"]
- type: buildcache
exclude: ["openmpi"]
- type: external
include: ["openmpi"]
```
6. **New `redistribute()` directive**
Some packages can't be redistributed in source or binary form. We need an explicit
way to say that in a package.
Now there is a `redistribute()` directive so that package authors can write:
```python
class MyPackage(Package):
redistribute(source=False, binary=False)
```
Like other directives, this works with `when=`:
```python
class MyPackage(Package):
# 12.0 and higher are proprietary
redistribute(source=False, binary=False, when="@12.0:")
# can't redistribute when we depend on some proprietary dependency
redistribute(source=False, binary=False, when="^proprietary-dependency")
```
More in #20185.
7. **New `conflict:` and `prefer:` syntax for package preferences**
Previously, you could express conflicts and preferences in `packages.yaml` through
some contortions with `require:`:
```yaml
packages:
zlib-ng:
require:
- one_of: ["%clang", "@:"] # conflict on %clang
- any_of: ["+shared", "@:"] # strong preference for +shared
```
You can now use `conflict:` and `prefer:` for a much more readable configuration:
```yaml
packages:
zlib-ng:
conflict:
- "%clang"
prefer:
- "+shared"
```
See [the documentation](https://spack.readthedocs.io/en/latest/packages_yaml.html#conflicts-and-strong-preferences)
and #41832 for more details.
8. **`include_concrete` in environments**
You may want to build on the *concrete* contents of another environment without
changing that environment. You can now include the concrete specs from another
environment's `spack.lock` with `include_concrete`:
```yaml
spack:
specs: []
concretizer:
unify: true
include_concrete:
- /path/to/environment1
- /path/to/environment2
```
Now, when *this* environment is concretized, it will bring in the already concrete
specs from `environment1` and `environment2`, and build on top of them without
changing them. This is useful if you have phased deployments, where old deployments
should not be modified but you want to use as many of them as possible. More details
in #33768.
9. **`python-venv` isolation**
Spack has unique requirements for Python because it:
1. installs every package in its own independent directory, and
2. allows users to register *external* python installations.
External installations may contain their own installed packages that can interfere
with Spack installations, and some distributions (Debian and Ubuntu) even change the
`sysconfig` in ways that alter the installation layout of installed Python packages
(e.g., with the addition of a `/local` prefix on Debian or Ubuntu). To isolate Spack
from these and other issues, we now insert a small `python-venv` package in between
`python` and packages that need to install Python code. This isolates Spack's build
environment, isolates Spack from any issues with an external python, and resolves a
large number of issues we've had with Python installations.
See #40773 for further details.
## New commands, options, and directives
* Allow packages to be pushed to build cache after install from source (#42423)
* `spack develop`: stage build artifacts in same root as non-dev builds (#41373)
* Don't delete `spack develop` build artifacts after install (#43424)
* `spack find`: add options for local/upstream only (#42999)
* `spack logs`: print log files for packages (either partially built or installed) (#42202)
* `patch`: support reversing patches (#43040)
* `develop`: Add -b/--build-directory option to set build_directory package attribute (#39606)
* `spack list`: add `--namespace` / `--repo` option (#41948)
* directives: add `checked_by` field to `license()`, add some license checks
* `spack gc`: add options for environments and build dependencies (#41731)
* Add `--create` to `spack env activate` (#40896)
## Performance improvements
* environment.py: fix excessive re-reads (#43746)
* ruamel yaml: fix quadratic complexity bug (#43745)
* Refactor to improve `spec format` speed (#43712)
* Do not acquire a write lock on the env post install if no views (#43505)
* asp.py: fewer calls to `spec.copy()` (#43715)
* spec.py: early return in `__str__`
* avoid `jinja2` import at startup unless needed (#43237)
## Other new features of note
* `archspec`: update to `v0.2.4`: support for Windows, bugfixes for `neoverse-v1` and
`neoverse-v2` detection.
* `spack config get`/`blame`: with no args, show entire config
* `spack env create <env>`: dir if dir-like (#44024)
* ASP-based solver: update os compatibility for macOS (#43862)
* Add handling of custom ssl certs in urllib ops (#42953)
* Add ability to rename environments (#43296)
* Add config option and compiler support to reuse across OS's (#42693)
* Support for prereleases (#43140)
* Only reuse externals when configured (#41707)
* Environments: Add support for including views (#42250)
## Binary caches
* Build cache: make signed/unsigned a mirror property (#41507)
* tools stack
## Removals, deprecations, and syntax changes
* remove `dpcpp` compiler and package (#43418)
* spack load: remove --only argument (#42120)
## Notable Bugfixes
* repo.py: drop deleted packages from provider cache (#43779)
* Allow `+` in module file names (#41999)
* `cmd/python`: use runpy to allow multiprocessing in scripts (#41789)
* Show extension commands with spack -h (#41726)
* Support environment variable expansion inside module projections (#42917)
* Alert user to failed concretizations (#42655)
* shell: fix zsh color formatting for PS1 in environments (#39497)
* spack mirror create --all: include patches (#41579)
## Spack community stats
* 7,994 total packages; 525 since `v0.21.0`
* 178 new Python packages, 5 new R packages
* 358 people contributed to this release
* 344 committers to packages
* 45 committers to core
# v0.21.2 (2024-03-01)
## Bugfixes


@@ -22,4 +22,4 @@
#
# This is compatible across platforms.
#
exec spack python "$@"
exec /usr/bin/env spack python "$@"


@@ -188,27 +188,25 @@ if NOT "%_sp_args%"=="%_sp_args:--help=%" (
goto :end_switch
:case_load
if NOT defined _sp_args (
exit /B 0
)
:: If args contain --bat, or -h/--help: just execute.
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:--list=%" (
goto :default_case
:: If args contain --sh, --csh, or -h/--help: just execute.
if defined _sp_args (
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
goto :default_case
)
)
for /f "tokens=* USEBACKQ" %%I in (
`python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`
) do %%I
`python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I
goto :end_switch
:case_unload
goto :case_load
:default_case
python "%spack%" %_sp_flags% %_sp_subcommand% %_sp_args%
goto :end_switch


@@ -170,6 +170,23 @@ config:
# If set to true, Spack will use ccache to cache C compiles.
ccache: false
# The concretization algorithm to use in Spack. Options are:
#
# 'clingo': Uses a logic solver under the hood to solve DAGs with full
# backtracking and optimization for user preferences. Spack will
# try to bootstrap the logic solver, if not already available.
#
# 'original': Spack's original greedy, fixed-point concretizer. This
# algorithm can make decisions too early and will not backtrack
# sufficiently for many specs. This will soon be deprecated in
# favor of clingo.
#
# See `concretizer.yaml` for more settings you can fine-tune when
# using clingo.
concretizer: clingo
# How long to wait to lock the Spack installation database. This lock is used
# when Spack needs to manage its own package metadata and all operations are
# expected to complete within the default time limit. The timeout should


@@ -20,14 +20,11 @@ packages:
awk: [gawk]
armci: [armcimpi]
blas: [openblas, amdblis]
c: [gcc]
cxx: [gcc]
D: [ldc]
daal: [intel-oneapi-daal]
elf: [elfutils]
fftw-api: [fftw, amdfftw]
flame: [libflame, amdlibflame]
fortran: [gcc]
fortran-rt: [gcc-runtime, intel-oneapi-runtime]
fuse: [libfuse]
gl: [glx, osmesa]
@@ -64,7 +61,6 @@ packages:
tbb: [intel-tbb]
unwind: [libunwind]
uuid: [util-linux-uuid, libuuid]
wasi-sdk: [wasi-sdk-prebuilt]
xxd: [xxd-standalone, vim]
yacc: [bison, byacc]
ziglang: [zig]


@@ -1,5 +1,6 @@
config:
locks: false
concretizer: clingo
build_stage::
- '$spack/.staging'
stage_name: '{name}-{version}-{hash:7}'


@@ -206,7 +206,6 @@ def setup(sphinx):
("py:class", "six.moves.urllib.parse.ParseResult"),
("py:class", "TextIO"),
("py:class", "hashlib._Hash"),
("py:class", "concurrent.futures._base.Executor"),
# Spack classes that are private and we don't want to expose
("py:class", "spack.provider_index._IndexBase"),
("py:class", "spack.repo._PrependFileLoader"),


@@ -203,9 +203,12 @@ The OS that are currently supported are summarized in the table below:
* - Ubuntu 24.04
- ``ubuntu:24.04``
- ``spack/ubuntu-noble``
* - CentOS Stream9
- ``quay.io/centos/centos:stream9``
- ``spack/centos-stream9``
* - CentOS 7
- ``centos:7``
- ``spack/centos7``
* - CentOS Stream
- ``quay.io/centos/centos:stream``
- ``spack/centos-stream``
* - openSUSE Leap
- ``opensuse/leap``
- ``spack/leap15``


@@ -893,9 +893,8 @@ The valid variables for a ``when`` clause are:
#. ``env``. The user environment (usually ``os.environ`` in Python).
#. ``hostname``. The hostname of the system.
#. ``full_hostname``. The fully qualified hostname of the system.
#. ``hostname``. The hostname of the system (if ``hostname`` is an
executable in the user's PATH).
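As a brief illustration, a ``when`` clause can gate a list definition on one of
these variables. A minimal sketch (the environment variable name is a
hypothetical example):

.. code-block:: yaml

   spack:
     definitions:
     - packages: [libelf, libdwarf]
     - when: env.get('SPACK_WITH_CMAKE', '') == 'true'
       packages: [cmake]
     specs:
     - $packages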
^^^^^^^^^^^^^^^^^^^^^^^^
SpecLists as Constraints
@@ -932,84 +931,32 @@ This allows for a much-needed reduction in redundancy between packages
and constraints.
-----------------
Environment Views
-----------------
----------------
Filesystem Views
----------------
Spack Environments can have an associated filesystem view, which is a directory
with a more traditional structure ``<view>/bin``, ``<view>/lib``, ``<view>/include``
in which all files of the installed packages are linked.
By default a view is created for each environment, thanks to the ``view: true``
option in the ``spack.yaml`` manifest file:
.. code-block:: yaml
spack:
specs: [perl, python]
view: true
The view is created in a hidden directory ``.spack-env/view`` relative to the environment.
If you've used ``spack env activate``, you may have already interacted with this view. Spack
prepends its ``<view>/bin`` dir to ``PATH`` when the environment is activated, so that
you can directly run executables from all installed packages in the environment.
Views are highly customizable: you can control where they are put, modify their structure,
include and exclude specs, change how files are linked, and you can even generate multiple
views for a single environment.
Spack Environments can define filesystem views, which provide a direct access point
for software similar to the directory hierarchy that might exist under ``/usr/local``.
Filesystem views are updated every time the environment is written out to the lock
file ``spack.lock``, so the concrete environment and the view are always compatible.
The files of the view's installed packages are brought into the view by symbolic or
hard links, referencing the original Spack installation, or by copy.
.. _configuring_environment_views:
^^^^^^^^^^^^^^^^^^^^^^^^^^
Minimal view configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Configuration in ``spack.yaml``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The minimal configuration
.. code-block:: yaml
spack:
# ...
view: true
lets Spack generate a single view with default settings under the
``.spack-env/view`` directory of the environment.
Another short way to configure a view is to specify just where to put it:
.. code-block:: yaml
spack:
# ...
view: /path/to/view
Views can also be disabled by setting ``view: false``.
^^^^^^^^^^^^^^^^^^^^^^^^^^^
Advanced view configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^^
One or more **view descriptors** can be defined under ``view``, keyed by a name.
The example from the previous section with ``view: /path/to/view`` is equivalent
to defining a view descriptor named ``default`` with a ``root`` attribute:
.. code-block:: yaml
spack:
# ...
view:
default: # name of the view
root: /path/to/view # view descriptor attribute
The ``default`` view descriptor name is special: when you ``spack env activate`` your
environment, this view will be used to update (among other things) your ``PATH``
variable.
View descriptors must contain the root of the view, and optionally projections,
``select`` and ``exclude`` lists and link information via ``link`` and
The Spack Environment manifest file has a top-level keyword
``view``. Each entry under that heading is a **view descriptor**, headed
by a name. Any number of views may be defined under the ``view`` heading.
The view descriptor contains the root of the view, and
optionally the projections for the view, ``select`` and
``exclude`` lists for the view and link information via ``link`` and
``link_type``.
As a more advanced example, in the following manifest
For example, in the following manifest
file snippet we define a view named ``mpis``, rooted at
``/path/to/view`` in which all projections use the package name,
version, and compiler name to determine the path for a given
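The manifest snippet itself falls outside this diff's context; a minimal sketch
of such a view descriptor, with illustrative values, might look like:

.. code-block:: yaml

   spack:
     # ...
     view:
       mpis:  # name of the view descriptor
         root: /path/to/view
         select: [^mpi]  # assumed selection: specs depending on an MPI provider
         projections:
           all: '{name}/{version}-{compiler.name}'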
@@ -1054,10 +1001,59 @@ of ``hardlink`` or ``copy``.
when the environment is not activated, and linked libraries will be located
*outside* of the view thanks to rpaths.
There are two shorthands for environments with a single view. If the
environment at ``/path/to/env`` has a single view, with a root at
``/path/to/env/.spack-env/view``, with default selection and exclusion
and the default projection, we can put ``view: True`` in the
environment manifest. Similarly, if the environment has a view with a
different root, but default selection, exclusion, and projections, the
manifest can say ``view: /path/to/view``. These views are
automatically named ``default``, so that
.. code-block:: yaml
spack:
# ...
view: True
is equivalent to
.. code-block:: yaml
spack:
# ...
view:
default:
root: .spack-env/view
and
.. code-block:: yaml
spack:
# ...
view: /path/to/view
is equivalent to
.. code-block:: yaml
spack:
# ...
view:
default:
root: /path/to/view
By default, Spack environments are configured with ``view: True`` in
the manifest. Environments can be configured without views using
``view: False``. For backwards compatibility reasons, environments
with no ``view`` key are treated the same as ``view: True``.
From the command line, the ``spack env create`` command takes an
argument ``--with-view [PATH]`` that sets the path for a single, default
view. If no path is specified, the default path is used (``view:
true``). The argument ``--without-view`` can be used to create an
True``). The argument ``--without-view`` can be used to create an
environment without any view configured.
The ``spack env view`` command can be used to manage the views
@@ -1123,18 +1119,11 @@ the projection under ``all`` before reaching those entries.
Activating environment views
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The ``spack env activate <env>`` command has two effects:
1. It activates the environment so that further Spack commands such
as ``spack install`` will run in the context of the environment.
2. It activates the view so that environment variables such as
``PATH`` are updated to include the view.
Without further arguments, the ``default`` view of the environment is
activated. If a view with a different name has to be activated,
``spack env activate --with-view <name> <env>`` can be
used instead. You can also activate the environment without modifying
further environment variables using ``--without-view``.
The ``spack env activate`` command will put the default view for the
environment into the user's path, in addition to activating the
environment for Spack commands. The arguments ``-v,--with-view`` and
``-V,--without-view`` can be used to tune this behavior. The default
behavior is to activate with the environment view if there is one.
The environment variables affected by the ``spack env activate``
command and the paths that are used to update them are determined by
@@ -1157,8 +1146,8 @@ relevant variable if the path exists. For this reason, it is not
recommended to use non-default projections with the default view of an
environment.
The ``spack env deactivate`` command will remove the active view of
the Spack environment from the user's environment variables.
The ``spack env deactivate`` command will remove the default view of
the environment from the user's path.
.. _env-generate-depfile:
@@ -1317,7 +1306,7 @@ index once every package is pushed. Note how this target uses the generated
example/push/%: example/install/%
@mkdir -p $(dir $@)
$(info About to push $(SPEC) to a buildcache)
$(SPACK) -e . buildcache push --only=package $(BUILDCACHE_DIR) /$(HASH)
$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
@touch $@
push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))


@@ -1263,11 +1263,6 @@ Git fetching supports the following parameters to ``version``:
option ``--depth 1`` will be used if the version of git and the specified
transport protocol support it, and ``--single-branch`` will be used if the
version of git supports it.
* ``git_sparse_paths``: Use ``sparse-checkout`` to only clone these relative paths.
This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
large repositories that have separate portions that can be built independently.
If paths provided are directories then all the subdirectories and associated files
will also be cloned.
Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
@@ -1366,41 +1361,6 @@ Submodules
For more information about git submodules see the manpage of git: ``man
git-submodule``.
Sparse-Checkout
You can supply ``git_sparse_paths`` at the package or version level to utilize git's
sparse-checkout feature. This will only clone the paths that are specified in the
``git_sparse_paths`` attribute for the package along with the files in the top level directory.
This feature allows you to only clone what you need from a large repository.
Note that this is a newer feature in git and requires git ``2.25.0`` or greater.
If ``git_sparse_paths`` is supplied and the git version is too old
then a warning will be issued and that package will use the standard cloning operations instead.
``git_sparse_paths`` should be supplied as a list of paths, a callable function for versions,
or a more complex package attribute using the ``@property`` decorator. The return value should be
a list for a callable implementation of ``git_sparse_paths``.
.. code-block:: python
   def sparse_path_function(package):
       """a callable function that can be used inside a version"""
       # paths can be directories or files; all subdirectories and files are included
       paths = ["doe", "rae", "me/file.cpp"]
       if package.spec.version > Version("1.2.0"):
           paths.extend(["fae"])
       return paths

   class MyPackage(Package):
       # can also be a package attribute that will be used if not specified in versions
       git_sparse_paths = ["doe", "rae"]

       # use the package attribute
       version("1.0.0")
       version("1.1.0")

       # use the function
       version("1.1.5", git_sparse_paths=sparse_path_function)
       version("1.2.0", git_sparse_paths=sparse_path_function)
       version("1.2.5", git_sparse_paths=sparse_path_function)
.. _github-fetch:
^^^^^^
@@ -2384,27 +2344,6 @@ you set ``parallel`` to ``False`` at the package level, then each call
to ``make()`` will be sequential by default, but packagers can call
``make(parallel=True)`` to override it.
Note that the ``--jobs`` option works out of the box for all standard
build systems. If you are using a non-standard build system instead, you
can use the variable ``make_jobs`` to extract the number of jobs specified
by the ``--jobs`` option:
.. code-block:: python
:emphasize-lines: 7, 11
:linenos:
class Xios(Package):
...
def install(self, spec, prefix):
...
options = [
...
'--jobs', str(make_jobs),
]
...
make_xios = Executable("./make_xios")
make_xios(*options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Install-level build parallelism
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -5234,6 +5173,12 @@ installed executable. The check is implemented as follows:
reframe = Executable(self.prefix.bin.reframe)
reframe("-l")
.. warning::
The API for adding tests is not yet considered stable and may change
in future releases.
""""""""""""""""""""""""""""""""
Checking build-time test results
""""""""""""""""""""""""""""""""
@@ -5271,42 +5216,38 @@ be left in the build stage directory as illustrated below:
Stand-alone tests
^^^^^^^^^^^^^^^^^
While build-time tests are integrated with the installation process, stand-alone
While build-time tests are integrated with the build process, stand-alone
tests are expected to run days, weeks, even months after the software is
installed. The goal is to provide a mechanism for gaining confidence that
packages work as installed **and** *continue* to work as the underlying
software evolves. Packages can add and inherit stand-alone tests. The
``spack test`` command is used for stand-alone testing.
``spack test`` command is used to manage stand-alone testing.
.. admonition:: Stand-alone test methods should complete within a few minutes.
.. note::
Execution speed is important since these tests are intended to quickly
assess whether installed specs work on the system. Spack cannot spare
resources for more extensive testing of packages included in CI stacks.
assess whether installed specs work on the system. Consequently, they
should run relatively quickly -- as in on the order of at most a few
minutes -- while ideally executing all, or at least key aspects of the
installed software.
Consequently, stand-alone tests should run relatively quickly -- as in
on the order of at most a few minutes -- while testing at least key aspects
of the installed software. Save more extensive testing for other tools.
.. note::
Failing stand-alone tests indicate problems with the installation and,
therefore, there is no reason to proceed with more resource-intensive
tests until those have been investigated.
Passing stand-alone tests indicate that more thorough testing, such
as running extensive unit or regression tests, or tests that run at
scale can proceed without wasting resources on a problematic installation.
Tests are defined in the package using methods with names beginning ``test_``.
This allows Spack to support multiple independent checks, or parts. Files
needed for testing, such as source, data, and expected outputs, may be saved
from the build and or stored with the package in the repository. Regardless
of origin, these files are automatically copied to the spec's test stage
directory prior to execution of the test method(s). Spack also provides helper
functions to facilitate common processing.
.. tip::
**The status of stand-alone tests can be used to guide follow-up testing efforts.**
Passing stand-alone tests justify performing more thorough testing, such
as running extensive unit or regression tests or tests that run at scale,
when available. These tests are outside of the scope of Spack packaging.
Failing stand-alone tests indicate problems with the installation and,
therefore, no reason to proceed with more resource-intensive tests until
the failures have been investigated.
directory prior to execution of the test method(s). Spack also provides some
helper functions to facilitate processing.
.. _configure-test-stage:
@@ -5314,26 +5255,30 @@ functions to facilitate common processing.
Configuring the test stage directory
""""""""""""""""""""""""""""""""""""
Stand-alone tests utilize a test stage directory to build, run, and track
tests in the same way Spack uses a build stage directory to install software.
The default test stage root directory, ``$HOME/.spack/test``, is defined in
:ref:`config.yaml <config-yaml>`. This location is customizable by adding or
changing the ``test_stage`` path such that:
Stand-alone tests utilize a test stage directory for building, running,
and tracking results in the same way Spack uses a build stage directory.
The default test stage root directory, ``~/.spack/test``, is defined in
:ref:`etc/spack/defaults/config.yaml <config-yaml>`. This location is
customizable by adding or changing the ``test_stage`` path in the high-level
``config`` of the appropriate ``config.yaml`` file such that:
.. code-block:: yaml
config:
test_stage: /path/to/test/stage
Packages can use the ``self.test_suite.stage`` property to access the path.
Packages can use the ``self.test_suite.stage`` property to access this setting.
Other package properties that provide access to spec-specific subdirectories
and files are described in :ref:`accessing staged files <accessing-files>`.
.. admonition:: Each spec being tested has its own test stage directory.
.. note::
The ``config:test_stage`` option is the path to the root of a
**test suite**'s stage directories.
The test stage path is the root directory for the **entire suite**.
In other words, it is the root directory for **all specs** being
tested by the ``spack test run`` command. Each spec gets its own
stage subdirectory. Use ``self.test_suite.test_dir_for_spec(self.spec)``
to access the spec-specific test stage directory.
Other package properties that provide paths to spec-specific subdirectories
and files are described in :ref:`accessing-files`.
.. _adding-standalone-tests:
@@ -5346,144 +5291,61 @@ Test recipes are defined in the package using methods with names beginning
Each method has access to the information Spack tracks on the package, such
as options, compilers, and dependencies, supporting the customization of tests
to the build. Standard python ``assert`` statements and other error reporting
mechanisms can be used. These exceptions are automatically caught and reported
mechanisms are available. Such exceptions are automatically caught and reported
as test failures.
Each test method is an *implicit test part* named by the method. Its purpose
is the method's docstring. Providing a meaningful purpose for the test gives
context that can aid debugging. Spack outputs both the name and purpose at the
start of test execution so it's also important that the docstring/purpose be
brief.
.. tip::
We recommend naming test methods so it is clear *what* is being tested.
For example, if a test method is building and or running an executable
called ``example``, then call the method ``test_example``. This, together
with a similarly meaningful test purpose, will aid test comprehension,
debugging, and maintainability.
Stand-alone tests run in an environment that provides access to information
on the installed software, such as build options, dependencies, and compilers.
Build options and dependencies are accessed using the same spec checks used
by build recipes. Examples of checking :ref:`variant settings <variants>` and
:ref:`spec constraints <testing-specs>` can be found at the provided links.
.. admonition:: Spack automatically sets up the test stage directory and environment.
Spack automatically creates the test stage directory and copies
relevant files *prior to* running tests. It can also ensure build
dependencies are available **if** necessary.
The path to the test stage is configurable (see :ref:`configure-test-stage`).
Files that Spack knows to copy are those saved from the build (see
:ref:`cache_extra_test_sources`) and those added to the package repository
(see :ref:`cache_custom_files`).
Spack will use the value of the ``test_requires_compiler`` property to
determine whether it needs to also set up build dependencies (see
:ref:`test-build-tests`).
The ``MyPackage`` package below provides two basic test examples:
``test_example`` and ``test_example2``. The first runs the installed
``example`` and ensures its output contains an expected string. The second
runs ``example2`` without checking output so is only concerned with confirming
the executable runs successfully. If the installed spec is not expected to have
``example2``, then the check at the top of the method will raise a special
``SkipTest`` exception, which is captured to facilitate reporting skipped test
parts to tools like CDash.
.. code-block:: python

class MyPackage(Package):
    ...

    def test_example(self):
        """ensure installed example works"""
        expected = "Done."
        example = which(self.prefix.bin.example)

        # Capture stdout and stderr from running the Executable
        # and check that the expected output was produced.
        out = example(output=str.split, error=str.split)
        assert expected in out, f"Expected '{expected}' in the output"

    def test_example2(self):
        """run installed example2"""
        if self.spec.satisfies("@:1.0"):
            # Raise SkipTest to ensure flagging the test as skipped for
            # test reporting purposes.
            raise SkipTest("Test is only available for v1.1 on")

        example2 = which(self.prefix.bin.example2)
        example2()
Output showing the identification of each test part after running the tests
is illustrated below.
.. code-block:: console

$ spack test run --alias mypackage mypackage@2.0
==> Spack test mypackage
...
$ spack test results -l mypackage
==> Results for test suite 'mypackage':
...
==> [2024-03-10-16:03:56.625439] test: test_example: ensure installed example works
...
PASSED: MyPackage::test_example
==> [2024-03-10-16:03:56.625439] test: test_example2: run installed example2
...
PASSED: MyPackage::test_example2
.. admonition:: Do NOT implement tests that must run in the installation prefix.
Use of the package spec's installation prefix for building and running
tests is **strongly discouraged**. Doing so causes permission errors for
shared spack instances *and* facilities that install the software in
read-only file systems or directories.
Instead, start these test methods by explicitly copying the needed files
from the installation prefix to the test stage directory. Note the test
stage directory is the current directory when the test is executed with
the ``spack test run`` command.

.. admonition:: Test methods for library packages should build test executables.

Stand-alone tests for library packages *should* build test executables
that utilize the *installed* library. Doing so ensures the tests follow
a similar build process that users of the library would follow.

For more information on how to do this, see :ref:`test-build-tests`.
.. tip::
If you want to see more examples from packages with stand-alone tests, run
``spack pkg grep "def\stest" | sed "s/\/package.py.*//g" | sort -u``
from the command line to get a list of the packages.
.. _adding-standalone-test-parts:
"""""""""""""""""""""""""""""
Adding stand-alone test parts
"""""""""""""""""""""""""""""
Sometimes dependencies between steps of a test lend themselves to being
broken into parts. Tracking the pass/fail status of each part may aid
debugging. Spack provides a ``test_part`` context manager for use within
test methods.
Each test part is independently run, tracked, and reported. Test parts are
executed in the order they appear. If one fails, subsequent test parts are
still performed even if they would also fail. This allows tools like CDash
to track and report the status of test parts across runs. The pass/fail status
of the enclosing test is derived from the statuses of the embedded test parts.
.. admonition:: Test method and test part names **must** be unique.
Test results reporting requires that test methods and embedded test parts
within a package have unique names.
.. _test-part:
The signature for ``test_part`` is:
@@ -5505,68 +5367,40 @@ where each argument has the following meaning:
* ``work_dir`` is the path to the directory in which the test will run.
The default of ``None``, or ``"."``, corresponds to the spec's test
stage (i.e., ``self.test_suite.test_dir_for_spec(self.spec)``).
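For instance, a test part can run from a subdirectory of the test stage by
passing ``work_dir``. A minimal sketch, where the ``data`` subdirectory and
``input.txt`` file are illustrative assumptions:

.. code-block:: python

with test_part(
    self,
    "test_example_data",
    purpose="run example against the copied data set",
    work_dir="data",
):
    example = which(self.prefix.bin.example)
    example("input.txt")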
.. admonition:: Start test part names with the name of the enclosing test.
We **highly recommend** starting the names of test parts with the name
of the enclosing test. Doing so helps with the comprehension, readability,
and debugging of test results.
Suppose ``MyPackage`` installs multiple executables that need to run in a
specific order since the outputs from one are inputs of others. Further suppose
we want to add an integration test that runs the executables in order. We can
accomplish this goal by implementing a stand-alone test method consisting of
test parts for each executable as follows:

.. code-block:: python

class MyPackage(Package):
    ...

    def test_series(self):
        """run setup, perform, and report"""

        with test_part(self, "test_series_setup", purpose="setup operation"):
            exe = which(self.prefix.bin.setup)
            exe()

        with test_part(self, "test_series_run", purpose="perform operation"):
            exe = which(self.prefix.bin.run)
            exe()

        with test_part(self, "test_series_report", purpose="generate report"):
            exe = which(self.prefix.bin.report)
            exe()
The result is that ``test_series`` runs the following executables in order: ``setup``,
``run``, and ``report``. In this case no options are passed to any of the
executables and no outputs from running them are checked. Consequently, the
implementation could be simplified with a for-loop as follows:
.. code-block:: python

class MyPackage(Package):
    ...

    def test_series(self):
        """execute series setup, run, and report"""
        for exe, reason in [
            ("setup", "setup operation"),
            ("run", "perform operation"),
            ("report", "generate report")
        ]:
            with test_part(self, f"test_series_{exe}", purpose=reason):
                program = which(self.prefix.bin.join(exe))
                program()
In both cases, since we're using a context manager, each test part in
``test_series`` will execute regardless of the status of the other test
parts.

Now let's look at the output from running the stand-alone tests where
the second test part, ``test_series_run``, fails.
.. code-block:: console
@@ -5576,68 +5410,50 @@ the second test part, ``test_series_run``, fails.
$ spack test results -l mypackage
==> Results for test suite 'mypackage':
...
==> [2024-03-10-16:03:56.625204] test: test_series: execute series setup, run, and report
==> [2024-03-10-16:03:56.625439] test: test_series_setup: setup operation
...
PASSED: MyPackage::test_series_setup
==> [2024-03-10-16:03:56.625555] test: test_series_run: perform operation
...
FAILED: MyPackage::test_series_run
==> [2024-03-10-16:03:57.003456] test: test_series_report: generate report
...
FAILED: MyPackage::test_series_report
FAILED: MyPackage::test_series
...
Since test parts depended on the success of previous parts, we see that the
failure of one results in the failure of subsequent checks and the overall
result of the test method, ``test_series``, is failure.
.. tip::
If you want to see more examples from packages using ``test_part``, run
``spack pkg grep "test_part(" | sed "s/\/package.py.*//g" | sort -u``
from the command line to get a list of the packages.
.. _test-build-tests:

"""""""""""""""""""""""""""""""""""""
Building and running test executables
"""""""""""""""""""""""""""""""""""""

.. admonition:: Re-use build-time sources and (small) input data sets when possible.

We **highly recommend** re-using build-time test sources and pared down
input files for testing installed software. These files are easier
to keep synchronized with software capabilities when they reside
within the software's repository. More information on saving files from
the installation process can be found at :ref:`cache_extra_test_sources`.

If that is not possible, you can add test-related files to the package
repository (see :ref:`cache_custom_files`). It will be important to
remember to maintain them so they work across listed or supported versions
of the package.

Packages that build libraries are good examples of cases where you'll want
to build test executables from the installed software before running them.
Doing so requires you to let Spack know it needs to load the package's
compiler configuration. This is accomplished by setting the package's
``test_requires_compiler`` property to ``True``.

.. admonition:: ``test_requires_compiler = True`` is required to build test executables.

Setting the property to ``True`` ensures access to the compiler through
canonical environment variables (e.g., ``CC``, ``CXX``, ``FC``, ``F77``).
It also gives access to build dependencies like ``cmake`` through their
``spec objects`` (e.g., ``self.spec["cmake"].prefix.bin.cmake`` for the
path or ``self.spec["cmake"].command`` for the ``Executable`` instance).

Be sure to add the property at the top of the package class under other
properties like the ``homepage``.

The example below, which ignores how ``cxx-example.cpp`` is acquired,
illustrates the basic process of compiling a test executable using the
installed library before running it.
.. code-block:: python
@@ -5661,22 +5477,28 @@ installed library before running it.
cxx_example = which(exe)
cxx_example()
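Build dependencies set up this way can also be invoked through their spec
objects. A minimal sketch, assuming ``cmake`` is among the package's build
dependencies (the ``test_cmake_version`` name is illustrative):

.. code-block:: python

def test_cmake_version(self):
    """ensure the build-time cmake is available to the test"""
    # The dependency's Executable instance comes from its spec object.
    cmake = self.spec["cmake"].command
    cmake("--version")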
Typically the files used to build and/or run test executables are either
cached from the installation (see :ref:`cache_extra_test_sources`) or added
to the package repository (see :ref:`cache_custom_files`). There is nothing
preventing the use of both.
.. _cache_extra_test_sources:
""""""""""""""""""""""""""""""""""""
Saving build- and install-time files
""""""""""""""""""""""""""""""""""""
"""""""""""""""""""""""
Saving build-time files
"""""""""""""""""""""""
You can use the ``cache_extra_test_sources`` helper routine to copy
directories and or files from the source build stage directory to the
package's installation directory. Spack will automatically copy these
files for you when it sets up the test stage directory and before it
begins running the tests.
.. note::
We highly recommend re-using build-time test sources and pared down
input files for testing installed software. These files are easier
to keep synchronized with software capabilities since they reside
within the software's repository.
If that is not possible, you can add test-related files to the package
repository (see :ref:`adding custom files <cache_custom_files>`). It
will be important to maintain them so they work across listed or supported
versions of the package.
You can use the ``cache_extra_test_sources`` helper to copy directories
and or files from the source build stage directory to the package's
installation directory.
The signature for ``cache_extra_test_sources`` is:
@@ -5691,69 +5513,46 @@ where each argument has the following meaning:
* ``srcs`` is a string *or* a list of strings corresponding to the
paths of subdirectories and/or files needed for stand-alone testing.
.. warning::
Paths provided in the ``srcs`` argument **must be relative** to the
staged source directory. They will be copied to the equivalent relative
location under the test stage directory prior to test execution.

Contents of subdirectories and files are copied to a special test cache
subdirectory of the installation prefix. They are automatically copied to
the appropriate relative paths under the test stage directory prior to
executing stand-alone tests.
.. tip::
*Perform test-related conversions once when copying files.*
If one or more of the copied files needs to be modified to reference
the installed software, it is recommended that those changes be made
to the cached files **once** in the post-``install`` copy method
**after** the call to ``cache_extra_test_sources``. This will reduce
the amount of unnecessary work in the test method **and** avoid problems
running stand-alone tests in shared instances and facility deployments.
The ``filter_file`` function can be quite useful for such changes
(see :ref:`file-filtering`).
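A minimal sketch of such a one-time conversion, where the ``examples``
subdirectory and the ``PREFIX`` Makefile variable are illustrative
assumptions (here the staged copy is filtered just before caching, which
accomplishes the same one-time fix-up):

.. code-block:: python

@run_after("install")
def copy_test_files(self):
    with working_dir(self.stage.source_path):
        # Point the example Makefile at the installed software once,
        # so the test method does not have to repeat the conversion.
        filter_file(
            r"^PREFIX\s*=.*",
            f"PREFIX = {self.prefix}",
            join_path("examples", "Makefile"),
        )
        cache_extra_test_sources(self, "examples")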
Below is a basic example of a test that relies on files from the installation.
This package method re-uses the contents of the ``examples`` subdirectory,
which is assumed to have all of the files implemented to allow ``make`` to
compile and link ``foo.c`` and ``bar.c`` against the package's installed
library.
.. code-block:: python

class MyLibPackage(MakefilePackage):
    ...

    @run_after("install")
    def copy_test_files(self):
        cache_extra_test_sources(self, "examples")

    def test_example(self):
        """build and run the examples"""
        examples_dir = self.test_suite.current_test_cache_dir.examples
        with working_dir(examples_dir):
            make = which("make")
            make()

            for program in ["foo", "bar"]:
                with test_part(
                    self,
                    f"test_example_{program}",
                    purpose=f"ensure {program} runs",
                ):
                    exe = Executable(program)
                    exe()
In this case, ``copy_test_files`` copies the associated files from the
build stage to the package's test cache directory under the installation
prefix. Running ``spack test run`` for the package results in Spack copying
the directory and its contents to the test stage directory. The
``working_dir`` context manager ensures the commands within it are executed
from the ``examples_dir``. The test builds the software using ``make`` before
running each executable, ``foo`` and ``bar``, as independent test parts.
.. note::
@@ -5762,18 +5561,43 @@ running each executable, ``foo`` and ``bar``, as independent test parts.
The key to copying files for stand-alone testing at build time is use
of the ``run_after`` directive, which ensures the associated files are
copied **after** the provided build stage (``install``) when the installation
prefix **and** files are available.
The test method uses the path contained in the package's
``self.test_suite.current_test_cache_dir`` property for the root directory
of the copied files. In this case, that's the ``examples`` subdirectory.

.. note::

While source and input files are generally recommended, binaries
**may** also be cached by the build process. Only you, as the package
writer or maintainer, know whether these files would be appropriate
for testing the installed software weeks to months later.

.. tip::

If you want to see more examples from packages that cache build files, run
``spack pkg grep cache_extra_test_sources | sed "s/\/package.py.*//g" | sort -u``
from the command line to get a list of the packages.
.. _cache_custom_files:
@@ -5781,9 +5605,8 @@ running each executable, ``foo`` and ``bar``, as independent test parts.
Adding custom files
"""""""""""""""""""
Sometimes it is helpful or necessary to include custom files for building
and/or checking the results of tests as part of the package. Examples of the
types of files that might be useful are:
- test source files
- test input files
@@ -5791,15 +5614,17 @@ of files that might be useful are:
- expected test outputs
While obtaining such files from the software repository is preferred (see
:ref:`cache_extra_test_sources`), there are circumstances where doing so is not
feasible such as when the software is not being actively maintained. When test
files cannot be obtained from the repository or there is a need to supplement
files that can, Spack supports the inclusion of additional files under the
``test`` subdirectory of the package in the Spack repository.
The following example assumes a ``custom-example.cpp`` is saved in ``MyLibrary``
package's ``test`` subdirectory. It also assumes the program simply needs to
be compiled and linked against the installed ``MyLibrary`` software.
.. code-block:: python
@@ -5809,29 +5634,17 @@ be compiled and linked against the installed ``MyLibrary`` software.
test_requires_compiler = True
...

def test_custom_example(self):
    """build and run custom-example"""
    src_dir = self.test_suite.current_test_data_dir
    exe = "custom-example"

    with working_dir(src_dir):
        cc = which(os.environ["CC"])
        cc(
            f"-L{self.prefix.lib}",
            f"-I{self.prefix.include}",
            f"{exe}.cpp",
            "-o", exe,
        )

        custom_example = Executable(exe)
        custom_example()
In this case, ``spack test run`` for the package results in Spack copying
the contents of the ``test`` subdirectory to the test stage directory path
in ``self.test_suite.current_test_data_dir`` before calling
``test_custom_example``. Use of the ``working_dir`` context manager
ensures the commands to build and run the program are performed from
within the appropriate subdirectory of the test stage.
.. _expected_test_output_from_file:
@@ -5840,8 +5653,9 @@ Reading expected output from a file
"""""""""""""""""""""""""""""""""""
The helper function ``get_escaped_text_output`` is available for packages
to retrieve properly formatted text from a file potentially containing
special characters.
The signature for ``get_escaped_text_output`` is:
@@ -5851,13 +5665,10 @@ The signature for ``get_escaped_text_output`` is:
where ``filename`` is the path to the file containing the expected output.
The path provided to ``filename`` for one of the copied custom files
(:ref:`custom file <cache_custom_files>`) is in the path rooted at
``self.test_suite.current_test_data_dir``.
The example below shows how to reference both the custom database
(``packages.db``) and expected output (``dump.out``) files Spack copies
to the test stage:
.. code-block:: python
@@ -5879,9 +5690,8 @@ to the test stage:
for exp in expected:
assert re.search(exp, out), f"Expected '{exp}' in output"
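For reference, a fuller sketch of the pattern the elided lines above follow,
with an illustrative ``example`` executable and ``dump`` subcommand (adapt
both to the installed software):

.. code-block:: python

def test_example(self):
    """check example table dump"""
    test_data_dir = self.test_suite.current_test_data_dir
    db_filename = test_data_dir.join("packages.db")

    # Each line of the copied expected-output file becomes a regular
    # expression checked against the actual output.
    expected = get_escaped_text_output(test_data_dir.join("dump.out"))

    example = which(self.prefix.bin.example)
    out = example("dump", db_filename, output=str.split, error=str.split)
    for exp in expected:
        assert re.search(exp, out), f"Expected '{exp}' in output"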
If the files were instead cached from installing the software, the paths to the
two files would be found under the ``self.test_suite.current_test_cache_dir``
directory as shown below:
.. code-block:: python
@@ -5889,24 +5699,17 @@ directory as shown below:
"""check example table dump"""
test_cache_dir = self.test_suite.current_test_cache_dir
db_filename = test_cache_dir.join("packages.db")
..
expected = get_escaped_text_output(test_cache_dir.join("dump.out"))
...
Alternatively, if both files had been installed by the software into the
``share/tests`` subdirectory of the installation prefix, the paths to the
two files would be referenced as follows:
.. code-block:: python
def test_example(self):
    """check example table dump"""
    db_filename = self.prefix.share.tests.join("packages.db")
    ...
    expected = get_escaped_text_output(
        self.prefix.share.tests.join("dump.out")
    )
    ...
.. _check_outputs:
@@ -5914,9 +5717,9 @@ two files would be referenced as follows:
Comparing expected to actual outputs
""""""""""""""""""""""""""""""""""""
The ``check_outputs`` helper routine is available for packages to ensure
multiple expected outputs from running an executable are contained within
the actual outputs.
The signature for ``check_outputs`` is:
@@ -5942,17 +5745,11 @@ Invoking the method is the equivalent of:
if errors:
raise RuntimeError("\n ".join(errors))
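In practice the call is compact. A minimal sketch, with illustrative
expected strings:

.. code-block:: python

out = example(output=str.split, error=str.split)
check_outputs(["Initialized", "Done."], out)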
.. tip::
If you want to see more examples from packages that use this helper, run
``spack pkg grep check_outputs | sed "s/\/package.py.*//g" | sort -u``
from the command line to get a list of the packages.
.. _accessing-files:
"""""""""""""""""""""""""""""""""""""""""
Accessing package- and test-related files
"""""""""""""""""""""""""""""""""""""""""
You may need to access files from one or more locations when writing
@@ -5961,7 +5758,8 @@ include test source files or includes them but has no way to build the
executables using the installed headers and libraries. In these cases
you may need to reference the files relative to one or more root directory.
The table below lists relevant path properties and provides additional
examples of their use. See :ref:`expected_test_output_from_file` for
examples of accessing files saved from the software repository, package
repository, and installation.
@@ -5990,6 +5788,7 @@ repository, and installation.
- ``self.test_suite.current_test_data_dir``
- ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
.. _inheriting-tests:
""""""""""""""""""""""""""""
@@ -6032,7 +5831,7 @@ maintainers provide additional stand-alone tests customized to the package.
.. warning::
Any package that implements a test method with the same name as an
inherited method will override the inherited method. If that is not the
goal and you are not explicitly calling and adding functionality to
the inherited method for the test, then make sure that all test methods
and embedded test parts have unique test names.
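A minimal sketch of extending rather than silently replacing an inherited
check, assuming a hypothetical inherited ``test_check`` method:

.. code-block:: python

class MyPackage(Package):
    ...

    def test_check(self):
        """run the inherited check plus a package-specific example"""
        # Explicitly run the inherited test method, then add to it.
        super().test_check()

        example = which(self.prefix.bin.example)
        example()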
@@ -6197,8 +5996,6 @@ running:
This is already part of the boilerplate for packages created with
``spack create``.
.. _file-filtering:
^^^^^^^^^^^^^^^^^^^
Filtering functions
^^^^^^^^^^^^^^^^^^^


@@ -253,6 +253,17 @@ can easily happen if it is not updated frequently, this behavior ensures that
spack has a way to know for certain about the status of any concrete spec on
the remote mirror, but can slow down pipeline generation significantly.
The ``--optimize`` argument is experimental and runs the generated pipeline
document through a series of optimization passes designed to reduce the size
of the generated file.
The ``--dependencies`` argument is also experimental and disables what in Gitlab is
referred to as DAG scheduling, internally using the ``dependencies`` keyword
rather than ``needs`` to list dependency jobs. The drawback of using this option
is that before any job can begin, all jobs in previous stages must first
complete. The benefit is that Gitlab allows more dependencies to be listed
when using ``dependencies`` instead of ``needs``.
The optional ``--output-file`` argument should be an absolute path (including
file name) to the generated pipeline, and if not given, the default is
``./.gitlab-ci.yml``.
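Assuming these are flags to the pipeline generation command (``spack ci
generate`` in current Spack), usage looks roughly like:

.. code-block:: console

$ spack ci generate --optimize --dependencies --output-file /tmp/pipeline.yml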


@@ -1,13 +1,13 @@
sphinx==7.4.7
sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.6.1
sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.1
docutils==0.20.1
pygments==2.18.0
urllib3==2.2.2
pytest==8.3.2
urllib3==2.2.1
pytest==8.2.1
isort==5.13.2
black==24.8.0
flake8==7.1.1
mypy==1.11.1
black==24.4.2
flake8==7.0.0
mypy==1.10.0

96 lib/spack/env/cc vendored

@@ -174,46 +174,6 @@ preextend() {
unset IFS
}
execute() {
# dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
if [ -n "${SPACK_TEST_COMMAND=}" ]; then
case "$SPACK_TEST_COMMAND" in
dump-args)
IFS="$lsep"
for arg in $full_command_list; do
echo "$arg"
done
unset IFS
exit
;;
dump-env-*)
var=${SPACK_TEST_COMMAND#dump-env-}
eval "printf '%s\n' \"\$0: \$var: \$$var\""
;;
*)
die "Unknown test command: '$SPACK_TEST_COMMAND'"
;;
esac
fi
#
# Write the input and output commands to debug logs if it's asked for.
#
if [ "$SPACK_DEBUG" = TRUE ]; then
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
echo "[$mode] $command $input_command" >> "$input_log"
IFS="$lsep"
echo "[$mode] "$full_command_list >> "$output_log"
unset IFS
fi
# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list
exit
}
# Fail with a clear message if the input contains any bell characters.
if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
die "Compiler command line contains our separator ('${lsep}'). Cannot parse."
@@ -271,17 +231,12 @@ fi
# ld link
# ccld compile & link
# Note. SPACK_ALWAYS_XFLAGS are applied for all compiler invocations,
# including version checks (SPACK_XFLAGS variants are not applied
# for version checks).
command="${0##*/}"
comp="CC"
vcheck_flags=""
case "$command" in
cpp)
mode=cpp
debug_flags="-g"
vcheck_flags="${SPACK_ALWAYS_CPPFLAGS}"
;;
cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe|craycc)
command="$SPACK_CC"
@@ -289,7 +244,6 @@ case "$command" in
comp="CC"
lang_flags=C
debug_flags="-g"
vcheck_flags="${SPACK_ALWAYS_CFLAGS}"
;;
c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
command="$SPACK_CXX"
@@ -297,7 +251,6 @@ case "$command" in
comp="CXX"
lang_flags=CXX
debug_flags="-g"
vcheck_flags="${SPACK_ALWAYS_CXXFLAGS}"
;;
ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang|crayftn)
command="$SPACK_FC"
@@ -305,7 +258,6 @@ case "$command" in
comp="FC"
lang_flags=F
debug_flags="-g"
vcheck_flags="${SPACK_ALWAYS_FFLAGS}"
;;
f77|xlf|xlf_r|pgf77)
command="$SPACK_F77"
@@ -313,7 +265,6 @@ case "$command" in
comp="F77"
lang_flags=F
debug_flags="-g"
vcheck_flags="${SPACK_ALWAYS_FFLAGS}"
;;
ld|ld.gold|ld.lld)
mode=ld
@@ -414,11 +365,7 @@ unset IFS
export PATH="$new_dirs"
if [ "$mode" = vcheck ]; then
full_command_list="$command"
args="$@"
extend full_command_list vcheck_flags
extend full_command_list args
execute
exec "${command}" "$@"
fi
# Darwin's linker has a -r argument that merges object files together.
@@ -775,7 +722,6 @@ case "$mode" in
cc|ccld)
case $lang_flags in
F)
extend spack_flags_list SPACK_ALWAYS_FFLAGS
extend spack_flags_list SPACK_FFLAGS
;;
esac
@@ -785,7 +731,6 @@ esac
# C preprocessor flags come before any C/CXX flags
case "$mode" in
cpp|as|cc|ccld)
extend spack_flags_list SPACK_ALWAYS_CPPFLAGS
extend spack_flags_list SPACK_CPPFLAGS
;;
esac
@@ -796,11 +741,9 @@ case "$mode" in
cc|ccld)
case $lang_flags in
C)
extend spack_flags_list SPACK_ALWAYS_CFLAGS
extend spack_flags_list SPACK_CFLAGS
;;
CXX)
extend spack_flags_list SPACK_ALWAYS_CXXFLAGS
extend spack_flags_list SPACK_CXXFLAGS
;;
esac
@@ -990,4 +933,39 @@ if [ -n "$SPACK_CCACHE_BINARY" ]; then
esac
fi
execute
# dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
if [ -n "${SPACK_TEST_COMMAND=}" ]; then
case "$SPACK_TEST_COMMAND" in
dump-args)
IFS="$lsep"
for arg in $full_command_list; do
echo "$arg"
done
unset IFS
exit
;;
dump-env-*)
var=${SPACK_TEST_COMMAND#dump-env-}
eval "printf '%s\n' \"\$0: \$var: \$$var\""
;;
*)
die "Unknown test command: '$SPACK_TEST_COMMAND'"
;;
esac
fi
#
# Write the input and output commands to debug logs if it's asked for.
#
if [ "$SPACK_DEBUG" = TRUE ]; then
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
echo "[$mode] $command $input_command" >> "$input_log"
IFS="$lsep"
echo "[$mode] "$full_command_list >> "$output_log"
unset IFS
fi
# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list


@@ -18,7 +18,7 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.5-dev (commit 7e6740012b897ae4a950f0bba7e9726b767e921f)
* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)
astunparse
----------------


@@ -1265,29 +1265,27 @@ def _distro_release_info(self) -> Dict[str, str]:
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
else:
try:
with os.scandir(self.etc_dir) as it:
etc_files = [
p.path for p in it
if p.is_file() and p.name not in _DISTRO_RELEASE_IGNORE_BASENAMES
]
basenames = [
basename
for basename in os.listdir(self.etc_dir)
if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
and os.path.isfile(os.path.join(self.etc_dir, basename))
]
# We sort for repeatability in cases where there are multiple
# distro specific files; e.g. CentOS, Oracle, Enterprise all
# containing `redhat-release` on top of their own.
etc_files.sort()
basenames.sort()
except OSError:
# This may occur when /etc is not readable but we can't be
# sure about the *-release files. Check common entries of
# /etc for information. If they turn out to not be there the
# error is handled in `_parse_distro_release_file()`.
etc_files = [
os.path.join(self.etc_dir, basename)
for basename in _DISTRO_RELEASE_BASENAMES
]
for filepath in etc_files:
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(os.path.basename(filepath))
basenames = _DISTRO_RELEASE_BASENAMES
for basename in basenames:
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
if match is None:
continue
filepath = os.path.join(self.etc_dir, basename)
distro_info = self._parse_distro_release_file(filepath)
# The name is always present if the pattern matches.
if "name" not in distro_info:


@@ -231,6 +231,96 @@ def is_host_name(instance):
return True
try:
# The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
import idna
except ImportError:
pass
else:
@_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
def is_idn_host_name(instance):
if not isinstance(instance, str_types):
return True
idna.encode(instance)
return True
try:
import rfc3987
except ImportError:
try:
from rfc3986_validator import validate_rfc3986
except ImportError:
pass
else:
@_checks_drafts(name="uri")
def is_uri(instance):
if not isinstance(instance, str_types):
return True
return validate_rfc3986(instance, rule="URI")
@_checks_drafts(
draft6="uri-reference",
draft7="uri-reference",
raises=ValueError,
)
def is_uri_reference(instance):
if not isinstance(instance, str_types):
return True
return validate_rfc3986(instance, rule="URI_reference")
else:
@_checks_drafts(draft7="iri", raises=ValueError)
def is_iri(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="IRI")
@_checks_drafts(draft7="iri-reference", raises=ValueError)
def is_iri_reference(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="IRI_reference")
@_checks_drafts(name="uri", raises=ValueError)
def is_uri(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="URI")
@_checks_drafts(
draft6="uri-reference",
draft7="uri-reference",
raises=ValueError,
)
def is_uri_reference(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="URI_reference")
try:
from strict_rfc3339 import validate_rfc3339
except ImportError:
try:
from rfc3339_validator import validate_rfc3339
except ImportError:
validate_rfc3339 = None
if validate_rfc3339:
@_checks_drafts(name="date-time")
def is_datetime(instance):
if not isinstance(instance, str_types):
return True
return validate_rfc3339(instance)
@_checks_drafts(draft7="time")
def is_time(instance):
if not isinstance(instance, str_types):
return True
return is_datetime("1970-01-01T" + instance)
@_checks_drafts(name="regex", raises=re.error)
def is_regex(instance):
if not isinstance(instance, str_types):
@@ -250,3 +340,86 @@ def is_draft3_time(instance):
if not isinstance(instance, str_types):
return True
return datetime.datetime.strptime(instance, "%H:%M:%S")
try:
import webcolors
except ImportError:
pass
else:
def is_css_color_code(instance):
return webcolors.normalize_hex(instance)
@_checks_drafts(draft3="color", raises=(ValueError, TypeError))
def is_css21_color(instance):
if (
not isinstance(instance, str_types) or
instance.lower() in webcolors.css21_names_to_hex
):
return True
return is_css_color_code(instance)
def is_css3_color(instance):
if instance.lower() in webcolors.css3_names_to_hex:
return True
return is_css_color_code(instance)
try:
import jsonpointer
except ImportError:
pass
else:
@_checks_drafts(
draft6="json-pointer",
draft7="json-pointer",
raises=jsonpointer.JsonPointerException,
)
def is_json_pointer(instance):
if not isinstance(instance, str_types):
return True
return jsonpointer.JsonPointer(instance)
# TODO: I don't want to maintain this, so it
# needs to go either into jsonpointer (pending
# https://github.com/stefankoegl/python-json-pointer/issues/34) or
# into a new external library.
@_checks_drafts(
draft7="relative-json-pointer",
raises=jsonpointer.JsonPointerException,
)
def is_relative_json_pointer(instance):
# Definition taken from:
# https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
if not isinstance(instance, str_types):
return True
non_negative_integer, rest = [], ""
for i, character in enumerate(instance):
if character.isdigit():
non_negative_integer.append(character)
continue
if not non_negative_integer:
return False
rest = instance[i:]
break
return (rest == "#") or jsonpointer.JsonPointer(rest)
try:
import uritemplate.exceptions
except ImportError:
pass
else:
@_checks_drafts(
draft6="uri-template",
draft7="uri-template",
raises=uritemplate.exceptions.InvalidTemplate,
)
def is_uri_template(
instance,
template_validator=uritemplate.Validator().force_balanced_braces(),
):
template = uritemplate.URITemplate(instance)
return template_validator.validate(template)


@@ -47,11 +47,7 @@ def decorator(factory):
def partial_uarch(
name: str = "",
vendor: str = "",
features: Optional[Set[str]] = None,
generation: int = 0,
cpu_part: str = "",
name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
) -> Microarchitecture:
"""Construct a partial microarchitecture, from information gathered during system scan."""
return Microarchitecture(
@@ -61,7 +57,6 @@ def partial_uarch(
features=features or set(),
compilers={},
generation=generation,
cpu_part=cpu_part,
)
@@ -95,7 +90,6 @@ def proc_cpuinfo() -> Microarchitecture:
return partial_uarch(
vendor=_canonicalize_aarch64_vendor(data),
features=_feature_set(data, key="Features"),
cpu_part=data.get("CPU part", ""),
)
if architecture in (PPC64LE, PPC64):
@@ -351,10 +345,6 @@ def sorting_fn(item):
generic_candidates = [c for c in candidates if c.vendor == "generic"]
best_generic = max(generic_candidates, key=sorting_fn)
# Relevant for AArch64. Filter on "cpu_part" if we have any match
if info.cpu_part != "" and any(c for c in candidates if info.cpu_part == c.cpu_part):
candidates = [c for c in candidates if info.cpu_part == c.cpu_part]
# Filter the candidates to be descendant of the best generic candidate.
# This is to avoid that the lack of a niche feature that can be disabled
# from e.g. BIOS prevents detection of a reasonably performant architecture


@@ -2,7 +2,9 @@
# Archspec Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Types and functions to manage information on CPU microarchitectures."""
"""Types and functions to manage information
on CPU microarchitectures.
"""
import functools
import platform
import re
@@ -63,24 +65,21 @@ class Microarchitecture:
passed in as argument above.
* versions: versions that support this micro-architecture.
generation (int): generation of the micro-architecture, if relevant.
cpu_part (str): cpu part of the architecture, if relevant.
generation (int): generation of the micro-architecture, if
relevant.
"""
# pylint: disable=too-many-arguments,too-many-instance-attributes
# pylint: disable=too-many-arguments
#: Aliases for micro-architecture's features
feature_aliases = FEATURE_ALIASES
def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu_part=""):
def __init__(self, name, parents, vendor, features, compilers, generation=0):
self.name = name
self.parents = parents
self.vendor = vendor
self.features = features
self.compilers = compilers
# Only relevant for PowerPC
self.generation = generation
# Only relevant for AArch64
self.cpu_part = cpu_part
# Cache the ancestor computation
self._ancestors = None
@@ -112,7 +111,6 @@ def __eq__(self, other):
and self.parents == other.parents # avoid ancestors here
and self.compilers == other.compilers
and self.generation == other.generation
and self.cpu_part == other.cpu_part
)
@coerce_target_names
@@ -145,8 +143,7 @@ def __repr__(self):
cls_name = self.__class__.__name__
fmt = (
cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, "
"{0.features!r}, {0.compilers!r}, generation={0.generation!r}, "
"cpu_part={0.cpu_part!r})"
"{0.features!r}, {0.compilers!r}, {0.generation!r})"
)
return fmt.format(self)
@@ -193,7 +190,6 @@ def to_dict(self):
"generation": self.generation,
"parents": [str(x) for x in self.parents],
"compilers": self.compilers,
"cpupart": self.cpu_part,
}
@staticmethod
@@ -206,7 +202,6 @@ def from_dict(data) -> "Microarchitecture":
features=set(data["features"]),
compilers=data.get("compilers", {}),
generation=data.get("generation", 0),
cpu_part=data.get("cpupart", ""),
)
def optimization_flags(self, compiler, version):
@@ -365,11 +360,8 @@ def fill_target_from_dict(name, data, targets):
features = set(values["features"])
compilers = values.get("compilers", {})
generation = values.get("generation", 0)
cpu_part = values.get("cpupart", "")
targets[name] = Microarchitecture(
name, parents, vendor, features, compilers, generation=generation, cpu_part=cpu_part
)
targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)
known_targets = {}
data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]


@@ -2225,14 +2225,10 @@
],
"nvhpc": [
{
"versions": "21.11:23.8",
"versions": "21.11:",
"name": "zen3",
"flags": "-tp {name}",
"warnings": "zen4 is not fully supported by nvhpc versions < 23.9, falling back to zen3"
},
{
"versions": "23.9:",
"flags": "-tp {name}"
"warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
}
]
}
@@ -2715,8 +2711,7 @@
"flags": "-mcpu=thunderx2t99"
}
]
},
"cpupart": "0x0af"
}
},
"a64fx": {
"from": ["armv8.2a"],
@@ -2784,8 +2779,7 @@
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
}
]
},
"cpupart": "0x001"
}
},
"cortex_a72": {
"from": ["aarch64"],
@@ -2822,8 +2816,7 @@
"flags" : "-mcpu=cortex-a72"
}
]
},
"cpupart": "0xd08"
}
},
"neoverse_n1": {
"from": ["cortex_a72", "armv8.2a"],
@@ -2909,8 +2902,7 @@
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd0c"
}
},
"neoverse_v1": {
"from": ["neoverse_n1", "armv8.4a"],
@@ -2934,6 +2926,8 @@
"lrcpc",
"dcpop",
"sha3",
"sm3",
"sm4",
"asimddp",
"sha512",
"sve",
@@ -3034,8 +3028,7 @@
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd40"
}
},
"neoverse_v2": {
"from": ["neoverse_n1", "armv9.0a"],
@@ -3059,10 +3052,13 @@
"lrcpc",
"dcpop",
"sha3",
"sm3",
"sm4",
"asimddp",
"sha512",
"sve",
"asimdfhm",
"dit",
"uscat",
"ilrcpc",
"flagm",
@@ -3070,12 +3066,18 @@
"sb",
"dcpodp",
"sve2",
"sveaes",
"svepmull",
"svebitperm",
"svesha3",
"svesm4",
"flagm2",
"frint",
"svei8mm",
"svebf16",
"i8mm",
"bf16"
"bf16",
"dgh"
],
"compilers" : {
"gcc": [
@@ -3100,19 +3102,15 @@
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
},
{
"versions": "10.0:11.3.99",
"versions": "10.0:11.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
},
{
"versions": "11.4:11.99",
"flags" : "-mcpu=neoverse-v2"
},
{
"versions": "12.0:12.2.99",
"versions": "12.0:12.99",
"flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
},
{
"versions": "12.3:",
"versions": "13.0:",
"flags" : "-mcpu=neoverse-v2"
}
],
@@ -3147,113 +3145,7 @@
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd4f"
},
"neoverse_n2": {
"from": ["neoverse_n1", "armv9.0a"],
"vendor": "ARM",
"features": [
"fp",
"asimd",
"evtstrm",
"aes",
"pmull",
"sha1",
"sha2",
"crc32",
"atomics",
"fphp",
"asimdhp",
"cpuid",
"asimdrdm",
"jscvt",
"fcma",
"lrcpc",
"dcpop",
"sha3",
"asimddp",
"sha512",
"sve",
"asimdfhm",
"uscat",
"ilrcpc",
"flagm",
"ssbs",
"sb",
"dcpodp",
"sve2",
"flagm2",
"frint",
"svei8mm",
"svebf16",
"i8mm",
"bf16"
],
"compilers" : {
"gcc": [
{
"versions": "4.8:5.99",
"flags": "-march=armv8-a"
},
{
"versions": "6:6.99",
"flags" : "-march=armv8.1-a"
},
{
"versions": "7.0:7.99",
"flags" : "-march=armv8.2-a -mtune=cortex-a72"
},
{
"versions": "8.0:8.99",
"flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
},
{
"versions": "9.0:9.99",
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
},
{
"versions": "10.0:10.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
},
{
"versions": "11.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"clang" : [
{
"versions": "9.0:10.99",
"flags" : "-march=armv8.5-a+sve"
},
{
"versions": "11.0:13.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
},
{
"versions": "14.0:15.99",
"flags" : "-march=armv9-a+i8mm+bf16"
},
{
"versions": "16.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"arm" : [
{
"versions": "23.04.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"nvhpc" : [
{
"versions": "23.3:",
"name": "neoverse-n1",
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd49"
}
},
"m1": {
"from": ["armv8.4a"],
@@ -3319,8 +3211,7 @@
"flags" : "-mcpu=apple-m1"
}
]
},
"cpupart": "0x022"
}
},
"m2": {
"from": ["m1", "armv8.5a"],
@@ -3398,8 +3289,7 @@
"flags" : "-mcpu=apple-m2"
}
]
},
"cpupart": "0x032"
}
},
"arm": {
"from": [],


@@ -52,9 +52,6 @@
}
}
}
},
"cpupart": {
"type": "string"
}
},
"required": [
@@ -110,4 +107,4 @@
"additionalProperties": false
}
}
}
}


@@ -1,45 +0,0 @@
diff --git a/lib/spack/external/_vendoring/distro/distro.py b/lib/spack/external/_vendoring/distro/distro.py
index 89e1868047..50c3b18d4d 100644
--- a/lib/spack/external/_vendoring/distro/distro.py
+++ b/lib/spack/external/_vendoring/distro/distro.py
@@ -1265,27 +1265,29 @@ def _distro_release_info(self) -> Dict[str, str]:
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
else:
try:
- basenames = [
- basename
- for basename in os.listdir(self.etc_dir)
- if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
- and os.path.isfile(os.path.join(self.etc_dir, basename))
- ]
+ with os.scandir(self.etc_dir) as it:
+ etc_files = [
+ p.path for p in it
+ if p.is_file() and p.name not in _DISTRO_RELEASE_IGNORE_BASENAMES
+ ]
# We sort for repeatability in cases where there are multiple
# distro specific files; e.g. CentOS, Oracle, Enterprise all
# containing `redhat-release` on top of their own.
- basenames.sort()
+ etc_files.sort()
except OSError:
# This may occur when /etc is not readable but we can't be
# sure about the *-release files. Check common entries of
# /etc for information. If they turn out to not be there the
# error is handled in `_parse_distro_release_file()`.
- basenames = _DISTRO_RELEASE_BASENAMES
- for basename in basenames:
- match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+ etc_files = [
+ os.path.join(self.etc_dir, basename)
+ for basename in _DISTRO_RELEASE_BASENAMES
+ ]
+
+ for filepath in etc_files:
+ match = _DISTRO_RELEASE_BASENAME_PATTERN.match(os.path.basename(filepath))
if match is None:
continue
- filepath = os.path.join(self.etc_dir, basename)
distro_info = self._parse_distro_release_file(filepath)
# The name is always present if the pattern matches.
if "name" not in distro_info:


@@ -13,191 +13,3 @@ index 6b630cdfbb..1791fe7fbf 100644
-__version__ = metadata.version("jsonschema")
+
+__version__ = "3.2.0"
diff --git a/lib/spack/external/_vendoring/jsonschema/_format.py b/lib/spack/external/_vendoring/jsonschema/_format.py
index 281a7cfcff..29061e3661 100644
--- a/lib/spack/external/_vendoring/jsonschema/_format.py
+++ b/lib/spack/external/_vendoring/jsonschema/_format.py
@@ -231,96 +231,6 @@ def is_host_name(instance):
return True
-try:
- # The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
- import idna
-except ImportError:
- pass
-else:
- @_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
- def is_idn_host_name(instance):
- if not isinstance(instance, str_types):
- return True
- idna.encode(instance)
- return True
-
-
-try:
- import rfc3987
-except ImportError:
- try:
- from rfc3986_validator import validate_rfc3986
- except ImportError:
- pass
- else:
- @_checks_drafts(name="uri")
- def is_uri(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3986(instance, rule="URI")
-
- @_checks_drafts(
- draft6="uri-reference",
- draft7="uri-reference",
- raises=ValueError,
- )
- def is_uri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3986(instance, rule="URI_reference")
-
-else:
- @_checks_drafts(draft7="iri", raises=ValueError)
- def is_iri(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="IRI")
-
- @_checks_drafts(draft7="iri-reference", raises=ValueError)
- def is_iri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="IRI_reference")
-
- @_checks_drafts(name="uri", raises=ValueError)
- def is_uri(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="URI")
-
- @_checks_drafts(
- draft6="uri-reference",
- draft7="uri-reference",
- raises=ValueError,
- )
- def is_uri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="URI_reference")
-
-
-try:
- from strict_rfc3339 import validate_rfc3339
-except ImportError:
- try:
- from rfc3339_validator import validate_rfc3339
- except ImportError:
- validate_rfc3339 = None
-
-if validate_rfc3339:
- @_checks_drafts(name="date-time")
- def is_datetime(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3339(instance)
-
- @_checks_drafts(draft7="time")
- def is_time(instance):
- if not isinstance(instance, str_types):
- return True
- return is_datetime("1970-01-01T" + instance)
-
-
@_checks_drafts(name="regex", raises=re.error)
def is_regex(instance):
if not isinstance(instance, str_types):
@@ -340,86 +250,3 @@ def is_draft3_time(instance):
if not isinstance(instance, str_types):
return True
return datetime.datetime.strptime(instance, "%H:%M:%S")
-
-
-try:
- import webcolors
-except ImportError:
- pass
-else:
- def is_css_color_code(instance):
- return webcolors.normalize_hex(instance)
-
- @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
- def is_css21_color(instance):
- if (
- not isinstance(instance, str_types) or
- instance.lower() in webcolors.css21_names_to_hex
- ):
- return True
- return is_css_color_code(instance)
-
- def is_css3_color(instance):
- if instance.lower() in webcolors.css3_names_to_hex:
- return True
- return is_css_color_code(instance)
-
-
-try:
- import jsonpointer
-except ImportError:
- pass
-else:
- @_checks_drafts(
- draft6="json-pointer",
- draft7="json-pointer",
- raises=jsonpointer.JsonPointerException,
- )
- def is_json_pointer(instance):
- if not isinstance(instance, str_types):
- return True
- return jsonpointer.JsonPointer(instance)
-
- # TODO: I don't want to maintain this, so it
- # needs to go either into jsonpointer (pending
- # https://github.com/stefankoegl/python-json-pointer/issues/34) or
- # into a new external library.
- @_checks_drafts(
- draft7="relative-json-pointer",
- raises=jsonpointer.JsonPointerException,
- )
- def is_relative_json_pointer(instance):
- # Definition taken from:
- # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
- if not isinstance(instance, str_types):
- return True
- non_negative_integer, rest = [], ""
- for i, character in enumerate(instance):
- if character.isdigit():
- non_negative_integer.append(character)
- continue
-
- if not non_negative_integer:
- return False
-
- rest = instance[i:]
- break
- return (rest == "#") or jsonpointer.JsonPointer(rest)
-
-
-try:
- import uritemplate.exceptions
-except ImportError:
- pass
-else:
- @_checks_drafts(
- draft6="uri-template",
- draft7="uri-template",
- raises=uritemplate.exceptions.InvalidTemplate,
- )
- def is_uri_template(
- instance,
- template_validator=uritemplate.Validator().force_balanced_braces(),
- ):
- template = uritemplate.URITemplate(instance)
- return template_validator.validate(template)


@@ -1624,12 +1624,6 @@ def remove_linked_tree(path):
shutil.rmtree(os.path.realpath(path), **kwargs)
os.unlink(path)
else:
if sys.platform == "win32":
# Adding this prefix allows shutil to remove long paths on windows
# https://learn.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=registry
long_path_pfx = "\\\\?\\"
if not path.startswith(long_path_pfx):
path = long_path_pfx + path
shutil.rmtree(path, **kwargs)


@@ -6,6 +6,7 @@
import collections.abc
import contextlib
import functools
import inspect
import itertools
import os
import re
@@ -15,7 +16,7 @@
from typing import Any, Callable, Iterable, List, Tuple
# Ignore emacs backups when listing modules
ignore_modules = r"^\.#|~$"
ignore_modules = [r"^\.#", "~$"]
def index_by(objects, *funcs):
@@ -83,6 +84,20 @@ def index_by(objects, *funcs):
return result
def caller_locals():
"""This will return the locals of the *parent* of the caller.
This allows a function to insert variables into its caller's
scope. Yes, this is some black magic, and yes it's useful
for implementing things like depends_on and provides.
"""
# Passing zero here skips line context for speed.
stack = inspect.stack(0)
try:
return stack[2][0].f_locals
finally:
del stack
def attr_setdefault(obj, name, value):
"""Like dict.setdefault, but for objects."""
if not hasattr(obj, name):
@@ -90,6 +105,15 @@ def attr_setdefault(obj, name, value):
return getattr(obj, name)
def has_method(cls, name):
for base in inspect.getmro(cls):
if base is object:
continue
if name in base.__dict__:
return True
return False
def union_dicts(*dicts):
"""Use update() to combine all dicts into one.
@@ -154,22 +178,19 @@ def list_modules(directory, **kwargs):
order."""
list_directories = kwargs.setdefault("directories", True)
ignore = re.compile(ignore_modules)
for name in os.listdir(directory):
if name == "__init__.py":
continue
with os.scandir(directory) as it:
for entry in it:
if entry.name == "__init__.py" or entry.name == "__pycache__":
continue
path = os.path.join(directory, name)
if list_directories and os.path.isdir(path):
init_py = os.path.join(path, "__init__.py")
if os.path.isfile(init_py):
yield name
if (
list_directories
and entry.is_dir()
and os.path.isfile(os.path.join(entry.path, "__init__.py"))
):
yield entry.name
elif entry.name.endswith(".py") and entry.is_file() and not ignore.search(entry.name):
yield entry.name[:-3] # strip .py
elif name.endswith(".py"):
if not any(re.search(pattern, name) for pattern in ignore_modules):
yield re.sub(".py$", "", name)
def decorator_with_or_without_args(decorator):
@@ -216,8 +237,8 @@ def setter(name, value):
value.__name__ = name
setattr(cls, name, value)
if not hasattr(cls, "_cmp_key"):
raise TypeError(f"'{cls.__name__}' doesn't define _cmp_key().")
if not has_method(cls, "_cmp_key"):
raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)
setter("__eq__", lambda s, o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
setter("__lt__", lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
@@ -367,8 +388,8 @@ def cd_fun():
TypeError: If the class does not have a ``_cmp_iter`` method
"""
if not hasattr(cls, "_cmp_iter"):
raise TypeError(f"'{cls.__name__}' doesn't define _cmp_iter().")
if not has_method(cls, "_cmp_iter"):
raise TypeError("'%s' doesn't define _cmp_iter()." % cls.__name__)
# comparison operators are implemented in terms of lazy_eq and lazy_lt
def eq(self, other):
@@ -843,19 +864,20 @@ def uniq(sequence):
return uniq_list
def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
def elide_list(line_list, max_num=10):
"""Takes a long list and limits it to a smaller number of elements,
replacing intervening elements with '...'. For example::
elide_list(["1", "2", "3", "4", "5", "6"], 4)
elide_list([1,2,3,4,5,6], 4)
gives::
["1", "2", "3", "...", "6"]
[1, 2, 3, '...', 6]
"""
if len(line_list) > max_num:
return [*line_list[: max_num - 1], "...", line_list[-1]]
return line_list
return line_list[: max_num - 1] + ["..."] + line_list[-1:]
else:
return line_list
@contextlib.contextmanager

View File

@@ -10,7 +10,6 @@
import errno
import io
import multiprocessing
import multiprocessing.connection
import os
import re
import select
@@ -34,23 +33,8 @@
pass
esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
# Ansi Control Sequence Introducers (CSI) are a well-defined format
# Standard ECMA-48: Control Functions for Character-Imaging I/O Devices, section 5.4
# https://www.ecma-international.org/wp-content/uploads/ECMA-48_5th_edition_june_1991.pdf
csi_pre = f"{esc}{lbracket}"
csi_param, csi_inter, csi_post = r"[0-?]", r"[ -/]", r"[@-~]"
ansi_csi = f"{csi_pre}{csi_param}*{csi_inter}*{csi_post}"
# General ansi escape sequences have well-defined prefixes,
# but content and suffixes are less reliable.
# Conservatively assume they end with either "<ESC>\" or "<BELL>",
# with no intervening "<ESC>"/"<BELL>" keys or newlines
esc_pre = f"{esc}[@-_]"
esc_content = f"[^{esc}{bell}{newline}]"
esc_post = f"(?:{esc}{bslash}|{bell})"
ansi_esc = f"{esc_pre}{esc_content}*{esc_post}"
# Use this to strip escape sequences
_escape = re.compile(f"{ansi_csi}|{ansi_esc}")
_escape = re.compile(r"\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07")
# control characters for enabling/disabling echo
#

131 lib/spack/spack/abi.py Normal file
View File

@@ -0,0 +1,131 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from llnl.util.lang import memoized
import spack.spec
import spack.version
from spack.compilers.clang import Clang
from spack.util.executable import Executable, ProcessError
class ABI:
"""This class provides methods to test ABI compatibility between specs.
The current implementation is rather rough and could be improved."""
def architecture_compatible(
self, target: spack.spec.Spec, constraint: spack.spec.Spec
) -> bool:
"""Return true if architecture of target spec is ABI compatible
to the architecture of constraint spec. If either the target
or constraint specs have no architecture, target is also defined
as architecture ABI compatible to constraint."""
return (
not target.architecture
or not constraint.architecture
or target.architecture.intersects(constraint.architecture)
)
@memoized
def _gcc_get_libstdcxx_version(self, version):
"""Returns gcc ABI compatibility info by getting the library version of
a compiler's libstdc++ or libgcc_s"""
from spack.build_environment import dso_suffix
spec = spack.spec.CompilerSpec("gcc", version)
compilers = spack.compilers.compilers_for_spec(spec)
if not compilers:
return None
compiler = compilers[0]
rungcc = None
libname = None
output = None
if compiler.cxx:
rungcc = Executable(compiler.cxx)
libname = "libstdc++." + dso_suffix
elif compiler.cc:
rungcc = Executable(compiler.cc)
libname = "libgcc_s." + dso_suffix
else:
return None
try:
# Some gcc's are actually clang and don't respond properly to
# --print-file-name (they just print the filename, not the
# full path). Ignore these and expect them to be handled as clang.
if Clang.default_version(rungcc.exe[0]) != "unknown":
return None
output = rungcc("--print-file-name=%s" % libname, output=str)
except ProcessError:
return None
if not output:
return None
libpath = os.path.realpath(output.strip())
if not libpath:
return None
return os.path.basename(libpath)
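Roughly the query this method issues, assuming g++ is on PATH; the printed path is illustrative:

from spack.util.executable import Executable

rungcc = Executable("g++")
output = rungcc("--print-file-name=libstdc++.so", output=str)
print(output.strip())  # e.g. /usr/lib/gcc/x86_64-linux-gnu/12/libstdc++.so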
@memoized
def _gcc_compiler_compare(self, pversion, cversion):
"""Returns true iff the gcc version pversion and cversion
are ABI compatible."""
plib = self._gcc_get_libstdcxx_version(pversion)
clib = self._gcc_get_libstdcxx_version(cversion)
if not plib or not clib:
return False
return plib == clib
def _intel_compiler_compare(
self, pversion: spack.version.ClosedOpenRange, cversion: spack.version.ClosedOpenRange
) -> bool:
"""Returns true iff the intel version pversion and cversion
are ABI compatible"""
# Test major and minor versions. Ignore build version.
pv = pversion.lo
cv = cversion.lo
return pv.up_to(2) == cv.up_to(2)
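The up_to(2) comparison keeps only the major and minor components; a quick illustration with hypothetical versions:

import spack.version as vn

pv, cv = vn.Version("19.0.4"), vn.Version("19.0.2")
print(pv.up_to(2))                 # 19.0
print(pv.up_to(2) == cv.up_to(2))  # True: the build component is ignored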
def compiler_compatible(
self, parent: spack.spec.Spec, child: spack.spec.Spec, loose: bool = False
) -> bool:
"""Return true if compilers for parent and child are ABI compatible."""
if not parent.compiler or not child.compiler:
return True
if parent.compiler.name != child.compiler.name:
# Different compiler families are assumed ABI incompatible
return False
if loose:
return True
# TODO: Can we move the specialized ABI matching stuff
# TODO: into compiler classes?
for pversion in parent.compiler.versions:
for cversion in child.compiler.versions:
# For a few compilers use specialized comparisons.
# Otherwise match on version match.
if pversion.intersects(cversion):
return True
elif parent.compiler.name == "gcc" and self._gcc_compiler_compare(
pversion, cversion
):
return True
elif parent.compiler.name == "intel" and self._intel_compiler_compare(
pversion, cversion
):
return True
return False
def compatible(
self, target: spack.spec.Spec, constraint: spack.spec.Spec, loose: bool = False
) -> bool:
"""Returns true if target spec is ABI compatible to constraint spec"""
return self.architecture_compatible(target, constraint) and self.compiler_compatible(
target, constraint, loose=loose
)

View File

@@ -39,9 +39,9 @@ def _search_duplicate_compilers(error_cls):
import collections
import collections.abc
import glob
import inspect
import io
import itertools
import os
import pathlib
import pickle
import re
@@ -210,11 +210,6 @@ def _search_duplicate_compilers(error_cls):
group="configs", tag="CFG-PACKAGES", description="Sanity checks on packages.yaml", kwargs=()
)
#: Sanity checks on packages.yaml
config_repos = AuditClass(
group="configs", tag="CFG-REPOS", description="Sanity checks on repositories", kwargs=()
)
@config_packages
def _search_duplicate_specs_in_externals(error_cls):
@@ -356,43 +351,6 @@ def _wrongly_named_spec(error_cls):
return errors
@config_packages
def _ensure_all_virtual_packages_have_default_providers(error_cls):
"""All virtual packages must have a default provider explicitly set."""
configuration = spack.config.create()
defaults = configuration.get("packages", scope="defaults")
default_providers = defaults["all"]["providers"]
virtuals = spack.repo.PATH.provider_index.providers
default_providers_filename = configuration.scopes["defaults"].get_section_filename("packages")
return [
error_cls(f"'{virtual}' must have a default provider in {default_providers_filename}", [])
for virtual in virtuals
if virtual not in default_providers
]
@config_repos
def _ensure_no_folders_without_package_py(error_cls):
"""Check that we don't leave any folder without a package.py in repos"""
errors = []
for repository in spack.repo.PATH.repos:
missing = []
for entry in os.scandir(repository.packages_path):
if not entry.is_dir():
continue
package_py = pathlib.Path(entry.path) / spack.repo.package_file_name
if not package_py.exists():
missing.append(entry.path)
if missing:
summary = (
f"The '{repository.namespace}' repository misses a package.py file"
f" in the following folders"
)
errors.append(error_cls(summary=summary, details=[f"{x}" for x in missing]))
return errors
def _make_config_error(config_data, summary, error_cls):
s = io.StringIO()
s.write("Occurring in the following file:\n")
@@ -524,7 +482,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
name_definitions = collections.defaultdict(list)
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
for cls_item in pkg_cls.__mro__:
for cls_item in inspect.getmro(pkg_cls):
for name in RESERVED_NAMES:
current_value = cls_item.__dict__.get(name)
if current_value is None:
@@ -553,7 +511,7 @@ def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
badname_regex, errors = re.compile(r"[_A-Z]"), []
for pkg_name in pkgs:
if badname_regex.search(pkg_name):
error_msg = f"Package name '{pkg_name}' should be lowercase and must not contain '_'"
error_msg = "Package name '{}' is either lowercase or conatine '_'".format(pkg_name)
errors.append(error_cls(error_msg, []))
return errors
@@ -833,7 +791,7 @@ def check_virtual_with_variants(spec, msg):
return
error = error_cls(
f"{pkg_name}: {msg}",
[f"remove variants from '{spec}' in depends_on directive in {filename}"],
f"remove variants from '{spec}' in depends_on directive in {filename}",
)
errors.append(error)

File diff suppressed because it is too large

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common basic functions used through the spack.bootstrap package"""
import fnmatch
import importlib
import os.path
import re
import sys
@@ -29,7 +28,7 @@
def _python_import(module: str) -> bool:
try:
importlib.import_module(module)
__import__(module)
except ImportError:
return False
return True

View File

@@ -1,154 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap concrete specs for clingo
Spack uses clingo to concretize specs. When clingo itself needs to be bootstrapped from sources,
we need to rely on another mechanism to get a concrete spec that fits the current host.
This module contains the logic to get a concrete spec for clingo, starting from a prototype
JSON file for a similar platform.
"""
import pathlib
import sys
from typing import Dict, Optional, Tuple
import archspec.cpu
import spack.compiler
import spack.compilers
import spack.platforms
import spack.spec
import spack.traverse
from .config import spec_for_current_python
class ClingoBootstrapConcretizer:
def __init__(self, configuration):
self.host_platform = spack.platforms.host()
self.host_os = self.host_platform.operating_system("frontend")
self.host_target = archspec.cpu.host().family
self.host_architecture = spack.spec.ArchSpec.frontend_arch()
self.host_architecture.target = str(self.host_target)
self.host_compiler = self._valid_compiler_or_raise()
self.host_python = self.python_external_spec()
if str(self.host_platform) == "linux":
self.host_libc = self.libc_external_spec()
self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)
def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
if str(self.host_platform) == "linux":
compiler_name = "gcc"
elif str(self.host_platform) == "darwin":
compiler_name = "apple-clang"
elif str(self.host_platform) == "windows":
compiler_name = "msvc"
elif str(self.host_platform) == "freebsd":
compiler_name = "clang"
else:
raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
candidates = spack.compilers.compilers_for_spec(
compiler_name, arch_spec=self.host_architecture
)
if not candidates:
raise RuntimeError(
f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
)
candidates.sort(key=lambda x: x.spec.version, reverse=True)
return candidates[0]
def _externals_from_yaml(
self, configuration: "spack.config.Configuration"
) -> Tuple[Optional["spack.spec.Spec"], Optional["spack.spec.Spec"]]:
packages_yaml = configuration.get("packages")
requirements = {"cmake": "@3.20:", "bison": "@2.5:"}
selected: Dict[str, Optional["spack.spec.Spec"]] = {"cmake": None, "bison": None}
for pkg_name in ["cmake", "bison"]:
if pkg_name not in packages_yaml:
continue
candidates = packages_yaml[pkg_name].get("externals", [])
for candidate in candidates:
s = spack.spec.Spec(candidate["spec"], external_path=candidate["prefix"])
if not s.satisfies(requirements[pkg_name]):
continue
if not s.intersects(f"%{self.host_compiler.spec}"):
continue
if not s.intersects(f"arch={self.host_architecture}"):
continue
selected[pkg_name] = self._external_spec(s)
break
return selected["cmake"], selected["bison"]
def prototype_path(self) -> pathlib.Path:
"""Path to a prototype concrete specfile for clingo"""
parent_dir = pathlib.Path(__file__).parent
result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-{self.host_target}.json"
if str(self.host_platform) == "linux":
# Using aarch64 as a fallback, since it has gnuconfig (x86_64 doesn't have it)
if not result.exists():
result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-aarch64.json"
elif str(self.host_platform) == "freebsd":
result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-amd64.json"
elif not result.exists():
raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
return result
def concretize(self) -> "spack.spec.Spec":
# Read the prototype and mark it NOT concrete
s = spack.spec.Spec.from_specfile(str(self.prototype_path()))
s._mark_concrete(False)
# Tweak it to conform to the host architecture
for node in s.traverse():
node.architecture.os = str(self.host_os)
node.compiler = self.host_compiler.spec
node.architecture = self.host_architecture
if node.name == "gcc-runtime":
node.versions = self.host_compiler.spec.versions
for edge in spack.traverse.traverse_edges([s], cover="edges"):
if edge.spec.name == "python":
edge.spec = self.host_python
if edge.spec.name == "bison" and self.external_bison:
edge.spec = self.external_bison
if edge.spec.name == "cmake" and self.external_cmake:
edge.spec = self.external_cmake
if "libc" in edge.virtuals:
edge.spec = self.host_libc
s._finalize_concretization()
# Work around the fact that the installer calls Spec.dependents() and
# we modified edges inconsistently
return s.copy()
def python_external_spec(self) -> "spack.spec.Spec":
"""Python external spec corresponding to the current running interpreter"""
result = spack.spec.Spec(spec_for_current_python(), external_path=sys.exec_prefix)
return self._external_spec(result)
def libc_external_spec(self) -> "spack.spec.Spec":
result = self.host_compiler.default_libc
return self._external_spec(result)
def _external_spec(self, initial_spec) -> "spack.spec.Spec":
initial_spec.namespace = "builtin"
initial_spec.compiler = self.host_compiler.spec
initial_spec.architecture = self.host_architecture
for flag_type in spack.spec.FlagMap.valid_compiler_flags():
initial_spec.compiler_flags[flag_type] = []
return spack.spec.parse_with_version_concrete(initial_spec)

View File

@@ -129,10 +129,10 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
for name, path in configuration_paths:
platform = spack.platforms.host().name
platform_scope = spack.config.DirectoryConfigScope(
f"{name}/{platform}", os.path.join(path, platform)
platform_scope = spack.config.ConfigScope(
"/".join([name, platform]), os.path.join(path, platform)
)
generic_scope = spack.config.DirectoryConfigScope(name, path)
generic_scope = spack.config.ConfigScope(name, path)
config_scopes.extend([generic_scope, platform_scope])
msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}"
tty.debug(msg.format(generic_scope.name, generic_scope.path))
@@ -143,7 +143,11 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None:
arch = spack.spec.ArchSpec.frontend_arch()
if not spack.compilers.compilers_for_arch(arch):
spack.compilers.find_compilers()
new_compilers = spack.compilers.find_new_compilers(
mixed_toolchain=sys.platform == "darwin"
)
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers)
@contextlib.contextmanager
@@ -152,7 +156,7 @@ def _ensure_bootstrap_configuration() -> Generator:
bootstrap_store_path = store_path()
user_configuration = _read_and_sanitize_configuration()
with spack.environment.no_active_environment():
with spack.platforms.use_platform(
with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform(
spack.platforms.real_host()
), spack.repo.use_repositories(spack.paths.packages_path):
# Default configuration scopes excluding command line

View File

@@ -54,7 +54,6 @@
import spack.version
from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
from .clingo import ClingoBootstrapConcretizer
from .config import spack_python_interpreter, spec_for_current_python
#: Name of the file containing metadata about the bootstrapping source
@@ -269,13 +268,15 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
# Try to build and install from sources
with spack_python_interpreter():
# Add hint to use frontend operating system on Cray
concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
if module == "clingo":
bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
concrete_spec = bootstrapper.concretize()
else:
concrete_spec = spack.spec.Spec(
abstract_spec_str + " ^" + spec_for_current_python()
# TODO: remove when the old concretizer is deprecated # pylint: disable=fixme
concrete_spec._old_concretize( # pylint: disable=protected-access
deprecation_warning=False
)
else:
concrete_spec.concretize()
msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
@@ -302,7 +303,14 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bo
# might reduce compilation time by a fair amount
_add_externals_if_missing()
concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
concrete_spec = spack.spec.Spec(abstract_spec_str)
if concrete_spec.name == "patchelf":
concrete_spec._old_concretize( # pylint: disable=protected-access
deprecation_warning=False
)
else:
concrete_spec.concretize()
msg = "[BOOTSTRAP] Try installing '{0}' from sources"
tty.debug(msg.format(abstract_spec_str))
with spack.config.override(self.mirror_scope):

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -124,7 +124,7 @@ def _development_requirements() -> List[RequiredResponseType]:
# Ensure we trigger environment modifications if we have an environment
if BootstrapEnvironment.spack_yaml().exists():
with BootstrapEnvironment() as env:
env.load()
env.update_syspath_and_environ()
return [
_required_executable(

View File

@@ -72,7 +72,6 @@
import spack.store
import spack.subprocess_context
import spack.user_environment
import spack.util.executable
import spack.util.path
import spack.util.pattern
from spack import traverse
@@ -457,12 +456,12 @@ def set_wrapper_variables(pkg, env):
env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)
# Find ccache binary and hand it to build environment
if spack.config.get("config:ccache"):
# Enable ccache in the compiler wrapper
env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
else:
# Avoid cache pollution if a build system forces `ccache <compiler wrapper invocation>`.
env.set("CCACHE_DISABLE", "1")
ccache = Executable("ccache")
if not ccache:
raise RuntimeError("No ccache binary found in PATH")
env.set(SPACK_CCACHE_BINARY, ccache)
# Gather information about various types of dependencies
link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
@@ -741,9 +740,7 @@ def get_rpaths(pkg):
# Second module is our compiler mod name. We use that to get rpaths from
# module show output.
if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
mod_rpath = path_from_modules([pkg.compiler.modules[1]])
if mod_rpath:
rpaths.append(mod_rpath)
rpaths.append(path_from_modules([pkg.compiler.modules[1]]))
return list(dedupe(filter_system_paths(rpaths)))
@@ -1476,7 +1473,7 @@ def long_message(self):
out.write(" {0}\n".format(self.log_name))
# Also output the test log path IF it exists
if self.context != "test" and have_log:
if self.context != "test":
test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
if os.path.isfile(test_log):
out.write("\nSee test log for details:\n")
@@ -1559,7 +1556,7 @@ def __init__(self, package):
#: Modules for the classes in the MRO up to PackageBase
modules_in_mro = []
for cls in type(package).__mro__:
for cls in inspect.getmro(type(package)):
module = cls.module
if module == self.current_module:

View File

@@ -162,9 +162,7 @@ def initconfig_compiler_entries(self):
ld_flags = " ".join(flags["ldflags"])
ld_format_string = "CMAKE_{0}_LINKER_FLAGS"
# CMake has separate linker arguments for types of builds.
# 'ldflags' should not be used with CMAKE_STATIC_LINKER_FLAGS which
# is used by the archiver, so don't include "STATIC" in this loop:
for ld_type in ["EXE", "MODULE", "SHARED"]:
for ld_type in ["EXE", "MODULE", "SHARED", "STATIC"]:
ld_string = ld_format_string.format(ld_type)
entries.append(cmake_cache_string(ld_string, ld_flags))

View File

@@ -108,11 +108,6 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
if _supports_compilation_databases(pkg):
args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
# Enable MACOSX_RPATH by default when cmake_minimum_required < 3
# https://cmake.org/cmake/help/latest/policy/CMP0042.html
if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))
def generator(*names: str, default: Optional[str] = None):
"""The build system generator to use.

View File

@@ -124,8 +124,6 @@ def cuda_flags(arch_list):
# minimum supported versions
conflicts("%gcc@:4", when="+cuda ^cuda@11.0:")
conflicts("%gcc@:5", when="+cuda ^cuda@11.4:")
conflicts("%gcc@:7.2", when="+cuda ^cuda@12.4:")
conflicts("%clang@:6", when="+cuda ^cuda@12.2:")
# maximum supported version
# NOTE:
@@ -138,14 +136,14 @@ def cuda_flags(arch_list):
conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
conflicts("%gcc@14:", when="+cuda ^cuda@:12.4")
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
conflicts("%clang@18:", when="+cuda ^cuda@:12.6")
conflicts("%clang@18:", when="+cuda ^cuda@:12.4")
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
@@ -213,16 +211,12 @@ def cuda_flags(arch_list):
conflicts("%intel@19.0:", when="+cuda ^cuda@:10.0")
conflicts("%intel@19.1:", when="+cuda ^cuda@:10.1")
conflicts("%intel@19.2:", when="+cuda ^cuda@:11.1.0")
conflicts("%intel@2021:", when="+cuda ^cuda@:11.4.0")
# XL is mostly relevant for ppc64le Linux
conflicts("%xl@:12,14:", when="+cuda ^cuda@:9.1")
conflicts("%xl@:12,14:15,17:", when="+cuda ^cuda@9.2")
conflicts("%xl@:12,17:", when="+cuda ^cuda@:11.1.0")
# PowerPC.
conflicts("target=ppc64le", when="+cuda ^cuda@12.5:")
# Darwin.
# TODO: add missing conflicts for %apple-clang cuda@:10
conflicts("platform=darwin", when="+cuda ^cuda@11.0.2:")
conflicts("platform=darwin", when="+cuda ^cuda@11.0.2: ")

View File

@@ -72,7 +72,7 @@ def build_directory(self):
def build_args(self):
"""Arguments for ``go build``."""
# Pass ldflags -s = --strip-all and -w = --no-warnings by default
return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
return ["-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
@property
def check_args(self):

View File

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common utilities for managing intel oneapi packages."""
import getpass
import os
import platform
import shutil
@@ -12,7 +13,6 @@
from llnl.util.filesystem import HeaderList, LibraryList, find_libraries, join_path, mkdirp
from llnl.util.link_tree import LinkTree
import spack.util.path
from spack.build_environment import dso_suffix
from spack.directives import conflicts, license, redistribute, variant
from spack.package_base import InstallError
@@ -99,7 +99,7 @@ def install_component(self, installer_path):
# with other install depends on the userid. For root, we
# delete the installercache before and after install. For
# non root we redefine the HOME environment variable.
if spack.util.path.get_user() == "root":
if getpass.getuser() == "root":
shutil.rmtree("/var/intel/installercache", ignore_errors=True)
bash = Executable("bash")
@@ -122,7 +122,7 @@ def install_component(self, installer_path):
self.prefix,
)
if spack.util.path.get_user() == "root":
if getpass.getuser() == "root":
shutil.rmtree("/var/intel/installercache", ignore_errors=True)
# Some installers have a bug and do not return an error code when failing

View File

@@ -17,7 +17,7 @@
import llnl.util.filesystem as fs
import llnl.util.lang as lang
import llnl.util.tty as tty
from llnl.util.filesystem import HeaderList, LibraryList, join_path
from llnl.util.filesystem import HeaderList, LibraryList
import spack.builder
import spack.config
@@ -120,12 +120,6 @@ def skip_modules(self) -> Iterable[str]:
"""
return []
@property
def bindir(self) -> str:
"""Path to Python package's bindir, bin on unix like OS's Scripts on Windows"""
windows = self.spec.satisfies("platform=windows")
return join_path(self.spec.prefix, "Scripts" if windows else "bin")
def view_file_conflicts(self, view, merge_map):
"""Report all file conflicts, excepting special cases for python.
Specifically, this does not report errors for duplicate

View File

@@ -139,10 +139,6 @@ def configure(self, pkg, spec, prefix):
args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
args.extend(self.configure_args())
# https://github.com/Python-SIP/sip/commit/cb0be6cb6e9b756b8b0db3136efb014f6fb9b766
if spec["py-sip"].satisfies("@6.1.0:"):
args.extend(["--scripts-dir", pkg.prefix.bin])
sip_build = Executable(spec["py-sip"].prefix.bin.join("sip-build"))
sip_build(*args)

View File

@@ -12,7 +12,6 @@
from llnl.util import lang
import spack.build_environment
import spack.multimethod
#: Builder classes, as registered by the "builder" decorator
BUILDER_CLS = {}
@@ -296,11 +295,7 @@ def _decorator(fn):
return _decorator
class BuilderMeta(
PhaseCallbacksMeta,
spack.multimethod.MultiMethodMeta,
type(collections.abc.Sequence), # type: ignore
):
class BuilderMeta(PhaseCallbacksMeta, type(collections.abc.Sequence)): # type: ignore
pass

View File

@@ -9,11 +9,11 @@
import llnl.util.lang
from llnl.util.filesystem import mkdirp
from llnl.util.symlink import symlink
import spack.config
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.paths
import spack.util.file_cache
import spack.util.path
@@ -34,8 +34,6 @@ def _misc_cache():
return spack.util.file_cache.FileCache(path)
FileCacheType = Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton]
#: Spack's cache for small data
MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
llnl.util.lang.Singleton(_misc_cache)
@@ -74,6 +72,23 @@ def store(self, fetcher, relative_dest):
mkdirp(os.path.dirname(dst))
fetcher.archive(dst)
def symlink(self, mirror_ref):
"""Symlink a human readible path in our mirror to the actual
storage location."""
cosmetic_path = os.path.join(self.root, mirror_ref.cosmetic_path)
storage_path = os.path.join(self.root, mirror_ref.storage_path)
relative_dst = os.path.relpath(storage_path, start=os.path.dirname(cosmetic_path))
if not os.path.exists(cosmetic_path):
if os.path.lexists(cosmetic_path):
# In this case the link itself exists but it is broken: remove
# it and recreate it (in order to fix any symlinks broken prior
# to https://github.com/spack/spack/pull/13908)
os.unlink(cosmetic_path)
mkdirp(os.path.dirname(cosmetic_path))
symlink(relative_dst, cosmetic_path)
#: Spack's local cache for downloaded source archives
FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (

View File

@@ -22,8 +22,6 @@
from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener
import ruamel.yaml
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import memoized
@@ -38,7 +36,6 @@
import spack.paths
import spack.repo
import spack.spec
import spack.stage
import spack.util.git
import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml
@@ -72,7 +69,7 @@
# TODO: Remove this in Spack 0.23
SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
JOB_NAME_FORMAT = (
"{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{ arch=architecture}"
"{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{arch=architecture}"
)
IS_WINDOWS = sys.platform == "win32"
spack_gpg = spack.main.SpackCommand("gpg")
@@ -554,9 +551,10 @@ def generate_gitlab_ci_yaml(
env,
print_summary,
output_file,
*,
prune_dag=False,
check_index_only=False,
run_optimizer=False,
use_dependencies=False,
artifacts_root=None,
remote_mirror_override=None,
):
@@ -577,6 +575,12 @@ def generate_gitlab_ci_yaml(
this mode results in faster yaml generation time). Otherwise, also
check each spec directly by url (useful if there is no index or it
might be out of date).
run_optimizer (bool): If True, post-process the generated yaml to try
to reduce the size (attempts to collect repeated configuration
and replace with definitions).
use_dependencies (bool): If true, use "dependencies" rather than "needs"
("needs" allows DAG scheduling). Useful if gitlab instance cannot
be configured to handle more than a few "needs" per job.
artifacts_root (str): Path where artifacts like logs, environment
files (spack.yaml, spack.lock), etc should be written. GitLab
requires this to be within the project directory.
@@ -810,8 +814,7 @@ def ensure_expected_target_path(path):
cli_scopes = [
os.path.relpath(s.path, concrete_env_dir)
for s in cfg.scopes().values()
if not s.writable
and isinstance(s, (cfg.DirectoryConfigScope))
if isinstance(s, cfg.ImmutableConfigScope)
and s.path not in env_includes
and os.path.exists(s.path)
]
@@ -1108,10 +1111,9 @@ def main_script_replacements(cmd):
if cdash_handler and cdash_handler.auth_token:
try:
cdash_handler.populate_buildgroup(all_job_names)
except (SpackError, HTTPError, URLError, TimeoutError) as err:
except (SpackError, HTTPError, URLError) as err:
tty.warn(f"Problem populating buildgroup: {err}")
elif cdash_config:
# warn only if there was actually a CDash configuration.
else:
tty.warn("Unable to populate buildgroup without CDash credentials")
service_job_retries = {
@@ -1269,6 +1271,17 @@ def main_script_replacements(cmd):
with open(copy_specs_file, "w") as fd:
fd.write(json.dumps(buildcache_copies))
# TODO(opadron): remove this or refactor
if run_optimizer:
import spack.ci_optimization as ci_opt
output_object = ci_opt.optimizer(output_object)
# TODO(opadron): remove this or refactor
if use_dependencies:
import spack.ci_needs_workaround as cinw
output_object = cinw.needs_to_dependencies(output_object)
else:
# No jobs were generated
noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
@@ -1297,11 +1310,8 @@ def main_script_replacements(cmd):
if not rebuild_everything:
sys.exit(1)
# Minimize yaml output size through use of anchors
syaml.anchorify(sorted_output)
with open(output_file, "w") as f:
ruamel.yaml.YAML().dump(sorted_output, f)
with open(output_file, "w") as outf:
outf.write(syaml.dump(sorted_output, default_flow_style=True))
def _url_encode_string(input_string):
@@ -1372,6 +1382,15 @@ def can_verify_binaries():
return len(gpg_util.public_keys()) >= 1
def _push_to_build_cache(spec: spack.spec.Spec, sign_binaries: bool, mirror_url: str) -> None:
"""Unchecked version of the public API, for easier mocking"""
bindist.push_or_raise(
spec,
spack.mirror.Mirror.from_url(mirror_url).push_url,
bindist.PushOptions(force=True, unsigned=not sign_binaries),
)
def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: bool) -> bool:
"""Push one or more binary packages to the mirror.
@@ -1382,15 +1401,20 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: b
sign_binaries: If True, spack will attempt to sign binary package before pushing.
"""
tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
signing_key = bindist.select_signing_key() if sign_binaries else None
mirror = spack.mirror.Mirror.from_url(mirror_url)
try:
with bindist.make_uploader(mirror, signing_key=signing_key) as uploader:
uploader.push_or_raise([spec])
_push_to_build_cache(spec, sign_binaries, mirror_url)
return True
except bindist.PushToBuildCacheError as e:
tty.error(f"Problem writing to {mirror_url}: {e}")
tty.error(str(e))
return False
except Exception as e:
# TODO (zackgalbreath): write an adapter for boto3 exceptions so we can catch a specific
# exception instead of parsing str(e)...
msg = str(e)
if any(x in msg for x in ["Access Denied", "InvalidAccessKeyId"]):
tty.error(f"Permission problem writing to {mirror_url}: {msg}")
return False
raise
def remove_other_mirrors(mirrors_to_keep, scope=None):
@@ -1436,6 +1460,10 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
job_log_dir: path into which build log should be copied
"""
tty.debug(f"job spec: {job_spec}")
if not job_spec:
msg = f"Cannot copy stage logs: job spec ({job_spec}) is required"
tty.error(msg)
return
try:
pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
@@ -2067,7 +2095,7 @@ def read_broken_spec(broken_spec_url):
"""
try:
_, _, fs = web_util.read_from_url(broken_spec_url)
except web_util.SpackWebError:
except (URLError, web_util.SpackWebError, HTTPError):
tty.warn(f"Unable to read broken spec from {broken_spec_url}")
return None

View File

@@ -0,0 +1,34 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc
get_job_name = lambda needs_entry: (
needs_entry.get("job")
if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
else needs_entry if isinstance(needs_entry, str) else None
)
def convert_job(job_entry):
if not isinstance(job_entry, collections.abc.Mapping):
return job_entry
needs = job_entry.get("needs")
if needs is None:
return job_entry
new_job = {}
new_job.update(job_entry)
del new_job["needs"]
new_job["dependencies"] = list(
filter((lambda x: x is not None), (get_job_name(needs_entry) for needs_entry in needs))
)
return new_job
def needs_to_dependencies(yaml):
return dict((k, convert_job(v)) for k, v in yaml.items())
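For illustration, the transformation applied to a hypothetical job mapping:

ci_yaml = {
    "build": {
        "script": ["make"],
        "needs": [{"job": "setup", "artifacts": True}, "lint"],
    }
}
print(needs_to_dependencies(ci_yaml))
# {'build': {'script': ['make'], 'dependencies': ['setup', 'lint']}}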

View File

@@ -0,0 +1,363 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import collections.abc
import copy
import hashlib
import spack.util.spack_yaml as syaml
def sort_yaml_obj(obj):
if isinstance(obj, collections.abc.Mapping):
return syaml.syaml_dict(
(k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0])))
)
if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
return syaml.syaml_list(sort_yaml_obj(x) for x in obj)
return obj
def matches(obj, proto):
"""Returns True if the test object "obj" matches the prototype object
"proto".
If obj and proto are mappings, obj matches proto if (key in obj) and
(obj[key] matches proto[key]) for every key in proto.
If obj and proto are sequences, obj matches proto if they are of the same
length and (a matches b) for every (a,b) in zip(obj, proto).
Otherwise, obj matches proto if obj == proto.
Precondition: proto must not have any reference cycles
"""
if isinstance(obj, collections.abc.Mapping):
if not isinstance(proto, collections.abc.Mapping):
return False
return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
return False
if len(obj) != len(proto):
return False
return all(matches(obj[index], val) for index, val in enumerate(proto))
return obj == proto
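A few quick cases following the docstring above:

assert matches({"a": 1, "b": [2, 3]}, {"a": 1})   # proto keys are a subset
assert not matches({"a": 1}, {"a": 1, "c": 0})    # "c" missing from obj
assert matches([1, 2], [1, 2])                    # same length, elementwise
assert not matches([1, 2], [1, 2, 3])             # length mismatch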
def subkeys(obj, proto):
"""Returns the test mapping "obj" after factoring out the items it has in
common with the prototype mapping "proto".
Consider a recursive merge operation, merge(a, b) on mappings a and b, that
returns a mapping, m, whose keys are the union of the keys of a and b, and
for every such key, "k", its corresponding value is:
- merge(a[key], b[key]) if a[key] and b[key] are mappings, or
- b[key] if (key in b) and not matches(a[key], b[key]),
or
- a[key] otherwise
If obj and proto are mappings, the returned object is the smallest object,
"a", such that merge(a, proto) matches obj.
Otherwise, obj is returned.
"""
if not (
isinstance(obj, collections.abc.Mapping) and isinstance(proto, collections.abc.Mapping)
):
return obj
new_obj = {}
for key, value in obj.items():
if key not in proto:
new_obj[key] = value
continue
if matches(value, proto[key]) and matches(proto[key], value):
continue
if isinstance(value, collections.abc.Mapping):
new_obj[key] = subkeys(value, proto[key])
continue
new_obj[key] = value
return new_obj
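A small example of the factoring, with hypothetical job fields:

obj = {"tags": ["x86"], "script": ["make"], "stage": "build"}
proto = {"tags": ["x86"], "script": ["make"]}
print(subkeys(obj, proto))  # {'stage': 'build'}: shared items factored out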
def add_extends(yaml, key):
"""Modifies the given object "yaml" so that it includes an "extends" key
whose value features "key".
If "extends" is not in yaml, then yaml is modified such that
yaml["extends"] == key.
If yaml["extends"] is a str, then yaml is modified such that
yaml["extends"] == [yaml["extends"], key]
If yaml["extends"] is a list that does not include key, then key is
appended to the list.
Otherwise, yaml is left unchanged.
"""
has_key = "extends" in yaml
extends = yaml.get("extends")
if has_key and not isinstance(extends, (str, collections.abc.Sequence)):
return
if extends is None:
yaml["extends"] = key
return
if isinstance(extends, str):
if extends != key:
yaml["extends"] = [extends, key]
return
if key not in extends:
extends.append(key)
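How the "extends" key accumulates across calls, using hypothetical keys:

job = {"script": ["make"]}
add_extends(job, ".common")
print(job["extends"])  # ".common"
add_extends(job, ".tags")
print(job["extends"])  # [".common", ".tags"]
add_extends(job, ".tags")
print(job["extends"])  # unchanged: already present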
def common_subobject(yaml, sub):
"""Factor prototype object "sub" out of the values of mapping "yaml".
Consider a modified copy of yaml, "new", where for each key, "key" in yaml:
- If yaml[key] matches sub, then new[key] = subkeys(yaml[key], sub).
- Otherwise, new[key] = yaml[key].
If the above match criteria are not satisfied for any such key, then (yaml,
None) is returned. The yaml object is returned unchanged.
Otherwise, each matching value in new is modified as in
add_extends(new[key], common_key), and then new[common_key] is set to sub.
The common_key value is chosen such that it does not match any preexisting
key in new. In this case, (new, common_key) is returned.
"""
match_list = set(k for k, v in yaml.items() if matches(v, sub))
if not match_list:
return yaml, None
common_prefix = ".c"
common_index = 0
while True:
common_key = "".join((common_prefix, str(common_index)))
if common_key not in yaml:
break
common_index += 1
new_yaml = {}
for key, val in yaml.items():
new_yaml[key] = copy.deepcopy(val)
if not matches(val, sub):
continue
new_yaml[key] = subkeys(new_yaml[key], sub)
add_extends(new_yaml[key], common_key)
new_yaml[common_key] = sub
return new_yaml, common_key
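A sketch with two hypothetical jobs sharing a script:

yaml = {
    "job-a": {"script": ["make"], "tags": ["x86"]},
    "job-b": {"script": ["make"], "tags": ["arm"]},
}
new_yaml, key = common_subobject(yaml, {"script": ["make"]})
print(key)                # ".c0"
print(new_yaml["job-a"])  # {'tags': ['x86'], 'extends': '.c0'}
print(new_yaml[".c0"])    # {'script': ['make']}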
def print_delta(name, old, new, applied=None):
delta = new - old
reldelta = (1000 * delta) // old
reldelta = (reldelta // 10, reldelta % 10)
if applied is None:
applied = new <= old
print(
"\n".join(
(
"{0} {1}:",
" before: {2: 10d}",
" after : {3: 10d}",
" delta : {4:+10d} ({5:=+3d}.{6}%)",
)
).format(name, ("+" if applied else "x"), old, new, delta, reldelta[0], reldelta[1])
)
def try_optimization_pass(name, yaml, optimization_pass, *args, **kwargs):
"""Try applying an optimization pass and return information about the
result
"name" is a string describing the nature of the pass. If it is a non-empty
string, summary statistics are also printed to stdout.
"yaml" is the object to apply the pass to.
"optimization_pass" is the function implementing the pass to be applied.
"args" and "kwargs" are the additional arguments to pass to optimization
pass. The pass is applied as
>>> (new_yaml, *other_results) = optimization_pass(yaml, *args, **kwargs)
The pass's results are greedily rejected if it does not modify the original
yaml document, or if it produces a yaml document that serializes to a
larger string.
Returns (new_yaml, yaml, applied, other_results) if applied, or
(yaml, new_yaml, applied, other_results) otherwise.
"""
result = optimization_pass(yaml, *args, **kwargs)
new_yaml, other_results = result[0], result[1:]
if new_yaml is yaml:
# pass was not applied
return (yaml, new_yaml, False, other_results)
pre_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
post_size = len(syaml.dump_config(sort_yaml_obj(new_yaml), default_flow_style=True))
# pass makes the size worse: not applying
applied = post_size <= pre_size
if applied:
yaml, new_yaml = new_yaml, yaml
if name:
print_delta(name, pre_size, post_size, applied)
return (yaml, new_yaml, applied, other_results)
def build_histogram(iterator, key):
"""Builds a histogram of values given an iterable of mappings and a key.
For each mapping "m" with key "key" in iterator, the value m[key] is
considered.
Returns a list of tuples (hash, count, proportion, value), where
- "hash" is a sha1sum hash of the value.
- "count" is the number of occurences of values that hash to "hash".
- "proportion" is the proportion of all values considered above that
hash to "hash".
- "value" is one of the values considered above that hash to "hash".
Which value is chosen when multiple values hash to the same "hash" is
undefined.
The list is sorted in descending order by count, yielding the most
frequently occurring hashes first.
"""
buckets = collections.defaultdict(int)
values = {}
num_objects = 0
for obj in iterator:
num_objects += 1
try:
val = obj[key]
except (KeyError, TypeError):
continue
value_hash = hashlib.sha1()
value_hash.update(syaml.dump_config(sort_yaml_obj(val)).encode())
value_hash = value_hash.hexdigest()
buckets[value_hash] += 1
values[value_hash] = val
return [
(h, buckets[h], float(buckets[h]) / num_objects, values[h])
for h in sorted(buckets.keys(), key=lambda k: -buckets[k])
]
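A sketch over three hypothetical jobs (the syaml import above means this runs inside a Spack checkout):

jobs = [{"tags": ["x86"]}, {"tags": ["x86"]}, {"tags": ["arm"]}]
for value_hash, count, proportion, value in build_histogram(jobs, "tags"):
    print(count, round(proportion, 2), value)
# 2 0.67 ['x86']
# 1 0.33 ['arm']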
def optimizer(yaml):
original_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
# try factoring out commonly repeated portions
common_job = {
"variables": {"SPACK_COMPILER_ACTION": "NONE"},
"after_script": ['rm -rf "./spack"'],
"artifacts": {"paths": ["jobs_scratch_dir", "cdash_report"], "when": "always"},
}
# look for a list of tags that appear frequently
_, count, proportion, tags = next(iter(build_histogram(yaml.values(), "tags")), (None,) * 4)
# If a list of tags is found, and there are more than one job that uses it,
# *and* the jobs that do use it represent at least 70% of all jobs, then
# add the list to the prototype object.
if tags and count > 1 and proportion >= 0.70:
common_job["tags"] = tags
# apply common object factorization
yaml, other, applied, rest = try_optimization_pass(
"general common object factorization", yaml, common_subobject, common_job
)
# look for a common script, and try factoring that out
_, count, proportion, script = next(
iter(build_histogram(yaml.values(), "script")), (None,) * 4
)
if script and count > 1 and proportion >= 0.70:
yaml, other, applied, rest = try_optimization_pass(
"script factorization", yaml, common_subobject, {"script": script}
)
# look for a common before_script, and try factoring that out
_, count, proportion, script = next(
iter(build_histogram(yaml.values(), "before_script")), (None,) * 4
)
if script and count > 1 and proportion >= 0.70:
yaml, other, applied, rest = try_optimization_pass(
"before_script factorization", yaml, common_subobject, {"before_script": script}
)
# Look specifically for the SPACK_ROOT_SPEC environment variables.
# Try to factor them out.
h = build_histogram(
(getattr(val, "get", lambda *args: {})("variables") for val in yaml.values()),
"SPACK_ROOT_SPEC",
)
# In this case, we try to factor out *all* instances of the SPACK_ROOT_SPEC
# environment variable; not just the one that appears with the greatest
# frequency. We only require that more than 1 job uses a given instance's
# value, because we expect the value to be very large, and so expect even
# few-to-one factorizations to yield large space savings.
counter = 0
for _, count, proportion, spec in h:
if count <= 1:
continue
counter += 1
yaml, other, applied, rest = try_optimization_pass(
"SPACK_ROOT_SPEC factorization ({count})".format(count=counter),
yaml,
common_subobject,
{"variables": {"SPACK_ROOT_SPEC": spec}},
)
new_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
print("\n")
print_delta("overall summary", original_size, new_size)
print("\n")
return yaml

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import importlib
import os
import re
import sys
@@ -115,8 +114,8 @@ def get_module(cmd_name):
try:
# Try to import the command from the built-in directory
module_name = f"{__name__}.{pname}"
module = importlib.import_module(module_name)
module_name = "%s.%s" % (__name__, pname)
module = __import__(module_name, fromlist=[pname, SETUP_PARSER, DESCRIPTION], level=0)
tty.debug("Imported {0} from built-in commands".format(pname))
except ImportError:
module = spack.extensions.get_module(cmd_name)
@@ -238,7 +237,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
if len(matching_specs) <= 1:
return
format_string = "{name}{@version}{%compiler.name}{@compiler.version}{ arch=architecture}"
format_string = "{name}{@version}{%compiler.name}{@compiler.version}{arch=architecture}"
args = ["%s matches multiple packages." % spec, "Matching packages:"]
args += [
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
@@ -337,7 +336,6 @@ def display_specs(specs, args=None, **kwargs):
groups (bool): display specs grouped by arch/compiler (default True)
decorator (typing.Callable): function to call to decorate specs
all_headers (bool): show headers even when arch/compiler aren't defined
status_fn (typing.Callable): if provided, prepend install-status info
output (typing.IO): A file object to write to. Default is ``sys.stdout``
"""
@@ -361,7 +359,6 @@ def get_arg(name, default=None):
groups = get_arg("groups", True)
all_headers = get_arg("all_headers", False)
output = get_arg("output", sys.stdout)
status_fn = get_arg("status_fn", None)
decorator = get_arg("decorator", None)
if decorator is None:
@@ -389,13 +386,6 @@ def get_arg(name, default=None):
def fmt(s, depth=0):
"""Formatter function for all output specs"""
string = ""
if status_fn:
# This was copied from spec.tree's colorization logic
# then shortened because it seems like status_fn should
# always return an InstallStatus
string += colorize(status_fn(s).value)
if hashes:
string += gray_hash(s, hlen) + " "
string += depth * " "
@@ -454,7 +444,7 @@ def format_list(specs):
def filter_loaded_specs(specs):
"""Filter a list of specs returning only those that are
currently loaded."""
hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
return [x for x in specs if x.dag_hash() in hashes]

View File

@@ -165,7 +165,7 @@ def _reset(args):
if not ok_to_continue:
raise RuntimeError("Aborting")
for scope in spack.config.CONFIG.writable_scopes:
for scope in spack.config.CONFIG.file_scopes:
# The default scope should stay untouched
if scope.name == "defaults":
continue

View File

@@ -3,24 +3,28 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import copy
import glob
import hashlib
import json
import multiprocessing
import multiprocessing.pool
import os
import shutil
import sys
import tempfile
from typing import List, Tuple
from typing import Dict, List, Optional, Tuple, Union
import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.lang import elide_list, stable_partition
from llnl.util.lang import elide_list
import spack.binary_distribution as bindist
import spack.cmd
import spack.config
import spack.deptypes as dt
import spack.environment as ev
import spack.error
import spack.hash_types as ht
import spack.mirror
import spack.oci.oci
import spack.oci.opener
@@ -31,12 +35,28 @@
import spack.store
import spack.user_environment
import spack.util.crypto
import spack.util.parallel
import spack.util.url as url_util
import spack.util.web as web_util
from spack import traverse
from spack.build_environment import determine_number_of_jobs
from spack.cmd import display_specs
from spack.cmd.common import arguments
from spack.oci.image import (
Digest,
ImageReference,
default_config,
default_index_tag,
default_manifest,
default_tag,
tag_is_spec,
)
from spack.oci.oci import (
copy_missing_layers_with_retry,
get_manifest_and_config_with_retry,
list_tags,
upload_blob_with_retry,
upload_manifest_with_retry,
)
from spack.spec import Spec, save_dependency_specfiles
description = "create, download and install binary packages"
@@ -50,6 +70,12 @@ def setup_parser(subparser: argparse.ArgumentParser):
push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists")
push.add_argument(
"--allow-root",
"-a",
action="store_true",
help="allow install root string in binary files after RPATH substitution",
)
push_sign = push.add_mutually_exclusive_group(required=False)
push_sign.add_argument(
"--unsigned",
@@ -92,17 +118,6 @@ def setup_parser(subparser: argparse.ArgumentParser):
"Alternatively, one can decide to build a cache for only the package or only the "
"dependencies",
)
with_or_without_build_deps = push.add_mutually_exclusive_group()
with_or_without_build_deps.add_argument(
"--with-build-dependencies",
action="store_true",
help="include build dependencies in the buildcache",
)
with_or_without_build_deps.add_argument(
"--without-build-dependencies",
action="store_true",
help="exclude build dependencies from the buildcache",
)
push.add_argument(
"--fail-fast",
action="store_true",
@@ -175,6 +190,10 @@ def setup_parser(subparser: argparse.ArgumentParser):
keys.add_argument("-f", "--force", action="store_true", help="force new download of keys")
keys.set_defaults(func=keys_fn)
preview = subparsers.add_parser("preview", help=preview_fn.__doc__)
arguments.add_common_arguments(preview, ["installed_specs"])
preview.set_defaults(func=preview_fn)
# Check if binaries need to be rebuilt on remote mirror
check = subparsers.add_parser("check", help=check_fn.__doc__)
check.add_argument(
@@ -320,6 +339,39 @@ def _format_spec(spec: Spec) -> str:
return spec.cformat("{name}{@version}{/hash:7}")
def _progress(i: int, total: int):
if total > 1:
digits = len(str(total))
return f"[{i+1:{digits}}/{total}] "
return ""
class NoPool:
def map(self, func, args):
return [func(a) for a in args]
def starmap(self, func, args):
return [func(*a) for a in args]
def __enter__(self):
return self
def __exit__(self, *args):
pass
MaybePool = Union[multiprocessing.pool.Pool, NoPool]
def _make_pool() -> MaybePool:
"""Can't use threading because it's unsafe, and can't use spawned processes because of globals.
That leaves only forking"""
if multiprocessing.get_start_method() == "fork":
return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True))
else:
return NoPool()
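Both pool types expose the same map/starmap/context-manager surface, so callers need not branch; a minimal sketch:

with _make_pool() as pool:
    lengths = pool.map(len, ["zlib", "cmake", "bison"])
print(lengths)  # [4, 5, 5]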
def _skip_no_redistribute_for_public(specs):
remaining_specs = list()
removed_specs = list()
@@ -339,45 +391,6 @@ def _skip_no_redistribute_for_public(specs):
return remaining_specs
class PackagesAreNotInstalledError(spack.error.SpackError):
"""Raised when a list of specs is not installed but picked to be packaged."""
def __init__(self, specs: List[Spec]):
super().__init__(
"Cannot push non-installed packages",
", ".join(elide_list([_format_spec(s) for s in specs], 5)),
)
class PackageNotInstalledError(spack.error.SpackError):
"""Raised when a spec is not installed but picked to be packaged."""
def _specs_to_be_packaged(
requested: List[Spec], things_to_install: str, build_deps: bool
) -> List[Spec]:
"""Collect all non-external with or without roots and dependencies"""
if "dependencies" not in things_to_install:
deptype = dt.NONE
elif build_deps:
deptype = dt.ALL
else:
deptype = dt.RUN | dt.LINK | dt.TEST
specs = [
s
for s in traverse.traverse_nodes(
requested,
root="package" in things_to_install,
deptype=deptype,
order="breadth",
key=traverse.by_dag_hash,
)
if not s.external
]
specs.reverse()
return specs
def push_fn(args):
"""create a binary package and push it to a mirror"""
if args.spec_file:
@@ -391,8 +404,18 @@ def push_fn(args):
else:
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
mirror = args.mirror
assert isinstance(mirror, spack.mirror.Mirror)
if args.allow_root:
tty.warn(
"The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
)
mirror: spack.mirror.Mirror = args.mirror
# Check if this is an OCI image.
try:
target_image = spack.oci.oci.image_from_mirror(mirror)
except ValueError:
target_image = None
push_url = mirror.push_url
@@ -403,52 +426,92 @@ def push_fn(args):
unsigned = not (args.key or args.signed)
# For OCI images, we require dependencies to be pushed for now.
if mirror.push_url.startswith("oci://") and not unsigned:
tty.warn(
"Code signing is currently not supported for OCI images. "
"Use --unsigned to silence this warning."
)
unsigned = True
if target_image:
if "dependencies" not in args.things_to_install:
tty.die("Dependencies must be pushed for OCI images.")
if not unsigned:
tty.warn(
"Code signing is currently not supported for OCI images. "
"Use --unsigned to silence this warning."
)
# Select a signing key, or None if unsigned.
signing_key = None if unsigned else (args.key or bindist.select_signing_key())
specs = _specs_to_be_packaged(
# This is a list of installed, non-external specs.
specs = bindist.specs_to_be_packaged(
roots,
things_to_install=args.things_to_install,
build_deps=args.with_build_dependencies or not args.without_build_dependencies,
root="package" in args.things_to_install,
dependencies="dependencies" in args.things_to_install,
)
if not args.private:
specs = _skip_no_redistribute_for_public(specs)
# When pushing multiple specs, print the url once ahead of time, as well as how
# many specs are being pushed.
if len(specs) > 1:
tty.info(f"Selected {len(specs)} specs to push to {push_url}")
# Pushing not installed specs is an error. Either fail fast or populate the error list and
# push installed package in best effort mode.
failed: List[Tuple[Spec, BaseException]] = []
with spack.store.STORE.db.read_transaction():
if any(not s.installed for s in specs):
specs, not_installed = stable_partition(specs, lambda s: s.installed)
if args.fail_fast:
raise PackagesAreNotInstalledError(not_installed)
else:
failed.extend(
(s, PackageNotInstalledError("package not installed")) for s in not_installed
failed = []
# TODO: unify this logic in the future.
if target_image:
base_image = ImageReference.from_string(args.base_image) if args.base_image else None
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, _make_pool() as pool:
skipped, base_images, checksums = _push_oci(
target_image=target_image,
base_image=base_image,
installed_specs_with_deps=specs,
force=args.force,
tmpdir=tmpdir,
pool=pool,
)
# Apart from creating manifests for each individual spec, we allow users to create a
# separate image tag for all root specs and their runtime dependencies.
if args.tag:
tagged_image = target_image.with_tag(args.tag)
# _push_oci may not populate base_images if binaries were already in the registry
for spec in roots:
_update_base_images(
base_image=base_image,
target_image=target_image,
spec=spec,
base_image_cache=base_images,
)
_put_manifest(base_images, checksums, tagged_image, tmpdir, None, None, *roots)
tty.info(f"Tagged {tagged_image}")
else:
skipped = []
for i, spec in enumerate(specs):
try:
bindist.push_or_raise(
spec,
push_url,
bindist.PushOptions(
force=args.force,
unsigned=unsigned,
key=args.key,
regenerate_index=args.update_index,
),
)
with bindist.make_uploader(
mirror=mirror,
force=args.force,
update_index=args.update_index,
signing_key=signing_key,
base_image=args.base_image,
) as uploader:
skipped, upload_errors = uploader.push(specs=specs)
failed.extend(upload_errors)
if not upload_errors and args.tag:
uploader.tag(args.tag, roots)
msg = f"{_progress(i, len(specs))}Pushed {_format_spec(spec)}"
if len(specs) == 1:
msg += f" to {push_url}"
tty.info(msg)
except bindist.NoOverwriteException:
skipped.append(_format_spec(spec))
# Catch any other exception unless the fail fast option is set
except Exception as e:
if args.fail_fast or isinstance(
e, (bindist.PickKeyException, bindist.NoKeyException)
):
raise
failed.append((_format_spec(spec), e))
if skipped:
if len(specs) == 1:
@@ -460,7 +523,7 @@ def push_fn(args):
"The following {} specs were skipped as they already exist in the buildcache:\n"
" {}\n"
" Use --force to overwrite them.".format(
len(skipped), ", ".join(elide_list([_format_spec(s) for s in skipped], 5))
len(skipped), ", ".join(elide_list(skipped, 5))
)
)
@@ -471,16 +534,390 @@ def push_fn(args):
raise spack.error.SpackError(
f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
"\n".join(
elide_list(
[
f" {_format_spec(spec)}: {e.__class__.__name__}: {e}"
for spec, e in failed
],
5,
)
elide_list([f" {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
),
)
# Update the index if requested
# TODO: move the update-index logic out of bindist; it should run once after all specs are
# pushed, not once per spec.
if target_image and len(skipped) < len(specs) and args.update_index:
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, _make_pool() as pool:
_update_index_oci(target_image, tmpdir, pool)
def _get_spack_binary_blob(image_ref: ImageReference) -> Optional[spack.oci.oci.Blob]:
"""Get the spack tarball layer digests and size if it exists"""
try:
manifest, config = get_manifest_and_config_with_retry(image_ref)
return spack.oci.oci.Blob(
compressed_digest=Digest.from_string(manifest["layers"][-1]["digest"]),
uncompressed_digest=Digest.from_string(config["rootfs"]["diff_ids"][-1]),
size=manifest["layers"][-1]["size"],
)
except Exception:
return None
def _push_single_spack_binary_blob(image_ref: ImageReference, spec: spack.spec.Spec, tmpdir: str):
filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")
# Create an oci.image.layer aka tarball of the package
compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename)
blob = spack.oci.oci.Blob(
Digest.from_sha256(compressed_tarfile_checksum),
Digest.from_sha256(tarfile_checksum),
os.path.getsize(filename),
)
# Upload the blob
upload_blob_with_retry(image_ref, file=filename, digest=blob.compressed_digest)
# delete the file
os.unlink(filename)
return blob
def _retrieve_env_dict_from_config(config: dict) -> dict:
"""Retrieve the environment variables from the image config file.
Sets a default value for PATH if it is not present.
Args:
config (dict): The image config file.
Returns:
dict: The environment variables.
"""
env = {"PATH": "/bin:/usr/bin"}
if "Env" in config.get("config", {}):
for entry in config["config"]["Env"]:
key, value = entry.split("=", 1)
env[key] = value
return env
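As a quick standalone check of the parsing rule above, entries are split on the first `=` only, so values that themselves contain `=` survive intact (the example entries are made up):

env = {"PATH": "/bin:/usr/bin"}  # same default as above
for entry in ["PATH=/opt/spack/bin", "LDFLAGS=-L/opt/lib -Wl,-rpath=/opt/lib"]:
    key, value = entry.split("=", 1)  # split on the first '=' only
    env[key] = value
print(env["LDFLAGS"])  # -L/opt/lib -Wl,-rpath=/opt/lib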
def _archspec_to_gooarch(spec: spack.spec.Spec) -> str:
name = spec.target.family.name
name_map = {"aarch64": "arm64", "x86_64": "amd64"}
return name_map.get(name, name)
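The mapping follows Go's GOARCH naming, which OCI image configs inherit; a tiny check, with a passthrough family included for illustration:

name_map = {"aarch64": "arm64", "x86_64": "amd64"}
for family in ("aarch64", "x86_64", "ppc64le"):  # ppc64le passes through unchanged
    print(family, "->", name_map.get(family, family))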
def _put_manifest(
base_images: Dict[str, Tuple[dict, dict]],
checksums: Dict[str, spack.oci.oci.Blob],
image_ref: ImageReference,
tmpdir: str,
extra_config: Optional[dict],
annotations: Optional[dict],
*specs: spack.spec.Spec,
):
architecture = _archspec_to_gooarch(specs[0])
dependencies = list(
reversed(
list(
s
for s in traverse.traverse_nodes(
specs, order="topo", deptype=("link", "run"), root=True
)
if not s.external
)
)
)
base_manifest, base_config = base_images[architecture]
env = _retrieve_env_dict_from_config(base_config)
# If the base image uses `vnd.docker.distribution.manifest.v2+json`, then we use that too.
# This is because Singularity / Apptainer is very strict about not mixing them.
base_manifest_mediaType = base_manifest.get(
"mediaType", "application/vnd.oci.image.manifest.v1+json"
)
use_docker_format = (
base_manifest_mediaType == "application/vnd.docker.distribution.manifest.v2+json"
)
spack.user_environment.environment_modifications_for_specs(*specs).apply_modifications(env)
# Create an oci.image.config file
config = copy.deepcopy(base_config)
# Add the diff ids of the dependencies
for s in dependencies:
config["rootfs"]["diff_ids"].append(str(checksums[s.dag_hash()].uncompressed_digest))
# Set the environment variables
config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]
if extra_config:
# From the OCI v1.0 spec:
# > Any extra fields in the Image JSON struct are considered implementation
# > specific and MUST be ignored by any implementations which are unable to
# > interpret them.
config.update(extra_config)
config_file = os.path.join(tmpdir, f"{specs[0].dag_hash()}.config.json")
with open(config_file, "w") as f:
json.dump(config, f, separators=(",", ":"))
config_file_checksum = Digest.from_sha256(
spack.util.crypto.checksum(hashlib.sha256, config_file)
)
# Upload the config file
upload_blob_with_retry(image_ref, file=config_file, digest=config_file_checksum)
manifest = {
"mediaType": base_manifest_mediaType,
"schemaVersion": 2,
"config": {
"mediaType": base_manifest["config"]["mediaType"],
"digest": str(config_file_checksum),
"size": os.path.getsize(config_file),
},
"layers": [
*(layer for layer in base_manifest["layers"]),
*(
{
"mediaType": (
"application/vnd.docker.image.rootfs.diff.tar.gzip"
if use_docker_format
else "application/vnd.oci.image.layer.v1.tar+gzip"
),
"digest": str(checksums[s.dag_hash()].compressed_digest),
"size": checksums[s.dag_hash()].size,
}
for s in dependencies
),
],
}
if not use_docker_format and annotations:
manifest["annotations"] = annotations
# Finally upload the manifest
upload_manifest_with_retry(image_ref, manifest=manifest)
# delete the config file
os.unlink(config_file)
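For reference, a hand-built illustration of the manifest shape _put_manifest uploads for a single spec without a base image; every digest, size, and the description below are placeholder values, not real output:

manifest = {
    "mediaType": "application/vnd.oci.image.manifest.v1+json",
    "schemaVersion": 2,
    "config": {
        "mediaType": "application/vnd.oci.image.config.v1+json",
        "digest": "sha256:" + "0" * 64,  # placeholder config digest
        "size": 1234,
    },
    "layers": [
        {
            # one gzipped tarball layer per (link/run) dependency and the spec itself
            "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
            "digest": "sha256:" + "1" * 64,  # placeholder blob digest
            "size": 56789,
        }
    ],
    "annotations": {"org.opencontainers.image.description": "zlib@1.3.1"},
}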
def _update_base_images(
*,
base_image: Optional[ImageReference],
target_image: ImageReference,
spec: spack.spec.Spec,
base_image_cache: Dict[str, Tuple[dict, dict]],
):
"""For a given spec and base image, copy the missing layers of the base image with matching
arch to the registry of the target image. If no base image is specified, create a dummy
manifest and config file."""
architecture = _archspec_to_gooarch(spec)
if architecture in base_image_cache:
return
if base_image is None:
base_image_cache[architecture] = (
default_manifest(),
default_config(architecture, "linux"),
)
else:
base_image_cache[architecture] = copy_missing_layers_with_retry(
base_image, target_image, architecture
)
def _push_oci(
*,
target_image: ImageReference,
base_image: Optional[ImageReference],
installed_specs_with_deps: List[Spec],
tmpdir: str,
pool: MaybePool,
force: bool = False,
) -> Tuple[List[str], Dict[str, Tuple[dict, dict]], Dict[str, spack.oci.oci.Blob]]:
"""Push specs to an OCI registry
Args:
target_image: The target OCI image
base_image: Optional base image, which will be copied to the target registry.
installed_specs_with_deps: The installed specs to push, excluding externals,
including deps, ordered from roots to leaves.
force: Whether to overwrite existing layers and manifests in the buildcache.
Returns:
A tuple consisting of the list of skipped specs already in the build cache,
a dictionary mapping architectures to base image manifests and configs,
and a dictionary mapping each spec's dag hash to a blob.
"""
# Reverse the order so that dependencies are uploaded before their dependents
installed_specs_with_deps = list(reversed(installed_specs_with_deps))
# Spec dag hash -> blob
checksums: Dict[str, spack.oci.oci.Blob] = {}
# arch -> (manifest, config)
base_images: Dict[str, Tuple[dict, dict]] = {}
# Specs not uploaded because they already exist
skipped = []
if not force:
tty.info("Checking for existing specs in the buildcache")
to_be_uploaded = []
tags_to_check = (target_image.with_tag(default_tag(s)) for s in installed_specs_with_deps)
available_blobs = pool.map(_get_spack_binary_blob, tags_to_check)
for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
if maybe_blob is not None:
checksums[spec.dag_hash()] = maybe_blob
skipped.append(_format_spec(spec))
else:
to_be_uploaded.append(spec)
else:
to_be_uploaded = installed_specs_with_deps
if not to_be_uploaded:
return skipped, base_images, checksums
tty.info(
f"{len(to_be_uploaded)} specs need to be pushed to "
f"{target_image.domain}/{target_image.name}"
)
# Upload blobs
new_blobs = pool.starmap(
_push_single_spack_binary_blob, ((target_image, spec, tmpdir) for spec in to_be_uploaded)
)
# And update the spec to blob mapping
for spec, blob in zip(to_be_uploaded, new_blobs):
checksums[spec.dag_hash()] = blob
# Copy base images if necessary
for spec in to_be_uploaded:
_update_base_images(
base_image=base_image,
target_image=target_image,
spec=spec,
base_image_cache=base_images,
)
def extra_config(spec: Spec):
spec_dict = spec.to_dict(hash=ht.dag_hash)
spec_dict["buildcache_layout_version"] = 1
spec_dict["binary_cache_checksum"] = {
"hash_algorithm": "sha256",
"hash": checksums[spec.dag_hash()].compressed_digest.digest,
}
return spec_dict
# Upload manifests
tty.info("Uploading manifests")
pool.starmap(
_put_manifest,
(
(
base_images,
checksums,
target_image.with_tag(default_tag(spec)),
tmpdir,
extra_config(spec),
{"org.opencontainers.image.description": spec.format()},
spec,
)
for spec in to_be_uploaded
),
)
# Print the image names of the top-level specs
for spec in to_be_uploaded:
tty.info(f"Pushed {_format_spec(spec)} to {target_image.with_tag(default_tag(spec))}")
return skipped, base_images, checksums
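The skip/upload partition above can be read in isolation; here is a sketch with a thread pool standing in for MaybePool and a fake registry lookup (all names and data are made up):

from concurrent.futures import ThreadPoolExecutor

already_in_registry = {"hwloc"}

def lookup_blob(spec):  # stand-in for _get_spack_binary_blob
    return f"blob-of-{spec}" if spec in already_in_registry else None

specs = ["hwloc", "mpi", "app"]
with ThreadPoolExecutor() as pool:
    blobs = list(pool.map(lookup_blob, specs))

checksums, skipped, to_be_uploaded = {}, [], []
for spec, blob in zip(specs, blobs):
    if blob is not None:
        checksums[spec] = blob  # already present: record its blob, skip upload
        skipped.append(spec)
    else:
        to_be_uploaded.append(spec)
print(skipped, to_be_uploaded)  # ['hwloc'] ['mpi', 'app']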
def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
# Don't allow recursion here, since Spack itself always uploads
# vnd.oci.image.manifest.v1+json, not vnd.oci.image.index.v1+json
_, config = get_manifest_and_config_with_retry(image_ref.with_tag(tag), tag, recurse=0)
# Do very basic validation: if "spec" is a key in the config, it
# must be a Spec object too.
return config if "spec" in config else None
def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
tags = list_tags(image_ref)
# Fetch all image config files in parallel
spec_dicts = pool.starmap(
_config_from_tag, ((image_ref, tag) for tag in tags if tag_is_spec(tag))
)
# Populate the database
db_root_dir = os.path.join(tmpdir, "db_root")
db = bindist.BuildCacheDatabase(db_root_dir)
for spec_dict in spec_dicts:
spec = Spec.from_dict(spec_dict)
db.add(spec, directory_layout=None)
db.mark(spec, "in_buildcache", True)
# Create the index.json file
index_json_path = os.path.join(tmpdir, "index.json")
with open(index_json_path, "w") as f:
db._write_to_file(f)
# Create an empty config.json file
empty_config_json_path = os.path.join(tmpdir, "config.json")
with open(empty_config_json_path, "wb") as f:
f.write(b"{}")
# Upload the index.json file
index_shasum = Digest.from_sha256(spack.util.crypto.checksum(hashlib.sha256, index_json_path))
upload_blob_with_retry(image_ref, file=index_json_path, digest=index_shasum)
# Upload the config.json file
empty_config_digest = Digest.from_sha256(
spack.util.crypto.checksum(hashlib.sha256, empty_config_json_path)
)
upload_blob_with_retry(image_ref, file=empty_config_json_path, digest=empty_config_digest)
# Push a manifest file that references the index.json file as a layer
# Notice that we push this as if it were an image, which of course it is not.
# When the ORAS spec becomes official, we can use that instead of a fake image.
# For now we just use the OCI image spec, so that we don't run into issues with
# automatic garbage collection of blobs that are not referenced by any image manifest.
oci_manifest = {
"mediaType": "application/vnd.oci.image.manifest.v1+json",
"schemaVersion": 2,
# Config is just an empty {} file for now, and irrelevant
"config": {
"mediaType": "application/vnd.oci.image.config.v1+json",
"digest": str(empty_config_digest),
"size": os.path.getsize(empty_config_json_path),
},
# The buildcache index is the only layer; it is not actually a tarball, so we lie here.
"layers": [
{
"mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
"digest": str(index_shasum),
"size": os.path.getsize(index_json_path),
}
],
}
upload_manifest_with_retry(image_ref.with_tag(default_index_tag), oci_manifest)
def install_fn(args):
"""install from a binary package"""
@@ -523,6 +960,14 @@ def keys_fn(args):
bindist.get_keys(args.install, args.trust, args.force)
def preview_fn(args):
"""analyze an installed spec and reports whether executables and libraries are relocatable"""
tty.warn(
"`spack buildcache preview` is deprecated since `spack buildcache push --allow-root` is "
"now the default. This command will be removed in Spack 0.22"
)
def check_fn(args: argparse.Namespace):
"""check specs against remote binary mirror(s) to see if any need to be rebuilt
@@ -760,15 +1205,14 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
if image_ref:
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
bindist._oci_update_index(image_ref, tmpdir, executor)
) as tmpdir, _make_pool() as pool:
_update_index_oci(image_ref, tmpdir, pool)
return
# Otherwise, assume a normal mirror.
url = mirror.push_url
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
bindist._url_generate_package_index(url, tmpdir)
bindist.generate_package_index(url_util.join(url, bindist.build_cache_relative_path()))
if update_keys:
keys_url = url_util.join(
@@ -776,8 +1220,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
)
try:
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
bindist.generate_key_index(keys_url, tmpdir)
bindist.generate_key_index(keys_url)
except bindist.CannotListKeys as e:
# Do not error out if listing keys went wrong. This usually means that the _gpg path
# does not exist. TODO: distinguish between this and other errors.

View File

@@ -6,7 +6,6 @@
import json
import os
import shutil
import warnings
from urllib.parse import urlparse, urlunparse
import llnl.util.filesystem as fs
@@ -74,7 +73,7 @@ def setup_parser(subparser):
"--optimize",
action="store_true",
default=False,
help="(DEPRECATED) optimize the gitlab yaml file for size\n\n"
help="(experimental) optimize the gitlab yaml file for size\n\n"
"run the generated document through a series of optimization passes "
"designed to reduce the size of the generated file",
)
@@ -82,7 +81,7 @@ def setup_parser(subparser):
"--dependencies",
action="store_true",
default=False,
help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
help="(experimental) disable DAG scheduling (use 'plain' dependencies)",
)
generate.add_argument(
"--buildcache-destination",
@@ -201,18 +200,6 @@ def ci_generate(args):
before invoking this command. the value must be the CDash authorization token needed to create
a build group and register all generated jobs under it
"""
if args.optimize:
warnings.warn(
"The --optimize option has been deprecated, and currently has no effect. "
"It will be removed in Spack v0.24."
)
if args.dependencies:
warnings.warn(
"The --dependencies option has been deprecated, and currently has no effect. "
"It will be removed in Spack v0.24."
)
env = spack.cmd.require_active_env(cmd_name="ci generate")
if args.copy_to:
@@ -225,6 +212,8 @@ def ci_generate(args):
output_file = args.output_file
copy_yaml_to = args.copy_to
run_optimizer = args.optimize
use_dependencies = args.dependencies
prune_dag = args.prune_dag
index_only = args.index_only
artifacts_root = args.artifacts_root
@@ -245,6 +234,8 @@ def ci_generate(args):
output_file,
prune_dag=prune_dag,
check_index_only=index_only,
run_optimizer=run_optimizer,
use_dependencies=use_dependencies,
artifacts_root=artifacts_root,
remote_mirror_override=buildcache_destination,
)

View File

@@ -11,6 +11,7 @@
from argparse import ArgumentParser, Namespace
from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.argparsewriter import ArgparseRstWriter, ArgparseWriter, Command
from llnl.util.tty.colify import colify
@@ -866,6 +867,9 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
prepend_header(args, f)
formatter(args, f)
if args.update_completion:
fs.set_executable(args.update)
else:
prepend_header(args, sys.stdout)
formatter(args, sys.stdout)

View File

@@ -50,7 +50,6 @@ def setup_parser(subparser):
default=lambda: spack.config.default_modify_scope("compilers"),
help="configuration scope to modify",
)
arguments.add_common_arguments(find_parser, ["jobs"])
# Remove
remove_parser = sp.add_parser("remove", aliases=["rm"], help="remove compiler by spec")
@@ -79,21 +78,25 @@ def setup_parser(subparser):
def compiler_find(args):
"""Search either $PATH or a list of paths OR MODULES for compilers and
add them to Spack's configuration.
"""
# None signals spack.compiler.find_compilers to use its default logic
paths = args.add_paths or None
new_compilers = spack.compilers.find_compilers(
path_hints=paths,
scope=args.scope,
mixed_toolchain=args.mixed_toolchain,
max_workers=args.jobs,
# Below scope=None because we want new compilers that don't appear
# in any other configuration.
new_compilers = spack.compilers.find_new_compilers(
paths, scope=None, mixed_toolchain=args.mixed_toolchain
)
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope)
n = len(new_compilers)
s = "s" if n > 1 else ""
filename = spack.config.CONFIG.get_config_filename(args.scope, "compilers")
tty.msg(f"Added {n:d} new compiler{s} to {filename}")
compiler_strs = sorted(f"{c.spec.name}@{c.spec.version}" for c in new_compilers)
colify(reversed(compiler_strs), indent=4)
config = spack.config.CONFIG
filename = config.get_config_filename(args.scope, "compilers")
tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
else:
tty.msg("Found no new compilers")
tty.msg("Compilers are defined in the following files:")

View File

@@ -3,9 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.tty as tty
from llnl.string import plural
import spack.cmd
import spack.cmd.common.arguments
import spack.environment as ev
@@ -46,9 +43,5 @@ def concretize(parser, args):
with env.write_transaction():
concretized_specs = env.concretize(force=args.force, tests=tests)
if not args.quiet:
if concretized_specs:
tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}:")
ev.display_specs([concrete for _, concrete in concretized_specs])
else:
tty.msg("No new specs to concretize.")
ev.display_specs(concretized_specs)
env.write()

View File

@@ -156,7 +156,7 @@ def print_flattened_configuration(*, blame: bool) -> None:
"""
env = ev.active_environment()
if env is not None:
pristine = env.manifest.yaml_content
pristine = env.manifest.pristine_yaml_content
flattened = pristine.copy()
flattened[spack.schema.env.TOP_LEVEL_KEY] = pristine[spack.schema.env.TOP_LEVEL_KEY].copy()
else:
@@ -264,9 +264,7 @@ def config_remove(args):
def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):
if isinstance(scope, spack.config.SingleFileScope):
return fs.can_access(cfg_file)
elif isinstance(scope, spack.config.DirectoryConfigScope):
return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)
return False
return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)
def _config_change_requires_scope(path, spec, scope, match_spec=None):
@@ -364,11 +362,14 @@ def config_change(args):
def config_update(args):
# Read the configuration files
spack.config.CONFIG.get_config(args.section, scope=args.scope)
updates: List[spack.config.ConfigScope] = [
x
for x in spack.config.CONFIG.format_updates[args.section]
if not isinstance(x, spack.config.InternalConfigScope) and x.writable
]
updates: List[spack.config.ConfigScope] = list(
filter(
lambda s: not isinstance(
s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
),
spack.config.CONFIG.format_updates[args.section],
)
)
cannot_overwrite, skip_system_scope = [], False
for scope in updates:
@@ -446,7 +447,7 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):
def config_revert(args):
scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.writable_scopes]
scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]
# Search for backup files in the configuration scopes
Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])

View File

@@ -2,11 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import re
import sys
import urllib.parse
from typing import List
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -15,15 +15,9 @@
import spack.stage
import spack.util.web
from spack.spec import Spec
from spack.url import (
UndetectableNameError,
UndetectableVersionError,
find_versions_of_archive,
parse_name,
parse_version,
)
from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
from spack.util.editor import editor
from spack.util.executable import which
from spack.util.executable import ProcessError, which
from spack.util.format import get_version_lines
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
@@ -96,20 +90,14 @@ class BundlePackageTemplate:
url_def = " # There is no URL since there is no code to download."
body_def = " # There is no need for install() since there is no code."
def __init__(self, name: str, versions, languages: List[str]):
def __init__(self, name, versions):
self.name = name
self.class_name = mod_to_class(name)
self.versions = versions
self.languages = languages
def write(self, pkg_path):
"""Writes the new package file."""
all_deps = [f' depends_on("{lang}", type="build")' for lang in self.languages]
if all_deps and self.dependencies:
all_deps.append("")
all_deps.append(self.dependencies)
# Write out a template for the file
with open(pkg_path, "w") as pkg_file:
pkg_file.write(
@@ -119,7 +107,7 @@ def write(self, pkg_path):
base_class_name=self.base_class_name,
url_def=self.url_def,
versions=self.versions,
dependencies="\n".join(all_deps),
dependencies=self.dependencies,
body_def=self.body_def,
)
)
@@ -138,8 +126,8 @@ def install(self, spec, prefix):
url_line = ' url = "{url}"'
def __init__(self, name, url, versions, languages: List[str]):
super().__init__(name, versions, languages)
def __init__(self, name, url, versions):
super().__init__(name, versions)
self.url_def = self.url_line.format(url=url)
@@ -227,13 +215,13 @@ def luarocks_args(self):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name lua-lpeg`, don't rename it lua-lua-lpeg
if not name.startswith("lua-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to lua-{0}".format(name))
name = "lua-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class MesonPackageTemplate(PackageTemplate):
@@ -334,14 +322,14 @@ class RacketPackageTemplate(PackageTemplate):
# subdirectory = None
"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name rkt-scribble`, don't rename it rkt-rkt-scribble
if not name.startswith("rkt-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to rkt-{0}".format(name))
name = "rkt-{0}".format(name)
self.body_def = self.body_def.format(name[4:])
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class PythonPackageTemplate(PackageTemplate):
@@ -374,7 +362,7 @@ def config_settings(self, spec, prefix):
settings = {}
return settings"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name py-numpy`, don't rename it py-py-numpy
if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
@@ -428,7 +416,7 @@ def __init__(self, name, url, versions, languages: List[str]):
+ self.url_line
)
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class RPackageTemplate(PackageTemplate):
@@ -447,7 +435,7 @@ def configure_args(self):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name r-rcpp`, don't rename it r-r-rcpp
if not name.startswith("r-"):
# Make it more obvious that we are renaming the package
@@ -467,7 +455,7 @@ def __init__(self, name, url, versions, languages: List[str]):
if bioc:
self.url_line = ' url = "{0}"\n' ' bioc = "{1}"'.format(url, r_name)
super().__init__(name, url, versions, languages)
super().__init__(name, url, *args, **kwargs)
class PerlmakePackageTemplate(PackageTemplate):
@@ -487,14 +475,14 @@ def configure_args(self):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name perl-cpp`, don't rename it perl-perl-cpp
if not name.startswith("perl-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to perl-{0}".format(name))
name = "perl-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
class PerlbuildPackageTemplate(PerlmakePackageTemplate):
@@ -519,7 +507,7 @@ class OctavePackageTemplate(PackageTemplate):
# FIXME: Add additional dependencies if required.
# depends_on("octave-foo", type=("build", "run"))"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name octave-splines`, don't rename it
# octave-octave-splines
if not name.startswith("octave-"):
@@ -527,7 +515,7 @@ def __init__(self, name, url, versions, languages: List[str]):
tty.msg("Changing package name from {0} to octave-{0}".format(name))
name = "octave-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
class RubyPackageTemplate(PackageTemplate):
@@ -547,7 +535,7 @@ def build(self, spec, prefix):
# FIXME: If not needed delete this function
pass"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name ruby-numpy`, don't rename it
# ruby-ruby-numpy
if not name.startswith("ruby-"):
@@ -555,7 +543,7 @@ def __init__(self, name, url, versions, languages: List[str]):
tty.msg("Changing package name from {0} to ruby-{0}".format(name))
name = "ruby-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
class MakefilePackageTemplate(PackageTemplate):
@@ -593,14 +581,14 @@ def configure_args(self, spec, prefix):
args = []
return args"""
def __init__(self, name, url, versions, languages: List[str]):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name py-pyqt4`, don't rename it py-py-pyqt4
if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to py-{0}".format(name))
name = "py-{0}".format(name)
super().__init__(name, url, versions, languages)
super().__init__(name, *args, **kwargs)
templates = {
@@ -671,48 +659,8 @@ def setup_parser(subparser):
)
#: C file extensions
C_EXT = {".c"}
#: C++ file extensions
CXX_EXT = {
".C",
".c++",
".cc",
".ccm",
".cpp",
".CPP",
".cxx",
".h++",
".hh",
".hpp",
".hxx",
".inl",
".ipp",
".ixx",
".tcc",
".tpp",
}
#: Fortran file extensions
FORTRAN_EXT = {
".f77",
".F77",
".f90",
".F90",
".f95",
".F95",
".f",
".F",
".for",
".FOR",
".ftn",
".FTN",
}
class BuildSystemAndLanguageGuesser:
"""An instance of BuildSystemAndLanguageGuesser provides a callable object to be used
class BuildSystemGuesser:
"""An instance of BuildSystemGuesser provides a callable object to be used
during ``spack create``. By passing this object to ``spack checksum``, we
can take a peek at the fetched tarball and discern the build system it uses.
"""
@@ -720,119 +668,81 @@ class BuildSystemAndLanguageGuesser:
def __init__(self):
"""Sets the default build system."""
self.build_system = "generic"
self._c = False
self._cxx = False
self._fortran = False
# List of files in the archive ordered by their depth in the directory tree.
self._file_entries: List[str] = []
def __call__(self, archive: str, url: str) -> None:
def __call__(self, stage, url):
"""Try to guess the type of build system used by a project based on
the contents of its archive or the URL it was downloaded from."""
if url is not None:
# Most octave extensions are hosted on Octave-Forge:
# https://octave.sourceforge.net/index.html
# They all have the same base URL.
if "downloads.sourceforge.net/octave/" in url:
self.build_system = "octave"
return
if url.endswith(".gem"):
self.build_system = "ruby"
return
if url.endswith(".whl") or ".whl#" in url:
self.build_system = "python"
return
if url.endswith(".rock"):
self.build_system = "lua"
return
# A list of clues that give us an idea of the build system a package
# uses. If the regular expression matches a file contained in the
# archive, the corresponding build system is assumed.
# NOTE: Order is important here. If a package supports multiple
# build systems, we choose the first match in this list.
clues = [
(r"/CMakeLists\.txt$", "cmake"),
(r"/NAMESPACE$", "r"),
(r"/Cargo\.toml$", "cargo"),
(r"/go\.mod$", "go"),
(r"/configure$", "autotools"),
(r"/configure\.(in|ac)$", "autoreconf"),
(r"/Makefile\.am$", "autoreconf"),
(r"/pom\.xml$", "maven"),
(r"/SConstruct$", "scons"),
(r"/waf$", "waf"),
(r"/pyproject.toml", "python"),
(r"/setup\.(py|cfg)$", "python"),
(r"/WORKSPACE$", "bazel"),
(r"/Build\.PL$", "perlbuild"),
(r"/Makefile\.PL$", "perlmake"),
(r"/.*\.gemspec$", "ruby"),
(r"/Rakefile$", "ruby"),
(r"/setup\.rb$", "ruby"),
(r"/.*\.pro$", "qmake"),
(r"/.*\.rockspec$", "lua"),
(r"/(GNU)?[Mm]akefile$", "makefile"),
(r"/DESCRIPTION$", "octave"),
(r"/meson\.build$", "meson"),
(r"/configure\.py$", "sip"),
]
# Peek inside the compressed file.
if archive.endswith(".zip") or ".zip#" in archive:
if stage.archive_file.endswith(".zip") or ".zip#" in stage.archive_file:
try:
unzip = which("unzip")
assert unzip is not None
output = unzip("-lq", archive, output=str)
except Exception:
output = unzip("-lq", stage.archive_file, output=str)
except ProcessError:
output = ""
else:
try:
tar = which("tar")
assert tar is not None
output = tar("tf", archive, output=str)
except Exception:
output = tar("--exclude=*/*/*", "-tf", stage.archive_file, output=str)
except ProcessError:
output = ""
self._file_entries[:] = output.splitlines()
lines = output.splitlines()
# Files closest to the root should be considered first when determining build system.
self._file_entries.sort(key=lambda p: p.count("/"))
self._determine_build_system(url)
self._determine_language()
def _determine_build_system(self, url: str) -> None:
# Most octave extensions are hosted on Octave-Forge:
# https://octave.sourceforge.net/index.html
# They all have the same base URL.
if "downloads.sourceforge.net/octave/" in url:
self.build_system = "octave"
elif url.endswith(".gem"):
self.build_system = "ruby"
elif url.endswith(".whl") or ".whl#" in url:
self.build_system = "python"
elif url.endswith(".rock"):
self.build_system = "lua"
elif self._file_entries:
# A list of clues that give us an idea of the build system a package
# uses. If the regular expression matches a file contained in the
# archive, the corresponding build system is assumed.
# NOTE: Order is important here. If a package supports multiple
# build systems, we choose the first match in this list.
clues = [
(re.compile(pattern), build_system)
for pattern, build_system in (
(r"/CMakeLists\.txt$", "cmake"),
(r"/NAMESPACE$", "r"),
(r"/Cargo\.toml$", "cargo"),
(r"/go\.mod$", "go"),
(r"/configure$", "autotools"),
(r"/configure\.(in|ac)$", "autoreconf"),
(r"/Makefile\.am$", "autoreconf"),
(r"/pom\.xml$", "maven"),
(r"/SConstruct$", "scons"),
(r"/waf$", "waf"),
(r"/pyproject.toml", "python"),
(r"/setup\.(py|cfg)$", "python"),
(r"/WORKSPACE$", "bazel"),
(r"/Build\.PL$", "perlbuild"),
(r"/Makefile\.PL$", "perlmake"),
(r"/.*\.gemspec$", "ruby"),
(r"/Rakefile$", "ruby"),
(r"/setup\.rb$", "ruby"),
(r"/.*\.pro$", "qmake"),
(r"/.*\.rockspec$", "lua"),
(r"/(GNU)?[Mm]akefile$", "makefile"),
(r"/DESCRIPTION$", "octave"),
(r"/meson\.build$", "meson"),
(r"/configure\.py$", "sip"),
)
]
# Determine the build system based on the files contained in the archive.
for file in self._file_entries:
for pattern, build_system in clues:
if pattern.search(file):
self.build_system = build_system
return
def _determine_language(self):
for entry in self._file_entries:
_, ext = os.path.splitext(entry)
if not self._c and ext in C_EXT:
self._c = True
elif not self._cxx and ext in CXX_EXT:
self._cxx = True
elif not self._fortran and ext in FORTRAN_EXT:
self._fortran = True
if self._c and self._cxx and self._fortran:
return
@property
def languages(self) -> List[str]:
langs: List[str] = []
if self._c:
langs.append("c")
if self._cxx:
langs.append("cxx")
if self._fortran:
langs.append("fortran")
return langs
# Determine the build system based on the files contained
# in the archive.
for pattern, bs in clues:
if any(re.search(pattern, line) for line in lines):
self.build_system = bs
break
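A standalone sketch of the matching strategy used here: sort archive entries by directory depth so files near the root win, then take the first clue that matches. The patterns are a subset of those above; the file names are invented:

import re

clues = [(re.compile(p), bs) for p, bs in [
    (r"/CMakeLists\.txt$", "cmake"),
    (r"/configure$", "autotools"),
    (r"/setup\.(py|cfg)$", "python"),
]]

entries = ["pkg-1.0/src/setup.py", "pkg-1.0/CMakeLists.txt"]
entries.sort(key=lambda p: p.count("/"))  # root-level files first

build_system = "generic"
for f in entries:
    match = next((bs for rx, bs in clues if rx.search(f)), None)
    if match:
        build_system = match
        break
print(build_system)  # 'cmake' -- closer to the root than src/setup.py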
def get_name(name, url):
@@ -902,7 +812,7 @@ def get_url(url):
def get_versions(args, name):
"""Returns a list of versions and hashes for a package.
Also returns a BuildSystemAndLanguageGuesser object.
Also returns a BuildSystemGuesser object.
Returns default values if no URL is provided.
@@ -911,7 +821,7 @@ def get_versions(args, name):
name (str): The name of the package
Returns:
tuple: versions and hashes, and a BuildSystemAndLanguageGuesser object
tuple: versions and hashes, and a BuildSystemGuesser object
"""
# Default version with hash
@@ -925,7 +835,7 @@ def get_versions(args, name):
# version("1.2.4")"""
# Default guesser
guesser = BuildSystemAndLanguageGuesser()
guesser = BuildSystemGuesser()
valid_url = True
try:
@@ -938,7 +848,7 @@ def get_versions(args, name):
if args.url is not None and args.template != "bundle" and valid_url:
# Find available versions
try:
url_dict = find_versions_of_archive(args.url)
url_dict = spack.url.find_versions_of_archive(args.url)
if len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
url_dict_filtered = spack.stage.interactive_version_filter(url_dict)
if url_dict_filtered is None:
@@ -965,7 +875,7 @@ def get_versions(args, name):
return versions, guesser
def get_build_system(template: str, url: str, guesser: BuildSystemAndLanguageGuesser) -> str:
def get_build_system(template, url, guesser):
"""Determine the build system template.
If a template is specified, always use that. Otherwise, if a URL
@@ -973,10 +883,11 @@ def get_build_system(template: str, url: str, guesser: BuildSystemAndLanguageGue
build system it uses. Otherwise, use a generic template by default.
Args:
template: ``--template`` argument given to ``spack create``
url: ``url`` argument given to ``spack create``
guesser: The first_stage_function given to ``spack checksum`` which records the build
system it detects
template (str): ``--template`` argument given to ``spack create``
url (str): ``url`` argument given to ``spack create``
args (argparse.Namespace): The arguments given to ``spack create``
guesser (BuildSystemGuesser): The first_stage_function given to
``spack checksum`` which records the build system it detects
Returns:
str: The name of the build system template to use
@@ -1023,7 +934,7 @@ def get_repository(args, name):
# Figure out where the new package should live
repo_path = args.repo
if repo_path is not None:
repo = spack.repo.from_path(repo_path)
repo = spack.repo.Repo(repo_path)
if spec.namespace and spec.namespace != repo.namespace:
tty.die(
"Can't create package with namespace {0} in repo with "
@@ -1031,7 +942,9 @@ def get_repository(args, name):
)
else:
if spec.namespace:
repo = spack.repo.PATH.get_repo(spec.namespace)
repo = spack.repo.PATH.get_repo(spec.namespace, None)
if not repo:
tty.die("Unknown namespace: '{0}'".format(spec.namespace))
else:
repo = spack.repo.PATH.first_repo()
@@ -1050,7 +963,7 @@ def create(parser, args):
build_system = get_build_system(args.template, url, guesser)
# Create the package template object
constr_args = {"name": name, "versions": versions, "languages": guesser.languages}
constr_args = {"name": name, "versions": versions}
package_class = templates[build_system]
if package_class != BundlePackageTemplate:
constr_args["url"] = url

View File

@@ -6,7 +6,6 @@
import os
import platform
import re
import sys
from datetime import datetime
from glob import glob
@@ -63,10 +62,9 @@ def create_db_tarball(args):
base = os.path.basename(str(spack.store.STORE.root))
transform_args = []
# Currently --transform and -s are not supported by Windows native tar
if "GNU" in tar("--version", output=str):
transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
elif sys.platform != "win32":
else:
transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
wd = os.path.dirname(str(spack.store.STORE.root))
@@ -92,6 +90,7 @@ def report(args):
print("* **Spack:**", get_version())
print("* **Python:**", platform.python_version())
print("* **Platform:**", architecture)
print("* **Concretizer:**", spack.config.get("config:concretizer"))
def debug(parser, args):

View File

@@ -47,6 +47,16 @@ def inverted_dependencies():
dependents of, e.g., `mpi`, but virtuals are not included as
actual dependents.
"""
dag = {}
for pkg_cls in spack.repo.PATH.all_package_classes():
dag.setdefault(pkg_cls.name, set())
for dep in pkg_cls.dependencies_by_name():
deps = [dep]
# expand virtuals if necessary
if spack.repo.PATH.is_virtual(dep):
deps += [s.name for s in spack.repo.PATH.providers_for(dep)]
dag = collections.defaultdict(set)
for pkg_cls in spack.repo.PATH.all_package_classes():
for _, deps_by_name in pkg_cls.dependencies.items():
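A minimal sketch of the inversion being built here: turn a {package: dependencies} map into {dependency: dependents}, expanding a virtual into its providers (all names are made up):

import collections

dependencies = {"app": {"mpi"}, "openmpi": {"hwloc"}, "mpich": set(), "hwloc": set()}
providers = {"mpi": ["openmpi", "mpich"]}  # virtual -> providers

dag = collections.defaultdict(set)
for pkg, deps in dependencies.items():
    dag.setdefault(pkg, set())
    for dep in deps:
        names = [dep] + providers.get(dep, [])  # expand virtuals
        for name in names:
            dag[name].add(pkg)

print(sorted(dag["openmpi"]))  # ['app'] -- dependents of an mpi provider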

View File

@@ -9,11 +9,7 @@
import spack.cmd
import spack.config
import spack.fetch_strategy
import spack.package_base
import spack.repo
import spack.spec
import spack.stage
import spack.util.path
import spack.version
from spack.cmd.common import arguments
@@ -64,7 +60,7 @@ def change_fn(section):
spack.config.change_or_add("develop", find_fn, change_fn)
def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
def _retrieve_develop_source(spec, abspath):
# "steal" the source code via staging API. We ask for a stage
# to be created, then copy it afterwards somewhere else. It would be
# better if we can create the `source_path` directly into its final
@@ -73,15 +69,13 @@ def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
# We construct a package class ourselves, rather than asking for
# Spec.package, since Spec only allows this when it is concrete
package = pkg_cls(spec)
source_stage: spack.stage.Stage = package.stage[0]
if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
source_stage.fetcher.get_full_repo = True
if isinstance(package.stage[0].fetcher, spack.fetch_strategy.GitFetchStrategy):
package.stage[0].fetcher.get_full_repo = True
# If we retrieved this version before and cached it, we may have
# done so without cloning the full git repo; likewise, any
# mirror might store an instance with truncated history.
source_stage.default_fetcher_only = True
package.stage[0].disable_mirrors()
source_stage.fetcher.set_package(package)
package.stage.steal_source(abspath)

View File

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import errno
import glob
import os
@@ -12,13 +11,43 @@
import spack.cmd
import spack.paths
import spack.repo
import spack.util.editor
from spack.spec import Spec
from spack.util.editor import editor
description = "open package files in $EDITOR"
section = "packaging"
level = "short"
def edit_package(name, repo_path, namespace):
"""Opens the requested package file in your favorite $EDITOR.
Args:
name (str): The name of the package
repo_path (str): The path to the repository containing this package
namespace (str): A valid namespace registered with Spack
"""
# Find the location of the package
if repo_path:
repo = spack.repo.Repo(repo_path)
elif namespace:
repo = spack.repo.PATH.get_repo(namespace)
else:
repo = spack.repo.PATH
path = repo.filename_for_package_name(name)
spec = Spec(name)
if os.path.exists(path):
if not os.path.isfile(path):
tty.die("Something is wrong. '{0}' is not a file!".format(path))
if not os.access(path, os.R_OK):
tty.die("Insufficient permissions on '%s'!" % path)
else:
raise spack.repo.UnknownPackageError(spec.name)
editor(path)
def setup_parser(subparser):
excl_args = subparser.add_mutually_exclusive_group()
@@ -69,67 +98,41 @@ def setup_parser(subparser):
excl_args.add_argument("-r", "--repo", default=None, help="path to repo to edit package in")
excl_args.add_argument("-N", "--namespace", default=None, help="namespace of package to edit")
subparser.add_argument("package", nargs="*", default=None, help="package name")
def locate_package(name: str, repo: spack.repo.Repo) -> str:
path = repo.filename_for_package_name(name)
try:
with open(path, "r"):
return path
except OSError as e:
if e.errno == errno.ENOENT:
raise spack.repo.UnknownPackageError(name) from e
tty.die(f"Cannot edit package: {e}")
def locate_file(name: str, path: str) -> str:
# convert command names to python module name
if path == spack.paths.command_path:
name = spack.cmd.python_name(name)
file_path = os.path.join(path, name)
# Try to open direct match.
try:
with open(file_path, "r"):
return file_path
except OSError as e:
if e.errno != errno.ENOENT:
tty.die(f"Cannot edit file: {e}")
pass
# Otherwise try to find a file that starts with the name
candidates = glob.glob(file_path + "*")
exclude_list = [".pyc", "~"] # exclude binaries and backups
files = [f for f in candidates if not any(f.endswith(ext) for ext in exclude_list)]
if len(files) > 1:
tty.die(
f"Multiple files start with `{name}`:\n"
+ "\n".join(f" {os.path.basename(f)}" for f in files)
)
elif not files:
tty.die(f"No file for '{name}' was found in {path}")
return files[0]
subparser.add_argument("package", nargs="?", default=None, help="package name")
def edit(parser, args):
names = args.package
name = args.package
# By default, edit package files
path = spack.paths.packages_path
# If `--command`, `--test`, or `--module` is chosen, edit those instead
if args.path:
paths = [locate_file(name, args.path) for name in names] if names else [args.path]
spack.util.editor.editor(*paths)
elif names:
if args.repo:
repo = spack.repo.from_path(args.repo)
elif args.namespace:
repo = spack.repo.PATH.get_repo(args.namespace)
else:
repo = spack.repo.PATH
paths = [locate_package(name, repo) for name in names]
spack.util.editor.editor(*paths)
path = args.path
if name:
# convert command names to python module name
if path == spack.paths.command_path:
name = spack.cmd.python_name(name)
path = os.path.join(path, name)
if not os.path.exists(path):
files = glob.glob(path + "*")
exclude_list = [".pyc", "~"] # exclude binaries and backups
files = list(filter(lambda x: all(s not in x for s in exclude_list), files))
if len(files) > 1:
m = "Multiple files exist with the name {0}.".format(name)
m += " Please specify a suffix. Files are:\n\n"
for f in files:
m += " " + os.path.basename(f) + "\n"
tty.die(m)
if not files:
tty.die("No file for '{0}' was found in {1}".format(name, path))
path = files[0] # already confirmed only one entry in files
editor(path)
elif name:
edit_package(name, args.repo, args.namespace)
else:
# By default open the directory where packages live
spack.util.editor.editor(spack.paths.packages_path)
editor(path)
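The fallback lookup above (glob for prefix matches, drop compiled and backup files, demand a unique hit) can be sketched on its own; the directory layout and the error handling here are hypothetical:

import glob
import os

def resolve(name, path):
    file_path = os.path.join(path, name)
    if os.path.exists(file_path):
        return file_path  # exact match wins
    candidates = glob.glob(file_path + "*")
    exclude = (".pyc", "~")  # binaries and editor backups
    files = [f for f in candidates if not f.endswith(exclude)]
    if len(files) != 1:
        raise RuntimeError(f"need exactly one match for {name!r}, got {files}")
    return files[0]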

View File

@@ -468,30 +468,32 @@ def env_remove(args):
This removes an environment managed by Spack. Directory environments
and manifests embedded in repositories should be removed manually.
"""
remove_envs = []
read_envs = []
valid_envs = []
bad_envs = []
invalid_envs = []
for env_name in ev.all_environment_names():
try:
env = ev.read(env_name)
valid_envs.append(env)
valid_envs.append(env_name)
if env_name in args.rm_env:
remove_envs.append(env)
read_envs.append(env)
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
invalid_envs.append(env_name)
if env_name in args.rm_env:
bad_envs.append(env_name)
# Check if remove_env is included from another env before trying to remove
for env in valid_envs:
for remove_env in remove_envs:
# Check if env is linked to another before trying to remove
for name in valid_envs:
# don't check if environment is included to itself
if env.name == remove_env.name:
if name == env_name:
continue
if remove_env.path in env.included_concrete_envs:
msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
environ = ev.Environment(ev.root(name))
if ev.root(env_name) in environ.included_concrete_envs:
msg = f'Environment "{env_name}" is being used by environment "{name}"'
if args.force:
tty.warn(msg)
else:
@@ -504,7 +506,7 @@ def env_remove(args):
if not answer:
tty.die("Will not remove any environments")
for env in remove_envs:
for env in read_envs:
name = env.name
if env.active:
tty.die(f"Environment {name} can't be removed while activated.")

View File

@@ -7,7 +7,7 @@
import os
import re
import sys
from typing import List, Optional, Set
from typing import List, Optional
import llnl.util.tty as tty
import llnl.util.tty.colify as colify
@@ -19,7 +19,6 @@
import spack.detection
import spack.error
import spack.repo
import spack.spec
import spack.util.environment
from spack.cmd.common import arguments
@@ -139,26 +138,14 @@ def external_find(args):
candidate_packages, path_hints=args.path, max_workers=args.jobs
)
new_specs = spack.detection.update_configuration(
new_entries = spack.detection.update_configuration(
detected_packages, scope=args.scope, buildable=not args.not_buildable
)
# If the user runs `spack external find --not-buildable mpich` we also mark `mpi` non-buildable
# to avoid that the concretizer picks a different mpi provider.
if new_specs and args.not_buildable:
virtuals: Set[str] = {
virtual.name
for new_spec in new_specs
for virtual_specs in spack.repo.PATH.get_pkg_class(new_spec.name).provided.values()
for virtual in virtual_specs
}
new_virtuals = spack.detection.set_virtuals_nonbuildable(virtuals, scope=args.scope)
new_specs.extend(spack.spec.Spec(name) for name in new_virtuals)
if new_specs:
if new_entries:
path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
tty.msg(f"The following specs have been detected on this system and added to {path}")
spack.cmd.display_specs(new_specs)
msg = "The following specs have been detected on this system and added to {0}"
tty.msg(msg.format(path))
spack.cmd.display_specs(new_entries)
else:
tty.msg("No new external packages detected")

View File

@@ -46,10 +46,6 @@ def setup_parser(subparser):
help="output specs as machine-readable json records",
)
subparser.add_argument(
"-I", "--install-status", action="store_true", help="show install status of packages"
)
subparser.add_argument(
"-d", "--deps", action="store_true", help="output dependencies along with found specs"
)
@@ -297,24 +293,25 @@ def root_decorator(spec, string):
)
print()
if args.show_concretized:
tty.msg("Concretized roots")
cmd.display_specs(env.specs_by_hash.values(), args, decorator=decorator)
print()
# Display a header for the installed packages section IF there are installed
# packages. If there aren't any, we'll just end up printing "0 installed packages"
# later.
if results and not args.only_roots:
tty.msg("Installed packages")
def find(parser, args):
env = ev.active_environment()
q_args = query_arguments(args)
results = args.specs(**q_args)
env = ev.active_environment()
if not env and args.only_roots:
tty.die("-r / --only-roots requires an active environment")
if not env and args.show_concretized:
tty.die("-c / --show-concretized requires an active environment")
if env:
if args.constraint:
init_specs = spack.cmd.parse_specs(args.constraint)
results = env.all_matching_specs(*init_specs)
else:
results = env.all_specs()
else:
q_args = query_arguments(args)
results = args.specs(**q_args)
decorator = make_env_decorator(env) if env else lambda s, f: f
@@ -335,11 +332,6 @@ def find(parser, args):
if args.loaded:
results = spack.cmd.filter_loaded_specs(results)
if args.install_status or args.show_concretized:
status_fn = spack.spec.Spec.install_status
else:
status_fn = None
# Display the result
if args.json:
cmd.display_specs_as_json(results, deps=args.deps)
@@ -348,34 +340,12 @@ def find(parser, args):
if env:
display_env(env, args, decorator, results)
count_suffix = " (not shown)"
if not args.only_roots:
display_results = results
if not args.show_concretized:
display_results = list(x for x in results if x.installed)
cmd.display_specs(
display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
)
cmd.display_specs(results, args, decorator=decorator, all_headers=True)
count_suffix = ""
# print number of installed packages last (as the list may be long)
if sys.stdout.isatty() and args.groups:
installed_suffix = ""
concretized_suffix = " to be installed"
if args.only_roots:
installed_suffix += " (not shown)"
concretized_suffix += " (not shown)"
else:
if env and not args.show_concretized:
concretized_suffix += " (show with `spack find -c`)"
pkg_type = "loaded" if args.loaded else "installed"
spack.cmd.print_how_many_pkgs(
list(x for x in results if x.installed), pkg_type, suffix=installed_suffix
)
if env:
spack.cmd.print_how_many_pkgs(
list(x for x in results if not x.installed),
"concretized",
suffix=concretized_suffix,
)
spack.cmd.print_how_many_pkgs(results, pkg_type, suffix=count_suffix)

View File

@@ -56,6 +56,7 @@ def roots_from_environments(args, active_env):
# -e says "also preserve things needed by this particular env"
for env_name_or_dir in args.except_environment:
print("HMM", env_name_or_dir)
if ev.exists(env_name_or_dir):
env = ev.read(env_name_or_dir)
elif ev.is_env_dir(env_name_or_dir):

View File

@@ -5,12 +5,10 @@
import argparse
import os
import tempfile
import spack.binary_distribution
import spack.mirror
import spack.paths
import spack.stage
import spack.util.gpg
import spack.util.url
from spack.cmd.common import arguments
@@ -117,7 +115,6 @@ def setup_parser(subparser):
help="URL of the mirror where keys will be published",
)
publish.add_argument(
"--update-index",
"--rebuild-index",
action="store_true",
default=False,
@@ -223,10 +220,9 @@ def gpg_publish(args):
elif args.mirror_url:
mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
spack.binary_distribution._url_push_keys(
mirror, keys=args.keys, tmpdir=tmpdir, update_index=args.update_index
)
spack.binary_distribution.push_keys(
mirror, keys=args.keys, regenerate_index=args.rebuild_index
)
def gpg(parser, args):

View File

@@ -502,7 +502,7 @@ def print_licenses(pkg, args):
def info(parser, args):
spec = spack.spec.Spec(args.package)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.fullname)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
pkg = pkg_cls(spec)
# Output core package information

View File

@@ -10,7 +10,6 @@
from typing import List
import llnl.util.filesystem as fs
from llnl.string import plural
from llnl.util import lang, tty
import spack.build_environment
@@ -376,9 +375,7 @@ def _maybe_add_and_concretize(args, env, specs):
# `spack concretize`
tests = compute_tests_install_kwargs(env.user_specs, args.test)
concretized_specs = env.concretize(tests=tests)
if concretized_specs:
tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}")
ev.display_specs([concrete for _, concrete in concretized_specs])
ev.display_specs(concretized_specs)
# save view regeneration for later, so that we only do it
# once, as it can be slow.

View File

@@ -169,9 +169,7 @@ def pkg_hash(args):
def get_grep(required=False):
"""Get a grep command to use with ``spack pkg grep``."""
grep = exe.which(os.environ.get("SPACK_GREP") or "grep", required=required)
grep.ignore_quotes = True # allow `spack pkg grep '"quoted string"'` without warning
return grep
return exe.which(os.environ.get("SPACK_GREP") or "grep", required=required)
def pkg_grep(args, unknown_args):

View File

@@ -91,7 +91,7 @@ def repo_add(args):
tty.die("Not a Spack repository: %s" % path)
# Make sure it's actually a spack repository by constructing it.
repo = spack.repo.from_path(canon_path)
repo = spack.repo.Repo(canon_path)
# If that succeeds, finally add it to the configuration.
repos = spack.config.get("repos", scope=args.scope)
@@ -124,7 +124,7 @@ def repo_remove(args):
# If it is a namespace, remove corresponding repo
for path in repos:
try:
repo = spack.repo.from_path(path)
repo = spack.repo.Repo(path)
if repo.namespace == namespace_or_path:
repos.remove(path)
spack.config.set("repos", repos, args.scope)
@@ -142,7 +142,7 @@ def repo_list(args):
repos = []
for r in roots:
try:
repos.append(spack.repo.from_path(r))
repos.append(spack.repo.Repo(r))
except spack.repo.RepoError:
continue

View File

@@ -114,16 +114,15 @@ def _process_result(result, show, required_format, kwargs):
# dump the solutions as concretized specs
if "solutions" in show:
if required_format:
for spec in result.specs:
# With -y, just print YAML to output.
if required_format == "yaml":
# use write because to_yaml already has a newline.
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
elif required_format == "json":
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
else:
sys.stdout.write(spack.spec.tree(result.specs, color=sys.stdout.isatty(), **kwargs))
for spec in result.specs:
# With -y, just print YAML to output.
if required_format == "yaml":
# use write because to_yaml already has a newline.
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
elif required_format == "json":
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
else:
sys.stdout.write(spec.tree(color=sys.stdout.isatty(), **kwargs))
print()
if result.unsolved_specs and "solutions" in show:

View File

@@ -105,19 +105,11 @@ def spec(parser, args):
if env:
env.concretize()
specs = env.concretized_specs()
# environments are printed together in a combined tree() invocation,
# except when using --yaml or --json, which we print spec by spec below.
if not args.format:
tree_kwargs["key"] = spack.traverse.by_dag_hash
tree_kwargs["hashes"] = args.long or args.very_long
print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
return
else:
tty.die("spack spec requires at least one spec or an active environment")
for input, output in specs:
# With --yaml or --json, just print the raw specs to output
# With -y, just print YAML to output.
if args.format:
if args.format == "yaml":
# use write because to_yaml already has a newline.

View File

@@ -71,7 +71,7 @@ def unload(parser, args):
"Cannot specify specs on command line when unloading all specs with '--all'"
)
hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
if args.specs:
specs = [
spack.cmd.disambiguate_spec_from_hashes(spec, hashes)
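The difference between the two lines above is the separator: os.pathsep is platform-dependent (":" on POSIX, ";" on Windows), while a hard-coded ":" breaks on Windows. A tiny round-trip check with made-up hashes:

import os

loaded = os.pathsep.join(["abc123", "def456"])
print(loaded.split(os.pathsep))  # ['abc123', 'def456'] on any platform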

View File

@@ -339,7 +339,7 @@ def add(self, pkg_name, fetcher):
for pkg_cls in spack.repo.PATH.all_package_classes():
npkgs += 1
for v in list(pkg_cls.versions):
for v in pkg_cls.versions:
try:
pkg = pkg_cls(spack.spec.Spec(pkg_cls.name))
fetcher = fs.for_package_version(pkg, v)
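The `list(pkg_cls.versions)` wrapper above snapshots the dict's keys, so the loop body can add or remove versions without tripping Python's "dictionary changed size during iteration" error. A toy illustration:

```python
versions = {"1.0": {}, "1.1": {}}

for v in list(versions):           # iterate over a copy of the keys
    versions[v + "-patched"] = {}  # safe: mutating the dict mid-loop

print(sorted(versions))  # ['1.0', '1.0-patched', '1.1', '1.1-patched']
```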

View File

@@ -23,6 +23,11 @@ def setup_parser(subparser):
output.add_argument(
"-s", "--safe", action="store_true", help="only list safe versions of the package"
)
output.add_argument(
"--safe-only",
action="store_true",
help="[deprecated] only list safe versions of the package",
)
output.add_argument(
"-r", "--remote", action="store_true", help="only list remote versions of the package"
)
@@ -42,13 +47,17 @@ def versions(parser, args):
safe_versions = pkg.versions
if args.safe_only:
tty.warn('"--safe-only" is deprecated. Use "--safe" instead.')
args.safe = args.safe_only
if not (args.remote or args.new):
if sys.stdout.isatty():
tty.msg("Safe versions (already checksummed):")
if not safe_versions:
if sys.stdout.isatty():
tty.warn(f"Found no versions for {pkg.name}")
tty.warn("Found no versions for {0}".format(pkg.name))
tty.debug("Manually add versions to the package.")
else:
colify(sorted(safe_versions, reverse=True), indent=2)
@@ -74,12 +83,12 @@ def versions(parser, args):
if not remote_versions:
if sys.stdout.isatty():
if not fetched_versions:
tty.warn(f"Found no versions for {pkg.name}")
tty.warn("Found no versions for {0}".format(pkg.name))
tty.debug(
"Check the list_url and list_depth attributes of "
"the package to help Spack find versions."
)
else:
tty.warn(f"Found no unchecksummed versions for {pkg.name}")
tty.warn("Found no unchecksummed versions for {0}".format(pkg.name))
else:
colify(sorted(remote_versions, reverse=True), indent=2)
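The `--safe-only` handling above is a standard flag-deprecation pattern: keep parsing the old option, warn, and forward its value to the replacement. A self-contained argparse sketch of the same pattern (the names mirror the hunk, but this is not the Spack parser itself):

```python
import argparse
import warnings

parser = argparse.ArgumentParser()
parser.add_argument("-s", "--safe", action="store_true")
parser.add_argument("--safe-only", action="store_true",
                    help="[deprecated] only list safe versions of the package")
args = parser.parse_args(["--safe-only"])

if args.safe_only:
    warnings.warn('"--safe-only" is deprecated. Use "--safe" instead.')
    args.safe = args.safe_only  # forward the deprecated flag's value

assert args.safe
```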

View File

@@ -38,10 +38,10 @@
import spack.cmd
import spack.environment as ev
import spack.filesystem_view as fsv
import spack.schema.projections
import spack.store
from spack.config import validate
from spack.filesystem_view import YamlFilesystemView, view_func_parser
from spack.util import spack_yaml as s_yaml
description = "project packages to a compact naming scheme on the filesystem"
@@ -193,13 +193,17 @@ def view(parser, args):
ordered_projections = {}
# What method are we using for this view
link_type = args.action if args.action in actions_link else "symlink"
view = fsv.YamlFilesystemView(
if args.action in actions_link:
link_fn = view_func_parser(args.action)
else:
link_fn = view_func_parser("symlink")
view = YamlFilesystemView(
path,
spack.store.STORE.layout,
projections=ordered_projections,
ignore_conflicts=getattr(args, "ignore_conflicts", False),
link_type=link_type,
link=link_fn,
verbose=args.verbose,
)

View File

@@ -18,6 +18,7 @@
import llnl.util.tty as tty
from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs
import spack.compilers
import spack.error
import spack.schema.environment
import spack.spec
@@ -29,9 +30,6 @@
__all__ = ["Compiler"]
PATH_INSTANCE_VARS = ["cc", "cxx", "f77", "fc"]
FLAG_INSTANCE_VARS = ["cflags", "cppflags", "cxxflags", "fflags"]
@llnl.util.lang.memoized
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
@@ -281,6 +279,11 @@ def debug_flags(self):
def opt_flags(self):
return ["-O", "-O0", "-O1", "-O2", "-O3"]
# Cray PrgEnv name that can be used to load this compiler
PrgEnv: Optional[str] = None
# Name of module used to switch versions of this compiler
PrgEnv_compiler: Optional[str] = None
def __init__(
self,
cspec,
@@ -703,30 +706,6 @@ def compiler_environment(self):
os.environ.clear()
os.environ.update(backup_env)
def to_dict(self):
flags_dict = {fname: " ".join(fvals) for fname, fvals in self.flags.items()}
flags_dict.update(
{attr: getattr(self, attr, None) for attr in FLAG_INSTANCE_VARS if hasattr(self, attr)}
)
result = {
"spec": str(self.spec),
"paths": {attr: getattr(self, attr, None) for attr in PATH_INSTANCE_VARS},
"flags": flags_dict,
"operating_system": str(self.operating_system),
"target": str(self.target),
"modules": self.modules or [],
"environment": self.environment or {},
"extra_rpaths": self.extra_rpaths or [],
}
if self.enable_implicit_rpaths is not None:
result["implicit_rpaths"] = self.enable_implicit_rpaths
if self.alias:
result["alias"] = self.alias
return result
class CompilerAccessError(spack.error.SpackError):
def __init__(self, compiler, paths):

View File

@@ -6,11 +6,12 @@
"""This module contains functions related to finding compilers on the
system and configuring Spack to use multiple compilers.
"""
import importlib
import collections
import itertools
import multiprocessing.pool
import os
import sys
import warnings
from typing import Dict, List, Optional
from typing import Dict, List, Optional, Tuple
import archspec.cpu
@@ -21,15 +22,16 @@
import spack.compiler
import spack.config
import spack.error
import spack.operating_systems
import spack.paths
import spack.platforms
import spack.repo
import spack.spec
import spack.version
from spack.operating_systems import windows_os
from spack.util.environment import get_path
from spack.util.naming import mod_to_class
_path_instance_vars = ["cc", "cxx", "f77", "fc"]
_flags_instance_vars = ["cflags", "cppflags", "cxxflags", "fflags"]
_other_instance_vars = [
"modules",
"operating_system",
@@ -61,10 +63,6 @@
}
#: Tag used to identify packages providing a compiler
COMPILER_TAG = "compiler"
def pkg_spec_for_compiler(cspec):
"""Return the spec of the package that provides the compiler."""
for spec, package in _compiler_to_pkg.items():
@@ -87,7 +85,29 @@ def converter(cspec_like, *args, **kwargs):
def _to_dict(compiler):
"""Return a dict version of compiler suitable to insert in YAML."""
return {"compiler": compiler.to_dict()}
d = {}
d["spec"] = str(compiler.spec)
d["paths"] = dict((attr, getattr(compiler, attr, None)) for attr in _path_instance_vars)
d["flags"] = dict((fname, " ".join(fvals)) for fname, fvals in compiler.flags.items())
d["flags"].update(
dict(
(attr, getattr(compiler, attr, None))
for attr in _flags_instance_vars
if hasattr(compiler, attr)
)
)
d["operating_system"] = str(compiler.operating_system)
d["target"] = str(compiler.target)
d["modules"] = compiler.modules or []
d["environment"] = compiler.environment or {}
d["extra_rpaths"] = compiler.extra_rpaths or []
if compiler.enable_implicit_rpaths is not None:
d["implicit_rpaths"] = compiler.enable_implicit_rpaths
if compiler.alias:
d["alias"] = compiler.alias
return {"compiler": d}
def get_compiler_config(
@@ -107,7 +127,7 @@ def get_compiler_config(
# Do not init config because there is a non-empty scope
return config
find_compilers(scope=scope)
_init_compiler_config(configuration, scope=scope)
config = configuration.get("compilers", scope=scope)
return config
@@ -116,14 +136,131 @@ def get_compiler_config_from_packages(
configuration: "spack.config.Configuration", *, scope: Optional[str] = None
) -> List[Dict]:
"""Return the compiler configuration from packages.yaml"""
packages_yaml = configuration.get("packages", scope=scope)
return CompilerConfigFactory.from_packages_yaml(packages_yaml)
config = configuration.get("packages", scope=scope)
if not config:
return []
packages = []
compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
for name, entry in config.items():
if name not in compiler_package_names:
continue
externals_config = entry.get("externals", None)
if not externals_config:
continue
packages.extend(_compiler_config_from_package_config(externals_config))
return packages
def _compiler_config_from_package_config(config):
compilers = []
for entry in config:
compiler = _compiler_config_from_external(entry)
if compiler:
compilers.append(compiler)
return compilers
def _compiler_config_from_external(config):
extra_attributes_key = "extra_attributes"
compilers_key = "compilers"
c_key, cxx_key, fortran_key = "c", "cxx", "fortran"
# Allow `@x.y.z` instead of `@=x.y.z`
spec = spack.spec.parse_with_version_concrete(config["spec"])
compiler_spec = spack.spec.CompilerSpec(
package_name_to_compiler_name.get(spec.name, spec.name), spec.version
)
err_header = f"The external spec '{spec}' cannot be used as a compiler"
# If extra_attributes is missing, this entry may not be meant to be used as a
# compiler, so just leave a debug message rather than a loud warning.
if extra_attributes_key not in config:
tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key")
return None
extra_attributes = config[extra_attributes_key]
# If 'extra_attributes' is present, warn when 'compilers' is missing or there is no C compiler
if compilers_key not in extra_attributes:
warnings.warn(
f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'"
)
return None
attribute_compilers = extra_attributes[compilers_key]
if c_key not in attribute_compilers:
warnings.warn(
f"{err_header}: missing the C compiler path under "
f"'{extra_attributes_key}:{compilers_key}'"
)
return None
c_compiler = attribute_compilers[c_key]
# C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
if cxx_key not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
if fortran_key not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
# compilers format has cc/fc/f77, externals format has "c/fortran"
paths = {
"cc": c_compiler,
"cxx": attribute_compilers.get(cxx_key, None),
"fc": attribute_compilers.get(fortran_key, None),
"f77": attribute_compilers.get(fortran_key, None),
}
if not spec.architecture:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
target = host_platform.target("default_target").microarchitecture
else:
target = spec.architecture.target
if not target:
target = spack.platforms.host().target("default_target")
target = target.microarchitecture
operating_system = spec.os
if not operating_system:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
compiler_entry = {
"compiler": {
"spec": str(compiler_spec),
"paths": paths,
"flags": extra_attributes.get("flags", {}),
"operating_system": str(operating_system),
"target": str(target.family),
"modules": config.get("modules", []),
"environment": extra_attributes.get("environment", {}),
"extra_rpaths": extra_attributes.get("extra_rpaths", []),
"implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
}
}
return compiler_entry
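`_compiler_config_from_external` above only accepts an external entry whose `extra_attributes:compilers` provides at least a C compiler. An illustrative entry, written as the Python dict the YAML parses into (paths and versions are made up):

```python
external_entry = {
    "spec": "gcc@12.3.0",           # '@x.y.z' is allowed instead of '@=x.y.z'
    "prefix": "/usr",
    "extra_attributes": {
        "compilers": {
            "c": "/usr/bin/gcc",            # mandatory
            "cxx": "/usr/bin/g++",          # optional
            "fortran": "/usr/bin/gfortran"  # optional, mapped to both fc and f77
        }
    },
}
```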
def _init_compiler_config(
configuration: "spack.config.Configuration", *, scope: Optional[str]
) -> None:
"""Compiler search used when Spack has no compilers."""
compilers = find_compilers()
compilers_dict = []
for compiler in compilers:
compilers_dict.append(_to_dict(compiler))
configuration.set("compilers", compilers_dict, scope=scope)
def compiler_config_files():
config_files = list()
config = spack.config.CONFIG
for scope in config.writable_scopes:
for scope in config.file_scopes:
name = scope.name
compiler_config = config.get("compilers", scope=name)
if compiler_config:
@@ -141,7 +278,9 @@ def add_compilers_to_config(compilers, scope=None):
compilers: a list of Compiler objects.
scope: configuration scope to modify.
"""
compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
for compiler in compilers:
if not compiler.cc:
tty.debug(f"{compiler.spec} does not have a C compiler")
@@ -190,7 +329,9 @@ def _remove_compiler_from_scope(compiler_spec, scope):
True if one or more compiler entries were actually removed, False otherwise
"""
assert scope is not None, "a specific scope is needed when calling this function"
compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
filtered_compiler_config = [
compiler_entry
for compiler_entry in compiler_config
@@ -239,77 +380,79 @@ def all_compiler_specs(scope=None, init_config=True):
def find_compilers(
path_hints: Optional[List[str]] = None,
*,
scope: Optional[str] = None,
mixed_toolchain: bool = False,
max_workers: Optional[int] = None,
path_hints: Optional[List[str]] = None, *, mixed_toolchain=False
) -> List["spack.compiler.Compiler"]:
"""Searches for compiler in the paths given as argument. If any new compiler is found, the
configuration is updated, and the list of new compiler objects is returned.
"""Return the list of compilers found in the paths given as arguments.
Args:
path_hints: list of paths to search for compilers. A sensible default based on the ``PATH``
environment variable will be used if the value is None
scope: configuration scope to modify
mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
a certain language
max_workers: number of processes used to search for compilers
"""
import spack.detection
known_compilers = set(all_compilers(init_config=False))
if path_hints is None:
path_hints = get_path("PATH")
default_paths = fs.search_paths_for_executables(*path_hints)
if sys.platform == "win32":
default_paths.extend(windows_os.WindowsOs().compiler_search_paths)
compiler_pkgs = spack.repo.PATH.packages_with_tags(COMPILER_TAG, full=True)
detected_packages = spack.detection.by_path(
compiler_pkgs, path_hints=default_paths, max_workers=max_workers
# To detect the version of the compilers, we dispatch a certain number
# of function calls to different workers. Here we construct the list
# of arguments for each call.
arguments = []
for o in all_os_classes():
search_paths = getattr(o, "compiler_search_paths", default_paths)
arguments.extend(arguments_to_detect_version_fn(o, search_paths))
# Here we map the function arguments to the corresponding calls
tp = multiprocessing.pool.ThreadPool()
try:
detected_versions = tp.map(detect_version, arguments)
finally:
tp.close()
def valid_version(item: Tuple[Optional[DetectVersionArgs], Optional[str]]) -> bool:
value, error = item
if error is None:
return True
try:
# This will fail on Python 2.6 if a non-ASCII
# character is in the error
tty.debug(error)
except UnicodeEncodeError:
pass
return False
def remove_errors(
item: Tuple[Optional[DetectVersionArgs], Optional[str]]
) -> DetectVersionArgs:
value, _ = item
assert value is not None
return value
return make_compiler_list(
[remove_errors(detected) for detected in detected_versions if valid_version(detected)],
mixed_toolchain=mixed_toolchain,
)
valid_compilers = {}
for name, detected in detected_packages.items():
compilers = [x for x in detected if CompilerConfigFactory.from_external_spec(x.spec)]
if not compilers:
continue
valid_compilers[name] = compilers
def _has_fortran_compilers(x):
if "compilers" not in x.spec.extra_attributes:
return False
def find_new_compilers(
path_hints: Optional[List[str]] = None,
scope: Optional[str] = None,
*,
mixed_toolchain: bool = False,
):
"""Same as ``find_compilers`` but return only the compilers that are not
already in compilers.yaml.
return "fortran" in x.spec.extra_attributes["compilers"]
Args:
path_hints: list of paths to search for compilers. A sensible default based on the ``PATH``
environment variable will be used if the value is None
scope: scope to look for a compiler. If None, consider the merged configuration.
mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
a certain language
"""
compilers = find_compilers(path_hints, mixed_toolchain=mixed_toolchain)
if mixed_toolchain:
gccs = [x for x in valid_compilers.get("gcc", []) if _has_fortran_compilers(x)]
if gccs:
best_gcc = sorted(
gccs, key=lambda x: spack.spec.parse_with_version_concrete(x.spec).version
)[-1]
gfortran = best_gcc.spec.extra_attributes["compilers"]["fortran"]
for name in ("llvm", "apple-clang"):
if name not in valid_compilers:
continue
candidates = valid_compilers[name]
for candidate in candidates:
if _has_fortran_compilers(candidate):
continue
candidate.spec.extra_attributes["compilers"]["fortran"] = gfortran
new_compilers = []
for name, detected in valid_compilers.items():
for config in CompilerConfigFactory.from_specs([x.spec for x in detected]):
c = _compiler_from_config_entry(config["compiler"])
if c in known_compilers:
continue
new_compilers.append(c)
add_compilers_to_config(new_compilers, scope=scope)
return new_compilers
return select_new_compilers(compilers, scope)
def select_new_compilers(compilers, scope=None):
@@ -319,9 +462,7 @@ def select_new_compilers(compilers, scope=None):
compilers_not_in_config = []
for c in compilers:
arch_spec = spack.spec.ArchSpec((None, c.operating_system, c.target))
same_specs = compilers_for_spec(
c.spec, arch_spec=arch_spec, scope=scope, init_config=False
)
same_specs = compilers_for_spec(c.spec, arch_spec, scope=scope, init_config=False)
if not same_specs:
compilers_not_in_config.append(c)
@@ -347,7 +488,7 @@ def supported_compilers_for_host_platform() -> List[str]:
return supported_compilers_for_platform(host_plat)
def supported_compilers_for_platform(platform: "spack.platforms.Platform") -> List[str]:
def supported_compilers_for_platform(platform: spack.platforms.Platform) -> List[str]:
"""Return a set of compiler class objects supported by Spack
that are also supported by the provided platform
@@ -369,9 +510,8 @@ def replace_apple_clang(name):
return [replace_apple_clang(name) for name in all_compiler_module_names()]
@llnl.util.lang.memoized
def all_compiler_module_names() -> List[str]:
return list(llnl.util.lang.list_modules(spack.paths.compilers_path))
return [name for name in llnl.util.lang.list_modules(spack.paths.compilers_path)]
@_auto_compiler_spec
@@ -391,12 +531,7 @@ def find(compiler_spec, scope=None, init_config=True):
def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):
"""Return specs of available compilers that match the supplied
compiler spec. Return an empty list if nothing found."""
return [
c.spec
for c in compilers_for_spec(
compiler_spec, arch_spec=arch_spec, scope=scope, init_config=init_config
)
]
return [c.spec for c in compilers_for_spec(compiler_spec, arch_spec, scope, True, init_config)]
def all_compilers(scope=None, init_config=True):
@@ -418,11 +553,14 @@ def all_compilers_from(configuration, scope=None, init_config=True):
@_auto_compiler_spec
def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config=True):
def compilers_for_spec(
compiler_spec, arch_spec=None, scope=None, use_cache=True, init_config=True
):
"""This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found.
"""
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
matches = set(find(compiler_spec, scope, init_config))
compilers = []
for cspec in matches:
@@ -431,7 +569,7 @@ def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config
def compilers_for_arch(arch_spec, scope=None):
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=False)
config = all_compilers_config(spack.config.CONFIG, scope=scope)
return list(get_compilers(config, arch_spec=arch_spec))
@@ -463,15 +601,13 @@ def compiler_from_dict(items):
os = items.get("operating_system", None)
target = items.get("target", None)
if not (
"paths" in items and all(n in items["paths"] for n in spack.compiler.PATH_INSTANCE_VARS)
):
if not ("paths" in items and all(n in items["paths"] for n in _path_instance_vars)):
raise InvalidCompilerConfigurationError(cspec)
cls = class_for_compiler_name(cspec.name)
compiler_paths = []
for c in spack.compiler.PATH_INSTANCE_VARS:
for c in _path_instance_vars:
compiler_path = items["paths"][c]
if compiler_path != "None":
compiler_paths.append(compiler_path)
@@ -599,6 +735,24 @@ def compiler_for_spec(compiler_spec, arch_spec):
return compilers[0]
@_auto_compiler_spec
def get_compiler_duplicates(compiler_spec, arch_spec):
config = spack.config.CONFIG
scope_to_compilers = {}
for scope in config.scopes:
compilers = compilers_for_spec(compiler_spec, arch_spec=arch_spec, scope=scope)
if compilers:
scope_to_compilers[scope] = compilers
cfg_file_to_duplicates = {}
for scope, compilers in scope_to_compilers.items():
config_file = config.get_config_filename(scope, "compilers")
cfg_file_to_duplicates[config_file] = compilers
return cfg_file_to_duplicates
@llnl.util.lang.memoized
def class_for_compiler_name(compiler_name):
"""Given a compiler module name, get the corresponding Compiler class."""
@@ -612,7 +766,7 @@ def class_for_compiler_name(compiler_name):
submodule_name = compiler_name.replace("-", "_")
module_name = ".".join(["spack", "compilers", submodule_name])
module_obj = importlib.import_module(module_name)
module_obj = __import__(module_name, fromlist=[None])
cls = getattr(module_obj, mod_to_class(compiler_name))
# make a note of the name in the module so we can get to it easily.
@@ -621,10 +775,272 @@ def class_for_compiler_name(compiler_name):
return cls
def all_os_classes():
"""
Return the list of classes for all operating systems available on
this platform
"""
classes = []
platform = spack.platforms.host()
for os_class in platform.operating_sys.values():
classes.append(os_class)
return classes
def all_compiler_types():
return [class_for_compiler_name(c) for c in supported_compilers()]
#: Gathers the attribute values by which a detected compiler is considered
#: unique in Spack.
#:
#: - os: the operating system
#: - compiler_name: the name of the compiler (e.g. 'gcc', 'clang', etc.)
#: - version: the version of the compiler
#:
CompilerID = collections.namedtuple("CompilerID", ["os", "compiler_name", "version"])
#: Variations on a matched compiler name
NameVariation = collections.namedtuple("NameVariation", ["prefix", "suffix"])
#: Groups together the arguments needed by `detect_version`. The four entries
#: in the tuple are:
#:
#: - id: An instance of the CompilerID named tuple (version can be set to None
#: as it will be detected later)
#: - variation: a NameVariation for file being tested
#: - language: compiler language being tested (one of 'cc', 'cxx', 'fc', 'f77')
#: - path: full path to the executable being tested
#:
DetectVersionArgs = collections.namedtuple(
"DetectVersionArgs", ["id", "variation", "language", "path"]
)
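An illustrative instance of the tuples defined above, for a suffixed gcc found on `PATH`; the namedtuples are redefined so the snippet runs standalone, and the `os` field is a plain string here for illustration (in Spack it is an operating-system object):

```python
import collections

CompilerID = collections.namedtuple("CompilerID", ["os", "compiler_name", "version"])
NameVariation = collections.namedtuple("NameVariation", ["prefix", "suffix"])
DetectVersionArgs = collections.namedtuple(
    "DetectVersionArgs", ["id", "variation", "language", "path"]
)

args = DetectVersionArgs(
    # version is None until detect_version fills it in
    id=CompilerID(os="ubuntu22.04", compiler_name="gcc", version=None),
    variation=NameVariation(prefix="", suffix="-12"),
    language="cc",
    path="/usr/bin/gcc-12",
)
```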
def arguments_to_detect_version_fn(
operating_system: spack.operating_systems.OperatingSystem, paths: List[str]
) -> List[DetectVersionArgs]:
"""Returns a list of DetectVersionArgs tuples to be used in a
corresponding function to detect compiler versions.
The ``operating_system`` instance can customize the behavior of this
function by providing a method with the same name, which is used in
place of the default.
Args:
operating_system: the operating system on which we are looking for compilers
paths: paths to search for compilers
Returns:
List of DetectVersionArgs tuples. Each item in the list will be later
mapped to the corresponding function call to detect the version of the
compilers in this OS.
"""
def _default(search_paths: List[str]) -> List[DetectVersionArgs]:
command_arguments: List[DetectVersionArgs] = []
files_to_be_tested = fs.files_in(*search_paths)
for compiler_name in supported_compilers_for_host_platform():
compiler_cls = class_for_compiler_name(compiler_name)
for language in ("cc", "cxx", "f77", "fc"):
# Select only the files matching a regexp
for (file, full_path), regexp in itertools.product(
files_to_be_tested, compiler_cls.search_regexps(language)
):
match = regexp.match(file)
if match:
compiler_id = CompilerID(operating_system, compiler_name, None)
detect_version_args = DetectVersionArgs(
id=compiler_id,
variation=NameVariation(*match.groups()),
language=language,
path=full_path,
)
command_arguments.append(detect_version_args)
return command_arguments
fn = getattr(operating_system, "arguments_to_detect_version_fn", _default)
return fn(paths)
def detect_version(
detect_version_args: DetectVersionArgs,
) -> Tuple[Optional[DetectVersionArgs], Optional[str]]:
"""Computes the version of a compiler and adds it to the information
passed as input.
As this function is meant to be executed by worker processes, it won't
raise any exceptions; instead it returns a (value, error) tuple that
needs to be checked by the code dispatching the calls.
Args:
detect_version_args: information on the compiler for which we should detect the version.
Returns:
A ``(DetectVersionArgs, error)`` tuple. If ``error`` is ``None`` the
version of the compiler was computed correctly and the first argument
of the tuple will contain it. Otherwise ``error`` is a string
containing an explanation on why the version couldn't be computed.
"""
def _default(fn_args):
compiler_id = fn_args.id
language = fn_args.language
compiler_cls = class_for_compiler_name(compiler_id.compiler_name)
path = fn_args.path
# Get compiler names and the callback to detect their versions
callback = getattr(compiler_cls, f"{language}_version")
try:
version = callback(path)
if version and str(version).strip() and version != "unknown":
value = fn_args._replace(id=compiler_id._replace(version=version))
return value, None
error = f"Couldn't get version for compiler {path}".format(path)
except spack.util.executable.ProcessError as e:
error = f"Couldn't get version for compiler {path}\n" + str(e)
except spack.util.executable.ProcessTimeoutError as e:
error = f"Couldn't get version for compiler {path}\n" + str(e)
except Exception as e:
# Catching "Exception" here is fine because it just
# means something went wrong running a candidate executable.
error = "Error while executing candidate compiler {0}" "\n{1}: {2}".format(
path, e.__class__.__name__, str(e)
)
return None, error
operating_system = detect_version_args.id.os
fn = getattr(operating_system, "detect_version", _default)
return fn(detect_version_args)
def make_compiler_list(
detected_versions: List[DetectVersionArgs], mixed_toolchain: bool = False
) -> List["spack.compiler.Compiler"]:
"""Process a list of detected versions and turn them into a list of
compiler objects.
Args:
detected_versions: list of DetectVersionArgs containing a valid version
mixed_toolchain: allow mixing compilers from different toolchains if a language is missing
Returns:
list: list of Compiler objects
"""
group_fn = lambda x: (x.id, x.variation, x.language)
sorted_compilers = sorted(detected_versions, key=group_fn)
# Gather items in a dictionary by the id, name variation and language
compilers_d: Dict[CompilerID, Dict[NameVariation, dict]] = {}
for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn):
compiler_id, name_variation, language = sort_key
by_compiler_id = compilers_d.setdefault(compiler_id, {})
by_name_variation = by_compiler_id.setdefault(name_variation, {})
by_name_variation[language] = next(x.path for x in group)
def _default_make_compilers(cmp_id, paths):
operating_system, compiler_name, version = cmp_id
compiler_cls = class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, f"={version}")
paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
# TODO: johnwparent - revisit the following line as per discussion at:
# https://github.com/spack/spack/pull/33385/files#r1040036318
target = archspec.cpu.host()
compiler = compiler_cls(spec, operating_system, str(target.family), paths)
return [compiler]
# For compilers with the same compiler id:
#
# - Prefer with C compiler to without
# - Prefer with C++ compiler to without
# - Prefer no variations to variations (e.g., clang to clang-gpu)
#
sort_fn = lambda variation: (
"cc" not in by_compiler_id[variation], # None last
"cxx" not in by_compiler_id[variation], # None last
getattr(variation, "prefix", None),
getattr(variation, "suffix", None),
)
# Flatten to a list of compiler id, primary variation and compiler dictionary
flat_compilers: List[Tuple[CompilerID, NameVariation, dict]] = []
for compiler_id, by_compiler_id in compilers_d.items():
ordered = sorted(by_compiler_id, key=sort_fn)
selected_variation = ordered[0]
selected = by_compiler_id[selected_variation]
# Fill any missing parts from subsequent entries (without mixing toolchains)
for lang in ["cxx", "f77", "fc"]:
if lang not in selected:
next_lang = next(
(by_compiler_id[v][lang] for v in ordered if lang in by_compiler_id[v]), None
)
if next_lang:
selected[lang] = next_lang
flat_compilers.append((compiler_id, selected_variation, selected))
# Next, fill out the blanks of missing compilers by creating a mixed toolchain (if requested)
if mixed_toolchain:
make_mixed_toolchain(flat_compilers)
# Finally, create the compiler list
compilers: List["spack.compiler.Compiler"] = []
for compiler_id, _, compiler in flat_compilers:
make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
candidates = make_compilers(compiler_id, compiler)
compilers.extend(x for x in candidates if x.cc is not None)
return compilers
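A toy illustration of the bucketing step at the top of `make_compiler_list`: detected entries are sorted and grouped by (compiler id, name variation, language), then flattened into nested dicts keyed by id and variation (plain tuples stand in for the named tuples):

```python
import itertools

detected = [
    (("gcc", "12"), ("", "-12"), "cc", "/usr/bin/gcc-12"),
    (("gcc", "12"), ("", "-12"), "cxx", "/usr/bin/g++-12"),
]
key = lambda t: (t[0], t[1], t[2])

buckets = {}
for (cid, variation, lang), group in itertools.groupby(sorted(detected, key=key), key=key):
    # keep the first path seen for each (id, variation, language)
    buckets.setdefault(cid, {}).setdefault(variation, {})[lang] = next(g[3] for g in group)

print(buckets)
# {('gcc', '12'): {('', '-12'): {'cc': '/usr/bin/gcc-12', 'cxx': '/usr/bin/g++-12'}}}
```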
def make_mixed_toolchain(compilers: List[Tuple[CompilerID, NameVariation, dict]]) -> None:
"""Add missing compilers across toolchains when they are missing for a particular language.
This currently only adds the most sensible gfortran to (apple)-clang if it doesn't have a
fortran compiler (no flang)."""
# First collect the clangs that are missing a fortran compiler
clangs_without_flang = [
(id, variation, compiler)
for id, variation, compiler in compilers
if id.compiler_name in ("clang", "apple-clang")
and "f77" not in compiler
and "fc" not in compiler
]
if not clangs_without_flang:
return
# Filter on GCCs with fortran compiler
gccs_with_fortran = [
(id, variation, compiler)
for id, variation, compiler in compilers
if id.compiler_name == "gcc" and "f77" in compiler and "fc" in compiler
]
# Sort these GCCs by "best variation" (no prefix / suffix first)
gccs_with_fortran.sort(
key=lambda x: (getattr(x[1], "prefix", None), getattr(x[1], "suffix", None))
)
# Attach the optimal GCC fortran compiler to the clangs that don't have one
for clang_id, _, clang_compiler in clangs_without_flang:
gcc_compiler = next(
(gcc[2] for gcc in gccs_with_fortran if gcc[0].os == clang_id.os), None
)
if not gcc_compiler:
continue
# Update the fc / f77 entries
clang_compiler["f77"] = gcc_compiler["f77"]
clang_compiler["fc"] = gcc_compiler["fc"]
def is_mixed_toolchain(compiler):
"""Returns True if the current compiler is a mixed toolchain,
False otherwise.
@@ -671,164 +1087,20 @@ def name_matches(name, name_list):
return False
_EXTRA_ATTRIBUTES_KEY = "extra_attributes"
_COMPILERS_KEY = "compilers"
_C_KEY = "c"
_CXX_KEY, _FORTRAN_KEY = "cxx", "fortran"
class CompilerConfigFactory:
"""Class aggregating all ways of constructing a list of compiler config entries."""
@staticmethod
def from_specs(specs: List["spack.spec.Spec"]) -> List[dict]:
result = []
compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
for s in specs:
if s.name not in compiler_package_names:
continue
candidate = CompilerConfigFactory.from_external_spec(s)
if candidate is None:
continue
result.append(candidate)
return result
@staticmethod
def from_packages_yaml(packages_yaml) -> List[dict]:
compiler_specs = []
compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
for name, entry in packages_yaml.items():
if name not in compiler_package_names:
continue
externals_config = entry.get("externals", None)
if not externals_config:
continue
current_specs = []
for current_external in externals_config:
compiler = CompilerConfigFactory._spec_from_external_config(current_external)
if compiler:
current_specs.append(compiler)
compiler_specs.extend(current_specs)
return CompilerConfigFactory.from_specs(compiler_specs)
@staticmethod
def _spec_from_external_config(config):
# Allow `@x.y.z` instead of `@=x.y.z`
err_header = f"The external spec '{config['spec']}' cannot be used as a compiler"
# If extra_attributes is missing, this entry may not be meant to be used as a
# compiler, so just leave a debug message rather than a loud warning.
if _EXTRA_ATTRIBUTES_KEY not in config:
tty.debug(f"[{__file__}] {err_header}: missing the '{_EXTRA_ATTRIBUTES_KEY}' key")
return None
extra_attributes = config[_EXTRA_ATTRIBUTES_KEY]
result = spack.spec.Spec(
str(spack.spec.parse_with_version_concrete(config["spec"])),
external_modules=config.get("modules"),
)
result.extra_attributes = extra_attributes
return result
@staticmethod
def from_external_spec(spec: "spack.spec.Spec") -> Optional[dict]:
spec = spack.spec.parse_with_version_concrete(spec)
extra_attributes = getattr(spec, _EXTRA_ATTRIBUTES_KEY, None)
if extra_attributes is None:
return None
paths = CompilerConfigFactory._extract_compiler_paths(spec)
if paths is None:
return None
compiler_spec = spack.spec.CompilerSpec(
package_name_to_compiler_name.get(spec.name, spec.name), spec.version
)
operating_system, target = CompilerConfigFactory._extract_os_and_target(spec)
compiler_entry = {
"compiler": {
"spec": str(compiler_spec),
"paths": paths,
"flags": extra_attributes.get("flags", {}),
"operating_system": str(operating_system),
"target": str(target.family),
"modules": getattr(spec, "external_modules", []),
"environment": extra_attributes.get("environment", {}),
"extra_rpaths": extra_attributes.get("extra_rpaths", []),
"implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
}
}
return compiler_entry
@staticmethod
def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]:
err_header = f"The external spec '{spec}' cannot be used as a compiler"
extra_attributes = spec.extra_attributes
# If 'extra_attributes' is present, warn when 'compilers' is missing
# or there is no C compiler
if _COMPILERS_KEY not in extra_attributes:
warnings.warn(
f"{err_header}: missing the '{_COMPILERS_KEY}' key under '{_EXTRA_ATTRIBUTES_KEY}'"
)
return None
attribute_compilers = extra_attributes[_COMPILERS_KEY]
if _C_KEY not in attribute_compilers:
warnings.warn(
f"{err_header}: missing the C compiler path under "
f"'{_EXTRA_ATTRIBUTES_KEY}:{_COMPILERS_KEY}'"
)
return None
c_compiler = attribute_compilers[_C_KEY]
# C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
if _CXX_KEY not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
if _FORTRAN_KEY not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
# compilers format has cc/fc/f77, externals format has "c/fortran"
return {
"cc": c_compiler,
"cxx": attribute_compilers.get(_CXX_KEY, None),
"fc": attribute_compilers.get(_FORTRAN_KEY, None),
"f77": attribute_compilers.get(_FORTRAN_KEY, None),
}
@staticmethod
def _extract_os_and_target(spec: "spack.spec.Spec"):
if not spec.architecture:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
target = host_platform.target("default_target").microarchitecture
else:
target = spec.architecture.target
if not target:
target = spack.platforms.host().target("default_target")
target = target.microarchitecture
operating_system = spec.os
if not operating_system:
host_platform = spack.platforms.host()
operating_system = host_platform.operating_system("default_os")
return operating_system, target
class InvalidCompilerConfigurationError(spack.error.SpackError):
def __init__(self, compiler_spec):
super().__init__(
f'Invalid configuration for [compiler "{compiler_spec}"]: ',
f"Compiler configuration must contain entries for "
f"all compilers: {spack.compiler.PATH_INSTANCE_VARS}",
'Invalid configuration for [compiler "%s"]: ' % compiler_spec,
"Compiler configuration must contain entries for all compilers: %s"
% _path_instance_vars,
)
class NoCompilersError(spack.error.SpackError):
def __init__(self):
super().__init__("Spack could not find any compilers!")
class UnknownCompilerError(spack.error.SpackError):
def __init__(self, compiler_name):
super().__init__("Spack doesn't support the requested compiler: {0}".format(compiler_name))
@@ -839,3 +1111,25 @@ def __init__(self, compiler_spec, target):
super().__init__(
"No compilers for operating system %s satisfy spec %s" % (target, compiler_spec)
)
class CompilerDuplicateError(spack.error.SpackError):
def __init__(self, compiler_spec, arch_spec):
config_file_to_duplicates = get_compiler_duplicates(compiler_spec, arch_spec)
duplicate_table = list((x, len(y)) for x, y in config_file_to_duplicates.items())
descriptor = lambda num: "time" if num == 1 else "times"
duplicate_msg = lambda cfgfile, count: "{0}: {1} {2}".format(
cfgfile, str(count), descriptor(count)
)
msg = (
"Compiler configuration contains entries with duplicate"
+ " specification ({0}, {1})".format(compiler_spec, arch_spec)
+ " in the following files:\n\t"
+ "\n\t".join(duplicate_msg(x, y) for x, y in duplicate_table)
)
super().__init__(msg)
class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):
def __init__(self, compiler_spec):
super().__init__("Multiple compilers satisfy spec %s" % compiler_spec)

Some files were not shown because too many files have changed in this diff.