Compare commits

1 commit (packages/r... → fix/fewer-...)

| Author | SHA1 | Date |
|---|---|---|
| | 8c563e3cf0 | |
.github/dependabot.yml (vendored): 11 lines changed

```diff
@@ -5,10 +5,13 @@ updates:
     directory: "/"
     schedule:
       interval: "daily"
-  # Requirements to run style checks and build documentation
+  # Requirements to build documentation
   - package-ecosystem: "pip"
-    directories:
-      - "/.github/workflows/requirements/style/*"
-      - "/lib/spack/docs"
+    directory: "/lib/spack/docs"
     schedule:
       interval: "daily"
+  # Requirements to run style checks
+  - package-ecosystem: "pip"
+    directory: "/.github/workflows/style"
+    schedule:
+      interval: "daily"
```
.github/workflows/audit.yaml (vendored): 10 lines changed

```diff
@@ -28,8 +28,8 @@ jobs:
       run:
         shell: ${{ matrix.system.shell }}
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: ${{inputs.python_version}}
     - name: Install Python packages
@@ -44,7 +44,6 @@ jobs:
       run: |
        . share/spack/setup-env.sh
        coverage run $(which spack) audit packages
        coverage run $(which spack) audit configs
-       coverage run $(which spack) -d audit externals
        coverage combine
        coverage xml
@@ -53,7 +52,6 @@ jobs:
       run: |
        . share/spack/setup-env.sh
        spack -d audit packages
        spack -d audit configs
-       spack -d audit externals
     - name: Package audits (without coverage)
       if: ${{ runner.os == 'Windows' }}
@@ -61,11 +59,9 @@ jobs:
        . share/spack/setup-env.sh
        spack -d audit packages
        ./share/spack/qa/validate_last_exit.ps1
        spack -d audit configs
        ./share/spack/qa/validate_last_exit.ps1
-       spack -d audit externals
-       ./share/spack/qa/validate_last_exit.ps1
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+    - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
       if: ${{ inputs.with_coverage == 'true' }}
       with:
         flags: unittests,audits
```
.github/workflows/bootstrap.yml (vendored): 29 lines changed

```diff
@@ -37,7 +37,7 @@ jobs:
          make patch unzip which xz python3 python3-devel tree \
            cmake bison
     - name: Checkout
-      uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
     - name: Bootstrap clingo
@@ -53,33 +53,27 @@ jobs:
     runs-on: ${{ matrix.runner }}
     strategy:
       matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
+        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
     steps:
     - name: Setup macOS
-      if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest' }}
+      if: ${{ matrix.runner != 'ubuntu-latest' }}
       run: |
         brew install cmake bison tree
     - name: Checkout
-      uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: "3.12"
     - name: Bootstrap clingo
-      env:
-        SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
-        SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
-        USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
-        VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
       run: |
-        ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
+        source share/spack/setup-env.sh
        spack bootstrap disable github-actions-v0.5
        spack bootstrap disable github-actions-v0.4
        spack external find --not-buildable cmake bison
        spack -d solve zlib
-        ${{ env.VALIDATE_LAST_EXIT }}
-        tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
+        tree ~/.spack/bootstrap/store/

   gnupg-sources:
     runs-on: ${{ matrix.runner }}
@@ -96,7 +90,7 @@ jobs:
       if: ${{ matrix.runner == 'ubuntu-latest' }}
       run: sudo rm -rf $(command -v gpg gpg2 patchelf)
     - name: Checkout
-      uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
     - name: Bootstrap GnuPG
@@ -125,10 +119,10 @@ jobs:
       run: |
         sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
     - name: Checkout
-      uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: |
           3.8
@@ -154,7 +148,7 @@ jobs:
           not_found=0
           old_path="$PATH"
           export PATH="$ver_dir:$PATH"
-          ./bin/spack-tmpconfig -b ./.github/workflows/bin/bootstrap-test.sh
+          ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
           export PATH="$old_path"
         fi
       fi
@@ -168,3 +162,4 @@ jobs:
         source share/spack/setup-env.sh
         spack -d gpg list
         tree ~/.spack/bootstrap/store/
+
```
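The `env:` block removed from the clingo job above relies on GitHub Actions' expression operators, where `cond && a || b` behaves like a ternary as long as the "true" value `a` is truthy. A minimal hypothetical sketch of the same idiom (job and variable names are illustrative, not taken from this diff):

```yaml
# Sketch: per-OS values computed with the `cond && a || b` expression idiom.
# Caveat: if the "true" value is falsy (e.g. ''), the || fallback wins instead.
jobs:
  demo:
    runs-on: ${{ matrix.runner }}
    strategy:
      matrix:
        runner: [ubuntu-latest, windows-latest]
    env:
      SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
    steps:
      - run: echo "would source share/spack/setup-env.${{ env.SCRIPT_EXT }}"
```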
.github/workflows/build-containers.yml (vendored): 21 lines changed

```diff
@@ -40,7 +40,8 @@ jobs:
         # 1: Platforms to build for
         # 2: Base image (e.g. ubuntu:22.04)
         dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
-                     [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
+                     [centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
+                     [centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
                      [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
                      [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
                      [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
@@ -55,7 +56,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
     - name: Checkout
-      uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29

     - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
       id: docker_meta
@@ -76,7 +77,7 @@ jobs:
       env:
         SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
       run: |
-        .github/workflows/bin/generate_spack_yaml_containerize.sh
+        .github/workflows/generate_spack_yaml_containerize.sh
        . share/spack/setup-env.sh
        mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
        spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile
@@ -87,19 +88,19 @@ jobs:
         fi

     - name: Upload Dockerfile
-      uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
+      uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
       with:
         name: dockerfiles_${{ matrix.dockerfile[0] }}
         path: dockerfiles

     - name: Set up QEMU
-      uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf
+      uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3

     - name: Set up Docker Buildx
-      uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db
+      uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb

     - name: Log in to GitHub Container Registry
-      uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
+      uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
       with:
         registry: ghcr.io
         username: ${{ github.actor }}
@@ -107,13 +108,13 @@ jobs:

     - name: Log in to DockerHub
       if: github.event_name != 'pull_request'
-      uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
+      uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
       with:
         username: ${{ secrets.DOCKERHUB_USERNAME }}
         password: ${{ secrets.DOCKERHUB_TOKEN }}

     - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-      uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85
+      uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
       with:
         context: dockerfiles/${{ matrix.dockerfile[0] }}
         platforms: ${{ matrix.dockerfile[1] }}
@@ -126,7 +127,7 @@ jobs:
     needs: deploy-images
     steps:
     - name: Merge Artifacts
-      uses: actions/upload-artifact/merge@834a144ee995460fba8ed112a2fc961b36a5ec5a
+      uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
       with:
         name: dockerfiles
         pattern: dockerfiles_*
```
.github/workflows/ci.yaml (vendored): 16 lines changed

```diff
@@ -36,7 +36,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       if: ${{ github.event_name == 'push' }}
       with:
         fetch-depth: 0
@@ -53,13 +53,6 @@ jobs:
         - 'var/spack/repos/builtin/packages/clingo/**'
         - 'var/spack/repos/builtin/packages/python/**'
         - 'var/spack/repos/builtin/packages/re2c/**'
-        - 'var/spack/repos/builtin/packages/gnupg/**'
-        - 'var/spack/repos/builtin/packages/libassuan/**'
-        - 'var/spack/repos/builtin/packages/libgcrypt/**'
-        - 'var/spack/repos/builtin/packages/libgpg-error/**'
-        - 'var/spack/repos/builtin/packages/libksba/**'
-        - 'var/spack/repos/builtin/packages/npth/**'
-        - 'var/spack/repos/builtin/packages/pinentry/**'
         - 'lib/spack/**'
         - 'share/spack/**'
         - '.github/workflows/bootstrap.yml'
@@ -84,8 +77,13 @@ jobs:
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/unit_tests.yaml
     secrets: inherit
+  windows:
+    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
+    needs: [ prechecks ]
+    uses: ./.github/workflows/windows_python.yml
+    secrets: inherit
   all:
-    needs: [ unit-tests, bootstrap ]
+    needs: [ windows, unit-tests, bootstrap ]
     runs-on: ubuntu-latest
     steps:
     - name: Success
```
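The `changes` job in ci.yaml publishes `core`/`packages` outputs that downstream jobs gate on, as the re-added `windows` job shows. A reduced sketch of the pattern, assuming a path-filter action along the lines of dorny/paths-filter (the actual action and SHA pin used by the workflow are not visible in this diff):

```yaml
# Hypothetical reduction of the gating pattern used in ci.yaml above.
jobs:
  changes:
    runs-on: ubuntu-latest
    outputs:
      core: ${{ steps.filter.outputs.core }}   # 'true' when core paths changed
    steps:
      - uses: actions/checkout@v4
      - uses: dorny/paths-filter@v3            # assumed action; the real workflow pins a SHA
        id: filter
        with:
          filters: |
            core:
              - 'lib/spack/**'
  windows:
    needs: [ changes ]
    if: ${{ needs.changes.outputs.core == 'true' }}
    runs-on: windows-latest
    steps:
      - run: echo "core sources changed, run the Windows suite"
```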
.github/workflows/install_spack.sh (vendored, new executable file): 8 lines

```diff
@@ -0,0 +1,8 @@
+#!/usr/bin/env sh
+. share/spack/setup-env.sh
+echo -e "config:\n build_jobs: 2" > etc/spack/config.yaml
+spack config add "packages:all:target:[x86_64]"
+spack compiler find
+spack compiler info apple-clang
+spack debug report
+spack solve zlib
```
.github/workflows/nightly-win-builds.yml (vendored): 4 lines changed

```diff
@@ -14,10 +14,10 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: 3.9
     - name: Install Python packages
```
```diff
@@ -1,6 +1,6 @@
-black==24.8.0
+black==24.4.2
 clingo==5.7.1
-flake8==7.1.1
+flake8==7.0.0
 isort==5.13.2
 mypy==1.8.0
 types-six==1.16.21.20240513
```
.github/workflows/unit_tests.yaml (vendored): 81 lines changed

```diff
@@ -16,34 +16,45 @@ jobs:
       matrix:
         os: [ubuntu-latest]
         python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
         concretizer: ['clingo']
         on_develop:
         - ${{ github.ref == 'refs/heads/develop' }}
         include:
         - python-version: '3.11'
           os: ubuntu-latest
           concretizer: original
           on_develop: ${{ github.ref == 'refs/heads/develop' }}
+        - python-version: '3.6'
+          os: ubuntu-20.04
+          concretizer: clingo
+          on_develop: ${{ github.ref == 'refs/heads/develop' }}
         exclude:
         - python-version: '3.7'
           os: ubuntu-latest
           concretizer: 'clingo'
           on_develop: false
         - python-version: '3.8'
           os: ubuntu-latest
           concretizer: 'clingo'
           on_develop: false
         - python-version: '3.9'
           os: ubuntu-latest
           concretizer: 'clingo'
           on_develop: false
         - python-version: '3.10'
           os: ubuntu-latest
           concretizer: 'clingo'
           on_develop: false
         - python-version: '3.11'
           os: ubuntu-latest
           concretizer: 'clingo'
           on_develop: false

     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install System packages
@@ -61,7 +72,7 @@ jobs:
       run: |
         # Need this for the git tests to succeed.
         git --version
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
     - name: Bootstrap clingo
       if: ${{ matrix.concretizer == 'clingo' }}
       env:
@@ -74,12 +85,13 @@ jobs:
     - name: Run unit tests
       env:
         SPACK_PYTHON: python
         SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
         SPACK_TEST_PARALLEL: 2
         COVERAGE: true
+        UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
       run: |
         share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+    - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
       with:
         flags: unittests,linux,${{ matrix.concretizer }}
         token: ${{ secrets.CODECOV_TOKEN }}
@@ -88,10 +100,10 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
     - name: Install System packages
@@ -106,13 +118,13 @@ jobs:
       run: |
         # Need this for the git tests to succeed.
         git --version
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
     - name: Run shell tests
       env:
         COVERAGE: true
       run: |
         share/spack/qa/run-shell-tests
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+    - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
       with:
         flags: shelltests,linux
         token: ${{ secrets.CODECOV_TOKEN }}
@@ -129,13 +141,13 @@ jobs:
         dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
     - name: Setup repo and non-root user
       run: |
         git --version
         git config --global --add safe.directory /__w/spack/spack
         git fetch --unshallow
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
         useradd spack-test
         chown -R spack-test .
     - name: Run unit tests
@@ -148,10 +160,10 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
     - name: Install System packages
@@ -166,13 +178,14 @@ jobs:
       run: |
         # Need this for the git tests to succeed.
         git --version
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
     - name: Run unit tests (full suite with coverage)
       env:
         COVERAGE: true
+        SPACK_TEST_SOLVER: clingo
       run: |
         share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+    - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
       with:
         flags: unittests,linux,clingo
         token: ${{ secrets.CODECOV_TOKEN }}
@@ -185,10 +198,10 @@ jobs:
       os: [macos-13, macos-14]
       python-version: ["3.11"]
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install Python packages
@@ -200,48 +213,18 @@ jobs:
         brew install dash fish gcc gnupg2 kcov
     - name: Run unit tests
       env:
         SPACK_TEST_SOLVER: clingo
         SPACK_TEST_PARALLEL: 4
       run: |
         git --version
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
         . share/spack/setup-env.sh
         $(which spack) bootstrap disable spack-install
         $(which spack) solve zlib
         common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
         $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+    - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
       with:
         flags: unittests,macos
         token: ${{ secrets.CODECOV_TOKEN }}
         verbose: true
-  # Run unit tests on Windows
-  windows:
-    defaults:
-      run:
-        shell:
-          powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
-    runs-on: windows-latest
-    steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
-    - name: Create local develop
-      run: |
-        ./.github/workflows/bin/setup_git.ps1
-    - name: Unit Test
-      run: |
-        spack unit-test -x --verbose --cov --cov-config=pyproject.toml
-        ./share/spack/qa/validate_last_exit.ps1
-        coverage combine -a
-        coverage xml
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
-      with:
-        flags: unittests,windows
-        token: ${{ secrets.CODECOV_TOKEN }}
-        verbose: true
```
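The unit-test matrix above uses a computed `on_develop` dimension so that `exclude` entries only remove older Pythons on non-develop refs. A stripped-down sketch of the trick (the versions shown are arbitrary):

```yaml
# The single-element on_develop dimension evaluates to true only on develop;
# exclude entries keyed to `on_develop: false` then drop out on every other ref.
strategy:
  matrix:
    python-version: ['3.7', '3.12']
    on_develop:
    - ${{ github.ref == 'refs/heads/develop' }}
    exclude:
    - python-version: '3.7'
      on_develop: false   # skip 3.7 everywhere except develop
```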
.github/workflows/valid-style.yml (vendored): 18 lines changed

```diff
@@ -18,15 +18,15 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
         cache: 'pip'
     - name: Install Python Packages
       run: |
         pip install --upgrade pip setuptools
-        pip install -r .github/workflows/requirements/style/requirements.txt
+        pip install -r .github/workflows/style/requirements.txt
     - name: vermin (Spack's Core)
       run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
     - name: vermin (Repositories)
@@ -35,22 +35,22 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
         cache: 'pip'
     - name: Install Python packages
       run: |
         pip install --upgrade pip setuptools
-        pip install -r .github/workflows/requirements/style/requirements.txt
+        pip install -r .github/workflows/style/requirements.txt
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
         git --version
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
     - name: Run style tests
       run: |
         share/spack/qa/run-style-tests
@@ -70,13 +70,13 @@ jobs:
         dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
     - name: Setup repo and non-root user
       run: |
         git --version
         git config --global --add safe.directory /__w/spack/spack
         git fetch --unshallow
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
         useradd spack-test
         chown -R spack-test .
     - name: Bootstrap Spack development environment
```
.github/workflows/windows_python.yml (vendored, new file): 83 lines

```yaml
name: windows

on:
  workflow_call:

concurrency:
  group: windows-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
  cancel-in-progress: true

defaults:
  run:
    shell:
      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
jobs:
  unit-tests:
    runs-on: windows-latest
    steps:
    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
      with:
        fetch-depth: 0
    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: 3.9
    - name: Install Python packages
      run: |
        python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
    - name: Create local develop
      run: |
        ./.github/workflows/setup_git.ps1
    - name: Unit Test
      run: |
        spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
        ./share/spack/qa/validate_last_exit.ps1
        coverage combine -a
        coverage xml
    - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
      with:
        flags: unittests,windows
        token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
  unit-tests-cmd:
    runs-on: windows-latest
    steps:
    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
      with:
        fetch-depth: 0
    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: 3.9
    - name: Install Python packages
      run: |
        python -m pip install --upgrade pip pywin32 setuptools coverage pytest-cov clingo
    - name: Create local develop
      run: |
        ./.github/workflows/setup_git.ps1
    - name: Command Unit Test
      run: |
        spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
        ./share/spack/qa/validate_last_exit.ps1
        coverage combine -a
        coverage xml
    - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
      with:
        flags: unittests,windows
        token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
  build-abseil:
    runs-on: windows-latest
    steps:
    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
      with:
        fetch-depth: 0
    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: 3.9
    - name: Install Python packages
      run: |
        python -m pip install --upgrade pip pywin32 setuptools coverage
    - name: Build Test
      run: |
        spack compiler find
        spack -d external find cmake ninja
        spack -d install abseil-cpp
```
CHANGELOG.md: 321 lines changed

@@ -1,324 +1,3 @@

# v0.22.0 (2024-05-12)

`v0.22.0` is a major feature release.

## Features in this release

1. **Compiler dependencies**

   We are in the process of making compilers proper dependencies in Spack, and a number
   of changes in `v0.22` support that effort. You may notice nodes in your dependency
   graphs for compiler runtime libraries like `gcc-runtime` or `libgfortran`, and you
   may notice that Spack graphs now include `libc`. We've also begun moving compiler
   configuration from `compilers.yaml` to `packages.yaml` to make it consistent with
   other externals. We are trying to do this with the least disruption possible, so
   your existing `compilers.yaml` files should still work. We expect to be done with
   this transition by the `v0.23` release in November.

   * #41104: Packages compiled with `%gcc` on Linux, macOS and FreeBSD now depend on a
     new package `gcc-runtime`, which contains a copy of the shared compiler runtime
     libraries. This enables gcc runtime libraries to be installed and relocated when
     using a build cache. When building minimal Spack-generated container images it is
     no longer necessary to install libgfortran, libgomp etc. using the system package
     manager.

   * #42062: Packages compiled with `%oneapi` now depend on a new package
     `intel-oneapi-runtime`. This is similar to `gcc-runtime`, and the runtimes can
     provide virtuals and compilers can inject dependencies on virtuals into compiled
     packages. This allows us to model library soname compatibility and allows
     compilers like `%oneapi` to provide virtuals like `sycl` (which can also be
     provided by standalone libraries). Note that until we have an agreement in place
     with intel, Intel packages are marked `redistribute(source=False, binary=False)`
     and must be downloaded outside of Spack.

   * #43272: changes to the optimization criteria of the solver improve the hit-rate of
     buildcaches by a fair amount. The solver has more relaxed compatibility rules and will
     not try to strictly match compilers or targets of reused specs. Users can still
     enforce the previous strict behavior with `require:` sections in `packages.yaml`.
     Note that to enforce correct linking, Spack will *not* reuse old `%gcc` and
     `%oneapi` specs that do not have the runtime libraries as a dependency.

   * #43539: Spack will reuse specs built with compilers that are *not* explicitly
     configured in `compilers.yaml`. Because we can now keep runtime libraries in the
     build cache, we do not require you to also have a local configured compiler to *use*
     the runtime libraries. This improves reuse in buildcaches and avoids conflicts with
     OS updates that happen underneath Spack.

   * #43190: binary compatibility on `linux` is now based on the `libc` version,
     instead of on the `os` tag. Spack builds now detect the host `libc` (`glibc` or
     `musl`) and add it as an implicit external node in the dependency graph. Binaries
     with a `libc` with the same name and a version less than or equal to that of the
     detected `libc` can be reused. This is only on `linux`, not `macos` or `Windows`.

   * #43464: each package that can provide a compiler is now detectable using `spack
     external find`. External packages defining compiler paths are effectively used as
     compilers, and `spack external find -t compiler` can be used as a substitute for
     `spack compiler find`. More details on this transition are in
     [the docs](https://spack.readthedocs.io/en/latest/getting_started.html#manual-compiler-configuration)
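Since item 1 describes compiler configuration migrating into `packages.yaml`, a minimal sketch of what such an external-compiler entry can look like may help; the version, paths, and variant below are illustrative assumptions, not values taken from this diff:

```yaml
# Hypothetical packages.yaml entry: a compiler registered as an ordinary external
packages:
  gcc:
    externals:
    - spec: gcc@12.3.0 languages=c,c++,fortran
      prefix: /usr
      extra_attributes:
        compilers:
          c: /usr/bin/gcc
          cxx: /usr/bin/g++
          fortran: /usr/bin/gfortran
```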
2. **Improved `spack find` UI for Environments**

   If you're working in an environment, you likely care about:

   * What are the roots
   * Which ones are installed / not installed
   * What's been added that still needs to be concretized

   We've tweaked `spack find` in environments to show this information much more
   clearly. Installation status is shown next to each root, so you can see what is
   installed. Roots are also shown in bold in the list of installed packages. There is
   also a new option for `spack find -r` / `--only-roots` that will only show env
   roots, if you don't want to look at all the installed specs.

   More details in #42334.

3. **Improved command-line string quoting**

   We are making some breaking changes to how Spack parses specs on the CLI in order to
   respect shell quoting instead of trying to fight it. If you (sadly) had to write
   something like this on the command line:

   ```
   spack install zlib cflags=\"-O2 -g\"
   ```

   That will now result in an error, but you can now write what you probably expected
   to work in the first place:

   ```
   spack install zlib cflags="-O2 -g"
   ```

   Quoted strings can also now include special characters, so you can supply flags like:

   ```
   spack install zlib ldflags='-Wl,-rpath=$ORIGIN/_libs'
   ```

   To reduce ambiguity in parsing, we now require that you *not* put spaces around `=`
   and `==` when specifying flags or variants. This would not have broken before but
   will now result in an error:

   ```
   spack install zlib cflags = "-O2 -g"
   ```

   More details and discussion in #30634.

4. **Revert default `spack install` behavior to `--reuse`**

   We changed the default concretizer behavior from `--reuse` to `--reuse-deps` in
   #30990 (in `v0.20`), which meant that *every* `spack install` invocation would
   attempt to build a new version of the requested package / any environment roots.
   While this is a common ask for *upgrading* and for *developer* workflows, we don't
   think it should be the default for a package manager.

   We are going to try to stick to this policy:
   1. Prioritize reuse and build as little as possible by default.
   2. Only upgrade or install duplicates if they are explicitly asked for, or if there
      is a known security issue that necessitates an upgrade.

   With the install command you now have three options:

   * `--reuse` (default): reuse as many existing installations as possible.
   * `--reuse-deps` / `--fresh-roots`: upgrade (freshen) roots but reuse dependencies if possible.
   * `--fresh`: install fresh versions of requested packages (roots) and their dependencies.

   We've also introduced `--fresh-roots` as an alias for `--reuse-deps` to make it more clear
   that it may give you fresh versions. More details in #41302 and #43988.

5. **More control over reused specs**

   You can now control which packages to reuse and how. There is a new
   `concretizer:reuse` config option, which accepts the following properties:

   - `roots`: `true` to reuse roots, `false` to reuse just dependencies
   - `exclude`: list of constraints used to select which specs *not* to reuse
   - `include`: list of constraints used to select which specs *to* reuse
   - `from`: list of sources for reused specs (some combination of `local`,
     `buildcache`, or `external`)

   For example, to reuse only specs compiled with GCC, you could write:

   ```yaml
   concretizer:
     reuse:
       roots: true
       include:
       - "%gcc"
   ```

   Or, if `openmpi` must be used from externals, and it must be the only external used:

   ```yaml
   concretizer:
     reuse:
       roots: true
       from:
       - type: local
         exclude: ["openmpi"]
       - type: buildcache
         exclude: ["openmpi"]
       - type: external
         include: ["openmpi"]
   ```

6. **New `redistribute()` directive**

   Some packages can't be redistributed in source or binary form. We need an explicit
   way to say that in a package.

   Now there is a `redistribute()` directive so that package authors can write:

   ```python
   class MyPackage(Package):
       redistribute(source=False, binary=False)
   ```

   Like other directives, this works with `when=`:

   ```python
   class MyPackage(Package):
       # 12.0 and higher are proprietary
       redistribute(source=False, binary=False, when="@12.0:")

       # can't redistribute when we depend on some proprietary dependency
       redistribute(source=False, binary=False, when="^proprietary-dependency")
   ```

   More in #20185.

7. **New `conflict:` and `prefer:` syntax for package preferences**

   Previously, you could express conflicts and preferences in `packages.yaml` through
   some contortions with `require:`:

   ```yaml
   packages:
     zlib-ng:
       require:
       - one_of: ["%clang", "@:"]   # conflict on %clang
       - any_of: ["+shared", "@:"]  # strong preference for +shared
   ```

   You can now use `conflict:` and `prefer:` for a much more readable configuration:

   ```yaml
   packages:
     zlib-ng:
       conflict:
       - "%clang"
       prefer:
       - "+shared"
   ```

   See [the documentation](https://spack.readthedocs.io/en/latest/packages_yaml.html#conflicts-and-strong-preferences)
   and #41832 for more details.

8. **`include_concrete` in environments**

   You may want to build on the *concrete* contents of another environment without
   changing that environment. You can now include the concrete specs from another
   environment's `spack.lock` with `include_concrete`:

   ```yaml
   spack:
     specs: []
     concretizer:
       unify: true
     include_concrete:
     - /path/to/environment1
     - /path/to/environment2
   ```

   Now, when *this* environment is concretized, it will bring in the already concrete
   specs from `environment1` and `environment2`, and build on top of them without
   changing them. This is useful if you have phased deployments, where old deployments
   should not be modified but you want to use as many of them as possible. More details
   in #33768.

9. **`python-venv` isolation**

   Spack has unique requirements for Python because it:
   1. installs every package in its own independent directory, and
   2. allows users to register *external* python installations.

   External installations may contain their own installed packages that can interfere
   with Spack installations, and some distributions (Debian and Ubuntu) even change the
   `sysconfig` in ways that alter the installation layout of installed Python packages
   (e.g., with the addition of a `/local` prefix on Debian or Ubuntu). To isolate Spack
   from these and other issues, we now insert a small `python-venv` package in between
   `python` and packages that need to install Python code. This isolates Spack's build
   environment, isolates Spack from any issues with an external python, and resolves a
   large number of issues we've had with Python installations.

   See #40773 for further details.

## New commands, options, and directives

* Allow packages to be pushed to build cache after install from source (#42423)
* `spack develop`: stage build artifacts in same root as non-dev builds (#41373)
* Don't delete `spack develop` build artifacts after install (#43424)
* `spack find`: add options for local/upstream only (#42999)
* `spack logs`: print log files for packages (either partially built or installed) (#42202)
* `patch`: support reversing patches (#43040)
* `develop`: Add -b/--build-directory option to set build_directory package attribute (#39606)
* `spack list`: add `--namespace` / `--repo` option (#41948)
* directives: add `checked_by` field to `license()`, add some license checks
* `spack gc`: add options for environments and build dependencies (#41731)
* Add `--create` to `spack env activate` (#40896)

## Performance improvements

* environment.py: fix excessive re-reads (#43746)
* ruamel yaml: fix quadratic complexity bug (#43745)
* Refactor to improve `spec format` speed (#43712)
* Do not acquire a write lock on the env post install if no views (#43505)
* asp.py: fewer calls to `spec.copy()` (#43715)
* spec.py: early return in `__str__`
* avoid `jinja2` import at startup unless needed (#43237)

## Other new features of note

* `archspec`: update to `v0.2.4`: support for Windows, bugfixes for `neoverse-v1` and
  `neoverse-v2` detection.
* `spack config get`/`blame`: with no args, show entire config
* `spack env create <env>`: dir if dir-like (#44024)
* ASP-based solver: update os compatibility for macOS (#43862)
* Add handling of custom ssl certs in urllib ops (#42953)
* Add ability to rename environments (#43296)
* Add config option and compiler support to reuse across OS's (#42693)
* Support for prereleases (#43140)
* Only reuse externals when configured (#41707)
* Environments: Add support for including views (#42250)

## Binary caches

* Build cache: make signed/unsigned a mirror property (#41507)
* tools stack

## Removals, deprecations, and syntax changes

* remove `dpcpp` compiler and package (#43418)
* spack load: remove --only argument (#42120)

## Notable Bugfixes

* repo.py: drop deleted packages from provider cache (#43779)
* Allow `+` in module file names (#41999)
* `cmd/python`: use runpy to allow multiprocessing in scripts (#41789)
* Show extension commands with spack -h (#41726)
* Support environment variable expansion inside module projections (#42917)
* Alert user to failed concretizations (#42655)
* shell: fix zsh color formatting for PS1 in environments (#39497)
* spack mirror create --all: include patches (#41579)

## Spack community stats

* 7,994 total packages; 525 since `v0.21.0`
* 178 new Python packages, 5 new R packages
* 358 people contributed to this release
  * 344 committers to packages
  * 45 committers to core


# v0.21.2 (2024-03-01)

## Bugfixes
```diff
@@ -22,4 +22,4 @@
 #
 # This is compatible across platforms.
 #
-exec spack python "$@"
+exec /usr/bin/env spack python "$@"
```
```diff
@@ -188,27 +188,25 @@ if NOT "%_sp_args%"=="%_sp_args:--help=%" (
 goto :end_switch

 :case_load
-if NOT defined _sp_args (
-  exit /B 0
-)
-
-:: If args contain --bat, or -h/--help: just execute.
-if NOT "%_sp_args%"=="%_sp_args:--help=%" (
-  goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
-  goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
-  goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:--list=%" (
-  goto :default_case
+:: If args contain --sh, --csh, or -h/--help: just execute.
+if defined _sp_args (
+  if NOT "%_sp_args%"=="%_sp_args:--help=%" (
+    goto :default_case
+  ) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
+    goto :default_case
+  ) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
+    goto :default_case
+  )
 )

 for /f "tokens=* USEBACKQ" %%I in (
-  `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`
-) do %%I
+  `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I

 goto :end_switch

 :case_unload
 goto :case_load

 :default_case
 python "%spack%" %_sp_flags% %_sp_subcommand% %_sp_args%
 goto :end_switch
```
```diff
@@ -170,6 +170,23 @@ config:
   # If set to true, Spack will use ccache to cache C compiles.
   ccache: false

+
+  # The concretization algorithm to use in Spack. Options are:
+  #
+  # 'clingo': Uses a logic solver under the hood to solve DAGs with full
+  #           backtracking and optimization for user preferences. Spack will
+  #           try to bootstrap the logic solver, if not already available.
+  #
+  # 'original': Spack's original greedy, fixed-point concretizer. This
+  #             algorithm can make decisions too early and will not backtrack
+  #             sufficiently for many specs. This will soon be deprecated in
+  #             favor of clingo.
+  #
+  # See `concretizer.yaml` for more settings you can fine-tune when
+  # using clingo.
+  concretizer: clingo
+

   # How long to wait to lock the Spack installation database. This lock is used
   # when Spack needs to manage its own package metadata and all operations are
   # expected to complete within the default time limit. The timeout should
```
```diff
@@ -20,14 +20,11 @@ packages:
     awk: [gawk]
     armci: [armcimpi]
     blas: [openblas, amdblis]
-    c: [gcc]
-    cxx: [gcc]
     D: [ldc]
     daal: [intel-oneapi-daal]
     elf: [elfutils]
     fftw-api: [fftw, amdfftw]
     flame: [libflame, amdlibflame]
-    fortran: [gcc]
     fortran-rt: [gcc-runtime, intel-oneapi-runtime]
     fuse: [libfuse]
     gl: [glx, osmesa]
@@ -64,7 +61,6 @@ packages:
     tbb: [intel-tbb]
     unwind: [libunwind]
     uuid: [util-linux-uuid, libuuid]
-    wasi-sdk: [wasi-sdk-prebuilt]
     xxd: [xxd-standalone, vim]
     yacc: [bison, byacc]
     ziglang: [zig]
```
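These `providers` lists set the default preference order for each virtual package, and a site can override them in its own `packages.yaml`. A small hedged example of such an override (the preference shown is illustrative, not part of this diff):

```yaml
# Hypothetical user packages.yaml: prefer AMD BLIS when resolving the blas virtual
packages:
  all:
    providers:
      blas: [amdblis, openblas]
```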
```diff
@@ -1,5 +1,6 @@
 config:
   locks: false
+  concretizer: clingo
   build_stage::
   - '$spack/.staging'
   stage_name: '{name}-{version}-{hash:7}'
```
```diff
@@ -206,7 +206,6 @@ def setup(sphinx):
     ("py:class", "six.moves.urllib.parse.ParseResult"),
     ("py:class", "TextIO"),
     ("py:class", "hashlib._Hash"),
-    ("py:class", "concurrent.futures._base.Executor"),
     # Spack classes that are private and we don't want to expose
     ("py:class", "spack.provider_index._IndexBase"),
     ("py:class", "spack.repo._PrependFileLoader"),
```
```diff
@@ -203,9 +203,12 @@ The OS that are currently supported are summarized in the table below:
-   * - Ubuntu 24.04
-     - ``ubuntu:24.04``
-     - ``spack/ubuntu-noble``
    * - CentOS Stream9
      - ``quay.io/centos/centos:stream9``
      - ``spack/centos-stream9``
+   * - CentOS 7
+     - ``centos:7``
+     - ``spack/centos7``
+   * - CentOS Stream
+     - ``quay.io/centos/centos:stream``
+     - ``spack/centos-stream``
    * - openSUSE Leap
      - ``opensuse/leap``
      - ``spack/leap15``
```
```diff
@@ -931,84 +931,32 @@ This allows for a much-needed reduction in redundancy between packages
 and constraints.


------------------
-Environment Views
------------------
+----------------
+Filesystem Views
+----------------

-Spack Environments can have an associated filesystem view, which is a directory
-with a more traditional structure ``<view>/bin``, ``<view>/lib``, ``<view>/include``
-in which all files of the installed packages are linked.
-
-By default a view is created for each environment, thanks to the ``view: true``
-option in the ``spack.yaml`` manifest file:
-
-.. code-block:: yaml
-
-   spack:
-     specs: [perl, python]
-     view: true
-
-The view is created in a hidden directory ``.spack-env/view`` relative to the environment.
-If you've used ``spack env activate``, you may have already interacted with this view. Spack
-prepends its ``<view>/bin`` dir to ``PATH`` when the environment is activated, so that
-you can directly run executables from all installed packages in the environment.
-
-Views are highly customizable: you can control where they are put, modify their structure,
-include and exclude specs, change how files are linked, and you can even generate multiple
-views for a single environment.
+Spack Environments can define filesystem views, which provide a direct access point
+for software similar to the directory hierarchy that might exist under ``/usr/local``.
+Filesystem views are updated every time the environment is written out to the lock
+file ``spack.lock``, so the concrete environment and the view are always compatible.
+The files of the view's installed packages are brought into the view by symbolic or
+hard links, referencing the original Spack installation, or by copy.

 .. _configuring_environment_views:

-^^^^^^^^^^^^^^^^^^^^^^^^^^
-Minimal view configuration
-^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Configuration in ``spack.yaml``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-The minimal configuration
-
-.. code-block:: yaml
-
-   spack:
-     # ...
-     view: true
-
-lets Spack generate a single view with default settings under the
-``.spack-env/view`` directory of the environment.
-
-Another short way to configure a view is to specify just where to put it:
-
-.. code-block:: yaml
-
-   spack:
-     # ...
-     view: /path/to/view
-
-Views can also be disabled by setting ``view: false``.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Advanced view configuration
-^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-One or more **view descriptors** can be defined under ``view``, keyed by a name.
-The example from the previous section with ``view: /path/to/view`` is equivalent
-to defining a view descriptor named ``default`` with a ``root`` attribute:
-
-.. code-block:: yaml
-
-   spack:
-     # ...
-     view:
-       default: # name of the view
-         root: /path/to/view # view descriptor attribute
-
-The ``default`` view descriptor name is special: when you ``spack env activate`` your
-environment, this view will be used to update (among other things) your ``PATH``
-variable.
-
-View descriptors must contain the root of the view, and optionally projections,
-``select`` and ``exclude`` lists and link information via ``link`` and
+The Spack Environment manifest file has a top-level keyword
+``view``. Each entry under that heading is a **view descriptor**, headed
+by a name. Any number of views may be defined under the ``view`` heading.
+The view descriptor contains the root of the view, and
+optionally the projections for the view, ``select`` and
+``exclude`` lists for the view and link information via ``link`` and
 ``link_type``.

-As a more advanced example, in the following manifest
+For example, in the following manifest
 file snippet we define a view named ``mpis``, rooted at
 ``/path/to/view`` in which all projections use the package name,
 version, and compiler name to determine the path for a given
@@ -1053,10 +1001,59 @@ of ``hardlink`` or ``copy``.
 when the environment is not activated, and linked libraries will be located
 *outside* of the view thanks to rpaths.


+There are two shorthands for environments with a single view. If the
+environment at ``/path/to/env`` has a single view, with a root at
+``/path/to/env/.spack-env/view``, with default selection and exclusion
+and the default projection, we can put ``view: True`` in the
+environment manifest. Similarly, if the environment has a view with a
+different root, but default selection, exclusion, and projections, the
+manifest can say ``view: /path/to/view``. These views are
+automatically named ``default``, so that
+
+.. code-block:: yaml
+
+   spack:
+     # ...
+     view: True
+
+is equivalent to
+
+.. code-block:: yaml
+
+   spack:
+     # ...
+     view:
+       default:
+         root: .spack-env/view
+
+and
+
+.. code-block:: yaml
+
+   spack:
+     # ...
+     view: /path/to/view
+
+is equivalent to
+
+.. code-block:: yaml
+
+   spack:
+     # ...
+     view:
+       default:
+         root: /path/to/view
+
+By default, Spack environments are configured with ``view: True`` in
+the manifest. Environments can be configured without views using
+``view: False``. For backwards compatibility reasons, environments
+with no ``view`` key are treated the same as ``view: True``.

 From the command line, the ``spack env create`` command takes an
 argument ``--with-view [PATH]`` that sets the path for a single, default
 view. If no path is specified, the default path is used (``view:
-true``). The argument ``--without-view`` can be used to create an
+True``). The argument ``--without-view`` can be used to create an
 environment without any view configured.

 The ``spack env view`` command can be used to manage the views
@@ -1122,18 +1119,11 @@ the projection under ``all`` before reaching those entries.
 Activating environment views
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-The ``spack env activate <env>`` has two effects:
-
-1. It activates the environment so that further Spack commands such
-   as ``spack install`` will run in the context of the environment.
-2. It activates the view so that environment variables such as
-   ``PATH`` are updated to include the view.
-
-Without further arguments, the ``default`` view of the environment is
-activated. If a view with a different name has to be activated,
-``spack env activate --with-view <name> <env>`` can be
-used instead. You can also activate the environment without modifying
-further environment variables using ``--without-view``.
+The ``spack env activate`` command will put the default view for the
+environment into the user's path, in addition to activating the
+environment for Spack commands. The arguments ``-v,--with-view`` and
+``-V,--without-view`` can be used to tune this behavior. The default
+behavior is to activate with the environment view if there is one.

 The environment variables affected by the ``spack env activate``
 command and the paths that are used to update them are determined by
@@ -1156,8 +1146,8 @@ relevant variable if the path exists. For this reason, it is not
 recommended to use non-default projections with the default view of an
 environment.

-The ``spack env deactivate`` command will remove the active view of
-the Spack environment from the user's environment variables.
+The ``spack env deactivate`` command will remove the default view of
+the environment from the user's path.


 .. _env-generate-depfile:

@@ -1316,7 +1306,7 @@ index once every package is pushed. Note how this target uses the generated
 example/push/%: example/install/%
 	@mkdir -p $(dir $@)
 	$(info About to push $(SPEC) to a buildcache)
-	$(SPACK) -e . buildcache push --only=package $(BUILDCACHE_DIR) /$(HASH)
+	$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
 	@touch $@

 push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
```
@@ -2344,27 +2344,6 @@ you set ``parallel`` to ``False`` at the package level, then each call
|
||||
to ``make()`` will be sequential by default, but packagers can call
|
||||
``make(parallel=True)`` to override it.
|
||||
|
||||
Note that the ``--jobs`` option works out of the box for all standard
|
||||
build systems. If you are using a non-standard build system instead, you
|
||||
can use the variable ``make_jobs`` to extract the number of jobs specified
|
||||
by the ``--jobs`` option:
|
||||
|
||||
.. code-block:: python
|
||||
:emphasize-lines: 7, 11
|
||||
:linenos:
|
||||
|
||||
class Xios(Package):
|
||||
...
|
||||
def install(self, spec, prefix):
|
||||
...
|
||||
options = [
|
||||
...
|
||||
'--jobs', str(make_jobs),
|
||||
]
|
||||
...
|
||||
make_xios = Executable("./make_xios")
|
||||
make_xios(*options)
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Install-level build parallelism
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -5194,6 +5173,12 @@ installed executable. The check is implemented as follows:
|
||||
reframe = Executable(self.prefix.bin.reframe)
|
||||
reframe("-l")
|
||||
|
||||
.. warning::
|
||||
|
||||
The API for adding tests is not yet considered stable and may change
|
||||
in future releases.
|
||||
|
||||
|
||||
""""""""""""""""""""""""""""""""
|
||||
Checking build-time test results
|
||||
""""""""""""""""""""""""""""""""
|
||||
@@ -5231,42 +5216,38 @@ be left in the build stage directory as illustrated below:
|
||||
Stand-alone tests
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
While build-time tests are integrated with the installation process, stand-alone
|
||||
While build-time tests are integrated with the build process, stand-alone
|
||||
tests are expected to run days, weeks, even months after the software is
|
||||
installed. The goal is to provide a mechanism for gaining confidence that
|
||||
packages work as installed **and** *continue* to work as the underlying
|
||||
software evolves. Packages can add and inherit stand-alone tests. The
|
||||
``spack test`` command is used for stand-alone testing.
|
||||
`spack test`` command is used to manage stand-alone testing.
|
||||
|
||||
.. admonition:: Stand-alone test methods should complete within a few minutes.
.. note::

   Execution speed is important since these tests are intended to quickly
   assess whether installed specs work on the system. Spack cannot spare
   resources for more extensive testing of packages included in CI stacks.
   assess whether installed specs work on the system. Consequently, they
   should run relatively quickly -- as in on the order of at most a few
   minutes -- while ideally executing all, or at least key aspects of the
   installed software.

   Consequently, stand-alone tests should run relatively quickly -- as in
   on the order of at most a few minutes -- while testing at least key aspects
   of the installed software. Save more extensive testing for other tools.
.. note::

   Failing stand-alone tests indicate problems with the installation and,
   therefore, there is no reason to proceed with more resource-intensive
   tests until those have been investigated.

   Passing stand-alone tests indicate that more thorough testing, such
   as running extensive unit or regression tests, or tests that run at
   scale can proceed without wasting resources on a problematic installation.

Tests are defined in the package using methods with names beginning ``test_``.
This allows Spack to support multiple independent checks, or parts. Files
needed for testing, such as source, data, and expected outputs, may be saved
from the build and/or stored with the package in the repository. Regardless
of origin, these files are automatically copied to the spec's test stage
directory prior to execution of the test method(s). Spack also provides helper
functions to facilitate common processing.

.. tip::

   **The status of stand-alone tests can be used to guide follow-up testing efforts.**

   Passing stand-alone tests justify performing more thorough testing, such
   as running extensive unit or regression tests or tests that run at scale,
   when available. These tests are outside of the scope of Spack packaging.

   Failing stand-alone tests indicate problems with the installation and,
   therefore, no reason to proceed with more resource-intensive tests until
   the failures have been investigated.
directory prior to execution of the test method(s). Spack also provides some
helper functions to facilitate processing.

.. _configure-test-stage:

@@ -5274,26 +5255,30 @@ functions to facilitate common processing.
Configuring the test stage directory
""""""""""""""""""""""""""""""""""""

Stand-alone tests utilize a test stage directory to build, run, and track
tests in the same way Spack uses a build stage directory to install software.
The default test stage root directory, ``$HOME/.spack/test``, is defined in
:ref:`config.yaml <config-yaml>`. This location is customizable by adding or
changing the ``test_stage`` path such that:
Stand-alone tests utilize a test stage directory for building, running,
and tracking results in the same way Spack uses a build stage directory.
The default test stage root directory, ``~/.spack/test``, is defined in
:ref:`etc/spack/defaults/config.yaml <config-yaml>`. This location is
customizable by adding or changing the ``test_stage`` path in the high-level
``config`` of the appropriate ``config.yaml`` file such that:

.. code-block:: yaml

   config:
     test_stage: /path/to/test/stage

Packages can use the ``self.test_suite.stage`` property to access the path.
Packages can use the ``self.test_suite.stage`` property to access this setting.
Other package properties that provide access to spec-specific subdirectories
and files are described in :ref:`accessing staged files <accessing-files>`.

.. admonition:: Each spec being tested has its own test stage directory.
.. note::

   The ``config:test_stage`` option is the path to the root of a
   **test suite**'s stage directories.
   The test stage path is the root directory for the **entire suite**.
   In other words, it is the root directory for **all specs** being
   tested by the ``spack test run`` command. Each spec gets its own
   stage subdirectory. Use ``self.test_suite.test_dir_for_spec(self.spec)``
   to access the spec-specific test stage directory.

Other package properties that provide paths to spec-specific subdirectories
and files are described in :ref:`accessing-files`.

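As a concrete illustration of the two properties above, a minimal sketch
follows (the package and test method are hypothetical):

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_show_stage_paths(self):
           """sketch: report suite-level and spec-specific stage paths"""
           # Root stage directory shared by the entire test suite.
           suite_root = self.test_suite.stage
           # Stage subdirectory reserved for this spec's tests.
           spec_dir = self.test_suite.test_dir_for_spec(self.spec)
           print(f"suite stage: {suite_root}")
           print(f"spec stage:  {spec_dir}")
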
.. _adding-standalone-tests:

@@ -5306,144 +5291,61 @@ Test recipes are defined in the package using methods with names beginning
Each method has access to the information Spack tracks on the package, such
as options, compilers, and dependencies, supporting the customization of tests
to the build. Standard Python ``assert`` statements and other error reporting
mechanisms can be used. These exceptions are automatically caught and reported
mechanisms are available. Such exceptions are automatically caught and reported
as test failures.

Each test method is an *implicit test part* named by the method. Its purpose
is the method's docstring. Providing a meaningful purpose for the test gives
context that can aid debugging. Spack outputs both the name and purpose at the
start of test execution so it's also important that the docstring/purpose be
brief.

.. tip::

   We recommend naming test methods so it is clear *what* is being tested.
   For example, if a test method is building and/or running an executable
   called ``example``, then call the method ``test_example``. This, together
   with a similarly meaningful test purpose, will aid test comprehension,
   debugging, and maintainability.

Stand-alone tests run in an environment that provides access to information
on the installed software, such as build options, dependencies, and compilers.
Build options and dependencies are accessed using the same spec checks used
by build recipes. Examples of checking :ref:`variant settings <variants>` and
:ref:`spec constraints <testing-specs>` can be found at the provided links.

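For instance, a test method might branch on how the spec was built. A
minimal, hypothetical sketch (the ``mpi`` variant and ``example`` program
are placeholders):

.. code-block:: python

   def test_example(self):
       """run installed example, exercising MPI support if built with it"""
       example = which(self.prefix.bin.example)
       if self.spec.satisfies("+mpi"):
           # Spec checks work the same way here as in build recipes.
           example("--with-mpi")
       else:
           example()
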
.. admonition:: Spack automatically sets up the test stage directory and environment.

   Spack automatically creates the test stage directory and copies
   relevant files *prior to* running tests. It can also ensure build
   dependencies are available **if** necessary.

   The path to the test stage is configurable (see :ref:`configure-test-stage`).

   Files that Spack knows to copy are those saved from the build (see
   :ref:`cache_extra_test_sources`) and those added to the package repository
   (see :ref:`cache_custom_files`).

   Spack will use the value of the ``test_requires_compiler`` property to
   determine whether it needs to also set up build dependencies (see
   :ref:`test-build-tests`).

The ``MyPackage`` package below provides two basic test examples:
``test_example`` and ``test_example2``. The first runs the installed
``example`` and ensures its output contains an expected string. The second
runs ``example2`` without checking output so is only concerned with confirming
the executable runs successfully. If the installed spec is not expected to have
``example2``, then the check at the top of the method will raise a special
``SkipTest`` exception, which is captured to facilitate reporting skipped test
parts to tools like CDash.
Each test method is an implicit test part named by the method and whose
purpose is the method's docstring. Providing a purpose gives context for
aiding debugging. A test method may contain embedded test parts. Spack
outputs the test name and purpose prior to running each test method and
any embedded test parts. For example, ``MyPackage`` below provides two basic
examples of installation tests: ``test_always_fails`` and ``test_example``.
As the name indicates, the first always fails. The second simply runs the
installed example.

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_always_fails(self):
           """use assert to always fail"""
           assert False

       def test_example(self):
           """ensure installed example works"""
           expected = "Done."
           """run installed example"""
           example = which(self.prefix.bin.example)

           # Capture stdout and stderr from running the Executable
           # and check that the expected output was produced.
           out = example(output=str.split, error=str.split)
           assert expected in out, f"Expected '{expected}' in the output"

       def test_example2(self):
           """run installed example2"""
           if self.spec.satisfies("@:1.0"):
               # Raise SkipTest to ensure flagging the test as skipped for
               # test reporting purposes.
               raise SkipTest("Test is only available for v1.1 on")

           example2 = which(self.prefix.bin.example2)
           example2()
           example()

Output showing the identification of each test part after running the tests
is illustrated below.

.. code-block:: console

   $ spack test run --alias mypackage mypackage@2.0
   $ spack test run --alias mypackage mypackage@1.0
   ==> Spack test mypackage
   ...
   $ spack test results -l mypackage
   ==> Results for test suite 'mypackage':
   ...
   ==> [2024-03-10-16:03:56.625439] test: test_example: ensure installed example works
   ==> [2023-03-10-16:03:56.625204] test: test_always_fails: use assert to always fail
   ...
   PASSED: MyPackage::test_example
   ==> [2024-03-10-16:03:56.625439] test: test_example2: run installed example2
   FAILED
   ==> [2023-03-10-16:03:56.625439] test: test_example: run installed example
   ...
   PASSED: MyPackage::test_example2
   PASSED

.. admonition:: Do NOT implement tests that must run in the installation prefix.

   Use of the package spec's installation prefix for building and running
   tests is **strongly discouraged**. Doing so causes permission errors for
   shared spack instances *and* facilities that install the software in
   read-only file systems or directories.
.. note::

   Instead, start these test methods by explicitly copying the needed files
   from the installation prefix to the test stage directory. Note the test
   stage directory is the current directory when the test is executed with
   the ``spack test run`` command.
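
   A minimal sketch of that pattern (the input file name is hypothetical;
   ``copy``, ``join_path``, and ``which`` are helpers available to packages):

   .. code-block:: python

      def test_example_data(self):
          """sketch: stage a file out of the prefix before using it"""
          # The test stage directory is the current working directory here,
          # so copy the input out of the (possibly read-only) prefix first.
          copy(join_path(self.prefix.share, "example-input.txt"), ".")
          example = which(self.prefix.bin.example)
          example("example-input.txt")
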
If ``MyPackage`` were a recipe for a library, the tests should build
an example or test program that is then executed.

.. admonition:: Test methods for library packages should build test executables.
A test method can include test parts using the ``test_part`` context manager.
Each part is treated as an independent check to allow subsequent test parts
to execute even after a test part fails.

   Stand-alone tests for library packages *should* build test executables
   that utilize the *installed* library. Doing so ensures the tests follow
   a similar build process that users of the library would follow.

   For more information on how to do this, see :ref:`test-build-tests`.

.. tip::

   If you want to see more examples from packages with stand-alone tests, run
   ``spack pkg grep "def\stest" | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.

.. _adding-standalone-test-parts:

"""""""""""""""""""""""""""""
Adding stand-alone test parts
"""""""""""""""""""""""""""""

Sometimes dependencies between steps of a test lend themselves to being
broken into parts. Tracking the pass/fail status of each part may aid
debugging. Spack provides a ``test_part`` context manager for use within
test methods.

Each test part is independently run, tracked, and reported. Test parts are
executed in the order they appear. If one fails, subsequent test parts are
still performed even if they would also fail. This allows tools like CDash
to track and report the status of test parts across runs. The pass/fail status
of the enclosing test is derived from the statuses of the embedded test parts.

.. admonition:: Test method and test part names **must** be unique.

   Test results reporting requires that test methods and embedded test parts
   within a package have unique names.
.. _test-part:

The signature for ``test_part`` is:

@@ -5465,68 +5367,40 @@ where each argument has the following meaning:
* ``work_dir`` is the path to the directory in which the test will run.

  The default of ``None``, or ``"."``, corresponds to the spec's test
  stage (i.e., ``self.test_suite.test_dir_for_spec(self.spec)``).

.. admonition:: Start test part names with the name of the enclosing test.
.. admonition:: Tests should **not** run under the installation directory.

   We **highly recommend** starting the names of test parts with the name
   of the enclosing test. Doing so helps with the comprehension, readability
   and debugging of test results.
   Use of the package spec's installation directory for building and running
   tests is **strongly** discouraged. Doing so causes permission errors for
   shared spack instances *and* facilities that install the software in
   read-only file systems or directories.

Suppose ``MyPackage`` installs multiple executables that need to run in a
specific order since the outputs from one are inputs of others. Further suppose
we want to add an integration test that runs the executables in order. We can
accomplish this goal by implementing a stand-alone test method consisting of
test parts for each executable as follows:
Suppose ``MyPackage`` actually installs two examples we want to use for tests.
These checks can be implemented as separate checks or, as illustrated below,
embedded test parts.

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_series(self):
           """run setup, perform, and report"""
       def test_example(self):
           """run installed examples"""
           for example in ["ex1", "ex2"]:
               with test_part(
                   self,
                   f"test_example_{example}",
                   purpose=f"run installed {example}",
               ):
                   exe = which(join_path(self.prefix.bin, example))
                   exe()

           with test_part(self, "test_series_setup", purpose="setup operation"):
               exe = which(self.prefix.bin.setup)
               exe()

           with test_part(self, "test_series_run", purpose="perform operation"):
               exe = which(self.prefix.bin.run)
               exe()

           with test_part(self, "test_series_report", purpose="generate report"):
               exe = which(self.prefix.bin.report)
               exe()

The result is ``test_series`` runs the following executables in order: ``setup``,
``run``, and ``report``. In this case no options are passed to any of the
executables and no outputs from running them are checked. Consequently, the
implementation could be simplified with a for-loop as follows:

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_series(self):
           """execute series setup, run, and report"""

           for exe, reason in [
               ("setup", "setup operation"),
               ("run", "perform operation"),
               ("report", "generate report")
           ]:
               with test_part(self, f"test_series_{exe}", purpose=reason):
                   exe = which(self.prefix.bin.join(exe))
                   exe()

In both cases, since we're using a context manager, each test part in
``test_series`` will execute regardless of the status of the other test
parts.

Now let's look at the output from running the stand-alone tests where
the second test part, ``test_series_run``, fails.
In this case, there will be an implicit test part for ``test_example``
and separate sub-parts for ``ex1`` and ``ex2``. The second sub-part
will be executed regardless of whether the first passes. The test
log for a run where the first executable fails and the second passes
is illustrated below.

.. code-block:: console

@@ -5536,68 +5410,50 @@ the second test part, ``test_series_run``, fails.
   $ spack test results -l mypackage
   ==> Results for test suite 'mypackage':
   ...
   ==> [2024-03-10-16:03:56.625204] test: test_series: execute series setup, run, and report
   ==> [2024-03-10-16:03:56.625439] test: test_series_setup: setup operation
   ==> [2023-03-10-16:03:56.625204] test: test_example: run installed examples
   ==> [2023-03-10-16:03:56.625439] test: test_example_ex1: run installed ex1
   ...
   PASSED: MyPackage::test_series_setup
   ==> [2024-03-10-16:03:56.625555] test: test_series_run: perform operation
   FAILED
   ==> [2023-03-10-16:03:56.625555] test: test_example_ex2: run installed ex2
   ...
   FAILED: MyPackage::test_series_run
   ==> [2024-03-10-16:03:57.003456] test: test_series_report: generate report
   ...
   FAILED: MyPackage::test_series_report
   FAILED: MyPackage::test_series
   PASSED
   ...

Since test parts depended on the success of previous parts, we see that the
failure of one results in the failure of subsequent checks and the overall
result of the test method, ``test_series``, is failure.
.. warning::

.. tip::
   Test results reporting requires that each test method and embedded
   test part for a package have a unique name.

   If you want to see more examples from packages using ``test_part``, run
   ``spack pkg grep "test_part(" | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.
Stand-alone tests run in an environment that provides access to information
Spack has on how the software was built, such as build options, dependencies,
and compilers. Build options and dependencies are accessed with the normal
spec checks. Examples of checking :ref:`variant settings <variants>` and
:ref:`spec constraints <testing-specs>` can be found at the provided links.
Accessing compilers in stand-alone tests that are used by the build requires
setting a package property as described :ref:`below <test-compilation>`.

.. _test-build-tests:

"""""""""""""""""""""""""""""""""""""
Building and running test executables
"""""""""""""""""""""""""""""""""""""
.. _test-compilation:

.. admonition:: Re-use build-time sources and (small) input data sets when possible.
"""""""""""""""""""""""""
Enabling test compilation
"""""""""""""""""""""""""

   We **highly recommend** re-using build-time test sources and pared down
   input files for testing installed software. These files are easier
   to keep synchronized with software capabilities when they reside
   within the software's repository. More information on saving files from
   the installation process can be found at :ref:`cache_extra_test_sources`.
If you want to build and run binaries in tests, then you'll need to tell
Spack to load the package's compiler configuration. This is accomplished
by setting the package's ``test_requires_compiler`` property to ``True``.

   If that is not possible, you can add test-related files to the package
   repository (see :ref:`cache_custom_files`). It will be important to
   remember to maintain them so they work across listed or supported versions
   of the package.
Setting the property to ``True`` ensures access to the compiler through
canonical environment variables (e.g., ``CC``, ``CXX``, ``FC``, ``F77``).
It also gives access to build dependencies like ``cmake`` through their
``spec objects`` (e.g., ``self.spec["cmake"].prefix.bin.cmake``).

Packages that build libraries are good examples of cases where you'll want
to build test executables from the installed software before running them.
Doing so requires you to let Spack know it needs to load the package's
compiler configuration. This is accomplished by setting the package's
``test_requires_compiler`` property to ``True``.
.. note::

.. admonition:: ``test_requires_compiler = True`` is required to build test executables.
   The ``test_requires_compiler`` property should be added at the top of
   the package near other attributes, such as the ``homepage`` and ``url``.

   Setting the property to ``True`` ensures access to the compiler through
   canonical environment variables (e.g., ``CC``, ``CXX``, ``FC``, ``F77``).
   It also gives access to build dependencies like ``cmake`` through their
   ``spec objects`` (e.g., ``self.spec["cmake"].prefix.bin.cmake`` for the
   path or ``self.spec["cmake"].command`` for the ``Executable`` instance).

   Be sure to add the property at the top of the package class under other
   properties like the ``homepage``.

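A minimal sketch of that placement (the package is hypothetical; the property
itself is the one described above):

.. code-block:: python

   class MyLibrary(Package):
       """hypothetical library package with compiled stand-alone tests"""

       homepage = "https://example.com/mylibrary"
       url = "https://example.com/mylibrary-1.0.tar.gz"

       # Tell Spack stand-alone tests need the build-time compiler
       # configuration (CC, CXX, FC, F77) and build dependencies.
       test_requires_compiler = True
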
The example below, which ignores how ``cxx-example.cpp`` is acquired,
illustrates the basic process of compiling a test executable using the
installed library before running it.
The example below illustrates using this feature to compile an example.

.. code-block:: python

@@ -5621,22 +5477,28 @@ installed library before running it.
           cxx_example = which(exe)
           cxx_example()

Typically the files used to build and/or run test executables are either
cached from the installation (see :ref:`cache_extra_test_sources`) or added
to the package repository (see :ref:`cache_custom_files`). There is nothing
preventing the use of both.

.. _cache_extra_test_sources:

""""""""""""""""""""""""""""""""""""
|
||||
Saving build- and install-time files
|
||||
""""""""""""""""""""""""""""""""""""
|
||||
"""""""""""""""""""""""
|
||||
Saving build-time files
|
||||
"""""""""""""""""""""""
|
||||
|
||||
You can use the ``cache_extra_test_sources`` helper routine to copy
|
||||
directories and or files from the source build stage directory to the
|
||||
package's installation directory. Spack will automatically copy these
|
||||
files for you when it sets up the test stage directory and before it
|
||||
begins running the tests.
|
||||
.. note::
|
||||
|
||||
We highly recommend re-using build-time test sources and pared down
|
||||
input files for testing installed software. These files are easier
|
||||
to keep synchronized with software capabilities since they reside
|
||||
within the software's repository.
|
||||
|
||||
If that is not possible, you can add test-related files to the package
|
||||
repository (see :ref:`adding custom files <cache_custom_files>`). It
|
||||
will be important to maintain them so they work across listed or supported
|
||||
versions of the package.
|
||||
|
||||
You can use the ``cache_extra_test_sources`` helper to copy directories
|
||||
and or files from the source build stage directory to the package's
|
||||
installation directory.
|
||||
|
||||
The signature for ``cache_extra_test_sources`` is:
|
||||
|
||||
@@ -5651,69 +5513,46 @@ where each argument has the following meaning:
* ``srcs`` is a string *or* a list of strings corresponding to the
  paths of subdirectories and/or files needed for stand-alone testing.

.. warning::
  The paths must be relative to the staged source directory. Contents of
  subdirectories and files are copied to a special test cache subdirectory
  of the installation prefix. They are automatically copied to the appropriate
  relative paths under the test stage directory prior to executing stand-alone
  tests.

   Paths provided in the ``srcs`` argument **must be relative** to the
   staged source directory. They will be copied to the equivalent relative
   location under the test stage directory prior to test execution.

   Contents of subdirectories and files are copied to a special test cache
   subdirectory of the installation prefix. They are automatically copied to
   the appropriate relative paths under the test stage directory prior to
   executing stand-alone tests.

.. tip::

   *Perform test-related conversions once when copying files.*

   If one or more of the copied files needs to be modified to reference
   the installed software, it is recommended that those changes be made
   to the cached files **once** in the post-``install`` copy method
   **after** the call to ``cache_extra_test_sources``. This will reduce
   the amount of unnecessary work in the test method **and** avoid problems
   running stand-alone tests in shared instances and facility deployments.

   The ``filter_file`` function can be quite useful for such changes
   (see :ref:`file-filtering`).

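A sketch of that one-time conversion under stated assumptions: the
``examples/Makefile`` and its ``@PREFIX@`` placeholder are hypothetical, and
the cached-copy location shown should be adjusted to wherever the cached
files actually land under your package's installation prefix.

.. code-block:: python

   @run_after("install")
   def cache_test_sources(self):
       cache_extra_test_sources(self, "examples")
       # Rewrite the *cached* copy once so tests reference the installed
       # software instead of the build stage (assumed cache location).
       cached_makefile = join_path(
           self.prefix, ".spack", "test", "examples", "Makefile"
       )
       filter_file("@PREFIX@", str(self.prefix), cached_makefile)
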
Below is a basic example of a test that relies on files from the installation.
This package method re-uses the contents of the ``examples`` subdirectory,
which is assumed to have all of the files implemented to allow ``make`` to
compile and link ``foo.c`` and ``bar.c`` against the package's installed
library.
For example, a package method for copying everything in the ``tests``
subdirectory plus the ``foo.c`` and ``bar.c`` files from ``examples``
and using ``foo.c`` in a test method is illustrated below.

.. code-block:: python

   class MyLibPackage(MakefilePackage):
   class MyLibPackage(Package):
       ...

       @run_after("install")
       def copy_test_files(self):
           cache_extra_test_sources(self, "examples")
           srcs = ["tests",
                   join_path("examples", "foo.c"),
                   join_path("examples", "bar.c")]
           cache_extra_test_sources(self, srcs)

       def test_example(self):
           """build and run the examples"""
           examples_dir = self.test_suite.current_test_cache_dir.examples
           with working_dir(examples_dir):
               make = which("make")
               make()
       def test_foo(self):
           exe = "foo"
           src_dir = self.test_suite.current_test_cache_dir.examples
           with working_dir(src_dir):
               cc = which(os.environ["CC"])
               cc(
                   f"-L{self.prefix.lib}",
                   f"-I{self.prefix.include}",
                   f"{exe}.c",
                   "-o", exe
               )
               foo = which(exe)
               foo()

               for program in ["foo", "bar"]:
                   with test_part(
                       self,
                       f"test_example_{program}",
                       purpose=f"ensure {program} runs"
                   ):
                       exe = Executable(program)
                       exe()

In this case, ``copy_test_files`` copies the associated files from the
build stage to the package's test cache directory under the installation
prefix. Running ``spack test run`` for the package results in Spack copying
the directory and its contents to the test stage directory. The
``working_dir`` context manager ensures the commands within it are executed
from the ``examples_dir``. The test builds the software using ``make`` before
running each executable, ``foo`` and ``bar``, as independent test parts.
In this case, the method copies the associated files from the build
stage, **after** the software is installed, to the package's test
cache directory. Then ``test_foo`` builds ``foo`` using ``foo.c``
before running the program.

.. note::

@@ -5722,18 +5561,43 @@ running each executable, ``foo`` and ``bar``, as independent test parts.

   The key to copying files for stand-alone testing at build time is use
   of the ``run_after`` directive, which ensures the associated files are
   copied **after** the provided build stage (``install``) when the installation
   prefix **and** files are available.
   copied **after** the provided build stage where the files **and**
   installation prefix are available.

The test method uses the path contained in the package's
``self.test_suite.current_test_cache_dir`` property for the root directory
of the copied files. In this case, that's the ``examples`` subdirectory.
These paths are **automatically copied** from cache to the test stage
directory prior to the execution of any stand-alone tests. Tests access
the files using the ``self.test_suite.current_test_cache_dir`` property.
In our example above, test methods can use the following paths to reference
the copy of each entry listed in ``srcs``, respectively:

.. tip::
* ``self.test_suite.current_test_cache_dir.tests``
* ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
* ``join_path(self.test_suite.current_test_cache_dir.examples, "bar.c")``

.. admonition:: Library packages should build stand-alone tests

   Library developers will want to build the associated tests
   against their **installed** libraries before running them.

.. note::

   While source and input files are generally recommended, binaries
   **may** also be cached by the build process. Only you, as the package
   writer or maintainer, know whether these files would be appropriate
   for testing the installed software weeks to months later.

.. note::

   If one or more of the copied files needs to be modified to reference
   the installed software, it is recommended that those changes be made
   to the cached files **once** in the ``copy_test_sources`` method and
   **after** the call to ``cache_extra_test_sources()``. This will
   reduce the amount of unnecessary work in the test method **and** avoid
   problems testing in shared instances and facility deployments.

   The ``filter_file`` function can be quite useful for such changes.
   See :ref:`file manipulation <file-manipulation>`.

If you want to see more examples from packages that cache build files, run
``spack pkg grep cache_extra_test_sources | sed "s/\/package.py.*//g" | sort -u``
from the command line to get a list of the packages.

.. _cache_custom_files:

@@ -5741,9 +5605,8 @@ running each executable, ``foo`` and ``bar``, as independent test parts.
Adding custom files
"""""""""""""""""""

Sometimes it is helpful or necessary to include custom files for building
and/or checking the results of tests as part of the package. Examples of the
types of files that might be useful are:
In some cases it can be useful to have files that can be used to build or
check the results of tests. Examples include:

- test source files
- test input files
@@ -5751,15 +5614,17 @@ of files that might be useful are:
- expected test outputs

While obtaining such files from the software repository is preferred (see
:ref:`cache_extra_test_sources`), there are circumstances where doing so is not
feasible such as when the software is not being actively maintained. When test
files cannot be obtained from the repository or there is a need to supplement
files that can, Spack supports the inclusion of additional files under the
``test`` subdirectory of the package in the Spack repository.
:ref:`adding build-time files <cache_extra_test_sources>`), there are
circumstances where that is not feasible (e.g., the software is not being
actively maintained). When test files can't be obtained from the repository
or as a supplement to files that can, Spack supports the inclusion of
additional files under the ``test`` subdirectory of the package in the
Spack repository.

The following example assumes a ``custom-example.c`` is saved in ``MyLibrary``
package's ``test`` subdirectory. It also assumes the program simply needs to
be compiled and linked against the installed ``MyLibrary`` software.
Spack **automatically copies** the contents of that directory to the
test staging directory prior to running stand-alone tests. Test methods
access those files using the ``self.test_suite.current_test_data_dir``
property as shown below.

.. code-block:: python

@@ -5769,29 +5634,17 @@ be compiled and linked against the installed ``MyLibrary`` software.
       test_requires_compiler = True
       ...

       def test_custom_example(self):
       def test_example(self):
           """build and run custom-example"""
           src_dir = self.test_suite.current_test_data_dir
           data_dir = self.test_suite.current_test_data_dir
           exe = "custom-example"
           src = data_dir.join(f"{exe}.cpp")
           ...
           # TODO: Build custom-example using src and exe
           ...
           custom_example = which(exe)
           custom_example()

           with working_dir(src_dir):
               cc = which(os.environ["CC"])
               cc(
                   f"-L{self.prefix.lib}",
                   f"-I{self.prefix.include}",
                   f"{exe}.cpp",
                   "-o", exe
               )

               custom_example = Executable(exe)
               custom_example()

In this case, ``spack test run`` for the package results in Spack copying
the contents of the ``test`` subdirectory to the test stage directory path
in ``self.test_suite.current_test_data_dir`` before calling
``test_custom_example``. Use of the ``working_dir`` context manager
ensures the commands to build and run the program are performed from
within the appropriate subdirectory of the test stage.

.. _expected_test_output_from_file:

@@ -5800,8 +5653,9 @@ Reading expected output from a file
"""""""""""""""""""""""""""""""""""

The helper function ``get_escaped_text_output`` is available for packages
to retrieve properly formatted text from a file potentially containing
special characters.
to retrieve and properly format the text from a file that contains the
expected output from running an executable that may contain special
characters.

The signature for ``get_escaped_text_output`` is:

@@ -5811,13 +5665,10 @@ The signature for ``get_escaped_text_output`` is:

where ``filename`` is the path to the file containing the expected output.

The path provided to ``filename`` for one of the copied custom files
(:ref:`custom file <cache_custom_files>`) is in the path rooted at
``self.test_suite.current_test_data_dir``.

The example below shows how to reference both the custom database
(``packages.db``) and expected output (``dump.out``) files Spack copies
to the test stage:
The ``filename`` for a :ref:`custom file <cache_custom_files>` can be
accessed by tests using the ``self.test_suite.current_test_data_dir``
property. The example below illustrates how to read a file that was
added to the package's ``test`` subdirectory.

.. code-block:: python

@@ -5839,9 +5690,8 @@ to the test stage:
       for exp in expected:
           assert re.search(exp, out), f"Expected '{exp}' in output"

If the files were instead cached from installing the software, the paths to the
two files would be found under the ``self.test_suite.current_test_cache_dir``
directory as shown below:
If the file was instead copied from the ``tests`` subdirectory of the staged
source code, the path would be obtained as shown below.

.. code-block:: python

@@ -5849,24 +5699,17 @@ directory as shown below:
       """check example table dump"""
       test_cache_dir = self.test_suite.current_test_cache_dir
       db_filename = test_cache_dir.join("packages.db")
       ...
       expected = get_escaped_text_output(test_cache_dir.join("dump.out"))
       ...

Alternatively, if both files had been installed by the software into the
``share/tests`` subdirectory of the installation prefix, the paths to the
two files would be referenced as follows:
Alternatively, if the file was copied to the ``share/tests`` subdirectory
as part of the installation process, the test could access the path as
follows:

.. code-block:: python

   def test_example(self):
       """check example table dump"""
       db_filename = self.prefix.share.tests.join("packages.db")
       ...
       expected = get_escaped_text_output(
           self.prefix.share.tests.join("dump.out")
       )
       ...
       db_filename = join_path(self.prefix.share.tests, "packages.db")


.. _check_outputs:

@@ -5874,9 +5717,9 @@ two files would be referenced as follows:
Comparing expected to actual outputs
""""""""""""""""""""""""""""""""""""

The ``check_outputs`` helper routine is available for packages to ensure
multiple expected outputs from running an executable are contained within
the actual outputs.
The helper function ``check_outputs`` is available for packages to ensure
the expected outputs from running an executable are contained within the
actual outputs.

The signature for ``check_outputs`` is:

@@ -5902,17 +5745,11 @@ Invoking the method is the equivalent of:
   if errors:
       raise RuntimeError("\n ".join(errors))

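A hedged usage sketch (the executable name and expected strings are
hypothetical; ``check_outputs`` is the helper described above):

.. code-block:: python

   def test_example(self):
       """ensure installed example produces the expected outputs"""
       example = which(self.prefix.bin.example)
       out = example(output=str.split, error=str.split)
       # Raises an error identifying any expected string missing from out.
       check_outputs(["Done.", "No errors"], out)
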
.. tip::

   If you want to see more examples from packages that use this helper, run
   ``spack pkg grep check_outputs | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.


.. _accessing-files:

"""""""""""""""""""""""""""""""""""""""""
Finding package- and test-related files
Accessing package- and test-related files
"""""""""""""""""""""""""""""""""""""""""

You may need to access files from one or more locations when writing
@@ -5921,7 +5758,8 @@ include test source files or includes them but has no way to build the
executables using the installed headers and libraries. In these cases
you may need to reference the files relative to one or more root directories.
The table below lists relevant path properties and provides additional
examples of their use. See :ref:`expected_test_output_from_file` for
examples of their use.
:ref:`Reading expected output <expected_test_output_from_file>` provides
examples of accessing files saved from the software repository, package
repository, and installation.

@@ -5950,6 +5788,7 @@ repository, and installation.
   - ``self.test_suite.current_test_data_dir``
   - ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``


.. _inheriting-tests:

""""""""""""""""""""""""""""
@@ -5992,7 +5831,7 @@ maintainers provide additional stand-alone tests customized to the package.
.. warning::

   Any package that implements a test method with the same name as an
   inherited method will override the inherited method. If that is not the
   inherited method overrides the inherited method. If that is not the
   goal and you are not explicitly calling and adding functionality to
   the inherited method for the test, then make sure that all test methods
   and embedded test parts have unique test names.
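
   A hedged sketch of the distinction (the class, base class, and method
   names are hypothetical):

   .. code-block:: python

      class MyPackage(SomeBasePackage):
          # Same name as the inherited method: this definition *replaces*
          # the base class's version. Call it explicitly to extend rather
          # than silently hide the inherited check.
          def test_base_check(self):
              """extend the inherited check with a package-specific step"""
              super().test_base_check()
              extra = which(self.prefix.bin.extra)
              extra()
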
@@ -6157,8 +5996,6 @@ running:
This is already part of the boilerplate for packages created with
``spack create``.

.. _file-filtering:

^^^^^^^^^^^^^^^^^^^
Filtering functions
^^^^^^^^^^^^^^^^^^^

@@ -253,6 +253,17 @@ can easily happen if it is not updated frequently, this behavior ensures that
spack has a way to know for certain about the status of any concrete spec on
the remote mirror, but can slow down pipeline generation significantly.

The ``--optimize`` argument is experimental and runs the generated pipeline
document through a series of optimization passes designed to reduce the size
of the generated file.

The ``--dependencies`` argument is also experimental and disables what in Gitlab
is referred to as DAG scheduling, internally using the ``dependencies`` keyword
rather than ``needs`` to list dependency jobs. The drawback of using this option
is that before any job can begin, all jobs in previous stages must first
complete. The benefit is that Gitlab allows more dependencies to be listed
when using ``dependencies`` instead of ``needs``.

The optional ``--output-file`` argument should be an absolute path (including
file name) to the generated pipeline, and if not given, the default is
``./.gitlab-ci.yml``.

@@ -1,13 +1,13 @@
sphinx==7.4.7
sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.6.1
sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.1
docutils==0.20.1
pygments==2.18.0
urllib3==2.2.2
pytest==8.3.2
urllib3==2.2.1
pytest==8.2.1
isort==5.13.2
black==24.8.0
flake8==7.1.1
mypy==1.11.1
black==24.4.2
flake8==7.0.0
mypy==1.10.0

96 lib/spack/env/cc vendored
@@ -174,46 +174,6 @@ preextend() {
    unset IFS
}

execute() {
    # dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
    if [ -n "${SPACK_TEST_COMMAND=}" ]; then
        case "$SPACK_TEST_COMMAND" in
            dump-args)
                IFS="$lsep"
                for arg in $full_command_list; do
                    echo "$arg"
                done
                unset IFS
                exit
                ;;
            dump-env-*)
                var=${SPACK_TEST_COMMAND#dump-env-}
                eval "printf '%s\n' \"\$0: \$var: \$$var\""
                ;;
            *)
                die "Unknown test command: '$SPACK_TEST_COMMAND'"
                ;;
        esac
    fi

    #
    # Write the input and output commands to debug logs if it's asked for.
    #
    if [ "$SPACK_DEBUG" = TRUE ]; then
        input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
        output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
        echo "[$mode] $command $input_command" >> "$input_log"
        IFS="$lsep"
        echo "[$mode] "$full_command_list >> "$output_log"
        unset IFS
    fi

    # Execute the full command, preserving spaces with IFS set
    # to the alarm bell separator.
    IFS="$lsep"; exec $full_command_list
    exit
}

# Fail with a clear message if the input contains any bell characters.
if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
    die "Compiler command line contains our separator ('${lsep}'). Cannot parse."
@@ -271,17 +231,12 @@ fi
#   ld     link
#   ccld   compile & link

# Note. SPACK_ALWAYS_XFLAGS are applied for all compiler invocations,
# including version checks (SPACK_XFLAGS variants are not applied
# for version checks).
command="${0##*/}"
comp="CC"
vcheck_flags=""
case "$command" in
    cpp)
        mode=cpp
        debug_flags="-g"
        vcheck_flags="${SPACK_ALWAYS_CPPFLAGS}"
        ;;
    cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe|craycc)
        command="$SPACK_CC"
@@ -289,7 +244,6 @@ case "$command" in
        comp="CC"
        lang_flags=C
        debug_flags="-g"
        vcheck_flags="${SPACK_ALWAYS_CFLAGS}"
        ;;
    c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
        command="$SPACK_CXX"
@@ -297,7 +251,6 @@ case "$command" in
        comp="CXX"
        lang_flags=CXX
        debug_flags="-g"
        vcheck_flags="${SPACK_ALWAYS_CXXFLAGS}"
        ;;
    ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang|crayftn)
        command="$SPACK_FC"
@@ -305,7 +258,6 @@ case "$command" in
        comp="FC"
        lang_flags=F
        debug_flags="-g"
        vcheck_flags="${SPACK_ALWAYS_FFLAGS}"
        ;;
    f77|xlf|xlf_r|pgf77)
        command="$SPACK_F77"
@@ -313,7 +265,6 @@ case "$command" in
        comp="F77"
        lang_flags=F
        debug_flags="-g"
        vcheck_flags="${SPACK_ALWAYS_FFLAGS}"
        ;;
    ld|ld.gold|ld.lld)
        mode=ld
@@ -414,11 +365,7 @@ unset IFS
export PATH="$new_dirs"

if [ "$mode" = vcheck ]; then
    full_command_list="$command"
    args="$@"
    extend full_command_list vcheck_flags
    extend full_command_list args
    execute
    exec "${command}" "$@"
fi

# Darwin's linker has a -r argument that merges object files together.
@@ -775,7 +722,6 @@ case "$mode" in
    cc|ccld)
        case $lang_flags in
            F)
                extend spack_flags_list SPACK_ALWAYS_FFLAGS
                extend spack_flags_list SPACK_FFLAGS
                ;;
        esac
@@ -785,7 +731,6 @@ esac
# C preprocessor flags come before any C/CXX flags
case "$mode" in
    cpp|as|cc|ccld)
        extend spack_flags_list SPACK_ALWAYS_CPPFLAGS
        extend spack_flags_list SPACK_CPPFLAGS
        ;;
esac
@@ -796,11 +741,9 @@ case "$mode" in
    cc|ccld)
        case $lang_flags in
            C)
                extend spack_flags_list SPACK_ALWAYS_CFLAGS
                extend spack_flags_list SPACK_CFLAGS
                ;;
            CXX)
                extend spack_flags_list SPACK_ALWAYS_CXXFLAGS
                extend spack_flags_list SPACK_CXXFLAGS
                ;;
        esac
@@ -990,4 +933,39 @@ if [ -n "$SPACK_CCACHE_BINARY" ]; then
    esac
fi

execute
# dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
if [ -n "${SPACK_TEST_COMMAND=}" ]; then
    case "$SPACK_TEST_COMMAND" in
        dump-args)
            IFS="$lsep"
            for arg in $full_command_list; do
                echo "$arg"
            done
            unset IFS
            exit
            ;;
        dump-env-*)
            var=${SPACK_TEST_COMMAND#dump-env-}
            eval "printf '%s\n' \"\$0: \$var: \$$var\""
            ;;
        *)
            die "Unknown test command: '$SPACK_TEST_COMMAND'"
            ;;
    esac
fi

#
# Write the input and output commands to debug logs if it's asked for.
#
if [ "$SPACK_DEBUG" = TRUE ]; then
    input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
    output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
    echo "[$mode] $command $input_command" >> "$input_log"
    IFS="$lsep"
    echo "[$mode] "$full_command_list >> "$output_log"
    unset IFS
fi

# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list

@@ -33,23 +33,8 @@
    pass


esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
# Ansi Control Sequence Introducers (CSI) are a well-defined format
#   Standard ECMA-48: Control Functions for Character-Imaging I/O Devices, section 5.4
#   https://www.ecma-international.org/wp-content/uploads/ECMA-48_5th_edition_june_1991.pdf
csi_pre = f"{esc}{lbracket}"
csi_param, csi_inter, csi_post = r"[0-?]", r"[ -/]", r"[@-~]"
ansi_csi = f"{csi_pre}{csi_param}*{csi_inter}*{csi_post}"
# General ansi escape sequences have well-defined prefixes,
# but content and suffixes are less reliable.
# Conservatively assume they end with either "<ESC>\" or "<BELL>",
# with no intervening "<ESC>"/"<BELL>" keys or newlines
esc_pre = f"{esc}[@-_]"
esc_content = f"[^{esc}{bell}{newline}]"
esc_post = f"(?:{esc}{bslash}|{bell})"
ansi_esc = f"{esc_pre}{esc_content}*{esc_post}"
# Use this to strip escape sequences
_escape = re.compile(f"{ansi_csi}|{ansi_esc}")
_escape = re.compile(r"\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07")

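
# A hedged usage sketch (editor's illustration; the sample string below is
# hypothetical): the compiled `_escape` pattern strips ANSI sequences.
#
#     clean = _escape.sub("", "\x1b[31mred\x1b[0m")
#     assert clean == "red"
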
# control characters for enabling/disabling echo
#

@@ -351,22 +351,6 @@ def _wrongly_named_spec(error_cls):
    return errors


@config_packages
def _ensure_all_virtual_packages_have_default_providers(error_cls):
    """All virtual packages must have a default provider explicitly set."""
    configuration = spack.config.create()
    defaults = configuration.get("packages", scope="defaults")
    default_providers = defaults["all"]["providers"]
    virtuals = spack.repo.PATH.provider_index.providers
    default_providers_filename = configuration.scopes["defaults"].get_section_filename("packages")

    return [
        error_cls(f"'{virtual}' must have a default provider in {default_providers_filename}", [])
        for virtual in virtuals
        if virtual not in default_providers
    ]


def _make_config_error(config_data, summary, error_cls):
    s = io.StringIO()
    s.write("Occurring in the following file:\n")
@@ -807,7 +791,7 @@ def check_virtual_with_variants(spec, msg):
        return
    error = error_cls(
        f"{pkg_name}: {msg}",
        [f"remove variants from '{spec}' in depends_on directive in {filename}"],
        f"remove variants from '{spec}' in depends_on directive in {filename}",
    )
    errors.append(error)

File diff suppressed because it is too large
@@ -1,154 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap concrete specs for clingo

Spack uses clingo to concretize specs. When clingo itself needs to be bootstrapped from sources,
we need to rely on another mechanism to get a concrete spec that fits the current host.

This module contains the logic to get a concrete spec for clingo, starting from a prototype
JSON file for a similar platform.
"""
import pathlib
import sys
from typing import Dict, Optional, Tuple

import archspec.cpu

import spack.compiler
import spack.compilers
import spack.platforms
import spack.spec
import spack.traverse

from .config import spec_for_current_python


class ClingoBootstrapConcretizer:
    def __init__(self, configuration):
        self.host_platform = spack.platforms.host()
        self.host_os = self.host_platform.operating_system("frontend")
        self.host_target = archspec.cpu.host().family
        self.host_architecture = spack.spec.ArchSpec.frontend_arch()
        self.host_architecture.target = str(self.host_target)
        self.host_compiler = self._valid_compiler_or_raise()
        self.host_python = self.python_external_spec()
        if str(self.host_platform) == "linux":
            self.host_libc = self.libc_external_spec()

        self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)

    def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
        if str(self.host_platform) == "linux":
            compiler_name = "gcc"
        elif str(self.host_platform) == "darwin":
            compiler_name = "apple-clang"
        elif str(self.host_platform) == "windows":
            compiler_name = "msvc"
        elif str(self.host_platform) == "freebsd":
            compiler_name = "clang"
        else:
            raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
        candidates = spack.compilers.compilers_for_spec(
            compiler_name, arch_spec=self.host_architecture
        )
        if not candidates:
            raise RuntimeError(
                f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
            )
        candidates.sort(key=lambda x: x.spec.version, reverse=True)
        return candidates[0]

    def _externals_from_yaml(
        self, configuration: "spack.config.Configuration"
    ) -> Tuple[Optional["spack.spec.Spec"], Optional["spack.spec.Spec"]]:
        packages_yaml = configuration.get("packages")
        requirements = {"cmake": "@3.20:", "bison": "@2.5:"}
        selected: Dict[str, Optional["spack.spec.Spec"]] = {"cmake": None, "bison": None}
        for pkg_name in ["cmake", "bison"]:
            if pkg_name not in packages_yaml:
                continue

            candidates = packages_yaml[pkg_name].get("externals", [])
            for candidate in candidates:
                s = spack.spec.Spec(candidate["spec"], external_path=candidate["prefix"])
                if not s.satisfies(requirements[pkg_name]):
                    continue

                if not s.intersects(f"%{self.host_compiler.spec}"):
                    continue

                if not s.intersects(f"arch={self.host_architecture}"):
                    continue

                selected[pkg_name] = self._external_spec(s)
                break
        return selected["cmake"], selected["bison"]

    def prototype_path(self) -> pathlib.Path:
        """Path to a prototype concrete specfile for clingo"""
        parent_dir = pathlib.Path(__file__).parent
        result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-{self.host_target}.json"
        if str(self.host_platform) == "linux":
            # Using aarch64 as a fallback, since it has gnuconfig (x86_64 doesn't have it)
            if not result.exists():
                result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-aarch64.json"

        elif str(self.host_platform) == "freebsd":
            result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-amd64.json"

        elif not result.exists():
            raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")

        return result

    def concretize(self) -> "spack.spec.Spec":
        # Read the prototype and mark it NOT concrete
        s = spack.spec.Spec.from_specfile(str(self.prototype_path()))
        s._mark_concrete(False)

        # Tweak it to conform to the host architecture
        for node in s.traverse():
            node.architecture.os = str(self.host_os)
            node.compiler = self.host_compiler.spec
            node.architecture = self.host_architecture

            if node.name == "gcc-runtime":
                node.versions = self.host_compiler.spec.versions

        for edge in spack.traverse.traverse_edges([s], cover="edges"):
            if edge.spec.name == "python":
                edge.spec = self.host_python

            if edge.spec.name == "bison" and self.external_bison:
                edge.spec = self.external_bison

            if edge.spec.name == "cmake" and self.external_cmake:
                edge.spec = self.external_cmake

            if "libc" in edge.virtuals:
                edge.spec = self.host_libc

        s._finalize_concretization()

        # Work around the fact that the installer calls Spec.dependents() and
        # we modified edges inconsistently
        return s.copy()

    def python_external_spec(self) -> "spack.spec.Spec":
        """Python external spec corresponding to the current running interpreter"""
        result = spack.spec.Spec(spec_for_current_python(), external_path=sys.exec_prefix)
        return self._external_spec(result)

    def libc_external_spec(self) -> "spack.spec.Spec":
        result = self.host_compiler.default_libc
        return self._external_spec(result)

    def _external_spec(self, initial_spec) -> "spack.spec.Spec":
        initial_spec.namespace = "builtin"
        initial_spec.compiler = self.host_compiler.spec
        initial_spec.architecture = self.host_architecture
        for flag_type in spack.spec.FlagMap.valid_compiler_flags():
            initial_spec.compiler_flags[flag_type] = []
        return spack.spec.parse_with_version_concrete(initial_spec)
@@ -129,10 +129,10 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
    configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
    for name, path in configuration_paths:
        platform = spack.platforms.host().name
        platform_scope = spack.config.DirectoryConfigScope(
            f"{name}/{platform}", os.path.join(path, platform)
        platform_scope = spack.config.ConfigScope(
            "/".join([name, platform]), os.path.join(path, platform)
        )
        generic_scope = spack.config.DirectoryConfigScope(name, path)
        generic_scope = spack.config.ConfigScope(name, path)
        config_scopes.extend([generic_scope, platform_scope])
        msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}"
        tty.debug(msg.format(generic_scope.name, generic_scope.path))
@@ -54,7 +54,6 @@
import spack.version

from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
from .clingo import ClingoBootstrapConcretizer
from .config import spack_python_interpreter, spec_for_current_python

#: Name of the file containing metadata about the bootstrapping source
@@ -269,13 +268,15 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:

        # Try to build and install from sources
        with spack_python_interpreter():
            # Add hint to use frontend operating system on Cray
            concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())

            if module == "clingo":
                bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
                concrete_spec = bootstrapper.concretize()
            else:
            concrete_spec = spack.spec.Spec(
                abstract_spec_str + " ^" + spec_for_current_python()
                # TODO: remove when the old concretizer is deprecated  # pylint: disable=fixme
                concrete_spec._old_concretize(  # pylint: disable=protected-access
                    deprecation_warning=False
                )
            else:
                concrete_spec.concretize()

            msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
@@ -302,7 +303,14 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
        concrete_spec = spack.spec.Spec(abstract_spec_str)
        if concrete_spec.name == "patchelf":
            concrete_spec._old_concretize(  # pylint: disable=protected-access
                deprecation_warning=False
            )
        else:
            concrete_spec.concretize()

        msg = "[BOOTSTRAP] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        with spack.config.override(self.mirror_scope):
File diff suppressed because one or more lines are too long
@@ -72,7 +72,6 @@
import spack.store
import spack.subprocess_context
import spack.user_environment
import spack.util.executable
import spack.util.path
import spack.util.pattern
from spack import traverse
@@ -459,7 +458,10 @@ def set_wrapper_variables(pkg, env):

    # Find ccache binary and hand it to build environment
    if spack.config.get("config:ccache"):
        env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
        ccache = Executable("ccache")
        if not ccache:
            raise RuntimeError("No ccache binary found in PATH")
        env.set(SPACK_CCACHE_BINARY, ccache)

    # Gather information about various types of dependencies
    link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
@@ -738,9 +740,7 @@ def get_rpaths(pkg):
    # Second module is our compiler mod name. We use that to get rpaths from
    # module show output.
    if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
        mod_rpath = path_from_modules([pkg.compiler.modules[1]])
        if mod_rpath:
            rpaths.append(mod_rpath)
        rpaths.append(path_from_modules([pkg.compiler.modules[1]]))
    return list(dedupe(filter_system_paths(rpaths)))
@@ -828,7 +828,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
    return env_base


class EnvironmentVisitor:
class EnvironmentVisitor(traverse.AbstractVisitor):
    def __init__(self, *roots: spack.spec.Spec, context: Context):
        # For the roots (well, marked specs) we follow different edges
        # than for their deps, depending on the context.
@@ -846,7 +846,7 @@ def __init__(self, *roots: spack.spec.Spec, context: Context):
            self.root_depflag = dt.RUN | dt.LINK

    def neighbors(self, item):
        spec = item.edge.spec
        spec = item[0].spec
        if spec.dag_hash() in self.root_hashes:
            depflag = self.root_depflag
        else:
@@ -1473,7 +1473,7 @@ def long_message(self):
            out.write("  {0}\n".format(self.log_name))

        # Also output the test log path IF it exists
        if self.context != "test" and have_log:
        if self.context != "test":
            test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
            if os.path.isfile(test_log):
                out.write("\nSee test log for details:\n")
@@ -162,9 +162,7 @@ def initconfig_compiler_entries(self):
        ld_flags = " ".join(flags["ldflags"])
        ld_format_string = "CMAKE_{0}_LINKER_FLAGS"
        # CMake has separate linker arguments for types of builds.
        # 'ldflags' should not be used with CMAKE_STATIC_LINKER_FLAGS which
        # is used by the archiver, so don't include "STATIC" in this loop:
        for ld_type in ["EXE", "MODULE", "SHARED"]:
        for ld_type in ["EXE", "MODULE", "SHARED", "STATIC"]:
            ld_string = ld_format_string.format(ld_type)
            entries.append(cmake_cache_string(ld_string, ld_flags))
@@ -124,8 +124,6 @@ def cuda_flags(arch_list):
    # minimum supported versions
    conflicts("%gcc@:4", when="+cuda ^cuda@11.0:")
    conflicts("%gcc@:5", when="+cuda ^cuda@11.4:")
    conflicts("%gcc@:7.2", when="+cuda ^cuda@12.4:")
    conflicts("%clang@:6", when="+cuda ^cuda@12.2:")

    # maximum supported version
    # NOTE:
@@ -138,14 +136,14 @@ def cuda_flags(arch_list):
    conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
    conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
    conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
    conflicts("%gcc@14:", when="+cuda ^cuda@:12.5")
    conflicts("%gcc@14:", when="+cuda ^cuda@:12.4")
    conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
    conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
    conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
    conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
    conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
    conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
    conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
    conflicts("%clang@18:", when="+cuda ^cuda@:12.4")

    # https://gist.github.com/ax3l/9489132#gistcomment-3860114
    conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
@@ -213,16 +211,12 @@ def cuda_flags(arch_list):
    conflicts("%intel@19.0:", when="+cuda ^cuda@:10.0")
    conflicts("%intel@19.1:", when="+cuda ^cuda@:10.1")
    conflicts("%intel@19.2:", when="+cuda ^cuda@:11.1.0")
    conflicts("%intel@2021:", when="+cuda ^cuda@:11.4.0")

    # XL is mostly relevant for ppc64le Linux
    conflicts("%xl@:12,14:", when="+cuda ^cuda@:9.1")
    conflicts("%xl@:12,14:15,17:", when="+cuda ^cuda@9.2")
    conflicts("%xl@:12,17:", when="+cuda ^cuda@:11.1.0")

    # PowerPC.
    conflicts("target=ppc64le", when="+cuda ^cuda@12.5:")

    # Darwin.
    # TODO: add missing conflicts for %apple-clang cuda@:10
    conflicts("platform=darwin", when="+cuda ^cuda@11.0.2:")
    conflicts("platform=darwin", when="+cuda ^cuda@11.0.2: ")
@@ -72,7 +72,7 @@ def build_directory(self):
    def build_args(self):
        """Arguments for ``go build``."""
        # Pass ldflags -s = --strip-all and -w = --no-warnings by default
        return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
        return ["-ldflags", "-s -w", "-o", f"{self.pkg.name}"]

    @property
    def check_args(self):
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common utilities for managing intel oneapi packages."""
import getpass
import os
import platform
import shutil
@@ -12,7 +13,6 @@
from llnl.util.filesystem import HeaderList, LibraryList, find_libraries, join_path, mkdirp
from llnl.util.link_tree import LinkTree

import spack.util.path
from spack.build_environment import dso_suffix
from spack.directives import conflicts, license, redistribute, variant
from spack.package_base import InstallError
@@ -99,7 +99,7 @@ def install_component(self, installer_path):
        # with other install depends on the userid. For root, we
        # delete the installercache before and after install. For
        # non root we redefine the HOME environment variable.
        if spack.util.path.get_user() == "root":
        if getpass.getuser() == "root":
            shutil.rmtree("/var/intel/installercache", ignore_errors=True)

        bash = Executable("bash")
@@ -122,7 +122,7 @@ def install_component(self, installer_path):
            self.prefix,
        )

        if spack.util.path.get_user() == "root":
        if getpass.getuser() == "root":
            shutil.rmtree("/var/intel/installercache", ignore_errors=True)

        # Some installers have a bug and do not return an error code when failing
@@ -85,28 +85,20 @@ def homepage(cls):
            return "https://bioconductor.org/packages/" + cls.bioc

    @lang.classproperty
    def urls(cls):
    def url(cls):
        if cls.cran:
            return [
            return (
                "https://cloud.r-project.org/src/contrib/"
                + f"{cls.cran}_{str(list(cls.versions)[0])}.tar.gz",
                "https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
                + f"{cls.cran}_{str(list(cls.versions)[0])}.tar.gz",
            ]
        elif cls.bioc:
            return [
                "https://bioconductor.org/packages/release/bioc/src/contrib/"
                + f"{cls.bioc}_{str(list(cls.versions)[0])}.tar.gz",
                "https://bioconductor.org/packages/release/data/annotation/src/contrib/"
                + f"{cls.bioc}_{str(list(cls.versions)[0])}.tar.gz",
            ]
        else:
            return [cls.url]
                + cls.cran
                + "_"
                + str(list(cls.versions)[0])
                + ".tar.gz"
            )

    @lang.classproperty
    def list_url(cls):
        if cls.cran:
            return "https://cloud.r-project.org/src/contrib/"
            return "https://cloud.r-project.org/src/contrib/Archive/" + cls.cran + "/"

    @property
    def git(self):
@@ -139,10 +139,6 @@ def configure(self, pkg, spec, prefix):
        args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
        args.extend(self.configure_args())

        # https://github.com/Python-SIP/sip/commit/cb0be6cb6e9b756b8b0db3136efb014f6fb9b766
        if spec["py-sip"].satisfies("@6.1.0:"):
            args.extend(["--scripts-dir", pkg.prefix.bin])

        sip_build = Executable(spec["py-sip"].prefix.bin.join("sip-build"))
        sip_build(*args)
@@ -34,8 +34,6 @@ def _misc_cache():
    return spack.util.file_cache.FileCache(path)


FileCacheType = Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton]

#: Spack's cache for small data
MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
    llnl.util.lang.Singleton(_misc_cache)
@@ -22,8 +22,6 @@
from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener

import ruamel.yaml

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import memoized
@@ -38,7 +36,6 @@
import spack.paths
import spack.repo
import spack.spec
import spack.stage
import spack.util.git
import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml
@@ -72,7 +69,7 @@
# TODO: Remove this in Spack 0.23
SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
JOB_NAME_FORMAT = (
    "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{ arch=architecture}"
    "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{arch=architecture}"
)
IS_WINDOWS = sys.platform == "win32"
spack_gpg = spack.main.SpackCommand("gpg")
@@ -554,9 +551,10 @@ def generate_gitlab_ci_yaml(
    env,
    print_summary,
    output_file,
    *,
    prune_dag=False,
    check_index_only=False,
    run_optimizer=False,
    use_dependencies=False,
    artifacts_root=None,
    remote_mirror_override=None,
):
@@ -577,6 +575,12 @@ def generate_gitlab_ci_yaml(
            this mode results in faster yaml generation time). Otherwise, also
            check each spec directly by url (useful if there is no index or it
            might be out of date).
        run_optimizer (bool): If True, post-process the generated yaml to try
            to reduce the size (attempts to collect repeated configuration
            and replace it with definitions).
        use_dependencies (bool): If true, use "dependencies" rather than "needs"
            ("needs" allows DAG scheduling). Useful if gitlab instance cannot
            be configured to handle more than a few "needs" per job.
        artifacts_root (str): Path where artifacts like logs, environment
            files (spack.yaml, spack.lock), etc should be written. GitLab
            requires this to be within the project directory.
@@ -810,8 +814,7 @@ def ensure_expected_target_path(path):
    cli_scopes = [
        os.path.relpath(s.path, concrete_env_dir)
        for s in cfg.scopes().values()
        if not s.writable
        and isinstance(s, (cfg.DirectoryConfigScope))
        if isinstance(s, cfg.ImmutableConfigScope)
        and s.path not in env_includes
        and os.path.exists(s.path)
    ]
@@ -1108,7 +1111,7 @@ def main_script_replacements(cmd):
    if cdash_handler and cdash_handler.auth_token:
        try:
            cdash_handler.populate_buildgroup(all_job_names)
        except (SpackError, HTTPError, URLError, TimeoutError) as err:
        except (SpackError, HTTPError, URLError) as err:
            tty.warn(f"Problem populating buildgroup: {err}")
    else:
        tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -1268,6 +1271,17 @@ def main_script_replacements(cmd):
        with open(copy_specs_file, "w") as fd:
            fd.write(json.dumps(buildcache_copies))

        # TODO(opadron): remove this or refactor
        if run_optimizer:
            import spack.ci_optimization as ci_opt

            output_object = ci_opt.optimizer(output_object)

        # TODO(opadron): remove this or refactor
        if use_dependencies:
            import spack.ci_needs_workaround as cinw

            output_object = cinw.needs_to_dependencies(output_object)
    else:
        # No jobs were generated
        noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
@@ -1296,11 +1310,8 @@ def main_script_replacements(cmd):
    if not rebuild_everything:
        sys.exit(1)

    # Minimize yaml output size through use of anchors
    syaml.anchorify(sorted_output)

    with open(output_file, "w") as f:
        ruamel.yaml.YAML().dump(sorted_output, f)
    with open(output_file, "w") as outf:
        outf.write(syaml.dump(sorted_output, default_flow_style=True))
def _url_encode_string(input_string):
@@ -1371,6 +1382,15 @@ def can_verify_binaries():
    return len(gpg_util.public_keys()) >= 1


def _push_to_build_cache(spec: spack.spec.Spec, sign_binaries: bool, mirror_url: str) -> None:
    """Unchecked version of the public API, for easier mocking"""
    bindist.push_or_raise(
        spec,
        spack.mirror.Mirror.from_url(mirror_url).push_url,
        bindist.PushOptions(force=True, unsigned=not sign_binaries),
    )


def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: bool) -> bool:
    """Push one or more binary packages to the mirror.
@@ -1381,13 +1401,20 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: bool) -> bool:
        sign_binaries: If True, spack will attempt to sign binary package before pushing.
    """
    tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
    signing_key = bindist.select_signing_key() if sign_binaries else None
    try:
        bindist.push_or_raise([spec], out_url=mirror_url, signing_key=signing_key)
        _push_to_build_cache(spec, sign_binaries, mirror_url)
        return True
    except bindist.PushToBuildCacheError as e:
        tty.error(f"Problem writing to {mirror_url}: {e}")
        tty.error(str(e))
        return False
    except Exception as e:
        # TODO (zackgalbreath): write an adapter for boto3 exceptions so we can catch a specific
        # exception instead of parsing str(e)...
        msg = str(e)
        if any(x in msg for x in ["Access Denied", "InvalidAccessKeyId"]):
            tty.error(f"Permission problem writing to {mirror_url}: {msg}")
            return False
        raise
def remove_other_mirrors(mirrors_to_keep, scope=None):
@@ -2068,7 +2095,7 @@ def read_broken_spec(broken_spec_url):
    """
    try:
        _, _, fs = web_util.read_from_url(broken_spec_url)
    except web_util.SpackWebError:
    except (URLError, web_util.SpackWebError, HTTPError):
        tty.warn(f"Unable to read broken spec from {broken_spec_url}")
        return None
34 lib/spack/spack/ci_needs_workaround.py Normal file
@@ -0,0 +1,34 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc

get_job_name = lambda needs_entry: (
    needs_entry.get("job")
    if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
    else needs_entry if isinstance(needs_entry, str) else None
)


def convert_job(job_entry):
    if not isinstance(job_entry, collections.abc.Mapping):
        return job_entry

    needs = job_entry.get("needs")
    if needs is None:
        return job_entry

    new_job = {}
    new_job.update(job_entry)
    del new_job["needs"]

    new_job["dependencies"] = list(
        filter((lambda x: x is not None), (get_job_name(needs_entry) for needs_entry in needs))
    )

    return new_job


def needs_to_dependencies(yaml):
    return dict((k, convert_job(v)) for k, v in yaml.items())
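A minimal usage sketch of this module (the pipeline dict below is hypothetical): needs_to_dependencies rewrites each job's "needs" list into a plain "dependencies" list, which GitLab instances that cannot handle many "needs" per job accept:

    pipeline = {
        "build-zlib": {"script": ["spack install"], "needs": [{"job": "generate", "artifacts": True}]}
    }
    converted = needs_to_dependencies(pipeline)
    # converted["build-zlib"]["dependencies"] == ["generate"]; "needs" is gone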
363 lib/spack/spack/ci_optimization.py Normal file
@@ -0,0 +1,363 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import collections.abc
import copy
import hashlib

import spack.util.spack_yaml as syaml


def sort_yaml_obj(obj):
    if isinstance(obj, collections.abc.Mapping):
        return syaml.syaml_dict(
            (k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0])))
        )

    if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
        return syaml.syaml_list(sort_yaml_obj(x) for x in obj)

    return obj
def matches(obj, proto):
    """Returns True if the test object "obj" matches the prototype object
    "proto".

    If obj and proto are mappings, obj matches proto if (key in obj) and
    (obj[key] matches proto[key]) for every key in proto.

    If obj and proto are sequences, obj matches proto if they are of the same
    length and (a matches b) for every (a,b) in zip(obj, proto).

    Otherwise, obj matches proto if obj == proto.

    Precondition: proto must not have any reference cycles
    """
    if isinstance(obj, collections.abc.Mapping):
        if not isinstance(proto, collections.abc.Mapping):
            return False

        return all((key in obj and matches(obj[key], val)) for key, val in proto.items())

    if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
        if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
            return False

        if len(obj) != len(proto):
            return False

        return all(matches(obj[index], val) for index, val in enumerate(proto))

    return obj == proto
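A small illustration of the matching rules above (the values are chosen arbitrarily):

    # Mappings match when every key/value in the prototype is matched by obj;
    # obj may carry extra keys. Sequences must match element-wise and in length.
    assert matches({"tags": ["x86_64"], "script": ["make"]}, {"tags": ["x86_64"]})
    assert not matches({"tags": ["aarch64"]}, {"tags": ["x86_64"]})
    assert matches([1, 2], [1, 2]) and not matches([1, 2], [1])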
def subkeys(obj, proto):
    """Returns the test mapping "obj" after factoring out the items it has in
    common with the prototype mapping "proto".

    Consider a recursive merge operation, merge(a, b) on mappings a and b, that
    returns a mapping, m, whose keys are the union of the keys of a and b, and
    for every such key, "k", its corresponding value is:

    - merge(a[key], b[key]) if a[key] and b[key] are mappings, or
    - b[key] if (key in b) and not matches(a[key], b[key]), or
    - a[key] otherwise

    If obj and proto are mappings, the returned object is the smallest object,
    "a", such that merge(a, proto) matches obj.

    Otherwise, obj is returned.
    """
    if not (
        isinstance(obj, collections.abc.Mapping) and isinstance(proto, collections.abc.Mapping)
    ):
        return obj

    new_obj = {}
    for key, value in obj.items():
        if key not in proto:
            new_obj[key] = value
            continue

        if matches(value, proto[key]) and matches(proto[key], value):
            continue

        if isinstance(value, collections.abc.Mapping):
            new_obj[key] = subkeys(value, proto[key])
            continue

        new_obj[key] = value

    return new_obj
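For example, factoring a prototype out of a job leaves only the keys that differ (the values here are illustrative):

    job = {"tags": ["x86_64"], "script": ["make"], "stage": "build"}
    proto = {"tags": ["x86_64"], "script": ["make"]}
    # Keys fully covered by the prototype drop out; the remainder is the
    # smallest mapping that, merged over proto, reproduces the original job.
    assert subkeys(job, proto) == {"stage": "build"}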
def add_extends(yaml, key):
    """Modifies the given object "yaml" so that it includes an "extends" key
    whose value features "key".

    If "extends" is not in yaml, then yaml is modified such that
    yaml["extends"] == key.

    If yaml["extends"] is a str, then yaml is modified such that
    yaml["extends"] == [yaml["extends"], key]

    If yaml["extends"] is a list that does not include key, then key is
    appended to the list.

    Otherwise, yaml is left unchanged.
    """

    has_key = "extends" in yaml
    extends = yaml.get("extends")

    if has_key and not isinstance(extends, (str, collections.abc.Sequence)):
        return

    if extends is None:
        yaml["extends"] = key
        return

    if isinstance(extends, str):
        if extends != key:
            yaml["extends"] = [extends, key]
        return

    if key not in extends:
        extends.append(key)
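The three mutation cases read as follows (illustrative dicts):

    a = {}
    add_extends(a, ".c0")              # a == {"extends": ".c0"}
    b = {"extends": ".base"}
    add_extends(b, ".c0")              # b["extends"] == [".base", ".c0"]
    c = {"extends": [".base"]}
    add_extends(c, ".c0")              # ".c0" is appended to the list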
def common_subobject(yaml, sub):
    """Factor prototype object "sub" out of the values of mapping "yaml".

    Consider a modified copy of yaml, "new", where for each key, "key" in yaml:

    - If yaml[key] matches sub, then new[key] = subkeys(yaml[key], sub).
    - Otherwise, new[key] = yaml[key].

    If the above match criteria is not satisfied for any such key, then (yaml,
    None) is returned. The yaml object is returned unchanged.

    Otherwise, each matching value in new is modified as in
    add_extends(new[key], common_key), and then new[common_key] is set to sub.
    The common_key value is chosen such that it does not match any preexisting
    key in new. In this case, (new, common_key) is returned.
    """
    match_list = set(k for k, v in yaml.items() if matches(v, sub))

    if not match_list:
        return yaml, None

    common_prefix = ".c"
    common_index = 0

    while True:
        common_key = "".join((common_prefix, str(common_index)))
        if common_key not in yaml:
            break
        common_index += 1

    new_yaml = {}

    for key, val in yaml.items():
        new_yaml[key] = copy.deepcopy(val)

        if not matches(val, sub):
            continue

        new_yaml[key] = subkeys(new_yaml[key], sub)
        add_extends(new_yaml[key], common_key)

    new_yaml[common_key] = sub

    return new_yaml, common_key
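Putting the pieces together, factoring a common prototype out of two jobs yields a shared ".c0" anchor job (the job names are hypothetical):

    jobs = {
        "build-a": {"tags": ["x86_64"], "stage": "build"},
        "build-b": {"tags": ["x86_64"], "stage": "test"},
    }
    new, key = common_subobject(jobs, {"tags": ["x86_64"]})
    # key == ".c0"; both jobs now carry extends == ".c0" and lose "tags",
    # while new[".c0"] == {"tags": ["x86_64"]} holds the shared portion.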
def print_delta(name, old, new, applied=None):
    delta = new - old
    reldelta = (1000 * delta) // old
    reldelta = (reldelta // 10, reldelta % 10)

    if applied is None:
        applied = new <= old

    print(
        "\n".join(
            (
                "{0} {1}:",
                "  before: {2: 10d}",
                "  after : {3: 10d}",
                "  delta : {4:+10d} ({5:=+3d}.{6}%)",
            )
        ).format(name, ("+" if applied else "x"), old, new, delta, reldelta[0], reldelta[1])
    )
def try_optimization_pass(name, yaml, optimization_pass, *args, **kwargs):
    """Try applying an optimization pass and return information about the
    result

    "name" is a string describing the nature of the pass. If it is a non-empty
    string, summary statistics are also printed to stdout.

    "yaml" is the object to apply the pass to.

    "optimization_pass" is the function implementing the pass to be applied.

    "args" and "kwargs" are the additional arguments to pass to optimization
    pass. The pass is applied as

    >>> (new_yaml, *other_results) = optimization_pass(yaml, *args, **kwargs)

    The pass's results are greedily rejected if it does not modify the original
    yaml document, or if it produces a yaml document that serializes to a
    larger string.

    Returns (new_yaml, yaml, applied, other_results) if applied, or
    (yaml, new_yaml, applied, other_results) otherwise.
    """
    result = optimization_pass(yaml, *args, **kwargs)
    new_yaml, other_results = result[0], result[1:]

    if new_yaml is yaml:
        # pass was not applied
        return (yaml, new_yaml, False, other_results)

    pre_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
    post_size = len(syaml.dump_config(sort_yaml_obj(new_yaml), default_flow_style=True))

    # pass makes the size worse: not applying
    applied = post_size <= pre_size
    if applied:
        yaml, new_yaml = new_yaml, yaml

    if name:
        print_delta(name, pre_size, post_size, applied)

    return (yaml, new_yaml, applied, other_results)
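A typical invocation, mirroring how the optimizer below drives its passes (here "jobs" stands in for the mapping of job names to job definitions):

    jobs, rejected, applied, _ = try_optimization_pass(
        "tag factorization", jobs, common_subobject, {"tags": ["x86_64"]}
    )
    # `applied` reports whether the factored document was kept, i.e. whether
    # it serialized to a smaller string than the original.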
def build_histogram(iterator, key):
    """Builds a histogram of values given an iterable of mappings and a key.

    For each mapping "m" with key "key" in iterator, the value m[key] is
    considered.

    Returns a list of tuples (hash, count, proportion, value), where

    - "hash" is a sha1sum hash of the value.
    - "count" is the number of occurrences of values that hash to "hash".
    - "proportion" is the proportion of all values considered above that
      hash to "hash".
    - "value" is one of the values considered above that hash to "hash".
      Which value is chosen when multiple values hash to the same "hash" is
      undefined.

    The list is sorted in descending order by count, yielding the most
    frequently occurring hashes first.
    """
    buckets = collections.defaultdict(int)
    values = {}

    num_objects = 0
    for obj in iterator:
        num_objects += 1

        try:
            val = obj[key]
        except (KeyError, TypeError):
            continue

        value_hash = hashlib.sha1()
        value_hash.update(syaml.dump_config(sort_yaml_obj(val)).encode())
        value_hash = value_hash.hexdigest()

        buckets[value_hash] += 1
        values[value_hash] = val

    return [
        (h, buckets[h], float(buckets[h]) / num_objects, values[h])
        for h in sorted(buckets.keys(), key=lambda k: -buckets[k])
    ]
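For instance, over three hypothetical jobs sharing a tag list:

    jobs = [{"tags": ["x86_64"]}, {"tags": ["x86_64"]}, {"tags": ["ppc64le"]}]
    hash_, count, proportion, value = build_histogram(jobs, "tags")[0]
    # count == 2, proportion == 2/3, value == ["x86_64"]  (most frequent first)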
def optimizer(yaml):
    original_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))

    # try factoring out commonly repeated portions
    common_job = {
        "variables": {"SPACK_COMPILER_ACTION": "NONE"},
        "after_script": ['rm -rf "./spack"'],
        "artifacts": {"paths": ["jobs_scratch_dir", "cdash_report"], "when": "always"},
    }

    # look for a list of tags that appear frequently
    _, count, proportion, tags = next(iter(build_histogram(yaml.values(), "tags")), (None,) * 4)

    # If a list of tags is found, and there is more than one job that uses it,
    # *and* the jobs that do use it represent at least 70% of all jobs, then
    # add the list to the prototype object.
    if tags and count > 1 and proportion >= 0.70:
        common_job["tags"] = tags

    # apply common object factorization
    yaml, other, applied, rest = try_optimization_pass(
        "general common object factorization", yaml, common_subobject, common_job
    )

    # look for a common script, and try factoring that out
    _, count, proportion, script = next(
        iter(build_histogram(yaml.values(), "script")), (None,) * 4
    )

    if script and count > 1 and proportion >= 0.70:
        yaml, other, applied, rest = try_optimization_pass(
            "script factorization", yaml, common_subobject, {"script": script}
        )

    # look for a common before_script, and try factoring that out
    _, count, proportion, script = next(
        iter(build_histogram(yaml.values(), "before_script")), (None,) * 4
    )

    if script and count > 1 and proportion >= 0.70:
        yaml, other, applied, rest = try_optimization_pass(
            "before_script factorization", yaml, common_subobject, {"before_script": script}
        )

    # Look specifically for the SPACK_ROOT_SPEC environment variables.
    # Try to factor them out.
    h = build_histogram(
        (getattr(val, "get", lambda *args: {})("variables") for val in yaml.values()),
        "SPACK_ROOT_SPEC",
    )

    # In this case, we try to factor out *all* instances of the SPACK_ROOT_SPEC
    # environment variable; not just the one that appears with the greatest
    # frequency. We only require that more than 1 job uses a given instance's
    # value, because we expect the value to be very large, and so expect even
    # few-to-one factorizations to yield large space savings.
    counter = 0
    for _, count, proportion, spec in h:
        if count <= 1:
            continue

        counter += 1

        yaml, other, applied, rest = try_optimization_pass(
            "SPACK_ROOT_SPEC factorization ({count})".format(count=counter),
            yaml,
            common_subobject,
            {"variables": {"SPACK_ROOT_SPEC": spec}},
        )

    new_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))

    print("\n")
    print_delta("overall summary", original_size, new_size)
    print("\n")
    return yaml
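End to end, the optimizer is a pure transformation over the job mapping; a sketch of the intended call site (the name output_object follows the caller in generate_gitlab_ci_yaml above):

    # output_object maps GitLab job names to job definitions.
    output_object = optimizer(output_object)  # prints a before/after size delta per pass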
@@ -237,7 +237,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
    if len(matching_specs) <= 1:
        return

    format_string = "{name}{@version}{%compiler.name}{@compiler.version}{ arch=architecture}"
    format_string = "{name}{@version}{%compiler.name}{@compiler.version}{arch=architecture}"
    args = ["%s matches multiple packages." % spec, "Matching packages:"]
    args += [
        colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
@@ -336,7 +336,6 @@ def display_specs(specs, args=None, **kwargs):
        groups (bool): display specs grouped by arch/compiler (default True)
        decorator (typing.Callable): function to call to decorate specs
        all_headers (bool): show headers even when arch/compiler aren't defined
        status_fn (typing.Callable): if provided, prepend install-status info
        output (typing.IO): A file object to write to. Default is ``sys.stdout``

    """
@@ -360,7 +359,6 @@ def get_arg(name, default=None):
    groups = get_arg("groups", True)
    all_headers = get_arg("all_headers", False)
    output = get_arg("output", sys.stdout)
    status_fn = get_arg("status_fn", None)

    decorator = get_arg("decorator", None)
    if decorator is None:
@@ -388,13 +386,6 @@ def get_arg(name, default=None):
    def fmt(s, depth=0):
        """Formatter function for all output specs"""
        string = ""

        if status_fn:
            # This was copied from spec.tree's colorization logic
            # then shortened because it seems like status_fn should
            # always return an InstallStatus
            string += colorize(status_fn(s).value)

        if hashes:
            string += gray_hash(s, hlen) + " "
        string += depth * " "
@@ -453,7 +444,7 @@ def format_list(specs):
def filter_loaded_specs(specs):
    """Filter a list of specs returning only those that are
    currently loaded."""
    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
    return [x for x in specs if x.dag_hash() in hashes]
@@ -165,7 +165,7 @@ def _reset(args):
    if not ok_to_continue:
        raise RuntimeError("Aborting")

    for scope in spack.config.CONFIG.writable_scopes:
    for scope in spack.config.CONFIG.file_scopes:
        # The default scope should stay untouched
        if scope.name == "defaults":
            continue
@@ -3,24 +3,28 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import copy
import glob
import hashlib
import json
import multiprocessing
import multiprocessing.pool
import os
import shutil
import sys
import tempfile
from typing import List, Tuple
from typing import Dict, List, Optional, Tuple, Union

import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.lang import elide_list, stable_partition
from llnl.util.lang import elide_list

import spack.binary_distribution as bindist
import spack.cmd
import spack.config
import spack.deptypes as dt
import spack.environment as ev
import spack.error
import spack.hash_types as ht
import spack.mirror
import spack.oci.oci
import spack.oci.opener
@@ -31,13 +35,28 @@
import spack.store
import spack.user_environment
import spack.util.crypto
import spack.util.parallel
import spack.util.url as url_util
import spack.util.web as web_util
from spack import traverse
from spack.build_environment import determine_number_of_jobs
from spack.cmd import display_specs
from spack.cmd.common import arguments
from spack.oci.image import ImageReference
from spack.oci.image import (
    Digest,
    ImageReference,
    default_config,
    default_index_tag,
    default_manifest,
    default_tag,
    tag_is_spec,
)
from spack.oci.oci import (
    copy_missing_layers_with_retry,
    get_manifest_and_config_with_retry,
    list_tags,
    upload_blob_with_retry,
    upload_manifest_with_retry,
)
from spack.spec import Spec, save_dependency_specfiles

description = "create, download and install binary packages"
@@ -51,6 +70,12 @@ def setup_parser(subparser: argparse.ArgumentParser):

    push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
    push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists")
    push.add_argument(
        "--allow-root",
        "-a",
        action="store_true",
        help="allow install root string in binary files after RPATH substitution",
    )
    push_sign = push.add_mutually_exclusive_group(required=False)
    push_sign.add_argument(
        "--unsigned",
@@ -93,17 +118,6 @@ def setup_parser(subparser: argparse.ArgumentParser):
        "Alternatively, one can decide to build a cache for only the package or only the "
        "dependencies",
    )
    with_or_without_build_deps = push.add_mutually_exclusive_group()
    with_or_without_build_deps.add_argument(
        "--with-build-dependencies",
        action="store_true",
        help="include build dependencies in the buildcache",
    )
    with_or_without_build_deps.add_argument(
        "--without-build-dependencies",
        action="store_true",
        help="exclude build dependencies from the buildcache",
    )
    push.add_argument(
        "--fail-fast",
        action="store_true",
@@ -176,6 +190,10 @@ def setup_parser(subparser: argparse.ArgumentParser):
    keys.add_argument("-f", "--force", action="store_true", help="force new download of keys")
    keys.set_defaults(func=keys_fn)

    preview = subparsers.add_parser("preview", help=preview_fn.__doc__)
    arguments.add_common_arguments(preview, ["installed_specs"])
    preview.set_defaults(func=preview_fn)

    # Check if binaries need to be rebuilt on remote mirror
    check = subparsers.add_parser("check", help=check_fn.__doc__)
    check.add_argument(
@@ -321,6 +339,39 @@ def _format_spec(spec: Spec) -> str:
    return spec.cformat("{name}{@version}{/hash:7}")


def _progress(i: int, total: int):
    if total > 1:
        digits = len(str(total))
        return f"[{i+1:{digits}}/{total}] "
    return ""


class NoPool:
    def map(self, func, args):
        return [func(a) for a in args]

    def starmap(self, func, args):
        return [func(*a) for a in args]

    def __enter__(self):
        return self

    def __exit__(self, *args):
        pass


MaybePool = Union[multiprocessing.pool.Pool, NoPool]


def _make_pool() -> MaybePool:
    """Can't use threading because it's unsafe, and can't use spawned processes because of globals.
    That leaves only forking"""
    if multiprocessing.get_start_method() == "fork":
        return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True))
    else:
        return NoPool()
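Since NoPool mirrors the small slice of the multiprocessing pool interface used here (map, starmap, and the context-manager protocol), call sites can stay pool-agnostic; a minimal sketch:

    with _make_pool() as pool:  # a real fork-based Pool where available, NoPool otherwise
        lengths = pool.map(len, ["zlib", "cmake"])  # same call shape in both cases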
def _skip_no_redistribute_for_public(specs):
    remaining_specs = list()
    removed_specs = list()
@@ -340,45 +391,6 @@ def _skip_no_redistribute_for_public(specs):
    return remaining_specs


class PackagesAreNotInstalledError(spack.error.SpackError):
    """Raised when a list of specs is not installed but picked to be packaged."""

    def __init__(self, specs: List[Spec]):
        super().__init__(
            "Cannot push non-installed packages",
            ", ".join(elide_list([_format_spec(s) for s in specs], 5)),
        )


class PackageNotInstalledError(spack.error.SpackError):
    """Raised when a spec is not installed but picked to be packaged."""


def _specs_to_be_packaged(
    requested: List[Spec], things_to_install: str, build_deps: bool
) -> List[Spec]:
    """Collect all non-external specs, with or without roots and dependencies"""
    if "dependencies" not in things_to_install:
        deptype = dt.NONE
    elif build_deps:
        deptype = dt.ALL
    else:
        deptype = dt.RUN | dt.LINK | dt.TEST
    specs = [
        s
        for s in traverse.traverse_nodes(
            requested,
            root="package" in things_to_install,
            deptype=deptype,
            order="breadth",
            key=traverse.by_dag_hash,
        )
        if not s.external
    ]
    specs.reverse()
    return specs
def push_fn(args):
    """create a binary package and push it to a mirror"""
    if args.spec_file:
@@ -392,6 +404,11 @@ def push_fn(args):
    else:
        roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()

    if args.allow_root:
        tty.warn(
            "The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
        )

    mirror: spack.mirror.Mirror = args.mirror

    # Check if this is an OCI image.
@@ -417,79 +434,84 @@ def push_fn(args):
            "Code signing is currently not supported for OCI images. "
            "Use --unsigned to silence this warning."
        )
        unsigned = True

    # Select a signing key, or None if unsigned.
    signing_key = None if unsigned else (args.key or bindist.select_signing_key())

    specs = _specs_to_be_packaged(
    # This is a list of installed, non-external specs.
    specs = bindist.specs_to_be_packaged(
        roots,
        things_to_install=args.things_to_install,
        build_deps=args.with_build_dependencies or not args.without_build_dependencies,
        root="package" in args.things_to_install,
        dependencies="dependencies" in args.things_to_install,
    )

    if not args.private:
        specs = _skip_no_redistribute_for_public(specs)

    # When pushing multiple specs, print the url once ahead of time, as well as how
    # many specs are being pushed.
    if len(specs) > 1:
        tty.info(f"Selected {len(specs)} specs to push to {push_url}")

    # Pushing not installed specs is an error. Either fail fast or populate the error list and
    # push installed package in best effort mode.
    failed: List[Tuple[Spec, BaseException]] = []
    with spack.store.STORE.db.read_transaction():
        if any(not s.installed for s in specs):
            specs, not_installed = stable_partition(specs, lambda s: s.installed)
            if args.fail_fast:
                raise PackagesAreNotInstalledError(not_installed)
            else:
                failed.extend(
                    (s, PackageNotInstalledError("package not installed")) for s in not_installed
                )
    failed = []

    with bindist.default_push_context() as (tmpdir, executor):
        if target_image:
            base_image = ImageReference.from_string(args.base_image) if args.base_image else None
            skipped, base_images, checksums, upload_errors = bindist._push_oci(
    # TODO: unify this logic in the future.
    if target_image:
        base_image = ImageReference.from_string(args.base_image) if args.base_image else None
        with tempfile.TemporaryDirectory(
            dir=spack.stage.get_stage_root()
        ) as tmpdir, _make_pool() as pool:
            skipped, base_images, checksums = _push_oci(
                target_image=target_image,
                base_image=base_image,
                installed_specs_with_deps=specs,
                force=args.force,
                tmpdir=tmpdir,
                executor=executor,
                pool=pool,
            )

            if upload_errors:
                failed.extend(upload_errors)

            # Apart from creating manifests for each individual spec, we allow users to create a
            # separate image tag for all root specs and their runtime dependencies.
            elif args.tag:
            if args.tag:
                tagged_image = target_image.with_tag(args.tag)
                # _push_oci may not populate base_images if binaries were already in the registry
                for spec in roots:
                    bindist._oci_update_base_images(
                    _update_base_images(
                        base_image=base_image,
                        target_image=target_image,
                        spec=spec,
                        base_image_cache=base_images,
                    )
                bindist._oci_put_manifest(
                    base_images, checksums, tagged_image, tmpdir, None, None, *roots
                )
                _put_manifest(base_images, checksums, tagged_image, tmpdir, None, None, *roots)
                tty.info(f"Tagged {tagged_image}")

        else:
            skipped, upload_errors = bindist._push(
                specs,
                out_url=push_url,
                force=args.force,
                update_index=args.update_index,
                signing_key=signing_key,
                tmpdir=tmpdir,
                executor=executor,
            )
            failed.extend(upload_errors)
    else:
        skipped = []

        for i, spec in enumerate(specs):
            try:
                bindist.push_or_raise(
                    spec,
                    push_url,
                    bindist.PushOptions(
                        force=args.force,
                        unsigned=unsigned,
                        key=args.key,
                        regenerate_index=args.update_index,
                    ),
                )

                msg = f"{_progress(i, len(specs))}Pushed {_format_spec(spec)}"
                if len(specs) == 1:
                    msg += f" to {push_url}"
                tty.info(msg)

            except bindist.NoOverwriteException:
                skipped.append(_format_spec(spec))

            # Catch any other exception unless the fail fast option is set
            except Exception as e:
                if args.fail_fast or isinstance(
                    e, (bindist.PickKeyException, bindist.NoKeyException)
                ):
                    raise
                failed.append((_format_spec(spec), e))

    if skipped:
        if len(specs) == 1:
@@ -512,22 +534,389 @@ def push_fn(args):
        raise spack.error.SpackError(
            f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
            "\n".join(
                elide_list(
                    [
                        f"    {_format_spec(spec)}: {e.__class__.__name__}: {e}"
                        for spec, e in failed
                    ],
                    5,
                )
                elide_list([f"    {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
            ),
        )

    # Update the OCI index if requested
    # Update the index if requested
    # TODO: remove update index logic out of bindist; should be once after all specs are pushed,
    # not once per spec.
    if target_image and len(skipped) < len(specs) and args.update_index:
        with tempfile.TemporaryDirectory(
            dir=spack.stage.get_stage_root()
        ) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
            bindist._oci_update_index(target_image, tmpdir, executor)
        ) as tmpdir, _make_pool() as pool:
            _update_index_oci(target_image, tmpdir, pool)
def _get_spack_binary_blob(image_ref: ImageReference) -> Optional[spack.oci.oci.Blob]:
    """Get the spack tarball layer digests and size if it exists"""
    try:
        manifest, config = get_manifest_and_config_with_retry(image_ref)

        return spack.oci.oci.Blob(
            compressed_digest=Digest.from_string(manifest["layers"][-1]["digest"]),
            uncompressed_digest=Digest.from_string(config["rootfs"]["diff_ids"][-1]),
            size=manifest["layers"][-1]["size"],
        )
    except Exception:
        return None


def _push_single_spack_binary_blob(image_ref: ImageReference, spec: spack.spec.Spec, tmpdir: str):
    filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")

    # Create an oci.image.layer aka tarball of the package
    compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename)

    blob = spack.oci.oci.Blob(
        Digest.from_sha256(compressed_tarfile_checksum),
        Digest.from_sha256(tarfile_checksum),
        os.path.getsize(filename),
    )

    # Upload the blob
    upload_blob_with_retry(image_ref, file=filename, digest=blob.compressed_digest)

    # delete the file
    os.unlink(filename)

    return blob


def _retrieve_env_dict_from_config(config: dict) -> dict:
    """Retrieve the environment variables from the image config file.
    Sets a default value for PATH if it is not present.

    Args:
        config (dict): The image config file.

    Returns:
        dict: The environment variables.
    """
    env = {"PATH": "/bin:/usr/bin"}

    if "Env" in config.get("config", {}):
        for entry in config["config"]["Env"]:
            key, value = entry.split("=", 1)
            env[key] = value
    return env
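A small illustration with a hypothetical image config:

    config = {"config": {"Env": ["PATH=/opt/bin", "LANG=C.UTF-8"]}}
    assert _retrieve_env_dict_from_config(config) == {
        "PATH": "/opt/bin",  # overrides the /bin:/usr/bin default
        "LANG": "C.UTF-8",
    }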
def _archspec_to_gooarch(spec: spack.spec.Spec) -> str:
    name = spec.target.family.name
    name_map = {"aarch64": "arm64", "x86_64": "amd64"}
    return name_map.get(name, name)


def _put_manifest(
    base_images: Dict[str, Tuple[dict, dict]],
    checksums: Dict[str, spack.oci.oci.Blob],
    image_ref: ImageReference,
    tmpdir: str,
    extra_config: Optional[dict],
    annotations: Optional[dict],
    *specs: spack.spec.Spec,
):
    architecture = _archspec_to_gooarch(specs[0])

    dependencies = list(
        reversed(
            list(
                s
                for s in traverse.traverse_nodes(
                    specs, order="topo", deptype=("link", "run"), root=True
                )
                if not s.external
            )
        )
    )

    base_manifest, base_config = base_images[architecture]
    env = _retrieve_env_dict_from_config(base_config)

    # If the base image uses `vnd.docker.distribution.manifest.v2+json`, then we use that too.
    # This is because Singularity / Apptainer is very strict about not mixing them.
    base_manifest_mediaType = base_manifest.get(
        "mediaType", "application/vnd.oci.image.manifest.v1+json"
    )
    use_docker_format = (
        base_manifest_mediaType == "application/vnd.docker.distribution.manifest.v2+json"
    )

    spack.user_environment.environment_modifications_for_specs(*specs).apply_modifications(env)

    # Create an oci.image.config file
    config = copy.deepcopy(base_config)

    # Add the diff ids of the dependencies
    for s in dependencies:
        config["rootfs"]["diff_ids"].append(str(checksums[s.dag_hash()].uncompressed_digest))

    # Set the environment variables
    config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]

    if extra_config:
        # From the OCI v1.0 spec:
        # > Any extra fields in the Image JSON struct are considered implementation
        # > specific and MUST be ignored by any implementations which are unable to
        # > interpret them.
        config.update(extra_config)

    config_file = os.path.join(tmpdir, f"{specs[0].dag_hash()}.config.json")

    with open(config_file, "w") as f:
        json.dump(config, f, separators=(",", ":"))

    config_file_checksum = Digest.from_sha256(
        spack.util.crypto.checksum(hashlib.sha256, config_file)
    )

    # Upload the config file
    upload_blob_with_retry(image_ref, file=config_file, digest=config_file_checksum)

    manifest = {
        "mediaType": base_manifest_mediaType,
        "schemaVersion": 2,
        "config": {
            "mediaType": base_manifest["config"]["mediaType"],
            "digest": str(config_file_checksum),
            "size": os.path.getsize(config_file),
        },
        "layers": [
            *(layer for layer in base_manifest["layers"]),
            *(
                {
                    "mediaType": (
                        "application/vnd.docker.image.rootfs.diff.tar.gzip"
                        if use_docker_format
                        else "application/vnd.oci.image.layer.v1.tar+gzip"
                    ),
                    "digest": str(checksums[s.dag_hash()].compressed_digest),
                    "size": checksums[s.dag_hash()].size,
                }
                for s in dependencies
            ),
        ],
    }

    if not use_docker_format and annotations:
        manifest["annotations"] = annotations

    # Finally upload the manifest
    upload_manifest_with_retry(image_ref, manifest=manifest)

    # delete the config file
    os.unlink(config_file)


def _update_base_images(
    *,
    base_image: Optional[ImageReference],
    target_image: ImageReference,
    spec: spack.spec.Spec,
    base_image_cache: Dict[str, Tuple[dict, dict]],
):
    """For a given spec and base image, copy the missing layers of the base image with matching
    arch to the registry of the target image. If no base image is specified, create a dummy
    manifest and config file."""
    architecture = _archspec_to_gooarch(spec)
    if architecture in base_image_cache:
        return
    if base_image is None:
        base_image_cache[architecture] = (
            default_manifest(),
            default_config(architecture, "linux"),
        )
    else:
        base_image_cache[architecture] = copy_missing_layers_with_retry(
            base_image, target_image, architecture
        )
def _push_oci(
    *,
    target_image: ImageReference,
    base_image: Optional[ImageReference],
    installed_specs_with_deps: List[Spec],
    tmpdir: str,
    pool: MaybePool,
    force: bool = False,
) -> Tuple[List[str], Dict[str, Tuple[dict, dict]], Dict[str, spack.oci.oci.Blob]]:
    """Push specs to an OCI registry

    Args:
        target_image: The target OCI image.
        base_image: Optional base image, which will be copied to the target registry.
        installed_specs_with_deps: The installed specs to push, excluding externals,
            including deps, ordered from roots to leaves.
        force: Whether to overwrite existing layers and manifests in the buildcache.

    Returns:
        A tuple consisting of the list of skipped specs already in the build cache,
        a dictionary mapping architectures to base image manifests and configs,
        and a dictionary mapping each spec's dag hash to a blob.
    """

    # Reverse the order, so that dependencies are pushed before their dependents
    installed_specs_with_deps = list(reversed(installed_specs_with_deps))

    # Spec dag hash -> blob
    checksums: Dict[str, spack.oci.oci.Blob] = {}

    # arch -> (manifest, config)
    base_images: Dict[str, Tuple[dict, dict]] = {}

    # Specs not uploaded because they already exist
    skipped = []

    if not force:
        tty.info("Checking for existing specs in the buildcache")
        to_be_uploaded = []

        tags_to_check = (target_image.with_tag(default_tag(s)) for s in installed_specs_with_deps)
        available_blobs = pool.map(_get_spack_binary_blob, tags_to_check)

        for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
            if maybe_blob is not None:
                checksums[spec.dag_hash()] = maybe_blob
                skipped.append(_format_spec(spec))
            else:
                to_be_uploaded.append(spec)
    else:
        to_be_uploaded = installed_specs_with_deps

    if not to_be_uploaded:
        return skipped, base_images, checksums

    tty.info(
        f"{len(to_be_uploaded)} specs need to be pushed to "
        f"{target_image.domain}/{target_image.name}"
    )

    # Upload blobs
    new_blobs = pool.starmap(
        _push_single_spack_binary_blob, ((target_image, spec, tmpdir) for spec in to_be_uploaded)
    )

    # And update the spec to blob mapping
    for spec, blob in zip(to_be_uploaded, new_blobs):
        checksums[spec.dag_hash()] = blob

    # Copy base images if necessary
    for spec in to_be_uploaded:
        _update_base_images(
            base_image=base_image,
            target_image=target_image,
            spec=spec,
            base_image_cache=base_images,
        )

    def extra_config(spec: Spec):
        spec_dict = spec.to_dict(hash=ht.dag_hash)
        spec_dict["buildcache_layout_version"] = 1
        spec_dict["binary_cache_checksum"] = {
            "hash_algorithm": "sha256",
            "hash": checksums[spec.dag_hash()].compressed_digest.digest,
        }
        return spec_dict

    # Upload manifests
    tty.info("Uploading manifests")
    pool.starmap(
        _put_manifest,
        (
            (
                base_images,
                checksums,
                target_image.with_tag(default_tag(spec)),
                tmpdir,
                extra_config(spec),
                {"org.opencontainers.image.description": spec.format()},
                spec,
            )
            for spec in to_be_uploaded
        ),
    )

    # Print the image names of the top-level specs
    for spec in to_be_uploaded:
        tty.info(f"Pushed {_format_spec(spec)} to {target_image.with_tag(default_tag(spec))}")

    return skipped, base_images, checksums


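A minimal sketch of how a caller might consume the three return values; the names target, specs, tmpdir and pool are hypothetical stand-ins, not values from this diff:

skipped, base_images, checksums = _push_oci(
    target_image=target,                 # hypothetical ImageReference
    base_image=None,
    installed_specs_with_deps=specs,     # hypothetical list, roots to leaves
    tmpdir=tmpdir,
    pool=pool,
    force=False,
)
if skipped:
    tty.info(f"{len(skipped)} specs were already in the buildcache")
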
def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
    # Don't allow recursion here, since Spack itself always uploads
    # vnd.oci.image.manifest.v1+json, not vnd.oci.image.index.v1+json
    _, config = get_manifest_and_config_with_retry(image_ref.with_tag(tag), tag, recurse=0)

    # Do very basic validation: if "spec" is a key in the config, it
    # must be a Spec object too.
    return config if "spec" in config else None


def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
    tags = list_tags(image_ref)

    # Fetch all image config files in parallel
    spec_dicts = pool.starmap(
        _config_from_tag, ((image_ref, tag) for tag in tags if tag_is_spec(tag))
    )

    # Populate the database
    db_root_dir = os.path.join(tmpdir, "db_root")
    db = bindist.BuildCacheDatabase(db_root_dir)

    for spec_dict in spec_dicts:
        spec = Spec.from_dict(spec_dict)
        db.add(spec, directory_layout=None)
        db.mark(spec, "in_buildcache", True)

    # Create the index.json file
    index_json_path = os.path.join(tmpdir, "index.json")
    with open(index_json_path, "w") as f:
        db._write_to_file(f)

    # Create an empty config.json file
    empty_config_json_path = os.path.join(tmpdir, "config.json")
    with open(empty_config_json_path, "wb") as f:
        f.write(b"{}")

    # Upload the index.json file
    index_shasum = Digest.from_sha256(spack.util.crypto.checksum(hashlib.sha256, index_json_path))
    upload_blob_with_retry(image_ref, file=index_json_path, digest=index_shasum)

    # Upload the config.json file
    empty_config_digest = Digest.from_sha256(
        spack.util.crypto.checksum(hashlib.sha256, empty_config_json_path)
    )
    upload_blob_with_retry(image_ref, file=empty_config_json_path, digest=empty_config_digest)

    # Push a manifest file that references the index.json file as a layer.
    # Notice that we push this as if it were an image, which of course it is not.
    # When the ORAS spec becomes official, we can use that instead of a fake image.
    # For now we just use the OCI image spec, so that we don't run into issues with
    # automatic garbage collection of blobs that are not referenced by any image manifest.
    oci_manifest = {
        "mediaType": "application/vnd.oci.image.manifest.v1+json",
        "schemaVersion": 2,
        # Config is just an empty {} file for now, and irrelevant
        "config": {
            "mediaType": "application/vnd.oci.image.config.v1+json",
            "digest": str(empty_config_digest),
            "size": os.path.getsize(empty_config_json_path),
        },
        # The buildcache index is the only layer, and it is not actually a tarball; we lie here.
        "layers": [
            {
                "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
                "digest": str(index_shasum),
                "size": os.path.getsize(index_json_path),
            }
        ],
    }

    upload_manifest_with_retry(image_ref.with_tag(default_index_tag), oci_manifest)


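As a side note, the `sha256:<hex>` digest strings used above can be reproduced with the standard library alone; a minimal sketch (the helper name and file path are placeholders):

import hashlib

def sha256_digest(path: str) -> str:
    """Compute an OCI-style digest string for a file, streaming in chunks."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return f"sha256:{h.hexdigest()}"

# e.g. sha256_digest("index.json") -> "sha256:3b0c44..."  (hypothetical output)
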
def install_fn(args):
@@ -571,6 +960,14 @@ def keys_fn(args):
    bindist.get_keys(args.install, args.trust, args.force)


def preview_fn(args):
    """analyze an installed spec and report whether executables and libraries are relocatable"""
    tty.warn(
        "`spack buildcache preview` is deprecated since `spack buildcache push --allow-root` is "
        "now the default. This command will be removed in Spack 0.22"
    )


def check_fn(args: argparse.Namespace):
    """check specs against remote binary mirror(s) to see if any need to be rebuilt

@@ -808,15 +1205,14 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
    if image_ref:
        with tempfile.TemporaryDirectory(
            dir=spack.stage.get_stage_root()
        ) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
            bindist._oci_update_index(image_ref, tmpdir, executor)
        ) as tmpdir, _make_pool() as pool:
            _update_index_oci(image_ref, tmpdir, pool)
        return

    # Otherwise, assume a normal mirror.
    url = mirror.push_url

    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
        bindist.generate_package_index(url, tmpdir)
    bindist.generate_package_index(url_util.join(url, bindist.build_cache_relative_path()))

    if update_keys:
        keys_url = url_util.join(
@@ -824,8 +1220,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
        )

        try:
            with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
                bindist.generate_key_index(keys_url, tmpdir)
            bindist.generate_key_index(keys_url)
        except bindist.CannotListKeys as e:
            # Do not error out if listing keys went wrong. This usually means that the _gpg path
            # does not exist. TODO: distinguish between this and other errors.

@@ -6,7 +6,6 @@
import json
import os
import shutil
import warnings
from urllib.parse import urlparse, urlunparse

import llnl.util.filesystem as fs
@@ -74,7 +73,7 @@ def setup_parser(subparser):
        "--optimize",
        action="store_true",
        default=False,
        help="(DEPRECATED) optimize the gitlab yaml file for size\n\n"
        help="(experimental) optimize the gitlab yaml file for size\n\n"
        "run the generated document through a series of optimization passes "
        "designed to reduce the size of the generated file",
    )
@@ -82,7 +81,7 @@ def setup_parser(subparser):
        "--dependencies",
        action="store_true",
        default=False,
        help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
        help="(experimental) disable DAG scheduling (use 'plain' dependencies)",
    )
    generate.add_argument(
        "--buildcache-destination",
@@ -201,18 +200,6 @@ def ci_generate(args):
    before invoking this command. the value must be the CDash authorization token needed to create
    a build group and register all generated jobs under it
    """
    if args.optimize:
        warnings.warn(
            "The --optimize option has been deprecated, and currently has no effect. "
            "It will be removed in Spack v0.24."
        )

    if args.dependencies:
        warnings.warn(
            "The --dependencies option has been deprecated, and currently has no effect. "
            "It will be removed in Spack v0.24."
        )

    env = spack.cmd.require_active_env(cmd_name="ci generate")

    if args.copy_to:
@@ -225,6 +212,8 @@ def ci_generate(args):

    output_file = args.output_file
    copy_yaml_to = args.copy_to
    run_optimizer = args.optimize
    use_dependencies = args.dependencies
    prune_dag = args.prune_dag
    index_only = args.index_only
    artifacts_root = args.artifacts_root
@@ -245,6 +234,8 @@ def ci_generate(args):
        output_file,
        prune_dag=prune_dag,
        check_index_only=index_only,
        run_optimizer=run_optimizer,
        use_dependencies=use_dependencies,
        artifacts_root=artifacts_root,
        remote_mirror_override=buildcache_destination,
    )

@@ -11,6 +11,7 @@
from argparse import ArgumentParser, Namespace
from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.argparsewriter import ArgparseRstWriter, ArgparseWriter, Command
from llnl.util.tty.colify import colify
@@ -866,6 +867,9 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
            prepend_header(args, f)
            formatter(args, f)

        if args.update_completion:
            fs.set_executable(args.update)

    else:
        prepend_header(args, sys.stdout)
        formatter(args, sys.stdout)

@@ -41,7 +41,7 @@ def setup_parser(subparser):
    )


class AreDepsInstalledVisitor:
class AreDepsInstalledVisitor(traverse.AbstractVisitor):
    def __init__(self, context: Context = Context.BUILD):
        if context == Context.BUILD:
            # TODO: run deps shouldn't be required for build env.
@@ -53,27 +53,27 @@ def __init__(self, context: Context = Context.BUILD):

        self.has_uninstalled_deps = False

    def accept(self, item):
    def accept(self, item: traverse.EdgeAndDepth) -> bool:
        # The root may be installed or uninstalled.
        if item.depth == 0:
        if item[1] == 0:
            return True

        # Early exit after we've seen an uninstalled dep.
        if self.has_uninstalled_deps:
            return False

        spec = item.edge.spec
        spec = item[0].spec
        if not spec.external and not spec.installed:
            self.has_uninstalled_deps = True
            return False

        return True

    def neighbors(self, item):
    def neighbors(self, item: traverse.EdgeAndDepth):
        # Direct deps: follow build & test edges.
        # Transitive deps: follow link / run.
        depflag = self.direct_deps if item.depth == 0 else dt.LINK | dt.RUN
        return item.edge.spec.edges_to_dependencies(depflag=depflag)
        depflag = self.direct_deps if item[1] == 0 else dt.LINK | dt.RUN
        return item[0].spec.edges_to_dependencies(depflag=depflag)

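The accept/neighbors pair above is the general traversal-visitor contract: accept decides whether to continue past a node, neighbors picks which edges to follow next. A self-contained toy illustrating the same shape; all names here are hypothetical, not Spack APIs:

from typing import Dict, List, Tuple

class MaxDepthVisitor:
    """Toy visitor: walk a dict-based graph, pruning below a depth limit."""

    def __init__(self, graph: Dict[str, List[str]], max_depth: int):
        self.graph = graph
        self.max_depth = max_depth
        self.seen: List[str] = []

    def accept(self, item: Tuple[str, int]) -> bool:
        node, depth = item
        self.seen.append(node)
        return depth < self.max_depth  # prune anything deeper

    def neighbors(self, item: Tuple[str, int]) -> List[str]:
        node, _ = item
        return self.graph[node]

def walk(visitor: MaxDepthVisitor, root: str) -> None:
    stack = [(root, 0)]
    while stack:
        node, depth = stack.pop()
        if visitor.accept((node, depth)):
            stack.extend((n, depth + 1) for n in visitor.neighbors((node, depth)))

graph = {"a": ["b", "c"], "b": ["c"], "c": []}
v = MaxDepthVisitor(graph, max_depth=1)
walk(v, "a")  # visits "a" plus its direct neighbors only
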

def emulate_env_utility(cmd_name, context: Context, args):

@@ -3,9 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import llnl.util.tty as tty
from llnl.string import plural

import spack.cmd
import spack.cmd.common.arguments
import spack.environment as ev
@@ -46,9 +43,5 @@ def concretize(parser, args):
    with env.write_transaction():
        concretized_specs = env.concretize(force=args.force, tests=tests)
        if not args.quiet:
            if concretized_specs:
                tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}:")
                ev.display_specs([concrete for _, concrete in concretized_specs])
            else:
                tty.msg("No new specs to concretize.")
            ev.display_specs(concretized_specs)
        env.write()

@@ -156,7 +156,7 @@ def print_flattened_configuration(*, blame: bool) -> None:
    """
    env = ev.active_environment()
    if env is not None:
        pristine = env.manifest.yaml_content
        pristine = env.manifest.pristine_yaml_content
        flattened = pristine.copy()
        flattened[spack.schema.env.TOP_LEVEL_KEY] = pristine[spack.schema.env.TOP_LEVEL_KEY].copy()
    else:
@@ -264,9 +264,7 @@ def config_remove(args):
def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):
    if isinstance(scope, spack.config.SingleFileScope):
        return fs.can_access(cfg_file)
    elif isinstance(scope, spack.config.DirectoryConfigScope):
        return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)
    return False
    return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)


def _config_change_requires_scope(path, spec, scope, match_spec=None):
@@ -364,11 +362,14 @@ def config_change(args):
def config_update(args):
    # Read the configuration files
    spack.config.CONFIG.get_config(args.section, scope=args.scope)
    updates: List[spack.config.ConfigScope] = [
        x
        for x in spack.config.CONFIG.format_updates[args.section]
        if not isinstance(x, spack.config.InternalConfigScope) and x.writable
    ]
    updates: List[spack.config.ConfigScope] = list(
        filter(
            lambda s: not isinstance(
                s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
            ),
            spack.config.CONFIG.format_updates[args.section],
        )
    )

    cannot_overwrite, skip_system_scope = [], False
    for scope in updates:

@@ -446,7 +447,7 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):


def config_revert(args):
    scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.writable_scopes]
    scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]

    # Search for backup files in the configuration scopes
    Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import re
import sys
@@ -933,7 +934,7 @@ def get_repository(args, name):
    # Figure out where the new package should live
    repo_path = args.repo
    if repo_path is not None:
        repo = spack.repo.from_path(repo_path)
        repo = spack.repo.Repo(repo_path)
        if spec.namespace and spec.namespace != repo.namespace:
            tty.die(
                "Can't create package with namespace {0} in repo with "
@@ -941,7 +942,9 @@ def get_repository(args, name):
            )
    else:
        if spec.namespace:
            repo = spack.repo.PATH.get_repo(spec.namespace)
            repo = spack.repo.PATH.get_repo(spec.namespace, None)
            if not repo:
                tty.die("Unknown namespace: '{0}'".format(spec.namespace))
        else:
            repo = spack.repo.PATH.first_repo()

@@ -6,7 +6,6 @@
import os
import platform
import re
import sys
from datetime import datetime
from glob import glob

@@ -63,10 +62,9 @@ def create_db_tarball(args):

    base = os.path.basename(str(spack.store.STORE.root))
    transform_args = []
    # Currently --transform and -s are not supported by Windows native tar
    if "GNU" in tar("--version", output=str):
        transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
    elif sys.platform != "win32":
    else:
        transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]

    wd = os.path.dirname(str(spack.store.STORE.root))
@@ -92,6 +90,7 @@ def report(args):
    print("* **Spack:**", get_version())
    print("* **Python:**", platform.python_version())
    print("* **Platform:**", architecture)
    print("* **Concretizer:**", spack.config.get("config:concretizer"))


def debug(parser, args):

@@ -47,6 +47,16 @@ def inverted_dependencies():
    dependents of, e.g., `mpi`, but virtuals are not included as
    actual dependents.
    """
    dag = {}
    for pkg_cls in spack.repo.PATH.all_package_classes():
        dag.setdefault(pkg_cls.name, set())
        for dep in pkg_cls.dependencies_by_name():
            deps = [dep]

            # expand virtuals if necessary
            if spack.repo.PATH.is_virtual(dep):
                deps += [s.name for s in spack.repo.PATH.providers_for(dep)]

    dag = collections.defaultdict(set)
    for pkg_cls in spack.repo.PATH.all_package_classes():
        for _, deps_by_name in pkg_cls.dependencies.items():

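Both variants in the hunk above build the same kind of reverse mapping: package -> set of packages that depend on it. A self-contained sketch of that inversion over a plain dict; the sample data is made up:

import collections

# forward map: package -> its dependencies
deps = {"hdf5": ["mpi", "zlib"], "netcdf": ["hdf5", "zlib"], "zlib": []}

dependents = collections.defaultdict(set)
for pkg, pkg_deps in deps.items():
    dependents.setdefault(pkg, set())  # ensure leaf packages appear too
    for dep in pkg_deps:
        dependents[dep].add(pkg)

# dependents["zlib"] == {"hdf5", "netcdf"}
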
@@ -9,8 +9,6 @@

import spack.cmd
import spack.config
import spack.fetch_strategy
import spack.repo
import spack.spec
import spack.util.path
import spack.version
@@ -71,15 +69,13 @@ def _retrieve_develop_source(spec, abspath):
    # We construct a package class ourselves, rather than asking for
    # Spec.package, since Spec only allows this when it is concrete
    package = pkg_cls(spec)
    source_stage = package.stage[0]
    if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
        source_stage.fetcher.get_full_repo = True
    if isinstance(package.stage[0].fetcher, spack.fetch_strategy.GitFetchStrategy):
        package.stage[0].fetcher.get_full_repo = True
        # If we retrieved this version before and cached it, we may have
        # done so without cloning the full git repo; likewise, any
        # mirror might store an instance with truncated history.
        source_stage.disable_mirrors()
        package.stage[0].disable_mirrors()

    source_stage.fetcher.set_package(package)
    package.stage.steal_source(abspath)


@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import errno
import glob
import os

@@ -12,13 +11,43 @@
import spack.cmd
import spack.paths
import spack.repo
import spack.util.editor
from spack.spec import Spec
from spack.util.editor import editor

description = "open package files in $EDITOR"
section = "packaging"
level = "short"


def edit_package(name, repo_path, namespace):
    """Opens the requested package file in your favorite $EDITOR.

    Args:
        name (str): The name of the package
        repo_path (str): The path to the repository containing this package
        namespace (str): A valid namespace registered with Spack
    """
    # Find the location of the package
    if repo_path:
        repo = spack.repo.Repo(repo_path)
    elif namespace:
        repo = spack.repo.PATH.get_repo(namespace)
    else:
        repo = spack.repo.PATH
    path = repo.filename_for_package_name(name)

    spec = Spec(name)
    if os.path.exists(path):
        if not os.path.isfile(path):
            tty.die("Something is wrong. '{0}' is not a file!".format(path))
        if not os.access(path, os.R_OK):
            tty.die("Insufficient permissions on '%s'!" % path)
    else:
        raise spack.repo.UnknownPackageError(spec.name)

    editor(path)


def setup_parser(subparser):
    excl_args = subparser.add_mutually_exclusive_group()

@@ -69,67 +98,41 @@ def setup_parser(subparser):
    excl_args.add_argument("-r", "--repo", default=None, help="path to repo to edit package in")
    excl_args.add_argument("-N", "--namespace", default=None, help="namespace of package to edit")

    subparser.add_argument("package", nargs="*", default=None, help="package name")


def locate_package(name: str, repo: spack.repo.Repo) -> str:
    path = repo.filename_for_package_name(name)

    try:
        with open(path, "r"):
            return path
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise spack.repo.UnknownPackageError(name) from e
        tty.die(f"Cannot edit package: {e}")


def locate_file(name: str, path: str) -> str:
    # convert command names to python module name
    if path == spack.paths.command_path:
        name = spack.cmd.python_name(name)

    file_path = os.path.join(path, name)

    # Try to open direct match.
    try:
        with open(file_path, "r"):
            return file_path
    except OSError as e:
        if e.errno != errno.ENOENT:
            tty.die(f"Cannot edit file: {e}")
        pass

    # Otherwise try to find a file that starts with the name
    candidates = glob.glob(file_path + "*")
    exclude_list = [".pyc", "~"]  # exclude binaries and backups
    files = [f for f in candidates if not any(f.endswith(ext) for ext in exclude_list)]
    if len(files) > 1:
        tty.die(
            f"Multiple files start with `{name}`:\n"
            + "\n".join(f"  {os.path.basename(f)}" for f in files)
        )
    elif not files:
        tty.die(f"No file for '{name}' was found in {path}")
    return files[0]
    subparser.add_argument("package", nargs="?", default=None, help="package name")


def edit(parser, args):
    names = args.package
    name = args.package

    # By default, edit package files
    path = spack.paths.packages_path

    # If `--command`, `--test`, or `--module` is chosen, edit those instead
    if args.path:
        paths = [locate_file(name, args.path) for name in names] if names else [args.path]
        spack.util.editor.editor(*paths)
    elif names:
        if args.repo:
            repo = spack.repo.from_path(args.repo)
        elif args.namespace:
            repo = spack.repo.PATH.get_repo(args.namespace)
        else:
            repo = spack.repo.PATH
        paths = [locate_package(name, repo) for name in names]
        spack.util.editor.editor(*paths)
    path = args.path
    if name:
        # convert command names to python module name
        if path == spack.paths.command_path:
            name = spack.cmd.python_name(name)

        path = os.path.join(path, name)
        if not os.path.exists(path):
            files = glob.glob(path + "*")
            exclude_list = [".pyc", "~"]  # exclude binaries and backups
            files = list(filter(lambda x: all(s not in x for s in exclude_list), files))
            if len(files) > 1:
                m = "Multiple files exist with the name {0}.".format(name)
                m += " Please specify a suffix. Files are:\n\n"
                for f in files:
                    m += "  " + os.path.basename(f) + "\n"
                tty.die(m)
            if not files:
                tty.die("No file for '{0}' was found in {1}".format(name, path))
            path = files[0]  # already confirmed only one entry in files

        editor(path)
    elif name:
        edit_package(name, args.repo, args.namespace)
    else:
        # By default open the directory where packages live
        spack.util.editor.editor(spack.paths.packages_path)
        editor(path)

@@ -7,7 +7,7 @@
import os
import re
import sys
from typing import List, Optional, Set
from typing import List, Optional

import llnl.util.tty as tty
import llnl.util.tty.colify as colify
@@ -19,7 +19,6 @@
import spack.detection
import spack.error
import spack.repo
import spack.spec
import spack.util.environment
from spack.cmd.common import arguments

@@ -139,26 +138,14 @@ def external_find(args):
        candidate_packages, path_hints=args.path, max_workers=args.jobs
    )

    new_specs = spack.detection.update_configuration(
    new_entries = spack.detection.update_configuration(
        detected_packages, scope=args.scope, buildable=not args.not_buildable
    )

    # If the user runs `spack external find --not-buildable mpich`, we also mark `mpi`
    # non-buildable, so that the concretizer does not pick a different mpi provider.
    if new_specs and args.not_buildable:
        virtuals: Set[str] = {
            virtual.name
            for new_spec in new_specs
            for virtual_specs in spack.repo.PATH.get_pkg_class(new_spec.name).provided.values()
            for virtual in virtual_specs
        }
        new_virtuals = spack.detection.set_virtuals_nonbuildable(virtuals, scope=args.scope)
        new_specs.extend(spack.spec.Spec(name) for name in new_virtuals)

    if new_specs:
    if new_entries:
        path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
        tty.msg(f"The following specs have been detected on this system and added to {path}")
        spack.cmd.display_specs(new_specs)
        msg = "The following specs have been detected on this system and added to {0}"
        tty.msg(msg.format(path))
        spack.cmd.display_specs(new_entries)
    else:
        tty.msg("No new external packages detected")


@@ -46,10 +46,6 @@ def setup_parser(subparser):
        help="output specs as machine-readable json records",
    )

    subparser.add_argument(
        "-I", "--install-status", action="store_true", help="show install status of packages"
    )

    subparser.add_argument(
        "-d", "--deps", action="store_true", help="output dependencies along with found specs"
    )
@@ -297,24 +293,25 @@ def root_decorator(spec, string):
    )
    print()

    if args.show_concretized:
        tty.msg("Concretized roots")
        cmd.display_specs(env.specs_by_hash.values(), args, decorator=decorator)
        print()

    # Display a header for the installed packages section IF there are installed
    # packages. If there aren't any, we'll just end up printing "0 installed packages"
    # later.
    if results and not args.only_roots:
        tty.msg("Installed packages")


def find(parser, args):
    env = ev.active_environment()
    q_args = query_arguments(args)
    results = args.specs(**q_args)

    env = ev.active_environment()
    if not env and args.only_roots:
        tty.die("-r / --only-roots requires an active environment")
    if not env and args.show_concretized:
        tty.die("-c / --show-concretized requires an active environment")

    if env:
        if args.constraint:
            init_specs = spack.cmd.parse_specs(args.constraint)
            results = env.all_matching_specs(*init_specs)
        else:
            results = env.all_specs()
    else:
        q_args = query_arguments(args)
        results = args.specs(**q_args)

    decorator = make_env_decorator(env) if env else lambda s, f: f

@@ -335,11 +332,6 @@ def find(parser, args):
    if args.loaded:
        results = spack.cmd.filter_loaded_specs(results)

    if args.install_status or args.show_concretized:
        status_fn = spack.spec.Spec.install_status
    else:
        status_fn = None

    # Display the result
    if args.json:
        cmd.display_specs_as_json(results, deps=args.deps)
@@ -348,34 +340,12 @@ def find(parser, args):
    if env:
        display_env(env, args, decorator, results)

    count_suffix = " (not shown)"
    if not args.only_roots:
        display_results = results
        if not args.show_concretized:
            display_results = list(x for x in results if x.installed)
        cmd.display_specs(
            display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
        )
        cmd.display_specs(results, args, decorator=decorator, all_headers=True)
        count_suffix = ""

    # print number of installed packages last (as the list may be long)
    if sys.stdout.isatty() and args.groups:
        installed_suffix = ""
        concretized_suffix = " to be installed"

        if args.only_roots:
            installed_suffix += " (not shown)"
            concretized_suffix += " (not shown)"
        else:
            if env and not args.show_concretized:
                concretized_suffix += " (show with `spack find -c`)"

        pkg_type = "loaded" if args.loaded else "installed"
        spack.cmd.print_how_many_pkgs(
            list(x for x in results if x.installed), pkg_type, suffix=installed_suffix
        )

        if env:
            spack.cmd.print_how_many_pkgs(
                list(x for x in results if not x.installed),
                "concretized",
                suffix=concretized_suffix,
            )
        spack.cmd.print_how_many_pkgs(results, pkg_type, suffix=count_suffix)

@@ -56,6 +56,7 @@ def roots_from_environments(args, active_env):

    # -e says "also preserve things needed by this particular env"
    for env_name_or_dir in args.except_environment:
        print("HMM", env_name_or_dir)
        if ev.exists(env_name_or_dir):
            env = ev.read(env_name_or_dir)
        elif ev.is_env_dir(env_name_or_dir):

@@ -5,12 +5,10 @@

import argparse
import os
import tempfile

import spack.binary_distribution
import spack.mirror
import spack.paths
import spack.stage
import spack.util.gpg
import spack.util.url
from spack.cmd.common import arguments
@@ -117,7 +115,6 @@ def setup_parser(subparser):
        help="URL of the mirror where keys will be published",
    )
    publish.add_argument(
        "--update-index",
        "--rebuild-index",
        action="store_true",
        default=False,
@@ -223,10 +220,9 @@ def gpg_publish(args):
    elif args.mirror_url:
        mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)

    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
        spack.binary_distribution.push_keys(
            mirror, keys=args.keys, tmpdir=tmpdir, update_index=args.update_index
        )
    spack.binary_distribution.push_keys(
        mirror, keys=args.keys, regenerate_index=args.rebuild_index
    )


def gpg(parser, args):

@@ -10,7 +10,6 @@
from typing import List

import llnl.util.filesystem as fs
from llnl.string import plural
from llnl.util import lang, tty

import spack.build_environment
@@ -376,9 +375,7 @@ def _maybe_add_and_concretize(args, env, specs):
    # `spack concretize`
    tests = compute_tests_install_kwargs(env.user_specs, args.test)
    concretized_specs = env.concretize(tests=tests)
    if concretized_specs:
        tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}")
        ev.display_specs([concrete for _, concrete in concretized_specs])
    ev.display_specs(concretized_specs)

    # save view regeneration for later, so that we only do it
    # once, as it can be slow.

@@ -169,9 +169,7 @@ def pkg_hash(args):


def get_grep(required=False):
    """Get a grep command to use with ``spack pkg grep``."""
    grep = exe.which(os.environ.get("SPACK_GREP") or "grep", required=required)
    grep.ignore_quotes = True  # allow `spack pkg grep '"quoted string"'` without warning
    return grep
    return exe.which(os.environ.get("SPACK_GREP") or "grep", required=required)


def pkg_grep(args, unknown_args):

@@ -91,7 +91,7 @@ def repo_add(args):
        tty.die("Not a Spack repository: %s" % path)

    # Make sure it's actually a spack repository by constructing it.
    repo = spack.repo.from_path(canon_path)
    repo = spack.repo.Repo(canon_path)

    # If that succeeds, finally add it to the configuration.
    repos = spack.config.get("repos", scope=args.scope)
@@ -124,7 +124,7 @@ def repo_remove(args):
    # If it is a namespace, remove corresponding repo
    for path in repos:
        try:
            repo = spack.repo.from_path(path)
            repo = spack.repo.Repo(path)
            if repo.namespace == namespace_or_path:
                repos.remove(path)
                spack.config.set("repos", repos, args.scope)
@@ -142,7 +142,7 @@ def repo_list(args):
    repos = []
    for r in roots:
        try:
            repos.append(spack.repo.from_path(r))
            repos.append(spack.repo.Repo(r))
        except spack.repo.RepoError:
            continue


@@ -114,16 +114,15 @@ def _process_result(result, show, required_format, kwargs):

    # dump the solutions as concretized specs
    if "solutions" in show:
        if required_format:
            for spec in result.specs:
                # With -y, just print YAML to output.
                if required_format == "yaml":
                    # use write because to_yaml already has a newline.
                    sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
                elif required_format == "json":
                    sys.stdout.write(spec.to_json(hash=ht.dag_hash))
        else:
            sys.stdout.write(spack.spec.tree(result.specs, color=sys.stdout.isatty(), **kwargs))
        for spec in result.specs:
            # With -y, just print YAML to output.
            if required_format == "yaml":
                # use write because to_yaml already has a newline.
                sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
            elif required_format == "json":
                sys.stdout.write(spec.to_json(hash=ht.dag_hash))
            else:
                sys.stdout.write(spec.tree(color=sys.stdout.isatty(), **kwargs))
        print()

    if result.unsolved_specs and "solutions" in show:

@@ -105,19 +105,11 @@ def spec(parser, args):
    if env:
        env.concretize()
        specs = env.concretized_specs()

        # environments are printed together in a combined tree() invocation,
        # except when using --yaml or --json, which we print spec by spec below.
        if not args.format:
            tree_kwargs["key"] = spack.traverse.by_dag_hash
            tree_kwargs["hashes"] = args.long or args.very_long
            print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
            return
    else:
        tty.die("spack spec requires at least one spec or an active environment")

    for input, output in specs:
        # With --yaml or --json, just print the raw specs to output
        # With -y, just print YAML to output.
        if args.format:
            if args.format == "yaml":
                # use write because to_yaml already has a newline.

@@ -71,7 +71,7 @@ def unload(parser, args):
            "Cannot specify specs on command line when unloading all specs with '--all'"
        )

    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
    if args.specs:
        specs = [
            spack.cmd.disambiguate_spec_from_hashes(spec, hashes)

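The difference between the two lines above only matters off POSIX: os.pathsep is the platform's path-list separator, so splitting on it is portable where a literal ":" is not. A quick sketch (the hash values are hypothetical):

import os

# On Linux/macOS os.pathsep == ":", on Windows it is ";".
loaded = "abc123" + os.pathsep + "def456"
print(loaded.split(os.pathsep))  # ['abc123', 'def456'] on any platform
print(loaded.split(":"))         # wrong on Windows, where the separator is ";"
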
@@ -339,7 +339,7 @@ def add(self, pkg_name, fetcher):
    for pkg_cls in spack.repo.PATH.all_package_classes():
        npkgs += 1

        for v in list(pkg_cls.versions):
        for v in pkg_cls.versions:
            try:
                pkg = pkg_cls(spack.spec.Spec(pkg_cls.name))
                fetcher = fs.for_package_version(pkg, v)

@@ -38,10 +38,10 @@

import spack.cmd
import spack.environment as ev
import spack.filesystem_view as fsv
import spack.schema.projections
import spack.store
from spack.config import validate
from spack.filesystem_view import YamlFilesystemView, view_func_parser
from spack.util import spack_yaml as s_yaml

description = "project packages to a compact naming scheme on the filesystem"
@@ -193,13 +193,17 @@ def view(parser, args):
    ordered_projections = {}

    # What method are we using for this view
    link_type = args.action if args.action in actions_link else "symlink"
    view = fsv.YamlFilesystemView(
    if args.action in actions_link:
        link_fn = view_func_parser(args.action)
    else:
        link_fn = view_func_parser("symlink")

    view = YamlFilesystemView(
        path,
        spack.store.STORE.layout,
        projections=ordered_projections,
        ignore_conflicts=getattr(args, "ignore_conflicts", False),
        link_type=link_type,
        link=link_fn,
        verbose=args.verbose,
    )


@@ -18,6 +18,7 @@
import llnl.util.tty as tty
from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs

import spack.compilers
import spack.error
import spack.schema.environment
import spack.spec
@@ -278,6 +279,11 @@ def debug_flags(self):
    def opt_flags(self):
        return ["-O", "-O0", "-O1", "-O2", "-O3"]

    # Cray PrgEnv name that can be used to load this compiler
    PrgEnv: Optional[str] = None
    # Name of module used to switch versions of this compiler
    PrgEnv_compiler: Optional[str] = None

    def __init__(
        self,
        cspec,

@@ -260,7 +260,7 @@ def _init_compiler_config(
def compiler_config_files():
    config_files = list()
    config = spack.config.CONFIG
    for scope in config.writable_scopes:
    for scope in config.file_scopes:
        name = scope.name
        compiler_config = config.get("compilers", scope=name)
        if compiler_config:
@@ -488,7 +488,7 @@ def supported_compilers_for_host_platform() -> List[str]:
    return supported_compilers_for_platform(host_plat)


def supported_compilers_for_platform(platform: "spack.platforms.Platform") -> List[str]:
def supported_compilers_for_platform(platform: spack.platforms.Platform) -> List[str]:
    """Return a set of compiler class objects supported by Spack
    that are also supported by the provided platform


@@ -25,6 +25,9 @@ class Aocc(Compiler):
    # Subclasses use possible names of Fortran 90 compiler
    fc_names = ["flang"]

    PrgEnv = "PrgEnv-aocc"
    PrgEnv_compiler = "aocc"

    version_argument = "--version"

    @property

@@ -34,9 +34,12 @@ def __init__(self, *args, **kwargs):
    # MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
    suffixes = [r"-mp-\d\.\d"]

    PrgEnv = "PrgEnv-cray"
    PrgEnv_compiler = "cce"

    @property
    def link_paths(self):
        if any("PrgEnv-cray" in m for m in self.modules):
        if any(self.PrgEnv in m for m in self.modules):
            # Old module-based interface to cray compilers
            return {
                "cc": os.path.join("cce", "cc"),

@@ -40,6 +40,9 @@ class Gcc(spack.compiler.Compiler):
        "fc": os.path.join("gcc", "gfortran"),
    }

    PrgEnv = "PrgEnv-gnu"
    PrgEnv_compiler = "gcc"

    @property
    def verbose_flag(self):
        return "-v"

@@ -31,6 +31,9 @@ class Intel(Compiler):
        "fc": os.path.join("intel", "ifort"),
    }

    PrgEnv = "PrgEnv-intel"
    PrgEnv_compiler = "intel"

    if sys.platform == "win32":
        version_argument = "/QV"
    else:
@@ -123,14 +126,3 @@ def fc_pic_flag(self):
    @property
    def stdcxx_libs(self):
        return ("-cxxlib",)

    def setup_custom_environment(self, pkg, env):
        # Edge cases for Intel's oneAPI compilers when using the legacy classic compilers:
        # Always pass flags to disable deprecation warnings, since these warnings can
        # confuse tools that parse the output of compiler commands (e.g. version checks).
        if self.cc and self.cc.endswith("icc") and self.real_version >= Version("2021"):
            env.append_flags("SPACK_ALWAYS_CFLAGS", "-diag-disable=10441")
        if self.cxx and self.cxx.endswith("icpc") and self.real_version >= Version("2021"):
            env.append_flags("SPACK_ALWAYS_CXXFLAGS", "-diag-disable=10441")
        if self.fc and self.fc.endswith("ifort") and self.real_version >= Version("2021"):
            env.append_flags("SPACK_ALWAYS_FFLAGS", "-diag-disable=10448")

@@ -231,55 +231,24 @@ def msvc_version(self):

    @property
    def short_msvc_version(self):
        """This is the shorthand VCToolset version of form
        MSVC<short-ver>
        """
        return "MSVC" + self.vc_toolset_ver

    @property
    def vc_toolset_ver(self):
        """
        The toolset version is the version of the combined set of cl and link.
        This typically relates directly to the VS version, i.e. VS 2022 is v143,
        VS 19 is v142, etc.
        This value is defined by the first three digits of the major + minor
        version of the VS toolset (143 for 14.3x.bbbbb). Traditionally the
        minor version has remained a static two digit number for a VS release
        series; however, as of VS22, this is no longer true: both
        14.4x.bbbbb and 14.3x.bbbbb are considered valid VS22 VC toolset
        versions due to a change in toolset minor version sentiment.

        This is *NOT* the full version, for that see
        This is the shorthand VCToolset version of form
        MSVC<short-ver> *NOT* the full version, for that see
        Msvc.msvc_version or MSVC.platform_toolset_ver for the
        raw platform toolset version
        """
        ver = self.msvc_version[:2].joined.string[:3]
        return ver
        ver = self.platform_toolset_ver
        return "MSVC" + ver

    @property
    def platform_toolset_ver(self):
        """
        This is the platform toolset version of the current MSVC compiler,
        i.e. 142. The platform toolset is the MSVC library/compiler versions
        targeted by compilation (this is different from the VC toolset).


        i.e. 142.
        This is different from the VC toolset version as established
        by `short_msvc_version`, but the two are typically represented by the
        same three-digit value.
        by `short_msvc_version`
        """
        # Typically VS toolset version and platform toolset versions match
        # VS22 introduces the first divergence of VS toolset version
        # (144 for "recent" releases) and platform toolset version (143)
        # so it needs additional handling until MS releases v144
        # (assuming v144 is also for VS22)
        # or adds better support for detection
        # TODO: (johnwparent) Update this logic for the next platform toolset
        # or VC toolset version update
        toolset_ver = self.vc_toolset_ver
        vs22_toolset = Version(toolset_ver) > Version("142")
        return toolset_ver if not vs22_toolset else "143"
        return self.msvc_version[:2].joined.string[:3]

    def _compiler_version(self, compiler):
        """Returns version object for given compiler"""

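To make the digit arithmetic above concrete, here is a minimal, Spack-independent sketch of how the first three digits of the cl/link version map to a toolset tag, with the VS22 divergence folded back to 143; the helper name is hypothetical:

def toolset_tag(msvc_version: str) -> str:
    """First three digits of major+minor, with versions past 142 folded to 143."""
    major, minor = msvc_version.split(".")[:2]
    tag = (major + minor)[:3]  # "14.29.30133" -> "142", "14.34.xxxxx" -> "143"
    return "143" if int(tag) > 142 else tag

assert toolset_tag("14.29.30133") == "142"
assert toolset_tag("14.34.31933") == "143"
assert toolset_tag("14.41.34120") == "143"  # VS toolset 144, platform toolset 143
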
@@ -29,6 +29,9 @@ class Nvhpc(Compiler):
        "fc": os.path.join("nvhpc", "nvfortran"),
    }

    PrgEnv = "PrgEnv-nvhpc"
    PrgEnv_compiler = "nvhpc"

    version_argument = "--version"
    version_regex = r"nv[^ ]* (?:[^ ]+ Dev-r)?([0-9.]+)(?:-[0-9]+)?"


@@ -4,12 +4,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
from os.path import dirname, join
from os.path import dirname

from llnl.util import tty

from spack.compiler import Compiler
from spack.version import Version


class Oneapi(Compiler):
@@ -33,6 +32,9 @@ class Oneapi(Compiler):
        "fc": os.path.join("oneapi", "ifx"),
    }

    PrgEnv = "PrgEnv-oneapi"
    PrgEnv_compiler = "oneapi"

    version_argument = "--version"
    version_regex = r"(?:(?:oneAPI DPC\+\+(?:\/C\+\+)? Compiler)|(?:\(IFORT\))|(?:\(IFX\))) (\S+)"

@@ -133,22 +135,8 @@ def setup_custom_environment(self, pkg, env):
        # It is located in the same directory as the driver. Error message:
        # clang++: error: unable to execute command:
        # Executable "sycl-post-link" doesn't exist!
        # This also ensures that shared objects and libraries required by the compiler,
        # e.g. libonnx, can be found successfully.
        # Due to a fix, this is no longer required for oneAPI versions >= 2024.2.
        if self.cxx and pkg.spec.satisfies("%oneapi@:2024.1"):
        if self.cxx:
            env.prepend_path("PATH", dirname(self.cxx))
            env.prepend_path("LD_LIBRARY_PATH", join(dirname(dirname(self.cxx)), "lib"))

        # Edge cases for Intel's oneAPI compilers when using the legacy classic compilers:
        # Always pass flags to disable deprecation warnings, since these warnings can
        # confuse tools that parse the output of compiler commands (e.g. version checks).
        if self.cc and self.cc.endswith("icc") and self.real_version >= Version("2021"):
            env.append_flags("SPACK_ALWAYS_CFLAGS", "-diag-disable=10441")
        if self.cxx and self.cxx.endswith("icpc") and self.real_version >= Version("2021"):
            env.append_flags("SPACK_ALWAYS_CXXFLAGS", "-diag-disable=10441")
        if self.fc and self.fc.endswith("ifort") and self.real_version >= Version("2021"):
            env.append_flags("SPACK_ALWAYS_FFLAGS", "-diag-disable=10448")

        # 2024 release bumped the libsycl version because of an ABI
        # change, 2024 compilers are required. You will see this

@@ -30,6 +30,9 @@ class Pgi(Compiler):
        "fc": os.path.join("pgi", "pgfortran"),
    }

    PrgEnv = "PrgEnv-pgi"
    PrgEnv_compiler = "pgi"

    version_argument = "-V"
    ignore_version_errors = [2]  # `pgcc -V` on PowerPC annoyingly returns 2
    version_regex = r"pg[^ ]* ([0-9.]+)-[0-9]+ (LLVM )?[^ ]+ target on "

@@ -23,6 +23,9 @@ class Rocmcc(spack.compilers.clang.Clang):
    # Subclasses use possible names of Fortran 90 compiler
    fc_names = ["amdflang"]

    PrgEnv = "PrgEnv-amd"
    PrgEnv_compiler = "amd"

    @property
    def link_paths(self):
        link_paths = {

@@ -2,11 +2,29 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""
(DEPRECATED) Used to contain the code for the original concretizer
Functions here are used to take abstract specs and make them concrete.
For example, if a spec asks for a version between 1.8 and 1.9, these
functions will take the most recent 1.9 version of the
package available. Or, if the user didn't specify a compiler for a
spec, then this will assign a compiler to the spec based on defaults
or user preferences.

TODO: make this customizable and allow users to configure
concretization policies.
"""
import functools
import platform
import tempfile
from contextlib import contextmanager
from itertools import chain
from typing import Union

import archspec.cpu

import llnl.util.lang
import llnl.util.tty as tty

import spack.abi
import spack.compilers
@@ -19,20 +37,639 @@
import spack.target
import spack.tengine
import spack.util.path
import spack.variant as vt
from spack.package_prefs import PackagePrefs, is_spec_buildable, spec_externals
from spack.version import ClosedOpenRange, VersionList, ver

#: implements rudimentary logic for ABI compatibility
_abi: Union[spack.abi.ABI, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(
    lambda: spack.abi.ABI()
)


@functools.total_ordering
class reverse_order:
    """Helper for creating key functions.

    This is a wrapper that inverts the sense of the natural
    comparisons on the object.
    """

    def __init__(self, value):
        self.value = value

    def __eq__(self, other):
        return other.value == self.value

    def __lt__(self, other):
        return other.value < self.value

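reverse_order lets a single ascending sort mix ascending and descending criteria: wrap only the field that should sort the other way. A quick sketch of that idea with plain tuples, using the reverse_order helper defined just above; the sample data is made up:

pairs = [("zlib", 2), ("zlib", 3), ("cmake", 1)]

# Ascending by name, but descending by version-like number:
ordered = sorted(pairs, key=lambda p: (p[0], reverse_order(p[1])))
# -> [("cmake", 1), ("zlib", 3), ("zlib", 2)]
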
class Concretizer:
    """(DEPRECATED) Only contains logic to enable/disable compiler existence checks."""
    """You can subclass this class to override some of the default
    concretization strategies, or you can override all of them.
    """

    #: Controls whether we check that compiler versions actually exist
    #: during concretization. Used for testing and for mirror creation
    check_for_compiler_existence = None

    def __init__(self):
    #: Packages that the old concretizer cannot deal with correctly, and cannot build anyway.
    #: Those will not be considered as providers for virtuals.
    non_buildable_packages = {"glibc", "musl"}

    def __init__(self, abstract_spec=None):
        if Concretizer.check_for_compiler_existence is None:
            Concretizer.check_for_compiler_existence = not spack.config.get(
                "config:install_missing_compilers", False
            )
        self.abstract_spec = abstract_spec
        self._adjust_target_answer_generator = None

    def concretize_develop(self, spec):
        """
        Add ``dev_path=*`` variant to packages built from local source.
        """
        env = spack.environment.active_environment()
        dev_info = env.dev_specs.get(spec.name, {}) if env else {}
        if not dev_info:
            return False

        path = spack.util.path.canonicalize_path(dev_info["path"], default_wd=env.path)

        if "dev_path" in spec.variants:
            assert spec.variants["dev_path"].value == path
            changed = False
        else:
            spec.variants.setdefault("dev_path", vt.SingleValuedVariant("dev_path", path))
            changed = True
        changed |= spec.constrain(dev_info["spec"])
        return changed

    def _valid_virtuals_and_externals(self, spec):
        """Returns a list of candidate virtual dep providers and external
        packages that could be used to concretize a spec.

        Preferred specs come first in the list.
        """
        # First construct a list of concrete candidates to replace spec with.
        candidates = [spec]
        pref_key = lambda spec: 0  # no-op pref key

        if spec.virtual:
            candidates = [
                s
                for s in spack.repo.PATH.providers_for(spec)
                if s.name not in self.non_buildable_packages
            ]
            if not candidates:
                raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)

            # Find nearest spec in the DAG (up then down) that has prefs.
            spec_w_prefs = find_spec(
                spec, lambda p: PackagePrefs.has_preferred_providers(p.name, spec.name), spec
            )  # default to spec itself.

            # Create a key to sort candidates by the prefs we found
            pref_key = PackagePrefs(spec_w_prefs.name, "providers", spec.name)

        # For each candidate package, if it has externals, add those
        # to the usable list. If it's not buildable, then *only* add
        # the externals.
        usable = []
        for cspec in candidates:
            if is_spec_buildable(cspec):
                usable.append(cspec)

            externals = spec_externals(cspec)
            for ext in externals:
                if ext.intersects(spec):
                    usable.append(ext)

        # If nothing is in the usable list now, it's because we aren't
        # allowed to build anything.
        if not usable:
            raise NoBuildError(spec)

        # Use a sort key to order the results
        return sorted(
            usable,
            key=lambda spec: (
                not spec.external,  # prefer externals
                pref_key(spec),  # respect prefs
                spec.name,  # group by name
                reverse_order(spec.versions),  # latest version
                spec,  # natural order
            ),
        )

    def choose_virtual_or_external(self, spec: spack.spec.Spec):
        """Given a list of candidate virtual and external packages, try to
        find one that is most ABI compatible.
        """
        candidates = self._valid_virtuals_and_externals(spec)
        if not candidates:
            return candidates

        # Find the nearest spec in the dag that has a compiler. We'll
        # use that spec to calibrate compiler compatibility.
        abi_exemplar = find_spec(spec, lambda x: x.compiler)
        if abi_exemplar is None:
            abi_exemplar = spec.root

        # Sort candidates from most to least compatibility.
        # We reverse because True > False.
        # Sort is stable, so candidates keep their order.
        return sorted(
            candidates,
            reverse=True,
            key=lambda spec: (
                _abi.compatible(spec, abi_exemplar, loose=True),
                _abi.compatible(spec, abi_exemplar),
            ),
        )

    def concretize_version(self, spec):
        """If the spec is already concrete, return. Otherwise take
        the preferred version from spackconfig, and default to the package's
        version if there are no available versions.

        TODO: In many cases we probably want to look for installed
            versions of each package and use an installed version
            if we can link to it. The policy implemented here will
            tend to rebuild a lot of stuff because it will prefer
            a compiler in the spec to any compiler already-
            installed things were built with. There is likely
            some better policy that finds some middle ground
            between these two extremes.
        """
        # return if already concrete.
        if spec.versions.concrete:
            return False

        # List of versions we could consider, in sorted order
        pkg_versions = spec.package_class.versions
        usable = [v for v in pkg_versions if any(v.intersects(sv) for sv in spec.versions)]

        yaml_prefs = PackagePrefs(spec.name, "version")

        # The keys below show the order of precedence of factors used
        # to select a version when concretizing. The item with
        # the "largest" key will be selected.
        #
        # NOTE: When COMPARING VERSIONS, the '@develop' version is always
        # larger than other versions. BUT when CONCRETIZING,
        # the largest NON-develop version is selected by default.
        keyfn = lambda v: (
            # ------- Special direction from the user
            # Respect order listed in packages.yaml
            -yaml_prefs(v),
            # The preferred=True flag (packages or packages.yaml or both?)
            pkg_versions.get(v).get("preferred", False),
            # ------- Regular case: use latest non-develop version by default.
            # Avoid @develop version, which would otherwise be the "largest"
            # in straight version comparisons
            not v.isdevelop(),
            # Compare the version itself
            # This includes the logic:
            # a) develop > everything (disabled by "not v.isdevelop()" above)
            # b) numeric > non-numeric
            # c) Numeric or string comparison
            v,
        )
        usable.sort(key=keyfn, reverse=True)

        if usable:
            spec.versions = ver([usable[0]])
        else:
            # We don't know of any SAFE versions that match the given
            # spec. Grab the spec's versions and grab the highest
            # *non-open* part of the range of versions it specifies.
            # Someone else can raise an error if this happens,
            # e.g. when we go to fetch it and don't know how. But it
            # *might* work.
            if not spec.versions or spec.versions == VersionList([":"]):
                raise NoValidVersionError(spec)
            else:
                last = spec.versions[-1]
                if isinstance(last, ClosedOpenRange):
                    range_as_version = VersionList([last]).concrete_range_as_version
                    if range_as_version:
                        spec.versions = ver([range_as_version])
                    else:
                        raise NoValidVersionError(spec)
                else:
                    spec.versions = ver([last])

        return True  # Things changed

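The tuple returned by keyfn sorts lexicographically, so each line acts as a tie-breaker for the ones above it; sorted with reverse=True, the largest tuple wins. A tiny illustration of the develop-avoidance term with made-up version strings:

versions = ["1.8", "develop", "2.1"]

# Mimic the precedence (is-not-develop, version) used above:
best = max(versions, key=lambda v: (v != "develop", v))
# -> "2.1": "develop" loses on the first term despite comparing "largest" as a string
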
    def concretize_architecture(self, spec):
        """If the spec is empty, provide the defaults of the platform. If the
        architecture is not a string type, then check if either the platform,
        target or operating system are concretized. If any of the fields are
        changed then return True. If everything is concretized (i.e., the
        architecture attribute is a namedtuple of classes) then return False.
        If the target is a string type, then convert the string into a
        concretized architecture. If it has no architecture and the root of the
        DAG has an architecture, then use the root; otherwise use the defaults
        on the platform.
        """
        # ensure type safety for the architecture
        if spec.architecture is None:
            spec.architecture = spack.spec.ArchSpec()

        if spec.architecture.concrete:
            return False

        # Get platform of nearest spec with a platform, including spec
        # If spec has a platform, easy
        if spec.architecture.platform:
            new_plat = spack.platforms.by_name(spec.architecture.platform)
        else:
            # Else if anyone else has a platform, take the closest one
            # Search up, then down, along build/link deps first
            # Then any nearest. Algorithm from compilerspec search
            platform_spec = find_spec(spec, lambda x: x.architecture and x.architecture.platform)
            if platform_spec:
                new_plat = spack.platforms.by_name(platform_spec.architecture.platform)
            else:
                # If no platform anywhere in this spec, grab the default
                new_plat = spack.platforms.host()

        # Get nearest spec with relevant platform and an os
        # Generally, same algorithm as finding platform, except we only
        # consider specs that have a platform
        if spec.architecture.os:
            new_os = spec.architecture.os
        else:
            new_os_spec = find_spec(
                spec,
                lambda x: (
                    x.architecture
                    and x.architecture.platform == str(new_plat)
                    and x.architecture.os
                ),
            )
            if new_os_spec:
                new_os = new_os_spec.architecture.os
            else:
                new_os = new_plat.operating_system("default_os")

        # Get the nearest spec with relevant platform and a target
        # Generally, same algorithm as finding os
        curr_target = None
        if spec.architecture.target:
            curr_target = spec.architecture.target
        if spec.architecture.target and spec.architecture.target_concrete:
            new_target = spec.architecture.target
        else:
            new_target_spec = find_spec(
                spec,
                lambda x: (
                    x.architecture
                    and x.architecture.platform == str(new_plat)
                    and x.architecture.target
                    and x.architecture.target != curr_target
                ),
            )
            if new_target_spec:
                if curr_target:
                    # constrain one target by the other
                    new_target_arch = spack.spec.ArchSpec(
                        (None, None, new_target_spec.architecture.target)
                    )
                    curr_target_arch = spack.spec.ArchSpec((None, None, curr_target))
                    curr_target_arch.constrain(new_target_arch)
                    new_target = curr_target_arch.target
                else:
                    new_target = new_target_spec.architecture.target
            else:
                # To get default platform, consider package prefs
                if PackagePrefs.has_preferred_targets(spec.name):
                    new_target = self.target_from_package_preferences(spec)
                else:
                    new_target = new_plat.target("default_target")
                if curr_target:
                    # convert to ArchSpec to compare satisfaction
                    new_target_arch = spack.spec.ArchSpec((None, None, str(new_target)))
                    curr_target_arch = spack.spec.ArchSpec((None, None, str(curr_target)))

                    if not new_target_arch.intersects(curr_target_arch):
                        # new_target is an incorrect guess based on preferences
                        # and/or default
                        valid_target_ranges = str(curr_target).split(",")
                        for target_range in valid_target_ranges:
                            t_min, t_sep, t_max = target_range.partition(":")
                            if not t_sep:
                                new_target = t_min
                                break
                            elif t_max:
                                new_target = t_max
                                break
                            elif t_min:
                                # TODO: something better than picking first
                                new_target = t_min
                                break

        # Construct new architecture, compute whether spec changed
        arch_spec = (str(new_plat), str(new_os), str(new_target))
        new_arch = spack.spec.ArchSpec(arch_spec)
        spec_changed = new_arch != spec.architecture
        spec.architecture = new_arch
        return spec_changed

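# --- Illustration (not part of the diff): how the range handling above
# behaves for each kind of entry. str.partition(":") never raises; it just
# returns empty strings for the missing pieces.
for target_range in "x86_64,icelake:,:skylake,haswell:cascadelake".split(","):
    t_min, t_sep, t_max = target_range.partition(":")
    if not t_sep:        # plain target, e.g. "x86_64"
        choice = t_min
    elif t_max:          # bounded above, e.g. ":skylake" or "haswell:cascadelake"
        choice = t_max
    else:                # open above, e.g. "icelake:"
        choice = t_min
    print(f"{target_range!r:>22} -> {choice}")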
    def target_from_package_preferences(self, spec):
        """Returns the preferred target from the package preferences if
        there are any.

        Args:
            spec: abstract spec to be concretized
        """
        target_prefs = PackagePrefs(spec.name, "target")
        target_specs = [spack.spec.Spec("target=%s" % tname) for tname in archspec.cpu.TARGETS]

        def tspec_filter(s):
            # Filter target specs by whether the architecture
            # family is the current machine type. This ensures
            # we only consider x86_64 targets when on an
            # x86_64 machine, etc. This may need to change to
            # enable setting cross compiling as a default
            target = archspec.cpu.TARGETS[str(s.architecture.target)]
            arch_family_name = target.family.name
            return arch_family_name == platform.machine()

        # Sort filtered targets by package prefs
        target_specs = list(filter(tspec_filter, target_specs))
        target_specs.sort(key=target_prefs)
        new_target = target_specs[0].architecture.target
        return new_target

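# --- Illustration (not part of the diff), using the real archspec package:
# keep only targets whose family matches the running machine, as
# tspec_filter does. On hosts where platform.machine() is not an archspec
# family name (e.g. "AMD64" on Windows), this list may come out empty.
import platform

import archspec.cpu

same_family = [
    name
    for name, target in archspec.cpu.TARGETS.items()
    if target.family.name == platform.machine()
]
print(sorted(same_family)[:5])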
    def concretize_variants(self, spec):
        """If the spec already has variants filled in, return. Otherwise, add
        the user preferences from packages.yaml or the default variants from
        the package specification.
        """
        changed = False
        preferred_variants = PackagePrefs.preferred_variants(spec.name)
        pkg_cls = spec.package_class
        for name, entry in pkg_cls.variants.items():
            variant, when = entry
            var = spec.variants.get(name, None)
            if var and "*" in var:
                # remove the variant wildcard before concretizing: a wildcard
                # cannot be combined with other values in a multi-valued
                # variant, a concrete variant cannot have the value wildcard,
                # and a wildcard does not constrain a variant
                spec.variants.pop(name)
            if name not in spec.variants and any(spec.satisfies(w) for w in when):
                changed = True
                if name in preferred_variants:
                    spec.variants[name] = preferred_variants.get(name)
                else:
                    spec.variants[name] = variant.make_default()
            if name in spec.variants and not any(spec.satisfies(w) for w in when):
                raise vt.InvalidVariantForSpecError(name, when, spec)

        return changed

    def concretize_compiler(self, spec):
        """If the spec already has a compiler, we're done. If not, then take
        the compiler used for the nearest ancestor with a compiler
        spec and use that. If the ancestor's compiler is not
        concrete, then use the preferred compiler as specified in
        spackconfig.

        Intuition: Use the spackconfig default if no package that depends on
        this one has a strict compiler requirement. Otherwise, try to
        build with the compiler that will be used by libraries that
        link to this one, to maximize compatibility.
        """
        # Pass on concretizing the compiler if the target or operating system
        # is not yet determined
        if not spec.architecture.concrete:
            # We haven't changed, but other changes need to happen before we
            # continue. `return True` here to force concretization to keep
            # running.
            return True

        # Only use a matching compiler if it is of the proper style.
        # Takes advantage of the proper logic already existing in
        # compiler_for_spec. We should think about whether this can be made
        # more efficient.
        def _proper_compiler_style(cspec, aspec):
            compilers = spack.compilers.compilers_for_spec(cspec, arch_spec=aspec)
            # If the spec passed as argument is concrete, we want to check
            # that the versions match exactly
            if (
                cspec.concrete
                and compilers
                and cspec.version not in [c.version for c in compilers]
            ):
                return []

            return compilers

        if spec.compiler and spec.compiler.concrete:
            if self.check_for_compiler_existence and not _proper_compiler_style(
                spec.compiler, spec.architecture
            ):
                _compiler_concretization_failure(spec.compiler, spec.architecture)
            return False

        # Find another spec that has a compiler, or the root if none do
        other_spec = spec if spec.compiler else find_spec(spec, lambda x: x.compiler, spec.root)
        assert other_spec
        other_compiler = other_spec.compiler

        # Check if the compiler is already fully specified
        if other_compiler and other_compiler.concrete:
            if self.check_for_compiler_existence and not _proper_compiler_style(
                other_compiler, spec.architecture
            ):
                _compiler_concretization_failure(other_compiler, spec.architecture)
            spec.compiler = other_compiler
            return True

        if other_compiler:  # Another node has abstract compiler information
            compiler_list = spack.compilers.find_specs_by_arch(other_compiler, spec.architecture)
            if not compiler_list:
                # We don't have a matching compiler installed
                if not self.check_for_compiler_existence:
                    # Concretize compiler spec versions as a package to build
                    cpkg_spec = spack.compilers.pkg_spec_for_compiler(other_compiler)
                    self.concretize_version(cpkg_spec)
                    spec.compiler = spack.spec.CompilerSpec(
                        other_compiler.name, cpkg_spec.versions
                    )
                    return True
                else:
                    # No compiler with a satisfactory spec was found
                    raise UnavailableCompilerVersionError(other_compiler, spec.architecture)
        else:
            # We have no hints to go by, grab any compiler
            compiler_list = spack.compilers.all_compiler_specs()
            if not compiler_list:
                # Spack has no compilers.
                raise spack.compilers.NoCompilersError()

        # By default, prefer later versions of compilers
        compiler_list = sorted(compiler_list, key=lambda x: (x.name, x.version), reverse=True)
        ppk = PackagePrefs(other_spec.name, "compiler")
        matches = sorted(compiler_list, key=ppk)

        # copy concrete version into other_compiler
        try:
            spec.compiler = next(
                c for c in matches if _proper_compiler_style(c, spec.architecture)
            ).copy()
        except StopIteration:
            # No compiler with a satisfactory spec has a suitable arch
            _compiler_concretization_failure(other_compiler, spec.architecture)

        assert spec.compiler.concrete
        return True  # things changed.

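# --- Illustration (not part of the diff): the generic next()/StopIteration
# idiom used above to grab the first acceptable candidate and to turn
# exhaustion into a dedicated error path.
def first(iterable, predicate):
    try:
        return next(x for x in iterable if predicate(x))
    except StopIteration:
        raise LookupError("no candidate satisfied the predicate") from None

print(first([3, 8, 11], lambda x: x > 5))  # 8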
    def concretize_compiler_flags(self, spec):
        """
        The compiler flags are updated to match those of the spec whose
        compiler is used, defaulting to no compiler flags in the spec.
        Default specs set at the compiler level will still be added later.
        """
        # Pass on concretizing the compiler flags if the target or operating
        # system is not set.
        if not spec.architecture.concrete:
            # We haven't changed, but other changes need to happen before we
            # continue. `return True` here to force concretization to keep
            # running.
            return True

        compiler_match = lambda other: (
            spec.compiler == other.compiler and spec.architecture == other.architecture
        )

        ret = False
        for flag in spack.spec.FlagMap.valid_compiler_flags():
            if flag not in spec.compiler_flags:
                spec.compiler_flags[flag] = list()
            try:
                nearest = next(
                    p
                    for p in spec.traverse(direction="parents")
                    if (compiler_match(p) and (p is not spec) and flag in p.compiler_flags)
                )
                nearest_flags = nearest.compiler_flags.get(flag, [])
                flags = spec.compiler_flags.get(flag, [])
                if set(nearest_flags) - set(flags):
                    spec.compiler_flags[flag] = list(llnl.util.lang.dedupe(nearest_flags + flags))
                    ret = True
            except StopIteration:
                pass

        # Include the compiler flag defaults from the config files
        # This ensures that spack will detect conflicts that stem from a change
        # in default compiler flags.
        try:
            compiler = spack.compilers.compiler_for_spec(spec.compiler, spec.architecture)
        except spack.compilers.NoCompilerForSpecError:
            if self.check_for_compiler_existence:
                raise
            return ret
        for flag in compiler.flags:
            config_flags = compiler.flags.get(flag, [])
            flags = spec.compiler_flags.get(flag, [])
            spec.compiler_flags[flag] = list(llnl.util.lang.dedupe(config_flags + flags))
            if set(config_flags) - set(flags):
                ret = True

        return ret

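# --- Illustration (not part of the diff): the flag-merge step in a
# stand-alone form. llnl.util.lang.dedupe removes duplicates while keeping
# first-seen order; a minimal equivalent is sketched here.
def dedupe(seq):
    seen = set()
    for item in seq:
        if item not in seen:
            seen.add(item)
            yield item

nearest_flags = ["-O2", "-fPIC"]
local_flags = ["-fPIC", "-g"]
print(list(dedupe(nearest_flags + local_flags)))  # ['-O2', '-fPIC', '-g']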
    def adjust_target(self, spec):
        """Adjusts the target microarchitecture if the compiler is too old
        to support the default one.

        Args:
            spec: spec to be concretized

        Returns:
            True if spec was modified, False otherwise
        """
        # To minimize the impact on performance, this function will attempt
        # to adjust the target only on the very first call once the necessary
        # information is set. It will just return False on subsequent calls.
        # The way this is achieved is by initializing a generator and making
        # this function return the next answer.
        if not (spec.architecture and spec.architecture.concrete):
            # Not ready, but keep going because we have work to do later
            return True

        def _make_only_one_call(spec):
            yield self._adjust_target(spec)
            while True:
                yield False

        if self._adjust_target_answer_generator is None:
            self._adjust_target_answer_generator = _make_only_one_call(spec)

        return next(self._adjust_target_answer_generator)

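# --- Illustration (not part of the diff): the "answer once, then always
# False" generator trick used by adjust_target, reduced to its core.
def make_only_one_call(compute):
    yield compute()    # the real work happens exactly once
    while True:
        yield False    # every later call is a cheap no-op

gen = make_only_one_call(lambda: True)
print(next(gen), next(gen), next(gen))  # True False False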
    def _adjust_target(self, spec):
        """Assumes that the architecture and the compiler have been
        set already and checks if the current target microarchitecture
        is the default and can be optimized by the compiler.

        If not, downgrades the microarchitecture until a suitable one
        is found. If none can be found, raise an error.

        Args:
            spec: spec to be concretized

        Returns:
            True if any modification happened, False otherwise
        """
        import archspec.cpu

        # Try to adjust the target only if it is the default
        # target for this platform
        current_target = spec.architecture.target
        current_platform = spack.platforms.by_name(spec.architecture.platform)

        default_target = current_platform.target("default_target")
        if PackagePrefs.has_preferred_targets(spec.name):
            default_target = self.target_from_package_preferences(spec)

        if current_target != default_target or (
            self.abstract_spec
            and self.abstract_spec.architecture
            and self.abstract_spec.architecture.concrete
        ):
            return False

        try:
            current_target.optimization_flags(spec.compiler)
        except archspec.cpu.UnsupportedMicroarchitecture:
            microarchitecture = current_target.microarchitecture
            for ancestor in microarchitecture.ancestors:
                candidate = None
                try:
                    candidate = spack.target.Target(ancestor)
                    candidate.optimization_flags(spec.compiler)
                except archspec.cpu.UnsupportedMicroarchitecture:
                    continue

                if candidate is not None:
                    msg = (
                        "{0.name}@{0.version} cannot build optimized "
                        'binaries for "{1}". Using best target possible: '
                        '"{2}"'
                    )
                    msg = msg.format(spec.compiler, current_target, candidate)
                    tty.warn(msg)
                    spec.architecture.target = candidate
                    return True
            else:
                raise

        return False
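# --- Illustration (not part of the diff), using the real archspec package:
# the ancestor chain _adjust_target walks when downgrading, ordered roughly
# from most to least specific (exact names depend on the archspec version).
import archspec.cpu

uarch = archspec.cpu.TARGETS["haswell"]
print([a.name for a in uarch.ancestors])
# e.g. ['ivybridge', 'sandybridge', ..., 'x86_64']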

@contextmanager
@@ -82,6 +719,19 @@ def find_spec(spec, condition, default=None):
    return default  # Nothing matched the condition; return default.


def _compiler_concretization_failure(compiler_spec, arch):
    # Distinguish between the case that there are compilers for
    # the arch but not with the given compiler spec and the case that
    # there are no compilers for the arch at all
    if not spack.compilers.compilers_for_arch(arch):
        available_os_targets = set(
            (c.operating_system, c.target) for c in spack.compilers.all_compilers()
        )
        raise NoCompilersForArchError(arch, available_os_targets)
    else:
        raise UnavailableCompilerVersionError(compiler_spec, arch)

def concretize_specs_together(*abstract_specs, **kwargs):
    """Given a number of specs as input, tries to concretize them together.

@@ -94,6 +744,12 @@ def concretize_specs_together(*abstract_specs, **kwargs):
    Returns:
        List of concretized specs
    """
    if spack.config.get("config:concretizer", "clingo") == "original":
        return _concretize_specs_together_original(*abstract_specs, **kwargs)
    return _concretize_specs_together_new(*abstract_specs, **kwargs)


def _concretize_specs_together_new(*abstract_specs, **kwargs):
    import spack.solver.asp

    allow_deprecated = spack.config.get("config:deprecated", False)
@@ -104,6 +760,51 @@ def concretize_specs_together(*abstract_specs, **kwargs):
    return [s.copy() for s in result.specs]

def _concretize_specs_together_original(*abstract_specs, **kwargs):
    abstract_specs = [spack.spec.Spec(s) for s in abstract_specs]
    tmpdir = tempfile.mkdtemp()
    builder = spack.repo.MockRepositoryBuilder(tmpdir)
    # Split recursive specs, as it seems the concretizer has issues
    # respecting conditions on dependents expressed like
    # depends_on('foo ^bar@1.0'), see issue #11160
    split_specs = [
        dep.copy(deps=False) for spec1 in abstract_specs for dep in spec1.traverse(root=True)
    ]
    builder.add_package(
        "concretizationroot", dependencies=[(str(x), None, None) for x in split_specs]
    )

    with spack.repo.use_repositories(builder.root, override=False):
        # Spec from a helper package that depends on all the abstract_specs
        concretization_root = spack.spec.Spec("concretizationroot")
        concretization_root.concretize(tests=kwargs.get("tests", False))
        # Retrieve the direct dependencies
        concrete_specs = [concretization_root[spec.name].copy() for spec in abstract_specs]

    return concrete_specs

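# --- Illustration (not part of the diff): a hedged usage sketch. Inside a
# Spack session, a set of abstract specs can be concretized jointly so they
# agree on shared dependencies; the spec names here are only examples.
import spack.concretize

specs = spack.concretize.concretize_specs_together("hdf5", "zlib")
for s in specs:
    print(s.format("{name}{@version}"))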
class NoCompilersForArchError(spack.error.SpackError):
    def __init__(self, arch, available_os_targets):
        err_msg = (
            "No compilers found"
            " for operating system %s and target %s."
            "\nIf previous installations have succeeded, the"
            " operating system may have been updated." % (arch.os, arch.target)
        )

        available_os_target_strs = list()
        for operating_system, t in available_os_targets:
            os_target_str = "%s-%s" % (operating_system, t) if t else operating_system
            available_os_target_strs.append(os_target_str)
        err_msg += (
            "\nCompilers are defined for the following"
            " operating systems and targets:\n\t" + "\n\t".join(available_os_target_strs)
        )

        super().__init__(err_msg, "Run 'spack compiler find' to add compilers.")


class UnavailableCompilerVersionError(spack.error.SpackError):
    """Raised when there is no available compiler that satisfies a
    compiler spec."""
@@ -119,3 +820,37 @@ def __init__(self, compiler_spec, arch=None):
            "'spack compilers' to see which compilers are already recognized"
            " by spack.",
        )


class NoValidVersionError(spack.error.SpackError):
    """Raised when there is no way to have a concrete version for a
    particular spec."""

    def __init__(self, spec):
        super().__init__(
            "There are no valid versions for %s that match '%s'" % (spec.name, spec.versions)
        )


class InsufficientArchitectureInfoError(spack.error.SpackError):
    """Raised when details on architecture cannot be collected from the
    system"""

    def __init__(self, spec, archs):
        super().__init__(
            "Cannot determine necessary architecture information for '%s': %s"
            % (spec.name, str(archs))
        )


class NoBuildError(spack.error.SpecError):
    """Raised when a package is configured with the buildable option False, but
    no satisfactory external versions can be found
    """

    def __init__(self, spec):
        msg = (
            "The spec\n '%s'\n is configured as not buildable, "
            "and no matching external installs were found"
        )
        super().__init__(msg % spec)

@@ -35,10 +35,11 @@
import os
import re
import sys
from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Type, Union

from llnl.util import filesystem, lang, tty

import spack.compilers
import spack.paths
import spack.platforms
import spack.schema
@@ -99,6 +100,7 @@
        "dirty": False,
        "build_jobs": min(16, cpus_available()),
        "build_stage": "$tempdir/spack-stage",
        "concretizer": "clingo",
        "license_dir": spack.paths.default_license_dir,
    }
}
@@ -115,39 +117,21 @@

class ConfigScope:
    def __init__(self, name: str) -> None:
        self.name = name
        self.writable = False
        self.sections = syaml.syaml_dict()
    """This class represents a configuration scope.

    def get_section_filename(self, section: str) -> str:
        raise NotImplementedError
    A scope is one directory containing named configuration files.
    Each file is a config "section" (e.g., mirrors, compilers, etc.).
    """

    def get_section(self, section: str) -> Optional[YamlConfigDict]:
        raise NotImplementedError

    def _write_section(self, section: str) -> None:
        raise NotImplementedError
    def __init__(self, name, path) -> None:
        self.name = name  # scope name.
        self.path = path  # path to directory containing configs.
        self.sections = syaml.syaml_dict()  # sections read from config files.

    @property
    def is_platform_dependent(self) -> bool:
        return False

    def clear(self) -> None:
        """Empty cached config information."""
        self.sections = syaml.syaml_dict()

    def __repr__(self) -> str:
        return f"<ConfigScope: {self.name}>"


class DirectoryConfigScope(ConfigScope):
    """Config scope backed by a directory containing one file per section."""

    def __init__(self, name: str, path: str, *, writable: bool = True) -> None:
        super().__init__(name)
        self.path = path
        self.writable = writable
        """Returns true if the scope name is platform specific"""
        return os.sep in self.name

    def get_section_filename(self, section: str) -> str:
        """Returns the filename associated with a given section"""
@@ -164,15 +148,14 @@ def get_section(self, section: str) -> Optional[YamlConfigDict]:
        return self.sections[section]

    def _write_section(self, section: str) -> None:
        if not self.writable:
            raise ConfigError(f"Cannot write to immutable scope {self}")

        filename = self.get_section_filename(section)
        data = self.get_section(section)
        if data is None:
            return

        validate(data, SECTION_SCHEMAS[section])
        # We copy data here to avoid adding defaults at write time
        validate_data = copy.deepcopy(data)
        validate(validate_data, SECTION_SCHEMAS[section])

        try:
            filesystem.mkdirp(self.path)
@@ -181,23 +164,19 @@ def _write_section(self, section: str) -> None:
        except (syaml.SpackYAMLError, OSError) as e:
            raise ConfigFileError(f"cannot write to '{filename}'") from e

    @property
    def is_platform_dependent(self) -> bool:
        """Returns true if the scope name is platform specific"""
        return "/" in self.name
    def clear(self) -> None:
        """Empty cached config information."""
        self.sections = syaml.syaml_dict()

    def __repr__(self) -> str:
        return f"<ConfigScope: {self.name}: {self.path}>"

class SingleFileScope(ConfigScope):
    """This class represents a configuration scope in a single YAML file."""

    def __init__(
        self,
        name: str,
        path: str,
        schema: YamlConfigDict,
        *,
        yaml_path: Optional[List[str]] = None,
        writable: bool = True,
        self, name: str, path: str, schema: YamlConfigDict, yaml_path: Optional[List[str]] = None
    ) -> None:
        """Similar to ``ConfigScope`` but can be embedded in another schema.

@@ -216,13 +195,15 @@ def __init__(
           config:
               install_tree: $spack/opt/spack
        """
        super().__init__(name)
        super().__init__(name, path)
        self._raw_data: Optional[YamlConfigDict] = None
        self.schema = schema
        self.path = path
        self.writable = writable
        self.yaml_path = yaml_path or []

    @property
    def is_platform_dependent(self) -> bool:
        return False

    def get_section_filename(self, section) -> str:
        return self.path

@@ -276,8 +257,6 @@ def get_section(self, section: str) -> Optional[YamlConfigDict]:
        return self.sections.get(section, None)

    def _write_section(self, section: str) -> None:
        if not self.writable:
            raise ConfigError(f"Cannot write to immutable scope {self}")
        data_to_write: Optional[YamlConfigDict] = self._raw_data

        # If there is no existing data, this section SingleFileScope has never
@@ -322,6 +301,19 @@ def __repr__(self) -> str:
        return f"<SingleFileScope: {self.name}: {self.path}>"

class ImmutableConfigScope(ConfigScope):
    """A configuration scope that cannot be written to.

    This is used for ConfigScopes passed on the command line.
    """

    def _write_section(self, section) -> None:
        raise ConfigError(f"Cannot write to immutable scope {self}")

    def __repr__(self) -> str:
        return f"<ImmutableConfigScope: {self.name}: {self.path}>"

class InternalConfigScope(ConfigScope):
    """An internal configuration scope that is not persisted to a file.

@@ -331,7 +323,7 @@ class InternalConfigScope(ConfigScope):
    """

    def __init__(self, name: str, data: Optional[YamlConfigDict] = None) -> None:
        super().__init__(name)
        super().__init__(name, None)
        self.sections = syaml.syaml_dict()

        if data is not None:
@@ -341,6 +333,9 @@ def __init__(self, name: str, data: Optional[YamlConfigDict] = None) -> None:
                validate({section: dsec}, SECTION_SCHEMAS[section])
                self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)

    def get_section_filename(self, section: str) -> str:
        raise NotImplementedError("Cannot get filename for InternalConfigScope.")

    def get_section(self, section: str) -> Optional[YamlConfigDict]:
        """Just reads from an internal dictionary."""
        if section not in self.sections:
@@ -445,21 +440,27 @@ def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
        return scope

    @property
    def writable_scopes(self) -> Generator[ConfigScope, None, None]:
        """Generator of writable scopes with an associated file."""
        return (s for s in self.scopes.values() if s.writable)
    def file_scopes(self) -> List[ConfigScope]:
        """List of writable scopes with an associated file."""
        return [
            s
            for s in self.scopes.values()
            if (type(s) is ConfigScope or type(s) is SingleFileScope)
        ]

    def highest_precedence_scope(self) -> ConfigScope:
        """Writable scope with highest precedence."""
        return next(s for s in reversed(self.scopes.values()) if s.writable)  # type: ignore
        """Non-internal scope with highest precedence."""
        return next(reversed(self.file_scopes))

    def highest_precedence_non_platform_scope(self) -> ConfigScope:
        """Writable non-platform scope with highest precedence"""
        return next(
            s
            for s in reversed(self.scopes.values())  # type: ignore
            if s.writable and not s.is_platform_dependent
        )
        """Non-internal non-platform scope with highest precedence

        Platform-specific scopes are of the form scope/platform"""
        generator = reversed(self.file_scopes)
        highest = next(generator)
        while highest and highest.is_platform_dependent:
            highest = next(generator)
        return highest

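# --- Illustration (not part of the diff): the precedence lookup reduced to
# plain dicts. Scopes are stored lowest-to-highest precedence, so the first
# hit when iterating in reverse wins. Reversing dict views needs Python 3.8+.
scopes = {"defaults": 0, "system": 1, "user": 2, "user/linux": 3}
highest = next(reversed(scopes))                                  # 'user/linux'
highest_non_platform = next(n for n in reversed(scopes) if "/" not in n)
print(highest, highest_non_platform)  # user/linux user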
    def matching_scopes(self, reg_expr) -> List[ConfigScope]:
        """
@@ -754,14 +755,13 @@ def override(

def _add_platform_scope(
    cfg: Union[Configuration, lang.Singleton], name: str, path: str, writable: bool = True
    cfg: Union[Configuration, lang.Singleton], scope_type: Type[ConfigScope], name: str, path: str
) -> None:
    """Add a platform-specific subdirectory for the current platform."""
    platform = spack.platforms.host().name
    scope = DirectoryConfigScope(
        f"{name}/{platform}", os.path.join(path, platform), writable=writable
    )
    cfg.push_scope(scope)
    plat_name = os.path.join(name, platform)
    plat_path = os.path.join(path, platform)
    cfg.push_scope(scope_type(plat_name, plat_path))

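# --- Illustration (not part of the diff): how the per-platform child scope
# is derived in either variant above -- the platform name is appended to both
# the scope name and the scope path. The values here are hypothetical.
import os

name, path, platform_name = "site", "/etc/spack", "linux"
print(os.path.join(name, platform_name), os.path.join(path, platform_name))
# site/linux /etc/spack/linux  (on POSIX)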
def config_paths_from_entry_points() -> List[Tuple[str, str]]:
@@ -792,27 +792,22 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
def _add_command_line_scopes(
    cfg: Union[Configuration, lang.Singleton], command_line_scopes: List[str]
) -> None:
    """Add additional scopes from the --config-scope argument, either envs or dirs."""
    import spack.environment.environment as env  # circular import
    """Add additional scopes from the --config-scope argument.

    Command line scopes are named after their position in the arg list.
    """
    for i, path in enumerate(command_line_scopes):
        name = f"cmd_scope_{i}"
        # We ensure that these scopes exist and are readable, as they are
        # provided on the command line by the user.
        if not os.path.isdir(path):
            raise ConfigError(f"config scope is not a directory: '{path}'")
        elif not os.access(path, os.R_OK):
            raise ConfigError(f"config scope is not readable: '{path}'")

        if env.exists(path):  # managed environment
            manifest = env.EnvironmentManifestFile(env.root(path))
        elif env.is_env_dir(path):  # anonymous environment
            manifest = env.EnvironmentManifestFile(path)
        elif os.path.isdir(path):  # directory with config files
            cfg.push_scope(DirectoryConfigScope(name, path, writable=False))
            _add_platform_scope(cfg, name, path, writable=False)
            continue
        else:
            raise ConfigError(f"Invalid configuration scope: {path}")

        for scope in manifest.env_config_scopes:
            scope.name = f"{name}:{scope.name}"
            scope.writable = False
            cfg.push_scope(scope)
        # name based on order on the command line
        name = f"cmd_scope_{i:d}"
        cfg.push_scope(ImmutableConfigScope(name, path))
        _add_platform_scope(cfg, ImmutableConfigScope, name, path)

def create() -> Configuration:
@@ -856,10 +851,10 @@ def create() -> Configuration:

    # add each scope and its platform-specific directory
    for name, path in configuration_paths:
        cfg.push_scope(DirectoryConfigScope(name, path))
        cfg.push_scope(ConfigScope(name, path))

        # Each scope can have per-platform overrides in subdirectories
        _add_platform_scope(cfg, name, path)
        _add_platform_scope(cfg, ConfigScope, name, path)

    # add command-line scopes
    _add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)
@@ -974,7 +969,7 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:
def add_default_platform_scope(platform: str) -> None:
    plat_name = os.path.join("defaults", platform)
    plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
    CONFIG.push_scope(DirectoryConfigScope(plat_name, plat_path))
    CONFIG.push_scope(ConfigScope(plat_name, plat_path))


def scopes() -> Dict[str, ConfigScope]:
@@ -983,10 +978,19 @@ def scopes() -> Dict[str, ConfigScope]:

def writable_scopes() -> List[ConfigScope]:
    """Return list of writable scopes. Higher-priority scopes come first in the list."""
    scopes = [x for x in CONFIG.scopes.values() if x.writable]
    scopes.reverse()
    return scopes
    """
    Return list of writable scopes. Higher-priority scopes come first in the
    list.
    """
    return list(
        reversed(
            list(
                x
                for x in CONFIG.scopes.values()
                if not isinstance(x, (InternalConfigScope, ImmutableConfigScope))
            )
        )
    )

def writable_scope_names() -> List[str]:
@@ -1076,8 +1080,11 @@ def validate(
    """
    import jsonschema

    # Validate a copy to avoid adding defaults
    # This allows us to round-trip data without adding to it.
    test_data = syaml.deepcopy(data)
    try:
        spack.schema.Validator(schema).validate(data)
        spack.schema.Validator(schema).validate(test_data)
    except jsonschema.ValidationError as e:
        if hasattr(e.instance, "lc"):
            line_number = e.instance.lc.line + 1
@@ -1086,7 +1093,7 @@ def validate(
        raise ConfigFormatError(e, data, filename, line_number) from e
    # return the validated data so that we can access the raw data
    # mostly relevant for environments
    return data
    return test_data

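# --- Illustration (not part of the diff), using the real jsonschema package:
# validating a deep copy keeps schemas that inject defaults from mutating the
# caller's data, which is the point of the syaml.deepcopy above.
import copy

import jsonschema

schema = {"type": "object", "properties": {"build_jobs": {"type": "integer"}}}
data = {"build_jobs": 8}

test_data = copy.deepcopy(data)   # any injected defaults land here only
jsonschema.validate(test_data, schema)
print(data == test_data, data is test_data)  # True False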
def read_config_file(
@@ -1592,7 +1599,7 @@ def _config_from(scopes_or_paths: List[Union[ConfigScope, str]]) -> Configuratio
        path = os.path.normpath(scope_or_path)
        assert os.path.isdir(path), f'"{path}" must be a directory'
        name = os.path.basename(path)
        scopes.append(DirectoryConfigScope(name, path))
        scopes.append(ConfigScope(name, path))

    configuration = Configuration(*scopes)
    return configuration

@@ -78,17 +78,24 @@
        "image": "quay.io/almalinuxorg/almalinux:8"
      }
    },
    "centos:stream9": {
    "centos:stream": {
      "bootstrap": {
        "template": "container/centos_stream9.dockerfile",
        "image": "quay.io/centos/centos:stream9"
        "template": "container/centos_stream.dockerfile",
        "image": "quay.io/centos/centos:stream"
      },
      "os_package_manager": "dnf_epel",
      "build": "spack/centos-stream9",
      "build": "spack/centos-stream",
      "final": {
        "image": "quay.io/centos/centos:stream9"
        "image": "quay.io/centos/centos:stream"
      }
    },
    "centos:7": {
      "bootstrap": {
        "template": "container/centos_7.dockerfile"
      },
      "os_package_manager": "yum",
      "build": "spack/centos7"
    },
    "opensuse/leap:15": {
      "bootstrap": {
        "template": "container/leap-15.dockerfile"

@@ -2,12 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from .common import (
    DetectedPackage,
    executable_prefix,
    set_virtuals_nonbuildable,
    update_configuration,
)
from .common import DetectedPackage, executable_prefix, update_configuration
from .path import by_path, executables_in_path
from .test import detection_tests

@@ -17,6 +12,5 @@
    "executables_in_path",
    "executable_prefix",
    "update_configuration",
    "set_virtuals_nonbuildable",
    "detection_tests",
]

@@ -136,10 +136,10 @@ def path_to_dict(search_paths: List[str]):
    # entry overrides later entries
    for search_path in reversed(search_paths):
        try:
            with os.scandir(search_path) as entries:
                path_to_lib.update(
                    {entry.path: entry.name for entry in entries if entry.is_file()}
                )
            for lib in os.listdir(search_path):
                lib_path = os.path.join(search_path, lib)
                if llnl.util.filesystem.is_readable_file(lib_path):
                    path_to_lib[lib_path] = lib
        except OSError as e:
            msg = f"cannot scan '{search_path}' for external software: {str(e)}"
            llnl.util.tty.debug(msg)
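# --- Illustration (not part of the diff): the os.scandir pattern from the
# new code. DirEntry.is_file() uses stat information cached during the
# directory scan, avoiding the per-entry stat that the os.listdir loop pays.
import os

path_to_lib = {}
with os.scandir("/usr/lib") as entries:  # any readable directory works here
    path_to_lib.update({e.path: e.name for e in entries if e.is_file()})
print(len(path_to_lib), "files indexed")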
@@ -252,27 +252,6 @@ def update_configuration(
    return all_new_specs


def set_virtuals_nonbuildable(virtuals: Set[str], scope: Optional[str] = None) -> List[str]:
    """Update packages:virtual:buildable:False for the provided virtual packages, if the property
    is not set by the user. Returns the list of virtual packages that have been updated."""
    packages = spack.config.get("packages")
    new_config = {}
    for virtual in virtuals:
        # If the user has set the buildable prop do not override it
        if virtual in packages and "buildable" in packages[virtual]:
            continue
        new_config[virtual] = {"buildable": False}

    # Update the provided scope
    spack.config.set(
        "packages",
        spack.config.merge_yaml(spack.config.get("packages", scope=scope), new_config),
        scope=scope,
    )

    return list(new_config.keys())


def _windows_drive() -> str:
    """Return Windows drive string extracted from the PROGRAMFILES environment variable,
    which is guaranteed to be defined for all logins.

@@ -12,7 +12,7 @@
import re
import sys
import warnings
from typing import Dict, Iterable, List, Optional, Set, Tuple, Type
from typing import Dict, List, Optional, Set, Tuple

import llnl.util.filesystem
import llnl.util.lang
@@ -187,7 +187,7 @@ def libraries_in_windows_paths(path_hints: Optional[List[str]] = None) -> Dict[s
    return path_to_dict(search_paths)


def _group_by_prefix(paths: List[str]) -> Dict[str, Set[str]]:
def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]:
    groups = collections.defaultdict(set)
    for p in paths:
        groups[os.path.dirname(p)].add(p)
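# --- Illustration (not part of the diff): _group_by_prefix in action --
# candidate file paths are bucketed by their containing directory, which
# later becomes the guess for an installation prefix.
import collections
import os

paths = {"/opt/foo/bin/foo", "/opt/foo/bin/foo-config", "/usr/bin/bar"}
groups = collections.defaultdict(set)
for p in paths:
    groups[os.path.dirname(p)].add(p)
print(dict(groups))
# {'/opt/foo/bin': {'/opt/foo/bin/foo', '/opt/foo/bin/foo-config'},
#  '/usr/bin': {'/usr/bin/bar'}}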
@@ -200,7 +200,7 @@ class Finder:
    def default_path_hints(self) -> List[str]:
        return []

    def search_patterns(self, *, pkg: Type["spack.package_base.PackageBase"]) -> List[str]:
    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
        """Returns the list of patterns used to match candidate files.

        Args:
@@ -226,7 +226,7 @@ def prefix_from_path(self, *, path: str) -> str:
        raise NotImplementedError("must be implemented by derived classes")

    def detect_specs(
        self, *, pkg: Type["spack.package_base.PackageBase"], paths: List[str]
        self, *, pkg: "spack.package_base.PackageBase", paths: List[str]
    ) -> List[DetectedPackage]:
        """Given a list of files matching the search patterns, returns a list of detected specs.

@@ -243,9 +243,7 @@ def detect_specs(
            return []

        result = []
        for candidate_path, items_in_prefix in _group_by_prefix(
            llnl.util.lang.dedupe(paths)
        ).items():
        for candidate_path, items_in_prefix in sorted(_group_by_prefix(set(paths)).items()):
            # TODO: multiple instances of a package can live in the same
            # prefix, and a package implementation can return multiple specs
            # for one prefix, but without additional details (e.g. about the
@@ -301,17 +299,19 @@ def detect_specs(
        return result

    def find(
        self, *, pkg_name: str, repository, initial_guess: Optional[List[str]] = None
        self, *, pkg_name: str, initial_guess: Optional[List[str]] = None
    ) -> List[DetectedPackage]:
        """For a given package, returns a list of detected specs.

        Args:
            pkg_name: package being detected
            repository: repository to retrieve the package
            initial_guess: initial list of paths to search from the caller. If None, default
                paths are searched. If this is an empty list, nothing will be searched.
            initial_guess: initial list of paths to search from the caller.
                If None, default paths are searched. If this
                is an empty list, nothing will be searched.
        """
        pkg_cls = repository.get_pkg_class(pkg_name)
        import spack.repo

        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        patterns = self.search_patterns(pkg=pkg_cls)
        if not patterns:
            return []
@@ -327,7 +327,7 @@ class ExecutablesFinder(Finder):
    def default_path_hints(self) -> List[str]:
        return spack.util.environment.get_path("PATH")

    def search_patterns(self, *, pkg: Type["spack.package_base.PackageBase"]) -> List[str]:
    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
        result = []
        if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
            result = pkg.platform_executables()
@@ -335,10 +335,13 @@ def search_patterns(self, *, pkg: Type["spack.package_base.PackageBase"]) -> Lis

    def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]:
        executables_by_path = executables_in_path(path_hints=paths)
        joined_pattern = re.compile(r"|".join(patterns))
        result = [path for path, exe in executables_by_path.items() if joined_pattern.search(exe)]
        result.sort()
        return result
        patterns = [re.compile(x) for x in patterns]
        result = []
        for compiled_re in patterns:
            for path, exe in executables_by_path.items():
                if compiled_re.search(exe):
                    result.append(path)
        return list(sorted(set(result)))

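# --- Illustration (not part of the diff): the two filtering strategies in
# this hunk select the same names; the joined alternation scans each
# candidate once, while the loop compiles and applies each pattern separately.
import re

patterns = [r"^gcc(-\d+)?$", r"^g\+\+(-\d+)?$"]
names = ["gcc", "gcc-12", "g++", "clang"]

joined = re.compile("|".join(patterns))
via_join = sorted(n for n in names if joined.search(n))
via_loop = sorted({n for p in map(re.compile, patterns) for n in names if p.search(n)})
print(via_join == via_loop, via_join)  # True ['g++', 'gcc', 'gcc-12']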
    def prefix_from_path(self, *, path: str) -> str:
        result = executable_prefix(path)
@@ -353,7 +356,7 @@ class LibrariesFinder(Finder):
    DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths
    """

    def search_patterns(self, *, pkg: Type["spack.package_base.PackageBase"]) -> List[str]:
    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
        result = []
        if hasattr(pkg, "libraries"):
            result = pkg.libraries
@@ -382,7 +385,7 @@ def prefix_from_path(self, *, path: str) -> str:


def by_path(
    packages_to_search: Iterable[str],
    packages_to_search: List[str],
    *,
    path_hints: Optional[List[str]] = None,
    max_workers: Optional[int] = None,
@@ -396,28 +399,19 @@ def by_path(
        path_hints: initial list of paths to be searched
        max_workers: maximum number of workers to search for packages in parallel
    """
    import spack.repo

    # TODO: Packages should be able to define both .libraries and .executables in the future
    # TODO: determine_spec_details should get all relevant libraries and executables in one call
    executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()
    detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}

    result = collections.defaultdict(list)
    repository = spack.repo.PATH.ensure_unwrapped()
    with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
        for pkg in packages_to_search:
            executable_future = executor.submit(
                executables_finder.find,
                pkg_name=pkg,
                initial_guess=path_hints,
                repository=repository,
                executables_finder.find, pkg_name=pkg, initial_guess=path_hints
            )
            library_future = executor.submit(
                libraries_finder.find,
                pkg_name=pkg,
                initial_guess=path_hints,
                repository=repository,
                libraries_finder.find, pkg_name=pkg, initial_guess=path_hints
            )
            detected_specs_by_package[pkg] = executable_future, library_future

@@ -81,17 +81,7 @@ class OpenMpi(Package):
]

#: These are variant names used by Spack internally; packages can't use them
reserved_names = [
    "arch",
    "architecture",
    "dev_path",
    "namespace",
    "operating_system",
    "os",
    "patches",
    "platform",
    "target",
]
reserved_names = ["patches", "dev_path"]

#: Names of possible directives. This list is mostly populated using the @directive decorator.
#: Some directives leverage others and in that case are not automatically added.
@@ -100,14 +90,14 @@ class OpenMpi(Package):
_patch_order_index = 0


SpecType = str
SpecType = Union["spack.spec.Spec", str]
DepType = Union[Tuple[str, ...], str]
WhenType = Optional[Union["spack.spec.Spec", str, bool]]
Patcher = Callable[[Union["spack.package_base.PackageBase", Dependency]], None]
PatchesType = Optional[Union[Patcher, str, List[Union[Patcher, str]]]]


SUPPORTED_LANGUAGES = ("fortran", "cxx", "c")
SUPPORTED_LANGUAGES = ("fortran", "cxx")


def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:
@@ -485,7 +475,7 @@ def _execute_version(pkg, ver, **kwargs):

def _depends_on(
    pkg: "spack.package_base.PackageBase",
    spec: "spack.spec.Spec",
    spec: SpecType,
    *,
    when: WhenType = None,
    type: DepType = dt.DEFAULT_TYPES,
@@ -495,10 +485,11 @@ def _depends_on(
    if not when_spec:
        return

    if not spec.name:
        raise DependencyError(f"Invalid dependency specification in package '{pkg.name}':", spec)
    if pkg.name == spec.name:
        raise CircularReferenceError(f"Package '{pkg.name}' cannot depend on itself.")
    dep_spec = spack.spec.Spec(spec)
    if not dep_spec.name:
        raise DependencyError("Invalid dependency specification in package '%s':" % pkg.name, spec)
    if pkg.name == dep_spec.name:
        raise CircularReferenceError("Package '%s' cannot depend on itself." % pkg.name)

    depflag = dt.canonicalize(type)

@@ -514,7 +505,7 @@ def _depends_on(
    # ensure `Spec.virtual` is a valid thing to call in a directive.
    # For now, we comment out the following check to allow for virtual packages
    # with package files.
    # if patches and spec.virtual:
    # if patches and dep_spec.virtual:
    #     raise DependencyPatchError("Cannot patch a virtual dependency.")

    # ensure patches is a list
@@ -529,13 +520,13 @@ def _depends_on(

    # this is where we actually add the dependency to this package
    deps_by_name = pkg.dependencies.setdefault(when_spec, {})
    dependency = deps_by_name.get(spec.name)
    dependency = deps_by_name.get(dep_spec.name)

    if not dependency:
        dependency = Dependency(pkg, spec, depflag=depflag)
        deps_by_name[spec.name] = dependency
        dependency = Dependency(pkg, dep_spec, depflag=depflag)
        deps_by_name[dep_spec.name] = dependency
    else:
        dependency.spec.constrain(spec, deps=False)
        dependency.spec.constrain(dep_spec, deps=False)
        dependency.depflag |= depflag

    # apply patches to the dependency
@@ -600,13 +591,12 @@ def depends_on(
    @see The section "Dependency specs" in the Spack Packaging Guide.

    """
    dep_spec = spack.spec.Spec(spec)
    if dep_spec.name in SUPPORTED_LANGUAGES:
    if spack.spec.Spec(spec).name in SUPPORTED_LANGUAGES:
        assert type == "build", "languages must be of 'build' type"
        return _language(lang_spec_str=spec, when=when)

    def _execute_depends_on(pkg: "spack.package_base.PackageBase"):
        _depends_on(pkg, dep_spec, when=when, type=type, patches=patches)
        _depends_on(pkg, spec, when=when, type=type, patches=patches)

    return _execute_depends_on

@@ -676,24 +666,25 @@ def extends(spec, when=None, type=("build", "run"), patches=None):

    keyword arguments can be passed to extends() so that extension
    packages can pass parameters to the extendee's extension
    mechanism."""
    mechanism.

    """

    def _execute_extends(pkg):
        when_spec = _make_when_spec(when)
        if not when_spec:
            return

        dep_spec = spack.spec.Spec(spec)

        _depends_on(pkg, dep_spec, when=when, type=type, patches=patches)
        _depends_on(pkg, spec, when=when, type=type, patches=patches)
        spec_obj = spack.spec.Spec(spec)

        # When extending python, also add a dependency on python-venv. This is done so that
        # Spack environment views are Python virtual environments.
        if dep_spec.name == "python" and not pkg.name == "python-venv":
            _depends_on(pkg, spack.spec.Spec("python-venv"), when=when, type=("build", "run"))
        if spec_obj.name == "python" and not pkg.name == "python-venv":
            _depends_on(pkg, "python-venv", when=when, type=("build", "run"))

        # TODO: the values of the extendees dictionary are not used. Remove in next refactor.
        pkg.extendees[dep_spec.name] = (dep_spec, None)
        pkg.extendees[spec_obj.name] = (spec_obj, None)

    return _execute_extends

@@ -59,7 +59,7 @@ def __init__(
        self.buildcache_flag = ""


class DepfileSpecVisitor:
class DepfileSpecVisitor(traverse.AbstractVisitor):
    """This visitor produces an adjacency list of a (reduced) DAG, which
    is used to generate depfile targets with their prerequisites. Currently
    it only drops build deps when using buildcache only mode.
@@ -75,17 +75,17 @@ def __init__(self, pkg_buildcache: UseBuildCache, deps_buildcache: UseBuildCache
        self.depflag_root = _deptypes(pkg_buildcache)
        self.depflag_deps = _deptypes(deps_buildcache)

    def neighbors(self, node):
    def neighbors(self, node: traverse.EdgeAndDepth) -> List[spack.spec.DependencySpec]:
        """Produce a list of specs to follow from node"""
        depflag = self.depflag_root if node.depth == 0 else self.depflag_deps
        return traverse.sort_edges(node.edge.spec.edges_to_dependencies(depflag=depflag))
        depflag = self.depflag_root if node[1] == 0 else self.depflag_deps
        return traverse.sort_edges(node[0].spec.edges_to_dependencies(depflag=depflag))

    def accept(self, node):
    def accept(self, node: traverse.EdgeAndDepth) -> bool:
        self.adjacency_list.append(
            DepfileNode(
                target=node.edge.spec,
                target=node[0].spec,
                prereqs=[edge.spec for edge in self.neighbors(node)],
                buildcache=self.pkg_buildcache if node.depth == 0 else self.deps_buildcache,
                buildcache=self.pkg_buildcache if node[1] == 0 else self.deps_buildcache,
            )
        )

@@ -5,7 +5,7 @@
|
||||
import collections
|
||||
import collections.abc
|
||||
import contextlib
|
||||
import errno
|
||||
import copy
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
@@ -24,15 +24,12 @@
|
||||
from llnl.util.link_tree import ConflictingSpecsError
|
||||
from llnl.util.symlink import readlink, symlink
|
||||
|
||||
import spack.caches
|
||||
import spack.cmd
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.fetch_strategy
|
||||
import spack.filesystem_view as fsv
|
||||
import spack.hash_types as ht
|
||||
import spack.hooks
|
||||
import spack.main
|
||||
@@ -55,6 +52,7 @@
|
||||
import spack.util.url
|
||||
import spack.version
|
||||
from spack import traverse
|
||||
from spack.filesystem_view import SimpleFilesystemView, inverse_view_func_parser, view_func_parser
|
||||
from spack.installer import PackageInstaller
|
||||
from spack.schema.env import TOP_LEVEL_KEY
|
||||
from spack.spec import Spec
|
||||
@@ -269,7 +267,9 @@ def root(name):
|
||||
|
||||
def exists(name):
|
||||
"""Whether an environment with this name exists or not."""
|
||||
return valid_env_name(name) and os.path.isdir(_root(name))
|
||||
if not valid_env_name(name):
|
||||
return False
|
||||
return os.path.isdir(root(name))
|
||||
|
||||
|
||||
def active(name):
|
||||
@@ -528,8 +528,8 @@ def _read_yaml(str_or_file):
|
||||
)
|
||||
|
||||
filename = getattr(str_or_file, "name", None)
|
||||
spack.config.validate(data, spack.schema.env.schema, filename)
|
||||
return data
|
||||
default_data = spack.config.validate(data, spack.schema.env.schema, filename)
|
||||
return data, default_data
|
||||
|
||||
|
||||
def _write_yaml(data, str_or_file):
|
||||
@@ -606,7 +606,7 @@ def __init__(
|
||||
self.projections = projections
|
||||
self.select = select
|
||||
self.exclude = exclude
|
||||
self.link_type = fsv.canonicalize_link_type(link_type)
|
||||
self.link_type = view_func_parser(link_type)
|
||||
self.link = link
|
||||
|
||||
def select_fn(self, spec):
|
||||
@@ -640,7 +640,7 @@ def to_dict(self):
|
||||
if self.exclude:
|
||||
ret["exclude"] = self.exclude
|
||||
if self.link_type:
|
||||
ret["link_type"] = self.link_type
|
||||
ret["link_type"] = inverse_view_func_parser(self.link_type)
|
||||
if self.link != default_view_link:
|
||||
ret["link"] = self.link
|
||||
return ret
|
||||
@@ -690,7 +690,7 @@ def get_projection_for_spec(self, spec):
|
||||
to exist on the filesystem."""
|
||||
return self._view(self.root).get_projection_for_spec(spec)
|
||||
|
||||
def view(self, new: Optional[str] = None) -> fsv.SimpleFilesystemView:
|
||||
def view(self, new: Optional[str] = None) -> SimpleFilesystemView:
|
||||
"""
|
||||
Returns a view object for the *underlying* view directory. This means that the
|
||||
self.root symlink is followed, and that the view has to exist on the filesystem
|
||||
@@ -710,14 +710,14 @@ def view(self, new: Optional[str] = None) -> fsv.SimpleFilesystemView:
|
||||
)
|
||||
return self._view(path)
|
||||
|
||||
def _view(self, root: str) -> fsv.SimpleFilesystemView:
|
||||
def _view(self, root: str) -> SimpleFilesystemView:
|
||||
"""Returns a view object for a given root dir."""
|
||||
return fsv.SimpleFilesystemView(
|
||||
return SimpleFilesystemView(
|
||||
root,
|
||||
spack.store.STORE.layout,
|
||||
ignore_conflicts=True,
|
||||
projections=self.projections,
|
||||
link_type=self.link_type,
|
||||
link=self.link_type,
|
||||
)
|
||||
|
||||
def __contains__(self, spec):
|
||||
@@ -789,23 +789,6 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
        root_dirname = os.path.dirname(self.root)
        tmp_symlink_name = os.path.join(root_dirname, "._view_link")

        # Remove self.root if it is an empty dir, since we need a symlink there. Note that rmdir
        # fails if self.root is a symlink.
        try:
            os.rmdir(self.root)
        except (FileNotFoundError, NotADirectoryError):
            pass
        except OSError as e:
            if e.errno == errno.ENOTEMPTY:
                msg = "it is a non-empty directory"
            elif e.errno == errno.EACCES:
                msg = "of insufficient permissions"
            else:
                raise
            raise SpackEnvironmentViewError(
                f"The environment view in {self.root} cannot be created because {msg}."
            ) from e

        # Create a new view
        try:
            fs.mkdirp(new_root)
@@ -937,7 +920,7 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
    def _load_manifest_file(self):
        """Instantiate and load the manifest file contents into memory."""
        with lk.ReadTransaction(self.txlock):
            self.manifest = EnvironmentManifestFile(self.path, self.name)
            self.manifest = EnvironmentManifestFile(self.path)
            with self.manifest.use_config():
                self._read()

@@ -974,25 +957,18 @@ def write_transaction(self):
        """Get a write lock context manager for use in a `with` block."""
        return lk.WriteTransaction(self.txlock, acquire=self._re_read)

    def _process_definition(self, entry):
    def _process_definition(self, item):
        """Process a single spec definition item."""
        when_string = entry.get("when")
        if when_string is not None:
            when = _eval_conditional(when_string)
            assert len([x for x in entry if x != "when"]) == 1
        else:
            when = True
            assert len(entry) == 1

        entry = copy.deepcopy(item)
        when = _eval_conditional(entry.pop("when", "True"))
        assert len(entry) == 1
        if when:
            for name, spec_list in entry.items():
                if name == "when":
                    continue
                user_specs = SpecList(name, spec_list, self.spec_lists.copy())
                if name in self.spec_lists:
                    self.spec_lists[name].extend(user_specs)
                else:
                    self.spec_lists[name] = user_specs
            name, spec_list = next(iter(entry.items()))
            user_specs = SpecList(name, spec_list, self.spec_lists.copy())
            if name in self.spec_lists:
                self.spec_lists[name].extend(user_specs)
            else:
                self.spec_lists[name] = user_specs

    def _process_view(self, env_view: Optional[Union[bool, str, Dict]]):
        """Process view option(s), which can be boolean, string, or None.
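A runnable toy version of the deepcopy-based `_process_definition` body shown above; `_eval_conditional` and `SpecList` are stubbed with plain Python here so the control flow can be exercised in isolation:

import copy

def _eval_conditional(expr):
    # Spack evaluates the "when" string against a much richer context.
    return eval(expr, {"__builtins__": {}}, {})

def process_definition(item, spec_lists):
    entry = copy.deepcopy(item)
    when = _eval_conditional(entry.pop("when", "True"))
    assert len(entry) == 1
    if when:
        name, spec_list = next(iter(entry.items()))
        # SpecList is replaced by a plain list in this sketch.
        spec_lists.setdefault(name, []).extend(spec_list)

lists = {}
process_definition({"compilers": ["gcc@12"], "when": "True"}, lists)
process_definition({"mpis": ["openmpi"], "when": "False"}, lists)
assert lists == {"compilers": ["gcc@12"]}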
@@ -1644,8 +1620,9 @@ def _concretize_separately(self, tests=False):
            i += 1

        # Ensure we don't try to bootstrap clingo in parallel
        with spack.bootstrap.ensure_bootstrap_configuration():
            spack.bootstrap.ensure_clingo_importable_or_raise()
        if spack.config.get("config:concretizer", "clingo") == "clingo":
            with spack.bootstrap.ensure_bootstrap_configuration():
                spack.bootstrap.ensure_clingo_importable_or_raise()

        # Ensure all the indexes have been built or updated, since
        # otherwise the processes in the pool may timeout on waiting
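The guard added above only bootstraps clingo when it is actually the selected concretizer. A small sketch of the idea with stand-in callables for the config lookup and the bootstrap step:

def maybe_bootstrap_clingo(config_get, bootstrap_clingo):
    # Only pay the bootstrap cost if the clingo concretizer is in use.
    if config_get("config:concretizer", "clingo") == "clingo":
        bootstrap_clingo()

calls = []
maybe_bootstrap_clingo(lambda key, default: "clingo", lambda: calls.append("clingo"))
maybe_bootstrap_clingo(lambda key, default: "original", lambda: calls.append("again"))
assert calls == ["clingo"]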
@@ -2496,21 +2473,27 @@ def _equiv_dict(first, second):
    return same_values and same_keys_with_same_overrides


def display_specs(specs):
    """Displays a list of specs traversed breadth-first, covering nodes, with install status.
def display_specs(concretized_specs):
    """Displays the list of specs returned by `Environment.concretize()`.

    Args:
        specs (list): list of specs
        concretized_specs (list): list of specs returned by
            `Environment.concretize()`
    """
    tree_string = spack.spec.tree(
        specs,
        format=spack.spec.DISPLAY_FORMAT,
        hashes=True,
        hashlen=7,
        status_fn=spack.spec.Spec.install_status,
        key=traverse.by_dag_hash,
    )
    print(tree_string)

    def _tree_to_display(spec):
        return spec.tree(
            recurse_dependencies=True,
            format=spack.spec.DISPLAY_FORMAT,
            status_fn=spack.spec.Spec.install_status,
            hashlen=7,
            hashes=True,
        )

    for user_spec, concrete_spec in concretized_specs:
        tty.msg("Concretized {0}".format(user_spec))
        sys.stdout.write(_tree_to_display(concrete_spec))
        print("")


def _concretize_from_constraints(spec_constraints, tests=False):
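The replacement `display_specs` prints one tree per (user spec, concrete spec) pair instead of a single deduplicated forest. A toy version of the per-spec loop, with `Spec.tree()` mocked by a plain string so the control flow is runnable on its own:

class FakeSpec:
    """Stand-in for spack.spec.Spec, just enough for the display loop."""
    def __init__(self, rendered_tree):
        self._tree = rendered_tree
    def tree(self, **kwargs):
        return self._tree

def display_specs(concretized_specs):
    for user_spec, concrete_spec in concretized_specs:
        print(f"==> Concretized {user_spec}")
        print(concrete_spec.tree(recurse_dependencies=True))

display_specs([("zlib", FakeSpec("zlib@1.3.1 /abcdef1"))])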
@@ -2564,7 +2547,7 @@ def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:

def make_repo_path(root):
    """Make a RepoPath from the repo subdirectories in an environment."""
    path = spack.repo.RepoPath(cache=spack.caches.MISC_CACHE)
    path = spack.repo.RepoPath()

    if os.path.isdir(root):
        for repo_root in os.listdir(root):
@@ -2573,7 +2556,7 @@ def make_repo_path(root):
            if not os.path.isdir(repo_root):
                continue

            repo = spack.repo.from_path(repo_root)
            repo = spack.repo.Repo(repo_root)
            path.put_last(repo)

    return path
@@ -2774,11 +2757,10 @@ def from_lockfile(manifest_dir: Union[pathlib.Path, str]) -> "EnvironmentManifes
        manifest.flush()
        return manifest

    def __init__(self, manifest_dir: Union[pathlib.Path, str], name: Optional[str] = None) -> None:
    def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
        self.manifest_dir = pathlib.Path(manifest_dir)
        self.name = name or str(manifest_dir)
        self.manifest_file = self.manifest_dir / manifest_name
        self.scope_name = f"env:{self.name}"
        self.scope_name = f"env:{environment_name(self.manifest_dir)}"
        self.config_stage_dir = os.path.join(env_subdir_path(manifest_dir), "config")

        #: Configuration scopes associated with this environment. Note that these are not
@@ -2790,8 +2772,12 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str], name: Optional[str] =
            raise SpackEnvironmentError(msg)

        with self.manifest_file.open() as f:
            self.yaml_content = _read_yaml(f)
            raw, with_defaults_added = _read_yaml(f)

        #: Pristine YAML content, without defaults being added
        self.pristine_yaml_content = raw
        #: YAML content with defaults added by Spack, if they're missing
        self.yaml_content = with_defaults_added
        self.changed = False

    def _all_matches(self, user_spec: str) -> List[str]:
@@ -2805,7 +2791,7 @@ def _all_matches(self, user_spec: str) -> List[str]:
            ValueError: if no equivalent match is found
        """
        result = []
        for yaml_spec_str in self.configuration["specs"]:
        for yaml_spec_str in self.pristine_configuration["specs"]:
            if Spec(yaml_spec_str) == Spec(user_spec):
                result.append(yaml_spec_str)

@@ -2820,6 +2806,7 @@ def add_user_spec(self, user_spec: str) -> None:
        Args:
            user_spec: user spec to be appended
        """
        self.pristine_configuration.setdefault("specs", []).append(user_spec)
        self.configuration.setdefault("specs", []).append(user_spec)
        self.changed = True

@@ -2834,6 +2821,7 @@ def remove_user_spec(self, user_spec: str) -> None:
        """
        try:
            for key in self._all_matches(user_spec):
                self.pristine_configuration["specs"].remove(key)
                self.configuration["specs"].remove(key)
        except ValueError as e:
            msg = f"cannot remove {user_spec} from {self}, no such spec exists"
@@ -2851,6 +2839,7 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
            SpackEnvironmentError: when the user spec cannot be overridden
        """
        try:
            self.pristine_configuration["specs"][idx] = user_spec
            self.configuration["specs"][idx] = user_spec
        except ValueError as e:
            msg = f"cannot override {user_spec} from {self}"
@@ -2863,10 +2852,10 @@ def set_include_concrete(self, include_concrete: List[str]) -> None:
        Args:
            include_concrete: list of already existing concrete environments to include
        """
        self.configuration[included_concrete_name] = []
        self.pristine_configuration[included_concrete_name] = []

        for env_path in include_concrete:
            self.configuration[included_concrete_name].append(env_path)
            self.pristine_configuration[included_concrete_name].append(env_path)

        self.changed = True

@@ -2880,13 +2869,14 @@ def add_definition(self, user_spec: str, list_name: str) -> None:
        Raises:
            SpackEnvironmentError: if no valid definition exists already
        """
        defs = self.configuration.get("definitions", [])
        defs = self.pristine_configuration.get("definitions", [])
        msg = f"cannot add {user_spec} to the '{list_name}' definition, no valid list exists"

        for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
            item[list_name].append(user_spec)
            break

        self.configuration["definitions"][idx][list_name].append(user_spec)
        self.changed = True

    def remove_definition(self, user_spec: str, list_name: str) -> None:
@@ -2900,7 +2890,7 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
            SpackEnvironmentError: if the user spec cannot be removed from the list,
                or the list does not exist
        """
        defs = self.configuration.get("definitions", [])
        defs = self.pristine_configuration.get("definitions", [])
        msg = (
            f"cannot remove {user_spec} from the '{list_name}' definition, "
            f"no valid list exists"
@@ -2913,6 +2903,7 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
            except ValueError:
                pass

        self.configuration["definitions"][idx][list_name].remove(user_spec)
        self.changed = True

    def override_definition(self, user_spec: str, *, override: str, list_name: str) -> None:
@@ -2927,7 +2918,7 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
        Raises:
            SpackEnvironmentError: if the user spec cannot be overridden
        """
        defs = self.configuration.get("definitions", [])
        defs = self.pristine_configuration.get("definitions", [])
        msg = f"cannot override {user_spec} with {override} in the '{list_name}' definition"

        for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
@@ -2938,6 +2929,7 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
            except ValueError:
                pass

        self.configuration["definitions"][idx][list_name][sub_index] = override
        self.changed = True

    def _iterate_on_definitions(self, definitions, *, list_name, err_msg):
@@ -2969,6 +2961,7 @@ def set_default_view(self, view: Union[bool, str, pathlib.Path, Dict[str, str]])
            True the default view is used for the environment, if False there's no view.
        """
        if isinstance(view, dict):
            self.pristine_configuration["view"][default_view_name].update(view)
            self.configuration["view"][default_view_name].update(view)
            self.changed = True
            return
@@ -2976,13 +2969,15 @@ def set_default_view(self, view: Union[bool, str, pathlib.Path, Dict[str, str]])
        if not isinstance(view, bool):
            view = str(view)

        self.pristine_configuration["view"] = view
        self.configuration["view"] = view
        self.changed = True

    def remove_default_view(self) -> None:
        """Removes the default view from the manifest file"""
        view_data = self.configuration.get("view")
        view_data = self.pristine_configuration.get("view")
        if isinstance(view_data, collections.abc.Mapping):
            self.pristine_configuration["view"].pop(default_view_name)
            self.configuration["view"].pop(default_view_name)
            self.changed = True
            return
@@ -2995,12 +2990,17 @@ def flush(self) -> None:
            return

        with fs.write_tmp_and_move(os.path.realpath(self.manifest_file)) as f:
            _write_yaml(self.yaml_content, f)
            _write_yaml(self.pristine_yaml_content, f)
        self.changed = False

    @property
    def configuration(self):
    def pristine_configuration(self):
        """Return the dictionaries in the pristine YAML, without the top level attribute"""
        return self.pristine_yaml_content[TOP_LEVEL_KEY]

    @property
    def configuration(self):
        """Return the dictionaries in the YAML, without the top level attribute"""
        return self.yaml_content[TOP_LEVEL_KEY]

    def __len__(self):
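The recurring pattern in this file: every mutator updates both the pristine copy and the defaults-added copy, and `flush` writes only the pristine one back to disk. A self-contained sketch of that invariant (ToyManifest is a stand-in, not Spack's class):

TOP_LEVEL_KEY = "spack"  # same top-level attribute the env schema uses

class ToyManifest:
    def __init__(self, raw, with_defaults):
        self.pristine_yaml_content = {TOP_LEVEL_KEY: raw}
        self.yaml_content = {TOP_LEVEL_KEY: with_defaults}

    @property
    def pristine_configuration(self):
        return self.pristine_yaml_content[TOP_LEVEL_KEY]

    @property
    def configuration(self):
        return self.yaml_content[TOP_LEVEL_KEY]

    def add_user_spec(self, user_spec):
        self.pristine_configuration.setdefault("specs", []).append(user_spec)
        self.configuration.setdefault("specs", []).append(user_spec)

m = ToyManifest(raw={"specs": []}, with_defaults={"specs": [], "view": True})
m.add_user_spec("hdf5")
assert m.pristine_configuration == {"specs": ["hdf5"]}       # what gets flushed
assert m.configuration == {"specs": ["hdf5"], "view": True}  # what Spack reads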
@@ -3032,11 +3032,12 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
            SpackEnvironmentError: if the manifest includes a remote file but
                no configuration stage directory has been identified
        """
        scopes: List[spack.config.ConfigScope] = []
        scopes = []

        # load config scopes added via 'include:', in reverse so that
        # highest-precedence scopes are last.
        includes = self[TOP_LEVEL_KEY].get("include", [])
        env_name = environment_name(self.manifest_dir)
        missing = []
        for i, config_path in enumerate(reversed(includes)):
            # allow paths to contain spack config/environment variables, etc.
@@ -3099,22 +3100,24 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:

            if os.path.isdir(config_path):
                # directories are treated as regular ConfigScopes
                config_name = f"env:{self.name}:{os.path.basename(config_path)}"
                tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
                scopes.append(spack.config.DirectoryConfigScope(config_name, config_path))
                config_name = "env:%s:%s" % (env_name, os.path.basename(config_path))
                tty.debug("Creating ConfigScope {0} for '{1}'".format(config_name, config_path))
                scope = spack.config.ConfigScope(config_name, config_path)
            elif os.path.exists(config_path):
                # files are assumed to be SingleFileScopes
                config_name = f"env:{self.name}:{config_path}"
                tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
                scopes.append(
                    spack.config.SingleFileScope(
                        config_name, config_path, spack.schema.merged.schema
                    )
                config_name = "env:%s:%s" % (env_name, config_path)
                tty.debug(
                    "Creating SingleFileScope {0} for '{1}'".format(config_name, config_path)
                )
                scope = spack.config.SingleFileScope(
                    config_name, config_path, spack.schema.merged.schema
                )
            else:
                missing.append(config_path)
                continue

            scopes.append(scope)

        if missing:
            msg = "Detected {0} missing include path(s):".format(len(missing))
            msg += "\n {0}".format("\n ".join(missing))
@@ -3131,10 +3134,7 @@ def env_config_scopes(self) -> List[spack.config.ConfigScope]:
        scopes: List[spack.config.ConfigScope] = [
            *self.included_config_scopes,
            spack.config.SingleFileScope(
                self.scope_name,
                str(self.manifest_file),
                spack.schema.env.schema,
                yaml_path=[TOP_LEVEL_KEY],
                self.scope_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
            ),
        ]
        ensure_no_disallowed_env_config_mods(scopes)

@@ -30,7 +30,6 @@
import shutil
import urllib.error
import urllib.parse
import urllib.request
from pathlib import PurePath
from typing import List, Optional

@@ -274,7 +273,10 @@ def __init__(self, url=None, checksum=None, **kwargs):
    @property
    def curl(self):
        if not self._curl:
            self._curl = web_util.require_curl()
            try:
                self._curl = which("curl", required=True)
            except CommandNotFoundError as exc:
                tty.error(str(exc))
        return self._curl

    def source_id(self):
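Both sides of the `curl` hunk implement the same lazy, cached lookup; only the failure mode differs. A stripped-down version of the caching property, with the executable search stubbed out:

class ToyFetcher:
    def __init__(self, finder):
        self._curl = None
        self._finder = finder  # stand-in for which("curl", required=True)

    @property
    def curl(self):
        if not self._curl:
            self._curl = self._finder()
        return self._curl

lookups = []
def find_curl():
    lookups.append(1)
    return "/usr/bin/curl"

f = ToyFetcher(find_curl)
assert f.curl == "/usr/bin/curl" and f.curl == "/usr/bin/curl"
assert len(lookups) == 1  # the search ran only once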
@@ -295,23 +297,27 @@ def candidate_urls(self):
    @_needs_stage
    def fetch(self):
        if self.archive_file:
            tty.debug(f"Already downloaded {self.archive_file}")
            tty.debug("Already downloaded {0}".format(self.archive_file))
            return

        errors: List[Exception] = []
        url = None
        errors = []
        for url in self.candidate_urls:
            if not web_util.url_exists(url):
                tty.debug("URL does not exist: " + url)
                continue

            try:
                self._fetch_from_url(url)
                break
            except FailedDownloadError as e:
                errors.extend(e.exceptions)
        else:
            raise FailedDownloadError(*errors)
                errors.append(str(e))

        for msg in errors:
            tty.debug(msg)

        if not self.archive_file:
            raise FailedDownloadError(
                RuntimeError(f"Missing archive {self.archive_file} after fetching")
            )
            raise FailedDownloadError(url)

    def _fetch_from_url(self, url):
        if spack.config.get("config:url_fetch_method") == "curl":
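The newer fetch loop uses Python's for/else to collect the underlying exceptions and re-raise them together once every candidate URL has failed. A runnable sketch of that pattern, with the network call stubbed and toy names throughout:

class ToyFailedDownloadError(Exception):
    def __init__(self, *exceptions):
        super().__init__("Failed to download")
        self.exceptions = exceptions

def fetch_first(urls, fetch_one):
    errors = []
    for url in urls:
        try:
            fetch_one(url)
            break
        except OSError as e:
            errors.append(e)
    else:  # no break: every candidate failed
        raise ToyFailedDownloadError(*errors)

def unreachable(url):
    raise OSError(f"cannot reach {url}")

try:
    fetch_first(["https://a.invalid", "https://b.invalid"], unreachable)
except ToyFailedDownloadError as e:
    assert len(e.exceptions) == 2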
@@ -330,20 +336,19 @@ def _check_headers(self, headers):
    @_needs_stage
    def _fetch_urllib(self, url):
        save_file = self.stage.save_filename
        tty.msg("Fetching {0}".format(url))

        request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})

        # Run urllib but grab the mime type from the http headers
        try:
            response = web_util.urlopen(request)
        except (TimeoutError, urllib.error.URLError) as e:
            url, headers, response = web_util.read_from_url(url)
        except web_util.SpackWebError as e:
            # clean up archive on failure.
            if self.archive_file:
                os.remove(self.archive_file)
            if os.path.lexists(save_file):
                os.remove(save_file)
            raise FailedDownloadError(e) from e

        tty.msg(f"Fetching {url}")
            msg = "urllib failed to fetch with error {0}".format(e)
            raise FailedDownloadError(url, msg)

        if os.path.lexists(save_file):
            os.remove(save_file)
@@ -351,7 +356,7 @@ def _fetch_urllib(self, url):
        with open(save_file, "wb") as _open_file:
            shutil.copyfileobj(response, _open_file)

        self._check_headers(str(response.headers))
        self._check_headers(str(headers))

    @_needs_stage
    def _fetch_curl(self, url):
@@ -360,7 +365,7 @@ def _fetch_curl(self, url):
        if self.stage.save_filename:
            save_file = self.stage.save_filename
            partial_file = self.stage.save_filename + ".part"
        tty.msg(f"Fetching {url}")
        tty.msg("Fetching {0}".format(url))
        if partial_file:
            save_args = [
                "-C",
@@ -400,8 +405,8 @@ def _fetch_curl(self, url):

        try:
            web_util.check_curl_code(curl.returncode)
        except spack.error.FetchError as e:
            raise FailedDownloadError(e) from e
        except spack.error.FetchError as err:
            raise spack.fetch_strategy.FailedDownloadError(url, str(err))

        self._check_headers(headers)

@@ -549,13 +554,13 @@ def fetch(self):

        try:
            response = self._urlopen(self.url)
        except (TimeoutError, urllib.error.URLError) as e:
        except urllib.error.URLError as e:
            # clean up archive on failure.
            if self.archive_file:
                os.remove(self.archive_file)
            if os.path.lexists(file):
                os.remove(file)
            raise FailedDownloadError(e) from e
            raise FailedDownloadError(self.url, f"Failed to fetch {self.url}: {e}") from e

        if os.path.lexists(file):
            os.remove(file)
@@ -1307,41 +1312,35 @@ def __init__(self, *args, **kwargs):
    @_needs_stage
    def fetch(self):
        if self.archive_file:
            tty.debug(f"Already downloaded {self.archive_file}")
            tty.debug("Already downloaded {0}".format(self.archive_file))
            return

        parsed_url = urllib.parse.urlparse(self.url)
        if parsed_url.scheme != "s3":
            raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")

        tty.debug("Fetching {0}".format(self.url))

        basename = os.path.basename(parsed_url.path)
        request = urllib.request.Request(
            self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
        )

        with working_dir(self.stage.path):
            try:
                response = web_util.urlopen(request)
            except (TimeoutError, urllib.error.URLError) as e:
                raise FailedDownloadError(e) from e

            tty.debug(f"Fetching {self.url}")
            _, headers, stream = web_util.read_from_url(self.url)

            with open(basename, "wb") as f:
                shutil.copyfileobj(response, f)
                shutil.copyfileobj(stream, f)

            content_type = web_util.get_header(response.headers, "Content-type")
            content_type = web_util.get_header(headers, "Content-type")

        if content_type == "text/html":
            warn_content_type_mismatch(self.archive_file or "the archive")

        if self.stage.save_filename:
            fs.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
            llnl.util.filesystem.rename(
                os.path.join(self.stage.path, basename), self.stage.save_filename
            )

        if not self.archive_file:
            raise FailedDownloadError(
                RuntimeError(f"Missing archive {self.archive_file} after fetching")
            )
            raise FailedDownloadError(self.url)


@fetcher
@@ -1367,23 +1366,17 @@ def fetch(self):
        if parsed_url.scheme != "gs":
            raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")

        tty.debug("Fetching {0}".format(self.url))

        basename = os.path.basename(parsed_url.path)
        request = urllib.request.Request(
            self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
        )

        with working_dir(self.stage.path):
            try:
                response = web_util.urlopen(request)
            except (TimeoutError, urllib.error.URLError) as e:
                raise FailedDownloadError(e) from e

            tty.debug(f"Fetching {self.url}")
            _, headers, stream = web_util.read_from_url(self.url)

            with open(basename, "wb") as f:
                shutil.copyfileobj(response, f)
                shutil.copyfileobj(stream, f)

            content_type = web_util.get_header(response.headers, "Content-type")
            content_type = web_util.get_header(headers, "Content-type")

        if content_type == "text/html":
            warn_content_type_mismatch(self.archive_file or "the archive")
@@ -1392,9 +1385,7 @@ def fetch(self):
            os.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)

        if not self.archive_file:
            raise FailedDownloadError(
                RuntimeError(f"Missing archive {self.archive_file} after fetching")
            )
            raise FailedDownloadError(self.url)


@fetcher
@@ -1731,9 +1722,9 @@ class NoCacheError(spack.error.FetchError):
class FailedDownloadError(spack.error.FetchError):
    """Raised when a download fails."""

    def __init__(self, *exceptions: Exception):
        super().__init__("Failed to download")
        self.exceptions = exceptions
    def __init__(self, url, msg=""):
        super().__init__("Failed to fetch file from URL: %s" % url, msg)
        self.url = url


class NoArchiveFileError(spack.error.FetchError):

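For comparison, toy versions of the two `FailedDownloadError` signatures in the hunk above: one wraps the underlying exceptions, the other carries the failing URL and an optional message (class names here are stand-ins):

class WrappingError(Exception):
    """New style: aggregate the underlying exceptions."""
    def __init__(self, *exceptions):
        super().__init__("Failed to download")
        self.exceptions = exceptions

class UrlError(Exception):
    """Old style: remember the URL (and an optional message)."""
    def __init__(self, url, msg=""):
        super().__init__("Failed to fetch file from URL: %s" % url, msg)
        self.url = url

assert WrappingError(ValueError("bad checksum")).exceptions[0].args == ("bad checksum",)
assert UrlError("https://example.com/a.tar.gz").url == "https://example.com/a.tar.gz"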